[FLINK-23099] Update table-walkthrough playground for Flink 1.13
diff --git a/README.md b/README.md
index 5de61a9..46ccf94 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@
 Flink job. The playground is presented in detail in
 ["Flink Operations Playground"](https://ci.apache.org/projects/flink/flink-docs-release-1.13/docs/try-flink/flink-operations-playground.html), which is part of the _Try Flink_ section of the Flink documentation.
 
-* The **Table Walkthrough** (in the `table-walkthrough` folder) shows to use the Table API to build an analytics pipeline that reads streaming data from Kafka and writes results to MySQL, along with a real-time dashboard in Grafana. The walkthrough is presented in detail in ["Real Time Reporting with the Table API"](https://ci.apache.org/projects/flink/flink-docs-release-1.11/try-flink/table_api.html), which is part of the _Try Flink_ section of the Flink documentation.
+* The **Table Walkthrough** (in the `table-walkthrough` folder) shows how to use the Table API to build an analytics pipeline that reads streaming data from Kafka and writes results to MySQL, along with a real-time dashboard in Grafana. The walkthrough is presented in detail in ["Real Time Reporting with the Table API"](https://ci.apache.org/projects/flink/flink-docs-release-1.13/docs/try-flink/table_api.html), which is part of the _Try Flink_ section of the Flink documentation.
 
 * The **PyFlink Walkthrough** (in the `pyflink-walkthrough` folder) provides a complete example that uses the Python API, and guides you through the steps needed to run and manage Pyflink Jobs. The pipeline used in this walkthrough reads data from Kafka, performs aggregations, and writes results to Elasticsearch that are visualized with Kibana. This walkthrough is presented in detail in the [pyflink-walkthrough README](pyflink-walkthrough).
 
diff --git a/docker/data-generator/src/main/java/org/apache/flink/playground/datagen/model/TransactionSerializer.java b/docker/data-generator/src/main/java/org/apache/flink/playground/datagen/model/TransactionSerializer.java
index aa6a7d3..16f4c0a 100644
--- a/docker/data-generator/src/main/java/org/apache/flink/playground/datagen/model/TransactionSerializer.java
+++ b/docker/data-generator/src/main/java/org/apache/flink/playground/datagen/model/TransactionSerializer.java
@@ -35,7 +35,8 @@
   public byte[] serialize(String s, Transaction transaction) {
     String csv =
         String.format(
-            "%s, %s, %s",
+            // Avoiding spaces here to work around FLINK-23073
+            "%s,%s,%s",
             transaction.accountId, transaction.amount, transaction.timestamp.format(formatter));
 
     return csv.getBytes();
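
Context for the format change (not part of the patch): on Flink 1.13 the csv format apparently no longer copes with the leading spaces in the old ", "-separated records (see FLINK-23073), so the generator now emits plain comma-separated values. Below is a minimal sketch of the consuming side, mirroring the Kafka source table that the walkthrough's job registers; the wrapper class name is made up, and the DDL is reproduced from the walkthrough documentation only for illustration.

    // Sketch only, assuming the Flink 1.13 Table API; the class name is hypothetical.
    import org.apache.flink.table.api.EnvironmentSettings;
    import org.apache.flink.table.api.TableEnvironment;

    public class TransactionsSourceSketch {
      public static void main(String[] args) {
        TableEnvironment tEnv =
            TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());

        // The 'csv' format splits records on ',' directly: a line such as
        // "5,167,2021-06-01 12:00:00" maps onto the three columns below, whereas
        // the previous ", "-padded records tripped over FLINK-23073.
        tEnv.executeSql(
            "CREATE TABLE transactions (\n"
                + "    account_id  BIGINT,\n"
                + "    amount      BIGINT,\n"
                + "    transaction_time TIMESTAMP(3),\n"
                + "    WATERMARK FOR transaction_time AS transaction_time - INTERVAL '5' SECOND\n"
                + ") WITH (\n"
                + "    'connector' = 'kafka',\n"
                + "    'topic'     = 'transactions',\n"
                + "    'properties.bootstrap.servers' = 'kafka:9092',\n"
                + "    'format'    = 'csv'\n"
                + ")");
      }
    }
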
diff --git a/table-walkthrough/Dockerfile b/table-walkthrough/Dockerfile
index ee223d6..781cb56 100644
--- a/table-walkthrough/Dockerfile
+++ b/table-walkthrough/Dockerfile
@@ -22,12 +22,12 @@
 COPY ./src /opt/src
 RUN cd /opt; mvn clean install -Dmaven.test.skip
 
-FROM apache/flink:1.12.1-scala_2.11-java8
+FROM apache/flink:1.13.1-scala_2.12-java8
 
 # Download connector libraries
-RUN wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-kafka_2.11/1.12.1/flink-sql-connector-kafka_2.11-1.12.1.jar; \
-    wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-connector-jdbc_2.11/1.12.1/flink-connector-jdbc_2.11-1.12.1.jar; \
-    wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-csv/1.12.1/flink-csv-1.12.1.jar; \
+RUN wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-kafka_2.12/1.13.1/flink-sql-connector-kafka_2.12-1.13.1.jar; \
+    wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-connector-jdbc_2.12/1.13.1/flink-connector-jdbc_2.12-1.13.1.jar; \
+    wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-csv/1.13.1/flink-csv-1.13.1.jar; \
     wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/mysql/mysql-connector-java/8.0.19/mysql-connector-java-8.0.19.jar;
 
 COPY --from=builder /opt/target/spend-report-*.jar /opt/flink/usrlib/spend-report.jar
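
A quick, optional sanity check before building (a sketch, assuming curl is available locally; the URLs are exactly the ones added above):

    # Probe the bumped connector artifacts on Maven Central with HEAD requests;
    # curl -f fails on HTTP errors, -s is silent, -I asks for headers only.
    for url in \
      https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-kafka_2.12/1.13.1/flink-sql-connector-kafka_2.12-1.13.1.jar \
      https://repo.maven.apache.org/maven2/org/apache/flink/flink-connector-jdbc_2.12/1.13.1/flink-connector-jdbc_2.12-1.13.1.jar \
      https://repo.maven.apache.org/maven2/org/apache/flink/flink-csv/1.13.1/flink-csv-1.13.1.jar; do
      curl -fsI "$url" > /dev/null && echo "OK       $url" || echo "MISSING  $url"
    done
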
diff --git a/table-walkthrough/docker-compose.yml b/table-walkthrough/docker-compose.yml
index 65be5ea..b4b641d 100644
--- a/table-walkthrough/docker-compose.yml
+++ b/table-walkthrough/docker-compose.yml
@@ -19,7 +19,7 @@
 version: '2.1'
 services:
   jobmanager:
-    image: apache/flink-table-walkthrough:1-FLINK-1.12-scala_2.11
+    image: apache/flink-table-walkthrough:1-FLINK-1.13-scala_2.12
     build: .
     hostname: "jobmanager"
     expose:
@@ -33,7 +33,7 @@
       - kafka
       - mysql
   taskmanager:
-    image: apache/flink-table-walkthrough:1-FLINK-1.12-scala_2.11
+    image: apache/flink-table-walkthrough:1-FLINK-1.13-scala_2.12
     build: .
     expose:
       - "6121"
@@ -80,7 +80,7 @@
       - ../docker/mysql-spend-report-init:/docker-entrypoint-initdb.d
       - ./data:/data
   grafana:
-    image: grafana/grafana
+    image: grafana/grafana:7.5.8
     ports:
       - "3000:3000"
     depends_on:
diff --git a/table-walkthrough/pom.xml b/table-walkthrough/pom.xml
index 529d5e9..9c95337 100644
--- a/table-walkthrough/pom.xml
+++ b/table-walkthrough/pom.xml
@@ -30,9 +30,9 @@
 
 	<properties>
 		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-		<flink.version>1.12.1</flink.version>
+		<flink.version>1.13.1</flink.version>
 		<java.version>1.8</java.version>
-		<scala.binary.version>2.11</scala.binary.version>
+		<scala.binary.version>2.12</scala.binary.version>
 		<maven.compiler.source>${java.version}</maven.compiler.source>
 		<maven.compiler.target>${java.version}</maven.compiler.target>
     </properties>
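
To pick up the version bump end to end, the playground can be rebuilt and restarted with the usual docker-compose workflow; a sketch, run from the table-walkthrough directory:

    # Rebuild the job image against Flink 1.13.1 / Scala 2.12 and restart the stack.
    docker-compose build
    docker-compose up -d

    # Containers should come up healthy; Grafana is published on localhost:3000
    # per the compose file above.
    docker-compose ps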