Skip to content

Commit

Permalink
[hotfix][doc] Refine code examples in data_stream_api
Browse files · Browse the repository at this point in the history
  • Loading branch information
liuml07 authored and Myasuka committed Oct 28, 2022
1 parent bb9f252 commit 35c5f67
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 4 deletions.
9 changes: 7 additions & 2 deletions docs/content.zh/docs/dev/table/data_stream_api.md
Original file line number Diff line number Diff line change
Expand Up @@ -975,6 +975,7 @@ import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;
import java.time.LocalDateTime;

// setup DataStream API
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
Expand Down Expand Up @@ -1716,7 +1717,7 @@ table.printSchema();

// data types can be extracted reflectively as above or explicitly defined

Table table3 = tableEnv
Table table = tableEnv
.fromDataStream(
dataStream,
Schema.newBuilder()
Expand Down Expand Up @@ -1758,6 +1759,7 @@ The following code shows how to use `createTemporaryView` for different scenario
```java
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.table.api.Schema;

// create some DataStream
DataStream<Tuple2<Long, String>> dataStream = env.fromElements(
Expand Down Expand Up @@ -2016,6 +2018,8 @@ DataStream<Row> dataStream = tableEnv.toDataStream(table);

DataStream<User> dataStream = tableEnv.toDataStream(table, User.class);

// === EXAMPLE 3 ===

// data types can be extracted reflectively as above or explicitly defined

DataStream<User> dataStream =
Expand Down Expand Up @@ -2071,6 +2075,8 @@ val dataStream: DataStream[Row] = tableEnv.toDataStream(table)

val dataStream: DataStream[User] = tableEnv.toDataStream(table, classOf[User])

// === EXAMPLE 3 ===

// data types can be extracted reflectively as above or explicitly defined

val dataStream: DataStream[User] =
Expand Down Expand Up @@ -3058,7 +3064,6 @@ Afterward, the type information semantics of the DataStream API need to be consi

{{< top >}}


Legacy Conversion
-----------------

Expand Down
10 changes: 8 additions & 2 deletions docs/content/docs/dev/table/data_stream_api.md
Original file line number Diff line number Diff line change
Expand Up @@ -973,6 +973,7 @@ import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;
import java.time.LocalDateTime;

// setup DataStream API
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
Expand Down Expand Up @@ -1714,7 +1715,7 @@ table.printSchema();

// data types can be extracted reflectively as above or explicitly defined

Table table3 = tableEnv
Table table = tableEnv
.fromDataStream(
dataStream,
Schema.newBuilder()
Expand Down Expand Up @@ -1756,6 +1757,7 @@ The following code shows how to use `createTemporaryView` for different scenario
```java
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.table.api.Schema;

// create some DataStream
DataStream<Tuple2<Long, String>> dataStream = env.fromElements(
Expand Down Expand Up @@ -2014,6 +2016,8 @@ DataStream<Row> dataStream = tableEnv.toDataStream(table);

DataStream<User> dataStream = tableEnv.toDataStream(table, User.class);

// === EXAMPLE 3 ===

// data types can be extracted reflectively as above or explicitly defined

DataStream<User> dataStream =
Expand Down Expand Up @@ -2069,6 +2073,8 @@ val dataStream: DataStream[Row] = tableEnv.toDataStream(table)

val dataStream: DataStream[User] = tableEnv.toDataStream(table, classOf[User])

// === EXAMPLE 3 ===

// data types can be extracted reflectively as above or explicitly defined

val dataStream: DataStream[User] =
Expand Down Expand Up @@ -2892,7 +2898,7 @@ from pyflink.common import Encoder
from pyflink.datastream import StreamExecutionEnvironment
from pyflink.datastream.connectors.file_system import FileSink
from pyflink.table import StreamTableEnvironment, TableDescriptor, Schema, DataTypes

env = StreamExecutionEnvironment.get_execution_environment()
table_env = StreamTableEnvironment.create(env)

Expand Down

0 comments on commit 35c5f67

Please sign in to comment.