Flink SQL> INSERT INTO flink_cdc_sink_hudi_hive_wudl SELECT id, name,age,birthday, ts, DATE_FORMAT(birthday, 'yyyyMMdd') as part FROM source_mysql ;[INFO] Submitting SQL update statement to the cluster...
[INFO] SQL update statement has been successfully submitted to the cluster:
Job ID: 8a6e4869c43e57d57357c1767e7c2b38
4. 查看数据
5. 批处理 从hudi 表输出到 kafka
5.1 创建hudi 表
Flink SQL> CREATE TABLE hudi_flink_kafka_source (>id bigint ,
> name string,
> age int,
> birthday TIMESTAMP(3),
> ts TIMESTAMP(3),
> part STRING,
> primary key(id) not enforced
>)> PARTITIONED BY (part)> WITH (>'connector'='hudi',
>'path'='hdfs://192.168.1.161:8020/flink_cdc_sink_hudi_hive20220905',
>'table.type'='MERGE_ON_READ',
>'write.operation'='upsert',
>'hoodie.datasource.write.recordkey.field'='id',
>'write.precombine.field'='ts',
>'write.tasks'='1',
>'compaction.tasks'='1',
>'compaction.async.enabled'='true',
>'compaction.trigger.strategy'='num_commits',
>'compaction.delta_commits'='1'>);>
5.2 创建kafka 表
Flink SQL> CREATE TABLE kakfa_sink6 (>id bigint ,
> name string,
> age int,
> birthday TIMESTAMP(3),
> ts TIMESTAMP(3)>) WITH (>'connector'='kafka',
>'topic'='wudl2022flink03',
>'properties.bootstrap.servers'='192.168.1.161:6667',
>'properties.group.id'='wudl20220905',
>'format'='json',
>'json.fail-on-missing-field'='false',
>'json.ignore-parse-errors'='true'>);[INFO] Execute statement succeed.
Flink SQL> INSERT INTO kakfa_sink6 SELECT id, name,age,birthday, ts FROM hudi_flink_kafka_source ;[INFO] Submitting SQL update statement to the cluster...
[INFO] SQL update statement has been successfully submitted to the cluster:
Job ID: 005ee1b8011319d235c6485c2abb3efb
6. 查看表结构数据
7. 时间转化函数
7.1 flink sql LOCALTIMESTAMP 获取系统时间
Flink SQL>select DATE_FORMAT(LOCALTIMESTAMP, 'yyyy-MM-dd HH:mm:ss');
+----+--------------------------------+
|op| EXPR$0|
+----+--------------------------------+
| +I |2022-09-05 19:19:42 |
+----+--------------------------------+
Received a total of 1 row
# TO_TIMESTAMP 时间的转化
Flink SQL>
Flink SQL>select TO_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, 'yyyy-MM-dd HH:mm:ss'));
+----+-------------------------+
|op| EXPR$0|
+----+-------------------------+
| +I |2022-09-05 19:20:30.000 |
+----+-------------------------+
Received a total of 1 row
Flink SQL>