Slide 18: KafkaのデータをFlinkSQLで処理 (Processing Kafka data with Flink SQL)
-- Source: Kafka topic carrying Confluent-Avro-encoded rows (CDC from MySQL,
-- per the INSERT below which labels events 'mysql' / 'cdc').
CREATE TABLE source_table (
    id BIGINT,
    name STRING,
    created_at BIGINT,  -- UNIX epoch seconds (converted via FROM_UNIXTIME downstream)
    updated_at BIGINT   -- UNIX epoch seconds
)
WITH (
    'connector' = 'kafka',
    'topic' = 'topic',
    -- Flink WITH options must be string literals on BOTH sides;
    -- the slide's unquoted `value.format = avro-confluent` does not parse.
    'value.format' = 'avro-confluent'
    -- ... further connector options (elided on the slide)
);
-- Sink: Pub/Sub topic receiving the JSON-encoded CDC event envelope.
CREATE TABLE sink_table (
    `time` BIGINT,    -- emission time in UNIX seconds; backticked: TIME is a keyword
    `source` STRING,  -- origin system label (e.g. 'mysql'); backticked defensively,
                      -- consistent with `time` / `index`
    `index` STRING,   -- logical destination label; backticked: INDEX is a keyword
    event ROW<        -- nested CDC payload
        event_name STRING,
        id STRING,
        name STRING,
        created_at TIMESTAMP,
        updated_at TIMESTAMP
    >
)
WITH (
    'connector' = 'pubsub',
    'format' = 'json'
    -- ... further connector options (elided on the slide)
);
-- Wrap each source row in a CDC event envelope and publish it to the sink.
-- Columns are matched to sink_table by position: time, source, index, event.
INSERT INTO sink_table
SELECT
    UNIX_TIMESTAMP(),  -- current time, UNIX seconds -> `time`
    'mysql',           -- origin system            -> `source`
    'main',            -- destination label        -> `index`
    -- Explicit ROW(...) constructor for the `event` field; the slide's bare
    -- parenthesized list is Flink's implicit form and is easy to misread.
    ROW(
        'cdc',
        CAST(id AS STRING),
        name,
        TO_TIMESTAMP(FROM_UNIXTIME(created_at)),  -- epoch seconds -> TIMESTAMP
        TO_TIMESTAMP(FROM_UNIXTIME(updated_at))
    )
FROM source_table;