Introduction to Flink Learning 5 — DataStream API II

1. Union operator
[figure: union operator diagram — omitted]

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * union算子,可以实现多个流之间的合并,也可以实现自己和自己合并
 */
/**
 * Demonstrates the union operator: it merges any number of streams that share
 * the same element type into one stream, and a stream may even be unioned
 * with itself.
 */
public class TestUnion {

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Two socket text sources with the same element type (String).
        final DataStreamSource<String> leftSource = env.socketTextStream("localhost", 9633);
        final DataStreamSource<String> rightSource = env.socketTextStream("localhost", 9634);

        // union requires identical element types; the result is one merged stream.
        leftSource.union(rightSource).print();
        // A stream can also be unioned with itself:
        //leftSource.union(leftSource).print();

        env.execute();
    }
}

2. Connect operator
[figure: connect operator diagram — omitted]

import com.lxf.model.Student;
import com.lxf.source.TestSourceFun;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;


/**
 * Demonstrates the connect operator.
 *
 * Unlike union (which merges streams of identical element types), connect
 * joins exactly two streams whose element types may differ, and the two
 * connected streams may share state.
 */
public class TestConnect {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<Student> stream1 = env.addSource(new TestSourceFun());
        DataStreamSource<Student> stream2 = env.addSource(new TestSourceFun());

        // Re-shape the second stream into Tuple2 so the two connected streams
        // carry different element types, which union could not handle.
        SingleOutputStreamOperator<Tuple2<String, Student>> stream3 =
                stream2.map(new MapFunction<Student, Tuple2<String, Student>>() {
                    @Override
                    public Tuple2<String, Student> map(Student student) throws Exception {
                        return Tuple2.of("flink", student);
                    }
                });

        /*
         * union can merge multiple streams, but their data structures must be
         * identical. connect merges exactly two streams whose data structures
         * may differ, which is more flexible, and it allows the two streams
         * to share state.
         */
        ConnectedStreams<Student, Tuple2<String, Student>> connect = stream1.connect(stream3);

        // CoMapFunction<IN1, IN2, OUT>: map1 handles elements of the first
        // stream, map2 handles elements of the second; both emit OUT.
        connect.map(new CoMapFunction<Student, Tuple2<String, Student>, String>() {

            // Logic for the first stream.
            @Override
            public String map1(Student student) throws Exception {
                return student.toString();
            }

            // Logic for the second stream.
            @Override
            public String map2(Tuple2<String, Student> value) throws Exception {
                return value.f0 + "===>" +value.f1.toString();
            }
        }).print();

        env.execute();
    }
}

3. CoMap operator
[figure: CoMap operator diagram — omitted]


import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;

/**
 * Demonstrates CoMapFunction, which turns a ConnectedStreams of two
 * differently-typed streams back into a single DataStream.
 */
public class TestCoMapFun {

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        final DataStreamSource<String> textStream = env.socketTextStream("localhost", 9633);
        final DataStreamSource<String> rawNumbers = env.socketTextStream("localhost", 9634);

        // Parse the second source into integers so the two streams differ in element type.
        final SingleOutputStreamOperator<Integer> numberStream =
                rawNumbers.map(new MapFunction<String, Integer>() {
                    @Override
                    public Integer map(String value) throws Exception {
                        return Integer.parseInt(value);
                    }
                });

        // CoMapFunction<IN1, IN2, OUT>: the first type parameter is the first
        // stream's element type, the second is the second stream's, and the
        // third is the common output type.
        textStream.connect(numberStream).map(new CoMapFunction<String, Integer, String>() {

            @Override
            public String map1(String value) throws Exception {
                return value.toUpperCase();
            }

            @Override
            public String map2(Integer value) throws Exception {
                return value * 10+ "";
            }
        }).print();

        env.execute();
    }
}

4. CoFlatMap operator

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoFlatMapFunction;
import org.apache.flink.util.Collector;

/**
 * Demonstrates CoFlatMapFunction: like CoMapFunction it collapses a
 * ConnectedStreams into one DataStream, but each input element may emit
 * zero or more output elements via the Collector.
 */
public class TestCoFlatMapFun {

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Bounded demo sources: one comma-delimited record, one pipe-delimited record.
        final DataStreamSource<String> commaStream = env.fromElements("1,2,3");
        final DataStreamSource<String> pipeStream = env.fromElements("a|b|c");

        commaStream.connect(pipeStream).flatMap(new CoFlatMapFunction<String, String, String>() {

            // Splits records from the first stream on commas.
            @Override
            public void flatMap1(String value, Collector<String> out) throws Exception {
                for (String token : value.split(",")) {
                    out.collect(token);
                }
            }

            // Splits records from the second stream on '|' (escaped — it is a regex metacharacter).
            @Override
            public void flatMap2(String value, Collector<String> out) throws Exception {
                for (String token : value.split("\\|")) {
                    out.collect(token);
                }
            }
        }).print();

        env.execute();
    }
}

Recommended reading

Origin blog.csdn.net/qq_40342691/article/details/127620331