# Cluster node setup notes: component sources, hosts file, ZooKeeper dirs,
# and environment variables for a Hadoop/Flink/HBase/Kafka stack.
#
# Component download pages:
#   Scala 2.12        https://www.scala-lang.org/download/
#   Flink 1.10.0      https://flink.apache.org/downloads.html#apache-flink-1100
#   Hive 3.1.2        https://hive.apache.org/downloads.html
#   Hadoop 3.2.1      https://hadoop.apache.org/releases.html
#   HBase 2.2.3       https://hbase.apache.org/downloads.html
#   ZooKeeper 3.6.0   https://www.apache.org/dyn/closer.cgi/zookeeper/
#   Kafka (Scala 2.13 build)  http://kafka.apache.org/downloads
#
# Expected hosts entries (cat /root/hosts):
#   127.0.0.1     localhost localhost.localdomain localhost4 localhost4.localdomain4
#   ::1           localhost localhost.localdomain localhost6 localhost6.localdomain6
#   192.168.0.180 node180
#   192.168.0.181 node181
#   192.168.0.182 node182

# ZooKeeper data/log directories; -p creates parents and is idempotent
# (the original plain mkdir failed when re-run).
mkdir -p /tmp/zookeeper/data /tmp/zookeeper/log
cd /tmp/zookeeper/data
touch myid   # write this node's numeric server id into myid afterwards

# JAVA_HOME
export JAVA_HOME=/opt/module/jdk1.8.0_161
export PATH=$PATH:$JAVA_HOME/bin

#zookeeper
export ZOOKEEPER_INSTALL=/opt/module/apache-zookeeper-3.6.0
export PATH=$PATH:$ZOOKEEPER_INSTALL/bin

#flink
export FLINK_HOME=/opt/module/flink-1.10.0
export PATH=$PATH:$FLINK_HOME/bin

#hbase
export HBASE_HOME=/opt/module/hbase-2.2.3
export PATH=$HBASE_HOME/bin:$PATH

#hive
export HIVE_HOME=/opt/module/apache-hive-3.1.2
export PATH=$HIVE_HOME/bin:$PATH

#sqoop
export SQOOP_HOME=/opt/module/sqoop-1.99.7
export PATH=$SQOOP_HOME/bin:$PATH
export LOGDIR=$SQOOP_HOME/logs
export SQOOP_SERVER_EXTRA_LIB=$SQOOP_HOME/lib

#flume
export FLUME_HOME=/opt/module/apache-flume-1.9.0
export PATH=$FLUME_HOME/bin:$PATH

#spark
export SPARK_HOME=/opt/module/spark-3.0.0-hadoop3.2
export PATH=$SPARK_HOME/bin:$PATH

# NOTE(review): the original exported KAFKA_HOME twice (kafka_2.12-2.4.1,
# then kafka_2.13-2.4.0) and SCALA_HOME twice (scala-2.12.11, then
# scala-2.13.1). Only the last assignment of each took effect, while PATH
# accumulated dead duplicate entries; only the effective values are kept.
# Confirm which Kafka/Scala build is actually installed on the node.
#kafka
export KAFKA_HOME=/opt/module/kafka_2.13-2.4.0
export PATH=$KAFKA_HOME/bin:$PATH

#scala
export SCALA_HOME=/opt/module/scala-2.13.1
export PATH=$SCALA_HOME/bin:$PATH

# Sqoop config post-install edit (vim command, not shell — run inside vim):
#   :%s/@LOGDIR@/\/opt\/module\/sqoop-1.99.7\/logs/g
# Set the data storage directory:
# Sqoop config post-install edit (vim command, not shell — run inside vim;
# the original `0,%` is not a valid vim range, `:%s` covers the whole file):
#   :%s/@BASEDIR@/\/opt\/module\/sqoop-1.99.7\/bin\/BASEDIR/g
#####################################################################

# Service startup cheat sheet — run the relevant commands per node.

# Check which ports are listening
netstat -tunlp

#zk
zkServer.sh start
zkServer.sh status

#hadoop
hdfs --daemon start journalnode
start-dfs.sh
start-yarn.sh

#flink
start-cluster.sh

#hbase
start-hbase.sh
stop-hbase.sh

#kafka
# FIX: the original hard-coded /opt/module/kafka_2.12-2.0.1, a version that
# matches no installation referenced elsewhere in this file; use KAFKA_HOME.
kafka-server-start.sh "$KAFKA_HOME/config/server.properties" &

#spark
cd /opt/module/spark-3.0.0-hadoop3.2/sbin
./start-all.sh

#sqoop2
sqoop2-server start