"All specified directories are failed to load." (Part 1) The fix is to reinstall.
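
The DataNode log below tells the story: the node starts, formats its storage directory, registers with both NameNodes of the mycluster nameservice, and then the heartbeat RPC dies with EOFException and, later, Connection refused. When the underlying cause is a stale or mismatched storage directory (the usual trigger for the "All specified directories are failed to load" error after a NameNode reformat), the "reinstall" fix amounts to wiping the DataNode's data directory and letting it re-register from scratch; redeploying Hadoop entirely is the more drastic version of the same idea. A minimal sketch, assuming the storage path shown in the log and that losing this node's local block replicas is acceptable:

# Stop the DataNode on this host (install root taken from the log's paths)
/opt/modules/hadoop-2.7.2/sbin/hadoop-daemon.sh stop datanode

# Remove the DataNode storage directory; this deletes the local block replicas
# and the VERSION file whose clusterID / Datanode UUID may be out of sync
rm -rf /opt/modules/hadoop-2.7.2/data/tmp/dfs/data

# Restart the DataNode; on startup it recreates and formats the directory,
# exactly as the "is not formatted ... Formatting ..." lines below show
/opt/modules/hadoop-2.7.2/sbin/hadoop-daemon.sh start datanode
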

2019-07-19 05:40:54,230 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: STARTUP_MSG: 
/************************************************************
STARTUP_MSG: Starting DataNode
STARTUP_MSG:   host = CentOSA/192.168.184.128
STARTUP_MSG:   args = []
STARTUP_MSG:   version = 2.7.2
STARTUP_MSG:   classpath = /opt/modules/hadoop-2.7.2/etc/hadoop:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/httpcore-4.2.5.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-httpclient-3.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-codec-1.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/log4j-1.2.17.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-digester-1.8.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-compress-1.4.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/curator-recipes-2.7.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jersey-core-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/httpclient-4.2.5.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jaxb-api-2.2.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/zookeeper-3.4.6.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jackson-jaxrs-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-logging-1.1.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/protobuf-java-2.5.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/hadoop-annotations-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/guava-11.0.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-beanutils-1.7.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/hamcrest-core-1.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/api-asn1-api-1.0.0-M20.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-cli-1.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/xmlenc-0.52.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-configuration-1.6.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/gson-2.2.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/slf4j-api-1.7.10.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/stax-api-1.0-2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-io-2.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jsch-0.1.42.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/snappy-java-1.0.4.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-collections-3.2.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-lang-2.6.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/hadoop-auth-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/asm-3.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/activation-1.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-math3-3.1.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/netty-3.6.2.Final.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-net-3.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/xz-1.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/junit-4.11.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jsr305-3.0.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jetty-util-6.1.26.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/apacheds-i18n-2.0.0-M15.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/api-util-1.0.0-M20.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/mockito-all-1.8.5.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/curator-framework-2.7.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/java-xmlbuilder-0.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jsp-api-2.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jackson-core-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/co
mmon/lib/jetty-6.1.26.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jersey-server-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jettison-1.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jets3t-0.9.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/apacheds-kerberos-codec-2.0.0-M15.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/paranamer-2.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/curator-client-2.7.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jackson-xc-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jaxb-impl-2.2.3-1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/avro-1.7.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jersey-json-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/htrace-core-3.1.0-incubating.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-beanutils-core-1.8.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/servlet-api-2.5.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jackson-mapper-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/hadoop-common-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/hadoop-common-2.7.2-tests.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/hadoop-nfs-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/leveldbjni-all-1.8.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/commons-codec-1.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/xml-apis-1.3.04.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/log4j-1.2.17.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jersey-core-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/commons-logging-1.1.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/xercesImpl-2.9.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/protobuf-java-2.5.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/guava-11.0.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/commons-cli-1.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/xmlenc-0.52.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/commons-io-2.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/commons-lang-2.6.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/asm-3.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/netty-3.6.2.Final.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/netty-all-4.0.23.Final.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jsr305-3.0.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jetty-util-6.1.26.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jackson-core-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jetty-6.1.26.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jersey-server-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/commons-daemon-1.0.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/htrace-core-3.1.0-incubating.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/servlet-api-2.5.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jackson-mapper-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/hadoop-hdfs-2.7.2-tests.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/hadoop-hdfs-nfs-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/hadoop-hdfs-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/guice-3.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/leveldbjni-all-1.8.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/commons-codec-1.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/log4j-1.2.17.jar:/opt/modules/hadoop-2.
7.2/share/hadoop/yarn/lib/commons-compress-1.4.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jersey-core-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jaxb-api-2.2.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/zookeeper-3.4.6.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jersey-client-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jackson-jaxrs-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/commons-logging-1.1.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/protobuf-java-2.5.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/guava-11.0.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/commons-cli-1.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/guice-servlet-3.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/stax-api-1.0-2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/commons-io-2.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/javax.inject-1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/commons-collections-3.2.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/commons-lang-2.6.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/asm-3.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/activation-1.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/netty-3.6.2.Final.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/xz-1.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jsr305-3.0.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jetty-util-6.1.26.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/zookeeper-3.4.6-tests.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jersey-guice-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jackson-core-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jetty-6.1.26.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/aopalliance-1.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jersey-server-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jettison-1.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jackson-xc-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jaxb-impl-2.2.3-1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jersey-json-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/servlet-api-2.5.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jackson-mapper-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-applications-distributedshell-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-registry-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-common-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-client-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-api-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-resourcemanager-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-applications-unmanaged-am-launcher-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-web-proxy-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-nodemanager-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-tests-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-applicationhistoryservice-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-common-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-sharedcachemanager-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/guice-3.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/leveldbjni-a
ll-1.8.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/log4j-1.2.17.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/commons-compress-1.4.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/jersey-core-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/protobuf-java-2.5.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/hadoop-annotations-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/hamcrest-core-1.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/guice-servlet-3.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/commons-io-2.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/snappy-java-1.0.4.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/javax.inject-1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/asm-3.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/netty-3.6.2.Final.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/xz-1.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/junit-4.11.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/jersey-guice-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/jackson-core-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/aopalliance-1.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/jersey-server-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/paranamer-2.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/avro-1.7.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/jackson-mapper-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-app-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-shuffle-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-common-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-plugins-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.7.2-tests.jar:/contrib/capacity-scheduler/*.jar:/contrib/capacity-scheduler/*.jar:/contrib/capacity-scheduler/*.jar
STARTUP_MSG:   build = Unknown -r Unknown; compiled by 'root' on 2017-05-22T10:49Z
STARTUP_MSG:   java = 1.8.0_144
************************************************************/
2019-07-19 05:40:54,276 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: registered UNIX signal handlers for [TERM, HUP, INT]
2019-07-19 05:40:56,626 INFO org.apache.hadoop.metrics2.impl.MetricsConfig: loaded properties from hadoop-metrics2.properties
2019-07-19 05:40:57,060 INFO org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Scheduled snapshot period at 10 second(s).
2019-07-19 05:40:57,062 INFO org.apache.hadoop.metrics2.impl.MetricsSystemImpl: DataNode metrics system started
2019-07-19 05:40:57,086 INFO org.apache.hadoop.hdfs.server.datanode.BlockScanner: Initialized block scanner with targetBytesPerSec 1048576
2019-07-19 05:40:57,088 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Configured hostname is CentOSA
2019-07-19 05:40:57,109 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Starting DataNode with maxLockedMemory = 0
2019-07-19 05:40:57,216 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Opened streaming server at /0.0.0.0:50010
2019-07-19 05:40:57,323 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Balancing bandwith is 1048576 bytes/s
2019-07-19 05:40:57,323 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Number threads for balancing is 5
2019-07-19 05:40:57,656 INFO org.mortbay.log: Logging to org.slf4j.impl.Log4jLoggerAdapter(org.mortbay.log) via org.mortbay.log.Slf4jLog
2019-07-19 05:40:57,683 INFO org.apache.hadoop.security.authentication.server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
2019-07-19 05:40:57,709 INFO org.apache.hadoop.http.HttpRequestLog: Http request log for http.requests.datanode is not defined
2019-07-19 05:40:57,734 INFO org.apache.hadoop.http.HttpServer2: Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
2019-07-19 05:40:57,749 INFO org.apache.hadoop.http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context datanode
2019-07-19 05:40:57,749 INFO org.apache.hadoop.http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
2019-07-19 05:40:57,750 INFO org.apache.hadoop.http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
2019-07-19 05:40:57,809 INFO org.apache.hadoop.http.HttpServer2: Jetty bound to port 39611
2019-07-19 05:40:57,809 INFO org.mortbay.log: jetty-6.1.26
2019-07-19 05:40:58,462 INFO org.mortbay.log: Started HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:39611
2019-07-19 05:40:58,836 INFO org.apache.hadoop.hdfs.server.datanode.web.DatanodeHttpServer: Listening HTTP traffic on /0.0.0.0:50075
2019-07-19 05:40:59,846 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: dnUserName = root
2019-07-19 05:40:59,847 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: supergroup = supergroup
2019-07-19 05:40:59,951 INFO org.apache.hadoop.ipc.CallQueueManager: Using callQueue class java.util.concurrent.LinkedBlockingQueue
2019-07-19 05:41:00,008 INFO org.apache.hadoop.ipc.Server: Starting Socket Reader #1 for port 50020
2019-07-19 05:41:00,118 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Opened IPC server at /0.0.0.0:50020
2019-07-19 05:41:00,151 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Refresh request received for nameservices: mycluster
2019-07-19 05:41:00,194 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Starting BPOfferServices for nameservices: mycluster
2019-07-19 05:41:00,222 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool <registering> (Datanode Uuid unassigned) service to CentOSA/192.168.184.128:9000 starting to offer service
2019-07-19 05:41:00,227 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool <registering> (Datanode Uuid unassigned) service to CentOSB/192.168.184.130:9000 starting to offer service
2019-07-19 05:41:00,265 INFO org.apache.hadoop.ipc.Server: IPC Server Responder: starting
2019-07-19 05:41:00,274 INFO org.apache.hadoop.ipc.Server: IPC Server listener on 50020: starting
2019-07-19 05:41:01,110 INFO org.apache.hadoop.hdfs.server.common.Storage: Lock on /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/in_use.lock acquired by nodename 2941@CentOSA
2019-07-19 05:41:01,112 INFO org.apache.hadoop.hdfs.server.common.Storage: Storage directory /opt/modules/hadoop-2.7.2/data/tmp/dfs/data is not formatted for BP-1817407430-192.168.184.128-1563485923256
2019-07-19 05:41:01,112 INFO org.apache.hadoop.hdfs.server.common.Storage: Formatting ...
2019-07-19 05:41:01,288 INFO org.apache.hadoop.hdfs.server.common.Storage: Analyzing storage directories for bpid BP-1817407430-192.168.184.128-1563485923256
2019-07-19 05:41:01,289 INFO org.apache.hadoop.hdfs.server.common.Storage: Locking is disabled for /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current/BP-1817407430-192.168.184.128-1563485923256
2019-07-19 05:41:01,290 INFO org.apache.hadoop.hdfs.server.common.Storage: Block pool storage directory /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current/BP-1817407430-192.168.184.128-1563485923256 is not formatted for BP-1817407430-192.168.184.128-1563485923256
2019-07-19 05:41:01,291 INFO org.apache.hadoop.hdfs.server.common.Storage: Formatting ...
2019-07-19 05:41:01,291 INFO org.apache.hadoop.hdfs.server.common.Storage: Formatting block pool BP-1817407430-192.168.184.128-1563485923256 directory /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current/BP-1817407430-192.168.184.128-1563485923256/current
2019-07-19 05:41:01,299 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Setting up storage: nsid=1858091198;bpid=BP-1817407430-192.168.184.128-1563485923256;lv=-56;nsInfo=lv=-63;cid=CID-036bea12-4aa5-4cde-a2e4-1e5dc71a35d0;nsid=1858091198;c=0;bpid=BP-1817407430-192.168.184.128-1563485923256;dnuuid=null
2019-07-19 05:41:01,301 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Generated and persisted new Datanode UUID 31f43ad5-ca15-4bf4-8d32-3a31def0321d
2019-07-19 05:41:01,452 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Added new volume: DS-b45c9491-ace2-4986-b460-f6102b2f3acb
2019-07-19 05:41:01,452 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Added volume - /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current, StorageType: DISK
2019-07-19 05:41:01,472 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Registered FSDatasetState MBean
2019-07-19 05:41:01,473 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Adding block pool BP-1817407430-192.168.184.128-1563485923256
2019-07-19 05:41:01,477 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Scanning block pool BP-1817407430-192.168.184.128-1563485923256 on volume /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current...
2019-07-19 05:41:01,567 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Time taken to scan block pool BP-1817407430-192.168.184.128-1563485923256 on /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current: 88ms
2019-07-19 05:41:01,567 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Total time to scan all replicas for block pool BP-1817407430-192.168.184.128-1563485923256: 94ms
2019-07-19 05:41:01,569 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Adding replicas to map for block pool BP-1817407430-192.168.184.128-1563485923256 on volume /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current...
2019-07-19 05:41:01,570 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Time to add replicas to map for block pool BP-1817407430-192.168.184.128-1563485923256 on volume /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current: 1ms
2019-07-19 05:41:01,570 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Total time to add all replicas to map: 2ms
2019-07-19 05:41:02,005 INFO org.apache.hadoop.hdfs.server.datanode.VolumeScanner: Now scanning bpid BP-1817407430-192.168.184.128-1563485923256 on volume /opt/modules/hadoop-2.7.2/data/tmp/dfs/data
2019-07-19 05:41:02,008 INFO org.apache.hadoop.hdfs.server.datanode.VolumeScanner: VolumeScanner(/opt/modules/hadoop-2.7.2/data/tmp/dfs/data, DS-b45c9491-ace2-4986-b460-f6102b2f3acb): finished scanning block pool BP-1817407430-192.168.184.128-1563485923256
2019-07-19 05:41:02,030 INFO org.apache.hadoop.hdfs.server.datanode.DirectoryScanner: Periodic Directory Tree Verification scan starting at 1563493201030 with interval 21600000
2019-07-19 05:41:02,048 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid null) service to CentOSB/192.168.184.130:9000 beginning handshake with NN
2019-07-19 05:41:02,051 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid null) service to CentOSA/192.168.184.128:9000 beginning handshake with NN
2019-07-19 05:41:02,174 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid null) service to CentOSB/192.168.184.130:9000 successfully registered with NN
2019-07-19 05:41:02,175 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: For namenode CentOSB/192.168.184.130:9000 using DELETEREPORT_INTERVAL of 300000 msec  BLOCKREPORT_INTERVAL of 21600000msec CACHEREPORT_INTERVAL of 10000msec Initial delay: 0msec; heartBeatInterval=3000
2019-07-19 05:41:02,313 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid 31f43ad5-ca15-4bf4-8d32-3a31def0321d) service to CentOSA/192.168.184.128:9000 successfully registered with NN
2019-07-19 05:41:02,313 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: For namenode CentOSA/192.168.184.128:9000 using DELETEREPORT_INTERVAL of 300000 msec  BLOCKREPORT_INTERVAL of 21600000msec CACHEREPORT_INTERVAL of 10000msec Initial delay: 0msec; heartBeatInterval=3000
2019-07-19 05:41:02,417 INFO org.apache.hadoop.hdfs.server.datanode.VolumeScanner: VolumeScanner(/opt/modules/hadoop-2.7.2/data/tmp/dfs/data, DS-b45c9491-ace2-4986-b460-f6102b2f3acb): no suitable block pools found to scan.  Waiting 1814399584 ms.
2019-07-19 05:41:02,687 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Successfully sent block report 0x18d5548a0f2,  containing 1 storage report(s), of which we sent 1. The reports had 0 total blocks and used 1 RPC(s). This took 11 msec to generate and 121 msecs for RPC and NN processing. Got back no commands.
2019-07-19 05:41:02,690 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Successfully sent block report 0x18d55ede65c,  containing 1 storage report(s), of which we sent 1. The reports had 0 total blocks and used 1 RPC(s). This took 0 msec to generate and 113 msecs for RPC and NN processing. Got back no commands.
2019-07-19 05:41:23,322 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Namenode Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid 31f43ad5-ca15-4bf4-8d32-3a31def0321d) service to CentOSA/192.168.184.128:9000 trying to claim ACTIVE state with txid=1
2019-07-19 05:41:23,323 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Acknowledging ACTIVE Namenode Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid 31f43ad5-ca15-4bf4-8d32-3a31def0321d) service to CentOSA/192.168.184.128:9000
2019-07-19 05:43:47,228 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.io.EOFException: End of File Exception between local host is: "CentOSA/192.168.184.128"; destination host is: "CentOSB":9000; : java.io.EOFException; For more details see:  http://wiki.apache.org/hadoop/EOFException
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:765)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.EOFException
	at java.io.DataInputStream.readInt(DataInputStream.java:392)
	at org.apache.hadoop.ipc.Client$Connection.receiveRpcResponse(Client.java:1084)
	at org.apache.hadoop.ipc.Client$Connection.run(Client.java:979)
2019-07-19 05:43:47,355 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.io.EOFException: End of File Exception between local host is: "CentOSA/192.168.184.128"; destination host is: "CentOSA":9000; : java.io.EOFException; For more details see:  http://wiki.apache.org/hadoop/EOFException
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:765)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.EOFException
	at java.io.DataInputStream.readInt(DataInputStream.java:392)
	at org.apache.hadoop.ipc.Client$Connection.receiveRpcResponse(Client.java:1084)
	at org.apache.hadoop.ipc.Client$Connection.run(Client.java:979)
2019-07-19 05:43:50,930 ERROR org.apache.hadoop.hdfs.server.datanode.DataNode: RECEIVED SIGNAL 15: SIGTERM
2019-07-19 05:43:50,934 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: SHUTDOWN_MSG: 
/************************************************************
SHUTDOWN_MSG: Shutting down DataNode at CentOSA/192.168.184.128
************************************************************/
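
The first session above ends with EOFException on sendHeartbeat to both NameNodes, followed by SIGTERM: the NameNode side closed the RPC connection, which usually means the NameNode processes were stopped or restarted rather than that this DataNode's storage is at fault. Before restarting the DataNode, it is worth checking that the RPC port from the log (9000) is actually being served. A hedged check, assuming a standard CentOS toolset and this install's default log layout (the log filename below is inferred from the root user and CentOSA hostname, so adjust as needed):

# Is a NameNode JVM running on this host?
jps | grep -i NameNode

# Is anything listening on the RPC port the DataNode is dialing?
netstat -tlnp | grep 9000    # or: ss -tlnp | grep 9000

# If not, the NameNode's own log usually says why it went down
tail -n 50 /opt/modules/hadoop-2.7.2/logs/hadoop-root-namenode-CentOSA.log

The second startup, below, shows the same pattern repeating.
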
2019-07-19 05:53:47,945 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: STARTUP_MSG: 
/************************************************************
STARTUP_MSG: Starting DataNode
STARTUP_MSG:   host = CentOSA/192.168.184.128
STARTUP_MSG:   args = []
STARTUP_MSG:   version = 2.7.2
STARTUP_MSG:   classpath = /opt/modules/hadoop-2.7.2/etc/hadoop:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/httpcore-4.2.5.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-httpclient-3.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-codec-1.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/log4j-1.2.17.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-digester-1.8.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-compress-1.4.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/curator-recipes-2.7.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jersey-core-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/httpclient-4.2.5.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jaxb-api-2.2.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/zookeeper-3.4.6.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jackson-jaxrs-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-logging-1.1.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/protobuf-java-2.5.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/hadoop-annotations-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/guava-11.0.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-beanutils-1.7.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/hamcrest-core-1.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/api-asn1-api-1.0.0-M20.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-cli-1.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/xmlenc-0.52.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-configuration-1.6.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/gson-2.2.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/slf4j-api-1.7.10.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/stax-api-1.0-2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-io-2.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jsch-0.1.42.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/snappy-java-1.0.4.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-collections-3.2.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-lang-2.6.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/hadoop-auth-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/asm-3.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/activation-1.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-math3-3.1.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/netty-3.6.2.Final.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-net-3.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/xz-1.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/junit-4.11.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jsr305-3.0.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jetty-util-6.1.26.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/apacheds-i18n-2.0.0-M15.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/api-util-1.0.0-M20.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/mockito-all-1.8.5.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/curator-framework-2.7.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/java-xmlbuilder-0.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jsp-api-2.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jackson-core-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/co
mmon/lib/jetty-6.1.26.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jersey-server-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jettison-1.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jets3t-0.9.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/apacheds-kerberos-codec-2.0.0-M15.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/paranamer-2.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/curator-client-2.7.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jackson-xc-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jaxb-impl-2.2.3-1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/avro-1.7.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jersey-json-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/htrace-core-3.1.0-incubating.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/commons-beanutils-core-1.8.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/servlet-api-2.5.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/lib/jackson-mapper-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/hadoop-common-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/hadoop-common-2.7.2-tests.jar:/opt/modules/hadoop-2.7.2/share/hadoop/common/hadoop-nfs-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/leveldbjni-all-1.8.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/commons-codec-1.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/xml-apis-1.3.04.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/log4j-1.2.17.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jersey-core-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/commons-logging-1.1.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/xercesImpl-2.9.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/protobuf-java-2.5.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/guava-11.0.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/commons-cli-1.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/xmlenc-0.52.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/commons-io-2.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/commons-lang-2.6.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/asm-3.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/netty-3.6.2.Final.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/netty-all-4.0.23.Final.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jsr305-3.0.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jetty-util-6.1.26.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jackson-core-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jetty-6.1.26.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jersey-server-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/commons-daemon-1.0.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/htrace-core-3.1.0-incubating.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/servlet-api-2.5.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/lib/jackson-mapper-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/hadoop-hdfs-2.7.2-tests.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/hadoop-hdfs-nfs-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/hdfs/hadoop-hdfs-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/guice-3.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/leveldbjni-all-1.8.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/commons-codec-1.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/log4j-1.2.17.jar:/opt/modules/hadoop-2.
7.2/share/hadoop/yarn/lib/commons-compress-1.4.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jersey-core-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jaxb-api-2.2.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/zookeeper-3.4.6.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jersey-client-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jackson-jaxrs-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/commons-logging-1.1.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/protobuf-java-2.5.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/guava-11.0.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/commons-cli-1.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/guice-servlet-3.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/stax-api-1.0-2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/commons-io-2.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/javax.inject-1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/commons-collections-3.2.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/commons-lang-2.6.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/asm-3.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/activation-1.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/netty-3.6.2.Final.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/xz-1.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jsr305-3.0.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jetty-util-6.1.26.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/zookeeper-3.4.6-tests.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jersey-guice-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jackson-core-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jetty-6.1.26.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/aopalliance-1.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jersey-server-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jettison-1.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jackson-xc-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jaxb-impl-2.2.3-1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jersey-json-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/servlet-api-2.5.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/lib/jackson-mapper-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-applications-distributedshell-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-registry-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-common-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-client-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-api-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-resourcemanager-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-applications-unmanaged-am-launcher-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-web-proxy-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-nodemanager-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-tests-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-applicationhistoryservice-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-common-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/yarn/hadoop-yarn-server-sharedcachemanager-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/guice-3.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/leveldbjni-a
ll-1.8.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/log4j-1.2.17.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/commons-compress-1.4.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/jersey-core-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/protobuf-java-2.5.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/hadoop-annotations-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/hamcrest-core-1.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/guice-servlet-3.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/commons-io-2.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/snappy-java-1.0.4.1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/javax.inject-1.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/asm-3.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/netty-3.6.2.Final.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/xz-1.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/junit-4.11.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/jersey-guice-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/jackson-core-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/aopalliance-1.0.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/jersey-server-1.9.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/paranamer-2.3.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/avro-1.7.4.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/lib/jackson-mapper-asl-1.9.13.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-app-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-shuffle-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-common-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-plugins-2.7.2.jar:/opt/modules/hadoop-2.7.2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.7.2-tests.jar:/contrib/capacity-scheduler/*.jar:/contrib/capacity-scheduler/*.jar:/contrib/capacity-scheduler/*.jar
STARTUP_MSG:   build = Unknown -r Unknown; compiled by 'root' on 2017-05-22T10:49Z
STARTUP_MSG:   java = 1.8.0_144
************************************************************/
2019-07-19 05:53:48,065 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: registered UNIX signal handlers for [TERM, HUP, INT]
2019-07-19 05:53:55,571 INFO org.apache.hadoop.metrics2.impl.MetricsConfig: loaded properties from hadoop-metrics2.properties
2019-07-19 05:53:56,682 INFO org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Scheduled snapshot period at 10 second(s).
2019-07-19 05:53:56,684 INFO org.apache.hadoop.metrics2.impl.MetricsSystemImpl: DataNode metrics system started
2019-07-19 05:53:56,759 INFO org.apache.hadoop.hdfs.server.datanode.BlockScanner: Initialized block scanner with targetBytesPerSec 1048576
2019-07-19 05:53:56,786 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Configured hostname is CentOSA
2019-07-19 05:53:56,857 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Starting DataNode with maxLockedMemory = 0
2019-07-19 05:53:57,179 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Opened streaming server at /0.0.0.0:50010
2019-07-19 05:53:57,204 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Balancing bandwith is 1048576 bytes/s
2019-07-19 05:53:57,204 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Number threads for balancing is 5
2019-07-19 05:53:58,679 INFO org.mortbay.log: Logging to org.slf4j.impl.Log4jLoggerAdapter(org.mortbay.log) via org.mortbay.log.Slf4jLog
2019-07-19 05:53:58,827 INFO org.apache.hadoop.security.authentication.server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
2019-07-19 05:53:58,913 INFO org.apache.hadoop.http.HttpRequestLog: Http request log for http.requests.datanode is not defined
2019-07-19 05:53:59,014 INFO org.apache.hadoop.http.HttpServer2: Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
2019-07-19 05:53:59,052 INFO org.apache.hadoop.http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context datanode
2019-07-19 05:53:59,053 INFO org.apache.hadoop.http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
2019-07-19 05:53:59,053 INFO org.apache.hadoop.http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
2019-07-19 05:53:59,226 INFO org.apache.hadoop.http.HttpServer2: Jetty bound to port 57316
2019-07-19 05:53:59,227 INFO org.mortbay.log: jetty-6.1.26
2019-07-19 05:54:00,704 INFO org.mortbay.log: Started HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:57316
2019-07-19 05:54:01,499 INFO org.apache.hadoop.hdfs.server.datanode.web.DatanodeHttpServer: Listening HTTP traffic on /0.0.0.0:50075
2019-07-19 05:54:03,389 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: dnUserName = root
2019-07-19 05:54:03,389 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: supergroup = supergroup
2019-07-19 05:54:03,675 INFO org.apache.hadoop.ipc.CallQueueManager: Using callQueue class java.util.concurrent.LinkedBlockingQueue
2019-07-19 05:54:03,787 INFO org.apache.hadoop.ipc.Server: Starting Socket Reader #1 for port 50020
2019-07-19 05:54:03,977 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Opened IPC server at /0.0.0.0:50020
2019-07-19 05:54:04,083 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Refresh request received for nameservices: mycluster
2019-07-19 05:54:04,221 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Starting BPOfferServices for nameservices: mycluster
2019-07-19 05:54:04,303 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool <registering> (Datanode Uuid unassigned) service to CentOSA/192.168.184.128:9000 starting to offer service
2019-07-19 05:54:04,315 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool <registering> (Datanode Uuid unassigned) service to CentOSB/192.168.184.130:9000 starting to offer service
2019-07-19 05:54:04,365 INFO org.apache.hadoop.ipc.Server: IPC Server Responder: starting
2019-07-19 05:54:04,371 INFO org.apache.hadoop.ipc.Server: IPC Server listener on 50020: starting
2019-07-19 05:54:06,429 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:54:07,431 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:54:08,434 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:54:09,435 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:54:10,437 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:54:11,438 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:54:12,440 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:54:13,933 INFO org.apache.hadoop.hdfs.server.common.Storage: Lock on /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/in_use.lock acquired by nodename 3894@CentOSA
2019-07-19 05:54:14,572 INFO org.apache.hadoop.hdfs.server.common.Storage: Analyzing storage directories for bpid BP-1817407430-192.168.184.128-1563485923256
2019-07-19 05:54:14,572 INFO org.apache.hadoop.hdfs.server.common.Storage: Locking is disabled for /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current/BP-1817407430-192.168.184.128-1563485923256
2019-07-19 05:54:14,582 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Setting up storage: nsid=1858091198;bpid=BP-1817407430-192.168.184.128-1563485923256;lv=-56;nsInfo=lv=-63;cid=CID-036bea12-4aa5-4cde-a2e4-1e5dc71a35d0;nsid=1858091198;c=0;bpid=BP-1817407430-192.168.184.128-1563485923256;dnuuid=31f43ad5-ca15-4bf4-8d32-3a31def0321d
2019-07-19 05:54:15,221 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Added new volume: DS-b45c9491-ace2-4986-b460-f6102b2f3acb
2019-07-19 05:54:15,221 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Added volume - /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current, StorageType: DISK
2019-07-19 05:54:15,650 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Registered FSDatasetState MBean
2019-07-19 05:54:15,651 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Adding block pool BP-1817407430-192.168.184.128-1563485923256
2019-07-19 05:54:15,675 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Scanning block pool BP-1817407430-192.168.184.128-1563485923256 on volume /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current...
2019-07-19 05:54:15,903 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Time taken to scan block pool BP-1817407430-192.168.184.128-1563485923256 on /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current: 223ms
2019-07-19 05:54:15,904 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Total time to scan all replicas for block pool BP-1817407430-192.168.184.128-1563485923256: 236ms
2019-07-19 05:54:15,908 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Adding replicas to map for block pool BP-1817407430-192.168.184.128-1563485923256 on volume /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current...
2019-07-19 05:54:15,909 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Time to add replicas to map for block pool BP-1817407430-192.168.184.128-1563485923256 on volume /opt/modules/hadoop-2.7.2/data/tmp/dfs/data/current: 1ms
2019-07-19 05:54:15,909 INFO org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl: Total time to add all replicas to map: 5ms
2019-07-19 05:54:18,988 INFO org.apache.hadoop.hdfs.server.datanode.VolumeScanner: VolumeScanner(/opt/modules/hadoop-2.7.2/data/tmp/dfs/data, DS-b45c9491-ace2-4986-b460-f6102b2f3acb): no suitable block pools found to scan.  Waiting 1813603013 ms.
2019-07-19 05:54:19,022 INFO org.apache.hadoop.hdfs.server.datanode.DirectoryScanner: Periodic Directory Tree Verification scan starting at 1563497937022 with interval 21600000
2019-07-19 05:54:19,064 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid null) service to CentOSB/192.168.184.130:9000 beginning handshake with NN
2019-07-19 05:54:19,248 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid null) service to CentOSB/192.168.184.130:9000 successfully registered with NN
2019-07-19 05:54:19,251 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: For namenode CentOSB/192.168.184.130:9000 using DELETEREPORT_INTERVAL of 300000 msec  BLOCKREPORT_INTERVAL of 21600000msec CACHEREPORT_INTERVAL of 10000msec Initial delay: 0msec; heartBeatInterval=3000
2019-07-19 05:54:20,369 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Successfully sent block report 0x24703498a54,  containing 1 storage report(s), of which we sent 1. The reports had 0 total blocks and used 1 RPC(s). This took 41 msec to generate and 314 msecs for RPC and NN processing. Got back no commands.
2019-07-19 05:54:26,134 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid 31f43ad5-ca15-4bf4-8d32-3a31def0321d) service to CentOSA/192.168.184.128:9000 beginning handshake with NN
2019-07-19 05:54:26,270 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid 31f43ad5-ca15-4bf4-8d32-3a31def0321d) service to CentOSA/192.168.184.128:9000 successfully registered with NN
2019-07-19 05:54:26,271 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: For namenode CentOSA/192.168.184.128:9000 using DELETEREPORT_INTERVAL of 300000 msec  BLOCKREPORT_INTERVAL of 21600000msec CACHEREPORT_INTERVAL of 10000msec Initial delay: 0msec; heartBeatInterval=3000
2019-07-19 05:54:26,602 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Successfully sent block report 0x2488230edd9,  containing 1 storage report(s), of which we sent 1. The reports had 0 total blocks and used 1 RPC(s). This took 0 msec to generate and 122 msecs for RPC and NN processing. Got back no commands.
2019-07-19 05:59:17,347 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.io.EOFException: End of File Exception between local host is: "CentOSA/192.168.184.128"; destination host is: "CentOSA":9000; : java.io.EOFException; For more details see:  http://wiki.apache.org/hadoop/EOFException
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:765)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.EOFException
	at java.io.DataInputStream.readInt(DataInputStream.java:392)
	at org.apache.hadoop.ipc.Client$Connection.receiveRpcResponse(Client.java:1084)
	at org.apache.hadoop.ipc.Client$Connection.run(Client.java:979)
2019-07-19 05:59:21,344 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:22,349 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:23,354 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:24,356 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:25,359 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:26,360 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:27,368 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:28,370 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:29,373 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:30,374 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:30,380 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
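
After the EOFException, the IPC client tries to reconnect ten times, one second apart (the RetryUpToMaximumCountWithFixedSleep policy in the lines above, which corresponds to the IPC client defaults ipc.client.connect.max.retries=10 and ipc.client.connect.retry.interval=1000 ms), and every attempt now fails with Connection refused. That is a stronger signal than the EOFException: nothing is listening on CentOSA:9000 at all, so the NameNode is down rather than merely slow. Two checks worth running on CentOSA, as a sketch (ss can be replaced with netstat -tlnp on older systems):

    # Confirm nothing is bound to the NameNode RPC port
    ss -tlnp | grep 9000 || echo "port 9000 not listening"

    # Verify the address the DataNode is configured to reach
    grep -A 1 'fs.defaultFS' /opt/modules/hadoop-2.7.2/etc/hadoop/core-site.xml
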
2019-07-19 05:59:31,387 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:32,391 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:33,393 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:34,394 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:35,395 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:36,397 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:37,398 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:38,400 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:39,402 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:40,404 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:40,409 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
2019-07-19 05:59:41,419 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:42,420 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:43,425 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:44,428 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:45,429 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:46,431 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:47,433 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:48,436 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:49,437 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:50,439 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:50,441 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
2019-07-19 05:59:51,447 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:52,449 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:53,450 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:54,452 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:55,454 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:56,455 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:57,457 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:58,459 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 05:59:59,461 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:00,463 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:00,465 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
2019-07-19 06:00:01,476 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:02,491 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:03,493 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:04,494 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:05,495 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:06,497 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:07,499 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:08,507 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:09,510 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:10,511 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:10,512 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
2019-07-19 06:00:11,515 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:12,516 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:13,519 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:14,521 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:15,524 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:16,531 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:17,534 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:18,537 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:19,539 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:20,541 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:20,543 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
2019-07-19 06:00:21,549 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:22,552 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:23,558 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:24,560 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:25,561 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:26,562 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:27,564 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:28,568 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:29,570 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:30,572 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:30,574 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
2019-07-19 06:00:31,577 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:32,579 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:33,584 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:34,586 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:35,587 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:36,589 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:37,591 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:38,600 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:39,602 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:40,604 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:40,607 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
2019-07-19 06:00:41,619 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:42,620 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:43,628 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:44,630 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:45,632 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:46,633 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:47,635 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:48,637 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:49,639 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:50,641 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:50,642 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
2019-07-19 06:00:51,648 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:52,650 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:53,651 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:54,657 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:55,659 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:56,661 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:57,662 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:58,664 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:00:59,666 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:00,667 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:00,669 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
2019-07-19 06:01:01,680 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:02,682 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:03,695 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:04,696 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:05,698 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:06,700 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:07,702 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:08,704 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:09,706 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:10,708 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:10,714 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
2019-07-19 06:01:11,726 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:12,727 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:13,729 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:14,731 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:15,733 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:16,741 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:17,743 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:18,745 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:19,747 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:20,748 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:20,750 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
2019-07-19 06:01:21,757 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
... (identical retry messages for attempts 1 through 8 omitted) ...
2019-07-19 06:01:30,782 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:30,810 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	... 8 more
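This first complete stack trace is the real symptom: java.net.ConnectException: Connection refused on CentOSA:9000 means nothing is listening on the NameNode RPC port at all, so the NameNode process is down or never started; it is not a network or firewall problem. A quick check on the NameNode host, sketched below, confirms this. The log path assumes the default Hadoop log layout under the install directory seen in the classpath; adjust it to your setup.

jps                                   # a healthy HDFS master should list a "NameNode" process
ss -tlnp | grep 9000                  # is anything bound to the RPC port from fs.defaultFS?
                                      # (on older systems: netstat -tlnp | grep 9000)
tail -n 100 /opt/modules/hadoop-2.7.2/logs/hadoop-*-namenode-*.log   # why did it exit?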
... (a second round of ten retries and an identical Connection refused stack trace follow; omitted) ...
2019-07-19 06:01:41,855 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:42,857 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:43,858 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:44,859 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:45,860 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:01:46,938 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeCommand action : DNA_REGISTER from CentOSA/192.168.184.128:9000 with standby state
2019-07-19 06:01:47,027 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid 31f43ad5-ca15-4bf4-8d32-3a31def0321d) service to CentOSA/192.168.184.128:9000 beginning handshake with NN
2019-07-19 06:01:47,176 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Block pool Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid 31f43ad5-ca15-4bf4-8d32-3a31def0321d) service to CentOSA/192.168.184.128:9000 successfully registered with NN
2019-07-19 06:01:47,302 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Successfully sent block report 0x2af1ea7ccae,  containing 1 storage report(s), of which we sent 1. The reports had 0 total blocks and used 1 RPC(s). This took 0 msec to generate and 112 msecs for RPC and NN processing. Got back no commands.
2019-07-19 06:02:17,842 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Namenode Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid 31f43ad5-ca15-4bf4-8d32-3a31def0321d) service to CentOSA/192.168.184.128:9000 trying to claim ACTIVE state with txid=4
2019-07-19 06:02:17,842 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Acknowledging ACTIVE Namenode Block pool BP-1817407430-192.168.184.128-1563485923256 (Datanode Uuid 31f43ad5-ca15-4bf4-8d32-3a31def0321d) service to CentOSA/192.168.184.128:9000
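For about a minute the picture improves: at 06:01:46 the NameNode is reachable again, the DataNode re-registers and sends a clean block report, and at 06:02:17 it acknowledges the NameNode claiming ACTIVE state. Roughly thirty seconds later the heartbeats fail again, which points to a NameNode that is repeatedly crashing rather than one that was simply never started.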
2019-07-19 06:02:50,200 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.io.EOFException: End of File Exception between local host is: "CentOSA/192.168.184.128"; destination host is: "CentOSA":9000; : java.io.EOFException; For more details see:  http://wiki.apache.org/hadoop/EOFException
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:765)
	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy14.sendHeartbeat(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB.sendHeartbeat(DatanodeProtocolClientSideTranslatorPB.java:153)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.sendHeartBeat(BPServiceActor.java:554)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.offerService(BPServiceActor.java:653)
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:824)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.EOFException
	at java.io.DataInputStream.readInt(DataInputStream.java:392)
	at org.apache.hadoop.ipc.Client$Connection.receiveRpcResponse(Client.java:1084)
	at org.apache.hadoop.ipc.Client$Connection.run(Client.java:979)
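Note the change of exception type here: an EOFException means the NameNode accepted the TCP connection but the stream ended before the heartbeat RPC got a reply, i.e. the NameNode process died mid-call. The retry storm that follows shows it is down again. Grepping the NameNode log for the fatal message usually pins down the cause; the path below is the assumed default location:

grep -nE "FATAL|ERROR" /opt/modules/hadoop-2.7.2/logs/hadoop-*-namenode-*.log | tail -n 20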
2019-07-19 06:02:54,200 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
... (identical retry messages for attempts 1 through 8 omitted) ...
2019-07-19 06:03:03,223 INFO org.apache.hadoop.ipc.Client: Retrying connect to server: CentOSA/192.168.184.128:9000. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
2019-07-19 06:03:03,225 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: IOException in offerService
java.net.ConnectException: Call From CentOSA/192.168.184.128 to CentOSA:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused
... (stack trace identical to the Connection refused trace above; omitted) ...
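Taken together, the log shows a NameNode that keeps dying shortly after starting, leaving the DataNode heartbeating into a dead port. The "reinstall" fix from the title amounts to stopping HDFS, wiping the storage directories, and reformatting; a minimal sketch follows. The data path is an assumption: check hadoop.tmp.dir, dfs.namenode.name.dir and dfs.datanode.data.dir in core-site.xml / hdfs-site.xml before deleting anything, and remember this destroys everything stored in HDFS.

stop-dfs.sh                               # stop NameNode, DataNode, SecondaryNameNode
rm -rf /opt/modules/hadoop-2.7.2/data/*   # assumed storage dir (hadoop.tmp.dir) -- verify first!
hdfs namenode -format                     # writes a fresh fsimage and a new clusterID
start-dfs.sh                              # the DataNode should now register and stay up

If the fatal message in the logs is "Incompatible clusterIDs" (the usual cause of "All specified directories are failed to load" after an earlier reformat), a gentler alternative is to copy the clusterID from the NameNode's current/VERSION file into the DataNode's current/VERSION file instead of wiping the data.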
Reposted from blog.csdn.net/u011488009/article/details/104473659