Hadoop startup script analysis

 

start-all.cmd

setlocal enabledelayedexpansion

//@rem introduces a comment line in a batch script

@rem Start all hadoop daemons.  Run this on master node.

//echo prints text to the console

echo This script is Deprecated. Instead use start-dfs.cmd and start-yarn.cmd

//%~dp0 expands to the drive and path of the directory that contains the current batch script

if not defined HADOOP_BIN_PATH ( 

  set HADOOP_BIN_PATH=%~dp0

)

//Check whether the path ends with \. %xxx% expands an environment variable; %xxx:~-1% takes the last character of the string; %xxx:~0,-1% takes everything from the first character up to, but not including, the last one.

if "%HADOOP_BIN_PATH:~-1%" == "\" (

  set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%

)
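As a quick standalone illustration of the two idioms above, here is a minimal sketch; demo.cmd and the BIN_PATH variable are just placeholder names. Save it anywhere and run it from any directory:

@echo off
setlocal
@rem %~dp0 is this script's own drive and path, always ending in \
set BIN_PATH=%~dp0
echo raw path:     %BIN_PATH%
@rem %VAR:~-1% is the last character; %VAR:~0,-1% is everything before it
if "%BIN_PATH:~-1%" == "\" (
  set BIN_PATH=%BIN_PATH:~0,-1%
)
echo trimmed path: %BIN_PATH%
endlocal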

//The libexec directory that sits alongside the bin directory

set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec

if not defined HADOOP_LIBEXEC_DIR (

  set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%

)

//Call hadoop-config.cmd, forwarding all command-line arguments

call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %*

if "%1" == "--config" (

  shift

  shift

)

//Call start-dfs.cmd --config %HADOOP_CONF_DIR%. %HADOOP_CONF_DIR% is a variable defined in hadoop-config.cmd.

@rem start hdfs daemons if hdfs is present

if exist %HADOOP_HDFS_HOME%\sbin\start-dfs.cmd (

  call %HADOOP_HDFS_HOME%\sbin\start-dfs.cmd --config %HADOOP_CONF_DIR%

)

//Call start-yarn.cmd --config %HADOOP_CONF_DIR%. %HADOOP_CONF_DIR% is a variable defined in hadoop-config.cmd.

@rem start yarn daemons if yarn is present

if exist %HADOOP_YARN_HOME%\sbin\start-yarn.cmd (

  call %HADOOP_YARN_HOME%\sbin\start-yarn.cmd --config %HADOOP_CONF_DIR%

)

 

endlocal

 

-----------------

hadoop-config.cmd

//Set default environment variables for the Hadoop directory layout

if not defined HADOOP_COMMON_DIR (

  set HADOOP_COMMON_DIR=share\hadoop\common

)

if not defined HADOOP_COMMON_LIB_JARS_DIR (

  set HADOOP_COMMON_LIB_JARS_DIR=share\hadoop\common\lib

)

if not defined HADOOP_COMMON_LIB_NATIVE_DIR (

  set HADOOP_COMMON_LIB_NATIVE_DIR=lib\native

)

if not defined HDFS_DIR (

  set HDFS_DIR=share\hadoop\hdfs

)

if not defined HDFS_LIB_JARS_DIR (

  set HDFS_LIB_JARS_DIR=share\hadoop\hdfs\lib

)

if not defined YARN_DIR (

  set YARN_DIR=share\hadoop\yarn

)

if not defined YARN_LIB_JARS_DIR (

  set YARN_LIB_JARS_DIR=share\hadoop\yarn\lib

)

if not defined MAPRED_DIR (

  set MAPRED_DIR=share\hadoop\mapreduce

)

if not defined MAPRED_LIB_JARS_DIR (

  set MAPRED_LIB_JARS_DIR=share\hadoop\mapreduce\lib

)

//Set HADOOP_HOME to the script's own directory, e.g. d:\hadoop2.3.7\libexec\

@rem the root of the Hadoop installation

set HADOOP_HOME=%~dp0

//FOR syntax: FOR %%variable IN (set of files or strings) DO command. Combined with the %%~dpi modifier below, it resolves the parent directory.

for %%i in (%HADOOP_HOME%.) do (

  set HADOOP_HOME=%%~dpi

)

//After the FOR above, HADOOP_HOME becomes d:\hadoop2.3.7\; the next block strips the trailing \

if "%HADOOP_HOME:~-1%" == "\" (

  set HADOOP_HOME=%HADOOP_HOME:~0,-1%

)
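A minimal sketch of the parent-directory trick above, using a hypothetical DIR value in place of HADOOP_HOME:

@echo off
setlocal
set DIR=d:\hadoop2.3.7\libexec\
@rem "%DIR%." names the directory itself; %%~dpi normalizes that entry
@rem (folding away the trailing \.) and expands to its drive and path,
@rem which is the parent directory d:\hadoop2.3.7\
for %%i in (%DIR%.) do (
  set DIR=%%~dpi
)
echo parent: %DIR%
endlocal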

//If hadoop-common-*.jar cannot be found, report the error and exit

if not exist %HADOOP_HOME%\share\hadoop\common\hadoop-common-*.jar (

    @echo +================================================================+

    @echo ^|      Error: HADOOP_HOME is not set correctly                   ^|

    @echo +----------------------------------------------------------------+

    @echo ^| Please set your HADOOP_HOME variable to the absolute path of   ^|

    @echo ^| the directory that contains the hadoop distribution            ^|

    @echo +================================================================+

    exit /b 1

)

//Set HADOOP_CONF_DIR

if not defined HADOOP_CONF_DIR (

  set HADOOP_CONF_DIR=%HADOOP_HOME%\etc\hadoop

)

 

@rem

@rem Allow alternate conf dir location.

@rem

//If the first argument is --config, assign the second argument to HADOOP_CONF_DIR; the two shift commands then drop both, so later argument checks start from the next one.

if "%1" == "--config" (

  set HADOOP_CONF_DIR=%2

  shift

  shift

)
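The same --config handling appears in several of these scripts. A minimal standalone sketch (argdemo.cmd is a made-up name; run it as argdemo.cmd --config c:\myconf namenode):

@echo off
setlocal
set CONF_DIR=default
if "%1" == "--config" (
  @rem %2 is still the second original argument when this block is parsed
  set CONF_DIR=%2
  shift
  shift
)
@rem after the two shifts, %1 refers to what was the third argument
echo CONF_DIR=%CONF_DIR%
echo next argument: %1
endlocal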

 

@rem

@rem Set log level. Default to INFO.

@rem

//If the first argument is --loglevel, assign the second argument to HADOOP_LOGLEVEL and shift both away.

if "%1" == "--loglevel" (

  set HADOOP_LOGLEVEL=%2

  shift

  shift

)

 

@rem

@rem check to see it is specified whether to use the slaves or the

@rem masters file

@rem

//If the first argument is --hosts, point HADOOP_SLAVES at that file under %HADOOP_CONF_DIR% and shift both arguments away.

if "%1" == "--hosts" (

  set HADOOP_SLAVES=%HADOOP_CONF_DIR%\%2

  shift

  shift

)

//Call hadoop-env.cmd if it exists

if exist %HADOOP_CONF_DIR%\hadoop-env.cmd (

  call %HADOOP_CONF_DIR%\hadoop-env.cmd

)

 

@rem

@rem setup java environment variables

@rem

//If JAVA_HOME is not set, report an error and exit

if not defined JAVA_HOME (

  echo Error: JAVA_HOME is not set.

  goto :eof

)

//If %JAVA_HOME%\bin\java.exe does not exist, report the error and ask the user to update hadoop-env.cmd, since JAVA_HOME can also be set there.

if not exist %JAVA_HOME%\bin\java.exe (

  echo Error: JAVA_HOME is incorrectly set.

  echo        Please update %HADOOP_HOME%\conf\hadoop-env.cmd

  goto :eof

)

//Set JAVA to the java launcher under %JAVA_HOME%\bin

set JAVA=%JAVA_HOME%\bin\java

@rem some Java parameters

//Default maximum Java heap size: 1000 MB

set JAVA_HEAP_MAX=-Xmx1000m

 

@rem

@rem check envvars which might override default args

@rem

//If HADOOP_HEAPSIZE is defined, it overrides the default heap size above.

if defined HADOOP_HEAPSIZE (

  set JAVA_HEAP_MAX=-Xmx%HADOOP_HEAPSIZE%m

)

 

@rem

@rem CLASSPATH initially contains %HADOOP_CONF_DIR%

@rem

//Initialize CLASSPATH with %HADOOP_CONF_DIR%

set CLASSPATH=%HADOOP_CONF_DIR%

 

if not defined HADOOP_COMMON_HOME (

  if exist %HADOOP_HOME%\share\hadoop\common (

    set HADOOP_COMMON_HOME=%HADOOP_HOME%

  )

)

 

@rem

@rem for releases, add core hadoop jar & webapps to CLASSPATH

@rem

//Append the common webapps directory to CLASSPATH

if exist %HADOOP_COMMON_HOME%\%HADOOP_COMMON_DIR%\webapps (

  set CLASSPATH=!CLASSPATH!;%HADOOP_COMMON_HOME%\%HADOOP_COMMON_DIR%

)

//Append the common lib jars to CLASSPATH

if exist %HADOOP_COMMON_HOME%\%HADOOP_COMMON_LIB_JARS_DIR% (

  set CLASSPATH=!CLASSPATH!;%HADOOP_COMMON_HOME%\%HADOOP_COMMON_LIB_JARS_DIR%\*

)

//Append the core common jars to CLASSPATH

set CLASSPATH=!CLASSPATH!;%HADOOP_COMMON_HOME%\%HADOOP_COMMON_DIR%\*
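The !CLASSPATH! form (rather than %CLASSPATH%) matters inside these parenthesized blocks. A minimal sketch of the difference, using a throwaway CP variable; "if exist C:\Windows" simply stands in for any condition that is true on a Windows machine:

@echo off
setlocal enabledelayedexpansion
@rem Inside a parenthesized block, %VAR% is expanded once when the whole
@rem block is parsed, while !VAR! is re-expanded as each line executes.
set CP=base
if exist C:\Windows (
  set CP=%CP%;one
  set CP=%CP%;two
  echo with percent expansion: %CP%
)
set CP=base
if exist C:\Windows (
  set CP=!CP!;one
  set CP=!CP!;two
  echo with delayed expansion: !CP!
)
endlocal

The first block prints only base (and leaves CP as base;two), because every %CP% was replaced before the block ran; the second prints base;one;two, which is why the script appends to CLASSPATH with !CLASSPATH!.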

 

@rem

@rem default log directory % file

@rem

//Default log directory

if not defined HADOOP_LOG_DIR (

  set HADOOP_LOG_DIR=%HADOOP_HOME%\logs

)

//Default log file name

if not defined HADOOP_LOGFILE (

  set HADOOP_LOGFILE=hadoop.log

)

//Default log level

if not defined HADOOP_LOGLEVEL (

  set HADOOP_LOGLEVEL=INFO

)

 

if not defined HADOOP_ROOT_LOGGER (

  set HADOOP_ROOT_LOGGER=%HADOOP_LOGLEVEL%,console

)

 

@rem

@rem default policy file for service-level authorization

@rem

//Default policy file for service-level authorization

if not defined HADOOP_POLICYFILE (

  set HADOOP_POLICYFILE=hadoop-policy.xml

)

 

@rem

@rem Determine the JAVA_PLATFORM

@rem

//for /f runs the command inside the IN clause and %%A receives each line of its output. Here %JAVA% (java.exe) runs org.apache.hadoop.util.PlatformName against the CLASSPATH built above to detect the platform string, which is stored in JAVA_PLATFORM.

for /f "delims=" %%A in ('%JAVA% -Xmx32m %HADOOP_JAVA_PLATFORM_OPTS% -classpath "%CLASSPATH%" org.apache.hadoop.util.PlatformName') do set JAVA_PLATFORM=%%A

@rem replace space with underscore

set JAVA_PLATFORM=%JAVA_PLATFORM: =_%
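A minimal sketch of the same for /f pattern, with the built-in ver command standing in for the java invocation so it runs anywhere:

@echo off
setlocal
@rem for /f runs the quoted command and feeds each output line to %%A;
@rem "delims=" keeps the whole line instead of splitting it on spaces.
for /f "delims=" %%A in ('ver') do set OSVER=%%A
@rem replace spaces with underscores, as the script above does
set OSVER=%OSVER: =_%
echo %OSVER%
endlocal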

 

@rem

@rem setup 'java.library.path' for native hadoop code if necessary

@rem

//Set java.library.path for native Hadoop code

@rem Check if we're running hadoop directly from the build

set JAVA_LIBRARY_PATH=

if exist %HADOOP_COMMON_HOME%\target\bin (

  set JAVA_LIBRARY_PATH=%HADOOP_COMMON_HOME%\target\bin

)

 

@rem For the distro case, check the bin folder

if exist %HADOOP_COMMON_HOME%\bin (

  set JAVA_LIBRARY_PATH=%JAVA_LIBRARY_PATH%;%HADOOP_COMMON_HOME%\bin

)

 

@rem

@rem setup a default TOOL_PATH

@rem

//Default TOOL_PATH: the Hadoop tools jars

set TOOL_PATH=%HADOOP_HOME%\share\hadoop\tools\lib\*

//Append logging-related system properties to HADOOP_OPTS

set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.log.dir=%HADOOP_LOG_DIR%

set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.log.file=%HADOOP_LOGFILE%

set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.home.dir=%HADOOP_HOME%

set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.id.str=%HADOOP_IDENT_STRING%

set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.root.logger=%HADOOP_ROOT_LOGGER%

//Append java.library.path to HADOOP_OPTS if native libraries were found

if defined JAVA_LIBRARY_PATH (

  set HADOOP_OPTS=%HADOOP_OPTS% -Djava.library.path=%JAVA_LIBRARY_PATH%

)

//Append the service-level authorization policy file to HADOOP_OPTS

set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.policy.file=%HADOOP_POLICYFILE%

 

@rem

@rem Disable ipv6 as it can cause issues

@rem

//Prefer IPv4; IPv6 can cause problems

set HADOOP_OPTS=%HADOOP_OPTS% -Djava.net.preferIPv4Stack=true

 

@rem

@rem put hdfs in classpath if present

@rem

 

if not defined HADOOP_HDFS_HOME (

  if exist %HADOOP_HOME%\%HDFS_DIR% (

    set HADOOP_HDFS_HOME=%HADOOP_HOME%

  )

)

//Append the HDFS webapps directory and lib jars to CLASSPATH

if exist %HADOOP_HDFS_HOME%\%HDFS_DIR%\webapps (

  set CLASSPATH=!CLASSPATH!;%HADOOP_HDFS_HOME%\%HDFS_DIR%

)

 

if exist %HADOOP_HDFS_HOME%\%HDFS_LIB_JARS_DIR% (

  set CLASSPATH=!CLASSPATH!;%HADOOP_HDFS_HOME%\%HDFS_LIB_JARS_DIR%\*

)

//Append the core HDFS jars to CLASSPATH

set CLASSPATH=!CLASSPATH!;%HADOOP_HDFS_HOME%\%HDFS_DIR%\*

 

@rem

@rem put yarn in classpath if present

@rem

 

if not defined HADOOP_YARN_HOME (

  if exist %HADOOP_HOME%\%YARN_DIR% (

    set HADOOP_YARN_HOME=%HADOOP_HOME%

  )

)

 

if exist %HADOOP_YARN_HOME%\%YARN_DIR%\webapps (

  set CLASSPATH=!CLASSPATH!;%HADOOP_YARN_HOME%\%YARN_DIR%

)

 

if exist %HADOOP_YARN_HOME%\%YARN_LIB_JARS_DIR% (

  set CLASSPATH=!CLASSPATH!;%HADOOP_YARN_HOME%\%YARN_LIB_JARS_DIR%\*

)

 

set CLASSPATH=!CLASSPATH!;%HADOOP_YARN_HOME%\%YARN_DIR%\*

 

@rem

@rem put mapred in classpath if present AND different from YARN

@rem

 

if not defined HADOOP_MAPRED_HOME (

  if exist %HADOOP_HOME%\%MAPRED_DIR% (

    set HADOOP_MAPRED_HOME=%HADOOP_HOME%

  )

)

//Append the MapReduce directories to CLASSPATH, but only when they differ from the YARN ones

if not "%HADOOP_MAPRED_HOME%\%MAPRED_DIR%" == "%HADOOP_YARN_HOME%\%YARN_DIR%" (

 

  if exist %HADOOP_MAPRED_HOME%\%MAPRED_DIR%\webapps (

    set CLASSPATH=!CLASSPATH!;%HADOOP_MAPRED_HOME%\%MAPRED_DIR%

  )

 

  if exist %HADOOP_MAPRED_HOME%\%MAPRED_LIB_JARS_DIR% (

    set CLASSPATH=!CLASSPATH!;%HADOOP_MAPRED_HOME%\%MAPRED_LIB_JARS_DIR%\*

  )

 

  set CLASSPATH=!CLASSPATH!;%HADOOP_MAPRED_HOME%\%MAPRED_DIR%\*

)

 

@rem

@rem add user-specified CLASSPATH last

@rem

 

if defined HADOOP_CLASSPATH (

  if not defined HADOOP_USE_CLIENT_CLASSLOADER (

    if defined HADOOP_USER_CLASSPATH_FIRST (

      set CLASSPATH=%HADOOP_CLASSPATH%;%CLASSPATH%;

    ) else (

      set CLASSPATH=%CLASSPATH%;%HADOOP_CLASSPATH%;

    )

  )

)

 

:eof

-----------------------------

hadoop-env.cmd

//Set JAVA_HOME (by default this just re-uses the value already in the environment)

set JAVA_HOME=%JAVA_HOME%

//Capacity-scheduler jars; this directory is not present in the current distribution.

if exist %HADOOP_HOME%\contrib\capacity-scheduler (

  if not defined HADOOP_CLASSPATH (

//HADOOP_CLASSPATH is not set in our environment, so this branch would be taken.

    set HADOOP_CLASSPATH=%HADOOP_HOME%\contrib\capacity-scheduler\*.jar

  ) else (

    set HADOOP_CLASSPATH=%HADOOP_CLASSPATH%;%HADOOP_HOME%\contrib\capacity-scheduler\*.jar

  )

)

 

@rem The maximum amount of heap to use, in MB. Default is 1000.

@rem set HADOOP_HEAPSIZE=

@rem set HADOOP_NAMENODE_INIT_HEAPSIZE=""

 

@rem Extra Java runtime options.  Empty by default.

@rem set HADOOP_OPTS=%HADOOP_OPTS% -Djava.net.preferIPv4Stack=true

 

@rem Command specific options appended to HADOOP_OPTS when specified

//Default security and audit logger settings

if not defined HADOOP_SECURITY_LOGGER (

  set HADOOP_SECURITY_LOGGER=INFO,RFAS

)

if not defined HDFS_AUDIT_LOGGER (

  set HDFS_AUDIT_LOGGER=INFO,NullAppender

)

//NameNode options: security logger and HDFS audit logger

set HADOOP_NAMENODE_OPTS=-Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER% -Dhdfs.audit.logger=%HDFS_AUDIT_LOGGER% %HADOOP_NAMENODE_OPTS%

set HADOOP_DATANODE_OPTS=-Dhadoop.security.logger=ERROR,RFAS %HADOOP_DATANODE_OPTS%

set HADOOP_SECONDARYNAMENODE_OPTS=-Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER% -Dhdfs.audit.logger=%HDFS_AUDIT_LOGGER% %HADOOP_SECONDARYNAMENODE_OPTS%

//Client options

@rem The following applies to multiple commands (fs, dfs, fsck, distcp etc)

set HADOOP_CLIENT_OPTS=-Xmx512m %HADOOP_CLIENT_OPTS%

@rem set HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData %HADOOP_JAVA_PLATFORM_OPTS%"

 

@rem On secure datanodes, user to run the datanode as after dropping privileges

set HADOOP_SECURE_DN_USER=%HADOOP_SECURE_DN_USER%

 

@rem Where log files are stored.  %HADOOP_HOME%/logs by default.

@rem set HADOOP_LOG_DIR=%HADOOP_LOG_DIR%\%USERNAME%

 

@rem Where log files are stored in the secure data environment.

set HADOOP_SECURE_DN_LOG_DIR=%HADOOP_LOG_DIR%\%HADOOP_HDFS_USER%

 

@rem The directory where pid files are stored. /tmp by default.

@rem NOTE: this should be set to a directory that can only be written to by 

@rem       the user that will run the hadoop daemons.  Otherwise there is the

@rem       potential for a symlink attack.

set HADOOP_PID_DIR=%HADOOP_PID_DIR%

set HADOOP_SECURE_DN_PID_DIR=%HADOOP_PID_DIR%

 

@rem A string representing this instance of hadoop. %USERNAME% by default.

set HADOOP_IDENT_STRING=%USERNAME%

-----------------------

start-dfs.cmd

setlocal enabledelayedexpansion

//Same as above: HADOOP_BIN_PATH becomes ${hadoop_install}\sbin

if not defined HADOOP_BIN_PATH ( 

  set HADOOP_BIN_PATH=%~dp0

)

 

if "%HADOOP_BIN_PATH:~-1%" == "\" (

  set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%

)

//Same as above

set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec

if not defined HADOOP_LIBEXEC_DIR (

  set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%

)

//Same as above: call hdfs-config.cmd, then check for a custom --config argument

call %HADOOP_LIBEXEC_DIR%\hdfs-config.cmd %*

if "%1" == "--config" (

  shift

  shift

)

//The Windows start command runs a program in a new window. "Apache Hadoop Distribution" is the window title; hadoop is the executable (hadoop.cmd). A small standalone sketch of the start syntax follows this listing.

start "Apache Hadoop Distribution" hadoop namenode

start "Apache Hadoop Distribution" hadoop datanode

 

endlocal
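A minimal sketch of the start syntax used in start-dfs.cmd; the title and command here are arbitrary:

@echo off
@rem The first quoted string after start becomes the window title;
@rem everything that follows is the command run in the new window.
start "Demo Window" cmd /k echo running in a separate window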

-----------------------

hdfs-config.cmd

//Same as above

if not defined HADOOP_BIN_PATH ( 

  set HADOOP_BIN_PATH=%~dp0

)

//Same as above

if "%HADOOP_BIN_PATH:~-1%" == "\" (

  set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%

)

 

set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec

if not defined HADOOP_LIBEXEC_DIR (

  set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%

)

 

if exist %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd (

  call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %*

) else if exist %HADOOP_COMMON_HOME%\libexec\hadoop-config.cmd (

  call %HADOOP_COMMON_HOME%\libexec\hadoop-config.cmd %*

) else if exist %HADOOP_HOME%\libexec\hadoop-config.cmd (

  call %HADOOP_HOME%\libexec\hadoop-config.cmd %*

) else (

  echo Hadoop common not found.

)

 

:eof

---------------------

hadoop.cmd

//Same as above

if not defined HADOOP_BIN_PATH ( 

  set HADOOP_BIN_PATH=%~dp0

)

 

if "%HADOOP_BIN_PATH:~-1%" == "\" (

  set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%

)

//:updatepath is a label defined further down in this script; the call runs that block first and then returns here.

call :updatepath %HADOOP_BIN_PATH%

 

:main

  setlocal enabledelayedexpansion

 

  set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec

  if not defined HADOOP_LIBEXEC_DIR (

    set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%

  )

 

  call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %*

//If the first argument is --config, shift twice to discard the option and its value.

  if "%1" == "--config" (

    shift

    shift

  )

//If the first argument is --loglevel, shift twice to discard the option and its value.

  if "%1" == "--loglevel" (

    shift

    shift

  )

//The first remaining argument is the hadoop subcommand

  set hadoop-command=%1

  if not defined hadoop-command (

      goto print_usage

  )

 

  call :make_command_arguments %*

//The set of subcommands that belong to hdfs

  set hdfscommands=namenode secondarynamenode datanode dfs dfsadmin fsck balancer fetchdt oiv dfsgroups

  //Loop over %hdfscommands%; if the requested hadoop-command matches one of them, set hdfscommand=true. A standalone sketch of this membership test follows the block below.

  for %%i in ( %hdfscommands% ) do (

    if %hadoop-command% == %%i set hdfscommand=true

  )

 

  if defined hdfscommand (

    @echo DEPRECATED: Use of this script to execute hdfs command is deprecated. 1>&2

    @echo Instead use the hdfs command for it. 1>&2

    if exist %HADOOP_HDFS_HOME%\bin\hdfs.cmd (

      call %HADOOP_HDFS_HOME%\bin\hdfs.cmd %*

      goto :eof

    ) else if exist %HADOOP_HOME%\bin\hdfs.cmd (

      call %HADOOP_HOME%\bin\hdfs.cmd %*

      goto :eof

    ) else (

      echo HADOOP_HDFS_HOME not found!

      goto :eof

    )

  )
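A minimal standalone sketch of the membership test above, with a hard-coded cmd value in place of the real first argument:

@echo off
setlocal
set cmd=datanode
set hdfscommands=namenode secondarynamenode datanode dfs dfsadmin fsck
@rem the plain for loops over the space-separated words of the list
for %%i in ( %hdfscommands% ) do (
  if %cmd% == %%i set hdfscommand=true
)
if defined hdfscommand (
  echo %cmd% is an hdfs command
) else (
  echo %cmd% is not an hdfs command
)
endlocal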

//The set of subcommands that belong to mapred

  set mapredcommands=pipes job queue mrgroups mradmin jobtracker tasktracker

  for %%i in ( %mapredcommands% ) do (

    if %hadoop-command% == %%i set mapredcommand=true  

  )

  if defined mapredcommand (

    @echo DEPRECATED: Use of this script to execute mapred command is deprecated. 1>&2

    @echo Instead use the mapred command for it. 1>&2

    if exist %HADOOP_MAPRED_HOME%\bin\mapred.cmd (

      call %HADOOP_MAPRED_HOME%\bin\mapred.cmd %*

      goto :eof

    ) else if exist %HADOOP_HOME%\bin\mapred.cmd (

      call %HADOOP_HOME%\bin\mapred.cmd %*

      goto :eof

    ) else (

      echo HADOOP_MAPRED_HOME not found!

      goto :eof

    )

  )

 

  if %hadoop-command% == classpath (

    if not defined hadoop-command-arguments (

      @rem No need to bother starting up a JVM for this simple case.

      @echo %CLASSPATH%

      exit /b

    )

  )

  

  set corecommands=fs version jar checknative distcp daemonlog archive classpath credential key

  for %%i in ( %corecommands% ) do (

    if %hadoop-command% == %%i set corecommand=true  

  )

  if defined corecommand (

    call :%hadoop-command%

  ) else (

    set CLASSPATH=%CLASSPATH%;%CD%

    set CLASS=%hadoop-command%

  )
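The call :%hadoop-command% line above jumps to whichever label matches the subcommand. A minimal sketch of that dispatch pattern, reusing the real :version label and class name from this script:

@echo off
setlocal
set cmd=version
@rem call :<label> runs the labelled block as a subroutine and returns
call :%cmd%
echo CLASS=%CLASS%
goto :eof

:version
  set CLASS=org.apache.hadoop.util.VersionInfo
  goto :eof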

 

  set path=%PATH%;%HADOOP_BIN_PATH%

 

  @rem Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS

  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%

 

  @rem make sure security appender is turned off

  if not defined HADOOP_SECURITY_LOGGER (

    set HADOOP_SECURITY_LOGGER=INFO,NullAppender

  )

  set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER%

 

  call %JAVA% %JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %CLASSPATH% %CLASS% %hadoop-command-arguments%

 

  exit /b %ERRORLEVEL%

 

:fs 

  set CLASS=org.apache.hadoop.fs.FsShell

  goto :eof

 

:version 

  set CLASS=org.apache.hadoop.util.VersionInfo

  goto :eof

 

:jar

  if defined YARN_OPTS (

    @echo WARNING: Use "yarn jar" to launch YARN applications. 1>&2

  ) else if defined YARN_CLIENT_OPTS (

    @echo WARNING: Use "yarn jar" to launch YARN applications. 1>&2

  )

  set CLASS=org.apache.hadoop.util.RunJar

  goto :eof

 

:checknative

  set CLASS=org.apache.hadoop.util.NativeLibraryChecker

  goto :eof

 

:distcp

  set CLASS=org.apache.hadoop.tools.DistCp

  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%

  goto :eof

 

:daemonlog

  set CLASS=org.apache.hadoop.log.LogLevel

  goto :eof

 

:archive

  set CLASS=org.apache.hadoop.tools.HadoopArchives

  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%

  goto :eof

 

:classpath

  set CLASS=org.apache.hadoop.util.Classpath

  goto :eof

 

:credential

  set CLASS=org.apache.hadoop.security.alias.CredentialShell

  goto :eof

 

:key

  set CLASS=org.apache.hadoop.crypto.key.KeyShell

  goto :eof

//%variable: =_% replaces every space with an underscore; the colon introduces the substitution. A standalone sketch follows the :updatepath subroutine.

:updatepath

  set path_to_add=%*

  set current_path_comparable=%path%

  set current_path_comparable=%current_path_comparable: =_%

  set current_path_comparable=%current_path_comparable:(=_%

  set current_path_comparable=%current_path_comparable:)=_%

  set path_to_add_comparable=%path_to_add%

  set path_to_add_comparable=%path_to_add_comparable: =_%

  set path_to_add_comparable=%path_to_add_comparable:(=_%

  set path_to_add_comparable=%path_to_add_comparable:)=_%

 

  for %%i in ( %current_path_comparable% ) do (

    if /i "%%i" == "%path_to_add_comparable%" (

      set path_to_add_exist=true

    )

  )

  set system_path_comparable=

  set path_to_add_comparable=

  if not defined path_to_add_exist path=%path_to_add%;%path%

  set path_to_add=

//goto :eof returns from the subroutine (it jumps to the end of the file when not inside a call)

  goto :eof
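A minimal sketch of the %variable:old=new% substitution that :updatepath relies on, applied to a hypothetical sample string:

@echo off
setlocal
set SAMPLE=C:\Program Files (x86)\demo dir
echo before: %SAMPLE%
@rem replace spaces, ( and ) with underscores, as :updatepath does, so the
@rem value can be compared safely inside for and if blocks
set SAMPLE=%SAMPLE: =_%
set SAMPLE=%SAMPLE:(=_%
set SAMPLE=%SAMPLE:)=_%
echo after:  %SAMPLE%
endlocal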

@rem This changes %1, %2 etc. Hence those cannot be used after calling this.

//The label below collects the remaining command-line arguments in a loop

:make_command_arguments

  if "%1" == "--config" (

    shift

    shift

  )

  if "%1" == "--loglevel" (

    shift

    shift

  )

  if [%2] == [] goto :eof

  shift

  set _arguments=

//Loop start

  :MakeCmdArgsLoop 

  if [%1]==[] goto :EndLoop 

 

  if not defined _arguments (

    set _arguments=%1

  ) else (

    set _arguments=!_arguments! %1

  )

  shift

//Jump back to the top of the loop
  goto :MakeCmdArgsLoop

  :EndLoop 

//Assign the collected arguments to hadoop-command-arguments

  set hadoop-command-arguments=%_arguments%

  goto :eof
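A minimal standalone sketch of the argument-collecting loop above (argsdemo.cmd is a made-up name; run it as argsdemo.cmd fs -ls /tmp and it prints the arguments that follow the first one):

@echo off
setlocal enabledelayedexpansion
@rem drop the first argument (the subcommand) and gather the rest
shift
set _arguments=
:GatherLoop
if [%1]==[] goto :GatherDone
if not defined _arguments (
  set _arguments=%1
) else (
  set _arguments=!_arguments! %1
)
shift
goto :GatherLoop
:GatherDone
echo collected arguments: %_arguments%
endlocal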

 

:print_usage

  @echo Usage: hadoop [--config confdir] [--loglevel loglevel] COMMAND

  @echo where COMMAND is one of:

  @echo   fs                   run a generic filesystem user client

  @echo   version              print the version

  @echo   jar ^<jar^>            run a jar file

  @echo                        note: please use "yarn jar" to launch

  @echo                              YARN applications, not this command.

  @echo   checknative [-a^|-h]  check native hadoop and compression libraries availability

  @echo   distcp ^<srcurl^> ^<desturl^> copy file or directories recursively

  @echo   archive -archiveName NAME -p ^<parent path^> ^<src^>* ^<dest^> create a hadoop archive

  @echo   classpath            prints the class path needed to get the

  @echo                        Hadoop jar and the required libraries

  @echo   credential           interact with credential providers

  @echo   key                  manage keys via the KeyProvider

  @echo   daemonlog            get/set the log level for each daemon

  @echo  or

  @echo   CLASSNAME            run the class named CLASSNAME

  @echo.

  @echo Most commands print help when invoked w/o parameters.

 

endlocal

 

 

 

 

 

-----------------------

Batch string extraction (substring syntax)

 

 

@echo off
  set ifo=abcdefghijklmnopqrstuvwxyz0123456789
  echo Original string (the next line numbers each character position):
  echo %ifo%
  echo 123456789012345678901234567890123456
  echo First 5 characters:
  echo %ifo:~0,5%
  echo Last 5 characters:
  echo %ifo:~-5%
  echo From the 1st character to the 6th-from-last:
  echo %ifo:~0,-5%
  echo 5 characters starting at the 4th character:
  echo %ifo:~3,5%
  echo 5 characters starting at the 14th character from the end:
  echo %ifo:~-14,5%
  pause

 

 

 
