Linux Server Setup Log - 2

hadoop install
・hadoop-0.20.1.tar.gz
・hadoop-0.20-0.20.2+923.21-1.noarch.rpm
・hadoop-0.20-datanode-0.20.2+923.21-1.noarch.rpm
・hadoop-0.20-jobtracker-0.20.2+923.21-1.noarch.rpm
・hadoop-0.20-namenode-0.20.2+923.21-1.noarch.rpm
・hadoop-0.20-tasktracker-0.20.2+923.21-1.noarch.rpm

# cp -rp /mnt/nas/middleware/hadoop/* /var/tmp
# groupadd -g 496 hadoop
# useradd -u 496 -g hadoop -r -m -d /opt/hadoop hadoop
# usermod -g admin -G tomcat7,hadoop admin
# usermod -g tomcat7 -G admin,hadoop tomcat7
# usermod -g hadoop -G admin,tomcat7 hadoop
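
The resulting memberships are easy to mis-type, so verify them with id:

# id hadoop
# id admin
# id tomcat7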

# su - hadoop
$ ssh-keygen -b 2048 -t rsa -P ''

$ touch .ssh/authorized_keys
$ chmod 600 .ssh/authorized_keys
$ cat .ssh/id_rsa.pub >> .ssh/authorized_keys

$ ssh localhost
$ ssh 127.0.0.1

Are you sure you want to continue connecting (yes/no)? yes
$ exit

# su - admin
$ cp .ssh/id_rsa.pub /tmp
$ exit

# su - hadoop
$ cat /tmp/id_rsa.pub >> .ssh/authorized_keys
$ exit

# su - admin
$ ssh hadoop@localhost
$ ssh hadoop@127.0.0.1

Are you sure you want to continue connecting (yes/no)? yes
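
BatchMode makes ssh fail instead of prompting, so this confirms the key really is being used (standard OpenSSH option):

$ ssh -o BatchMode=yes hadoop@localhost true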
$ rm /tmp/id_rsa.pub   
$ exit

# cd /var/tmp
# tar zxvf hadoop-0.20.1.tar.gz
# mv hadoop-0.20.1/* /opt/hadoop/
# chown -R hadoop:hadoop /opt/hadoop

# chown hadoop:hadoop /mnt/log/hadoop

# mkdir /var/run/hadoop
# chown hadoop:hadoop /var/run/hadoop
# chmod 700 /var/run/hadoop

hadoop setup
# su - hadoop
$ mkdir dfs
$ mkdir mapred
$ chmod 775 dfs
$ chmod 775 mapred

$ cd conf
$ cp -p hadoop-env.sh hadoop-env.sh.orig
$ vi hadoop-env.sh

export JAVA_HOME=/opt/java/jdk1.6.0
export HADOOP_LOG_DIR=/mnt/log/hadoop
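
A quick sanity check that JAVA_HOME is picked up: hadoop version is a stock subcommand and fails fast on a bad JAVA_HOME (run from the conf directory we are already in):

$ ../bin/hadoop version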

$ cp -p core-site.xml core-site.xml.orig
$ cp -p hdfs-site.xml hdfs-site.xml.orig
$ cp -p mapred-site.xml mapred-site.xml.orig

$ vi core-site.xml
<configuration>

 <property>
  <name>hadoop.tmp.dir</name>
  <value>/opt/hadoop</value>
  <description>CacheDirectory</description>
 </property>

 <property>
  <name>fs.default.name</name>
  <value>hdfs://localhost:9000</value>
  <description>NameNode</description>
 </property>

</configuration>

$ vi hdfs-site.xml

<configuration>

 <property>
  <name>dfs.replication</name>
  <value>2</value>
  <description>HdfsRedundancy</description>
 </property>

</configuration>

$ vi mapred-site.xml

<configuration>

 <property>
  <name>mapred.job.tracker</name>
  <value>localhost:9001</value>
  <description>JobTrackerNode</description>
 </property>

 <property>
  <name>mapred.tasktracker.map.tasks.maximum</name>
  <value>4</value>
 </property>

 <property>
  <name>mapred.map.tasks</name>
  <value>40</value>
 </property>
<!--
 <property>
  <name>mapred.reduce.tasks</name>
  <value>80</value>
 </property>
-->
 <property>
  <name>mapred.map.tasks.speculative.execution</name>
  <value>false</value>
 </property>

 <property>
  <name>mapred.reduce.tasks.speculative.execution</name>
  <value>false</value>
 </property>

 <property>
  <name>mapred.child.java.opts</name>
  <value>-Xmx2500m -Xms640m</value>
 </property>

</configuration>
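
Note the sizing these values imply: mapred.tasktracker.map.tasks.maximum=4 with -Xmx2500m per child means map tasks alone can claim up to 4 × 2500 MB = 10000 MB of heap on this node, on top of the daemons' default 1000 MB each, so the settings assume a machine with well over 10 GB of RAM.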
$ vi masters

(delete all entries; the masters file lists secondary namenode hosts, and none is wanted here)

$ vi slaves

localhost

hadoop run 1
$ hadoop namenode -format

13/12/23 01:36:00 INFO namenode.NameNode: STARTUP_MSG:
/************************************************************
STARTUP_MSG: Starting NameNode
STARTUP_MSG:   host = ip-10-171-91-236/10.171.91.236
STARTUP_MSG:   args = [-format]
STARTUP_MSG:   version = 0.20.1
STARTUP_MSG:   build = http://svn.apache.org/repos/asf/hadoop/common/tags/release-0.20.1-rc1 -r 810220; compiled by 'oom' on Tue Sep  1 20:55:56 UTC 2009
************************************************************/
13/12/23 01:36:00 INFO namenode.FSNamesystem: fsOwner=hadoop,hadoop
13/12/23 01:36:00 INFO namenode.FSNamesystem: supergroup=supergroup
13/12/23 01:36:00 INFO namenode.FSNamesystem: isPermissionEnabled=true
13/12/23 01:36:01 INFO common.Storage: Image file of size 96 saved in 0 seconds.
13/12/23 01:36:01 INFO common.Storage: Storage directory /opt/hadoop/dfs/name has been successfully formatted.
13/12/23 01:36:01 INFO namenode.NameNode: SHUTDOWN_MSG:
/************************************************************
SHUTDOWN_MSG: Shutting down NameNode at ip-10-171-91-236/10.171.91.236
************************************************************/
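
The formatted image lands under hadoop.tmp.dir from core-site.xml, so it can be confirmed on disk (expect fsimage, edits, fstime and VERSION):

$ ls /opt/hadoop/dfs/name/current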
hadoop auto run
# cd /var/tmp
# rpm2cpio /var/tmp/hadoop-0.20-0.20.2+923.21-1.noarch.rpm | cpio -id
# rpm2cpio /var/tmp/hadoop-0.20-datanode-0.20.2+923.21-1.noarch.rpm | cpio -id
# rpm2cpio /var/tmp/hadoop-0.20-jobtracker-0.20.2+923.21-1.noarch.rpm | cpio -id
# rpm2cpio /var/tmp/hadoop-0.20-namenode-0.20.2+923.21-1.noarch.rpm | cpio -id
# rpm2cpio /var/tmp/hadoop-0.20-tasktracker-0.20.2+923.21-1.noarch.rpm | cpio -id
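
To preview what an rpm will drop without installing it, cpio -t prints the archive's file list (the same pipeline as above, minus extraction):

# rpm2cpio /var/tmp/hadoop-0.20-0.20.2+923.21-1.noarch.rpm | cpio -t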

# cd /var/tmp/etc/rc.d/init.d
# cp hadoop-0.20-datanode /etc/init.d/
# cp hadoop-0.20-jobtracker /etc/init.d/
# cp hadoop-0.20-namenode /etc/init.d/
# cp hadoop-0.20-tasktracker /etc/init.d/

# cd /var/tmp/etc/default
# cp hadoop-0.20 /etc/default/

# cd /etc/default
# vi hadoop-0.20

export HADOOP_HOME=/opt/hadoop
export HADOOP_NAMENODE_USER=hadoop
export HADOOP_SECONDARYNAMENODE_USER=hadoop
export HADOOP_DATANODE_USER=hadoop
export HADOOP_JOBTRACKER_USER=hadoop
export HADOOP_TASKTRACKER_USER=hadoop
export HADOOP_IDENT_STRING=hadoop
export HADOOP_LOG_DIR=/mnt/log/hadoop
export HADOOP_PID_DIR=/var/run/hadoop
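
The packaged init scripts read /etc/default/hadoop-0.20, so check that the file parses and exports what we expect from a throwaway shell:

# sh -c '. /etc/default/hadoop-0.20; echo $HADOOP_HOME $HADOOP_PID_DIR'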

# cd /etc/init.d
# vi hadoop-0.20-namenode

PIDFILE="/var/run/hadoop/hadoop-$HADOOP_IDENT_STRING-namenode.pid"

  daemon --user "$HADOOP_NAMENODE_USER" /opt/hadoop/bin/hadoop-daemon.sh --config "/opt/hadoop/conf" start namenode $DAEMON_FLAGS

  daemon --user "$HADOOP_NAMENODE_USER" /opt/hadoop/bin/hadoop-daemon.sh --config "/opt/hadoop/conf" stop namenode


# vi hadoop-0.20-jobtracker

PIDFILE="/var/run/hadoop/hadoop-$HADOOP_IDENT_STRING-jobtracker.pid"

  daemon --user "$HADOOP_JOBTRACKER_USER" /opt/hadoop/bin/hadoop-daemon.sh --config "/opt/hadoop/conf" start jobtracker $DAEMON_FLAGS

  daemon --user "$HADOOP_JOBTRACKER_USER" /opt/hadoop/bin/hadoop-daemon.sh --config "/opt/hadoop/conf" stop jobtracker


# vi hadoop-0.20-datanode

# chkconfig: 2345 91 09

PIDFILE="/var/run/hadoop/hadoop-$HADOOP_IDENT_STRING-datanode.pid"

  daemon --user "$HADOOP_DATANODE_USER" /opt/hadoop/bin/hadoop-daemon.sh --config "/opt/hadoop/conf" start datanode $DAEMON_FLAGS

  daemon --user "$HADOOP_DATANODE_USER" /opt/hadoop/bin/hadoop-daemon.sh --config "/opt/hadoop/conf" stop datanode


# vi hadoop-0.20-tasktracker

# chkconfig: 2345 91 09

PIDFILE="/var/run/hadoop/hadoop-$HADOOP_IDENT_STRING-tasktracker.pid"

  daemon --user "$HADOOP_TASKTRACKER_USER" /opt/hadoop/bin/hadoop-daemon.sh --config "/opt/hadoop/conf" start tasktracker $DAEMON_FLAGS

  daemon --user "$HADOOP_TASKTRACKER_USER" /opt/hadoop/bin/hadoop-daemon.sh --config "/opt/hadoop/conf" stop tasktracker
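
All four scripts must agree with HADOOP_PID_DIR and HADOOP_IDENT_STRING from /etc/default/hadoop-0.20, because hadoop-daemon.sh names its pid file the same way; a one-liner to eyeball them:

# grep '^PIDFILE' /etc/init.d/hadoop-0.20-*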

# chkconfig --add hadoop-0.20-namenode
# chkconfig hadoop-0.20-namenode on

# chkconfig --add hadoop-0.20-jobtracker
# chkconfig hadoop-0.20-jobtracker on

# chkconfig --add hadoop-0.20-datanode
# chkconfig hadoop-0.20-datanode on

# chkconfig --add hadoop-0.20-tasktracker
# chkconfig hadoop-0.20-tasktracker on
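
Confirm the runlevels took:

# chkconfig --list | grep hadoop-0.20
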
hadoop check
# service hadoop-0.20-namenode start
# service hadoop-0.20-jobtracker start
# service hadoop-0.20-datanode start
# service hadoop-0.20-tasktracker start

# ps -ef 

hadoop   16784     1  3 03:00 ?        00:00:03 /opt/java/jdk1.7.0/bin/java -Xmx1000m -Dcom.sun.management.jmxremote -Dc
hadoop   16879     1  3 03:00 ?        00:00:04 /opt/java/jdk1.7.0/bin/java -Xmx1000m -Dcom.sun.management.jmxremote -Dc
hadoop   16952     1  3 03:00 ?        00:00:03 /opt/java/jdk1.7.0/bin/java -Xmx1000m -Dcom.sun.management.jmxremote -Dc
hadoop   17059     1  3 03:01 ?        00:00:03 /opt/java/jdk1.7.0/bin/java -Xmx1000m -Dhadoop.log.dir=/mnt/log/hadoop

# cd /mnt/log/hadoop
# tail hadoop-hadoop-namenode-*.log
# tail hadoop-hadoop-jobtracker-*.log
# tail hadoop-hadoop-datanode-*.log
# tail hadoop-hadoop-tasktracker-*.log
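
If the JDK's jps is on the hadoop user's PATH, it names the JVMs directly, which reads better than ps (expect NameNode, JobTracker, DataNode, TaskTracker):

# su - hadoop -c jps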

# su - hadoop
$ hadoop dfs -mkdir /user/hadoop/input_inverted_index
$ hadoop dfs -mkdir /user/hadoop/output_inverted_index
$ hadoop dfs -mkdir /user/hadoop/pop_rank
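
A listing confirms the directories exist in HDFS:

$ hadoop dfs -ls /user/hadoop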

$ echo "hoge hoge hoge fuga fuga" > hoge.txt
$ hadoop fs -mkdir input
$ hadoop fs -put hoge.txt input/hoge.txt
$ hadoop fs -ls input
Found 1 items
-rw-r--r--   2 hadoop supergroup         25 2013-12-23 03:03 /user/hadoop/input/hoge.txt

$ hadoop jar hadoop-*-examples.jar wordcount input output

13/12/23 03:03:57 INFO input.FileInputFormat: Total input paths to process : 1
13/12/23 03:03:58 INFO mapred.JobClient: Running job: job_201312230300_0001
13/12/23 03:03:59 INFO mapred.JobClient:  map 0% reduce 0%
13/12/23 03:04:07 INFO mapred.JobClient:  map 100% reduce 0%
13/12/23 03:04:19 INFO mapred.JobClient:  map 100% reduce 100%
13/12/23 03:04:21 INFO mapred.JobClient: Job complete: job_201312230300_0001
13/12/23 03:04:21 INFO mapred.JobClient: Counters: 17
13/12/23 03:04:21 INFO mapred.JobClient:   Job Counters
13/12/23 03:04:21 INFO mapred.JobClient:     Launched reduce tasks=1
13/12/23 03:04:21 INFO mapred.JobClient:     Launched map tasks=1
13/12/23 03:04:21 INFO mapred.JobClient:     Data-local map tasks=1
13/12/23 03:04:21 INFO mapred.JobClient:   FileSystemCounters
13/12/23 03:04:21 INFO mapred.JobClient:     FILE_BYTES_READ=28
13/12/23 03:04:21 INFO mapred.JobClient:     HDFS_BYTES_READ=25
13/12/23 03:04:21 INFO mapred.JobClient:     FILE_BYTES_WRITTEN=88
13/12/23 03:04:21 INFO mapred.JobClient:     HDFS_BYTES_WRITTEN=14
13/12/23 03:04:21 INFO mapred.JobClient:   Map-Reduce Framework
13/12/23 03:04:21 INFO mapred.JobClient:     Reduce input groups=0
13/12/23 03:04:21 INFO mapred.JobClient:     Combine output records=2
13/12/23 03:04:21 INFO mapred.JobClient:     Map input records=1
13/12/23 03:04:21 INFO mapred.JobClient:     Reduce shuffle bytes=28
13/12/23 03:04:21 INFO mapred.JobClient:     Reduce output records=0
13/12/23 03:04:21 INFO mapred.JobClient:     Spilled Records=4
13/12/23 03:04:21 INFO mapred.JobClient:     Map output bytes=45
13/12/23 03:04:21 INFO mapred.JobClient:     Combine input records=5
13/12/23 03:04:21 INFO mapred.JobClient:     Map output records=5
13/12/23 03:04:21 INFO mapred.JobClient:     Reduce input records=2

$ hadoop fs -ls output
Found 2 items
drwxr-xr-x   - hadoop supergroup          0 2013-12-23 03:03 /user/hadoop/output/_logs
-rw-r--r--   2 hadoop supergroup         14 2013-12-23 03:04 /user/hadoop/output/part-r-00000

$ hadoop fs -cat output/part-r-00000
fuga    2
hoge    3

$ hadoop fs -rmr input
Deleted hdfs://localhost:9000/user/hadoop/input

$ hadoop fs -rmr output
Deleted hdfs://localhost:9000/user/hadoop/output

$ rm hoge.txt
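
As a final check, dfsadmin prints a one-shot health report for the single-node HDFS; note that with dfs.replication=2 and only one datanode, any files left in HDFS will show up as under-replicated blocks:

$ hadoop dfsadmin -report
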
memcached install
・memcached-1.4.15.tar.gz
・libevent-1.4.14b-stable.tar.gz
・java_memcached-release_2.6.3.zip

# cp -rp /mnt/nas/middleware/memcached/* /var/tmp

# groupadd -g 495 memcached
# useradd -u 495 -g memcached -r -m -d /var/run/memcached memcached

# cd /var/tmp
# mkdir java_memcached
# chmod 755 java_memcached
# cd java_memcached
# unzip ../java_memcached-release_2.6.3.zip

# cd /opt/tomcat7/lib
# cp /var/tmp/java_memcached/commons-pool-1.5.6.jar .
# cp /var/tmp/java_memcached/java_memcached-release_2.6.3.jar .
# cp /var/tmp/java_memcached/slf4j-api-1.6.1.jar .
# cp /var/tmp/java_memcached/slf4j-simple-1.6.1.jar .
# chown tomcat7:tomcat7 commons-pool-1.5.6.jar java_memcached-release_2.6.3.jar slf4j-api-1.6.1.jar slf4j-simple-1.6.1.jar

# service tomcat7 restart
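
A quick check that the client jars landed where Tomcat's class loader will see them:

# ls -l /opt/tomcat7/lib | grep -E 'memcached|slf4j|commons-pool'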

# cd /var/tmp
# tar zxvf libevent-1.4.14b-stable.tar.gz
# chown -R root:root libevent-1.4.14b-stable
# cd libevent-1.4.14b-stable

# ./configure --prefix=/usr/local/libevent 2>&1 | tee -a configure.log.`date +%Y%m%d%H%M%S`

# make 2>&1 | tee -a make.log.`date +%Y%m%d%H%M%S`

# make install 2>&1 | tee -a make_install.log.`date +%Y%m%d%H%M%S`

# vi /etc/ld.so.conf.d/libevent.conf
/usr/local/libevent/lib

# ldconfig
# ldconfig -p | grep libevent

        libevent_extra-1.4.so.2 (libc6,x86-64) => /usr/local/libevent/lib/libevent_extra-1.4.so.2
        libevent_extra-1.4.so.2 (libc6,x86-64) => /usr/lib64/libevent_extra-1.4.so.2
        libevent_core-1.4.so.2 (libc6,x86-64) => /usr/local/libevent/lib/libevent_core-1.4.so.2
        libevent_core-1.4.so.2 (libc6,x86-64) => /usr/lib64/libevent_core-1.4.so.2
        libevent-1.4.so.2 (libc6,x86-64) => /usr/local/libevent/lib/libevent-1.4.so.2
        libevent-1.4.so.2 (libc6,x86-64) => /usr/lib64/libevent-1.4.so.2

# cd /var/tmp
# tar zxvf memcached-1.4.15.tar.gz
# chown -R root:root memcached-1.4.15
# cd memcached-1.4.15

# ./configure --prefix=/usr/local --with-libevent=/usr/local/libevent 2>&1 | tee -a configure.log.`date +%Y%m%d%H%M%S`

# make 2>&1 | tee -a make.log.`date +%Y%m%d%H%M%S`
# make install 2>&1 | tee -a make_install.log.`date +%Y%m%d%H%M%S`
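
Verify the fresh binary runs and resolved the new libevent (memcached -h prints version and usage; ldd shows the linked library):

# /usr/local/bin/memcached -h | head -2
# ldd /usr/local/bin/memcached | grep libevent
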
auto run memcached
# vi /etc/init.d/memcached

#! /bin/sh
#
# chkconfig: - 55 45
# description:  The memcached daemon is a network memory cache service.
# processname: memcached
# config: /etc/sysconfig/memcached

# Source function library.
. /etc/rc.d/init.d/functions

# Source networking configuration so $NETWORKING is defined for the check below.
. /etc/sysconfig/network

PORT=11211
USER=memcached
MAXCONN=1024
CACHESIZE=3072
OPTIONS=""

if [ -f /etc/sysconfig/memcached ]; then
    . /etc/sysconfig/memcached
fi

# Check that networking is up.
if [ "$NETWORKING" = "no" ]
then
    exit 0
fi

RETVAL=0
prog="memcached"

start () {
    echo -n $"Starting $prog: "
    # ensure that /var/run/memcached has proper permissions
    chown $USER /var/run/memcached
    daemon /usr/local/bin/memcached -d -p $PORT -u $USER -m $CACHESIZE -c $MAXCONN -P /var/run/memcached/memcached.pid $OPTIONS
    #daemon memcached -d -p $PORT -u $USER  -m $CACHESIZE -c $MAXCONN $OPTIONS
    RETVAL=$?
    echo
    [ $RETVAL -eq 0 ] && touch /var/lock/subsys/memcached
}
stop () {
    echo -n $"Stopping $prog: "
    killproc memcached
    RETVAL=$?
    echo
    if [ $RETVAL -eq 0 ] ; then
        rm -f /var/lock/subsys/memcached
        rm -f /var/run/memcached/memcached.pid
    fi
}
restart () {
    stop
    start
}


# See how we were called.
case "$1" in
    start)
        start
        ;;
    stop)
        stop
        ;;
    status)
        status memcached
        ;;
    restart|reload)
        restart
        ;;
    condrestart)
        [ -f /var/lock/subsys/memcached ] && restart || :
        ;;
    *)
        echo $"Usage: $0 {start|stop|status|restart|reload|condrestart}"
        exit 1
esac

exit $?

# chmod +x /etc/init.d/memcached
# chkconfig --add memcached
# chkconfig memcached on

check memcached
# service memcached start

# ps -ef | grep memcached

497      13008     1  0 00:12 ?        00:00:00 /usr/local/bin/memcached -d -p 11211 -u memcached -m 3072 -c 1024 -P /var/run/memcached/memcached.pid
root     13017  7950  0 00:12 pts/0    00:00:00 grep memcached

# telnet localhost 11211

Trying ::1...
Connected to localhost.
Escape character is '^]'.
set hoge 0 0 5      
japan               
STORED
get hoge            
VALUE hoge 0 5
japan
END
quit                
Connection closed by foreign host.
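
The same text protocol scripts cleanly; stats dumps the server counters, and sending quit lets nc exit (assumes nc is installed):

# echo -e 'stats\nquit' | nc localhost 11211 | head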

Reposted from blog.csdn.net/lkw5657/article/details/53332417