# Stop the firewall
systemctl stop firewalld
# Disable it at boot
systemctl disable firewalld
# Flush the firewall rules
iptables -F
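To confirm (a quick sanity check, not in the original notes), firewall-cmd should report that firewalld is no longer running, and iptables should list no remaining rules:

firewall-cmd --state
iptables -L -n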
Download URL:
http://archive.cloudera.com/cdh5/cdh/5/ (tarballs)
[root@localhost SOFT]# tar -xzvf jdk-8u181-linux-x64.tar.gz -C /usr/java/
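For the java -version check below to work, the JDK's bin directory must be on PATH; a minimal sketch, assuming the archive unpacked to /usr/java/jdk1.8.0_181 (adjust to the actual directory name):

# append to /etc/profile, then reload it
export JAVA_HOME=/usr/java/jdk1.8.0_181   # assumed unpack dir
export PATH=$JAVA_HOME/bin:$PATH
source /etc/profile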
Verify:
java -version

Create a user:
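The notes don't show the account being created; presumably something along these lines (run as root):

useradd ifengs
passwd ifengs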
[root@localhost java]# ll /home/ifengs
total 0

Switch to the ifengs user and create the directory layout:
[ifengs@localhost ~]$ mkdir app software sourcecode log data tmp lib
[ifengs@localhost ~]$ ll
total 0
drwxrwxr-x. 2 ifengs ifengs 6 Jul  2 18:26 app
drwxrwxr-x. 2 ifengs ifengs 6 Jul  2 18:26 data
drwxrwxr-x. 2 ifengs ifengs 6 Jul  2 18:26 lib
drwxrwxr-x. 2 ifengs ifengs 6 Jul  2 18:26 log
drwxrwxr-x. 2 ifengs ifengs 6 Jul  2 18:26 software
drwxrwxr-x. 2 ifengs ifengs 6 Jul  2 18:26 sourcecode
drwxrwxr-x. 2 ifengs ifengs 6 Jul  2 18:26 tmp
[ifengs@localhost ~]$

app: software
data: extra data
lib: extra third-party jars (jdbc, ...)
log: logs
sourcecode: source code
tmp: temp directory

Move the installation packages under software:
mv hadoop... /home/ifengs/software/

Change the owner of the packages:
chown -R ifengs:ifengs /home/ifengs/software/*

Extract the packages:
tar -xzvf hadoop-2.6... -C ~/app/
(likewise for the other packages)

Create symlinks:
ln -s hadoop-2.6.0... hadoop
(likewise for the others)

Configure .bashrc:
export HADOOP_HOME=/home/ifengs/app/hadoop
export HIVE_HOME=/home/ifengs/app/hive
export PATH=${HADOOP_HOME}/bin:${HIVE_HOME}/bin:$PATH

source .bashrc

Test: first map the internal IP to the hostname in /etc/hosts.
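The /etc/hosts entry just maps the internal IP to the hostname, e.g. (placeholder address):

192.168.1.100 zuoll1   # placeholder IP, use the real internal one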
ssh zuoll1
It should log you straight in, with no password prompt.
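Passwordless login presumes a key pair whose public half is in authorized_keys; if ssh still prompts, the standard OpenSSH setup (not shown in the original notes) is:

ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 600 ~/.ssh/authorized_keys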
Configure the slaves file:
vi slaves
# clear the file, then enter:
zuoll1

core-site:
vi core-site.xml
<property>
  <name>fs.defaultFS</name>
  <value>hdfs://zuoll1:9000</value>
</property>

Modify hadoop.tmp.dir:
vi core-site.xml
<property>
  <name>hadoop.tmp.dir</name>
  <value>/home/ifengs/tmp</value>
</property>

By default this points under the system /tmp, but the system wipes /tmp roughly every half month, so move it.
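Both properties belong in the single <configuration> element of core-site.xml; assembled from the two snippets above:

<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://zuoll1:9000</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/home/ifengs/tmp</value>
  </property>
</configuration>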
hdfs-site.xml (one <configuration> element; the two secondary addresses configure the snn, i.e. the secondary namenode):
<property>
  <name>dfs.replication</name>
  <value>1</value>
</property>
<property>
  <name>dfs.namenode.secondary.http-address</name>
  <value>zuoll1:9868</value>
</property>
<property>
  <name>dfs.namenode.secondary.https-address</name>
  <value>zuoll1:9869</value>
</property>
The default ResourceManager web UI port, 8088, is a frequent cryptomining target, so move it elsewhere:
<property>
  <name>yarn.resourcemanager.webapp.address</name>
  <value>zuoll1:18088</value>
</property>
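The notes don't name the file, but this property presumably goes in yarn-site.xml, again inside a single <configuration> element; assembled, a minimal sketch:

<configuration>
  <property>
    <name>yarn.resourcemanager.webapp.address</name>
    <value>zuoll1:18088</value>
  </property>
</configuration>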
Format HDFS:
hdfs namenode -format

Start HDFS:
start-dfs.sh

JAVA_HOME is already configured, yet startup still complains about it.
A major Apache Hadoop bug!!! start-dfs.sh launches the daemons over ssh, and that session does not inherit the login environment, so the JAVA_HOME exported in .bashrc is invisible to them. Go change it in the stock config file:
vi ~/app/hadoop/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/java/jdk1.8...

Start dfs again:
start-dfs.sh
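A quick check (not in the original notes): jps should now list the three HDFS daemons.

jps
# expect NameNode, DataNode and SecondaryNameNode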
List files in HDFS:
hdfs dfs -ls /

Create a directory in HDFS:
hdfs dfs -mkdir /zuoll1
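To round off the basics, put a local file into the new directory and list it (the filename is a placeholder):

hdfs dfs -put ~/data/test.txt /zuoll1/   # test.txt is a placeholder
hdfs dfs -ls /zuoll1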