tar xzvf — extract the Spark tarball
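For example (the exact tarball name depends on the version you downloaded; spark-3.1.2-bin-hadoop3.2.tgz is just an assumed name here):
tar xzvf spark-3.1.2-bin-hadoop3.2.tgz -C /opt/
mv /opt/spark-3.1.2-bin-hadoop3.2 /opt/spark    # rename to match the paths used below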
vim /etc/profile — configure the Spark environment variables
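A minimal sketch of the lines to append, assuming Spark was unpacked to /opt/spark as above:
export SPARK_HOME=/opt/spark
export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin
Run source /etc/profile afterwards so the changes take effect in the current shell.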
cp /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh
vim /opt/spark/conf/spark-env.sh
Add the following:
export JAVA_HOME=/opt/jdk                        # JDK install path
export HADOOP_HOME=/opt/hadoop                   # Hadoop install path
export SCALA_HOME=/opt/scala                     # Scala install path
export HADOOP_CONF_DIR=/opt/hadoop/etc/hadoop    # Hadoop config directory
export SPARK_MASTER_IP=localhost                 # this machine's hostname
export SPARK_WORKER_MEMORY=1g                    # memory per worker
Start Spark with start-all.sh — use the full path, since Hadoop ships a start-all.sh of the same name:
/opt/spark/sbin/start-all.sh
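To verify the daemons came up, jps (shipped with the JDK) should list a Master and a Worker process:
jps
# expected output (PIDs will differ), roughly:
# 2345 Master
# 2412 Worker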
spark-shell — start the Spark shell
:quit — exit the shell
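As a quick smoke test, you can pipe a one-line job into the shell; the sum of 1 to 100 should come back as 5050:
echo 'sc.parallelize(1 to 100).reduce(_ + _)' | /opt/spark/bin/spark-shell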
Open the Spark web UI
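By default the standalone master UI listens on port 8080 (a running spark-shell also serves its own application UI on port 4040), so with the settings above:
http://localhost:8080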
For a cluster setup, go through the same installation on the master node:
tar xzvf — extract the Spark tarball
vim /etc/profile — configure the Spark environment variables
cp /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh
vim /opt/spark/conf/spark-env.sh
Add the following:
export JAVA_HOME=/opt/jdk                        # JDK install path
export HADOOP_HOME=/opt/hadoop                   # Hadoop install path
export SCALA_HOME=/opt/scala                     # Scala install path
export HADOOP_CONF_DIR=/opt/hadoop/etc/hadoop    # Hadoop config directory
export SPARK_MASTER_IP=localhost                 # the master's hostname
export SPARK_WORKER_MEMORY=1g                    # memory per worker
First copy the slaves template to a slaves file without the .template suffix:
cp /opt/spark/conf/slaves.template /opt/spark/conf/slaves
In the slaves file, delete the existing entry and add the three hostnames.
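For example, with three hypothetical hostnames (node1 here doubles as the master; replace with your own):
node1
node2
node3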
Use scp (scp -r for directories) to copy the installation to the other nodes.
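A sketch, assuming node1 is the machine you just configured and node2/node3 are the other two (hypothetical names):
scp -r /opt/spark root@node2:/opt/
scp -r /opt/spark root@node3:/opt/
scp /etc/profile root@node2:/etc/
scp /etc/profile root@node3:/etc/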
source /etc/profile
Start the cluster with start-all.sh — again, use the full path to avoid Hadoop's script of the same name:
/opt/spark/sbin/start-all.sh
spark-shell — start the Spark shell
:quit — exit the shell