1. Download the Spark 2.2.0 package (spark-2.2.0-bin-hadoop2.6).
2. Extract it.
3. Enter the configuration directory conf.
4. Copy hive-site.xml, hdfs-site.xml, yarn-site.xml, and core-site.xml from the CDH cluster into the conf directory (steps 1-4 are sketched as shell commands after the variable list below).
5. Edit spark-env.sh to set the variables below and pull the CDH jars onto Spark's classpath (a quick sanity check also follows the list):
export SPARK_HOME=/home/cdh/software/spark/spark-2.2.0-bin-hadoop2.6
export JAVA_HOME=/usr/java/latest
export HADOOP_HOME=/opt/cloudera/parcels/CDH/lib/hadoop
export HADOOP_CONF_DIR=/etc/hadoop/conf
export YARN_CONF_DIR=/etc/hadoop/conf
export HBASE_HOME=/opt/cloudera/parcels/CDH/lib/hbase
export HBASE_CONF_DIR=/etc/hbase/conf
#export YARN_CONF_DIR=/etc/hadoop/conf.cloudera.yarn
export SPARK_MASTER_IP=10.101.1.119
#export CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath):$CLASSPATH
export SPARK_DIST_CLASSPATH=$(hadoop classpath)
# Java classpath wildcards must end in "/*" (not "*.jar"); a bare directory entry only picks up .class files
export SPARK_DIST_CLASSPATH=$SPARK_DIST_CLASSPATH:/opt/cloudera/parcels/CDH/lib/hbase/*:/opt/cloudera/parcels/CDH/lib/hive/lib/*:/opt/cloudera/parcels/CDH/jars/*
# Note: SPARK_CLASSPATH is deprecated in Spark 2.x; prefer spark.driver.extraClassPath / spark.executor.extraClassPath
export SPARK_CLASSPATH=/home/cdh/s
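
A command-level sketch of steps 1-4, assuming the package is downloaded into /home/cdh/software/spark (the parent of SPARK_HOME above) and that the CDH client configs live under /etc/hadoop/conf and /etc/hive/conf; adjust the paths to your cluster:

cd /home/cdh/software/spark
wget https://archive.apache.org/dist/spark/spark-2.2.0/spark-2.2.0-bin-hadoop2.6.tgz   # step 1: download
tar -xzf spark-2.2.0-bin-hadoop2.6.tgz                                                 # step 2: extract
cd spark-2.2.0-bin-hadoop2.6/conf                                                      # step 3: enter conf
cp /etc/hive/conf/hive-site.xml .                                                      # step 4: copy CDH configs
cp /etc/hadoop/conf/hdfs-site.xml /etc/hadoop/conf/core-site.xml /etc/hadoop/conf/yarn-site.xml .
cp spark-env.sh.template spark-env.sh   # then add the exports from step 5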
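
As a quick sanity check after step 5 (an assumed workflow, not part of the original notes: it requires YARN and the Hive metastore to be reachable from this node), start spark-shell on YARN and confirm the Hive databases are visible:

cd /home/cdh/software/spark/spark-2.2.0-bin-hadoop2.6
./bin/spark-shell --master yarn --deploy-mode client
# inside the Scala REPL:
#   spark.sql("show databases").show()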