Spark Configuration (7) -- Configuring Spark on YARN

Edit Spark's environment configuration file:

  vim /usr/local/spark/conf/spark-env.sh


Append the following settings:

  # Let Spark pick up the jars from the local Hadoop installation
  export SPARK_DIST_CLASSPATH=$(/usr/local/hadoop/bin/hadoop classpath)
  export SCALA_HOME=/usr/local/scala
  export JAVA_HOME=/opt/jdk1.8.0_65
  export SPARK_MASTER=localhost
  export SPARK_LOCAL_IP=localhost
  export HADOOP_HOME=/usr/local/hadoop
  export SPARK_HOME=/usr/local/spark
  export SPARK_LIBRARY_PATH=.:$JAVA_HOME/lib:$JAVA_HOME/jre/lib:$HADOOP_HOME/lib/native
  # Client-side Hadoop/YARN configuration directory, needed for --master yarn
  export YARN_CONF_DIR=$HADOOP_HOME/etc/hadoop
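Two of these settings do the real work for YARN mode: SPARK_DIST_CLASSPATH lets a Spark build that ships without Hadoop jars pick them up from the local Hadoop installation, and YARN_CONF_DIR tells spark-submit where to find the cluster-side configuration (ResourceManager address and so on). Without it, the --master yarn commands below cannot locate the cluster.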

With spark-env.sh in place, a self-built Scala application can be submitted straight to YARN (client mode is the default deploy mode):

  /usr/local/spark/bin/spark-submit --master yarn --num-executors 2 --executor-cores 1 --class "SimpleApp" ~/sparkapp/target/scala-2.10/simple-project_2.10-1.0.jar
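The post does not show the application source, but as a minimal sketch, a SimpleApp in the style of the Spark quick-start guide (the input path and counted letters here are assumptions) could look like this:

  import org.apache.spark.{SparkConf, SparkContext}

  object SimpleApp {
    def main(args: Array[String]) {
      // Input path is an assumption; use any file readable from every node
      val logFile = "file:///usr/local/spark/README.md"
      val conf = new SparkConf().setAppName("Simple Application")
      val sc = new SparkContext(conf)
      val logData = sc.textFile(logFile, 2).cache()
      // Count lines containing "a" and "b", as in the quick-start example
      val numAs = logData.filter(line => line.contains("a")).count()
      val numBs = logData.filter(line => line.contains("b")).count()
      println("Lines with a: " + numAs + ", lines with b: " + numBs)
      sc.stop()
    }
  }

Packaged with sbt as simple-project_2.10-1.0.jar, this matches the --class "SimpleApp" reference in the command above.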

To run the bundled SparkPi example in cluster mode (the trailing 10 is the number of partitions passed to SparkPi):

  ./bin/spark-submit --class org.apache.spark.examples.SparkPi \
      --master yarn \
      --deploy-mode cluster \
      --driver-memory 4g \
      --executor-memory 2g \
      --executor-cores 1 \
      --queue thequeue \
      lib/spark-examples*.jar \
      10


Original post: https://www.cnblogs.com/iathena/p/5617238.html