plantegg posted on 2019-01-30 11:39:59

spark

  16/03/04 00:21:09 WARN SparkContext: Using SPARK_MEM to set amount of memory to use per executor process is deprecated, please use spark.executor.memory instead.
  16/03/04 00:21:09 ERROR SparkContext: Error initializing SparkContext.
  org.apache.spark.SparkException: Could not parse Master URL: 'at'
  at org.apache.spark.SparkContext$.org$apache$spark$SparkContext$$createTaskScheduler(SparkContext.scala:2554)
  at org.apache.spark.SparkContext.<init>(SparkContext.scala:489)
  at com.bigdata.deal.scala.DomainLib$.main(DomainLib.scala:22)
  at com.bigdata.deal.scala.DomainLib.main(DomainLib.scala)
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:606)
  at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:664)
  at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:169)
  at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:192)
  at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:111)
  at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
  

  When setting up the SparkConf, make sure it contains both the sparkHome and the master URL; otherwise SparkContext cannot parse the master and fails as above.
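  For reference, a minimal sketch of a driver that sets the master URL and sparkHome explicitly through SparkConf; the master URL, Spark home path and app name below are assumptions for illustration, not values taken from the original job:

  // Sketch only: the master must be a real URL such as local[*],
  // spark://host:port or yarn, otherwise SparkContext fails with
  // "Could not parse Master URL".
  import org.apache.spark.{SparkConf, SparkContext}

  object DomainLibExample {
    def main(args: Array[String]): Unit = {
      val conf = new SparkConf()
        .setAppName("DomainLib")                 // assumed app name
        .setMaster("spark://mini-cp1:7077")      // assumed standalone master, default port
        .setSparkHome("/usr/local/spark")        // assumed Spark install path
        .set("spark.executor.memory", "3g")      // replaces the deprecated SPARK_MEM
      val sc = new SparkContext(conf)
      // ... job logic ...
      sc.stop()
    }
  }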

  Also note that the following needs to be configured in spark-env.sh; with that in place it works:
  
  # cat conf/spark-env.sh
  #!/usr/bin/env bash
  

  SPARK_MASTER_IP=mini-cp1
  # The JAVA_HOME root path must be exported
  export JAVA_HOME=/usr/local/jdk1.7.0_65
  export HADOOP_HOME=/usr/local/hadoop-2.6.0
  

  

  #export SCALA_HOME=/opt/scala
  export SPARK_WORKER_MEMORY=3g
  export HADOOP_CONF_DIR=/usr/local/hadoop-2.6.0/etc/hadoop
  #SPARK_MEM=${SPARK_MEM:-1g}
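  # NOTE: the warning in the log above says SPARK_MEM is deprecated;
  # prefer spark.executor.memory (or spark-submit's --executor-memory).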
  export SPARK_MEM=3g
  export HADOOP_HOME=/usr/local/hadoop-2.6.0
  export HADOOP_COMMON_LIB_NATIVE_DIR=/usr/local/hadoop-2.6.0/lib/native
  export HADOOP_OPTS="-Djava.library.path=/usr/local/hadoop-2.6.0/lib"
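  With SPARK_MASTER_IP set as above, a standalone master started on that host listens on port 7077 by default, so a submit command along these lines should give SparkContext a parsable master URL; the jar name is a placeholder, the class name is taken from the stack trace:

  bin/spark-submit \
    --master spark://mini-cp1:7077 \
    --class com.bigdata.deal.scala.DomainLib \
    --executor-memory 3g \
    domainlib.jar   # placeholder jar name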
  


  



