
SparkContext.scala Source Code Study

段智华 · Published 2016-01-03 17:38:21

https://github.com/apache/spark/blob/master/core/src/main/scala/org/apache/spark/SparkContext.scala

package org.apache.spark  // declares the spark package

import scala.language.implicitConversions  // ...followed by the long list of other imports the class needs

// SparkContext takes a SparkConf as its configuration and mixes in the Logging and ExecutorAllocationClient traits.
class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationClient {
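This primary constructor is what a driver program normally ends up calling. A minimal usage sketch follows; the local master and application name are assumptions for illustration, not taken from the source:

import org.apache.spark.{SparkConf, SparkContext}

object SparkContextDemo {
  def main(args: Array[String]): Unit = {
    // Build a SparkConf and hand it to the primary constructor shown above.
    val conf = new SparkConf()
      .setMaster("local[2]")           // assumed local mode for this sketch
      .setAppName("SparkContextDemo")  // hypothetical application name
    val sc = new SparkContext(conf)
    try {
      val sum = sc.parallelize(1 to 100).reduce(_ + _)
      println(s"sum = $sum")
    } finally {
      sc.stop()  // release resources when the job is done
    }
  }
}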

// A series of auxiliary constructors follows, covering the different ways callers can pass in parameters.
// The zero-argument form simply builds a fresh SparkConf (which picks up spark.* system properties).
def this() = this(new SparkConf())
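A hedged sketch of that zero-argument path; the property values here are assumptions for illustration (in practice they are usually set by spark-submit):

import org.apache.spark.SparkContext

System.setProperty("spark.master", "local[2]")     // assumed local mode
System.setProperty("spark.app.name", "NoArgDemo")  // hypothetical application name
val sc = new SparkContext()  // new SparkConf() reads the spark.* system properties set above
sc.stop()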

// This variant still accepts preferredNodeLocationData for backward compatibility, but the argument is ignored.
def this(config: SparkConf, preferredNodeLocationData: Map[String, Set[SplitInfo]]) = {
  this(config)
  logWarning("Passing in preferred locations has no effect at all, see SPARK-8949")
}

// Applies the master URL and application name on top of the supplied conf.
def this(master: String, appName: String, conf: SparkConf) =
  this(SparkContext.updatedConf(conf, master, appName))
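A brief sketch of calling this three-argument constructor; the extra setting and application name are assumptions:

import org.apache.spark.{SparkConf, SparkContext}

// The master and appName arguments take precedence over values already on the conf,
// because updatedConf applies them to a copy of the conf before the primary constructor runs.
val baseConf = new SparkConf().set("spark.ui.showConsoleProgress", "false")  // assumed extra setting
val sc = new SparkContext("local[2]", "ThreeArgDemo", baseConf)              // hypothetical app name
println(sc.master)   // local[2]
println(sc.appName)  // ThreeArgDemo
sc.stop()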

def this(
    master: String,
    appName: String,
    sparkHome: String = null,
    jars: Seq[String] = Nil,
    environment: Map[String, String] = Map(),
    preferredNodeLocationData: Map[String, Set[SplitInfo]] = Map()) = {
  this(SparkContext.updatedConf(new SparkConf(), master, appName, sparkHome, jars, environment))
  if (preferredNodeLocationData.nonEmpty) {
    logWarning("Passing in preferred locations has no effect at all, see SPARK-8949")
  }
}

// NOTE: The below constructors could be consolidated using default arguments. Due to
// Scala bug SI-8479, however, this causes the compile step to fail when generating docs.
// Until we have a good workaround for that bug the constructors remain broken out.

/**
 * Alternative constructor that allows setting common Spark properties directly
 *
 * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
 * @param appName A name for your application, to display on the cluster web UI.
 */
private[spark] def this(master: String, appName: String) =
  this(master, appName, null, Nil, Map())

/**
 * Alternative constructor that allows setting common Spark properties directly
 *
 * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
 * @param appName A name for your application, to display on the cluster web UI.
 * @param sparkHome Location where Spark is installed on cluster nodes.
 */
private[spark] def this(master: String, appName: String, sparkHome: String) =
  this(master, appName, sparkHome, Nil, Map())

/**
 * Alternative constructor that allows setting common Spark properties directly
 *
 * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
 * @param appName A name for your application, to display on the cluster web UI.
 * @param sparkHome Location where Spark is installed on cluster nodes.
 * @param jars Collection of JARs to send to the cluster. These can be paths on the local file
 *             system or HDFS, HTTP, HTTPS, or FTP URLs.
 */
private[spark] def this(master: String, appName: String, sparkHome: String, jars: Seq[String]) =
  this(master, appName, sparkHome, jars, Map())
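For comparison, here is a hedged sketch of calling the public six-parameter constructor directly; the cluster URL, jar path and application name are hypothetical, and the private[spark] variants above are only reachable from Spark's own code:

import org.apache.spark.SparkContext

// Sketch only: the arguments feed SparkContext.updatedConf, which copies them
// onto a fresh SparkConf before the primary constructor runs.
val sc = new SparkContext(
  "spark://host:7077",                  // assumed cluster URL
  "SixArgDemo",                         // hypothetical application name
  sparkHome = "/opt/spark",             // assumed Spark install location on worker nodes
  jars = Seq("hdfs:///apps/demo.jar"))  // hypothetical application jar
sc.stop()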
