ammonite.spark.Spark

SparkContext

class SparkContext extends org.apache.spark.SparkContext

Linear Supertypes
org.apache.spark.SparkContext, ExecutorAllocationClient, Logging, AnyRef, Any

Instance Constructors

  1. new SparkContext(sparkConf: SparkConf)
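    Example
    A minimal construction sketch (not part of the original Scaladoc), assuming this class is in scope as SparkContext; the master URL and app name are placeholder values:

      import org.apache.spark.SparkConf

      val sparkConf = new SparkConf()
        .setMaster("local[*]")        // placeholder: run locally with all cores
        .setAppName("ammonite-demo")  // placeholder application name
      val sc = new SparkContext(sparkConf)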

Value Members

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. def accumulable[R, T](initialValue: R, name: String)(implicit param: AccumulableParam[R, T]): Accumulable[R, T]

    Definition Classes
    SparkContext
  7. def accumulable[R, T](initialValue: R)(implicit param: AccumulableParam[R, T]): Accumulable[R, T]

    Definition Classes
    SparkContext
  8. def accumulableCollection[R, T](initialValue: R)(implicit arg0: (R) ⇒ Growable[T] with TraversableOnce[T] with Serializable, arg1: ClassTag[R]): Accumulable[R, T]

    Definition Classes
    SparkContext
  9. def accumulator[T](initialValue: T, name: String)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Definition Classes
    SparkContext
  10. def accumulator[T](initialValue: T)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Definition Classes
    SparkContext
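    Example
    A minimal sketch (not from the original Scaladoc), assuming an already-created instance `sc` of this class; the implicit Int AccumulatorParam is picked up automatically:

      val errors = sc.accumulator(0, "errors")  // named accumulators show up in the web UI
      sc.parallelize(1 to 100, 4).foreach(i => if (i % 10 == 0) errors += 1)
      errors.value  // 10, readable only on the driver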
  11. def addFile(path: String, recursive: Boolean): Unit

    Definition Classes
    SparkContext
  12. def addFile(path: String): Unit

    Definition Classes
    SparkContext
  13. def addJar(path: String): Unit

    Definition Classes
    SparkContext
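    Example
    A hedged sketch; both paths are hypothetical. Files shipped with addFile are resolved on executors through org.apache.spark.SparkFiles:

      sc.addFile("/data/lookup.txt")  // hypothetical local file
      sc.addJar("/libs/extra.jar")    // hypothetical jar for task classloading

      // inside a task on an executor:
      // val path = org.apache.spark.SparkFiles.get("lookup.txt")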
  14. def addSparkListener(listener: SparkListener): Unit

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  15. def appName: String

    Definition Classes
    SparkContext
  16. def applicationAttemptId: Option[String]

    Definition Classes
    SparkContext
  17. def applicationId: String

    Definition Classes
    SparkContext
  18. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  19. def binaryFiles(path: String, minPartitions: Int): RDD[(String, PortableDataStream)]

    Definition Classes
    SparkContext
  20. def binaryRecords(path: String, recordLength: Int, conf: Configuration): RDD[Array[Byte]]

    Definition Classes
    SparkContext
  21. def broadcast[T](value: T)(implicit arg0: ClassTag[T]): Broadcast[T]

    Definition Classes
    SparkContext
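    Example
    A minimal sketch (not from the original Scaladoc), assuming an instance `sc`; the broadcast value is shipped to each executor once instead of with every task:

      val lookup = sc.broadcast(Map("a" -> 1, "b" -> 2))
      sc.parallelize(Seq("a", "b", "c"), 2)
        .map(k => lookup.value.getOrElse(k, 0))
        .collect()  // Array(1, 2, 0)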
  22. def cancelAllJobs(): Unit

    Definition Classes
    SparkContext
  23. def cancelJobGroup(groupId: String): Unit

    Definition Classes
    SparkContext
  24. def checkpointFile[T](path: String)(implicit arg0: ClassTag[T]): RDD[T]

    Attributes
    protected[org.apache.spark]
    Definition Classes
    SparkContext
  25. def clearCallSite(): Unit

    Definition Classes
    SparkContext
  26. def clearJobGroup(): Unit

    Definition Classes
    SparkContext
  27. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  28. def defaultMinPartitions: Int

    Definition Classes
    SparkContext
  29. def defaultParallelism: Int

    Definition Classes
    SparkContext
  30. def emptyRDD[T](implicit arg0: ClassTag[T]): EmptyRDD[T]

    Definition Classes
    SparkContext
  31. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  32. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  33. val externalBlockStoreFolderName: String

    Definition Classes
    SparkContext
  34. def files: Seq[String]

    Definition Classes
    SparkContext
  35. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  36. def getAllPools: Seq[Schedulable]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  37. def getCheckpointDir: Option[String]

    Definition Classes
    SparkContext
  38. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  39. def getConf: SparkConf

    Definition Classes
    SparkContext
  40. def getExecutorMemoryStatus: Map[String, (Long, Long)]

    Definition Classes
    SparkContext
  41. def getExecutorStorageStatus: Array[StorageStatus]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  42. def getLocalProperty(key: String): String

    Definition Classes
    SparkContext
  43. def getPersistentRDDs: Map[Int, RDD[_]]

    Definition Classes
    SparkContext
  44. def getPoolForName(pool: String): Option[Schedulable]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  45. def getRDDStorageInfo: Array[RDDInfo]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  46. def getSchedulingMode: SchedulingMode

    Definition Classes
    SparkContext
  47. def hadoopConfiguration: Configuration

    Definition Classes
    SparkContext
  48. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  49. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String, minPartitions: Int)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  50. def hadoopFile[K, V](path: String, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
  51. def hadoopRDD[K, V](conf: JobConf, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
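    Example
    A sketch of the old-API (org.apache.hadoop.mapred) overload; the HDFS path is a placeholder:

      import org.apache.hadoop.io.{LongWritable, Text}
      import org.apache.hadoop.mapred.TextInputFormat

      val lines = sc.hadoopFile[LongWritable, Text, TextInputFormat]("hdfs:///data/input")
      // copy values out of the reused Writable before collecting
      lines.map(_._2.toString).take(5)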
  52. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  53. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  54. def isLocal: Boolean

    Definition Classes
    SparkContext
  55. def isStopped: Boolean

    Definition Classes
    SparkContext
  56. def isTraceEnabled(): Boolean

    Attributes
    protected
    Definition Classes
    Logging
  57. def jars: Seq[String]

    Definition Classes
    SparkContext
  58. def killExecutor(executorId: String): Boolean

    Definition Classes
    SparkContext → ExecutorAllocationClient
    Annotations
    @DeveloperApi()
  59. def killExecutors(executorIds: Seq[String]): Boolean

    Definition Classes
    SparkContext → ExecutorAllocationClient
    Annotations
    @DeveloperApi()
  60. val localProperties: InheritableThreadLocal[Properties]

    Attributes
    protected[org.apache.spark]
    Definition Classes
    SparkContext
  61. def log: Logger

    Attributes
    protected
    Definition Classes
    Logging
  62. def logDebug(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  63. def logDebug(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  64. def logError(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  65. def logError(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  66. def logInfo(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  67. def logInfo(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  68. def logName: String

    Attributes
    protected
    Definition Classes
    Logging
  69. def logTrace(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  70. def logTrace(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  71. def logWarning(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  72. def logWarning(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  73. def makeRDD[T](seq: Seq[(T, Seq[String])])(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  74. def makeRDD[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  75. def master: String

    Definition Classes
    SparkContext
  76. def metricsSystem: MetricsSystem

    Definition Classes
    SparkContext
  77. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  78. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String, fClass: Class[F], kClass: Class[K], vClass: Class[V], conf: Configuration): RDD[(K, V)]

    Definition Classes
    SparkContext
  79. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  80. def newAPIHadoopRDD[K, V, F <: InputFormat[K, V]](conf: Configuration, fClass: Class[F], kClass: Class[K], vClass: Class[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
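    Example
    The new-API counterpart of the hadoopFile sketch above (the path is again a placeholder); note the input format comes from org.apache.hadoop.mapreduce, not mapred:

      import org.apache.hadoop.io.{LongWritable, Text}
      import org.apache.hadoop.mapreduce.lib.input.TextInputFormat

      val lines = sc.newAPIHadoopFile[LongWritable, Text, TextInputFormat]("hdfs:///data/input")
      lines.map(_._2.toString).take(5)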
  81. final def notify(): Unit

    Definition Classes
    AnyRef
  82. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  83. def objectFile[T](path: String, minPartitions: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
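    Example
    A round-trip sketch with a placeholder path; objectFile reads back RDDs written with RDD.saveAsObjectFile:

      sc.parallelize(1 to 10, 2).saveAsObjectFile("/tmp/ints")
      val restored = sc.objectFile[Int]("/tmp/ints", minPartitions = 2)
      restored.count()  // 10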
  84. def parallelize[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
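    Example
    A minimal sketch, assuming an instance `sc`; makeRDD behaves the same way for plain sequences:

      val rdd = sc.parallelize(Seq(1, 2, 3, 4), numSlices = 2)
      rdd.reduce(_ + _)  // 10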
  85. def range(start: Long, end: Long, step: Long, numSlices: Int): RDD[Long]

    Definition Classes
    SparkContext
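    Example
    A minimal sketch; the end bound is exclusive:

      val r = sc.range(0L, 5L, step = 1L, numSlices = 2)
      r.collect()  // Array(0, 1, 2, 3, 4)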
  86. def requestExecutors(numAdditionalExecutors: Int): Boolean

    Definition Classes
    SparkContext → ExecutorAllocationClient
    Annotations
    @DeveloperApi()
  87. def runApproximateJob[T, U, R](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, evaluator: ApproximateEvaluator[U, R], timeout: Long): PartialResult[R]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  88. def runJob[T, U](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  89. def runJob[T, U](rdd: RDD[T], processPartition: (TaskContext, Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  90. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  91. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  92. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U, partitions: Seq[Int])(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  93. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int])(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  94. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
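    Example
    A sketch of the simplest overload (not from the original Scaladoc); the function runs once per partition and the results come back as an array on the driver:

      val rdd = sc.parallelize(1 to 100, 4)
      val sizes = sc.runJob(rdd, (it: Iterator[Int]) => it.size)
      // sizes: Array[Int] with one entry per partition; sizes.sum == 100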
  95. def sequenceFile[K, V](path: String, minPartitions: Int)(implicit km: ClassTag[K], vm: ClassTag[V], kcf: () ⇒ WritableConverter[K], vcf: () ⇒ WritableConverter[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  96. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  97. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
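    Example
    A sketch of the Writable-class overload; the path is a placeholder:

      import org.apache.hadoop.io.{IntWritable, Text}

      val pairs = sc.sequenceFile("/data/pairs.seq", classOf[IntWritable], classOf[Text])
      // extract plain values from the reused Writables before collecting
      pairs.map { case (k, v) => (k.get, v.toString) }.take(5)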
  98. def setCallSite(shortCallSite: String): Unit

    Definition Classes
    SparkContext
  99. def setCheckpointDir(directory: String): Unit

    Definition Classes
    SparkContext
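    Example
    A checkpointing sketch with a placeholder directory; the checkpoint data is written by the first action after RDD.checkpoint():

      sc.setCheckpointDir("/tmp/checkpoints")
      val rdd = sc.parallelize(1 to 10, 2).map(_ * 2)
      rdd.checkpoint()
      rdd.count()  // materializes the checkpoint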
  100. def setJobDescription(value: String): Unit

    Definition Classes
    SparkContext
  101. def setJobGroup(groupId: String, description: String, interruptOnCancel: Boolean): Unit

    Definition Classes
    SparkContext
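    Example
    A sketch pairing setJobGroup with cancelJobGroup; the group id and description are placeholders:

      sc.setJobGroup("nightly-etl", "nightly load", interruptOnCancel = true)
      // jobs started from this thread now carry the group id ...
      // later, possibly from another thread:
      sc.cancelJobGroup("nightly-etl")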
  102. def setLocalProperty(key: String, value: String): Unit

    Definition Classes
    SparkContext
  103. def setLogLevel(logLevel: String): Unit

    Definition Classes
    SparkContext
  104. val sparkUser: String

    Definition Classes
    SparkContext
  105. val startTime: Long

    Definition Classes
    SparkContext
  106. def statusTracker: SparkStatusTracker

    Definition Classes
    SparkContext
  107. def stop(): Unit

    Definition Classes
    SparkContext
  108. def submitJob[T, U, R](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, partitions: Seq[Int], resultHandler: (Int, U) ⇒ Unit, resultFunc: ⇒ R): SimpleFutureAction[R]

    Definition Classes
    SparkContext
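    Example
    A sketch of the asynchronous API (not from the original Scaladoc); this sums only the first two partitions and prints each result as it arrives:

      val rdd = sc.parallelize(1 to 100, 4)
      val future = sc.submitJob(
        rdd,
        (it: Iterator[Int]) => it.sum,
        Seq(0, 1),                                                  // partitions to run
        (idx: Int, res: Int) => println(s"partition $idx -> $res"), // per-partition handler
        ()                                                          // resultFunc, evaluated once the job completes
      )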
  109. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  110. def textFile(path: String, minPartitions: Int): RDD[String]

    Definition Classes
    SparkContext
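    Example
    A minimal sketch with a placeholder path:

      val lines = sc.textFile("/data/app.log", minPartitions = 4)
      lines.filter(_.contains("ERROR")).count()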
  111. def toString(): String

    Definition Classes
    SparkContext → AnyRef → Any
  112. def union[T](first: RDD[T], rest: RDD[T]*)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  113. def union[T](rdds: Seq[RDD[T]])(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
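    Example
    A minimal sketch, assuming an instance `sc`; both overloads produce the same result:

      val a = sc.parallelize(Seq(1, 2))
      val b = sc.parallelize(Seq(3, 4))
      sc.union(a, b).collect()  // Array(1, 2, 3, 4)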
  114. def version: String

    Definition Classes
    SparkContext
  115. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  116. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  117. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  118. def wholeTextFiles(path: String, minPartitions: Int): RDD[(String, String)]

    Definition Classes
    SparkContext
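    Example
    A sketch with a placeholder directory; each element pairs a file's path with its entire content, so this suits many small files rather than a few large ones:

      val docs = sc.wholeTextFiles("/data/docs", minPartitions = 2)
      docs.mapValues(_.length).collect()  // (path, content length) pairs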

Deprecated Value Members

  1. def clearFiles(): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) adding files no longer creates local copies that need to be deleted

  2. def clearJars(): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) adding jars no longer creates local copies that need to be deleted

  3. def defaultMinSplits: Int

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) use defaultMinPartitions

  4. def initLocalProperties(): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) Properties no longer need to be explicitly initialized.

  5. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.5.0) use the version of runJob without the allowLocal parameter

  6. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.5.0) use the version of runJob without the allowLocal parameter

  7. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.5.0) use the version of runJob without the allowLocal parameter

  8. val tachyonFolderName: String

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use externalBlockStoreFolderName instead.
