Package shark

SharkContext

class SharkContext extends SparkContext

Linear Supertypes
SparkContext, Logging, AnyRef, Any

Instance Constructors

  1. new SharkContext(master: String, jobName: String, sparkHome: String, jars: Seq[String], environment: Map[String, String])

  2. new SharkContext(config: SparkConf, preferredNodeLocationData: Map[String, Set[SplitInfo]] = ...)
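
    For orientation, a minimal construction sketch using the first constructor (the argument values are illustrative placeholders, not defaults taken from this page):

      import shark.SharkContext

      val sc = new SharkContext(
        master = "local[4]",                      // hypothetical master URL
        jobName = "shark-example",                // hypothetical job name
        sparkHome = "/path/to/spark",             // hypothetical Spark home
        jars = Seq.empty[String],                 // no extra jars in this sketch
        environment = Map.empty[String, String])  // no extra env vars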

Value Members

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. def accumulable[T, R](initialValue: T)(implicit param: AccumulableParam[T, R]): Accumulable[T, R]

    Definition Classes
    SparkContext
  7. def accumulableCollection[R, T](initialValue: R)(implicit arg0: (R) ⇒ Growable[T] with TraversableOnce[T] with Serializable): Accumulable[R, T]

    Definition Classes
    SparkContext
  8. def accumulator[T](initialValue: T)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Definition Classes
    SparkContext
  9. def addFile(path: String): Unit

    Definition Classes
    SparkContext
  10. def addJar(path: String): Unit

    Definition Classes
    SparkContext
  11. def addSparkListener(listener: SparkListener): Unit

    Definition Classes
    SparkContext
  12. val appName: String

    Definition Classes
    SparkContext
  13. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  14. def broadcast[T](value: T): Broadcast[T]

    Definition Classes
    SparkContext
  15. def cancelAllJobs(): Unit

    Definition Classes
    SparkContext
  16. def cancelJobGroup(groupId: String): Unit

    Definition Classes
    SparkContext
  17. def checkpointFile[T](path: String)(implicit arg0: ClassTag[T]): RDD[T]

    Attributes
    protected[org.apache.spark]
    Definition Classes
    SparkContext
  18. def clearCallSite(): Unit

    Definition Classes
    SparkContext
  19. def clearFiles(): Unit

    Definition Classes
    SparkContext
  20. def clearJars(): Unit

    Definition Classes
    SparkContext
  21. def clearJobGroup(): Unit

    Definition Classes
    SparkContext
  22. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  23. def defaultMinSplits: Int

    Definition Classes
    SparkContext
  24. def defaultParallelism: Int

    Definition Classes
    SparkContext
  25. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  26. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  27. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  28. def getAllPools: ArrayBuffer[Schedulable]

    Definition Classes
    SparkContext
  29. def getCheckpointDir: Option[String]

    Definition Classes
    SparkContext
  30. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  31. def getConf: SparkConf

    Definition Classes
    SparkContext
  32. def getExecutorMemoryStatus: Map[String, (Long, Long)]

    Definition Classes
    SparkContext
  33. def getExecutorStorageStatus: Array[StorageStatus]

    Definition Classes
    SparkContext
  34. def getLocalProperty(key: String): String

    Definition Classes
    SparkContext
  35. def getPersistentRDDs: Map[Int, RDD[_]]

    Definition Classes
    SparkContext
  36. def getPoolForName(pool: String): Option[Schedulable]

    Definition Classes
    SparkContext
  37. def getRDDStorageInfo: Array[RDDInfo]

    Definition Classes
    SparkContext
  38. def getSchedulingMode: SchedulingMode

    Definition Classes
    SparkContext
  39. def getStageInfo: Map[Stage, StageInfo]

    Definition Classes
    SparkContext
  40. val hadoopConfiguration: Configuration

    Definition Classes
    SparkContext
  41. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  42. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String, minSplits: Int)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  43. def hadoopFile[K, V](path: String, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minSplits: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
  44. def hadoopRDD[K, V](conf: JobConf, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minSplits: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
  45. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  46. def initDriverMetrics(): Unit

    Definition Classes
    SparkContext
  47. def initLocalProperties(): Unit

    Definition Classes
    SparkContext
  48. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  49. val isLocal: Boolean

    Definition Classes
    SparkContext
  50. def isTraceEnabled(): Boolean

    Attributes
    protected
    Definition Classes
    Logging
  51. val jars: Seq[String]

    Definition Classes
    SparkContext
  52. def log: Logger

    Attributes
    protected
    Definition Classes
    Logging
  53. def logDebug(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  54. def logDebug(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  55. def logError(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  56. def logError(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  57. def logInfo(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  58. def logInfo(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  59. def logTrace(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  60. def logTrace(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  61. def logWarning(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  62. def logWarning(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  63. def makeRDD[T](seq: Seq[(T, Seq[String])])(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  64. def makeRDD[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  65. val master: String

    Definition Classes
    SparkContext
  66. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  67. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String, fClass: Class[F], kClass: Class[K], vClass: Class[V], conf: Configuration): RDD[(K, V)]

    Definition Classes
    SparkContext
  68. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  69. def newAPIHadoopRDD[K, V, F <: InputFormat[K, V]](conf: Configuration, fClass: Class[F], kClass: Class[K], vClass: Class[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  70. final def notify(): Unit

    Definition Classes
    AnyRef
  71. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  72. def objectFile[T](path: String, minSplits: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  73. def parallelize[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  74. def runApproximateJob[T, U, R](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, evaluator: ApproximateEvaluator[U, R], timeout: Long): PartialResult[R]

    Definition Classes
    SparkContext
  75. def runJob[T, U](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  76. def runJob[T, U](rdd: RDD[T], processPartition: (TaskContext, Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  77. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  78. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  79. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  80. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  81. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  82. def runSql(cmd: String, maxRows: Int = 1000): ResultSet

    Execute a SQL command and collect the results locally.

    cmd
      The SQL command to be executed.
    maxRows
      The maximum number of rows to retrieve for the result set.
    returns
      A ResultSet object with both the schema and the query results.
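
    Example (an illustrative sketch; assumes a SharkContext sc and a hypothetical table src):

      val rs = sc.runSql("SELECT key, value FROM src", maxRows = 10)
      // rs carries both the schema and the query results, per the return type above.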

  83. def sequenceFile[K, V](path: String, minSplits: Int)(implicit km: ClassTag[K], vm: ClassTag[V], kcf: () ⇒ WritableConverter[K], vcf: () ⇒ WritableConverter[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  84. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  85. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V], minSplits: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
  86. def setCallSite(site: String): Unit

    Definition Classes
    SparkContext
  87. def setCheckpointDir(directory: String): Unit

    Definition Classes
    SparkContext
  88. def setJobGroup(groupId: String, description: String): Unit

    Definition Classes
    SparkContext
  89. def setLocalProperty(key: String, value: String): Unit

    Definition Classes
    SparkContext
  90. val sparkEnv: SparkEnv

  91. val sparkUser: String

    Definition Classes
    SparkContext
  92. def sql(cmd: String, maxRows: Int = 1000): Seq[String]

    Execute the command and return the results as a sequence. Each element in the sequence is one row.
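
    Example (an illustrative sketch; src is a hypothetical table):

      // Each element of the returned Seq[String] is one row.
      val rows: Seq[String] = sc.sql("SELECT * FROM src", maxRows = 5)
      rows.foreach(println)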

  93. def sql2console(cmd: String, maxRows: Int = 1000): Unit

    Execute the command and print the results to console.
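
    Example (an illustrative sketch):

      // Prints up to maxRows rows of query output directly to the console.
      sc.sql2console("SELECT COUNT(*) FROM src")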

  94. def sql2rdd(cmd: String): TableRDD

    Execute a SQL command and return the results as a TableRDD. The SQL command must be a SELECT statement.
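
    Example (an illustrative sketch; src is a hypothetical table):

      // The command must be a SELECT statement.
      val table = sc.sql2rdd("SELECT key, value FROM src")
      // A TableRDD behaves as an RDD, so standard actions such as count apply.
      println(table.count())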

  95. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10], arg10: C[T11], arg11: C[T12], arg12: C[T13], arg13: C[T14], arg14: C[T15], arg15: C[T16], arg16: C[T17], arg17: C[T18], arg18: C[T19], arg19: C[T20], arg20: C[T21], arg21: C[T22]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22)]

  96. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10], arg10: C[T11], arg11: C[T12], arg12: C[T13], arg13: C[T14], arg14: C[T15], arg15: C[T16], arg16: C[T17], arg17: C[T18], arg18: C[T19], arg19: C[T20], arg20: C[T21]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21)]

  97. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10], arg10: C[T11], arg11: C[T12], arg12: C[T13], arg13: C[T14], arg14: C[T15], arg15: C[T16], arg16: C[T17], arg17: C[T18], arg18: C[T19], arg19: C[T20]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20)]

  98. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10], arg10: C[T11], arg11: C[T12], arg12: C[T13], arg13: C[T14], arg14: C[T15], arg15: C[T16], arg16: C[T17], arg17: C[T18], arg18: C[T19]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19)]

  99. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10], arg10: C[T11], arg11: C[T12], arg12: C[T13], arg13: C[T14], arg14: C[T15], arg15: C[T16], arg16: C[T17], arg17: C[T18]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18)]

  100. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10], arg10: C[T11], arg11: C[T12], arg12: C[T13], arg13: C[T14], arg14: C[T15], arg15: C[T16], arg16: C[T17]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17)]

  101. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10], arg10: C[T11], arg11: C[T12], arg12: C[T13], arg13: C[T14], arg14: C[T15], arg15: C[T16]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16)]

  102. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10], arg10: C[T11], arg11: C[T12], arg12: C[T13], arg13: C[T14], arg14: C[T15]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15)]

  103. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10], arg10: C[T11], arg11: C[T12], arg12: C[T13], arg13: C[T14]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14)]

  104. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10], arg10: C[T11], arg11: C[T12], arg12: C[T13]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13)]

  105. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10], arg10: C[T11], arg11: C[T12]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12)]

  106. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10], arg10: C[T11]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11)]

  107. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9], arg9: C[T10]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10)]

  108. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8, T9](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8], arg8: C[T9]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8, T9)]

  109. def sqlRdd[T1, T2, T3, T4, T5, T6, T7, T8](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7], arg7: C[T8]): RDD[(T1, T2, T3, T4, T5, T6, T7, T8)]

  110. def sqlRdd[T1, T2, T3, T4, T5, T6, T7](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6], arg6: C[T7]): RDD[(T1, T2, T3, T4, T5, T6, T7)]

  111. def sqlRdd[T1, T2, T3, T4, T5, T6](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5], arg5: C[T6]): RDD[(T1, T2, T3, T4, T5, T6)]

  112. def sqlRdd[T1, T2, T3, T4, T5](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4], arg4: C[T5]): RDD[(T1, T2, T3, T4, T5)]

  113. def sqlRdd[T1, T2, T3, T4](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3], arg3: C[T4]): RDD[(T1, T2, T3, T4)]

  114. def sqlRdd[T1, T2, T3](cmd: String)(implicit arg0: C[T1], arg1: C[T2], arg2: C[T3]): RDD[(T1, T2, T3)]

  115. def sqlRdd[T1, T2](cmd: String)(implicit arg0: C[T1], arg1: C[T2]): RDD[(T1, T2)]

    Execute a SQL command and return the results as an RDD of tuples. The SQL command must be a SELECT statement.
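
    Example (an illustrative sketch; assumes the type parameters match the column types of the SELECT and that the required C[T] implicits are in scope):

      val pairs: RDD[(Int, String)] = sc.sqlRdd[Int, String]("SELECT key, value FROM src")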

  116. def sqlSeqRdd(cmd: String): RDD[Seq[Any]]

    Execute a SQL command and return the results as an RDD of Seq. The SQL command must be a SELECT statement. This is useful when the table has more than 22 columns (more than fit in a tuple). NB: the sqlRdd overloads above are auto-generated using resources/tablerdd/SharkContext_sqlRdd_generator.py.
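
    Example (an illustrative sketch; wide_table is a hypothetical table with more than 22 columns):

      val rows: RDD[Seq[Any]] = sc.sqlSeqRdd("SELECT * FROM wide_table")
      val firstColumn = rows.map(_(0)) // columns are accessed positionally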

  117. val startTime: Long

    Definition Classes
    SparkContext
  118. def stop(): Unit

    Definition Classes
    SparkContext
  119. def submitJob[T, U, R](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, partitions: Seq[Int], resultHandler: (Int, U) ⇒ Unit, resultFunc: ⇒ R): SimpleFutureAction[R]

    Definition Classes
    SparkContext
  120. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  121. def textFile(path: String, minSplits: Int): RDD[String]

    Definition Classes
    SparkContext
  122. def toString(): String

    Definition Classes
    AnyRef → Any
  123. def union[T](first: RDD[T], rest: RDD[T]*)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  124. def union[T](rdds: Seq[RDD[T]])(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  125. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  126. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  127. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )

Deprecated Value Members

  1. def setJobDescription(value: String): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 0.8.1) use setJobGroup
