Class

eu.shiftforward.adstax.spark

AdStaxSparkContext

class AdStaxSparkContext extends SparkContext

A context for running Spark jobs that provides an extra method to access AdStax events.

Linear Supertypes
SparkContext, Logging, AnyRef, Any

Instance Constructors

  1. new AdStaxSparkContext(conf: SparkConf, esIndex: String = "http-*")

    conf

    the Spark configuration to use when initializing the context

    esIndex

    the Elasticsearch index in which to search for events
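
    For reference, a minimal construction sketch (the app name, master URL and index pattern are illustrative placeholders, not values required by AdStax):

      import org.apache.spark.SparkConf
      import eu.shiftforward.adstax.spark.AdStaxSparkContext

      // Illustrative configuration; point the master and app name at your own cluster.
      val conf = new SparkConf()
        .setAppName("adstax-events-example")
        .setMaster("local[*]")

      // esIndex defaults to "http-*"; pass a different pattern to read another event index.
      val asc = new AdStaxSparkContext(conf)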

Value Members

  1. final def !=(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  2. final def ##(): Int

    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  4. def addFile(path: String, recursive: Boolean): Unit

    Definition Classes
    SparkContext
  5. def addFile(path: String): Unit

    Definition Classes
    SparkContext
  6. def addJar(path: String): Unit

    Definition Classes
    SparkContext
  7. def addSparkListener(listener: SparkListenerInterface): Unit

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  8. def appName: String

    Definition Classes
    SparkContext
  9. def applicationAttemptId: Option[String]

    Definition Classes
    SparkContext
  10. def applicationId: String

    Definition Classes
    SparkContext
  11. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  12. def binaryFiles(path: String, minPartitions: Int): RDD[(String, PortableDataStream)]

    Definition Classes
    SparkContext
  13. def binaryRecords(path: String, recordLength: Int, conf: Configuration): RDD[Array[Byte]]

    Definition Classes
    SparkContext
  14. def broadcast[T](value: T)(implicit arg0: ClassTag[T]): Broadcast[T]

    Definition Classes
    SparkContext
  15. def cancelAllJobs(): Unit

    Definition Classes
    SparkContext
  16. def cancelJob(jobId: Int): Unit

    Definition Classes
    SparkContext
  17. def cancelJobGroup(groupId: String): Unit

    Definition Classes
    SparkContext
  18. def cancelStage(stageId: Int): Unit

    Definition Classes
    SparkContext
  19. def checkpointFile[T](path: String)(implicit arg0: ClassTag[T]): RDD[T]

    Attributes
    protected[org.apache.spark]
    Definition Classes
    SparkContext
  20. def clearCallSite(): Unit

    Definition Classes
    SparkContext
  21. def clearJobGroup(): Unit

    Definition Classes
    SparkContext
  22. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  23. def collectionAccumulator[T](name: String): CollectionAccumulator[T]

    Definition Classes
    SparkContext
  24. def collectionAccumulator[T]: CollectionAccumulator[T]

    Definition Classes
    SparkContext
  25. def defaultMinPartitions: Int

    Definition Classes
    SparkContext
  26. def defaultParallelism: Int

    Definition Classes
    SparkContext
  27. def deployMode: String

    Definition Classes
    SparkContext
  28. def doubleAccumulator(name: String): DoubleAccumulator

    Definition Classes
    SparkContext
  29. def doubleAccumulator: DoubleAccumulator

    Definition Classes
    SparkContext
  30. def emptyRDD[T](implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  31. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  32. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  33. def eventsRDD(eventTypes: Set[String], startDateTime: com.github.nscala_time.time.Imports.DateTime, endDateTime: com.github.nscala_time.time.Imports.DateTime): RDD[String]

    Returns all AdStax events between startDateTime and endDateTime that have a type contained in eventTypes. If eventTypes is empty, all event types are considered.

    eventTypes

    the types of events to fetch; if empty, all event types are considered

    startDateTime

    the initial timestamp of the events to fetch

    endDateTime

    the final timestamp of the events to fetch

    returns

    an RDD of Strings with the JSON representations of the fetched events.
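
    Example (a sketch only: the context value asc, the event type names and the time window are illustrative; nscala-time's Imports are assumed for the DateTime arithmetic):

      import com.github.nscala_time.time.Imports._

      // Fetch the last day's impression and click events from the AdStax event store.
      // `asc` is an AdStaxSparkContext such as the one built in the construction sketch above.
      val events = asc.eventsRDD(
        eventTypes = Set("impression", "click"),
        startDateTime = DateTime.now - 1.day,
        endDateTime = DateTime.now
      )

      // Each element is the JSON string of a single event.
      events.take(5).foreach(println)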

  34. def files: Seq[String]

    Definition Classes
    SparkContext
  35. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  36. def getAllPools: Seq[Schedulable]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  37. def getCheckpointDir: Option[String]

    Definition Classes
    SparkContext
  38. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  39. def getConf: SparkConf

    Definition Classes
    SparkContext
  40. def getExecutorMemoryStatus: Map[String, (Long, Long)]

    Definition Classes
    SparkContext
  41. def getExecutorStorageStatus: Array[StorageStatus]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  42. def getLocalProperty(key: String): String

    Definition Classes
    SparkContext
  43. def getPersistentRDDs: Map[Int, RDD[_]]

    Definition Classes
    SparkContext
  44. def getPoolForName(pool: String): Option[Schedulable]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  45. def getRDDStorageInfo: Array[RDDInfo]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  46. def getSchedulingMode: SchedulingMode

    Definition Classes
    SparkContext
  47. def hadoopConfiguration: Configuration

    Definition Classes
    SparkContext
  48. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  49. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String, minPartitions: Int)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  50. def hadoopFile[K, V](path: String, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
  51. def hadoopRDD[K, V](conf: JobConf, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
  52. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  53. def initializeLogIfNecessary(isInterpreter: Boolean): Unit

    Attributes
    protected
    Definition Classes
    Logging
  54. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  55. def isLocal: Boolean

    Definition Classes
    SparkContext
  56. def isStopped: Boolean

    Definition Classes
    SparkContext
  57. def isTraceEnabled(): Boolean

    Attributes
    protected
    Definition Classes
    Logging
  58. def jars: Seq[String]

    Definition Classes
    SparkContext
  59. def killExecutor(executorId: String): Boolean

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  60. def killExecutors(executorIds: Seq[String]): Boolean

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  61. def listFiles(): Seq[String]

    Definition Classes
    SparkContext
  62. def listJars(): Seq[String]

    Definition Classes
    SparkContext
  63. val localProperties: InheritableThreadLocal[Properties]

    Attributes
    protected[org.apache.spark]
    Definition Classes
    SparkContext
  64. def log: Logger

    Attributes
    protected
    Definition Classes
    Logging
  65. def logDebug(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  66. def logDebug(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  67. def logError(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  68. def logError(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  69. def logInfo(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  70. def logInfo(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  71. def logName: String

    Attributes
    protected
    Definition Classes
    Logging
  72. def logTrace(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  73. def logTrace(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  74. def logWarning(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  75. def logWarning(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  76. def longAccumulator(name: String): LongAccumulator

    Definition Classes
    SparkContext
  77. def longAccumulator: LongAccumulator

    Definition Classes
    SparkContext
  78. def makeRDD[T](seq: Seq[(T, Seq[String])])(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  79. def makeRDD[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  80. def master: String

    Definition Classes
    SparkContext
  81. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  82. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String, fClass: Class[F], kClass: Class[K], vClass: Class[V], conf: Configuration): RDD[(K, V)]

    Definition Classes
    SparkContext
  83. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  84. def newAPIHadoopRDD[K, V, F <: InputFormat[K, V]](conf: Configuration, fClass: Class[F], kClass: Class[K], vClass: Class[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  85. final def notify(): Unit

    Definition Classes
    AnyRef
  86. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  87. def objectFile[T](path: String, minPartitions: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  88. def parallelize[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  89. def range(start: Long, end: Long, step: Long, numSlices: Int): RDD[Long]

    Definition Classes
    SparkContext
  90. def register(acc: AccumulatorV2[_, _], name: String): Unit

    Definition Classes
    SparkContext
  91. def register(acc: AccumulatorV2[_, _]): Unit

    Definition Classes
    SparkContext
  92. def requestExecutors(numAdditionalExecutors: Int): Boolean

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  93. def requestTotalExecutors(numExecutors: Int, localityAwareTasks: Int, hostToLocalTaskCount: Map[String, Int]): Boolean

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  94. def runApproximateJob[T, U, R](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, evaluator: ApproximateEvaluator[U, R], timeout: Long): PartialResult[R]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  95. def runJob[T, U](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  96. def runJob[T, U](rdd: RDD[T], processPartition: (TaskContext, Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  97. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  98. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  99. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U, partitions: Seq[Int])(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  100. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int])(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  101. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  102. def sequenceFile[K, V](path: String, minPartitions: Int)(implicit km: ClassTag[K], vm: ClassTag[V], kcf: () ⇒ WritableConverter[K], vcf: () ⇒ WritableConverter[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  103. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  104. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
  105. def setCallSite(shortCallSite: String): Unit

    Definition Classes
    SparkContext
  106. def setCheckpointDir(directory: String): Unit

    Definition Classes
    SparkContext
  107. def setJobDescription(value: String): Unit

    Definition Classes
    SparkContext
  108. def setJobGroup(groupId: String, description: String, interruptOnCancel: Boolean): Unit

    Definition Classes
    SparkContext
  109. def setLocalProperty(key: String, value: String): Unit

    Definition Classes
    SparkContext
  110. def setLogLevel(logLevel: String): Unit

    Definition Classes
    SparkContext
  111. val sparkUser: String

    Definition Classes
    SparkContext
  112. val startTime: Long

    Definition Classes
    SparkContext
  113. def statusTracker: SparkStatusTracker

    Definition Classes
    SparkContext
  114. def stop(): Unit

    Definition Classes
    SparkContext
  115. def submitJob[T, U, R](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, partitions: Seq[Int], resultHandler: (Int, U) ⇒ Unit, resultFunc: ⇒ R): SimpleFutureAction[R]

    Definition Classes
    SparkContext
  116. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  117. def textFile(path: String, minPartitions: Int): RDD[String]

    Definition Classes
    SparkContext
  118. def toString(): String

    Definition Classes
    AnyRef → Any
  119. def uiWebUrl: Option[String]

    Definition Classes
    SparkContext
  120. def union[T](first: RDD[T], rest: RDD[T]*)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  121. def union[T](rdds: Seq[RDD[T]])(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  122. def version: String

    Definition Classes
    SparkContext
  123. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  124. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  125. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  126. def wholeTextFiles(path: String, minPartitions: Int): RDD[(String, String)]

    Definition Classes
    SparkContext

Deprecated Value Members

  1. def accumulable[R, T](initialValue: R, name: String)(implicit param: AccumulableParam[R, T]): Accumulable[R, T]

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 2.0.0) use AccumulatorV2

  2. def accumulable[R, T](initialValue: R)(implicit param: AccumulableParam[R, T]): Accumulable[R, T]

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 2.0.0) use AccumulatorV2

  3. def accumulableCollection[R, T](initialValue: R)(implicit arg0: (R) ⇒ Growable[T] with TraversableOnce[T] with Serializable, arg1: ClassTag[R]): Accumulable[R, T]

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 2.0.0) use AccumulatorV2

  4. def accumulator[T](initialValue: T, name: String)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 2.0.0) use AccumulatorV2

  5. def accumulator[T](initialValue: T)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 2.0.0) use AccumulatorV2
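
    The deprecated members above all point to AccumulatorV2. A minimal migration sketch using the longAccumulator and parallelize methods listed under Value Members (the accumulator name and sample data are arbitrary, and asc is the context from the construction sketch above):

      // Spark 2.x style: register an accumulator on the context, add to it from tasks,
      // and read the result on the driver.
      val evenCount = asc.longAccumulator("evenCount")

      asc.parallelize(Seq(1, 2, 3, 4), numSlices = 2).foreach { n =>
        if (n % 2 == 0) evenCount.add(1)
      }

      println(evenCount.value)  // 2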
