Object

org.apache.s2graph.counter.loader.stream

GraphToETLStreaming

object GraphToETLStreaming extends SparkApp with WithKafka
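
SparkApp supplies the main/run lifecycle together with SparkConf, StreamingContext, and Kafka-stream helpers, while WithKafka adds Kafka producer helpers; the members are listed below. As a quick orientation, here is a minimal launch sketch, assuming the job is started through the inherited main and that run() reads its positional arguments via getArgs. The concrete argument values are hypothetical placeholders, not the documented contract:

  import org.apache.s2graph.counter.loader.stream.GraphToETLStreaming

  object GraphToETLStreamingLauncher {
    def main(cliArgs: Array[String]): Unit = {
      // main(args) is inherited from SparkApp and is expected to delegate to run().
      // Every value below is a placeholder for illustration only.
      GraphToETLStreaming.main(Array(
        "zk1:2181",       // hypothetical Kafka ZooKeeper quorum
        "broker1:9092",   // hypothetical broker list
        "s2graphInJava",  // hypothetical input topic
        "10"              // hypothetical batch interval in seconds
      ))
    }
  }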

Linear Supertypes
WithKafka, SparkApp, Logging, AnyRef, Any

Type Members

  1. type HashMapAccumulable = Accumulable[HashMap[String, Long], (String, Long)]
     Definition Classes: SparkApp
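
HashMapAccumulable is a per-key counter accumulator. A minimal sketch of how such an accumulator can be used, assuming the Spark 1.x Accumulable API that the alias is built on and a mutable HashMap; the HashMapParam object below is a hypothetical stand-in for whatever AccumulableParam the project registers:

  import org.apache.spark.{Accumulable, AccumulableParam, SparkContext}
  import scala.collection.mutable.HashMap

  // Hypothetical merge rule mirroring the alias: fold each (key, count) pair into the map.
  object HashMapParam extends AccumulableParam[HashMap[String, Long], (String, Long)] {
    def addAccumulator(acc: HashMap[String, Long], kv: (String, Long)): HashMap[String, Long] = {
      acc(kv._1) = acc.getOrElse(kv._1, 0L) + kv._2
      acc
    }
    def addInPlace(a: HashMap[String, Long], b: HashMap[String, Long]): HashMap[String, Long] = {
      b.foreach { case (k, v) => a(k) = a.getOrElse(k, 0L) + v }
      a
    }
    def zero(initial: HashMap[String, Long]): HashMap[String, Long] = HashMap.empty[String, Long]
  }

  def countKinds(sc: SparkContext): HashMap[String, Long] = {
    // Same shape as the SparkApp alias: Accumulable[HashMap[String, Long], (String, Long)].
    val acc: Accumulable[HashMap[String, Long], (String, Long)] =
      sc.accumulable(HashMap.empty[String, Long])(HashMapParam)
    // Count record kinds on the executors, then read the merged totals on the driver.
    sc.parallelize(Seq("edge", "vertex", "edge")).foreach(kind => acc += (kind -> 1L))
    acc.value
  }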

Value Members

  1. final def !=(arg0: Any): Boolean
     Definition Classes: AnyRef → Any
  2. final def ##(): Int
     Definition Classes: AnyRef → Any
  3. final def ==(arg0: Any): Boolean
     Definition Classes: AnyRef → Any
  4. def args: Array[String]
     Attributes: protected
     Definition Classes: SparkApp
  5. final def asInstanceOf[T0]: T0
     Definition Classes: Any
  6. def buildKafkaGroupId(topic: String, ext: String): String
     Definition Classes: SparkApp
  7. lazy val className: String
  8. def clone(): AnyRef
     Attributes: protected[java.lang]
     Definition Classes: AnyRef
     Annotations: @throws( ... )
  9. lazy val config: Config
  10. def createKafkaPairStream(ssc: StreamingContext, kafkaParam: Map[String, String], topics: String, numPartition: Option[Int]): DStream[(String, String)]
      Definition Classes: SparkApp
  11. def createKafkaPairStreamMulti(ssc: StreamingContext, kafkaParam: Map[String, String], topics: String, receiverCount: Int, numPartition: Option[Int]): DStream[(String, String)]
      Definition Classes: SparkApp
  12. def createKafkaValueStream(ssc: StreamingContext, kafkaParam: Map[String, String], topics: String, numPartition: Option[Int]): DStream[String]
      Definition Classes: SparkApp
  13. def createKafkaValueStreamMulti(ssc: StreamingContext, kafkaParam: Map[String, String], topics: String, receiverCount: Int, numPartition: Option[Int]): DStream[String]
      Definition Classes: SparkApp
  14. final def eq(arg0: AnyRef): Boolean
      Definition Classes: AnyRef
  15. def equals(arg0: Any): Boolean
      Definition Classes: AnyRef → Any
  16. def finalize(): Unit
      Attributes: protected[java.lang]
      Definition Classes: AnyRef
      Annotations: @throws( classOf[java.lang.Throwable] )
  17. def getArgs(index: Int): String
      Definition Classes: SparkApp
  18. final def getClass(): Class[_]
      Definition Classes: AnyRef → Any
  19. def getPartKey(k: Any, n: Int): Int
      Definition Classes: WithKafka
  20. def getProducer[K, V](brokers: String): Producer[K, V]
      Definition Classes: WithKafka
  21. def getProducer[K, V](config: ProducerConfig): Producer[K, V]
      Definition Classes: WithKafka
  22. def getStreamHelper(kafkaParam: Map[String, String]): StreamHelper
      Definition Classes: SparkApp
  23. def hashCode(): Int
      Definition Classes: AnyRef → Any
  24. final def isInstanceOf[T0]: Boolean
      Definition Classes: Any
  25. def isTraceEnabled(): Boolean
      Attributes: protected
      Definition Classes: Logging
  26. def kafkaConf(brokerList: String): ProducerConfig
      Definition Classes: WithKafka
  27. def log: Logger
      Attributes: protected
      Definition Classes: Logging
  28. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
      Attributes: protected
      Definition Classes: Logging
  29. def logDebug(msg: ⇒ String): Unit
      Attributes: protected
      Definition Classes: Logging
  30. def logError(msg: ⇒ String, throwable: Throwable): Unit
      Attributes: protected
      Definition Classes: Logging
  31. def logError(msg: ⇒ String): Unit
      Attributes: protected
      Definition Classes: Logging
  32. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
      Attributes: protected
      Definition Classes: Logging
  33. def logInfo(msg: ⇒ String): Unit
      Attributes: protected
      Definition Classes: Logging
  34. def logName: String
      Attributes: protected
      Definition Classes: Logging
  35. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
      Attributes: protected
      Definition Classes: Logging
  36. def logTrace(msg: ⇒ String): Unit
      Attributes: protected
      Definition Classes: Logging
  37. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
      Attributes: protected
      Definition Classes: Logging
  38. def logWarning(msg: ⇒ String): Unit
      Attributes: protected
      Definition Classes: Logging
  39. def main(args: Array[String]): Unit
      Definition Classes: SparkApp
  40. def makeKafkaGroupId(topic: String, ext: String): String
      Definition Classes: WithKafka
  41. final def ne(arg0: AnyRef): Boolean
      Definition Classes: AnyRef
  42. final def notify(): Unit
      Definition Classes: AnyRef
  43. final def notifyAll(): Unit
      Definition Classes: AnyRef
  44. lazy val producer: Producer[String, String]
  45. def producerConfig(brokerList: String, requireAcks: String, producerType: String): ProducerConfig
      Definition Classes: WithKafka
  46. def run(): Unit
      Definition Classes: GraphToETLStreaming → SparkApp
      (see the usage sketch after this list)
  47. lazy val s2Config: S2CounterConfig
  48. def sparkConf(jobName: String): SparkConf
      Definition Classes: SparkApp
  49. def streamingContext(sparkConf: SparkConf, interval: Duration, checkPoint: Option[String]): StreamingContext
      Definition Classes: SparkApp
  50. final def synchronized[T0](arg0: ⇒ T0): T0
      Definition Classes: AnyRef
  51. def toString(): String
      Definition Classes: AnyRef → Any
  52. def validateArgument(argNames: String*): Unit
      Definition Classes: SparkApp
  53. final def wait(): Unit
      Definition Classes: AnyRef
      Annotations: @throws( ... )
  54. final def wait(arg0: Long, arg1: Int): Unit
      Definition Classes: AnyRef
      Annotations: @throws( ... )
  55. final def wait(arg0: Long): Unit
      Definition Classes: AnyRef
      Annotations: @throws( ... )
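
Taken together, the SparkApp and WithKafka members above describe a Kafka-in, Kafka-out streaming job. Below is a minimal sketch of how a run() override can wire them up; the argument names, Kafka parameter keys, output topic, and the pass-through transform are assumptions for illustration, not the actual GraphToETLStreaming implementation (project-internal imports for SparkApp and WithKafka are omitted):

  import kafka.producer.KeyedMessage
  import org.apache.spark.streaming.Seconds

  object GraphToETLStreamingSketch extends SparkApp with WithKafka {
    override def run(): Unit = {
      // Hypothetical argument names; validateArgument is assumed to check that this
      // many arguments were supplied, and getArgs(i) reads the i-th one.
      validateArgument("zkQuorum", "brokerList", "inputTopics", "intervalSec")
      val zkQuorum    = getArgs(0)
      val brokerList  = getArgs(1)
      val inputTopics = getArgs(2)
      val intervalSec = getArgs(3).toLong

      val conf = sparkConf(s"GraphToETL: $inputTopics")            // job name is illustrative
      val ssc  = streamingContext(conf, Seconds(intervalSec), None)

      // Old-consumer style Kafka parameters; the exact keys expected by
      // createKafkaValueStream are an assumption.
      val kafkaParam = Map(
        "zookeeper.connect"    -> zkQuorum,
        "group.id"             -> buildKafkaGroupId(inputTopics, "etl"),
        "metadata.broker.list" -> brokerList
      )

      val stream = createKafkaValueStream(ssc, kafkaParam, inputTopics, None)

      stream.foreachRDD { rdd =>
        rdd.foreachPartition { partition =>
          // One producer per partition; "s2counter-etl" is a hypothetical output topic.
          val producer = getProducer[String, String](brokerList)
          partition.foreach { line =>
            producer.send(new KeyedMessage[String, String]("s2counter-etl", line))
          }
        }
      }

      ssc.start()
      ssc.awaitTermination()
    }
  }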
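
The WithKafka helpers can also be used on their own, outside a streaming loop. A small sketch, assuming WithKafka is a mixable trait and that Producer, ProducerConfig, and KeyedMessage come from the legacy Kafka 0.8 producer API; the broker address, acks/producer-type settings, topic, and bucket count are placeholders:

  import kafka.producer.KeyedMessage

  object ProducerHelpersSketch extends WithKafka {
    def publishSample(): Unit = {
      // Build a producer from an explicit config (a raw broker list via
      // getProducer[String, String]("broker1:9092") would work as well).
      val producer = getProducer[String, String](producerConfig("broker1:9092", "1", "sync"))

      // getPartKey hashes an arbitrary key into one of n buckets; here the bucket is
      // reused as a stable Kafka message key (20 buckets is an assumed count).
      val counterKey = "user_activity.like"
      val bucket     = getPartKey(counterKey, 20)

      producer.send(new KeyedMessage[String, String]("s2counter", bucket.toString, "1"))
    }
  }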
