Object

org.apache.s2graph.counter.loader

CounterBulkLoader

object CounterBulkLoader extends SparkApp with WithKafka

Linear Supertypes
WithKafka, SparkApp, Logging, AnyRef, Any

Type Members

  1. type HashMapAccumulable = Accumulable[HashMap[String, Long], (String, Long)]
     Definition Classes: SparkApp
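     A HashMapAccumulable is a Spark Accumulable that folds (String, Long) pairs into a
     mutable HashMap, e.g. to keep per-batch counters. The sketch below shows one way to
     build such an accumulable on Spark 1.x via an AccumulableParam; HashMapParam and
     countEvents are hypothetical names introduced only for illustration and are not part
     of SparkApp.

        import scala.collection.mutable.HashMap
        import org.apache.spark.{Accumulable, AccumulableParam, SparkContext}

        // Hypothetical merge rule: sum the Long counts per String key.
        object HashMapParam extends AccumulableParam[HashMap[String, Long], (String, Long)] {
          def addAccumulator(acc: HashMap[String, Long], kv: (String, Long)): HashMap[String, Long] = {
            acc(kv._1) = acc.getOrElse(kv._1, 0L) + kv._2
            acc
          }
          def addInPlace(a: HashMap[String, Long], b: HashMap[String, Long]): HashMap[String, Long] = {
            b.foreach { case (k, v) => a(k) = a.getOrElse(k, 0L) + v }
            a
          }
          def zero(initial: HashMap[String, Long]): HashMap[String, Long] = HashMap.empty[String, Long]
        }

        // Accumulate tags on the executors, then read the merged map on the driver.
        def countEvents(sc: SparkContext): HashMap[String, Long] = {
          val acc: Accumulable[HashMap[String, Long], (String, Long)] =
            sc.accumulable(HashMap.empty[String, Long])(HashMapParam)
          sc.parallelize(Seq("parsed", "parsed", "failed")).foreach { tag => acc += (tag -> 1L) }
          acc.value
        }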

Value Members

  1. final def !=(arg0: Any): Boolean
     Definition Classes: AnyRef → Any
  2. final def ##(): Int
     Definition Classes: AnyRef → Any
  3. final def ==(arg0: Any): Boolean
     Definition Classes: AnyRef → Any
  4. def args: Array[String]
     Attributes: protected
     Definition Classes: SparkApp
  5. final def asInstanceOf[T0]: T0
     Definition Classes: Any
  6. def buildKafkaGroupId(topic: String, ext: String): String
     Definition Classes: SparkApp
  7. lazy val className: String
  8. def clone(): AnyRef
     Attributes: protected[java.lang]
     Definition Classes: AnyRef
     Annotations: @throws( ... )
  9. lazy val config: Config
  10. lazy val counterModel: CounterModel
  11. def createKafkaPairStream(ssc: StreamingContext, kafkaParam: Map[String, String], topics: String, numPartition: Option[Int]): DStream[(String, String)]
      Definition Classes: SparkApp
  12. def createKafkaPairStreamMulti(ssc: StreamingContext, kafkaParam: Map[String, String], topics: String, receiverCount: Int, numPartition: Option[Int]): DStream[(String, String)]
      Definition Classes: SparkApp
  13. def createKafkaValueStream(ssc: StreamingContext, kafkaParam: Map[String, String], topics: String, numPartition: Option[Int]): DStream[String]
      Definition Classes: SparkApp
  14. def createKafkaValueStreamMulti(ssc: StreamingContext, kafkaParam: Map[String, String], topics: String, receiverCount: Int, numPartition: Option[Int]): DStream[String]
      Definition Classes: SparkApp
  15. implicit val ec: ExecutionContextExecutor
  16. final def eq(arg0: AnyRef): Boolean
      Definition Classes: AnyRef
  17. def equals(arg0: Any): Boolean
      Definition Classes: AnyRef → Any
  18. def finalize(): Unit
      Attributes: protected[java.lang]
      Definition Classes: AnyRef
      Annotations: @throws( classOf[java.lang.Throwable] )
  19. def getArgs(index: Int): String
      Definition Classes: SparkApp
  20. final def getClass(): Class[_]
      Definition Classes: AnyRef → Any
  21. def getPartKey(k: Any, n: Int): Int
      Definition Classes: WithKafka
  22. def getProducer[K, V](brokers: String): Producer[K, V]
      Definition Classes: WithKafka
  23. def getProducer[K, V](config: ProducerConfig): Producer[K, V]
      Definition Classes: WithKafka
  24. def getStreamHelper(kafkaParam: Map[String, String]): StreamHelper
      Definition Classes: SparkApp
  25. def hashCode(): Int
      Definition Classes: AnyRef → Any
  26. val initialize: Boolean
  27. final def isInstanceOf[T0]: Boolean
      Definition Classes: Any
  28. def isTraceEnabled(): Boolean
      Attributes: protected
      Definition Classes: Logging
  29. def kafkaConf(brokerList: String): ProducerConfig
      Definition Classes: WithKafka
  30. def log: Logger
      Attributes: protected
      Definition Classes: Logging
  31. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
      Attributes: protected
      Definition Classes: Logging
  32. def logDebug(msg: ⇒ String): Unit
      Attributes: protected
      Definition Classes: Logging
  33. def logError(msg: ⇒ String, throwable: Throwable): Unit
      Attributes: protected
      Definition Classes: Logging
  34. def logError(msg: ⇒ String): Unit
      Attributes: protected
      Definition Classes: Logging
  35. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
      Attributes: protected
      Definition Classes: Logging
  36. def logInfo(msg: ⇒ String): Unit
      Attributes: protected
      Definition Classes: Logging
  37. def logName: String
      Attributes: protected
      Definition Classes: Logging
  38. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
      Attributes: protected
      Definition Classes: Logging
  39. def logTrace(msg: ⇒ String): Unit
      Attributes: protected
      Definition Classes: Logging
  40. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
      Attributes: protected
      Definition Classes: Logging
  41. def logWarning(msg: ⇒ String): Unit
      Attributes: protected
      Definition Classes: Logging
  42. def main(args: Array[String]): Unit
      Definition Classes: SparkApp
  43. def makeKafkaGroupId(topic: String, ext: String): String
      Definition Classes: WithKafka
  44. final def ne(arg0: AnyRef): Boolean
      Definition Classes: AnyRef
  45. final def notify(): Unit
      Definition Classes: AnyRef
  46. final def notifyAll(): Unit
      Definition Classes: AnyRef
  47. lazy val producer: Producer[String, String]
  48. def producerConfig(brokerList: String, requireAcks: String, producerType: String): ProducerConfig
      Definition Classes: WithKafka
  49. def run(): Unit
      Definition Classes: CounterBulkLoader → SparkApp
      (a usage sketch combining the SparkApp and WithKafka members follows this list)
  50. lazy val s2Config: S2CounterConfig
  51. def sparkConf(jobName: String): SparkConf
      Definition Classes: SparkApp
  52. def streamingContext(sparkConf: SparkConf, interval: Duration, checkPoint: Option[String]): StreamingContext
      Definition Classes: SparkApp
  53. final def synchronized[T0](arg0: ⇒ T0): T0
      Definition Classes: AnyRef
  54. def toString(): String
      Definition Classes: AnyRef → Any
  55. def validateArgument(argNames: String*): Unit
      Definition Classes: SparkApp
  56. final def wait(): Unit
      Definition Classes: AnyRef
      Annotations: @throws( ... )
  57. final def wait(arg0: Long, arg1: Int): Unit
      Definition Classes: AnyRef
      Annotations: @throws( ... )
  58. final def wait(arg0: Long): Unit
      Definition Classes: AnyRef
      Annotations: @throws( ... )
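
Usage Sketch

run() is overridden in CounterBulkLoader (Definition Classes: CounterBulkLoader → SparkApp),
but its body is not shown on this page. The sketch below only illustrates how the inherited
SparkApp and WithKafka members listed above are typically wired together in such a streaming
job; the argument layout, Kafka parameter keys, batch interval and output topic are
assumptions made for the example, not details of the actual CounterBulkLoader implementation.

   import kafka.producer.KeyedMessage
   import org.apache.spark.streaming.Seconds

   // Body of an object extending SparkApp with WithKafka (project-trait imports omitted).
   override def run(): Unit = {
     validateArgument("jobName", "brokerList", "topics")   // SparkApp: fail fast on missing args
     val jobName    = getArgs(0)
     val brokerList = getArgs(1)
     val topics     = getArgs(2)

     val conf = sparkConf(jobName)                          // SparkApp: pre-populated SparkConf
     val ssc  = streamingContext(conf, Seconds(10), None)   // assumed 10s batches, no checkpoint

     // Assumed Kafka 0.8-style consumer parameters; the real job may use different keys.
     val kafkaParam = Map(
       "metadata.broker.list" -> brokerList,
       "group.id"             -> buildKafkaGroupId(topics, "bulk_loader")
     )

     // Consume the raw counter events as a DStream[String].
     val stream = createKafkaValueStream(ssc, kafkaParam, topics, None)

     stream.foreachRDD { rdd =>
       // Collected to the driver to keep the sketch small; a real job would write per
       // partition on the executors. "counter_result" is a made-up output topic, and
       // producer is the lazy val declared on this object.
       rdd.collect().foreach { line =>
         producer.send(new KeyedMessage[String, String]("counter_result", line))
       }
     }

     ssc.start()
     ssc.awaitTermination()
   }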
