Class/Object

ai.chronon.spark

GroupBy

Related Docs: object GroupBy | package spark

Permalink

class GroupBy extends Serializable

Linear Supertypes
Serializable, Serializable, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. GroupBy
  2. Serializable
  3. Serializable
  4. AnyRef
  5. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new GroupBy(aggregations: Seq[Aggregation], keyColumns: Seq[String], inputDf: DataFrame, mutationDf: DataFrame = null, skewFilter: Option[String] = None, finalize: Boolean = true)

    Permalink

Value Members

  1. final def !=(arg0: Any): Boolean

    Permalink
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int

    Permalink
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean

    Permalink
    Definition Classes
    AnyRef → Any
  4. lazy val aggPartWithSchema: Seq[(AggregationPart, DataType)]

    Permalink
  5. lazy val aggregationParts: Seq[AggregationPart]

    Permalink
  6. val aggregations: Seq[Aggregation]

    Permalink
  7. final def asInstanceOf[T0]: T0

    Permalink
    Definition Classes
    Any
  8. def clone(): AnyRef

    Permalink
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  9. lazy val columnAggregators: Array[ColumnAggregator]

    Permalink
  10. final def eq(arg0: AnyRef): Boolean

    Permalink
    Definition Classes
    AnyRef
  11. def equals(arg0: Any): Boolean

    Permalink
    Definition Classes
    AnyRef → Any
  12. def finalize(): Unit

    Permalink
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  13. final def getClass(): Class[_]

    Permalink
    Definition Classes
    AnyRef → Any
  14. def hashCode(): Int

    Permalink
    Definition Classes
    AnyRef → Any
  15. def hopsAggregate(minQueryTs: Long, resolution: Resolution): RDD[(KeyWithHash, OutputArrayType)]

    Permalink
  16. val inputDf: DataFrame

    Permalink
  17. final def isInstanceOf[T0]: Boolean

    Permalink
    Definition Classes
    Any
  18. val keyColumns: Seq[String]

    Permalink
  19. val keySchema: StructType

    Permalink
  20. val mutationDf: DataFrame

    Permalink
  21. final def ne(arg0: AnyRef): Boolean

    Permalink
    Definition Classes
    AnyRef
  22. final def notify(): Unit

    Permalink
    Definition Classes
    AnyRef
  23. final def notifyAll(): Unit

    Permalink
    Definition Classes
    AnyRef
  24. lazy val outputSchema: StructType

    Permalink
  25. lazy val postAggSchema: StructType

    Permalink
  26. val preAggSchema: StructType

    Permalink
  27. val selectedSchema: Array[(String, DataType)]

    Permalink
    Attributes
    protected
  28. def snapshotEntities: DataFrame

    Permalink
  29. def snapshotEntitiesBase: RDD[(Array[Any], Array[Any])]

    Permalink
  30. def snapshotEvents(partitionRange: PartitionRange): DataFrame

    Permalink
  31. def snapshotEventsBase(partitionRange: PartitionRange, resolution: Resolution = DailyResolution): RDD[(Array[Any], Array[Any])]

    Permalink
  32. implicit val sparkSession: SparkSession

    Permalink
  33. final def synchronized[T0](arg0: ⇒ T0): T0

    Permalink
    Definition Classes
    AnyRef
  34. def temporalEntities(queriesUnfilteredDf: DataFrame, resolution: Resolution = FiveMinuteResolution): DataFrame

    Permalink

    Support for entities with mutations.

    Support for entities with mutations: a three-way join between:
    - Queries: grouped by key and dsOf[ts].
    - Snapshot[InputDf]: grouped by key and ds, providing a FinalBatchIR to be extended.
    - Mutations[MutationDf]: grouped by key and dsOf[MutationTs], providing an array of updates/deletes to be applied.
    With this process the components combine as: end-of-day batchIr + day's mutations + day's queries -> output.

  35. def temporalEvents(queriesUnfilteredDf: DataFrame, queryTimeRange: Option[TimeRange] = None, resolution: Resolution = FiveMinuteResolution): DataFrame

    Permalink
  36. def toDf(aggregateRdd: RDD[(Array[Any], Array[Any])], additionalFields: Seq[(String, DataType)]): DataFrame

    Permalink
    Attributes
    protected[ai.chronon.spark]
  37. def toString(): String

    Permalink
    Definition Classes
    AnyRef → Any
  38. val tsIndex: Int

    Permalink
    Attributes
    protected[ai.chronon.spark]
  39. final def wait(): Unit

    Permalink
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  40. final def wait(arg0: Long, arg1: Int): Unit

    Permalink
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  41. final def wait(arg0: Long): Unit

    Permalink
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  42. lazy val windowAggregator: RowAggregator

    Permalink
    Attributes
    protected[ai.chronon.spark]

Inherited from Serializable

Inherited from Serializable

Inherited from AnyRef

Inherited from Any

Ungrouped