Packages

org.apache.spark.sql.executionmetrics

package executionmetrics

The data schema is in snake_case, and Spark doesn't give us a native converter from snake_case to camelCase, so we have to do that conversion in this package.
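
For illustration, a minimal sketch of that kind of conversion (the SnakeToCamel helper and its column-renaming approach are hypothetical, not this package's actual API):

  import org.apache.spark.sql.DataFrame

  // Hypothetical helper: rename every snake_case column of a DataFrame to camelCase.
  object SnakeToCamel {
    def toCamel(s: String): String =
      "_([a-z\\d])".r.replaceAllIn(s, m => m.group(1).toUpperCase)

    def convert(df: DataFrame): DataFrame =
      df.columns.foldLeft(df)((acc, c) => acc.withColumnRenamed(c, toCamel(c)))
  }

For example, convert(df) renames a column pipeline_run_uid to pipelineRunUid.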

Linear Supertypes
LazyLogging, AnyRef, Any

Type Members

  1. case class ComponentRuns(uid: String, component_uri: String, pipeline_uri: String, pipeline_run_uid: String, fabric_uid: String, component_name: String, interim_component_name: String, component_type: String, interim_subgraph_name: String, interim_process_id: String, interim_out_port: String, created_at: Option[Timestamp] = None, created_by: String, records: Option[Long] = None, bytes: Option[Long] = None, partitions: Option[Long] = None, expired: Option[Boolean] = Some(false), run_type: Option[String], job_uri: Option[String], branch: Option[String] = None) extends ExecutionMetricsEntity with Product with Serializable
  2. case class ComponentRunsWithRunDates(componentRunsWithStatusAndInterims: Option[List[ComponentRunsWithStatusAndInterims]] = None, runDates: Option[List[RunDates]] = None) extends Product with Serializable
  3. case class ComponentRunsWithStatus(uid: String, component_uri: String, pipeline_run_uid: String, pipeline_uri: String, fabric_uid: String, component_name: String, interim_component_name: String, records: Option[Long] = None, bytes: Option[Long] = None, partitions: Option[Long] = None, created_at: Option[Timestamp] = None, created_by: String, component_type: String, interim_out_port: String, interim_subgraph_name: String, interim_process_id: String, expired: Option[Boolean] = Some(false), status: Option[String], job_uri: Option[String] = None, run_type: Option[String] = None, branch: Option[String] = None) extends ExecutionMetricsEntity with Product with Serializable
  4. case class ComponentRunsWithStatusAndInterims(uid: String, component_uri: String, pipeline_run_uid: String, pipeline_uri: String, fabric_uid: String, component_name: String, interim_component_name: String, records: Option[Long] = None, bytes: Option[Long] = None, partitions: Option[Long] = None, created_at: Option[Timestamp] = None, created_by: String, component_type: String, interim_out_port: String, interim_subgraph_name: String, interim_process_id: String, expired: Option[Boolean] = Some(false), status: Option[String], interim: Option[String] = None, job_uri: Option[String] = None, run_type: Option[String] = None) extends ExecutionMetricsEntity with Product with Serializable
  5. case class Config(pipelineConfig: Option[String], runConfig: Option[String] = None) extends Product with Serializable
  6. case class DatasetRunsResponseCamelCase(uid: String, datasetUID: String, datasetName: Option[String] = None, pipelineUID: String, pipelineName: Option[String] = None, fabric: Option[Fabric], submissionTime: Option[Timestamp] = None, pipelineRun: PipelineRun, componentName: String, componentType: String, recordsProcessed: Option[Long] = None, status: Option[String] = None, interims: Option[String] = None, runDates: Option[Seq[RunDates]] = None, runType: Option[String] = None, jobUID: Option[String] = None, jobName: Option[String] = None, bytes: Option[Long], partition: Option[Long], branch: Option[String] = None) extends ResponseCamelCase with Product with Serializable
  7. trait ExecutionMetricsDAO[A <: ExecutionMetricsEntity] extends AnyRef
  8. sealed trait ExecutionMetricsEntity extends AnyRef

TODO: At the moment we hope Spark will eventually provide something around ExpressionEncoder that keeps case-class field names in camelCase rather than snake_case (see the sketch after this list).

  9. case class Fabric(uid: String, name: Option[String] = None) extends Product with Serializable
  10. case class FileContent(path: String, content: String) extends Product with Serializable
  11. case class HistoricalViewCodeResponse(pipelineId: String, pipelineRun: PipelineRunsResponseCamelCase) extends Product with Serializable
  12. case class InMemoryStore(spark: SparkSession, uuid: String) extends LazyLogging with Product with Serializable
  13. case class InterimPath(subgraph: String, component: String, port: String) extends Product with Serializable
  14. case class InterimResponse(uid: String, interim_component_name: String, interim_out_port: String, interim_process_id: String, interim: String, run_id: Option[String] = None) extends Product with Serializable
  15. case class InterimResponseCamelCase(uid: String, interimComponentName: String, interimOutPort: String, interimProcessId: String, interim: String, submissionTime: Option[Timestamp] = None, runId: Option[String] = None, runConfig: Option[String] = None) extends ResponseCamelCase with Product with Serializable
  16. implicit class LoggingSparkSession extends LazyLogging
  17. case class NextFilters(lastSubmissionTimeInMs: Timestamp, lastUid: String) extends Product with Serializable
  18. case class PipelineRun(uid: String) extends Product with Serializable
  19. case class PipelineRuns(uid: String, pipeline_uri: String, job_uri: Option[String] = None, job_run_uid: String, task_run_uid: String, status: String, fabric_uid: String, time_taken: Option[Long] = None, rows_read: Option[Long] = None, rows_written: Option[Long] = None, created_at: Option[Timestamp] = None, created_by: String, run_type: String, input_datasets: Option[Array[String]] = None, output_datasets: Option[Array[String]] = None, workflow_code: Option[Map[String, String]] = None, expired: Option[Boolean] = Some(false), branch: Option[String] = None, pipeline_config: Option[String] = None, user_config: Option[String] = None, expected_interims: Option[Int] = None, actual_interims: Option[Int] = None, logs: Option[String] = None) extends ExecutionMetricsEntity with Product with Serializable
  20. case class PipelineRunsResponseCamelCase(pipelineRunId: String, pipelineUID: String, jobUID: Option[String] = None, jobName: Option[String] = None, fabric: Option[Fabric] = None, fabricId: Option[Long] = None, submissionTime: Option[Timestamp], status: String, timeTaken: Option[Long] = None, rowsRead: Option[Long] = None, rowsWritten: Option[Long] = None, runType: String, code: Option[Map[String, String]] = None, branch: Option[String] = None, pipelineConfig: Option[String] = None) extends ResponseCamelCase with Product with Serializable
  21. sealed trait ResponseCamelCase extends AnyRef
  22. implicit class ResponseWrapperAsList[T <: ResponseCamelCase] extends AnyRef
  23. case class ResponsesAsList[T <: ResponseCamelCase](rows: List[T], limit: Int, next_filters: Option[NextFilters]) extends Product with Serializable
  24. case class RunDates(uid: String, runId: String, submissionTime: Timestamp) extends Product with Serializable
  25. implicit class SchemaEvolvingDataFrame extends AnyRef
  26. case class UnavailableWorkflowJsonException(msg: String, cause: Throwable) extends Exception with Product with Serializable
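
To see why the entity fields above are snake_case: Spark's ExpressionEncoder derives a Dataset's schema directly from the case-class field names, so those fields must match the stored snake_case column names exactly. A minimal sketch (the Demo class is illustrative, not part of this package):

  import org.apache.spark.sql.Encoders

  // Illustrative entity: field names must mirror the stored snake_case columns.
  case class Demo(pipeline_uri: String, fabric_uid: String)

  object EncoderDemo extends App {
    // The encoder derives the schema from the field names verbatim:
    // prints struct<pipeline_uri:string,fabric_uid:string>
    println(Encoders.product[Demo].schema.simpleString)
    // A camelCase case class would derive pipelineUri and fabricUid instead,
    // so df.as[...] would not line up with snake_case columns.
  }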

Value Members

  1. val ActualInterims: String
  2. val Branch: String
  3. val Bytes: String
  4. val ComponentName: String
  5. val ComponentType: String
  6. val ComponentUri: String
  7. val CreatedAt: String
  8. val CreatedBy: String
  9. val ExpectedInterims: String
  10. val Expired: String
  11. val FabricUID: String
  12. val InputDatasets: String
  13. val Interim: String
  14. val InterimComponentName: String
  15. val InterimOutPort: String
  16. val InterimProcessId: String
  17. val InterimSubgraphName: String
  18. val JobRunUid: String
  19. val JobUri: String
  20. val Logs: String
  21. val OutputDatasets: String
  22. val Partitions: String
  23. val PipelineConfig: String
  24. val PipelineRunUid: String
  25. val PipelineUri: String
  26. val Records: String
  27. val RowsRead: String
  28. val RowsWritten: String
  29. val RunId: String
  30. val RunTypeColumn: String
  31. val Status: String
  32. val TaskRunUid: String
  33. val TimeTaken: String
  34. val UID: String
  35. val UserConfig: String
  36. val WorkflowCode: String
  37. def checkExpiredRowPf[T <: ExecutionMetricsEntity](uid: String): PartialFunction[T, T]
  38. def error(msg: String, cause: Option[Throwable] = None): Nothing
  39. def isDatabricksEnvironment(spark: SparkSession): Boolean
  40. lazy val logger: Logger
    Attributes
    protected
    Definition Classes
    LazyLogging
    Annotations
    @transient()
  41. def now(): Timestamp
  42. def onFail(uid: String): Nothing
  43. implicit lazy val timestampFormat: Format[Timestamp]
  44. object ComponentRuns extends Serializable
  45. object ComponentRunsWithStatus extends Serializable
  46. object ComponentRunsWithStatusAndInterims extends Serializable
  47. object Config extends Serializable
  48. object DatasetRunsResponseCamelCase extends Serializable
  49. object Fabric extends Serializable
  50. object FileContent extends Serializable
  51. object HistoricalViewCodeResponse extends Serializable
  52. object InMemoryStore extends Serializable
  53. object InterimPath extends Serializable
  54. object InterimResponse extends Serializable
  55. object InterimResponseCamelCase extends Serializable
  56. object NextFilters extends Serializable
  57. object PipelineRun extends Serializable
  58. object PipelineRunsResponseCamelCase extends Serializable
  59. object ResponseCamelCase
  60. object ResponsesAsList extends Serializable
  61. object RunDates extends Serializable
  62. object ZipFileExtractor extends LazyLogging
