package executionmetrics
The data schema is in snake_case. Spark doesn't give us a native converter from snake_case to camelCase, so we have to do that conversion in this class.
Linear Supertypes
Ordering
- Alphabetic
- By Inheritance
Inherited
- executionmetrics
- AnyRef
- Any
- Hide All
- Show All
Visibility
- Public
- All
Type Members
- case class ComponentRuns(uid: String, component_uri: String, pipeline_uri: String, pipeline_run_uid: String, fabric_uid: String, component_name: String, interim_component_name: String, component_type: String, interim_subgraph_name: String, interim_process_id: String, interim_out_port: String, created_at: Option[Timestamp] = None, created_by: String, records: Option[Long] = None, bytes: Option[Long] = None, partitions: Option[Long] = None, expired: Option[Boolean] = Some(false), run_type: Option[String], job_uri: Option[String], branch: Option[String] = None) extends ExecutionMetricsEntity with Product with Serializable
- case class ComponentRunsWithRunDates(componentRunsWithStatusAndInterims: Option[List[ComponentRunsWithStatusAndInterims]] = None, runDates: Option[List[RunDates]] = None) extends Product with Serializable
- case class ComponentRunsWithStatus(uid: String, component_uri: String, pipeline_run_uid: String, pipeline_uri: String, fabric_uid: String, component_name: String, interim_component_name: String, records: Option[Long] = None, bytes: Option[Long] = None, partitions: Option[Long] = None, created_at: Option[Timestamp] = None, created_by: String, component_type: String, interim_out_port: String, interim_subgraph_name: String, interim_process_id: String, expired: Option[Boolean] = Some(false), status: Option[String], job_uri: Option[String] = None, run_type: Option[String] = None, branch: Option[String] = None) extends ExecutionMetricsEntity with Product with Serializable
- case class ComponentRunsWithStatusAndInterims(uid: String, component_uri: String, pipeline_run_uid: String, pipeline_uri: String, fabric_uid: String, component_name: String, interim_component_name: String, records: Option[Long] = None, bytes: Option[Long] = None, partitions: Option[Long] = None, created_at: Option[Timestamp] = None, created_by: String, component_type: String, interim_out_port: String, interim_subgraph_name: String, interim_process_id: String, expired: Option[Boolean] = Some(false), status: Option[String], interim: Option[String] = None, job_uri: Option[String] = None, run_type: Option[String] = None) extends ExecutionMetricsEntity with Product with Serializable
- case class DatasetRunsResponseCamelCase(uid: String, datasetUID: String, datasetName: Option[String] = None, pipelineUID: String, pipelineName: Option[String] = None, fabric: Option[Fabric], submissionTime: Option[Timestamp] = None, pipelineRun: PipelineRun, componentName: String, componentType: String, recordsProcessed: Option[Long] = None, status: Option[String] = None, interims: Option[String] = None, runDates: Option[Seq[RunDates]] = None, runType: Option[String] = None, jobUID: Option[String] = None, jobName: Option[String] = None, bytes: Option[Long], partition: Option[Long], branch: Option[String] = None) extends ResponseCamelCase with Product with Serializable
- trait ExecutionMetricsDAO[A <: ExecutionMetricsEntity] extends AnyRef
- sealed trait ExecutionMetricsEntity extends AnyRef
TODO: At the moment we convert case manually; ideally Spark would offer a way (e.g. via ExpressionEncoder) to keep field names in camelCase rather than snake_case.
- case class Fabric(uid: String, name: Option[String] = None) extends Product with Serializable
- case class FileContent(path: String, content: String) extends Product with Serializable
- case class HistoricalViewCodeResponse(pipelineId: String, pipelineRun: PipelineRunsResponseCamelCase) extends Product with Serializable
- case class InMemoryStore(uuid: String, sparkSession: SparkSession)(implicit ec: ExecutionContext) extends LazyLogging with Product with Serializable
- case class InterimResponse(uid: String, interim_component_name: String, interim_out_port: String, interim_process_id: String, interim: String) extends Product with Serializable
- case class InterimResponseCamelCase(uid: String, interimComponentName: String, interimOutPort: String, interimProcessId: String, interim: String, submissionTime: Option[Timestamp] = None) extends ResponseCamelCase with Product with Serializable
- implicit class LoggingSparkSession extends LazyLogging
- case class NextFilters(lastSubmissionTimeInMs: Timestamp, lastUid: String) extends Product with Serializable
- case class PipelineRun(uid: String) extends Product with Serializable
- case class PipelineRuns(uid: String, pipeline_uri: String, job_uri: Option[String] = None, job_run_uid: String, task_run_uid: String, status: String, fabric_uid: String, time_taken: Option[Long] = None, rows_read: Option[Long] = None, rows_written: Option[Long] = None, created_at: Option[Timestamp] = None, created_by: String, run_type: String, input_datasets: Option[Array[String]] = None, output_datasets: Option[Array[String]] = None, workflow_code: Option[Map[String, String]] = None, expired: Option[Boolean] = Some(false), branch: Option[String] = None) extends ExecutionMetricsEntity with Product with Serializable
- case class PipelineRunsResponseCamelCase(pipelineRunId: String, pipelineUID: String, jobUID: Option[String] = None, jobName: Option[String] = None, fabric: Option[Fabric] = None, fabricId: Option[Long] = None, submissionTime: Option[Timestamp], status: String, timeTaken: Option[Long] = None, rowsRead: Option[Long] = None, rowsWritten: Option[Long] = None, runType: String, code: Option[Map[String, String]] = None, branch: Option[String] = None) extends ResponseCamelCase with Product with Serializable
- sealed trait ResponseCamelCase extends AnyRef
- implicit class ResponseWrapperAsList[T <: ResponseCamelCase] extends AnyRef
- case class ResponsesAsList[T <: ResponseCamelCase](rows: List[T], limit: Int, next_filters: Option[NextFilters]) extends Product with Serializable
- case class RunDates(uid: String, runId: String, submissionTime: Timestamp) extends Product with Serializable
- sealed trait RunningMode extends EnumEntry
Value Members
- val Branch: String
- val Bytes: String
- val ComponentName: String
- val ComponentType: String
- val ComponentUri: String
- val CreatedAt: String
- val CreatedBy: String
- val Expired: String
- val FabricUID: String
- val InputDatasets: String
- val Interim: String
- val InterimComponentName: String
- val InterimOutPort: String
- val InterimProcessId: String
- val InterimSubgraphName: String
- val JobRunUid: String
- val JobUri: String
- val OutputDatasets: String
- val Partitions: String
- val PipelineRunUid: String
- val PipelineUri: String
- val Records: String
- val RowsRead: String
- val RowsWritten: String
- val RunTypeColumn: String
- val Status: String
- val TaskRunUid: String
- val TimeTaken: String
- val UID: String
- val WorkflowCode: String
- val WorkflowJson: String
- def checkExpiredRowPf[T <: ExecutionMetricsEntity](uid: String): PartialFunction[T, T]
- def error(msg: String, cause: Option[Throwable] = None): Nothing
- def now(): Timestamp
- def onFail(uid: String): Nothing
- implicit lazy val timestampFormat: Format[Timestamp]
- object ComponentRuns extends Serializable
- object ComponentRunsWithStatus extends Serializable
- object ComponentRunsWithStatusAndInterims extends Serializable
- object DatasetRunsResponseCamelCase extends Serializable
- object Fabric extends Serializable
- object FileContent extends Serializable
- object HistoricalViewCodeResponse extends Serializable
- object InterimResponse extends Serializable
- object InterimResponseCamelCase extends Serializable
- object NextFilters extends Serializable
- object PipelineRun extends Serializable
- object PipelineRuns extends Serializable
- object PipelineRunsResponseCamelCase extends Serializable
- object ResponseCamelCase
- object ResponsesAsList extends Serializable
- object RunDates extends Serializable
- object RunningMode extends Enum[RunningMode] with PlayJsonEnum[RunningMode]
- object ZipFileExtractor extends LazyLogging