case class InMemoryStore(spark: SparkSession, uuid: String) extends LazyLogging with Product with Serializable
Linear Supertypes
Ordering
- Alphabetic
- By Inheritance
Inherited
- InMemoryStore
- Serializable
- Serializable
- Product
- Equals
- LazyLogging
- AnyRef
- Any
- Hide All
- Show All
Visibility
- Public
- All
Instance Constructors
- new InMemoryStore(spark: SparkSession, uuid: String)
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native() @HotSpotIntrinsicCandidate()
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- def init(pipelineURI: String, jobURI: Option[String], fabricUID: String, runType: RunType, createdBy: String, code: Option[RecursiveDirectoryContent], branch: String, dbSuffix: String, expectedInterims: List[InterimKey], pipelineConfig: Option[String] = None): Unit
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
lazy val
logger: Logger
- Attributes
- protected
- Definition Classes
- LazyLogging
- Annotations
- @transient()
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
offload(executionMetricsTable: MetricsTableNames = MetricsTableNames(), interimDetails: List[(InterimKey, DataFrame)] = Nil, storageFormat: MetricsStore = MetricsStore.DeltaStore, isPartitioningDisabled: Boolean = false): (List[ComponentRuns], List[String])
- Annotations
- @silent( "discarded non-Unit value" )
- val spark: SparkSession
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- InMemoryStore → AnyRef → Any
- def updateInterims(interim: LInterimContent): Unit
- def updateMetrics(status: PipelineStatus, rowsRead: Option[Long], rowsWritten: Option[Long], timeTaken: Option[Long]): Unit
- def updateRunUID(jobRunUID: String, taskRunUID: String): Unit
- val uuid: String
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
Deprecated Value Members
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] ) @Deprecated @deprecated
- Deprecated
(Deprecated; see the corresponding Javadoc for more information.)