etlflow.task

SparkReadWriteTask

case class SparkReadWriteTask[I <: Product, O <: Product](
    name: String,
    inputLocation: List[String],
    inputType: IOType,
    inputFilter: String = "1 = 1",
    outputLocation: String,
    outputType: IOType,
    outputSaveMode: SaveMode = SaveMode.Append,
    outputPartitionCol: Seq[String] = Seq.empty[String],
    outputFilename: Option[String] = None,
    outputCompression: String = "none",
    outputRepartitioning: Boolean = false,
    outputRepartitioningNum: Int = 1,
    transformFunction: Option[(SparkSession, Dataset[I]) => Dataset[O]] = None
)(implicit evidence$1: scala.reflect.api.JavaUniverse.TypeTag[I], evidence$2: scala.reflect.api.JavaUniverse.TypeTag[O]) extends EtlTask[SparkEnv, Unit] with Product with Serializable

Annotations
@SuppressWarnings()
Linear Supertypes
Serializable, Product, Equals, EtlTask[SparkEnv, Unit], ApplicationLogger, AnyRef, Any

Instance Constructors

  1. new SparkReadWriteTask(
       name: String,
       inputLocation: List[String],
       inputType: IOType,
       inputFilter: String = "1 = 1",
       outputLocation: String,
       outputType: IOType,
       outputSaveMode: SaveMode = SaveMode.Append,
       outputPartitionCol: Seq[String] = Seq.empty[String],
       outputFilename: Option[String] = None,
       outputCompression: String = "none",
       outputRepartitioning: Boolean = false,
       outputRepartitioningNum: Int = 1,
       transformFunction: Option[(SparkSession, Dataset[I]) => Dataset[O]] = None
     )(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[I], arg1: scala.reflect.api.JavaUniverse.TypeTag[O])
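
For orientation, a minimal construction sketch follows. The RatingIn/RatingOut case classes, the paths, and the transform are illustrative assumptions, not part of this listing; the IOType values are left as ??? because their constructors are not shown on this page and vary across etlflow versions.

import etlflow.task.SparkReadWriteTask
import org.apache.spark.sql.{Dataset, SaveMode, SparkSession}

// Hypothetical row types: any case class works for I and O (both must be Products).
case class RatingIn(userId: Long, movieId: Long, rating: Double)
case class RatingOut(userId: Long, avgRating: Double)

val task = SparkReadWriteTask[RatingIn, RatingOut](
  name           = "RatingsAggregation",
  inputLocation  = List("gs://my-bucket/ratings/"),  // illustrative path
  inputType      = ???,                              // an IOType describing the source format
  inputFilter    = "rating > 0",                     // SQL-style predicate; the default "1 = 1" keeps every row
  outputLocation = "gs://my-bucket/ratings_agg/",
  outputType     = ???,                              // an IOType describing the sink format
  outputSaveMode = SaveMode.Overwrite,
  transformFunction = Some { (spark: SparkSession, ds: Dataset[RatingIn]) =>
    import spark.implicits._
    // Aggregate each user's ratings into a single average row.
    ds.groupByKey(_.userId).mapGroups { (id, rows) =>
      val ratings = rows.map(_.rating).toSeq
      RatingOut(id, ratings.sum / ratings.size)
    }
  }
)

When transformFunction is None (the default), the input is presumably written through unchanged, in which case I and O would be the same type.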

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##: Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.CloneNotSupportedException]) @native() @HotSpotIntrinsicCandidate()
  6. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  7. final def execute: RIO[SparkEnv with Audit, Unit]
    Definition Classes
    EtlTask
    Annotations
    @SuppressWarnings()
    Note: execute returns a ZIO effect that still needs SparkEnv and Audit provided; see the run sketch after this member list.
  8. final def getClass(): Class[_ <: AnyRef]
    Definition Classes
    AnyRef → Any
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  9. def getTaskProperties: Map[String, String]
    Definition Classes
    SparkReadWriteTask → EtlTask
  10. val inputFilter: String
  11. val inputLocation: List[String]
  12. val inputType: IOType
  13. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  14. lazy val logger: Logger
    Attributes
    protected
    Definition Classes
    ApplicationLogger
  15. val name: String
    Definition Classes
    SparkReadWriteTask → EtlTask
  16. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  17. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  18. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  19. val outputCompression: String
  20. val outputFilename: Option[String]
  21. val outputLocation: String
  22. val outputPartitionCol: Seq[String]
  23. val outputRepartitioning: Boolean
  24. val outputRepartitioningNum: Int
  25. val outputSaveMode: SaveMode
  26. val outputType: IOType
  27. def process: RIO[SparkEnv, Unit]
    Attributes
    protected
    Definition Classes
    SparkReadWriteTask → EtlTask
  28. def productElementNames: Iterator[String]
    Definition Classes
    Product
  29. def showCorruptedData(numRows: Int = 100): RIO[SparkEnv, Unit]
    Referenced in the run sketch after this member list.
  30. final def synchronized[T0](arg0: => T0): T0
    Definition Classes
    AnyRef
  31. val taskType: String
    Definition Classes
    EtlTask
  32. val transformFunction: Option[(SparkSession, Dataset[I]) => Dataset[O]]
  33. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  34. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException]) @native()
  35. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  36. val zioSlf4jLogger: ULayer[Unit]
    Attributes
    protected
    Definition Classes
    ApplicationLogger
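
The execute member above only describes the work; running it requires a ZIO runtime plus layers for SparkEnv and Audit. A hedged run sketch, assuming ZIO 2; the import paths for SparkEnv and Audit and the layer values are placeholders, since their construction is not part of this page:

import zio.{ZIO, ZIOAppDefault, ZLayer}
// Assumed package paths for the environment types; adjust to the etlflow version in use.
import etlflow.spark.SparkEnv
import etlflow.audit.Audit

object RunRatingsTask extends ZIOAppDefault {
  // Placeholder layers: how SparkEnv and Audit are built is version-specific.
  val sparkLayer: ZLayer[Any, Throwable, SparkEnv] = ???
  val auditLayer: ZLayer[Any, Throwable, Audit]    = ???

  def run: ZIO[Any, Any, Any] =
    task.execute                                                      // task is the value from the construction sketch above
      .tapError(_ => task.showCorruptedData(20).provide(sparkLayer))  // peek at bad rows on failure
      .provide(sparkLayer, auditLayer)
}

showCorruptedData returns its own RIO[SparkEnv, Unit], so it can be sequenced into the same program; here it is attached as an error tap purely for illustration.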

Deprecated Value Members

  1. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.Throwable]) @Deprecated
    Deprecated
