package it.agilelab.bigdata.wasp.consumers.spark.plugins.postgresql
PostgreSQLConsumerSparkPlugin
Companion object PostgreSQLConsumerSparkPlugin
class PostgreSQLConsumerSparkPlugin extends WaspConsumersSparkPlugin with Logging
A WASP Consumers Spark plugin to write to PostgreSQL via batch and streaming, with support for INSERT ON CONFLICT
operations, connection pooling, batching and transactions.
Linear Supertypes
Ordering
- Alphabetic
- By Inheritance
Inherited
- PostgreSQLConsumerSparkPlugin
- Logging
- WaspConsumersSparkPlugin
- AnyRef
- Any
- Hide All
- Show All
Visibility
- Public
- All
Instance Constructors
- new PostgreSQLConsumerSparkPlugin()
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.CloneNotSupportedException] ) @native()
-
def
datastoreProduct: DatastoreProduct
- Definition Classes
- PostgreSQLConsumerSparkPlugin → WaspConsumersSparkPlugin
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
getSparkBatchReader(sc: SparkContext, readerModel: ReaderModel): SparkBatchReader
- Definition Classes
- PostgreSQLConsumerSparkPlugin → WaspConsumersSparkPlugin
-
def
getSparkBatchWriter(sc: SparkContext, writerModel: WriterModel): SparkBatchWriter
- Definition Classes
- PostgreSQLConsumerSparkPlugin → WaspConsumersSparkPlugin
-
def
getSparkStructuredStreamingReader(ss: SparkSession, structuredStreamingETLModel: StructuredStreamingETLModel, streamingReaderModel: StreamingReaderModel): SparkStructuredStreamingReader
- Definition Classes
- PostgreSQLConsumerSparkPlugin → WaspConsumersSparkPlugin
-
def
getSparkStructuredStreamingWriter(ss: SparkSession, structuredStreamingETLModel: StructuredStreamingETLModel, writerModel: WriterModel): SparkStructuredStreamingWriter
- Definition Classes
- PostgreSQLConsumerSparkPlugin → WaspConsumersSparkPlugin
-
def
getValidationRules: Seq[ValidationRule]
- Definition Classes
- PostgreSQLConsumerSparkPlugin → WaspConsumersSparkPlugin
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
initialize(waspDB: WaspDB): Unit
- Definition Classes
- PostgreSQLConsumerSparkPlugin → WaspConsumersSparkPlugin
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
val
logger: WaspLogger
- Attributes
- protected
- Definition Classes
- Logging
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- var sqlSinkBL: SQLSinkBL
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.InterruptedException] )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.InterruptedException] )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.InterruptedException] ) @native()