final
case class
ExasolConfiguration(host: String, port: Int, username: String, password: String, max_nodes: Int, create_table: Boolean, batch_size: Int) extends Product with Serializable
Instance Constructors
-
new
ExasolConfiguration(host: String, port: Int, username: String, password: String, max_nodes: Int, create_table: Boolean, batch_size: Int)
Value Members
-
final
def
!=(arg0: Any): Boolean
-
final
def
##(): Int
-
final
def
==(arg0: Any): Boolean
-
final
def
asInstanceOf[T0]: T0
-
val
batch_size: Int
-
def
clone(): AnyRef
-
val
create_table: Boolean
-
def
finalize(): Unit
-
final
def
getClass(): Class[_]
-
val
host: String
-
final
def
isInstanceOf[T0]: Boolean
-
val
max_nodes: Int
-
final
def
notify(): Unit
-
final
def
notifyAll(): Unit
-
val
password: String
-
val
port: Int
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
-
val
username: String
-
final
def
wait(): Unit
-
final
def
wait(arg0: Long, arg1: Int): Unit
-
final
def
wait(arg0: Long): Unit
The configuration parameters for the Spark Exasol connector.
These can be provided by the user when loading data, or defined in the Spark configuration. For example, provided by the user:
Or defined in the Spark configuration:
If both are defined, the Spark configuration values are used. If neither is defined, the default values are used.