object Converter extends Logging

A helper object with functions to convert a JDBC java.sql.ResultSet into Spark org.apache.spark.sql.Row objects, or vice versa. Most of the functions here are adapted from Spark's spark/sql/execution/datasources/jdbc/JdbcUtils.scala.

Value Members
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def clone(): AnyRef
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def finalize(): Unit
- final def getClass(): Class[_]
- def hashCode(): Int
- def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
- def initializeLogIfNecessary(isInterpreter: Boolean): Unit
- final def isInstanceOf[T0]: Boolean
- def isTraceEnabled(): Boolean
- def log: Logger
- def logDebug(msg: ⇒ String, throwable: Throwable): Unit
- def logDebug(msg: ⇒ String): Unit
- def logError(msg: ⇒ String, throwable: Throwable): Unit
- def logError(msg: ⇒ String): Unit
- def logInfo(msg: ⇒ String, throwable: Throwable): Unit
- def logInfo(msg: ⇒ String): Unit
- def logName: String
- def logTrace(msg: ⇒ String, throwable: Throwable): Unit
- def logTrace(msg: ⇒ String): Unit
- def logWarning(msg: ⇒ String, throwable: Throwable): Unit
- def logWarning(msg: ⇒ String): Unit
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- def resultSetToRows(resultSet: ResultSet, schema: StructType): Iterator[Row]
- def resultSetToSparkInternalRows(resultSet: ResultSet, schema: StructType): Iterator[InternalRow]
- final def synchronized[T0](arg0: ⇒ T0): T0
- def toString(): String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
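The two conversion members, resultSetToRows and resultSetToSparkInternalRows, are the substance of this object. Below is a minimal usage sketch. It assumes an in-memory H2 database on the JDBC side, a hypothetical users table, and that Converter is importable from the connector's package (which this page does not show); none of those specifics come from the listing above.

```scala
import java.sql.{DriverManager, ResultSet}
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

object ConverterUsage {
  def main(args: Array[String]): Unit = {
    // Hypothetical JDBC source: an H2 in-memory database with a `users` table.
    // Any JDBC driver on the classpath would do; adjust the URL accordingly.
    val conn = DriverManager.getConnection("jdbc:h2:mem:demo")
    val rs: ResultSet =
      conn.createStatement().executeQuery("SELECT id, name FROM users")

    // The Spark schema must line up with the query's columns, in order.
    val schema = StructType(Seq(
      StructField("id", IntegerType, nullable = false),
      StructField("name", StringType, nullable = true)
    ))

    // Converter's package is not shown on this page; import it from
    // wherever the connector publishes it.
    val rows: Iterator[Row] = Converter.resultSetToRows(rs, schema)

    // The iterator is presumably backed by the live ResultSet, so consume
    // it fully before closing the connection.
    rows.foreach(println)
    conn.close()
  }
}
```

resultSetToSparkInternalRows has the same shape but produces org.apache.spark.sql.catalyst.InternalRow values, the representation Spark uses internally for data sources.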
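One detail worth noticing in the inherited logging members: the message is a by-name parameter (msg: ⇒ String), so the string is only built if the corresponding log level is enabled. A self-contained sketch of that pattern, mirroring the shape of logDebug from the listing rather than Spark's actual Logging internals:

```scala
object ByNameDemo {
  var debugEnabled = false // stand-in for a real logger's level check

  // Same shape as `def logDebug(msg: ⇒ String): Unit` above:
  // `msg` is evaluated only when the branch is taken.
  def logDebug(msg: => String): Unit =
    if (debugEnabled) println(s"DEBUG: $msg")

  def main(args: Array[String]): Unit = {
    // With debug disabled, this expensive string is never constructed.
    logDebug {
      val payload = (1 to 1000000).mkString(",")
      s"payload length = ${payload.length}"
    }
  }
}
```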