org.apache.spark.sql.memsql.MemSQLContext

A MemSQL-cluster-aware org.apache.spark.sql.SQLContext that can read and write org.apache.spark.sql.DataFrames from MemSQL tables. Configuration for the MemSQL cluster is set via com.memsql.spark.connector.MemSQLConf.

NOTE: This class is just a proxy so that you can import from com.memsql.spark.connector rather than org.apache.spark.sql.memsql.
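For orientation, the sketch below shows one way to construct and use the context. It is a minimal sketch, assuming the one-argument SparkContext constructor and a reachable MemSQL cluster; the spark.memsql.* keys, host/port values, and the table name my_table are illustrative assumptions, with the authoritative settings defined by MemSQLConf.

    import org.apache.spark.{SparkConf, SparkContext}
    import com.memsql.spark.connector.MemSQLContext

    // Illustrative configuration; the exact keys are defined by MemSQLConf.
    val conf = new SparkConf()
      .setAppName("MemSQLContextExample")
      .set("spark.memsql.host", "127.0.0.1")
      .set("spark.memsql.port", "3306")
      .set("spark.memsql.defaultDatabase", "test")

    val sc = new SparkContext(conf)
    val msc = new MemSQLContext(sc)

    // MemSQLContext behaves as a drop-in SQLContext: sql(...) returns a DataFrame.
    val df = msc.sql("SELECT * FROM my_table") // my_table is hypothetical
    df.show()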
Type Members

- class QueryExecution extends AnyRef
- class SQLSession extends AnyRef
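QueryExecution is the type returned by executeSql (listed under Value Members below); it is useful for inspecting how a statement is planned without running it. A minimal sketch, assuming msc is a MemSQLContext built as in the example above:

    // Parse, analyze, and plan the statement; printing the QueryExecution
    // shows the logical, optimized, and physical plans.
    val qe = msc.executeSql("SELECT 1")
    println(qe)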
Value Members
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: Any): Boolean
- lazy val analyzer: Analyzer
- def applySchemaToPythonRDD(rdd: RDD[Array[Any]], schemaString: String): DataFrame
- final def asInstanceOf[T0]: T0
- def baseRelationToDataFrame(baseRelation: BaseRelation): DataFrame
- def cacheTable(tableName: String): Unit
- lazy val catalog: Catalog
- def clearCache(): Unit
- def clone(): AnyRef
- def createDataFrame(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame
- def createDataFrame(rdd: RDD[_], beanClass: Class[_]): DataFrame
- def createDataFrame[A <: Product](data: Seq[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame
- def createDataFrame[A <: Product](rdd: RDD[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame
- def createExternalTable(tableName: String, source: String, schema: StructType, options: java.util.Map[String, String]): DataFrame
- def createExternalTable(tableName: String, source: String, schema: StructType, options: Map[String, String]): DataFrame
- def createExternalTable(tableName: String, source: String, options: java.util.Map[String, String]): DataFrame
- def createExternalTable(tableName: String, source: String, options: Map[String, String]): DataFrame
- def createExternalTable(tableName: String, path: String, source: String): DataFrame
- def createExternalTable(tableName: String, path: String): DataFrame
- def createSession(): SQLSession
- def currentSession(): SQLSession
- def detachSession(): Unit
- def dialectClassName: String
- def dropTempTable(tableName: String): Unit
- lazy val emptyDataFrame: DataFrame
- def executeSql(sql: String): QueryExecution
- def finalize(): Unit
- def getAllConfs: Map[String, String]
- final def getClass(): Class[_]
- def getConf(key: String, defaultValue: String): String
- def getConf(key: String): String
- def getDatabase: String
- def getSchema(beanClass: Class[_]): Seq[AttributeReference]
- def hashCode(): Int
- def isCached(tableName: String): Boolean
- final def isInstanceOf[T0]: Boolean
- def isTraceEnabled(): Boolean
- def logDebug(msg: ⇒ String, throwable: Throwable): Unit
- def logDebug(msg: ⇒ String): Unit
- def logError(msg: ⇒ String, throwable: Throwable): Unit
- def logError(msg: ⇒ String): Unit
- def logInfo(msg: ⇒ String, throwable: Throwable): Unit
- def logInfo(msg: ⇒ String): Unit
- def logName: String
- def logTrace(msg: ⇒ String, throwable: Throwable): Unit
- def logTrace(msg: ⇒ String): Unit
- def logWarning(msg: ⇒ String, throwable: Throwable): Unit
- def logWarning(msg: ⇒ String): Unit
- def maybeTable(tableName: String): Option[DataFrame]
- final def notify(): Unit
- final def notifyAll(): Unit
- lazy val optimizer: Optimizer
- def parseDataType(dataTypeString: String): DataType
- def parseSql(sql: String): LogicalPlan
- val prepareForExecution: RuleExecutor[SparkPlan]
- def range(start: Long, end: Long, step: Long, numPartitions: Int): DataFrame
- def setConf(key: String, value: String): Unit
- def setConf(props: Properties): Unit
- def setDatabase(dbName: String): Unit
- def sql(sqlText: String, bypassCatalyst: Boolean): DataFrame
- def sql(sqlText: String): DataFrame
- final def synchronized[T0](arg0: ⇒ T0): T0
- def table(tableName: String): DataFrame
- def tableNames(databaseName: String): Array[String]
- def tableNames(): Array[String]
- def tables(databaseName: String): DataFrame
- val tlSession: ThreadLocal[SQLSession]
- def toString(): String
- def uncacheTable(tableName: String): Unit
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
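To tie a few of the members above together, here is a hedged sketch: Point is a hypothetical case class, and msc is again assumed to be a configured MemSQLContext. createDataFrame infers the schema from the Product type via its TypeTag; cacheTable, isCached, and uncacheTable manage the in-memory cache for a registered table.

    // Hypothetical case class used only for this illustration.
    case class Point(x: Double, y: Double)

    val df = msc.createDataFrame(Seq(Point(0.0, 0.0), Point(1.0, 2.0)))
    df.registerTempTable("points")   // DataFrame method in Spark 1.x

    msc.cacheTable("points")         // cache the table's contents in memory
    assert(msc.isCached("points"))
    println(msc.table("points").count())
    msc.uncacheTable("points")
    msc.dropTempTable("points")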
Deprecated Value Members
- def applySchema(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame
- def applySchema(rdd: RDD[_], beanClass: Class[_]): DataFrame
- def jdbc(url: String, table: String, theParts: Array[String]): DataFrame
- def jdbc(url: String, table: String, columnName: String, lowerBound: Long, upperBound: Long, numPartitions: Int): DataFrame
- def jdbc(url: String, table: String): DataFrame
- def jsonFile(path: String, samplingRatio: Double): DataFrame
- def jsonFile(path: String): DataFrame
- def jsonRDD(json: JavaRDD[String], samplingRatio: Double): DataFrame
- def jsonRDD(json: RDD[String], samplingRatio: Double): DataFrame
- def jsonRDD(json: RDD[String]): DataFrame
- def load(source: String, schema: StructType, options: java.util.Map[String, String]): DataFrame
- def load(source: String, schema: StructType, options: Map[String, String]): DataFrame
- def load(source: String, options: java.util.Map[String, String]): DataFrame
- def load(source: String, options: Map[String, String]): DataFrame
- def load(path: String, source: String): DataFrame
- def load(path: String): DataFrame
- def parquetFile(paths: String*): DataFrame
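These members are carried over from SQLContext and were deprecated in Spark 1.4 in favor of the DataFrameReader API (sqlContext.read). A minimal sketch with hypothetical paths and JDBC coordinates, assuming msc as above:

    val people = msc.jsonFile("/tmp/people.json")       // prefer msc.read.json(...)
    val events = msc.parquetFile("/tmp/events.parquet") // prefer msc.read.parquet(...)
    val orders = msc.jdbc("jdbc:mysql://127.0.0.1:3306/test", "orders") // prefer msc.read.jdbc(...)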
Inherited from Serializable