com.memsql.spark.connector

MemSQLContext

class MemSQLContext extends org.apache.spark.sql.memsql.MemSQLContext

A MemSQL cluster-aware org.apache.spark.sql.SQLContext that can read org.apache.spark.sql.DataFrames from MemSQL tables and write them back.

Configuration for the MemSQL cluster is set via com.memsql.spark.connector.MemSQLConf.

NOTE: This class is just a proxy so that you can import from com.memsql.spark.connector rather than org.apache.spark.sql.memsql.

See also

org.apache.spark.sql.memsql.MemSQLContext
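
Example

A minimal sketch of constructing a MemSQLContext and reading a MemSQL table into a DataFrame. The spark.memsql.* keys shown are the settings read by com.memsql.spark.connector.MemSQLConf (treat them as assumptions and check MemSQLConf for the authoritative list); the host, database, and table names are hypothetical:

  import org.apache.spark.{SparkConf, SparkContext}
  import com.memsql.spark.connector.MemSQLContext

  val conf = new SparkConf()
    .setAppName("MemSQLContextExample")
    .set("spark.memsql.host", "127.0.0.1")       // master aggregator host (hypothetical)
    .set("spark.memsql.port", "3306")
    .set("spark.memsql.user", "root")
    .set("spark.memsql.defaultDatabase", "test") // hypothetical database

  val sc  = new SparkContext(conf)
  val msc = new MemSQLContext(sc)

  val df = msc.table("people")                   // hypothetical table in the default database
  df.show()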

Linear Supertypes

org.apache.spark.sql.memsql.MemSQLContext, SQLContext, Serializable, Serializable, Logging, AnyRef, Any

Instance Constructors

  1. new MemSQLContext(sc: SparkContext)

    sc

    The context for your Spark application

Type Members

  1. class QueryExecution extends AnyRef

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
    Annotations
    @DeveloperApi()
  2. class SQLSession extends AnyRef

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  3. class SparkPlanner extends SparkStrategies

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext

Value Members

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. lazy val analyzer: Analyzer

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  7. def applySchemaToPythonRDD(rdd: RDD[Array[Any]], schema: StructType): DataFrame

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  8. def applySchemaToPythonRDD(rdd: RDD[Array[Any]], schemaString: String): DataFrame

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  9. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  10. def baseRelationToDataFrame(baseRelation: BaseRelation): DataFrame

    Definition Classes
    SQLContext
  11. val cacheManager: CacheManager

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  12. def cacheTable(tableName: String): Unit

    Definition Classes
    SQLContext
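    Example (standard SQLContext behavior; "people" is a hypothetical registered table)
    msc.cacheTable("people")
    assert(msc.isCached("people"))
    msc.uncacheTable("people")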
  13. lazy val catalog: Catalog

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    MemSQLContext → SQLContext
  14. def clearCache(): Unit

    Definition Classes
    SQLContext
  15. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  16. def conf: SQLConf

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  17. def createDataFrame(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
  18. def createDataFrame(rdd: RDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
  19. def createDataFrame(rowRDD: JavaRDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @DeveloperApi()
  20. def createDataFrame(rowRDD: RDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @DeveloperApi()
  21. def createDataFrame[A <: Product](data: Seq[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  22. def createDataFrame[A <: Product](rdd: RDD[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
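    Example (a minimal sketch; tuples are Products, so the resulting columns are named _1 and _2)
    val rdd = msc.sparkContext.parallelize(Seq((1, "a"), (2, "b")))
    val df  = msc.createDataFrame(rdd)
    df.printSchema()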
  23. def createExternalTable(tableName: String, source: String, schema: StructType, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  24. def createExternalTable(tableName: String, source: String, schema: StructType, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  25. def createExternalTable(tableName: String, source: String, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  26. def createExternalTable(tableName: String, source: String, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  27. def createExternalTable(tableName: String, path: String, source: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  28. def createExternalTable(tableName: String, path: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  29. def createSession(): SQLSession

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  30. def currentSession(): SQLSession

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  31. val ddlParser: DDLParser

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  32. val defaultSession: SQLSession

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  33. def detachSession(): Unit

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  34. def dialectClassName: String

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  35. def dropTempTable(tableName: String): Unit

    Definition Classes
    SQLContext
  36. lazy val emptyDataFrame: DataFrame

    Definition Classes
    SQLContext
  37. lazy val emptyResult: RDD[InternalRow]

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  38. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  39. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  40. def executePlan(plan: LogicalPlan): QueryExecution

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  41. def executeSql(sql: String): QueryExecution

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  42. val experimental: ExperimentalMethods

    Definition Classes
    SQLContext
  43. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  44. lazy val functionRegistry: FunctionRegistry

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  45. def getAllConfs: Map[String, String]

    Definition Classes
    SQLContext
  46. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  47. def getConf(key: String, defaultValue: String): String

    Definition Classes
    SQLContext
  48. def getConf(key: String): String

    Definition Classes
    SQLContext
  49. def getDatabase: String

    Definition Classes
    MemSQLContext
  50. def getMemSQLCluster: MemSQLCluster

    Definition Classes
    MemSQLContext
  51. def getSQLDialect(): ParserDialect

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  52. def getSchema(beanClass: Class[_]): Seq[AttributeReference]

    Attributes
    protected
    Definition Classes
    SQLContext
  53. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  54. def isCached(tableName: String): Boolean

    Definition Classes
    SQLContext
  55. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  56. def isTraceEnabled(): Boolean

    Attributes
    protected
    Definition Classes
    Logging
  57. def log: Logger

    Attributes
    protected
    Definition Classes
    Logging
  58. def logDebug(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  59. def logDebug(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  60. def logError(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  61. def logError(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  62. def logInfo(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  63. def logInfo(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  64. def logName: String

    Attributes
    protected
    Definition Classes
    Logging
  65. def logTrace(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  66. def logTrace(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  67. def logWarning(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  68. def logWarning(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  69. def maybeTable(tableName: String): Option[DataFrame]

    Definition Classes
    MemSQLContext
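    Example (a sketch of the non-throwing lookup implied by the Option return type; "people" is a hypothetical table)
    msc.maybeTable("people") match {
      case Some(df) => df.show()
      case None     => println("no such table")
    }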
  70. var memSQLConf: MemSQLConf

    Definition Classes
    MemSQLContext
  71. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  72. final def notify(): Unit

    Definition Classes
    AnyRef
  73. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  74. def openSession(): SQLSession

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  75. lazy val optimizer: Optimizer

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  76. def parseDataType(dataTypeString: String): DataType

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  77. def parseSql(sql: String): LogicalPlan

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  78. val planner: SparkPlanner

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  79. val prepareForExecution: RuleExecutor[SparkPlan]

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  80. def range(start: Long, end: Long, step: Long, numPartitions: Int): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  81. def range(start: Long, end: Long): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  82. def range(end: Long): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
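    Example (standard SQLContext behavior; yields a single LongType column named "id")
    val ids = msc.range(0, 1000, 10, 4) // start, end (exclusive), step, numPartitions
    println(ids.count())                // 100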
  83. def read: DataFrameReader

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
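    Example (a sketch; the "com.memsql.spark.connector" format name and the "path" option follow the connector's documented data-source usage, but treat both as assumptions)
    val df = msc.read
      .format("com.memsql.spark.connector")
      .options(Map("path" -> "test.people")) // hypothetical database.table
      .load()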
  84. def setConf(key: String, value: String): Unit

    Definition Classes
    SQLContext
  85. def setConf(props: Properties): Unit

    Definition Classes
    SQLContext
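    Example (standard SQLContext configuration; the key shown is a stock Spark SQL setting)
    msc.setConf("spark.sql.shuffle.partitions", "8")
    assert(msc.getConf("spark.sql.shuffle.partitions") == "8")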
  86. def setDatabase(dbName: String): Unit

    Definition Classes
    MemSQLContext
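    Example (a sketch; "analytics" is a hypothetical database name)
    msc.setDatabase("analytics")
    assert(msc.getDatabase == "analytics")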
  87. def setSession(session: SQLSession): Unit

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  88. val sparkContext: SparkContext

    Definition Classes
    SQLContext
  89. def sql(sqlText: String, bypassCatalyst: Boolean): DataFrame

    Definition Classes
    MemSQLContext
  90. def sql(sqlText: String): DataFrame

    Definition Classes
    MemSQLContext → SQLContext
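    Example (a sketch; judging by the parameter name, bypassCatalyst = true lets the connector run the query directly on MemSQL rather than planning it through Catalyst — "people" is a hypothetical table)
    val planned = msc.sql("SELECT COUNT(*) FROM people")
    val pushed  = msc.sql("SELECT COUNT(*) FROM people", bypassCatalyst = true)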
  91. val sqlParser: SparkSQLParser

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  92. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  93. def table(tableIdent: TableIdentifier): DataFrame

    Definition Classes
    MemSQLContext
  94. def table(tableName: String): DataFrame

    Definition Classes
    SQLContext
  95. def tableNames(databaseName: String): Array[String]

    Definition Classes
    SQLContext
  96. def tableNames(): Array[String]

    Definition Classes
    SQLContext
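    Example (standard SQLContext behavior; "test" is a hypothetical database name)
    msc.tableNames("test").foreach(println)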
  97. def tables(databaseName: String): DataFrame

    Definition Classes
    SQLContext
  98. def tables(): DataFrame

    Definition Classes
    SQLContext
  99. val tlSession: ThreadLocal[SQLSession]

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  100. def toString(): String

    Definition Classes
    AnyRef → Any
  101. val udf: UDFRegistration

    Definition Classes
    SQLContext
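    Example (standard SQLContext UDF registration; "plusOne" is a hypothetical function name)
    msc.udf.register("plusOne", (x: Int) => x + 1)
    msc.range(3).selectExpr("plusOne(id)").show() // 1, 2, 3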
  102. def uncacheTable(tableName: String): Unit

    Definition Classes
    SQLContext
  103. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  104. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  105. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )

Deprecated Value Members

  1. def applySchema(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) use createDataFrame

  2. def applySchema(rdd: RDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) use createDataFrame

  3. def applySchema(rowRDD: JavaRDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) use createDataFrame

  4. def applySchema(rowRDD: RDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) use createDataFrame

  5. def jdbc(url: String, table: String, theParts: Array[String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) use read.jdbc()

  6. def jdbc(url: String, table: String, columnName: String, lowerBound: Long, upperBound: Long, numPartitions: Int): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) use read.jdbc()

  7. def jdbc(url: String, table: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) use read.jdbc()

  8. def jsonFile(path: String, samplingRatio: Double): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json()

  9. def jsonFile(path: String, schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json()

  10. def jsonFile(path: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json()

  11. def jsonRDD(json: JavaRDD[String], samplingRatio: Double): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json()

  12. def jsonRDD(json: RDD[String], samplingRatio: Double): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json()

  13. def jsonRDD(json: JavaRDD[String], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json()

  14. def jsonRDD(json: RDD[String], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json()

  15. def jsonRDD(json: JavaRDD[String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json()

  16. def jsonRDD(json: RDD[String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json()

  17. def load(source: String, schema: StructType, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).schema(schema).options(options).load()

  18. def load(source: String, schema: StructType, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).schema(schema).options(options).load()

  19. def load(source: String, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).options(options).load()

  20. def load(source: String, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).options(options).load()

  21. def load(path: String, source: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).load(path)

  22. def load(path: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.load(path)

  23. def parquetFile(paths: String*): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated @varargs()
    Deprecated

    (Since version 1.4.0) Use read.parquet()
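
Example

A sketch of migrating from the deprecated loaders to the DataFrameReader equivalents named in the deprecation messages above ("people.json" and the JDBC URL are hypothetical):

  // before (deprecated since 1.4.0)
  val a = msc.jsonFile("people.json")
  val b = msc.jdbc("jdbc:mysql://127.0.0.1:3306/test", "people")

  // after
  val a2 = msc.read.json("people.json")
  val b2 = msc.read.jdbc("jdbc:mysql://127.0.0.1:3306/test", "people", new java.util.Properties())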
