org.apache.spark.sql.hive.sparklinedata

SparklineDataContext

class SparklineDataContext extends HiveContext with Logging
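
A HiveContext drop-in that installs Sparkline-specific query machinery: it overrides the metastore catalog (catalog), the logical optimizer (optimizer), the SQL dialect (getSQLDialect) and session creation (newSession), and adds the Sparkline-only members currentDB and moduleLoader.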

Linear Supertypes
HiveContext, SQLContext, scala.Serializable, java.io.Serializable, Logging, AnyRef, Any

Instance Constructors

  1. new SparklineDataContext(sc: JavaSparkContext)

  2. new SparklineDataContext(sc: SparkContext)

  3. new SparklineDataContext(sc: SparkContext, cacheManager: execution.CacheManager, listener: SQLListener, execHive: ClientWrapper, metaHive: ClientInterface, isRootContext: Boolean)
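
Example: constructing a context from an existing SparkContext. A minimal sketch; the application name and master URL are placeholders, and the six-argument constructor is normally only invoked internally (for example by newSession()).

  import org.apache.spark.{SparkConf, SparkContext}
  import org.apache.spark.sql.hive.sparklinedata.SparklineDataContext

  val conf = new SparkConf()
    .setAppName("sparkline-example")  // placeholder application name
    .setMaster("local[*]")            // placeholder master URL
  val sc = new SparkContext(conf)

  // As with HiveContext, passing just the SparkContext (or a JavaSparkContext)
  // lets the context create its Hive clients, cache manager and SQL listener
  // internally.
  val sqlContext = new SparklineDataContext(sc)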

Type Members

  1. class QueryExecution extends execution.QueryExecution

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  2. class SparkPlanner extends execution.SparkPlanner

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.6.0) use org.apache.spark.sql.SparkPlanner

Value Members

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. def addJar(path: String): Unit

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  7. def analyze(tableName: String): Unit

    Definition Classes
    HiveContext
  8. lazy val analyzer: Analyzer

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  9. def applySchemaToPythonRDD(rdd: RDD[Array[Any]], schema: StructType): DataFrame

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  10. def applySchemaToPythonRDD(rdd: RDD[Array[Any]], schemaString: String): DataFrame

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  11. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  12. def baseRelationToDataFrame(baseRelation: BaseRelation): DataFrame

    Definition Classes
    SQLContext
  13. def cacheTable(tableName: String): Unit

    Definition Classes
    SQLContext
  14. lazy val catalog: SparklineMetastoreCatalog with OverrideCatalog

    Definition Classes
    SparklineDataContext → HiveContext → SQLContext
  15. def clearCache(): Unit

    Definition Classes
    SQLContext
  16. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  17. lazy val conf: SQLConf

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  18. def configure(): Map[String, String]

    Attributes
    protected
    Definition Classes
    HiveContext
  19. def convertCTAS: Boolean

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext
  20. def convertMetastoreParquet: Boolean

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext
  21. def convertMetastoreParquetWithSchemaMerging: Boolean

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext
  22. def createDataFrame(data: List[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
  23. def createDataFrame(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
  24. def createDataFrame(rdd: RDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
  25. def createDataFrame(rows: List[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @DeveloperApi()
  26. def createDataFrame(rowRDD: JavaRDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @DeveloperApi()
  27. def createDataFrame(rowRDD: RDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @DeveloperApi()
  28. def createDataFrame[A <: Product](data: Seq[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  29. def createDataFrame[A <: Product](rdd: RDD[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  30. def createDataset[T](data: List[T])(implicit arg0: Encoder[T]): Dataset[T]

    Definition Classes
    SQLContext
  31. def createDataset[T](data: RDD[T])(implicit arg0: Encoder[T]): Dataset[T]

    Definition Classes
    SQLContext
  32. def createDataset[T](data: Seq[T])(implicit arg0: Encoder[T]): Dataset[T]

    Definition Classes
    SQLContext
  33. def createExternalTable(tableName: String, source: String, schema: StructType, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  34. def createExternalTable(tableName: String, source: String, schema: StructType, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  35. def createExternalTable(tableName: String, source: String, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  36. def createExternalTable(tableName: String, source: String, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  37. def createExternalTable(tableName: String, path: String, source: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  38. def createExternalTable(tableName: String, path: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  39. def currentDB: String

    Sparkline-specific: presumably the name of the database currently in use by this context.
  40. val ddlParser: DDLParser

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  41. def dialectClassName: String

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  42. def dropTempTable(tableName: String): Unit

    Definition Classes
    SQLContext
  43. lazy val emptyDataFrame: DataFrame

    Definition Classes
    SQLContext
  44. lazy val emptyResult: RDD[InternalRow]

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  45. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  46. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  47. def executePlan(plan: LogicalPlan): QueryExecution

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  48. def executeSql(sql: String): execution.QueryExecution

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  49. lazy val executionHive: ClientWrapper

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  50. val experimental: ExperimentalMethods

    Definition Classes
    SQLContext
  51. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  52. lazy val functionRegistry: FunctionRegistry

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  53. def getAllConfs: Map[String, String]

    Definition Classes
    SQLContext
  54. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  55. def getConf(key: String, defaultValue: String): String

    Definition Classes
    SQLContext
  56. def getConf(key: String): String

    Definition Classes
    SQLContext
  57. def getSQLDialect(): ParserDialect

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SparklineDataContext → HiveContext → SQLContext
  58. def getSchema(beanClass: Class[_]): Seq[AttributeReference]

    Attributes
    protected
    Definition Classes
    SQLContext
  59. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  60. def hiveMetastoreBarrierPrefixes: Seq[String]

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  61. def hiveMetastoreJars: String

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  62. def hiveMetastoreSharedPrefixes: Seq[String]

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  63. def hiveMetastoreVersion: String

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  64. def hiveThriftServerAsync: Boolean

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  65. def hiveThriftServerSingleSession: Boolean

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  66. lazy val hiveconf: HiveConf

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  67. def invalidateTable(tableName: String): Unit

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  68. def isCached(tableName: String): Boolean

    Definition Classes
    SQLContext
  69. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  70. def isTraceEnabled(): Boolean

    Attributes
    protected
    Definition Classes
    Logging
  71. lazy val listenerManager: ExecutionListenerManager

    Definition Classes
    SQLContext
  72. def log: Logger

    Attributes
    protected
    Definition Classes
    Logging
  73. def logDebug(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  74. def logDebug(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  75. def logError(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  76. def logError(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  77. def logInfo(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  78. def logInfo(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  79. def logName: String

    Attributes
    protected
    Definition Classes
    Logging
  80. def logTrace(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  81. def logTrace(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  82. def logWarning(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  83. def logWarning(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  84. lazy val metadataHive: ClientInterface

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  85. val moduleLoader: ModuleLoader

    Sparkline-specific: presumably the loader for the Sparkline extension modules attached to this context.
  86. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  87. def newSession(): HiveContext

    Definition Classes
    SparklineDataContext → HiveContext → SQLContext
  88. final def notify(): Unit

    Definition Classes
    AnyRef
  89. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  90. lazy val optimizer: Optimizer

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SparklineDataContext → SQLContext
  91. def parseDataType(dataTypeString: String): DataType

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  92. def parseSql(sql: String): LogicalPlan

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  93. val planner: SparkPlanner with HiveStrategies

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  94. val prepareForExecution: RuleExecutor[SparkPlan]

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  95. def range(start: Long, end: Long, step: Long, numPartitions: Int): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  96. def range(start: Long, end: Long): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  97. def range(end: Long): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  98. def read: DataFrameReader

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  99. def refreshTable(tableName: String): Unit

    Definition Classes
    HiveContext
  100. def runSqlHive(sql: String): Seq[String]

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  101. def setConf(key: String, value: String): Unit

    Definition Classes
    HiveContext → SQLContext
  102. def setConf(props: Properties): Unit

    Definition Classes
    SQLContext
  103. val sparkContext: SparkContext

    Definition Classes
    SQLContext
  104. def sql(sqlText: String): DataFrame

    Definition Classes
    SQLContext
  105. val sqlParser: SparkSQLParser

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  106. lazy val substitutor: VariableSubstitution

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext
  107. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  108. def table(tableName: String): DataFrame

    Definition Classes
    SQLContext
  109. def tableNames(databaseName: String): Array[String]

    Definition Classes
    SQLContext
  110. def tableNames(): Array[String]

    Definition Classes
    SQLContext
  111. def tables(databaseName: String): DataFrame

    Definition Classes
    SQLContext
  112. def tables(): DataFrame

    Definition Classes
    SQLContext
  113. def toString(): String

    Definition Classes
    AnyRef → Any
  114. val udf: UDFRegistration

    Definition Classes
    SQLContext
  115. def uncacheTable(tableName: String): Unit

    Definition Classes
    SQLContext
  116. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  117. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  118. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
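
Once constructed, the context is used like any HiveContext. A short usage sketch; the table name "orders" is hypothetical:

  // Parse, optimize and plan a query through the Sparkline dialect,
  // catalog and optimizer listed above.
  val df = sqlContext.sql("SELECT * FROM orders WHERE amount > 100")

  // Catalog helpers inherited from SQLContext.
  sqlContext.cacheTable("orders")                  // pin the table in memory
  println(sqlContext.tableNames().mkString(", "))  // tables in the current database
  sqlContext.uncacheTable("orders")

  // Sparkline-specific member; presumably the session's current database.
  println(sqlContext.currentDB)

  // An isolated session: its own SQLConf, UDFs and temporary tables, but the
  // same underlying SparkContext and cached data.
  val session = sqlContext.newSession()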

Deprecated Value Members

  1. def applySchema(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) Use createDataFrame. This will be removed in Spark 2.0.

  2. def applySchema(rdd: RDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) Use createDataFrame. This will be removed in Spark 2.0.

  3. def applySchema(rowRDD: JavaRDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) Use createDataFrame. This will be removed in Spark 2.0.

  4. def applySchema(rowRDD: RDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) Use createDataFrame. This will be removed in Spark 2.0.

  5. def jdbc(url: String, table: String, theParts: Array[String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.jdbc(). This will be removed in Spark 2.0.

  6. def jdbc(url: String, table: String, columnName: String, lowerBound: Long, upperBound: Long, numPartitions: Int): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.jdbc(). This will be removed in Spark 2.0.

  7. def jdbc(url: String, table: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.jdbc(). This will be removed in Spark 2.0.

  8. def jsonFile(path: String, samplingRatio: Double): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  9. def jsonFile(path: String, schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  10. def jsonFile(path: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  11. def jsonRDD(json: JavaRDD[String], samplingRatio: Double): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  12. def jsonRDD(json: RDD[String], samplingRatio: Double): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  13. def jsonRDD(json: JavaRDD[String], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  14. def jsonRDD(json: RDD[String], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  15. def jsonRDD(json: JavaRDD[String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  16. def jsonRDD(json: RDD[String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  17. def load(source: String, schema: StructType, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).schema(schema).options(options).load(). This will be removed in Spark 2.0.

  18. def load(source: String, schema: StructType, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).schema(schema).options(options).load(). This will be removed in Spark 2.0.

  19. def load(source: String, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).options(options).load(). This will be removed in Spark 2.0.

  20. def load(source: String, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).options(options).load(). This will be removed in Spark 2.0.

  21. def load(path: String, source: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).load(path). This will be removed in Spark 2.0.

  22. def load(path: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.load(path). This will be removed in Spark 2.0.

  23. def parquetFile(paths: String*): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated @varargs()
    Deprecated

    (Since version 1.4.0) Use read.parquet(). This will be removed in Spark 2.0.
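
Every deprecated member above names its replacement on the DataFrameReader returned by read. A migration sketch; the paths and the JDBC URL are placeholders:

  // jsonFile(path)         ->  read.json(path)
  val events = sqlContext.read.json("events.json")

  // parquetFile(paths*)    ->  read.parquet(paths*)
  val metrics = sqlContext.read.parquet("metrics.parquet")

  // load(source, options)  ->  read.format(source).options(options).load()
  val sampled = sqlContext.read
    .format("json")
    .options(Map("samplingRatio" -> "0.5"))
    .load("events.json")

  // jdbc(url, table)       ->  read.jdbc(url, table, properties)
  val orders = sqlContext.read.jdbc(
    "jdbc:postgresql://host/db",   // placeholder connection URL
    "orders",                      // placeholder table name
    new java.util.Properties())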
