com.lucidworks.spark

SolrSQLHiveContext

class SolrSQLHiveContext extends HiveContext with Logging

Linear Supertypes
HiveContext, SQLContext, Serializable (scala.Serializable), Serializable (java.io.Serializable), Logging, AnyRef, Any

Instance Constructors

  1. new SolrSQLHiveContext(sparkContext: SparkContext, config: Map[String, String], tablePermissionChecker: Option[TablePermissionChecker] = scala.None)
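
    SolrSQLHiveContext extends HiveContext so that SQL statements referencing Solr-backed tables can be intercepted and, where possible, pushed down to Solr (see sql, processSqlStmt, and registerSolrPushdownQuery below). A minimal construction sketch; the "zkhost" key and its value follow common spark-solr configuration and are assumptions here:

      import org.apache.spark.{SparkConf, SparkContext}
      import com.lucidworks.spark.SolrSQLHiveContext

      val sc = new SparkContext(
        new SparkConf().setAppName("solr-sql-example").setMaster("local[*]"))

      // tablePermissionChecker defaults to None, so no table-level access
      // checks are performed by checkReadAccess/checkWriteAccess.
      val sqlContext = new SolrSQLHiveContext(sc, Map("zkhost" -> "localhost:9983"))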

Type Members

  1. class QueryExecution extends org.apache.spark.sql.execution.QueryExecution

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  2. class SparkPlanner extends org.apache.spark.sql.execution.SparkPlanner

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.6.0) use org.apache.spark.sql.SparkPlanner

Value Members

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. def addJar(path: String): Unit

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  7. def analyze(tableName: String): Unit

    Definition Classes
    HiveContext
  8. lazy val analyzer: Analyzer

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  9. def applySchemaToPythonRDD(rdd: RDD[Array[Any]], schema: StructType): DataFrame

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  10. def applySchemaToPythonRDD(rdd: RDD[Array[Any]], schemaString: String): DataFrame

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  11. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  12. def baseRelationToDataFrame(baseRelation: BaseRelation): DataFrame

    Definition Classes
    SQLContext
  13. val cacheManager: CacheManager

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  14. def cacheTable(tableName: String): Unit

    Definition Classes
    SQLContext
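
    A minimal sketch of the caching API, assuming a temp table named "movies" is already registered on this context (see also isCached and uncacheTable below):

      sqlContext.cacheTable("movies")                       // materialize in the in-memory columnar cache
      assert(sqlContext.isCached("movies"))
      sqlContext.sql("SELECT COUNT(*) FROM movies").show()  // subsequent scans read the cached data
      sqlContext.uncacheTable("movies")                     // release the cached data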
  15. var cachedSQLQueries: Map[String, String]

  16. lazy val catalog: HiveMetastoreCatalog with OverrideCatalog

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  17. def checkReadAccess(resource: String, resourceType: String): Unit

  18. def checkWriteAccess(resource: String, resourceType: String): Unit
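
    A hypothetical usage sketch for the two access-check methods above. The resource and resourceType argument values are assumptions, and the behavior on denial depends entirely on the TablePermissionChecker supplied to the constructor (with None these calls are expected to be no-ops):

      // Both methods return Unit, so a denial presumably surfaces as an exception.
      sqlContext.checkReadAccess("movies", "collection")
      sqlContext.checkWriteAccess("movies", "collection")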

  19. def clearCache(): Unit

    Definition Classes
    SolrSQLHiveContext → SQLContext
  20. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  21. lazy val conf: SQLConf

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  22. val config: Map[String, String]

  23. def configure(): Map[String, String]

    Attributes
    protected
    Definition Classes
    HiveContext
  24. def convertCTAS: Boolean

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext
  25. def convertMetastoreParquet: Boolean

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext
  26. def convertMetastoreParquetWithSchemaMerging: Boolean

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext
  27. def createDataFrame(data: List[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
  28. def createDataFrame(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
  29. def createDataFrame(rdd: RDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
  30. def createDataFrame(rows: List[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @DeveloperApi()
  31. def createDataFrame(rowRDD: JavaRDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @DeveloperApi()
  32. def createDataFrame(rowRDD: RDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @DeveloperApi()
  33. def createDataFrame[A <: Product](data: Seq[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  34. def createDataFrame[A <: Product](rdd: RDD[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
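
    A minimal sketch of the case-class overloads above; the Movie class and rows are illustrative only:

      import org.apache.spark.sql.DataFrame

      case class Movie(title: String, year: Int)

      val movies: DataFrame = sqlContext.createDataFrame(Seq(
        Movie("Metropolis", 1927),
        Movie("Alien", 1979)))
      movies.printSchema()  // title: string, year: int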
  35. def createDataset[T](data: List[T])(implicit arg0: Encoder[T]): Dataset[T]

    Definition Classes
    SQLContext
  36. def createDataset[T](data: RDD[T])(implicit arg0: Encoder[T]): Dataset[T]

    Definition Classes
    SQLContext
  37. def createDataset[T](data: Seq[T])(implicit arg0: Encoder[T]): Dataset[T]

    Definition Classes
    SQLContext
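
    A minimal sketch of createDataset, relying on the implicit encoders provided by sqlContext.implicits:

      import sqlContext.implicits._

      val ds = sqlContext.createDataset(Seq(1, 2, 3))
      ds.map(_ * 2).collect()  // Array(2, 4, 6)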
  38. def createExternalTable(tableName: String, source: String, schema: StructType, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  39. def createExternalTable(tableName: String, source: String, schema: StructType, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  40. def createExternalTable(tableName: String, source: String, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  41. def createExternalTable(tableName: String, source: String, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  42. def createExternalTable(tableName: String, path: String, source: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  43. def createExternalTable(tableName: String, path: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
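
    A minimal sketch of registering a Solr collection as an external table; the "solr" source name and option keys come from the spark-solr data source and are assumptions here:

      sqlContext.createExternalTable(
        "movies_solr",   // table name in the metastore
        "solr",          // data source
        Map("zkhost" -> "localhost:9983", "collection" -> "movies"))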
  44. val ddlParser: DDLParser

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  45. def dialectClassName: String

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  46. def dropTempTable(tableName: String): Unit

    Definition Classes
    SQLContext
  47. lazy val emptyDataFrame: DataFrame

    Definition Classes
    SQLContext
  48. lazy val emptyResult: RDD[InternalRow]

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  49. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  50. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  51. def executePlan(plan: LogicalPlan): QueryExecution

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  52. def executeSql(sql: String): org.apache.spark.sql.execution.QueryExecution

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  53. lazy val executionHive: ClientWrapper

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  54. val experimental: ExperimentalMethods

    Definition Classes
    SQLContext
  55. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  56. lazy val functionRegistry: FunctionRegistry

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  57. def getAllConfs: Map[String, String]

    Definition Classes
    SQLContext
  58. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  59. def getConf(key: String, defaultValue: String): String

    Definition Classes
    SQLContext
  60. def getConf(key: String): String

    Definition Classes
    SQLContext
  61. def getSQLDialect(): ParserDialect

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  62. def getSchema(beanClass: Class[_]): Seq[AttributeReference]

    Attributes
    protected
    Definition Classes
    SQLContext
  63. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  64. def hiveMetastoreBarrierPrefixes: Seq[String]

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  65. def hiveMetastoreJars: String

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  66. def hiveMetastoreSharedPrefixes: Seq[String]

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  67. def hiveMetastoreVersion: String

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  68. def hiveThriftServerAsync: Boolean

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  69. def hiveThriftServerSingleSession: Boolean

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  70. lazy val hiveconf: HiveConf

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  71. def invalidateTable(tableName: String): Unit

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  72. def isCached(tableName: String): Boolean

    Definition Classes
    SQLContext
  73. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  74. val isRootContext: Boolean

    Definition Classes
    SQLContext
  75. def isSolrQuery(sqlText: String): Option[String]

  76. def isTraceEnabled(): Boolean

    Attributes
    protected
    Definition Classes
    Logging
  77. lazy val listenerManager: ExecutionListenerManager

    Definition Classes
    SQLContext
  78. def log: Logger

    Attributes
    protected
    Definition Classes
    Logging
  79. def logDebug(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  80. def logDebug(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  81. def logError(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  82. def logError(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  83. def logInfo(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  84. def logInfo(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  85. def logName: String

    Attributes
    protected
    Definition Classes
    Logging
  86. def logTrace(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  87. def logTrace(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  88. def logWarning(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  89. def logWarning(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  90. lazy val metadataHive: ClientInterface

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  91. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  92. def newSession(): HiveContext

    Definition Classes
    HiveContext → SQLContext
  93. final def notify(): Unit

    Definition Classes
    AnyRef
  94. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  95. lazy val optimizer: Optimizer

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  96. def parseDataType(dataTypeString: String): DataType

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  97. def parseSql(sql: String): LogicalPlan

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  98. val planner: SparkPlanner with HiveStrategies

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext → SQLContext
  99. val prepareForExecution: RuleExecutor[SparkPlan]

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  100. def processPushDownSql(sqlText: String, matcher: Matcher): String

  101. def processSqlStmt(sqlText: String): String

  102. def range(start: Long, end: Long, step: Long, numPartitions: Int): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  103. def range(start: Long, end: Long): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  104. def range(end: Long): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
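
    A minimal sketch of the range overloads above:

      val ids = sqlContext.range(0, 1000, 10, 4)  // 0, 10, ..., 990 in 4 partitions
      ids.count()                                 // 100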
  105. def read: DataFrameReader

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
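
    A minimal sketch of reading a Solr collection through the DataFrameReader; the "solr" format and its option keys come from the spark-solr data source and are assumptions here:

      val movies = sqlContext.read
        .format("solr")
        .option("zkhost", "localhost:9983")
        .option("collection", "movies")
        .load()
      movies.registerTempTable("movies")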
  106. def refreshTable(tableName: String): Unit

    Definition Classes
    HiveContext
  107. def registerSolrPushdownQuery(tempTableName: String, sqlText: String, tableName: String): Unit

  108. def runSqlHive(sql: String): Seq[String]

    Attributes
    protected[org.apache.spark.sql.hive]
    Definition Classes
    HiveContext
  109. def setConf(key: String, value: String): Unit

    Definition Classes
    HiveContext → SQLContext
  110. def setConf(props: Properties): Unit

    Definition Classes
    SQLContext
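
    A minimal sketch of setting and reading back a SQL configuration property:

      sqlContext.setConf("spark.sql.shuffle.partitions", "8")
      sqlContext.getConf("spark.sql.shuffle.partitions")  // "8"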
  111. def sql(sqlText: String): DataFrame

    Definition Classes
    SolrSQLHiveContext → SQLContext
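
    A minimal sketch, assuming the "movies" temp table registered in the read example above. This override is where SolrSQLHiveContext inspects the statement (see isSolrQuery and processSqlStmt), so queries against Solr-backed tables may be rewritten and pushed down to Solr rather than evaluated entirely in Spark:

      val comedies = sqlContext.sql(
        "SELECT title, year FROM movies WHERE genre = 'comedy' LIMIT 10")
      comedies.show()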
  112. val sqlParser: SparkSQLParser

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    SQLContext
  113. lazy val substitutor: VariableSubstitution

    Attributes
    protected[org.apache.spark.sql]
    Definition Classes
    HiveContext
  114. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  115. def table(tableName: String): DataFrame

    Definition Classes
    SQLContext
  116. def tableNames(databaseName: String): Array[String]

    Definition Classes
    SQLContext
  117. def tableNames(): Array[String]

    Definition Classes
    SQLContext
  118. val tablePermissionChecker: Option[TablePermissionChecker]

  119. var tableToResource: Map[String, SecuredResource]

  120. def tables(databaseName: String): DataFrame

    Definition Classes
    SQLContext
  121. def tables(): DataFrame

    Definition Classes
    SQLContext
  122. def toString(): String

    Definition Classes
    AnyRef → Any
  123. val udf: UDFRegistration

    Definition Classes
    SQLContext
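
    A minimal sketch of registering and using a UDF in SQL:

      sqlContext.udf.register("toUpper", (s: String) => s.toUpperCase)
      sqlContext.sql("SELECT toUpper(title) FROM movies").show()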
  124. def uncacheTable(tableName: String): Unit

    Definition Classes
    SQLContext
  125. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  126. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  127. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )

Deprecated Value Members

  1. def applySchema(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) Use createDataFrame. This will be removed in Spark 2.0.

  2. def applySchema(rdd: RDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) Use createDataFrame. This will be removed in Spark 2.0.

  3. def applySchema(rowRDD: JavaRDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) Use createDataFrame. This will be removed in Spark 2.0.

  4. def applySchema(rowRDD: RDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) Use createDataFrame. This will be removed in Spark 2.0.

  5. def jdbc(url: String, table: String, theParts: Array[String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.jdbc(). This will be removed in Spark 2.0.

  6. def jdbc(url: String, table: String, columnName: String, lowerBound: Long, upperBound: Long, numPartitions: Int): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.jdbc(). This will be removed in Spark 2.0.

  7. def jdbc(url: String, table: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.jdbc(). This will be removed in Spark 2.0.

  8. def jsonFile(path: String, samplingRatio: Double): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  9. def jsonFile(path: String, schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  10. def jsonFile(path: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  11. def jsonRDD(json: JavaRDD[String], samplingRatio: Double): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  12. def jsonRDD(json: RDD[String], samplingRatio: Double): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  13. def jsonRDD(json: JavaRDD[String], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  14. def jsonRDD(json: RDD[String], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  15. def jsonRDD(json: JavaRDD[String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  16. def jsonRDD(json: RDD[String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json(). This will be removed in Spark 2.0.

  17. def load(source: String, schema: StructType, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).schema(schema).options(options).load(). This will be removed in Spark 2.0.

  18. def load(source: String, schema: StructType, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).schema(schema).options(options).load(). This will be removed in Spark 2.0.

  19. def load(source: String, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).options(options).load(). This will be removed in Spark 2.0.

  20. def load(source: String, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).options(options).load(). This will be removed in Spark 2.0.

  21. def load(path: String, source: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).load(path). This will be removed in Spark 2.0.

  22. def load(path: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.load(path). This will be removed in Spark 2.0.

  23. def parquetFile(paths: String*): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated @varargs()
    Deprecated

    (Since version 1.4.0) Use read.parquet(). This will be removed in Spark 2.0.
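
The deprecation messages above all point at the DataFrameReader API; a minimal migration sketch (paths, URLs, and table names are illustrative):

  import java.util.Properties

  val logs   = sqlContext.read.json("/data/logs.json")          // replaces jsonFile(...)
  val events = sqlContext.read.parquet("/data/events.parquet")  // replaces parquetFile(...)
  val users  = sqlContext.read.jdbc(                            // replaces jdbc(...)
    "jdbc:postgresql://dbhost:5432/app", "users", new Properties())
  val docs   = sqlContext.read.format("solr")                   // replaces load(source, options)
    .options(Map("zkhost" -> "localhost:9983", "collection" -> "docs"))
    .load()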
