package util
Type Members
-
-
-
case class
PivotField(solrField: String, prefix: String, otherSuffix: String, maxCols: Int) extends Product with Serializable
-
case class
QueryField(name: String, alias: Option[String] = None, funcReturnType: Option[DataType] = None) extends Product with Serializable
-
-
-
class
ScalaUtil extends Serializable
-
case class
SolrFieldMeta(fieldType: String, dynamicBase: Option[String], isRequired: Option[Boolean], isMultiValued: Option[Boolean], isDocValues: Option[Boolean], isStored: Option[Boolean], fieldTypeClass: Option[String]) extends Product with Serializable
-
class
Utils extends AnyRef
Usage:
import SolrDataFrameImplicits._

// then you can:
val spark: SparkSession
val collectionName: String

val df = spark.read.solr(collectionName)
// do stuff
df.write.solr(collectionName, overwrite = true)

// or various other combinations, like setting your own options earlier:
df.write.option("zkhost", "some other solr cluster's zk host").solr(collectionName)