Object

com.mongodb.spark.api.java

MongoSpark

object MongoSpark

Linear Supertypes
AnyRef, Any

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes: AnyRef → Any
  2. final def ##(): Int
    Definition Classes: AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes: AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes: Any
  5. def clone(): AnyRef
    Attributes: protected[java.lang]
    Definition Classes: AnyRef
    Annotations: @throws( ... )
  6. final def eq(arg0: AnyRef): Boolean
    Definition Classes: AnyRef
  7. def equals(arg0: Any): Boolean
    Definition Classes: AnyRef → Any
  8. def finalize(): Unit
    Attributes: protected[java.lang]
    Definition Classes: AnyRef
    Annotations: @throws( classOf[java.lang.Throwable] )
  9. final def getClass(): Class[_]
    Definition Classes: AnyRef → Any
  10. def hashCode(): Int
    Definition Classes: AnyRef → Any
  11. final def isInstanceOf[T0]: Boolean
    Definition Classes: Any

  12. def load[D](dataFrameReader: DataFrameReader, clazz: Class[D]): DataFrame

    Load data from MongoDB.

    D - the type of Document to return
    dataFrameReader - the DataFrameReader to load
    clazz - the Java Bean class representing the schema for the DataFrame
    returns - a DataFrame

  13. def load(dataFrameReader: DataFrameReader): DataFrame

    Load data from MongoDB.

    dataFrameReader - the DataFrameReader to load
    returns - a DataFrame
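
    Example: a minimal Java sketch of the two DataFrameReader overloads above. The connection URI, the
    Person bean and the class name are assumptions for illustration and do not come from this Scaladoc.

        import org.apache.spark.SparkConf;
        import org.apache.spark.api.java.JavaSparkContext;
        import org.apache.spark.sql.DataFrame;
        import org.apache.spark.sql.SQLContext;
        import com.mongodb.spark.api.java.MongoSpark;

        public final class MongoSparkLoadSketch {

            // Hypothetical Java bean used only to declare the DataFrame schema.
            public static final class Person implements java.io.Serializable {
                private String name;
                private Integer age;
                public String getName() { return name; }
                public void setName(String name) { this.name = name; }
                public Integer getAge() { return age; }
                public void setAge(Integer age) { this.age = age; }
            }

            public static void main(String[] args) {
                SparkConf conf = new SparkConf()
                        .setAppName("mongo-spark-load-sketch")
                        .setMaster("local[*]")
                        .set("spark.mongodb.input.uri", "mongodb://localhost/test.people"); // assumed URI
                JavaSparkContext jsc = new JavaSparkContext(conf);
                SQLContext sqlContext = new SQLContext(jsc);

                // load(dataFrameReader): the schema is inferred by sampling the collection.
                DataFrame inferred = MongoSpark.load(sqlContext.read());
                inferred.printSchema();

                // load(dataFrameReader, clazz): the schema is taken from the Person bean.
                DataFrame declared = MongoSpark.load(sqlContext.read(), Person.class);
                declared.show();

                jsc.stop();
            }
        }

    The later sketches in this listing reuse the jsc, sqlContext and Person definitions from this one.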

  14. def load[D](sqlContext: SQLContext, connector: MongoConnector, readConfig: ReadConfig, pipeline: List[Bson], clazz: Class[D]): JavaMongoRDD[D]

    Load data from MongoDB.

    D - the type of Document to return
    sqlContext - the SQLContext
    connector - the com.mongodb.spark.MongoConnector
    readConfig - the com.mongodb.spark.config.ReadConfig
    pipeline - the aggregate pipeline
    clazz - the class of the return type for the RDD
    returns - a MongoRDD

  15. def load(sqlContext: SQLContext, connector: MongoConnector, readConfig: ReadConfig, pipeline: List[Bson]): JavaMongoRDD[Document]

    Load data from MongoDB.

    sqlContext - the SQLContext
    connector - the com.mongodb.spark.MongoConnector
    readConfig - the com.mongodb.spark.config.ReadConfig
    pipeline - the aggregate pipeline
    returns - a MongoRDD
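
    Example: a sketch of the fully explicit overload in entry 15, reusing the jsc and sqlContext from the
    sketch under entry 13. It assumes the MongoConnector companion's apply(SparkConf) factory is callable
    from Java; the $match filter is made up for illustration.

        // needs: java.util.Collections, java.util.List, org.bson.Document, org.bson.conversions.Bson,
        //        com.mongodb.spark.MongoConnector, com.mongodb.spark.config.ReadConfig,
        //        com.mongodb.spark.rdd.api.java.JavaMongoRDD
        MongoConnector connector = MongoConnector.apply(jsc.sc().getConf()); // assumption: companion apply is usable from Java
        ReadConfig readConfig = ReadConfig.create(jsc);                      // read settings taken from the SparkConf

        List<Bson> pipeline = Collections.<Bson>singletonList(
                Document.parse("{ $match: { age: { $gt: 100 } } }"));        // made-up filter

        JavaMongoRDD<Document> matched =
                MongoSpark.load(sqlContext, connector, readConfig, pipeline);
        System.out.println(matched.count());

    Entry 14 is the same call with a trailing Class argument that types the RDD elements, provided a codec
    for that class is available.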

  16. def load[D](sqlContext: SQLContext, connector: MongoConnector, readConfig: ReadConfig, clazz: Class[D]): JavaMongoRDD[D]

    Load data from MongoDB.

    D - the type of Document to return
    sqlContext - the SQLContext
    connector - the com.mongodb.spark.MongoConnector
    readConfig - the com.mongodb.spark.config.ReadConfig
    clazz - the class of the return type for the RDD
    returns - a MongoRDD

  17. def load(sqlContext: SQLContext, connector: MongoConnector, readConfig: ReadConfig): JavaMongoRDD[Document]

    Load data from MongoDB.

    sqlContext - the SQLContext
    connector - the com.mongodb.spark.MongoConnector
    readConfig - the com.mongodb.spark.config.ReadConfig
    returns - a MongoRDD

  18. def load[D](sqlContext: SQLContext, connector: MongoConnector, clazz: Class[D]): JavaMongoRDD[D]

    Load data from MongoDB.

    D - the type of Document to return
    sqlContext - the SQLContext
    connector - the com.mongodb.spark.MongoConnector
    clazz - the class of the return type for the RDD
    returns - a MongoRDD

  19. def load(sqlContext: SQLContext, connector: MongoConnector): JavaMongoRDD[Document]

    Load data from MongoDB.

    sqlContext - the SQLContext
    connector - the com.mongodb.spark.MongoConnector
    returns - a MongoRDD

  20. def load[D](sqlContext: SQLContext, clazz: Class[D]): JavaMongoRDD[D]

    Load data from MongoDB.

    D - the type of Document to return
    sqlContext - the SQLContext containing the MongoDB connection configuration
    clazz - the class of the return type for the RDD
    returns - a MongoRDD

  21. def load(sqlContext: SQLContext): JavaMongoRDD[Document]

    Load data from MongoDB.

    sqlContext - the SQLContext containing the MongoDB connection configuration
    returns - a MongoRDD
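
    Example: a sketch of entries 20 and 21, reusing the sqlContext and Person bean from the sketch under
    entry 13; the database and collection come from spark.mongodb.input.uri in the SparkConf.

        JavaMongoRDD<Document> documents = MongoSpark.load(sqlContext);          // entry 21
        System.out.println(documents.count());

        // Entry 20 types the RDD elements with a bean class; this assumes a codec for Person is
        // available to the MongoDB driver.
        JavaMongoRDD<Person> people = MongoSpark.load(sqlContext, Person.class);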

  22. def load[D](jsc: JavaSparkContext, connector: MongoConnector, readConfig: ReadConfig, pipeline: List[Bson], clazz: Class[D]): JavaMongoRDD[D]

    Load data from MongoDB.

    D - the type of Document to return
    jsc - the Java Spark context
    connector - the com.mongodb.spark.MongoConnector
    readConfig - the com.mongodb.spark.config.ReadConfig
    pipeline - the aggregate pipeline
    clazz - the class of the return type for the RDD
    returns - a MongoRDD

  23. def load(jsc: JavaSparkContext, connector: MongoConnector, readConfig: ReadConfig, pipeline: List[Bson]): JavaMongoRDD[Document]

    Load data from MongoDB.

    jsc - the Java Spark context
    connector - the com.mongodb.spark.MongoConnector
    readConfig - the com.mongodb.spark.config.ReadConfig
    pipeline - the aggregate pipeline
    returns - a MongoRDD
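
    Example: the pipeline overloads push an aggregation stage down to MongoDB before the data reaches
    Spark. Rather than constructing a MongoConnector by hand (the sketch under entry 15 shows that), the
    sketch below gets the same effect from the simpler load(jsc) overload combined with
    JavaMongoRDD.withPipeline; the $match stage is made up for illustration.

        // Reuses jsc from the sketch under entry 13.
        // needs: java.util.Collections, org.bson.Document, com.mongodb.spark.rdd.api.java.JavaMongoRDD
        JavaMongoRDD<Document> rdd = MongoSpark.load(jsc);
        JavaMongoRDD<Document> filtered = rdd.withPipeline(
                Collections.singletonList(Document.parse("{ $match: { age: { $gt: 100 } } }")));
        System.out.println(filtered.count());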

  24. def load[D](jsc: JavaSparkContext, connector: MongoConnector, readConfig: ReadConfig, clazz: Class[D]): JavaMongoRDD[D]

    Load data from MongoDB.

    D - the type of Document to return
    jsc - the Java Spark context
    connector - the com.mongodb.spark.MongoConnector
    readConfig - the com.mongodb.spark.config.ReadConfig
    clazz - the class of the return type for the RDD
    returns - a MongoRDD

  25. def load(jsc: JavaSparkContext, connector: MongoConnector, readConfig: ReadConfig): JavaMongoRDD[Document]

    Load data from MongoDB.

    jsc - the Java Spark context
    connector - the com.mongodb.spark.MongoConnector
    readConfig - the com.mongodb.spark.config.ReadConfig
    returns - a MongoRDD

  26. def load[D](jsc: JavaSparkContext, connector: MongoConnector, clazz: Class[D]): JavaMongoRDD[D]

    Load data from MongoDB.

    D - the type of Document to return
    jsc - the Java Spark context
    connector - the com.mongodb.spark.MongoConnector
    clazz - the class of the return type for the RDD
    returns - a MongoRDD

  27. def load(jsc: JavaSparkContext, connector: MongoConnector): JavaMongoRDD[Document]

    Load data from MongoDB.

    jsc - the Java Spark context
    connector - the com.mongodb.spark.MongoConnector
    returns - a MongoRDD

  28. def load[D](jsc: JavaSparkContext, readConfig: ReadConfig, clazz: Class[D]): JavaMongoRDD[D]

    Load data from MongoDB.

    D - the type of Document to return
    jsc - the Java Spark context
    readConfig - the com.mongodb.spark.config.ReadConfig
    clazz - the class of the return type for the RDD
    returns - a MongoRDD

  29. def load(jsc: JavaSparkContext, readConfig: ReadConfig): JavaMongoRDD[Document]

    Load data from MongoDB.

    jsc - the Java Spark context
    readConfig - the com.mongodb.spark.config.ReadConfig
    returns - a MongoRDD
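
    Example: a sketch of entries 28 and 29, reusing the jsc from the sketch under entry 13. The collection
    name and read preference are illustrative overrides; any key accepted by
    com.mongodb.spark.config.ReadConfig can be supplied the same way.

        // needs: java.util.HashMap, java.util.Map, com.mongodb.spark.config.ReadConfig
        Map<String, String> readOverrides = new HashMap<String, String>();
        readOverrides.put("collection", "people");                        // hypothetical collection
        readOverrides.put("readPreference.name", "secondaryPreferred");
        ReadConfig readConfig = ReadConfig.create(jsc).withOptions(readOverrides);

        JavaMongoRDD<Document> rdd = MongoSpark.load(jsc, readConfig);    // entry 29
        // Entry 28 adds a bean class for the element type, assuming a codec for it is available:
        // JavaMongoRDD<Person> people = MongoSpark.load(jsc, readConfig, Person.class);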

  30. def load[D](jsc: JavaSparkContext, clazz: Class[D]): JavaMongoRDD[D]

    Load data from MongoDB.

    D - the type of Document to return
    jsc - the Spark context containing the MongoDB connection configuration
    clazz - the class of the return type for the RDD
    returns - a MongoRDD

  31. def load(jsc: JavaSparkContext): JavaMongoRDD[Document]

    Load data from MongoDB.

    jsc - the Spark context containing the MongoDB connection configuration
    returns - a MongoRDD
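
    Example: a sketch of the simplest RDD entry points (entries 30 and 31), reusing the jsc from the sketch
    under entry 13; the database and collection are taken from spark.mongodb.input.uri.

        JavaMongoRDD<Document> rdd = MongoSpark.load(jsc);   // entry 31
        System.out.println(rdd.count());
        System.out.println(rdd.first().toJson());
        DataFrame df = rdd.toDF();                           // the RDD also converts to a DataFrame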

  32. final def ne(arg0: AnyRef): Boolean
    Definition Classes: AnyRef
  33. final def notify(): Unit
    Definition Classes: AnyRef
  34. final def notifyAll(): Unit
    Definition Classes: AnyRef

  35. def read(sqlContext: SQLContext): DataFrameReader

    Creates a DataFrameReader with MongoDB as the underlying input data source.

    Uses the SparkConf for the database and collection information.

    sqlContext - the SQLContext
    returns - the DataFrameReader
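
    Example: read(sqlContext) returns an ordinary DataFrameReader that is already pointed at the MongoDB
    data source, so the usual reader options can be set before calling load(). This sketch reuses the
    sqlContext from the sketch under entry 13; the collection override is made up.

        DataFrame df = MongoSpark.read(sqlContext)
                .option("collection", "people")              // hypothetical per-read override
                .load();
        df.printSchema();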

  36. def save[D](javaRDD: JavaRDD[D], writeConfig: WriteConfig, clazz: Class[D]): Unit

    Save data to MongoDB.

    Uses the writeConfig for the database information. Requires a codec for the data type.

    D - the type of the data in the RDD
    javaRDD - the RDD data to save to MongoDB
    writeConfig - the com.mongodb.spark.config.WriteConfig
    clazz - the class of the data contained in the RDD

  37. def save(javaRDD: JavaRDD[Document], writeConfig: WriteConfig): Unit

    Save data to MongoDB.

    Uses the writeConfig for the database information.

    javaRDD - the RDD data to save to MongoDB
    writeConfig - the com.mongodb.spark.config.WriteConfig
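
    Example: a sketch of entry 37, reusing the jsc from the sketch under entry 13 and assuming
    spark.mongodb.output.uri is also set in the SparkConf. The target collection and write concern are
    illustrative overrides.

        // needs: java.util.*, org.apache.spark.api.java.JavaRDD, com.mongodb.spark.config.WriteConfig
        Map<String, String> writeOverrides = new HashMap<String, String>();
        writeOverrides.put("collection", "people_backup");   // hypothetical target collection
        writeOverrides.put("writeConcern.w", "majority");
        WriteConfig writeConfig = WriteConfig.create(jsc).withOptions(writeOverrides);

        JavaRDD<Document> documents = jsc.parallelize(Arrays.asList(
                Document.parse("{ name: 'Bilbo Baggins', age: 50 }"),
                Document.parse("{ name: 'Gandalf', age: 1000 }")));
        MongoSpark.save(documents, writeConfig);             // entry 37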

  38. def save[D](javaRDD: JavaRDD[D], clazz: Class[D]): Unit

    Save data to MongoDB.

    Uses the SparkConf for the database and collection information. Requires a codec for the data type.

    D - the type of the data in the RDD
    javaRDD - the RDD data to save to MongoDB
    clazz - the class of the data contained in the RDD

  39. def save(javaRDD: JavaRDD[Document]): Unit

    Save data to MongoDB.

    Uses the SparkConf for the database and collection information.

    javaRDD - the RDD data to save to MongoDB
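
    Example: a sketch of entry 39, reusing the jsc from the sketch under entry 13 and assuming
    spark.mongodb.output.uri names the target database and collection (e.g. "mongodb://localhost/test.people").

        JavaRDD<Document> docs = jsc.parallelize(Arrays.asList(
                Document.parse("{ name: 'Frodo Baggins', age: 33 }")));
        MongoSpark.save(docs);                               // entry 39

    Entry 38 is the bean-typed variant, e.g. MongoSpark.save(peopleRdd, Person.class), and, as that entry
    notes, it needs a codec for the bean type.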

  40. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes: AnyRef
  41. def toString(): String
    Definition Classes: AnyRef → Any
  42. final def wait(): Unit
    Definition Classes: AnyRef
    Annotations: @throws( ... )
  43. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes: AnyRef
    Annotations: @throws( ... )
  44. final def wait(arg0: Long): Unit
    Definition Classes: AnyRef
    Annotations: @throws( ... )

  45. def write(dataFrame: DataFrame): DataFrameWriter

    Creates a DataFrameWriter with MongoDB as the underlying output data source.

    Uses the SparkConf for the database and collection information.

    dataFrame - the DataFrame to convert into a DataFrameWriter
    returns - the DataFrameWriter
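
    Example: write(dataFrame) returns an ordinary DataFrameWriter that is already pointed at the MongoDB
    data source. This sketch reuses the sqlContext from the sketch under entry 13, assumes
    spark.mongodb.output.uri is set, and uses a made-up target collection.

        DataFrame df = MongoSpark.load(sqlContext.read());
        MongoSpark.write(df)
                .option("collection", "people_copy")         // hypothetical target collection
                .mode("overwrite")
                .save();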
