Class RddRdfLoaders

java.lang.Object
net.sansa_stack.spark.io.rdf.input.impl.RddRdfLoaders

public class RddRdfLoaders extends Object
  • Constructor Summary

    Constructors
    Constructor
    Description
     
  • Method Summary

    Modifier and Type
    Method
    Description
    static <T> org.apache.spark.api.java.JavaRDD<T>
    asJavaRdd(org.apache.spark.rdd.RDD<scala.Tuple2<org.apache.hadoop.io.LongWritable,T>> rdd)
    Tiny helper that extracts the value component from an RDD of (LongWritable, T) pairs, yielding the desired JavaRDD&lt;T&gt;.
    static <T> RddRdfLoader<T>
    create(Class<T> clazz, Class<? extends org.apache.hadoop.mapreduce.lib.input.FileInputFormat<org.apache.hadoop.io.LongWritable,T>> fileInputFormat)
    Creates a default RddRdfLoader for the given value class and Hadoop FileInputFormat.
    static <T> org.apache.spark.api.java.JavaRDD<T>
    createJavaRdd(org.apache.spark.SparkContext sparkContext, String path, Class<T> clazz, Class<? extends org.apache.hadoop.mapreduce.lib.input.FileInputFormat<org.apache.hadoop.io.LongWritable,T>> fileInputFormat)
     
    static <T> org.apache.spark.rdd.RDD<T>
    createRdd(org.apache.spark.SparkContext sparkContext, String path, Class<T> clazz, Class<? extends org.apache.hadoop.mapreduce.lib.input.FileInputFormat<org.apache.hadoop.io.LongWritable,T>> fileInputFormat)
     
    static <T> org.apache.spark.rdd.RDD<scala.Tuple2<org.apache.hadoop.io.LongWritable,T>>
    createRddOfDatasetCore(org.apache.spark.SparkContext sparkContext, String path, Class<T> clazz, Class<? extends org.apache.hadoop.mapreduce.lib.input.FileInputFormat<org.apache.hadoop.io.LongWritable,T>> fileInputFormat)
     

    Methods inherited from class java.lang.Object

    clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
  • Constructor Details

    • RddRdfLoaders

      public RddRdfLoaders()
  • Method Details

    • create

      public static <T> RddRdfLoader<T> create(Class<T> clazz, Class<? extends org.apache.hadoop.mapreduce.lib.input.FileInputFormat<org.apache.hadoop.io.LongWritable,T>> fileInputFormat)
      Creates a default RddRdfLoader for the given value class and Hadoop FileInputFormat.
    • createJavaRdd

      public static <T> org.apache.spark.api.java.JavaRDD<T> createJavaRdd(org.apache.spark.SparkContext sparkContext, String path, Class<T> clazz, Class<? extends org.apache.hadoop.mapreduce.lib.input.FileInputFormat<org.apache.hadoop.io.LongWritable,T>> fileInputFormat)
    • createRdd

      public static <T> org.apache.spark.rdd.RDD<T> createRdd(org.apache.spark.SparkContext sparkContext, String path, Class<T> clazz, Class<? extends org.apache.hadoop.mapreduce.lib.input.FileInputFormat<org.apache.hadoop.io.LongWritable,T>> fileInputFormat)
    • createRddOfDatasetCore

      public static <T> org.apache.spark.rdd.RDD<scala.Tuple2<org.apache.hadoop.io.LongWritable,T>> createRddOfDatasetCore(org.apache.spark.SparkContext sparkContext, String path, Class<T> clazz, Class<? extends org.apache.hadoop.mapreduce.lib.input.FileInputFormat<org.apache.hadoop.io.LongWritable,T>> fileInputFormat)
    • asJavaRdd

      public static <T> org.apache.spark.api.java.JavaRDD<T> asJavaRdd(org.apache.spark.rdd.RDD<scala.Tuple2<org.apache.hadoop.io.LongWritable,T>> rdd)
      Tiny helper that extracts the value component from an RDD of (LongWritable, T) pairs, yielding the desired JavaRDD&lt;T&gt;.