Instance Constructors
- new ConverterCombineIngestJob(dsParams: Map[String, String], sft: SimpleFeatureType, converterConfig: Config, paths: Seq[String], maxSplitSize: Option[Integer], libjarsFile: String, libjarsPaths: Iterator[() ⇒ Seq[File]])
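A minimal construction sketch. The import paths, data store parameters, feature type spec, converter config name, and lib-jars values below are illustrative assumptions, not values taken from this page:

```scala
// Package paths are assumptions; adjust to the GeoMesa version in use
import org.locationtech.geomesa.tools.ingest.ConverterCombineIngestJob
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import com.typesafe.config.ConfigFactory
import java.io.File

// Connection parameters for the backing data store (keys depend on the store, e.g. Accumulo)
val dsParams: Map[String, String] = Map(
  "accumulo.instance.id" -> "myInstance",
  "accumulo.zookeepers"  -> "zoo1:2181",
  "accumulo.user"        -> "user",
  "accumulo.password"    -> "secret",
  "accumulo.catalog"     -> "geomesa.catalog"
)

// Target feature type and a converter definition loaded from a typesafe-config resource
val sft = SimpleFeatureTypes.createType("example", "name:String,dtg:Date,*geom:Point:srid=4326")
val converterConfig = ConfigFactory.load().getConfig("geomesa.converters.example")

val job = new ConverterCombineIngestJob(
  dsParams,
  sft,
  converterConfig,
  paths        = Seq("hdfs://namenode:8020/data/csv/"),
  maxSplitSize = Some(Integer.valueOf(64 * 1024 * 1024)), // combine inputs into ~64MB splits
  libjarsFile  = "ingest-libjars.list",                   // classpath resource listing jars to ship
  libjarsPaths = Iterator(() => Seq.empty[File])          // local directories to search for those jars
)
```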
Value Members
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def clone(): AnyRef
- def configureJob(job: Job): Unit
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- val failCounters: Seq[(String, String)]
- def failed(job: Job): Long
- def finalize(): Unit
- final def getClass(): Class[_]
- def hashCode(): Int
- val inputFormatClass: Class[ConverterCombineInputFormat]
- final def isInstanceOf[T0]: Boolean
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- def run(statusCallback: StatusCallback, waitForCompletion: Boolean = true): Option[(Long, Long)] (see the usage sketch at the end of this page)
- def setLibJars(job: Job, fileName: String, searchPath: Iterator[() ⇒ Seq[File]]): Unit
- final def synchronized[T0](arg0: ⇒ T0): T0
- def toString(): String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
- def written(job: Job): Long
Inherited from JobWithLibJars
Inherited from AnyRef
Inherited from Any
Distributed job that uses converters to process input files in batches, allowing multiple files to be processed by a single mapper. Batch size is controlled by 'maxSplitSize' and should be scaled with the available mapper memory.
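A minimal sketch of submitting the job. The StatusCallback implementation is left to the caller, the import paths are assumptions, and the interpretation of the returned counts as (written, failed) is assumed rather than stated on this page:

```scala
// Package paths are assumptions; adjust to the GeoMesa version in use
import org.locationtech.geomesa.tools.ingest.ConverterCombineIngestJob
import org.locationtech.geomesa.tools.utils.StatusCallback

def runIngest(job: ConverterCombineIngestJob, callback: StatusCallback): Unit = {
  // waitForCompletion defaults to true, so this call blocks until the MapReduce job finishes
  job.run(callback, waitForCompletion = true) match {
    case Some((written, failed)) =>
      // Tuple order assumed; per-job counters are also exposed via written(job) and failed(job)
      println(s"ingest complete - written: $written, failed: $failed")
    case None =>
      println("no counts returned (e.g. when not waiting for completion)")
  }
}
```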