public static class VariantsSparkSink.SparkVCFOutputFormat
extends org.seqdoop.hadoop_bam.KeyIgnoringVCFOutputFormat<org.apache.hadoop.io.NullWritable>
| Modifier and Type | Field and Description |
|---|---|
| static htsjdk.variant.vcf.VCFHeader | vcfHeader |
Fields inherited from class org.seqdoop.hadoop_bam.KeyIgnoringVCFOutputFormat: header, WRITE_HEADER_PROPERTY
| Constructor and Description |
|---|
| SparkVCFOutputFormat() |
| Modifier and Type | Method and Description |
|---|---|
| void | checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext job) |
| org.apache.hadoop.mapreduce.RecordWriter<org.apache.hadoop.io.NullWritable,org.seqdoop.hadoop_bam.VariantContextWritable> | getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext ctx) |
| static void | setVCFHeader(htsjdk.variant.vcf.VCFHeader header) |
Methods inherited from class org.seqdoop.hadoop_bam.KeyIgnoringVCFOutputFormat: getHeader, getRecordWriter, getRecordWriter, readHeaderFrom, readHeaderFrom, setHeader
Methods inherited from class org.apache.hadoop.mapreduce.lib.output.FileOutputFormat: getCompressOutput, getDefaultWorkFile, getOutputCommitter, getOutputCompressorClass, getOutputName, getOutputPath, getPathForWorkFile, getUniqueFile, getWorkOutputPath, setCompressOutput, setOutputCompressorClass, setOutputName, setOutputPath
public static void setVCFHeader(htsjdk.variant.vcf.VCFHeader header)
public org.apache.hadoop.mapreduce.RecordWriter<org.apache.hadoop.io.NullWritable,org.seqdoop.hadoop_bam.VariantContextWritable> getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext ctx) throws java.io.IOException
Overrides: getRecordWriter in class org.seqdoop.hadoop_bam.KeyIgnoringVCFOutputFormat<org.apache.hadoop.io.NullWritable>
Throws: java.io.IOException
public void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext job) throws java.io.IOException
Overrides: checkOutputSpecs in class org.apache.hadoop.mapreduce.lib.output.FileOutputFormat<org.apache.hadoop.io.NullWritable,org.seqdoop.hadoop_bam.VariantContextWritable>
Throws: java.io.IOException