Package com.pulumi.gcp.dataproc.outputs
Class WorkflowTemplateJob
- java.lang.Object
-
- com.pulumi.gcp.dataproc.outputs.WorkflowTemplateJob
-
public final class WorkflowTemplateJob extends java.lang.Object
-
-
Nested Class Summary
Nested Classes Modifier and Type Class Description static class
WorkflowTemplateJob.Builder
-
Method Summary
All Methods Static Methods Instance Methods Concrete Methods Modifier and Type Method Description static WorkflowTemplateJob.Builder
builder()
static WorkflowTemplateJob.Builder
builder(WorkflowTemplateJob defaults)
java.util.Optional<WorkflowTemplateJobHadoopJob>
hadoopJob()
java.util.Optional<WorkflowTemplateJobHiveJob>
hiveJob()
java.util.Map<java.lang.String,java.lang.String>
labels()
java.util.Optional<WorkflowTemplateJobPigJob>
pigJob()
java.util.List<java.lang.String>
prerequisiteStepIds()
java.util.Optional<WorkflowTemplateJobPrestoJob>
prestoJob()
java.util.Optional<WorkflowTemplateJobPysparkJob>
pysparkJob()
java.util.Optional<WorkflowTemplateJobScheduling>
scheduling()
java.util.Optional<WorkflowTemplateJobSparkJob>
sparkJob()
java.util.Optional<WorkflowTemplateJobSparkRJob>
sparkRJob()
java.util.Optional<WorkflowTemplateJobSparkSqlJob>
sparkSqlJob()
java.lang.String
stepId()
-
-
-
Method Detail
-
hadoopJob
public java.util.Optional<WorkflowTemplateJobHadoopJob> hadoopJob()
- Returns:
- Job is a Hadoop job.
-
hiveJob
public java.util.Optional<WorkflowTemplateJobHiveJob> hiveJob()
- Returns:
- Job is a Hive job.
-
labels
public java.util.Map<java.lang.String,java.lang.String> labels()
- Returns:
- The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: `\p{Ll}\p{Lo}{0,62}`. Label values must be between 1 and 63 characters long, and must conform to the following regular expression: `[\p{Ll}\p{Lo}\p{N}_-]{0,63}`. No more than 32 labels can be associated with a given job.
-
pigJob
public java.util.Optional<WorkflowTemplateJobPigJob> pigJob()
- Returns:
- Job is a Pig job.
-
prerequisiteStepIds
public java.util.List<java.lang.String> prerequisiteStepIds()
- Returns:
- The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of the workflow.
-
prestoJob
public java.util.Optional<WorkflowTemplateJobPrestoJob> prestoJob()
- Returns:
- Job is a Presto job.
-
pysparkJob
public java.util.Optional<WorkflowTemplateJobPysparkJob> pysparkJob()
- Returns:
- Job is a PySpark job.
-
scheduling
public java.util.Optional<WorkflowTemplateJobScheduling> scheduling()
- Returns:
- Job scheduling configuration.
-
sparkJob
public java.util.Optional<WorkflowTemplateJobSparkJob> sparkJob()
- Returns:
- Job is a Spark job.
-
sparkRJob
public java.util.Optional<WorkflowTemplateJobSparkRJob> sparkRJob()
- Returns:
- Job is a SparkR job.
-
sparkSqlJob
public java.util.Optional<WorkflowTemplateJobSparkSqlJob> sparkSqlJob()
- Returns:
- Job is a SparkSql job.
-
stepId
public java.lang.String stepId()
- Returns:
- Required. The step id. The id must be unique among all jobs within the template. The step id is used as prefix for job id, as job `goog-dataproc-workflow-step-id` label, and in the `prerequisiteStepIds` field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters.
-
builder
public static WorkflowTemplateJob.Builder builder()
-
builder
public static WorkflowTemplateJob.Builder builder(WorkflowTemplateJob defaults)
-
-