package
objects
Type Members
-
-
-
case class
Ec2Resource(id: String, terminateAfter: String, role: Option[String], resourceRole: Option[String], instanceType: String, region: Option[String], imageId: Option[String], securityGroups: Seq[String], securityGroupIds: Seq[String], associatePublicIpAddress: Boolean)(implicit hc: HyperionContext) extends ResourceObject with Product with Serializable
-
-
-
-
-
-
case class
JarActivity(id: String, runsOn: Ec2Resource, jar: Option[String] = None, mainClass: Option[String] = None, arguments: Seq[String] = Seq(), dependsOn: Seq[PipelineActivity] = Seq(), input: Option[S3DataNode] = None, output: Option[S3DataNode] = None, stdout: Option[String] = None, stderr: Option[String] = None)(implicit hc: HyperionContext) extends PipelineActivity with Product with Serializable
-
-
case class
MapReduceCluster(id: String = "MapReduceCluster", taskInstanceCount: Int = 0)(implicit hc: HyperionContext) extends EmrCluster with Product with Serializable
-
case class
MapReduceStep(jar: String = "", mainClass: String = "", args: Seq[String] = List()) extends Product with Serializable
-
-
-
-
-
case class
RedshiftDataNode(id: String, database: RedshiftDatabase, tableName: String, createTableSql: Option[String] = None, schemaName: Option[String] = None, primaryKeys: Option[Seq[String]] = None) extends PipelineObject with Product with Serializable
-
-
-
-
-
-
-
case class
S3File(id: String, filePath: String = "", dataFormat: Option[DataFormat] = None) extends S3DataNode with Product with Serializable
-
case class
S3Folder(id: String, directoryPath: String = "", dataFormat: Option[DataFormat] = None) extends S3DataNode with Product with Serializable
-
case class
Schedule(id: String = "PipelineSchedule", start: Option[github.nscala_time.time.Imports.DateTime] = None, period: DpPeriod = 1.day, occurrences: Option[Int] = None, scheduleType: ScheduleType = Cron) extends PipelineObject with Product with Serializable
-
case class
ShellCommandActivity(id: String, runsOn: Ec2Resource, command: Option[String] = None, scriptUri: Option[String] = None, scriptArguments: Seq[String] = Seq(), stage: Boolean = true, input: Option[S3DataNode] = None, output: Option[S3DataNode] = None, dependsOn: Seq[PipelineActivity] = Seq(), stdout: Option[String] = None, stderr: Option[String] = None) extends PipelineActivity with Product with Serializable
-
-
case class
SparkCluster(id: String, taskInstanceCount: Int, coreInstanceCount: Int, instanceType: String, amiVersion: String, sparkVersion: String, terminateAfter: String)(implicit hc: HyperionContext) extends EmrCluster with Product with Serializable
-
case class
SparkStep(jar: String = "", mainClass: String = "", args: Seq[String] = List())(implicit hc: HyperionContext) extends Product with Serializable
-
case class
TsvDataFormat(id: String, column: Option[Seq[String]] = None) extends DataFormat with Product with Serializable
Cron-like schedule that runs at a defined period.
If the given start time is in the past, the data pipeline will perform a backfill from that start time.