class
SimpleJob extends Job with Logging
Instance Constructors
-
new
SimpleJob(sched: MesosScheduler, tasksSeq: Seq[spark.Task[_]], runId: Int, jobId: Int)
Value Members
-
final
def
!=(arg0: AnyRef): Boolean
-
final
def
!=(arg0: Any): Boolean
-
final
def
##(): Int
-
final
def
==(arg0: AnyRef): Boolean
-
final
def
==(arg0: Any): Boolean
-
val
CPUS_PER_TASK: Double
-
val
EXCEPTION_PRINT_INTERVAL: Long
-
val
LOCALITY_WAIT: Long
-
val
MAX_TASK_FAILURES: Int
-
def
abort(message: String): Unit
-
def
addPendingTask(index: Int): Unit
-
val
allPendingTasks: ArrayBuffer[Int]
-
final
def
asInstanceOf[T0]: T0
-
val
callingThread: Thread
-
var
causeOfFailure: String
-
def
clone(): AnyRef
-
final
def
eq(arg0: AnyRef): Boolean
-
def
equals(arg0: Any): Boolean
-
def
error(message: String): Unit
-
var
failed: Boolean
-
def
finalize(): Unit
-
def
findTask(host: String, localOnly: Boolean): Option[Int]
-
def
findTaskFromList(list: ArrayBuffer[Int]): Option[Int]
-
val
finished: Array[Boolean]
-
final
def
getClass(): java.lang.Class[_]
-
def
getPendingTasksForHost(host: String): ArrayBuffer[Int]
-
def
hashCode(): Int
-
def
initLogging(): Unit
-
final
def
isInstanceOf[T0]: Boolean
-
def
isPreferredLocation(task: spark.Task[_], host: String): Boolean
-
var
lastPreferredLaunchTime: Long
-
val
launched: Array[Boolean]
-
def
log: Logger
-
def
logDebug(msg: ⇒ String, throwable: Throwable): Unit
-
def
logDebug(msg: ⇒ String): Unit
-
def
logError(msg: ⇒ String, throwable: Throwable): Unit
-
def
logError(msg: ⇒ String): Unit
-
def
logInfo(msg: ⇒ String, throwable: Throwable): Unit
-
def
logInfo(msg: ⇒ String): Unit
-
def
logWarning(msg: ⇒ String, throwable: Throwable): Unit
-
def
logWarning(msg: ⇒ String): Unit
-
final
def
ne(arg0: AnyRef): Boolean
-
final
def
notify(): Unit
-
final
def
notifyAll(): Unit
-
val
numFailures: Array[Int]
-
val
numTasks: Int
-
val
pendingTasksForHost: HashMap[String, ArrayBuffer[Int]]
-
val
pendingTasksWithNoPrefs: ArrayBuffer[Int]
-
val
recentExceptions: HashMap[String, (Int, Long)]
-
def
slaveOffer(offer: Offer, availableCpus: Double): Option[TaskInfo]
-
def
statusUpdate(status: TaskStatus): Unit
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
-
def
taskFinished(status: TaskStatus): Unit
-
def
taskLost(status: TaskStatus): Unit
-
val
tasks: Array[spark.Task[_]]
-
var
tasksFinished: Int
-
var
tasksLaunched: Int
-
val
tidToIndex: HashMap[String, Int]
-
def
toString(): String
-
final
def
wait(): Unit
-
final
def
wait(arg0: Long, arg1: Int): Unit
-
final
def
wait(arg0: Long): Unit
Inherited from AnyRef
Inherited from Any
A Job that runs a set of tasks with no interdependencies.