jupyter.spark.JupyterSparkContext
Related Docs: object JupyterSparkContext | package spark

class JupyterSparkContext extends SparkContext

Linear Supertypes: SparkContext, ExecutorAllocationClient, Logging, AnyRef, Any
Ordering
Alphabetic
By Inheritance
Inherited
JupyterSparkContext
SparkContext
ExecutorAllocationClient
Logging
AnyRef
Any
Hide All
Show All
Visibility
Public
All
Instance Constructors

new JupyterSparkContext()(implicit interpApi: InterpAPI, runtimeApi: RuntimeAPI)

new JupyterSparkContext(config: SparkConf)(implicit interpApi: InterpAPI, runtimeApi: RuntimeAPI)
Value Members
final
def
!=
(
arg0:
Any
)
:
Boolean
Definition Classes
AnyRef → Any
final
def
##
()
:
Int
Definition Classes
AnyRef → Any
final
def
==
(
arg0:
Any
)
:
Boolean
Definition Classes
AnyRef → Any
def
addFile
(
path:
String
,
recursive:
Boolean
)
:
Unit
Definition Classes
SparkContext
def
addFile
(
path:
String
)
:
Unit
Definition Classes
SparkContext
def
addJar
(
path:
String
)
:
Unit
Definition Classes
SparkContext
def
addSparkListener
(
listener:
SparkListenerInterface
)
:
Unit
Definition Classes
SparkContext
Annotations
@DeveloperApi
()
def
appName
:
String
Definition Classes
SparkContext
def
applicationAttemptId
:
Option
[
String
]
Definition Classes
SparkContext
def
applicationId
:
String
Definition Classes
SparkContext
final
def
asInstanceOf
[
T0
]
:
T0
Definition Classes
Any
def
binaryFiles
(
path:
String
,
minPartitions:
Int
)
:
RDD
[(
String
,
PortableDataStream
)]
Definition Classes
SparkContext
def
binaryRecords
(
path:
String
,
recordLength:
Int
,
conf:
Configuration
)
:
RDD
[
Array
[
Byte
]]
Definition Classes
SparkContext
def
broadcast
[
T
]
(
value:
T
)
(
implicit
arg0:
ClassTag
[
T
]
)
:
Broadcast
[
T
]
Definition Classes
SparkContext
def
cancelAllJobs
()
:
Unit
Definition Classes
SparkContext
def
cancelJobGroup
(
groupId:
String
)
:
Unit
Definition Classes
SparkContext
def
checkpointFile
[
T
]
(
path:
String
)
(
implicit
arg0:
ClassTag
[
T
]
)
:
RDD
[
T
]
Attributes
protected[
org.apache.spark
]
Definition Classes
SparkContext
def
clearCallSite
()
:
Unit
Definition Classes
SparkContext
def
clearJobGroup
()
:
Unit
Definition Classes
SparkContext
def
clone
()
:
AnyRef
Attributes
protected[
java.lang
]
Definition Classes
AnyRef
Annotations
@throws
(
...
)
def
collectionAccumulator
[
T
]
(
name:
String
)
:
CollectionAccumulator
[
T
]
Definition Classes
SparkContext
def
collectionAccumulator
[
T
]
:
CollectionAccumulator
[
T
]
Definition Classes
SparkContext
def
defaultMinPartitions
:
Int
Definition Classes
SparkContext
def
defaultParallelism
:
Int
Definition Classes
SparkContext
def
deployMode
:
String
Definition Classes
SparkContext
def
doubleAccumulator
(
name:
String
)
:
DoubleAccumulator
Definition Classes
SparkContext
def
doubleAccumulator
:
DoubleAccumulator
Definition Classes
SparkContext
def
emptyRDD
[
T
]
(
implicit
arg0:
ClassTag
[
T
]
)
:
RDD
[
T
]
Definition Classes
SparkContext
final
def
eq
(
arg0:
AnyRef
)
:
Boolean
Definition Classes
AnyRef
def
equals
(
arg0:
Any
)
:
Boolean
Definition Classes
AnyRef → Any
def
files
:
Seq
[
String
]
Definition Classes
SparkContext
def
finalize
()
:
Unit
Attributes
protected[
java.lang
]
Definition Classes
AnyRef
Annotations
@throws
(
classOf[java.lang.Throwable]
)
def
getAllPools
:
Seq
[
Schedulable
]
Definition Classes
SparkContext
Annotations
@DeveloperApi
()
def
getCheckpointDir
:
Option
[
String
]
Definition Classes
SparkContext
final
def
getClass
()
:
Class
[_]
Definition Classes
AnyRef → Any
def
getConf
:
SparkConf
Definition Classes
SparkContext
def
getExecutorMemoryStatus
:
Map
[
String
, (
Long
,
Long
)]
Definition Classes
SparkContext
def
getExecutorStorageStatus
:
Array
[
StorageStatus
]
Definition Classes
SparkContext
Annotations
@DeveloperApi
()
def
getLocalProperty
(
key:
String
)
:
String
Definition Classes
SparkContext
def
getPersistentRDDs
:
Map
[
Int
,
RDD
[_]]
Definition Classes
SparkContext
def
getPoolForName
(
pool:
String
)
:
Option
[
Schedulable
]
Definition Classes
SparkContext
Annotations
@DeveloperApi
()
def
getRDDStorageInfo
:
Array
[
RDDInfo
]
Definition Classes
SparkContext
Annotations
@DeveloperApi
()
def
getSchedulingMode
:
SchedulingMode
Definition Classes
SparkContext
def
hadoopConfiguration
:
Configuration
Definition Classes
SparkContext
def
hadoopFile
[
K
,
V
,
F <:
InputFormat
[
K
,
V
]
]
(
path:
String
)
(
implicit
km:
ClassTag
[
K
]
,
vm:
ClassTag
[
V
]
,
fm:
ClassTag
[
F
]
)
:
RDD
[(
K
,
V
)]
Definition Classes
SparkContext
def
hadoopFile
[
K
,
V
,
F <:
InputFormat
[
K
,
V
]
]
(
path:
String
,
minPartitions:
Int
)
(
implicit
km:
ClassTag
[
K
]
,
vm:
ClassTag
[
V
]
,
fm:
ClassTag
[
F
]
)
:
RDD
[(
K
,
V
)]
Definition Classes
SparkContext
def
hadoopFile
[
K
,
V
]
(
path:
String
,
inputFormatClass:
Class
[_ <:
InputFormat
[
K
,
V
]]
,
keyClass:
Class
[
K
]
,
valueClass:
Class
[
V
]
,
minPartitions:
Int
)
:
RDD
[(
K
,
V
)]
Definition Classes
SparkContext
def
hadoopRDD
[
K
,
V
]
(
conf:
JobConf
,
inputFormatClass:
Class
[_ <:
InputFormat
[
K
,
V
]]
,
keyClass:
Class
[
K
]
,
valueClass:
Class
[
V
]
,
minPartitions:
Int
)
:
RDD
[(
K
,
V
)]
Definition Classes
SparkContext
def
hashCode
()
:
Int
Definition Classes
AnyRef → Any
def
initializeLogIfNecessary
(
isInterpreter:
Boolean
)
:
Unit
Attributes
protected
Definition Classes
Logging
final
def
isInstanceOf
[
T0
]
:
Boolean
Definition Classes
Any
def
isLocal
:
Boolean
Definition Classes
SparkContext
def
isStopped
:
Boolean
Definition Classes
SparkContext
def
isTraceEnabled
()
:
Boolean
Attributes
protected
Definition Classes
Logging
def
jars
:
Seq
[
String
]
Definition Classes
SparkContext
def
killExecutor
(
executorId:
String
)
:
Boolean
Definition Classes
SparkContext → ExecutorAllocationClient
Annotations
@DeveloperApi
()
def
killExecutors
(
executorIds:
Seq
[
String
]
)
:
Boolean
Definition Classes
SparkContext → ExecutorAllocationClient
Annotations
@DeveloperApi
()
def
listFiles
()
:
Seq
[
String
]
Definition Classes
SparkContext
def
listJars
()
:
Seq
[
String
]
Definition Classes
SparkContext
val
localProperties
:
InheritableThreadLocal
[
Properties
]
Attributes
protected[
org.apache.spark
]
Definition Classes
SparkContext
def
log
:
Logger
Attributes
protected
Definition Classes
Logging
def
logDebug
(
msg: ⇒
String
,
throwable:
Throwable
)
:
Unit
Attributes
protected
Definition Classes
Logging
def
logDebug
(
msg: ⇒
String
)
:
Unit
Attributes
protected
Definition Classes
Logging
def
logError
(
msg: ⇒
String
,
throwable:
Throwable
)
:
Unit
Attributes
protected
Definition Classes
Logging
def
logError
(
msg: ⇒
String
)
:
Unit
Attributes
protected
Definition Classes
Logging
def
logInfo
(
msg: ⇒
String
,
throwable:
Throwable
)
:
Unit
Attributes
protected
Definition Classes
Logging
def
logInfo
(
msg: ⇒
String
)
:
Unit
Attributes
protected
Definition Classes
Logging
def
logName
:
String
Attributes
protected
Definition Classes
Logging
def
logTrace
(
msg: ⇒
String
,
throwable:
Throwable
)
:
Unit
Attributes
protected
Definition Classes
Logging
def
logTrace
(
msg: ⇒
String
)
:
Unit
Attributes
protected
Definition Classes
Logging
def
logWarning
(
msg: ⇒
String
,
throwable:
Throwable
)
:
Unit
Attributes
protected
Definition Classes
Logging
def
logWarning
(
msg: ⇒
String
)
:
Unit
Attributes
protected
Definition Classes
Logging
def
longAccumulator
(
name:
String
)
:
LongAccumulator
Definition Classes
SparkContext
def
longAccumulator
:
LongAccumulator
Definition Classes
SparkContext
def
makeRDD
[
T
]
(
seq:
Seq
[(
T
,
Seq
[
String
])]
)
(
implicit
arg0:
ClassTag
[
T
]
)
:
RDD
[
T
]
Definition Classes
SparkContext
def
makeRDD
[
T
]
(
seq:
Seq
[
T
]
,
numSlices:
Int
)
(
implicit
arg0:
ClassTag
[
T
]
)
:
RDD
[
T
]
Definition Classes
SparkContext
def
master
:
String
Definition Classes
SparkContext
final
def
ne
(
arg0:
AnyRef
)
:
Boolean
Definition Classes
AnyRef
def
newAPIHadoopFile
[
K
,
V
,
F <:
InputFormat
[
K
,
V
]
]
(
path:
String
,
fClass:
Class
[
F
]
,
kClass:
Class
[
K
]
,
vClass:
Class
[
V
]
,
conf:
Configuration
)
:
RDD
[(
K
,
V
)]
Definition Classes
SparkContext
def
newAPIHadoopFile
[
K
,
V
,
F <:
InputFormat
[
K
,
V
]
]
(
path:
String
)
(
implicit
km:
ClassTag
[
K
]
,
vm:
ClassTag
[
V
]
,
fm:
ClassTag
[
F
]
)
:
RDD
[(
K
,
V
)]
Definition Classes
SparkContext
def
newAPIHadoopRDD
[
K
,
V
,
F <:
InputFormat
[
K
,
V
]
]
(
conf:
Configuration
,
fClass:
Class
[
F
]
,
kClass:
Class
[
K
]
,
vClass:
Class
[
V
]
)
:
RDD
[(
K
,
V
)]
Definition Classes
SparkContext
final
def
notify
()
:
Unit
Definition Classes
AnyRef
final
def
notifyAll
()
:
Unit
Definition Classes
AnyRef
def
objectFile
[
T
]
(
path:
String
,
minPartitions:
Int
)
(
implicit
arg0:
ClassTag
[
T
]
)
:
RDD
[
T
]
Definition Classes
SparkContext
def
parallelize
[
T
]
(
seq:
Seq
[
T
]
,
numSlices:
Int
)
(
implicit
arg0:
ClassTag
[
T
]
)
:
RDD
[
T
]
Definition Classes
SparkContext
def
range
(
start:
Long
,
end:
Long
,
step:
Long
,
numSlices:
Int
)
:
RDD
[
Long
]
Definition Classes
SparkContext
def
register
(
acc:
AccumulatorV2
[_, _]
,
name:
String
)
:
Unit
Definition Classes
SparkContext
def
register
(
acc:
AccumulatorV2
[_, _]
)
:
Unit
Definition Classes
SparkContext
def
requestExecutors
(
numAdditionalExecutors:
Int
)
:
Boolean
Definition Classes
SparkContext → ExecutorAllocationClient
Annotations
@DeveloperApi
()
def
requestTotalExecutors
(
numExecutors:
Int
,
localityAwareTasks:
Int
,
hostToLocalTaskCount:
Map
[
String
,
Int
]
)
:
Boolean
Definition Classes
SparkContext → ExecutorAllocationClient
Annotations
@DeveloperApi
()
def
runApproximateJob
[
T
,
U
,
R
]
(
rdd:
RDD
[
T
]
,
func: (
TaskContext
,
Iterator
[
T
]) ⇒
U
,
evaluator:
ApproximateEvaluator
[
U
,
R
]
,
timeout:
Long
)
:
PartialResult
[
R
]
Definition Classes
SparkContext
Annotations
@DeveloperApi
()
def
runJob
[
T
,
U
]
(
rdd:
RDD
[
T
]
,
processPartition: (
Iterator
[
T
]) ⇒
U
,
resultHandler: (
Int
,
U
) ⇒
Unit
)
(
implicit
arg0:
ClassTag
[
U
]
)
:
Unit
Definition Classes
SparkContext
def
runJob
[
T
,
U
]
(
rdd:
RDD
[
T
]
,
processPartition: (
TaskContext
,
Iterator
[
T
]) ⇒
U
,
resultHandler: (
Int
,
U
) ⇒
Unit
)
(
implicit
arg0:
ClassTag
[
U
]
)
:
Unit
Definition Classes
SparkContext
def
runJob
[
T
,
U
]
(
rdd:
RDD
[
T
]
,
func: (
Iterator
[
T
]) ⇒
U
)
(
implicit
arg0:
ClassTag
[
U
]
)
:
Array
[
U
]
Definition Classes
SparkContext
def
runJob
[
T
,
U
]
(
rdd:
RDD
[
T
]
,
func: (
TaskContext
,
Iterator
[
T
]) ⇒
U
)
(
implicit
arg0:
ClassTag
[
U
]
)
:
Array
[
U
]
Definition Classes
SparkContext
def
runJob
[
T
,
U
]
(
rdd:
RDD
[
T
]
,
func: (
Iterator
[
T
]) ⇒
U
,
partitions:
Seq
[
Int
]
)
(
implicit
arg0:
ClassTag
[
U
]
)
:
Array
[
U
]
Definition Classes
SparkContext
def
runJob
[
T
,
U
]
(
rdd:
RDD
[
T
]
,
func: (
TaskContext
,
Iterator
[
T
]) ⇒
U
,
partitions:
Seq
[
Int
]
)
(
implicit
arg0:
ClassTag
[
U
]
)
:
Array
[
U
]
Definition Classes
SparkContext
def
runJob
[
T
,
U
]
(
rdd:
RDD
[
T
]
,
func: (
TaskContext
,
Iterator
[
T
]) ⇒
U
,
partitions:
Seq
[
Int
]
,
resultHandler: (
Int
,
U
) ⇒
Unit
)
(
implicit
arg0:
ClassTag
[
U
]
)
:
Unit
Definition Classes
SparkContext
def
sequenceFile
[
K
,
V
]
(
path:
String
,
minPartitions:
Int
)
(
implicit
km:
ClassTag
[
K
]
,
vm:
ClassTag
[
V
]
,
kcf: () ⇒
WritableConverter
[
K
]
,
vcf: () ⇒
WritableConverter
[
V
]
)
:
RDD
[(
K
,
V
)]
Definition Classes
SparkContext
def
sequenceFile
[
K
,
V
]
(
path:
String
,
keyClass:
Class
[
K
]
,
valueClass:
Class
[
V
]
)
:
RDD
[(
K
,
V
)]
Definition Classes
SparkContext
def
sequenceFile
[
K
,
V
]
(
path:
String
,
keyClass:
Class
[
K
]
,
valueClass:
Class
[
V
]
,
minPartitions:
Int
)
:
RDD
[(
K
,
V
)]
Definition Classes
SparkContext
def
setCallSite
(
shortCallSite:
String
)
:
Unit
Definition Classes
SparkContext
def
setCheckpointDir
(
directory:
String
)
:
Unit
Definition Classes
SparkContext
def
setJobDescription
(
value:
String
)
:
Unit
Definition Classes
SparkContext
def
setJobGroup
(
groupId:
String
,
description:
String
,
interruptOnCancel:
Boolean
)
:
Unit
Definition Classes
SparkContext
def
setLocalProperty
(
key:
String
,
value:
String
)
:
Unit
Definition Classes
SparkContext
def
setLogLevel
(
logLevel:
String
)
:
Unit
Definition Classes
SparkContext
val
sparkUser
:
String
Definition Classes
SparkContext
val
startTime
:
Long
Definition Classes
SparkContext
def
statusTracker
:
SparkStatusTracker
Definition Classes
SparkContext
def
stop
()
:
Unit
Definition Classes
JupyterSparkContext
→ SparkContext
def
submitJob
[
T
,
U
,
R
]
(
rdd:
RDD
[
T
]
,
processPartition: (
Iterator
[
T
]) ⇒
U
,
partitions:
Seq
[
Int
]
,
resultHandler: (
Int
,
U
) ⇒
Unit
,
resultFunc: ⇒
R
)
:
SimpleFutureAction
[
R
]
Definition Classes
SparkContext
final
def
synchronized
[
T0
]
(
arg0: ⇒
T0
)
:
T0
Definition Classes
AnyRef
def
textFile
(
path:
String
,
minPartitions:
Int
)
:
RDD
[
String
]
Definition Classes
SparkContext
def
toString
()
:
String
Definition Classes
AnyRef → Any
def
uiWebUrl
:
Option
[
String
]
Definition Classes
SparkContext
def
union
[
T
]
(
first:
RDD
[
T
]
,
rest:
RDD
[
T
]*
)
(
implicit
arg0:
ClassTag
[
T
]
)
:
RDD
[
T
]
Definition Classes
SparkContext
def
union
[
T
]
(
rdds:
Seq
[
RDD
[
T
]]
)
(
implicit
arg0:
ClassTag
[
T
]
)
:
RDD
[
T
]
Definition Classes
SparkContext
def
version
:
String
Definition Classes
SparkContext
final
def
wait
()
:
Unit
Definition Classes
AnyRef
Annotations
@throws
(
...
)
final
def
wait
(
arg0:
Long
,
arg1:
Int
)
:
Unit
Definition Classes
AnyRef
Annotations
@throws
(
...
)
final
def
wait
(
arg0:
Long
)
:
Unit
Definition Classes
AnyRef
Annotations
@throws
(
...
)
def
wholeTextFiles
(
path:
String
,
minPartitions:
Int
)
:
RDD
[(
String
,
String
)]
Definition Classes
SparkContext
Deprecated Value Members
def
accumulable
[
R
,
T
]
(
initialValue:
R
,
name:
String
)
(
implicit
param:
AccumulableParam
[
R
,
T
]
)
:
Accumulable
[
R
,
T
]
Definition Classes
SparkContext
Annotations
@deprecated
Deprecated
(Since version 2.0.0)
use AccumulatorV2
def
accumulable
[
R
,
T
]
(
initialValue:
R
)
(
implicit
param:
AccumulableParam
[
R
,
T
]
)
:
Accumulable
[
R
,
T
]
Definition Classes
SparkContext
Annotations
@deprecated
Deprecated
(Since version 2.0.0)
use AccumulatorV2
def
accumulableCollection
[
R
,
T
]
(
initialValue:
R
)
(
implicit
arg0: (
R
) ⇒
Growable
[
T
] with
TraversableOnce
[
T
] with
Serializable
,
arg1:
ClassTag
[
R
]
)
:
Accumulable
[
R
,
T
]
Definition Classes
SparkContext
Annotations
@deprecated
Deprecated
(Since version 2.0.0)
use AccumulatorV2
def
accumulator
[
T
]
(
initialValue:
T
,
name:
String
)
(
implicit
param:
AccumulatorParam
[
T
]
)
:
Accumulator
[
T
]
Definition Classes
SparkContext
Annotations
@deprecated
Deprecated
(Since version 2.0.0)
use AccumulatorV2
def
accumulator
[
T
]
(
initialValue:
T
)
(
implicit
param:
AccumulatorParam
[
T
]
)
:
Accumulator
[
T
]
Definition Classes
SparkContext
Annotations
@deprecated
Deprecated
(Since version 2.0.0)
use AccumulatorV2
Inherited from
SparkContext
Inherited from
ExecutorAllocationClient
Inherited from
Logging
Inherited from
AnyRef
Inherited from
Any
Ungrouped