@UriEndpoint(firstVersion = "2.17.0", scheme = "spark", title = "Spark",
             syntax = "spark:endpointType", producerOnly = true, category = {BIGDATA, IOT})
public class SparkEndpoint extends org.apache.camel.support.DefaultEndpoint
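Because the endpoint is declared producerOnly, it is used as the target of a to(...) call in a route rather than as a consumer. A minimal route sketch, assuming hypothetical registry beans sparkRdd (a JavaRDDLike) and rddCallback (an RddCallback) bound elsewhere:

```java
import org.apache.camel.builder.RouteBuilder;

// Minimal sketch of a route targeting the producer-only Spark endpoint.
// "#sparkRdd" and "#rddCallback" are hypothetical registry bindings for a
// JavaRDDLike and an RddCallback, respectively.
public class SparkRddRoute extends RouteBuilder {
    @Override
    public void configure() {
        from("direct:compute")
            .to("spark:rdd?rdd=#sparkRdd&rddCallback=#rddCallback");
    }
}
```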
| Constructor and Description |
|---|
| SparkEndpoint(String endpointUri, SparkComponent component, EndpointType endpointType) |
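The constructor is normally invoked by SparkComponent while resolving a spark: URI; application code typically obtains the endpoint from the CamelContext instead. A sketch under that assumption (class names taken from the camel-spark package org.apache.camel.component.spark):

```java
import org.apache.camel.CamelContext;
import org.apache.camel.component.spark.SparkEndpoint;
import org.apache.camel.impl.DefaultCamelContext;

public class ResolveSparkEndpoint {
    public static void main(String[] args) {
        // Sketch: resolve the endpoint by URI; the "rdd" path segment corresponds
        // to the EndpointType argument of the constructor above.
        CamelContext context = new DefaultCamelContext();
        SparkEndpoint endpoint = context.getEndpoint("spark:rdd", SparkEndpoint.class);
        System.out.println(endpoint.getEndpointType());
    }
}
```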
| Modifier and Type | Method and Description |
|---|---|
| org.apache.camel.Consumer | createConsumer(org.apache.camel.Processor processor) |
| org.apache.camel.Producer | createProducer() |
| protected void | doInit() |
| SparkComponent | getComponent() |
| org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> | getDataFrame() |
| DataFrameCallback | getDataFrameCallback() |
| EndpointType | getEndpointType() |
| org.apache.spark.api.java.JavaRDDLike | getRdd() |
| RddCallback | getRddCallback() |
| boolean | isCollect() |
| void | setCollect(boolean collect) - Indicates whether results should be collected or counted. |
| void | setDataFrame(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> dataFrame) - DataFrame to compute against. |
| void | setDataFrameCallback(DataFrameCallback dataFrameCallback) - Function performing an action against a DataFrame. |
| void | setEndpointType(EndpointType endpointType) - Type of the endpoint (rdd, dataframe, hive). |
| void | setRdd(org.apache.spark.api.java.JavaRDDLike rdd) - RDD to compute against. |
| void | setRddCallback(RddCallback rddCallback) - Function performing an action against an RDD. |
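The setters above back the endpoint's URI options, so the endpoint can also be configured programmatically before routes are started. A sketch, assuming an existing CamelContext plus a hypothetical JavaRDDLike (lines) and RddCallback (countWords), with the camel-spark classes under org.apache.camel.component.spark:

```java
import org.apache.camel.CamelContext;
import org.apache.camel.component.spark.RddCallback;
import org.apache.camel.component.spark.SparkEndpoint;
import org.apache.spark.api.java.JavaRDDLike;

// Sketch: configuring a resolved SparkEndpoint through the setters listed above.
// "context", "lines" and "countWords" are assumed to be created elsewhere.
public class ConfigureSparkEndpoint {
    public static void configure(CamelContext context, JavaRDDLike lines, RddCallback countWords) {
        SparkEndpoint endpoint = context.getEndpoint("spark:rdd", SparkEndpoint.class);
        endpoint.setRdd(lines);               // RDD to compute against
        endpoint.setRddCallback(countWords);  // function performing an action against the RDD
        endpoint.setCollect(false);           // count results rather than collecting them
    }
}
```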
Methods inherited from org.apache.camel.support.DefaultEndpoint:
configureConsumer, configureExchange, configurePollingConsumer, configureProperties, createAsyncProducer, createEndpointUri, createExchange, createExchange, createPollingConsumer, doStart, doStop, equals, getCamelContext, getEndpointKey, getEndpointUri, getExceptionHandler, getExchangePattern, getId, getPollingConsumerBlockTimeout, getPollingConsumerQueueSize, hashCode, isAutowiredEnabled, isBridgeErrorHandler, isLazyStartProducer, isLenientProperties, isPollingConsumerBlockWhenFull, isPollingConsumerCopy, isSingleton, setAutowiredEnabled, setBridgeErrorHandler, setCamelContext, setComponent, setEndpointUri, setEndpointUriIfNotSpecified, setExceptionHandler, setExchangePattern, setLazyStartProducer, setPollingConsumerBlockTimeout, setPollingConsumerBlockWhenFull, setPollingConsumerCopy, setPollingConsumerQueueSize, setProperties, toString

Methods inherited from the Camel service support base class:
build, doBuild, doFail, doLifecycleChange, doResume, doShutdown, doSuspend, fail, getStatus, init, isBuild, isInit, isNew, isRunAllowed, isShutdown, isStarted, isStarting, isStartingOrStarted, isStopped, isStopping, isStoppingOrStopped, isSuspended, isSuspending, isSuspendingOrSuspended, resume, shutdown, start, stop, suspend

Methods inherited from java.lang.Object:
clone, finalize, getClass, notify, notifyAll, wait, wait, wait

public SparkEndpoint(String endpointUri, SparkComponent component, EndpointType endpointType)
protected void doInit() throws Exception
Overrides: doInit in class org.apache.camel.support.DefaultEndpoint
Throws: Exception

public org.apache.camel.Consumer createConsumer(org.apache.camel.Processor processor) throws Exception
Throws: Exception

public SparkComponent getComponent()
Overrides: getComponent in class org.apache.camel.support.DefaultEndpoint

public EndpointType getEndpointType()
public void setEndpointType(EndpointType endpointType)
public org.apache.spark.api.java.JavaRDDLike getRdd()
public void setRdd(org.apache.spark.api.java.JavaRDDLike rdd)
public RddCallback getRddCallback()
public void setRddCallback(RddCallback rddCallback)
public org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> getDataFrame()
public void setDataFrame(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> dataFrame)
public DataFrameCallback getDataFrameCallback()
public void setDataFrameCallback(DataFrameCallback dataFrameCallback)
public boolean isCollect()
public void setCollect(boolean collect)
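setEndpointType(EndpointType) selects between the rdd, dataframe and hive modes. A route sketch for the dataframe case, using hypothetical registry beans cars (a Dataset<Row>) and carsCallback (a DataFrameCallback):

```java
import org.apache.camel.builder.RouteBuilder;

// Sketch of the "dataframe" endpoint type; "#cars" and "#carsCallback" are
// hypothetical registry bindings for a Dataset<Row> and a DataFrameCallback.
public class SparkDataFrameRoute extends RouteBuilder {
    @Override
    public void configure() {
        from("direct:analyse")
            .to("spark:dataframe?dataFrame=#cars&dataFrameCallback=#carsCallback");
    }
}
```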