{"component":{"kind":"component","name":"spark","title":"Spark","description":"Send RDD or DataFrame jobs to Apache Spark clusters.","deprecated":false,"firstVersion":"2.17.0","label":"bigdata,iot","javaType":"org.apache.camel.component.spark.SparkComponent","supportLevel":"Stable","groupId":"org.apache.camel","artifactId":"camel-spark","version":"3.18.1","scheme":"spark","extendsScheme":"","syntax":"spark:endpointType","async":false,"api":false,"consumerOnly":false,"producerOnly":true,"lenientProperties":false},"headers":{"CAMEL_SPARK_RDD":{"kind":"header","displayName":"","group":"producer","label":"","required":false,"javaType":"Object","deprecated":false,"deprecationNote":"","autowired":false,"secret":false,"description":"The RDD.","constantName":"org.apache.camel.component.spark.SparkConstants#SPARK_RDD_HEADER"},"CAMEL_SPARK_RDD_CALLBACK":{"kind":"header","displayName":"","group":"producer","label":"","required":false,"javaType":"org.apache.camel.component.spark.RddCallback","deprecated":false,"deprecationNote":"","autowired":false,"secret":false,"description":"The function performing action against an RDD.","constantName":"org.apache.camel.component.spark.SparkConstants#SPARK_RDD_CALLBACK_HEADER"},"CAMEL_SPARK_DATAFRAME":{"kind":"header","displayName":"","group":"producer","label":"","required":false,"javaType":"Dataset<Row>","deprecated":false,"deprecationNote":"","autowired":false,"secret":false,"description":"The data frame to compute against.","constantName":"org.apache.camel.component.spark.SparkConstants#SPARK_DATAFRAME_HEADER"},"CAMEL_SPARK_DATAFRAME_CALLBACK":{"kind":"header","displayName":"","group":"producer","label":"","required":false,"javaType":"org.apache.camel.component.spark.DataFrameCallback","deprecated":false,"deprecationNote":"","autowired":false,"secret":false,"description":"The function performing action against a data frame.","constantName":"org.apache.camel.component.spark.SparkConstants#SPARK_DATAFRAME_CALLBACK_HEADER"}},"properties":{"endpointType":{"kind":"path","displayName":"Endpoint Type","group":"producer","label":"","required":true,"type":"object","javaType":"org.apache.camel.component.spark.EndpointType","enum":["rdd","dataframe","hive"],"deprecated":false,"deprecationNote":"","autowired":false,"secret":false,"description":"Type of the endpoint (rdd, dataframe, hive)."},"collect":{"kind":"parameter","displayName":"Collect","group":"producer","label":"","required":false,"type":"boolean","javaType":"boolean","deprecated":false,"autowired":false,"secret":false,"defaultValue":true,"description":"Indicates if results should be collected or counted."},"dataFrame":{"kind":"parameter","displayName":"Data Frame","group":"producer","label":"","required":false,"type":"object","javaType":"org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>","deprecated":false,"autowired":false,"secret":false,"description":"DataFrame to compute against."},"dataFrameCallback":{"kind":"parameter","displayName":"Data Frame Callback","group":"producer","label":"","required":false,"type":"object","javaType":"org.apache.camel.component.spark.DataFrameCallback","deprecated":false,"autowired":false,"secret":false,"description":"Function performing action against an DataFrame."},"rdd":{"kind":"parameter","displayName":"Rdd","group":"producer","label":"","required":false,"type":"object","javaType":"org.apache.spark.api.java.JavaRDDLike","deprecated":false,"autowired":false,"secret":false,"description":"RDD to compute against."},"rddCallback":{"kind":"parameter","displayName":"Rdd 
Callback","group":"producer","label":"","required":false,"type":"object","javaType":"org.apache.camel.component.spark.RddCallback","deprecated":false,"autowired":false,"secret":false,"description":"Function performing action against an RDD."},"lazyStartProducer":{"kind":"parameter","displayName":"Lazy Start Producer","group":"producer (advanced)","label":"producer,advanced","required":false,"type":"boolean","javaType":"boolean","deprecated":false,"autowired":false,"secret":false,"defaultValue":false,"description":"Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and starting the producer may take a little time and prolong the total processing time of the processing."}}}