{
"component": {
"kind": "component",
"name": "spark",
"title": "Spark",
"description": "Send RDD or DataFrame jobs to Apache Spark clusters.",
"deprecated": false,
"firstVersion": "2.17.0",
"label": "bigdata,iot",
"javaType": "org.apache.camel.component.spark.SparkComponent",
"supportLevel": "Stable",
"groupId": "org.apache.camel.springboot",
"artifactId": "camel-spark-starter",
"version": "3.11.1-SNAPSHOT",
"scheme": "spark",
"extendsScheme": "",
"syntax": "spark:endpointType",
"async": false,
"api": false,
"consumerOnly": false,
"producerOnly": true,
"lenientProperties": false
},
"componentProperties": {
"lazyStartProducer": { "kind": "property", "displayName": "Lazy Start Producer", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "autowired": false, "secret": false, "defaultValue": false, "description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and starting the producer may take a little time and prolong the total processing time of the processing." },
"rdd": { "kind": "property", "displayName": "Rdd", "group": "producer", "label": "", "required": false, "type": "object", "javaType": "org.apache.spark.api.java.JavaRDDLike", "deprecated": false, "autowired": false, "secret": false, "description": "RDD to compute against." },
"rddCallback": { "kind": "property", "displayName": "Rdd Callback", "group": "producer", "label": "", "required": false, "type": "object", "javaType": "org.apache.camel.component.spark.RddCallback", "deprecated": false, "autowired": false, "secret": false, "description": "Function performing action against an RDD." },
"autowiredEnabled": { "kind": "property", "displayName": "Autowired Enabled", "group": "advanced", "label": "advanced", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "autowired": false, "secret": false, "defaultValue": true, "description": "Whether autowiring is enabled. This is used for automatic autowiring options (the option must be marked as autowired) by looking up in the registry to find if there is a single instance of matching type, which then gets configured on the component. This can be used for automatic configuring JDBC data sources, JMS connection factories, AWS Clients, etc." }
},
"properties": {
"endpointType": { "kind": "path", "displayName": "Endpoint Type", "group": "producer", "label": "", "required": true, "type": "object", "javaType": "org.apache.camel.component.spark.EndpointType", "enum": [ "rdd", "dataframe", "hive" ], "deprecated": false, "deprecationNote": "", "autowired": false, "secret": false, "description": "Type of the endpoint (rdd, dataframe, hive)." },
"collect": { "kind": "parameter", "displayName": "Collect", "group": "producer", "label": "", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "autowired": false, "secret": false, "defaultValue": true, "description": "Indicates if results should be collected or counted." },
"dataFrame": { "kind": "parameter", "displayName": "Data Frame", "group": "producer", "label": "", "required": false, "type": "object", "javaType": "org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>", "deprecated": false, "autowired": false, "secret": false, "description": "DataFrame to compute against." },
"dataFrameCallback": { "kind": "parameter", "displayName": "Data Frame Callback", "group": "producer", "label": "", "required": false, "type": "object", "javaType": "org.apache.camel.component.spark.DataFrameCallback", "deprecated": false, "autowired": false, "secret": false, "description": "Function performing action against an DataFrame." },
"lazyStartProducer": { "kind": "parameter", "displayName": "Lazy Start Producer", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "autowired": false, "secret": false, "defaultValue": false, "description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and starting the producer may take a little time and prolong the total processing time of the processing." },
"rdd": { "kind": "parameter", "displayName": "Rdd", "group": "producer", "label": "", "required": false, "type": "object", "javaType": "org.apache.spark.api.java.JavaRDDLike", "deprecated": false, "autowired": false, "secret": false, "description": "RDD to compute against." },
"rddCallback": { "kind": "parameter", "displayName": "Rdd Callback", "group": "producer", "label": "", "required": false, "type": "object", "javaType": "org.apache.camel.component.spark.RddCallback", "deprecated": false, "autowired": false, "secret": false, "description": "Function performing action against an RDD." }
}
}
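
For orientation, a minimal sketch of how this descriptor maps onto an actual route (illustrative only, not part of the generated file): the endpointType path (here rdd) selects the job type, and the rdd/rddCallback parameters resolve registry beans via the #name syntax. The registry bean names lines and countLinesContaining, and the direct:countMatches trigger endpoint, are assumptions for this example; the RddCallback interface and its onRdd signature are the component's own.

    import org.apache.camel.builder.RouteBuilder;
    import org.apache.camel.component.spark.RddCallback;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaRDDLike;

    public class SparkRouteExample extends RouteBuilder {

        @Override
        public void configure() {
            // Producer-only endpoint: the incoming message body is passed to
            // the callback as a payload, and the callback's return value
            // becomes the new message body. "direct:countMatches" is a
            // hypothetical trigger endpoint; "#lines" is assumed to be a
            // JavaRDD<String> bound in the Camel registry.
            from("direct:countMatches")
                .to("spark:rdd?rdd=#lines&rddCallback=#countLinesContaining");
        }

        // Bind this in the registry under the name "countLinesContaining"
        // (e.g. as a Spring @Bean when using camel-spark-starter). It counts
        // the lines of the bound RDD that contain the incoming message body.
        public static RddCallback<Long> countLinesContaining() {
            return new RddCallback<Long>() {
                @Override
                @SuppressWarnings("unchecked")
                public Long onRdd(JavaRDDLike rdd, Object... payloads) {
                    String pattern = (String) payloads[0];
                    return ((JavaRD​D<String>) rdd)
                            .filter(line -> line.contains(pattern))
                            .count();
                }
            };
        }
    }

Sending a body to the route, e.g. producerTemplate.sendBody("direct:countMatches", "ERROR"), would then return the number of RDD lines containing that string as the out body.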