{
"connector": {
"class": "org.apache.camel.kafkaconnector.azurestoragedatalake.CamelAzurestoragedatalakeSinkConnector",
"artifactId": "camel-azure-storage-datalake-kafka-connector",
"groupId": "org.apache.camel.kafkaconnector",
"id": "camel-azure-storage-datalake-sink",
"type": "sink",
"version": "0.11.6-SNAPSHOT",
"description": "Camel Azure Datalake Gen2 Component"
},
"properties": {
"camel.sink.path.accountName": {
"name": "camel.sink.path.accountName",
"description": "name of the azure account",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.path.fileSystemName": {
"name": "camel.sink.path.fileSystemName",
"description": "name of filesystem to be used",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.accountKey": {
"name": "camel.sink.endpoint.accountKey",
"description": "account key for authentication",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.clientId": {
"name": "camel.sink.endpoint.clientId",
"description": "client id for azure account",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.clientSecret": {
"name": "camel.sink.endpoint.clientSecret",
"description": "client secret for azure account",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.clientSecretCredential": {
"name": "camel.sink.endpoint.clientSecretCredential",
"description": "client secret credential for authentication",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.close": {
"name": "camel.sink.endpoint.close",
"description": "Whether or not a file changed event raised indicates completion (true) or modification (false)",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.closeStreamAfterRead": {
"name": "camel.sink.endpoint.closeStreamAfterRead",
"description": "check for closing stream after read",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.dataCount": {
"name": "camel.sink.endpoint.dataCount",
"description": "count number of bytes to download",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.dataLakeServiceClient": {
"name": "camel.sink.endpoint.dataLakeServiceClient",
"description": "service client of datalake",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.directoryName": {
"name": "camel.sink.endpoint.directoryName",
"description": "directory of the file to be handled in component",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.downloadLinkExpiration": {
"name": "camel.sink.endpoint.downloadLinkExpiration",
"description": "download link expiration time",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.expression": {
"name": "camel.sink.endpoint.expression",
"description": "expression for queryInputStream",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.fileDir": {
"name": "camel.sink.endpoint.fileDir",
"description": "directory of file to do operations in the local system",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.fileName": {
"name": "camel.sink.endpoint.fileName",
"description": "name of file to be handled in component",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.fileOffset": {
"name": "camel.sink.endpoint.fileOffset",
"description": "offset position in file for different operations",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.maxResults": {
"name": "camel.sink.endpoint.maxResults",
"description": "maximum number of results to show at a time",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.maxRetryRequests": {
"name": "camel.sink.endpoint.maxRetryRequests",
		"description": "number of retries to a given request",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.openOptions": {
"name": "camel.sink.endpoint.openOptions",
"description": "set open options for creating file",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.path": {
"name": "camel.sink.endpoint.path",
"description": "path in azure datalake for operations",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.permission": {
"name": "camel.sink.endpoint.permission",
"description": "permission string for the file",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.position": {
"name": "camel.sink.endpoint.position",
"description": "This parameter allows the caller to upload data in parallel and control the order in which it is appended to the file.",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.recursive": {
"name": "camel.sink.endpoint.recursive",
"description": "recursively include all paths",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.regex": {
"name": "camel.sink.endpoint.regex",
"description": "regular expression for matching file names",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.retainUncommitedData": {
"name": "camel.sink.endpoint.retainUncommitedData",
"description": "Whether or not uncommitted data is to be retained after the operation",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.serviceClient": {
"name": "camel.sink.endpoint.serviceClient",
"description": "datalake service client for azure storage datalake",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.sharedKeyCredential": {
"name": "camel.sink.endpoint.sharedKeyCredential",
"description": "shared key credential for azure datalake gen2",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.tenantId": {
"name": "camel.sink.endpoint.tenantId",
"description": "tenant id for azure account",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.timeout": {
"name": "camel.sink.endpoint.timeout",
"description": "Timeout for operation",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.umask": {
"name": "camel.sink.endpoint.umask",
"description": "umask permission for file",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.userPrincipalNameReturned": {
"name": "camel.sink.endpoint.userPrincipalNameReturned",
"description": "whether or not to use upn",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.lazyStartProducer": {
"name": "camel.sink.endpoint.lazyStartProducer",
"description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and starting the producer may take a little time and prolong the total processing time of the processing.",
"defaultValue": "false",
"priority": "MEDIUM",
"required": "false"
},
"camel.sink.endpoint.operation": {
"name": "camel.sink.endpoint.operation",
		"description": "operation to be performed. One of: [listFileSystem] [listFiles]",
"defaultValue": "\"listFileSystem\"",
"priority": "MEDIUM",
"required": "false",
"enum": [
"listFileSystem",
"listFiles"
]
},
"camel.component.azure-storage-datalake.accountKey": {
"name": "camel.component.azure-storage-datalake.accountKey",
"description": "account key for authentication",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.clientId": {
"name": "camel.component.azure-storage-datalake.clientId",
"description": "client id for azure account",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.clientSecret": {
"name": "camel.component.azure-storage-datalake.clientSecret",
"description": "client secret for azure account",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.clientSecretCredential": {
"name": "camel.component.azure-storage-datalake.clientSecretCredential",
"description": "client secret credential for authentication",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.close": {
"name": "camel.component.azure-storage-datalake.close",
"description": "Whether or not a file changed event raised indicates completion (true) or modification (false)",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.closeStreamAfterRead": {
"name": "camel.component.azure-storage-datalake.closeStreamAfterRead",
"description": "check for closing stream after read",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.configuration": {
"name": "camel.component.azure-storage-datalake.configuration",
"description": "configuration object for datalake",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.dataCount": {
"name": "camel.component.azure-storage-datalake.dataCount",
"description": "count number of bytes to download",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.directoryName": {
"name": "camel.component.azure-storage-datalake.directoryName",
"description": "directory of the file to be handled in component",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.downloadLinkExpiration": {
"name": "camel.component.azure-storage-datalake.downloadLinkExpiration",
"description": "download link expiration time",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.expression": {
"name": "camel.component.azure-storage-datalake.expression",
"description": "expression for queryInputStream",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.fileDir": {
"name": "camel.component.azure-storage-datalake.fileDir",
"description": "directory of file to do operations in the local system",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.fileName": {
"name": "camel.component.azure-storage-datalake.fileName",
"description": "name of file to be handled in component",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.fileOffset": {
"name": "camel.component.azure-storage-datalake.fileOffset",
"description": "offset position in file for different operations",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.maxResults": {
"name": "camel.component.azure-storage-datalake.maxResults",
"description": "maximum number of results to show at a time",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.maxRetryRequests": {
"name": "camel.component.azure-storage-datalake.maxRetryRequests",
		"description": "number of retries to a given request",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.openOptions": {
"name": "camel.component.azure-storage-datalake.openOptions",
"description": "set open options for creating file",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.path": {
"name": "camel.component.azure-storage-datalake.path",
"description": "path in azure datalake for operations",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.permission": {
"name": "camel.component.azure-storage-datalake.permission",
"description": "permission string for the file",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.position": {
"name": "camel.component.azure-storage-datalake.position",
"description": "This parameter allows the caller to upload data in parallel and control the order in which it is appended to the file.",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.recursive": {
"name": "camel.component.azure-storage-datalake.recursive",
"description": "recursively include all paths",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.regex": {
"name": "camel.component.azure-storage-datalake.regex",
"description": "regular expression for matching file names",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.retainUncommitedData": {
"name": "camel.component.azure-storage-datalake.retainUncommitedData",
"description": "Whether or not uncommitted data is to be retained after the operation",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.serviceClient": {
"name": "camel.component.azure-storage-datalake.serviceClient",
"description": "datalake service client for azure storage datalake",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.sharedKeyCredential": {
"name": "camel.component.azure-storage-datalake.sharedKeyCredential",
"description": "shared key credential for azure datalake gen2",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.tenantId": {
"name": "camel.component.azure-storage-datalake.tenantId",
"description": "tenant id for azure account",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.timeout": {
"name": "camel.component.azure-storage-datalake.timeout",
"description": "Timeout for operation",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.umask": {
"name": "camel.component.azure-storage-datalake.umask",
"description": "umask permission for file",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.userPrincipalNameReturned": {
"name": "camel.component.azure-storage-datalake.userPrincipalNameReturned",
"description": "whether or not to use upn",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.lazyStartProducer": {
"name": "camel.component.azure-storage-datalake.lazyStartProducer",
"description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and starting the producer may take a little time and prolong the total processing time of the processing.",
"defaultValue": "false",
"priority": "MEDIUM",
"required": "false"
},
"camel.component.azure-storage-datalake.operation": {
"name": "camel.component.azure-storage-datalake.operation",
		"description": "operation to be performed. One of: [listFileSystem] [listFiles]",
"defaultValue": "\"listFileSystem\"",
"priority": "MEDIUM",
"required": "false",
"enum": [
"listFileSystem",
"listFiles"
]
},
"camel.component.azure-storage-datalake.autowiredEnabled": {
"name": "camel.component.azure-storage-datalake.autowiredEnabled",
"description": "Whether autowiring is enabled. This is used for automatic autowiring options (the option must be marked as autowired) by looking up in the registry to find if there is a single instance of matching type, which then gets configured on the component. This can be used for automatic configuring JDBC data sources, JMS connection factories, AWS Clients, etc.",
"defaultValue": "true",
"priority": "MEDIUM",
"required": "false"
}
}
}