First cut of source and source task
diff --git a/build.gradle b/build.gradle
new file mode 100644
index 0000000..d694dfa
--- /dev/null
+++ b/build.gradle
@@ -0,0 +1,25 @@
+plugins {
+    id 'java'
+}
+
+group 'org.apache.geode'
+version '1.0-SNAPSHOT'
+
+sourceCompatibility = 1.8
+
+repositories {
+    mavenCentral()
+}
+
+dependencies {
+    // 'implementation'/'testImplementation' replace 'compile'/'testCompile',
+    // which are deprecated and removed entirely in Gradle 7+.
+    implementation 'org.apache.geode:geode-core:1.11.0'
+    implementation(group: 'org.apache.kafka', name: 'connect-api', version: '2.3.1')
+
+    testImplementation(group: 'org.apache.kafka', name: 'kafka_2.12', version: '2.3.1')
+    testImplementation(group: 'org.apache.kafka', name: 'kafka-streams-test-utils', version: '1.1.0')
+    testImplementation(group: 'org.apache.curator', name: 'curator-framework', version: '4.2.0')
+    testImplementation(group: 'org.apache.kafka', name: 'connect-runtime', version: '2.3.1')
+    testImplementation group: 'junit', name: 'junit', version: '4.12'
+}
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..87b738c
--- /dev/null
+++ b/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..44e7c4d
--- /dev/null
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,5 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-5.2.1-bin.zip
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/gradlew b/gradlew
new file mode 100755
index 0000000..af6708f
--- /dev/null
+++ b/gradlew
@@ -0,0 +1,172 @@
+#!/usr/bin/env sh
+
+##############################################################################
+##
+## Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`"/$link"
+ fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m"'
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+ echo "$*"
+}
+
+die () {
+ echo
+ echo "$*"
+ echo
+ exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+ CYGWIN* )
+ cygwin=true
+ ;;
+ Darwin* )
+ darwin=true
+ ;;
+ MINGW* )
+ msys=true
+ ;;
+ NONSTOP* )
+ nonstop=true
+ ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD="java"
+ which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+ MAX_FD_LIMIT=`ulimit -H -n`
+ if [ $? -eq 0 ] ; then
+ if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+ MAX_FD="$MAX_FD_LIMIT"
+ fi
+ ulimit -n $MAX_FD
+ if [ $? -ne 0 ] ; then
+ warn "Could not set maximum file descriptor limit: $MAX_FD"
+ fi
+ else
+ warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+ fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+ GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+ APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+ CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+ JAVACMD=`cygpath --unix "$JAVACMD"`
+
+ # We build the pattern for arguments to be converted via cygpath
+ ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+ SEP=""
+ for dir in $ROOTDIRSRAW ; do
+ ROOTDIRS="$ROOTDIRS$SEP$dir"
+ SEP="|"
+ done
+ OURCYGPATTERN="(^($ROOTDIRS))"
+ # Add a user-defined pattern to the cygpath arguments
+ if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+ OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+ fi
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ i=0
+ for arg in "$@" ; do
+ CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+ CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
+
+ if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
+ eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+ else
+ eval `echo args$i`="\"$arg\""
+ fi
+ i=$((i+1))
+ done
+ case $i in
+ (0) set -- ;;
+ (1) set -- "$args0" ;;
+ (2) set -- "$args0" "$args1" ;;
+ (3) set -- "$args0" "$args1" "$args2" ;;
+ (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+ (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+ (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+ (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+ (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+ (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+ esac
+fi
+
+# Escape application args
+save () {
+ for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+ echo " "
+}
+APP_ARGS=$(save "$@")
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
+ cd "$(dirname "$0")"
+fi
+
+exec "$JAVACMD" "$@"
diff --git a/gradlew.bat b/gradlew.bat
new file mode 100644
index 0000000..0f8d593
--- /dev/null
+++ b/gradlew.bat
@@ -0,0 +1,84 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS="-Xmx64m"
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windows variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/settings.gradle b/settings.gradle
new file mode 100644
index 0000000..b5a2326
--- /dev/null
+++ b/settings.gradle
@@ -0,0 +1,2 @@
+rootProject.name = 'geode-kafka-connector'
+
diff --git a/src/main/java/kafka/GeodeKafkaSink.java b/src/main/java/kafka/GeodeKafkaSink.java
new file mode 100644
index 0000000..af3a22a
--- /dev/null
+++ b/src/main/java/kafka/GeodeKafkaSink.java
@@ -0,0 +1,8 @@
+package kafka;
+
+/**
+ * Placeholder for the Geode Kafka sink connector (not yet implemented).
+ */
+public class GeodeKafkaSink {
+
+}
diff --git a/src/main/java/kafka/GeodeKafkaSource.java b/src/main/java/kafka/GeodeKafkaSource.java
new file mode 100644
index 0000000..d1599e9
--- /dev/null
+++ b/src/main/java/kafka/GeodeKafkaSource.java
@@ -0,0 +1,64 @@
+package kafka;
+
+import org.apache.kafka.common.config.ConfigDef;
+import org.apache.kafka.common.utils.AppInfoParser;
+import org.apache.kafka.connect.connector.Task;
+import org.apache.kafka.connect.source.SourceConnector;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Kafka Connect source connector that streams events from an Apache Geode
+ * region into Kafka topics. The work is done by {@link GeodeKafkaSourceTask}.
+ */
+public class GeodeKafkaSource extends SourceConnector {
+
+  /** Config key naming the Geode region to listen to. */
+  public static final String REGION_NAME = "GEODE_REGION_NAME";
+  private String regionName;
+  private static final String TOPICS = "TOPICS";
+
+  /** Properties handed to start(), shared verbatim with every task. */
+  private Map<String, String> sharedProps;
+  private static final ConfigDef CONFIG_DEF = new ConfigDef();
+
+  @Override
+  public Class<? extends Task> taskClass() {
+    return GeodeKafkaSourceTask.class;
+  }
+
+  @Override
+  public List<Map<String, String>> taskConfigs(int maxTasks) {
+    // Every task currently receives an identical copy of the shared props.
+    List<Map<String, String>> taskConfigs = new ArrayList<>();
+    Map<String, String> taskProps = new HashMap<>();
+    taskProps.putAll(sharedProps);
+    for (int i = 0; i < maxTasks; i++) {
+      taskConfigs.add(taskProps);
+    }
+    return taskConfigs;
+  }
+
+  @Override
+  public ConfigDef config() {
+    return CONFIG_DEF;
+  }
+
+  @Override
+  public void start(Map<String, String> props) {
+    sharedProps = props;
+  }
+
+  @Override
+  public void stop() {
+    // No connector-level resources to release; tasks clean up after themselves.
+  }
+
+  @Override
+  public String version() {
+    return AppInfoParser.getVersion();
+  }
+}
diff --git a/src/main/java/kafka/GeodeKafkaSourceTask.java b/src/main/java/kafka/GeodeKafkaSourceTask.java
new file mode 100644
index 0000000..6492199
--- /dev/null
+++ b/src/main/java/kafka/GeodeKafkaSourceTask.java
@@ -0,0 +1,130 @@
+package kafka;
+
+import org.apache.geode.cache.client.ClientCache;
+import org.apache.geode.cache.client.ClientCacheFactory;
+import org.apache.geode.cache.query.CqAttributes;
+import org.apache.geode.cache.query.CqAttributesFactory;
+import org.apache.geode.cache.query.CqEvent;
+import org.apache.geode.cache.query.CqException;
+import org.apache.geode.cache.query.CqExistsException;
+import org.apache.geode.cache.query.CqListener;
+import org.apache.kafka.common.utils.AppInfoParser;
+import org.apache.kafka.connect.source.SourceRecord;
+import org.apache.kafka.connect.source.SourceTask;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Source task that registers a continuous query (CQ) on a Geode region and
+ * buffers the resulting events until Kafka Connect polls for them.
+ */
+public class GeodeKafkaSourceTask extends SourceTask {
+  private static final String REGION_NAME = "REGION_NAME";
+  private static final String OFFSET = "OFFSET";
+  private static String[] topics;
+  private int batchSize;
+  private int queueSize;
+  // Shared with the CQ listener, which runs on a Geode callback thread.
+  private static BlockingQueue<CqEvent> eventBuffer;
+  private Map<String, String> sourcePartition;
+  private Map<String, Long> offset;
+
+  private ClientCache clientCache;
+
+  @Override
+  public String version() {
+    return AppInfoParser.getVersion();
+  }
+
+  @Override
+  public void start(Map<String, String> props) {
+    // TODO: read these settings from props instead of hard-coding them.
+    batchSize = 100;
+    queueSize = 100000;
+    String regionName = "someRegion";
+    eventBuffer = new LinkedBlockingQueue<>(queueSize);
+    topics = new String[] {"default"};
+    sourcePartition = new HashMap<>();
+    sourcePartition.put(REGION_NAME, regionName);
+
+    offset = new HashMap<>();
+    offset.put(OFFSET, 0L);
+
+    // Locator host/port must match the test cluster's locator (10334).
+    installOnGeode("localhost", 10334, regionName);
+  }
+
+  @Override
+  public List<SourceRecord> poll() throws InterruptedException {
+    ArrayList<SourceRecord> records = new ArrayList<>(batchSize);
+    ArrayList<CqEvent> events = new ArrayList<>(batchSize);
+    if (eventBuffer.drainTo(events, batchSize) > 0) {
+      for (CqEvent event : events) {
+        for (String topic : topics) {
+          records.add(new SourceRecord(sourcePartition, offset, topic, null, event));
+        }
+      }
+      return records;
+    }
+    // null tells the Connect framework there is nothing to publish right now.
+    return null;
+  }
+
+  @Override
+  public void stop() {
+    // keepAlive=true preserves the durable subscription across restarts.
+    clientCache.close(true);
+  }
+
+  /** Connects a durable client to Geode and installs the CQ listener. */
+  private void installOnGeode(String locatorHost, int locatorPort, String regionName) {
+    clientCache = new ClientCacheFactory().set("durable-client-id", "someClient")
+        .set("durable-client-timeout", "200")
+        .setPoolSubscriptionEnabled(true).addPoolLocator(locatorHost, locatorPort).create();
+    CqAttributesFactory cqAttributesFactory = new CqAttributesFactory();
+    cqAttributesFactory.addCqListener(new GeodeKafkaSourceListener());
+    CqAttributes cqAttributes = cqAttributesFactory.create();
+    try {
+      clientCache.getQueryService().newCq("kafkaCQFor" + regionName,
+          "select * from /" + regionName, cqAttributes, true);
+    } catch (CqExistsException | CqException e) {
+      e.printStackTrace();
+    }
+    clientCache.readyForEvents();
+  }
+
+  private static class GeodeKafkaSourceListener implements CqListener {
+
+    @Override
+    public void onEvent(CqEvent aCqEvent) {
+      try {
+        eventBuffer.offer(aCqEvent, 2, TimeUnit.SECONDS);
+      } catch (InterruptedException e) {
+        // Retry until the event fits. The original loop broke only when the
+        // offer FAILED, dropping the event on success; break once it succeeds.
+        while (true) {
+          try {
+            if (eventBuffer.offer(aCqEvent, 2, TimeUnit.SECONDS)) {
+              break;
+            }
+          } catch (InterruptedException ex) {
+            ex.printStackTrace();
+          }
+          System.out.println("GeodeKafkaSource Queue is full");
+        }
+        Thread.currentThread().interrupt();
+      }
+    }
+
+    @Override
+    public void onError(CqEvent aCqEvent) {
+      // Errors are currently ignored.
+    }
+  }
+}
diff --git a/src/test/java/kafka/GeodeKafkaTestCluster.java b/src/test/java/kafka/GeodeKafkaTestCluster.java
new file mode 100644
index 0000000..fbaba2c
--- /dev/null
+++ b/src/test/java/kafka/GeodeKafkaTestCluster.java
@@ -0,0 +1,108 @@
+package kafka;
+
+import org.apache.geode.cache.Region;
+import org.apache.geode.cache.client.ClientCache;
+import org.apache.geode.cache.client.ClientCacheFactory;
+import org.apache.geode.cache.client.ClientRegionShortcut;
+import org.apache.kafka.connect.runtime.ConnectorConfig;
+import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.io.IOException;
+import java.util.Properties;
+
+/**
+ * Integration-test harness that boots ZooKeeper, Kafka and Geode (the latter
+ * in forked JVMs) so connector behavior can be exercised end to end.
+ */
+public class GeodeKafkaTestCluster {
+
+  @ClassRule
+  public static TemporaryFolder temporaryFolder = new TemporaryFolder();
+  // When true, data dirs go to fixed /tmp paths for post-run inspection.
+  private static boolean debug = true;
+
+  private static ZooKeeperLocalCluster zooKeeperLocalCluster;
+  private static KafkaLocalCluster kafkaLocalCluster;
+  private static GeodeLocalCluster geodeLocalCluster;
+
+  @BeforeClass
+  public static void setup() throws IOException, QuorumPeerConfig.ConfigException, InterruptedException {
+    startZooKeeper();
+    startKafka();
+    startGeode();
+  }
+
+  @AfterClass
+  public static void shutdown() {
+    // Guard against setup() having failed before a cluster was created.
+    if (kafkaLocalCluster != null) {
+      kafkaLocalCluster.stop();
+    }
+    if (geodeLocalCluster != null) {
+      geodeLocalCluster.stop();
+    }
+  }
+
+  private ClientCache createGeodeClient() {
+    return new ClientCacheFactory().addPoolLocator("127.0.0.1", 10334).create();
+  }
+
+  private static void startZooKeeper() throws IOException, QuorumPeerConfig.ConfigException {
+    zooKeeperLocalCluster = new ZooKeeperLocalCluster(getZooKeeperProperties());
+    zooKeeperLocalCluster.start();
+  }
+
+  private static void startKafka() throws IOException, InterruptedException, QuorumPeerConfig.ConfigException {
+    kafkaLocalCluster = new KafkaLocalCluster(getKafkaConfig());
+    kafkaLocalCluster.start();
+  }
+
+  private static void startGeode() throws IOException, InterruptedException {
+    geodeLocalCluster = new GeodeLocalCluster();
+    geodeLocalCluster.start();
+  }
+
+  private static Properties getZooKeeperProperties() throws IOException {
+    Properties properties = new Properties();
+    properties.setProperty("dataDir",
+        debug ? "/tmp/zookeeper" : temporaryFolder.newFolder("zookeeper").getAbsolutePath());
+    properties.setProperty("clientPort", "2181");
+    properties.setProperty("tickTime", "2000");
+    return properties;
+  }
+
+  private static Properties getKafkaConfig() throws IOException {
+    int BROKER_PORT = 8888;
+    Properties props = new Properties();
+
+    props.put("broker.id", "0");
+    props.put("zookeeper.connect", "localhost:2181");
+    props.put("host.name", "localHost");
+    props.put("port", BROKER_PORT);
+    props.put("offsets.topic.replication.factor", "1");
+    props.put("log.dir", debug ? "/tmp/kafka" : temporaryFolder.newFolder("kafka").getAbsolutePath());
+    props.put("log.flush.interval.messages", "1");
+    props.put("log.flush.interval.ms", "10");
+
+    // Connector configs (consumed by the Connect runtime, not the broker).
+    props.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, GeodeKafkaSource.class.getName());
+    props.put(ConnectorConfig.NAME_CONFIG, "geode-kafka-source-connector");
+    props.put(ConnectorConfig.TASKS_MAX_CONFIG, "1");
+
+    return props;
+  }
+
+  @Test
+  public void testX() throws InterruptedException {
+    ClientCache client = createGeodeClient();
+    Region region = client.createClientRegionFactory(ClientRegionShortcut.PROXY).create("someRegion");
+    region.put("JASON KEY", "JASON VALUE");
+    System.out.println("TEST COMPLETE!");
+  }
+
+}
diff --git a/src/test/java/kafka/GeodeLocalCluster.java b/src/test/java/kafka/GeodeLocalCluster.java
new file mode 100644
index 0000000..fd72dec
--- /dev/null
+++ b/src/test/java/kafka/GeodeLocalCluster.java
@@ -0,0 +1,33 @@
+package kafka;
+
+import java.io.IOException;
+
+/**
+ * Starts a Geode locator and cache server in separate forked JVMs.
+ */
+public class GeodeLocalCluster {
+
+  private JavaProcess locatorProcess;
+  private JavaProcess serverProcess;
+
+  public GeodeLocalCluster() {
+    locatorProcess = new JavaProcess(LocatorLauncherWrapper.class);
+    serverProcess = new JavaProcess(ServerLauncherWrapper.class);
+  }
+
+  // Launches the locator, then the server, pausing generously for each
+  // forked JVM to come up. NOTE(review): the port arguments appear to be
+  // ignored by the launcher wrappers, which hard-code their ports — confirm.
+  public void start() throws IOException, InterruptedException {
+    locatorProcess.exec("10334");
+    Thread.sleep(30000);
+    System.out.println("is alive?" + locatorProcess.process.isAlive());
+    serverProcess.exec("40404");
+    Thread.sleep(30000);
+  }
+
+  public void stop() {
+    serverProcess.destroy();
+    locatorProcess.destroy();
+  }
+}
diff --git a/src/test/java/kafka/JavaProcess.java b/src/test/java/kafka/JavaProcess.java
new file mode 100644
index 0000000..30edfef
--- /dev/null
+++ b/src/test/java/kafka/JavaProcess.java
@@ -0,0 +1,46 @@
+package kafka;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Launches a class's main() in a separate JVM, inheriting this JVM's
+ * classpath and standard streams.
+ */
+public class JavaProcess {
+
+  public Process process;
+  Class classWithMain;
+
+  public JavaProcess(Class classWithMain) {
+    this.classWithMain = classWithMain;
+  }
+
+  /**
+   * Forks a new JVM running the configured class. Each argument is passed as
+   * its own argv element (previously all args were concatenated, with no
+   * separator, into a single argument).
+   */
+  public void exec(String... args) throws IOException, InterruptedException {
+    String java = System.getProperty("java.home") + File.separator + "bin" + File.separator + "java";
+    String classpath = System.getProperty("java.class.path");
+    List<String> command = new ArrayList<>();
+    command.add(java);
+    command.add("-cp");
+    command.add(classpath);
+    command.add(classWithMain.getName());
+    for (String arg : args) {
+      command.add(arg);
+    }
+    process = new ProcessBuilder(command).inheritIO().start();
+  }
+
+  public void waitFor() throws InterruptedException {
+    process.waitFor();
+  }
+
+  public void destroy() {
+    process.destroy();
+  }
+}
diff --git a/src/test/java/kafka/KafkaLocalCluster.java b/src/test/java/kafka/KafkaLocalCluster.java
new file mode 100644
index 0000000..cd2a3df
--- /dev/null
+++ b/src/test/java/kafka/KafkaLocalCluster.java
@@ -0,0 +1,38 @@
+package kafka;
+
+import kafka.server.KafkaConfig;
+import kafka.server.KafkaServerStartable;
+
+import java.io.IOException;
+import java.util.Properties;
+
+/**
+ * Wraps an embedded Kafka broker for tests.
+ */
+public class KafkaLocalCluster {
+
+  KafkaServerStartable kafka;
+
+  public KafkaLocalCluster(Properties kafkaProperties) throws IOException, InterruptedException {
+    KafkaConfig kafkaConfig = new KafkaConfig(kafkaProperties);
+    kafka = new KafkaServerStartable(kafkaConfig);
+  }
+
+  /**
+   * Starts the broker. Failures are logged (with the full stack trace) rather
+   * than rethrown, preserving the original best-effort behavior.
+   */
+  public void start() {
+    try {
+      kafka.startup();
+      System.out.println("Kafka started up");
+    } catch (Throwable t) {
+      // Previously only println(t); keep the swallow but preserve the trace.
+      t.printStackTrace(System.err);
+    }
+  }
+
+  public void stop() {
+    kafka.shutdown();
+  }
+}
diff --git a/src/test/java/kafka/LocatorLauncherWrapper.java b/src/test/java/kafka/LocatorLauncherWrapper.java
new file mode 100644
index 0000000..fe351be
--- /dev/null
+++ b/src/test/java/kafka/LocatorLauncherWrapper.java
@@ -0,0 +1,21 @@
+package kafka;
+
+import org.apache.geode.distributed.ConfigurationProperties;
+import org.apache.geode.distributed.Locator;
+
+import java.io.IOException;
+import java.util.Properties;
+
+/**
+ * Main-class wrapper that starts a Geode locator (and distributed system)
+ * on port 10334. Run in a forked JVM by {@code JavaProcess}.
+ */
+public class LocatorLauncherWrapper {
+
+  public static void main(String[] args) throws IOException {
+    Properties properties = new Properties();
+    properties.setProperty(ConfigurationProperties.NAME, "locator1");
+    // Port kept in sync with GeodeLocalCluster and the test clients (10334).
+    Locator.startLocatorAndDS(10334, null, properties);
+  }
+}
diff --git a/src/test/java/kafka/ServerLauncherWrapper.java b/src/test/java/kafka/ServerLauncherWrapper.java
new file mode 100644
index 0000000..68161c2
--- /dev/null
+++ b/src/test/java/kafka/ServerLauncherWrapper.java
@@ -0,0 +1,33 @@
+package kafka;
+
+import org.apache.geode.cache.Cache;
+import org.apache.geode.cache.CacheFactory;
+import org.apache.geode.cache.server.CacheServer;
+import org.apache.geode.distributed.ConfigurationProperties;
+
+import java.io.IOException;
+import java.util.Properties;
+
+/**
+ * Main-class wrapper that starts a Geode cache server joined to the test
+ * locator on localhost[10334]. Run in a forked JVM by {@code JavaProcess}.
+ */
+public class ServerLauncherWrapper {
+
+  public static void main(String... args) throws IOException {
+    Properties properties = new Properties();
+    String locatorString = "localhost[10334]";
+    Cache cache = new CacheFactory(properties)
+        .set(ConfigurationProperties.LOCATORS, locatorString)
+        .set(ConfigurationProperties.NAME, "server-1")
+        // Relative path so the log lands in the forked JVM's working dir
+        // instead of a hard-coded developer home directory.
+        .set(ConfigurationProperties.LOG_FILE, "server1.log")
+        .set(ConfigurationProperties.LOG_LEVEL, "info")
+        .create();
+    CacheServer cacheServer = cache.addCacheServer();
+    cacheServer.setPort(0); // 0 = pick an ephemeral port
+    cacheServer.setMaxConnections(Integer.MAX_VALUE);
+    cacheServer.start();
+  }
+}
diff --git a/src/test/java/kafka/ZooKeeperLocalCluster.java b/src/test/java/kafka/ZooKeeperLocalCluster.java
new file mode 100644
index 0000000..8b23f53
--- /dev/null
+++ b/src/test/java/kafka/ZooKeeperLocalCluster.java
@@ -0,0 +1,51 @@
+package kafka;
+
+import org.apache.zookeeper.server.ServerConfig;
+import org.apache.zookeeper.server.ZooKeeperServerMain;
+import org.apache.zookeeper.server.admin.AdminServer;
+import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
+
+import java.io.IOException;
+import java.util.Properties;
+
+/**
+ * Runs an in-process ZooKeeper server on a background thread for tests.
+ */
+public class ZooKeeperLocalCluster {
+
+  ZooKeeperServerMain zooKeeperServer;
+  private Properties zooKeeperProperties;
+  Thread zooKeeperThread;
+
+  public ZooKeeperLocalCluster(Properties zooKeeperProperties) {
+    this.zooKeeperProperties = zooKeeperProperties;
+  }
+
+  /**
+   * Starts ZooKeeper with the supplied properties. runFromConfig() blocks,
+   * so it runs on a dedicated daemon thread.
+   */
+  public void start() throws IOException, QuorumPeerConfig.ConfigException {
+    QuorumPeerConfig quorumConfiguration = new QuorumPeerConfig();
+    quorumConfiguration.parseProperties(zooKeeperProperties);
+
+    zooKeeperServer = new ZooKeeperServerMain();
+    final ServerConfig configuration = new ServerConfig();
+    configuration.readFrom(quorumConfiguration);
+
+    zooKeeperThread = new Thread() {
+      public void run() {
+        try {
+          zooKeeperServer.runFromConfig(configuration);
+        } catch (IOException | AdminServer.AdminServerException e) {
+          System.out.println("ZooKeeper Failed");
+          e.printStackTrace(System.err);
+        }
+      }
+    };
+    // Daemon so this class (which has no stop()) cannot keep the JVM alive.
+    zooKeeperThread.setDaemon(true);
+    zooKeeperThread.start();
+    System.out.println("ZooKeeper thread started");
+  }
+}