IGNITE-15541 Move aws, gce, azure IP finders to the ignite-extensions
diff --git a/modules/aws-ext/README.txt b/modules/aws-ext/README.txt
new file mode 100644
index 0000000..b67972b
--- /dev/null
+++ b/modules/aws-ext/README.txt
@@ -0,0 +1,28 @@
+Apache Ignite AWS Module
+------------------------
+
+Apache Ignite AWS module provides S3-based implementations of checkpoint SPI and IP finder for TCP discovery.
+
+Importing AWS Module In Maven Project
+-------------------------------------
+
+If you are using Maven to manage dependencies of your project, you can add AWS module
+dependency like this (replace '${ignite.version}' with actual Ignite version you are
+interested in):
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+                        http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    ...
+    <dependencies>
+        ...
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-aws-ext</artifactId>
+            <version>${ignite.version}</version>
+        </dependency>
+        ...
+    </dependencies>
+    ...
+</project>
diff --git a/modules/aws-ext/licenses/apache-2.0.txt b/modules/aws-ext/licenses/apache-2.0.txt
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/modules/aws-ext/licenses/apache-2.0.txt
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/modules/aws-ext/pom.xml b/modules/aws-ext/pom.xml
new file mode 100644
index 0000000..4ab7267
--- /dev/null
+++ b/modules/aws-ext/pom.xml
@@ -0,0 +1,216 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.apache.ignite</groupId>
+        <artifactId>ignite-extensions-parent</artifactId>
+        <version>1</version>
+        <relativePath>../../parent</relativePath>
+    </parent>
+
+    <artifactId>ignite-aws-ext</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+    <url>https://ignite.apache.org</url>
+
+    <properties>
+        <spring.version>5.3.8</spring.version>
+        <aws.sdk.version>1.11.75</aws.sdk.version>
+        <aws.encryption.sdk.version>1.3.2</aws.encryption.sdk.version>
+        <bouncycastle.version>1.69</bouncycastle.version>
+        <httpclient.version>4.5.1</httpclient.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-core</artifactId>
+            <version>${ignite.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.amazonaws</groupId>
+            <artifactId>aws-java-sdk-core</artifactId>
+            <version>${aws.sdk.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.amazonaws</groupId>
+            <artifactId>aws-java-sdk-s3</artifactId>
+            <version>${aws.sdk.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.amazonaws</groupId>
+            <artifactId>aws-java-sdk-ec2</artifactId>
+            <version>${aws.sdk.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.amazonaws</groupId>
+            <artifactId>aws-java-sdk-elasticloadbalancing</artifactId>
+            <version>${aws.sdk.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.amazonaws</groupId>
+            <artifactId>aws-java-sdk-elasticloadbalancingv2</artifactId>
+            <version>${aws.sdk.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.amazonaws</groupId>
+            <artifactId>aws-java-sdk-kms</artifactId>
+            <version>${aws.sdk.version}</version>
+        </dependency>
+
+        <!-- Do not remove com.fasterxml.jackson.core:jackson-core it is required at runtime -->
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <!-- Do not remove com.fasterxml.jackson.core:jackson-annotations it is required at runtime -->
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-annotations</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <!-- Do not remove com.fasterxml.jackson.core:jackson-databind it is required at runtime -->
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.amazonaws</groupId>
+            <artifactId>aws-encryption-sdk-java</artifactId>
+            <version>${aws.encryption.sdk.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.bouncycastle</groupId>
+                    <artifactId>bcprov-ext-jdk15on</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.bouncycastle</groupId>
+            <artifactId>bcprov-ext-jdk15on</artifactId>
+            <version>${bouncycastle.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>joda-time</groupId>
+            <artifactId>joda-time</artifactId>
+            <version>2.8.1</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+            <version>${httpclient.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpcore</artifactId>
+            <version>${httpcore.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>commons-codec</groupId>
+            <artifactId>commons-codec</artifactId>
+            <version>${commons.codec.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-core</artifactId>
+            <version>${ignite.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-tools</artifactId>
+            <version>${ignite.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-core</artifactId>
+            <version>${mockito.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-beans</artifactId>
+            <version>${spring.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-context</artifactId>
+            <version>${spring.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-core</artifactId>
+            <version>${spring.version}</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <!-- Generate the OSGi MANIFEST.MF for this bundle. -->
+            <plugin>
+                <groupId>org.apache.felix</groupId>
+                <artifactId>maven-bundle-plugin</artifactId>
+            </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-deploy-plugin</artifactId>
+                <configuration>
+                    <skip>false</skip>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointData.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointData.java
new file mode 100644
index 0000000..788271e
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointData.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
+
+import java.io.IOException;
+import java.io.InputStream;
+import org.apache.ignite.internal.util.typedef.internal.S;
+import org.apache.ignite.internal.util.typedef.internal.U;
+
+/**
+ * Wrapper of all checkpoints that are saved to the S3. It
+ * extends every checkpoint with expiration time and the host name
+ * which created this checkpoint.
+ * <p>
+ * Host name is used by {@link S3CheckpointSpi} SPI to give node
+ * correct files if it is restarted.
+ */
+class S3CheckpointData {
+    /** Checkpoint data. */
+    private final byte[] state;
+
+    /** Checkpoint expiration time. */
+    private final long expTime;
+
+    /** Checkpoint key. */
+    private final String key;
+
+    /**
+     * Creates new instance of checkpoint data wrapper.
+     *
+     * @param state Checkpoint data.
+     * @param expTime Checkpoint expiration time in milliseconds.
+     * @param key Key of checkpoint.
+     */
+    S3CheckpointData(byte[] state, long expTime, String key) {
+        assert expTime >= 0;
+
+        this.state = state;
+        this.expTime = expTime;
+        this.key = key;
+    }
+
+    /**
+     * Gets checkpoint data.
+     *
+     * @return Checkpoint data.
+     */
+    byte[] getState() {
+        return state;
+    }
+
+    /**
+     * Gets checkpoint expiration time.
+     *
+     * @return Expire time in milliseconds.
+     */
+    long getExpireTime() {
+        return expTime;
+    }
+
+    /**
+     * Gets key of checkpoint.
+     *
+     * @return Key of checkpoint.
+     */
+    public String getKey() {
+        return key;
+    }
+
+    /**
+     * @return Serialized checkpoint data.
+     */
+    public byte[] toBytes() {
+        byte[] keyBytes = key.getBytes();
+
+        byte[] bytes = new byte[4 + state.length + 8 + 4 + keyBytes.length];
+
+        U.intToBytes(state.length, bytes, 0);
+        U.arrayCopy(state, 0, bytes, 4, state.length);
+        U.longToBytes(expTime, bytes, 4 + state.length);
+        U.intToBytes(keyBytes.length, bytes, 4 + state.length + 8);
+        U.arrayCopy(keyBytes, 0, bytes, 4 + state.length + 8 + 4, keyBytes.length);
+
+        return bytes;
+    }
+
+    /**
+     * @param in Input stream.
+     * @return Checkpoint data.
+     * @throws IOException In case of error.
+     */
+    public static S3CheckpointData fromStream(InputStream in) throws IOException {
+        byte[] buf = new byte[8];
+
+        read(in, buf, 4);
+
+        byte[] state = new byte[U.bytesToInt(buf, 0)];
+
+        read(in, state, state.length);
+
+        read(in, buf, 8);
+
+        long expTime = U.bytesToLong(buf, 0);
+
+        read(in, buf, 4);
+
+        byte[] keyBytes = new byte[U.bytesToInt(buf, 0)];
+
+        read(in, keyBytes, keyBytes.length);
+
+        return new S3CheckpointData(state, expTime, new String(keyBytes));
+    }
+
+    /**
+     * @param in Input stream.
+     * @param buf Buffer.
+     * @param len Number of bytes to read.
+     * @throws IOException In case of error.
+     */
+    private static void read(InputStream in, byte[] buf, int len) throws IOException {
+        int cnt = in.read(buf, 0, len);
+
+        if (cnt < len)
+            throw new IOException("End of stream reached.");
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(S3CheckpointData.class, this);
+    }
+}
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpi.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpi.java
new file mode 100644
index 0000000..d56dc68
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpi.java
@@ -0,0 +1,830 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.Map;
+import com.amazonaws.AmazonClientException;
+import com.amazonaws.AmazonServiceException;
+import com.amazonaws.ClientConfiguration;
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3Client;
+import com.amazonaws.services.s3.model.ObjectListing;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.model.S3ObjectSummary;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCheckedException;
+import org.apache.ignite.IgniteLogger;
+import org.apache.ignite.internal.IgniteInterruptedCheckedException;
+import org.apache.ignite.internal.util.IgniteUtils;
+import org.apache.ignite.internal.util.tostring.GridToStringExclude;
+import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.internal.util.typedef.internal.S;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.resources.IgniteInstanceResource;
+import org.apache.ignite.resources.LoggerResource;
+import org.apache.ignite.spi.IgniteSpiAdapter;
+import org.apache.ignite.spi.IgniteSpiConfiguration;
+import org.apache.ignite.spi.IgniteSpiException;
+import org.apache.ignite.spi.IgniteSpiMBeanAdapter;
+import org.apache.ignite.spi.IgniteSpiMultipleInstancesSupport;
+import org.apache.ignite.spi.IgniteSpiThread;
+import org.apache.ignite.spi.checkpoint.CheckpointListener;
+import org.apache.ignite.spi.checkpoint.CheckpointSpi;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * This class defines Amazon S3-based implementation for checkpoint SPI.
+ * <p>
+ * For information about Amazon S3 visit <a href="http://aws.amazon.com">aws.amazon.com</a>.
+ * <p>
+ * <h1 class="header">Configuration</h1>
+ * <h2 class="header">Mandatory</h2>
+ * This SPI has one mandatory configuration parameter:
+ * <ul>
+ *      <li>AWS credentials (see {@link #setAwsCredentials(AWSCredentials)}
+ * </ul>
+ * <h2 class="header">Optional</h2>
+ * This SPI has following optional configuration parameters:
+ * <ul>
+ *      <li>Bucket name suffix (see {@link #setBucketNameSuffix(String)})</li>
+ *      <li>Client configuration (see {@link #setClientConfiguration(ClientConfiguration)})</li>
+ *      <li>Bucket endpoint (see {@link #setBucketEndpoint(String)})</li>
+ *      <li>Server side encryption algorithm (see {@link #setSSEAlgorithm(String)})</li>
+ * </ul>
+ * <h2 class="header">Java Example</h2>
+ * {@link S3CheckpointSpi} can be configured as follows:
+ * <pre name="code" class="java">
+ * IgniteConfiguration cfg = new IgniteConfiguration();
+ *
+ * S3CheckpointSpi spi = new S3CheckpointSpi();
+ *
+ * AWSCredentials cred = new BasicAWSCredentials(YOUR_ACCESS_KEY_ID, YOUR_SECRET_ACCESS_KEY);
+ *
+ * spi.setAwsCredentials(cred);
+ *
+ * spi.setBucketNameSuffix("checkpoints");
+ *
+ * // Override default checkpoint SPI.
+ * cfg.setCheckpointSpi(spi);
+ *
+ * // Start grid.
+ * G.start(cfg);
+ * </pre>
+ * <h2 class="header">Spring Example</h2>
+ * {@link S3CheckpointSpi} can be configured from Spring XML configuration file:
+ * <pre name="code" class="xml">
+ * &lt;bean id="grid.custom.cfg" class="org.apache.ignite.configuration.IgniteConfiguration" singleton="true"&gt;
+ *     ...
+ *        &lt;property name=&quot;checkpointSpi&quot;&gt;
+ *            &lt;bean class=&quot;org.apache.ignite.spi.checkpoint.s3.S3CheckpointSpi&quot;&gt;
+ *                &lt;property name=&quot;awsCredentials&quot;&gt;
+ *                    &lt;bean class=&quot;com.amazonaws.auth.BasicAWSCredentials&quot;&gt;
+ *                        &lt;constructor-arg value=&quot;YOUR_ACCESS_KEY_ID&quot; /&gt;
+ *                        &lt;constructor-arg value=&quot;YOUR_SECRET_ACCESS_KEY&quot; /&gt;
+ *                    &lt;/bean&gt;
+ *                &lt;/property&gt;
+ *            &lt;/bean&gt;
+ *        &lt;/property&gt;
+ *     ...
+ * &lt;/bean&gt;
+ * </pre>
+ * <p>
+ * Note that storing data in AWS S3 service will result in charges to your AWS account.
+ * Choose another implementation of {@link org.apache.ignite.spi.checkpoint.CheckpointSpi} for local or
+ * home network tests.
+ * <p>
+ * <img src="http://ignite.apache.org/images/spring-small.png">
+ * <br>
+ * For information about Spring framework visit <a href="http://www.springframework.org/">www.springframework.org</a>
+ * @see org.apache.ignite.spi.checkpoint.CheckpointSpi
+ */
+@IgniteSpiMultipleInstancesSupport(true)
+public class S3CheckpointSpi extends IgniteSpiAdapter implements CheckpointSpi {
+    /** Logger. */
+    @SuppressWarnings({"FieldAccessedSynchronizedAndUnsynchronized"})
+    @LoggerResource
+    private IgniteLogger log;
+
+    /** Ignite instance. */
+    @IgniteInstanceResource
+    private Ignite ignite;
+
+    /** Task that takes care about outdated files. */
+    private S3TimeoutWorker timeoutWrk;
+
+    /** Listener. */
+    private CheckpointListener lsnr;
+
+    /** Prefix to use in bucket name generation. */
+    public static final String BUCKET_NAME_PREFIX = "ignite-checkpoint-";
+
+    /** Suffix to use in bucket name generation. */
+    public static final String DFLT_BUCKET_NAME_SUFFIX = "default-bucket";
+
+    /** Client to interact with S3 storage. */
+    @GridToStringExclude
+    private AmazonS3 s3;
+
+    /** Bucket name suffix (set by user). */
+    private String bucketNameSuffix;
+
+    /** Bucket name (generated). */
+    private String bucketName;
+
+    /** Bucket endpoint (set by user). */
+    @Nullable private String bucketEndpoint;
+
+    /** Server side encryption algorithm */
+    @Nullable private String sseAlg;
+
+    /** Amazon client configuration. */
+    private ClientConfiguration cfg;
+
+    /** AWS Credentials. */
+    @GridToStringExclude
+    private AWSCredentials cred;
+
+    /** Mutex. */
+    private final Object mux = new Object();
+
+    /**
+     * Gets S3 bucket name to use.
+     *
+     * @return S3 bucket name to use.
+     */
+    public String getBucketName() {
+        return bucketName;
+    }
+
+    /**
+     * Gets S3 bucket endpoint to use.
+     *
+     * @return S3 bucket endpoint to use.
+     */
+    @Nullable public String getBucketEndpoint() {
+        return bucketEndpoint;
+    }
+
+    /**
+     * Gets S3 server-side encryption algorithm.
+     *
+     * @return S3 server-side encryption algorithm to use.
+     */
+    @Nullable public String getSSEAlgorithm() {
+        return sseAlg;
+    }
+
+    /**
+     * Gets S3 access key.
+     *
+     * @return S3 access key.
+     */
+    public String getAccessKey() {
+        return cred.getAWSAccessKeyId();
+    }
+
+    /**
+     * Gets S3 secret key.
+     * <p>
+     * Not exposed via {@link S3CheckpointSpiMBean} — keeps the secret credential out of JMX.
+     *
+     * @return S3 secret key.
+     */
+    public String getSecretAccessKey() {
+        return cred.getAWSSecretKey();
+    }
+
+    /**
+     * Gets HTTP proxy host.
+     *
+     * @return HTTP proxy host.
+     */
+    public String getProxyHost() {
+        return cfg.getProxyHost();
+    }
+
+    /**
+     * Gets HTTP proxy port.
+     *
+     * @return HTTP proxy port.
+     */
+    public int getProxyPort() {
+        return cfg.getProxyPort();
+    }
+
+    /**
+     * Gets HTTP proxy user name.
+     *
+     * @return HTTP proxy user name.
+     */
+    public String getProxyUsername() {
+        return cfg.getProxyUsername();
+    }
+
+    /**
+     * Gets HTTP proxy password.
+     *
+     * @return HTTP proxy password.
+     */
+    public String getProxyPassword() {
+        return cfg.getProxyPassword();
+    }
+
+    /**
+     * Sets bucket name suffix.
+     *
+     * @param bucketNameSuffix Bucket name suffix.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = true)
+    public S3CheckpointSpi setBucketNameSuffix(String bucketNameSuffix) {
+        this.bucketNameSuffix = bucketNameSuffix;
+
+        return this;
+    }
+
+    /**
+     * Sets bucket endpoint.
+     * If the endpoint is not set then S3CheckpointSpi will go to each region to find a corresponding bucket.
+     * For information about possible endpoint names visit
+     * <a href="http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region">docs.aws.amazon.com</a>
+     *
+     * @param bucketEndpoint Bucket endpoint, for example, {@code s3.us-east-2.amazonaws.com}.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = true)
+    public S3CheckpointSpi setBucketEndpoint(String bucketEndpoint) {
+        this.bucketEndpoint = bucketEndpoint;
+
+        return this;
+    }
+
+    /**
+     * Sets server-side encryption algorithm for Amazon S3-managed encryption keys.
+     * For information about possible S3-managed encryption keys visit
+     * <a href="http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingServerSideEncryption.html">docs.aws.amazon.com</a>.
+     *
+     * @param sseAlg Server-side encryption algorithm, for example, AES256 or SSES3.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = true)
+    public S3CheckpointSpi setSSEAlgorithm(String sseAlg) {
+        this.sseAlg = sseAlg;
+
+        return this;
+    }
+
+    /**
+     * Sets Amazon client configuration.
+     * <p>
+     * For details refer to Amazon S3 API reference.
+     *
+     * @param cfg Amazon client configuration.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = true)
+    public S3CheckpointSpi setClientConfiguration(ClientConfiguration cfg) {
+        this.cfg = cfg;
+
+        return this;
+    }
+
+    /**
+     * Sets AWS credentials.
+     * <p>
+     * For details refer to Amazon S3 API reference.
+     *
+     * @param cred AWS credentials.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public S3CheckpointSpi setAwsCredentials(AWSCredentials cred) {
+        this.cred = cred;
+
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override public void spiStart(String igniteInstanceName) throws IgniteSpiException {
+        // Start SPI start stopwatch.
+        startStopwatch();
+
+        // AWS credentials are the single mandatory configuration parameter.
+        assertParameter(cred != null, "awsCredentials != null");
+
+        if (log.isDebugEnabled()) {
+            log.debug(configInfo("awsCredentials", cred));
+            log.debug(configInfo("clientConfiguration", cfg));
+            log.debug(configInfo("bucketNameSuffix", bucketNameSuffix));
+            log.debug(configInfo("bucketEndpoint", bucketEndpoint));
+            log.debug(configInfo("SSEAlgorithm", sseAlg));
+        }
+
+        if (cfg == null)
+            U.warn(log, "Amazon client configuration is not set (will use default).");
+
+        // Bucket name is always built from the fixed prefix plus the (possibly default) suffix.
+        if (F.isEmpty(bucketNameSuffix)) {
+            U.warn(log, "Bucket name suffix is null or empty (will use default bucket name).");
+
+            bucketName = BUCKET_NAME_PREFIX + DFLT_BUCKET_NAME_SUFFIX;
+        }
+        else
+            bucketName = BUCKET_NAME_PREFIX + bucketNameSuffix;
+
+        s3 = cfg != null ? new AmazonS3Client(cred, cfg) : new AmazonS3Client(cred);
+
+        if (!F.isEmpty(bucketEndpoint))
+            s3.setEndpoint(bucketEndpoint);
+
+        if (!s3.doesBucketExist(bucketName)) {
+            try {
+                s3.createBucket(bucketName);
+
+                if (log.isDebugEnabled())
+                    log.debug("Created S3 bucket: " + bucketName);
+
+                // Bucket creation on S3 is eventually consistent: poll until it becomes visible.
+                while (!s3.doesBucketExist(bucketName))
+                    try {
+                        U.sleep(200);
+                    }
+                    catch (IgniteInterruptedCheckedException e) {
+                        throw new IgniteSpiException("Thread has been interrupted.", e);
+                    }
+            }
+            catch (AmazonClientException e) {
+                // Another node may have created the bucket concurrently; only fail if it still doesn't exist.
+                try {
+                    if (!s3.doesBucketExist(bucketName))
+                        throw new IgniteSpiException("Failed to create bucket: " + bucketName, e);
+                }
+                catch (AmazonClientException ignored) {
+                    // Existence re-check itself failed; report the original creation error.
+                    throw new IgniteSpiException("Failed to create bucket: " + bucketName, e);
+                }
+            }
+        }
+
+        Collection<S3TimeData> s3TimeDataLst = new LinkedList<>();
+
+        // Pre-load metadata of all checkpoints already present in the bucket so the
+        // timeout worker can track their expiration as well.
+        try {
+            ObjectListing list = s3.listObjects(bucketName);
+
+            while (true) {
+                for (S3ObjectSummary sum : list.getObjectSummaries()) {
+                    S3CheckpointData data = read(sum.getKey());
+
+                    if (data != null) {
+                        s3TimeDataLst.add(new S3TimeData(data.getExpireTime(), data.getKey()));
+
+                        if (log.isDebugEnabled())
+                            log.debug("Registered existing checkpoint from key: " + data.getKey());
+                    }
+                }
+
+                // Listing is paged; keep fetching until the last page.
+                if (list.isTruncated())
+                    list = s3.listNextBatchOfObjects(list);
+                else
+                    break;
+            }
+        }
+        catch (AmazonClientException e) {
+            throw new IgniteSpiException("Failed to read checkpoint bucket: " + bucketName, e);
+        }
+        catch (IgniteCheckedException e) {
+            throw new IgniteSpiException("Failed to marshal/unmarshal objects in bucket: " + bucketName, e);
+        }
+
+        // Worker tracks expiration of checkpoints found in the bucket as well as ones saved later by this node.
+        timeoutWrk = new S3TimeoutWorker();
+
+        timeoutWrk.add(s3TimeDataLst);
+
+        timeoutWrk.start();
+
+        registerMBean(igniteInstanceName, new S3CheckpointSpiMBeanImpl(this), S3CheckpointSpiMBean.class);
+
+        // Ack ok start.
+        if (log.isDebugEnabled())
+            log.debug(startInfo());
+    }
+
+    /** {@inheritDoc} */
+    @Override public void spiStop() throws IgniteSpiException {
+        if (timeoutWrk != null) {
+            IgniteUtils.interrupt(timeoutWrk);
+            IgniteUtils.join(timeoutWrk, log);
+        }
+
+        unregisterMBean();
+
+        // Ack ok stop.
+        if (log.isDebugEnabled())
+            log.debug(stopInfo());
+    }
+
+    /** {@inheritDoc} */
+    @Override public byte[] loadCheckpoint(String key) throws IgniteSpiException {
+        assert !F.isEmpty(key);
+
+        try {
+            S3CheckpointData data = read(key);
+
+            // No such checkpoint in the bucket.
+            if (data == null)
+                return null;
+
+            // Checkpoint exists but has already expired (zero expire time means "never expires").
+            if (data.getExpireTime() != 0 && data.getExpireTime() <= U.currentTimeMillis())
+                return null;
+
+            return data.getState();
+        }
+        catch (AmazonClientException e) {
+            throw new IgniteSpiException("Failed to read checkpoint key: " + key, e);
+        }
+        catch (IgniteCheckedException e) {
+            throw new IgniteSpiException("Failed to marshal/unmarshal objects in checkpoint key: " + key, e);
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public boolean saveCheckpoint(String key, byte[] state, long timeout, boolean overwrite)
+        throws IgniteSpiException {
+        assert !F.isEmpty(key);
+
+        // Zero expire time means the checkpoint never expires.
+        long expireTime = 0;
+
+        if (timeout > 0) {
+            expireTime = U.currentTimeMillis() + timeout;
+
+            // Clamp on overflow for very large timeouts.
+            if (expireTime < 0)
+                expireTime = Long.MAX_VALUE;
+        }
+
+        try {
+            // NOTE(review): check-then-write is not atomic across nodes; a concurrent writer
+            // may slip in between hasKey() and write(). Acceptable for checkpoint semantics — confirm.
+            if (hasKey(key)) {
+                if (!overwrite)
+                    return false;
+
+                if (log.isDebugEnabled())
+                    log.debug("Overriding existing key: " + key);
+            }
+
+            S3CheckpointData data = new S3CheckpointData(state, expireTime, key);
+
+            write(data);
+        }
+        catch (AmazonClientException e) {
+            throw new IgniteSpiException("Failed to write checkpoint data [key=" + key + ", state=" +
+                Arrays.toString(state) + ']', e);
+        }
+        catch (IgniteCheckedException e) {
+            throw new IgniteSpiException("Failed to marshal checkpoint data [key=" + key + ", state=" +
+                Arrays.toString(state) + ']', e);
+        }
+
+        // Register with the timeout worker so the entry is purged after it expires.
+        if (timeout > 0)
+            timeoutWrk.add(new S3TimeData(expireTime, key));
+
+        return true;
+    }
+
+    /** {@inheritDoc} */
+    @Override public boolean removeCheckpoint(String key) {
+        assert !F.isEmpty(key);
+
+        // Stop expiration tracking first so the worker doesn't race to delete the same key.
+        timeoutWrk.remove(key);
+
+        boolean rmv = false;
+
+        try {
+            rmv = delete(key);
+        }
+        catch (AmazonClientException e) {
+            // Best-effort removal: log and report failure via the return value.
+            U.error(log, "Failed to delete data by key: " + key, e);
+        }
+
+        // Notify listener only when something was actually removed.
+        if (rmv) {
+            CheckpointListener tmpLsnr = lsnr;
+
+            if (tmpLsnr != null)
+                tmpLsnr.onCheckpointRemoved(key);
+        }
+
+        return rmv;
+    }
+
+    /**
+     * Reads checkpoint data.
+     *
+     * @param key Key name to read data from.
+     * @return Checkpoint data object, or {@code null} if no object exists for the key.
+     * @throws IgniteCheckedException Thrown if an error occurs while unmarshalling.
+     * @throws AmazonClientException If an error occurs while querying Amazon S3.
+     */
+    @Nullable private S3CheckpointData read(String key) throws IgniteCheckedException, AmazonClientException {
+        assert !F.isEmpty(key);
+
+        if (log.isDebugEnabled())
+            log.debug("Reading data from S3 [bucket=" + bucketName + ", key=" + key + ']');
+
+        try {
+            S3Object obj = s3.getObject(bucketName, key);
+
+            InputStream in = obj.getObjectContent();
+
+            try {
+                return S3CheckpointData.fromStream(in);
+            }
+            catch (IOException e) {
+                throw new IgniteCheckedException("Failed to unmarshal S3CheckpointData [bucketName=" +
+                    bucketName + ", key=" + key + ']', e);
+            }
+            finally {
+                U.closeQuiet(in);
+            }
+        }
+        catch (AmazonServiceException e) {
+            // HTTP 404 means the object simply doesn't exist; report as absent checkpoint, not an error.
+            if (e.getStatusCode() != 404)
+                throw e;
+        }
+
+        return null;
+    }
+
+    /**
+     * Writes given checkpoint data to a given S3 bucket. Data is serialized to
+     * the binary stream and saved to the S3.
+     *
+     * @param data Checkpoint data.
+     * @throws IgniteCheckedException Thrown if an error occurs while marshalling.
+     * @throws AmazonClientException If an error occurs while querying Amazon S3.
+     */
+    private void write(S3CheckpointData data) throws IgniteCheckedException, AmazonClientException {
+        assert data != null;
+
+        if (log.isDebugEnabled())
+            log.debug("Writing data to S3 [bucket=" + bucketName + ", key=" + data.getKey() + ']');
+
+        byte[] buf = data.toBytes();
+
+        ObjectMetadata meta = new ObjectMetadata();
+
+        // Content length must be set explicitly when uploading from a raw stream.
+        meta.setContentLength(buf.length);
+
+        // Optional server-side encryption (e.g. AES256) configured via setSSEAlgorithm().
+        if (!F.isEmpty(sseAlg))
+            meta.setSSEAlgorithm(sseAlg);
+
+        s3.putObject(bucketName, data.getKey(), new ByteArrayInputStream(buf), meta);
+    }
+
+    /**
+     * Deletes checkpoint data.
+     *
+     * @param key Key of the data in storage.
+     * @return {@code True} if operations succeeds and data is actually removed.
+     * @throws AmazonClientException If an error occurs while querying Amazon S3.
+     */
+    private boolean delete(String key) throws AmazonClientException {
+        assert !F.isEmpty(key);
+
+        if (log.isDebugEnabled())
+            log.debug("Removing data from S3 [bucket=" + bucketName + ", key=" + key + ']');
+
+        // NOTE(review): hasKey() + deleteObject() is not atomic; the return value may be stale
+        // if another node removes the key in between. deleteObject itself is idempotent on S3.
+        if (!hasKey(key))
+            return false;
+
+        s3.deleteObject(bucketName, key);
+
+        return true;
+    }
+
+    /**
+     * Returns {@code true} if mapping presents for the provided key.
+     *
+     * @param key Key to check mapping for.
+     * @return {@code true} if mapping presents for key.
+     * @throws AmazonClientException If an error occurs while querying Amazon S3.
+     */
+    boolean hasKey(String key) throws AmazonClientException {
+        assert !F.isEmpty(key);
+
+        try {
+            // A zero-length object is treated as absent. Serialized checkpoint data is never
+            // empty (at least 16 bytes of headers), so this cannot misreport a real checkpoint.
+            return s3.getObjectMetadata(bucketName, key).getContentLength() != 0;
+        }
+        catch (AmazonServiceException e) {
+            // HTTP 404 means the object doesn't exist — fall through to "no mapping".
+            if (e.getStatusCode() != 404)
+                throw e;
+        }
+
+        return false;
+    }
+
+    /** {@inheritDoc} */
+    @Override public void setCheckpointListener(CheckpointListener lsnr) {
+        // Single-listener model: the latest registration replaces any previous one.
+        this.lsnr = lsnr;
+    }
+
+    /** {@inheritDoc} */
+    @Override public S3CheckpointSpi setName(String name) {
+        super.setName(name);
+
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(S3CheckpointSpi.class, this);
+    }
+
+    /**
+     * Implementation of {@link org.apache.ignite.spi.IgniteSpiThread} that takes care about outdated S3 data.
+     * Every checkpoint has expiration date after which it makes no sense to
+     * keep it. This worker periodically cleans S3 bucket according to checkpoints
+     * expiration time.
+     * <p>
+     * All access to {@code map} is guarded by the enclosing SPI's {@code mux} monitor,
+     * which is also used to wake the worker when new entries are added.
+     */
+    private class S3TimeoutWorker extends IgniteSpiThread {
+        /** List of data with access and expiration date. Guarded by {@code mux}. */
+        private Map<String, S3TimeData> map = new HashMap<>();
+
+        /**
+         * Constructor.
+         */
+        S3TimeoutWorker() {
+            super(ignite.name(), "grid-s3-checkpoint-worker", log);
+        }
+
+        /** {@inheritDoc} */
+        @Override public void body() throws InterruptedException {
+            // Earliest known expiration time: 0 = first pass, -1 = no timed entries.
+            long nextTime = 0;
+
+            Collection<String> rmvKeys = new HashSet<>();
+
+            while (!isInterrupted()) {
+                rmvKeys.clear();
+
+                synchronized (mux) {
+                    // NOTE(review): delay is (now - nextTime), i.e. positive only AFTER the earliest
+                    // expiration has already passed; while it is still in the future (delay <= 0)
+                    // the loop does not wait at all and busy-spins until expiry. The conventional
+                    // form would be (nextTime - now). Confirm intent against upstream before changing.
+                    long delay = U.currentTimeMillis() - nextTime;
+
+                    if (nextTime != 0 && delay > 0)
+                        mux.wait(delay);
+
+                    long now = U.currentTimeMillis();
+
+                    nextTime = -1;
+
+                    // check map one by one and physically remove
+                    // if (now - last modification date) > expiration time
+                    for (Iterator<Map.Entry<String, S3TimeData>> iter = map.entrySet().iterator(); iter.hasNext();) {
+                        Map.Entry<String, S3TimeData> entry = iter.next();
+
+                        String key = entry.getKey();
+
+                        S3TimeData timeData = entry.getValue();
+
+                        // Entries with zero expire time never expire and are simply skipped.
+                        if (timeData.getExpireTime() > 0)
+                            if (timeData.getExpireTime() <= now) {
+                                try {
+                                   delete(key);
+
+                                   if (log.isDebugEnabled())
+                                        log.debug("Data was deleted by timeout: " + key);
+                                }
+                                catch (AmazonClientException e) {
+                                    U.error(log, "Failed to delete data by key: " + key, e);
+                                }
+
+                                iter.remove();
+
+                                rmvKeys.add(timeData.getKey());
+                            }
+                            else
+                                // Track the earliest not-yet-expired entry for the next wait cycle.
+                                if (timeData.getExpireTime() < nextTime || nextTime == -1)
+                                    nextTime = timeData.getExpireTime();
+                    }
+                }
+
+                // Notify the listener outside the mux monitor to avoid holding the lock in alien code.
+                CheckpointListener tmpLsnr = lsnr;
+
+                if (tmpLsnr != null)
+                    for (String key : rmvKeys)
+                        tmpLsnr.onCheckpointRemoved(key);
+            }
+
+            synchronized (mux) {
+                map.clear();
+            }
+        }
+
+        /**
+         * Adds data to a list of files this task should look after.
+         *
+         * @param timeData File expiration and access information.
+         */
+        void add(S3TimeData timeData) {
+            assert timeData != null;
+
+            synchronized (mux) {
+                map.put(timeData.getKey(), timeData);
+
+                // Wake the worker so it can re-evaluate the earliest expiration.
+                mux.notifyAll();
+            }
+        }
+
+        /**
+         * Adds list of data this task should look after.
+         *
+         * @param newData List of data.
+         */
+        void add(Iterable<S3TimeData> newData) {
+            assert newData != null;
+
+            synchronized (mux) {
+                for (S3TimeData data : newData)
+                    map.put(data.getKey(), data);
+
+                mux.notifyAll();
+            }
+        }
+
+        /**
+         * Removes data.
+         *
+         * @param key Checkpoint key.
+         */
+        public void remove(String key) {
+            assert key != null;
+
+            synchronized (mux) {
+                map.remove(key);
+            }
+        }
+
+        /** {@inheritDoc} */
+        @Override public String toString() {
+            return S.toString(S3TimeoutWorker.class, this);
+        }
+    }
+
+    /**
+     * MBean implementation for S3CheckpointSpi. All attribute reads delegate to the
+     * enclosing SPI instance.
+     */
+    private class S3CheckpointSpiMBeanImpl extends IgniteSpiMBeanAdapter implements S3CheckpointSpiMBean {
+        /**
+         * Constructor.
+         *
+         * @param spiAdapter Enclosing SPI adapter.
+         */
+        S3CheckpointSpiMBeanImpl(IgniteSpiAdapter spiAdapter) {
+            super(spiAdapter);
+        }
+
+        /** {@inheritDoc} */
+        @Override public String getBucketName() {
+            return S3CheckpointSpi.this.getBucketName();
+        }
+
+        /** {@inheritDoc} */
+        @Override public String getBucketEndpoint() {
+            // Fixed copy-paste bug: previously delegated to getBucketName().
+            return S3CheckpointSpi.this.getBucketEndpoint();
+        }
+
+        /** {@inheritDoc} */
+        @Override public String getSSEAlgorithm() {
+            return S3CheckpointSpi.this.getSSEAlgorithm();
+        }
+
+        /** {@inheritDoc} */
+        @Override public String getAccessKey() {
+            return S3CheckpointSpi.this.getAccessKey();
+        }
+
+        /** {@inheritDoc} */
+        @Override public String getProxyHost() {
+            return S3CheckpointSpi.this.getProxyHost();
+        }
+
+        /** {@inheritDoc} */
+        @Override public int getProxyPort() {
+            return S3CheckpointSpi.this.getProxyPort();
+        }
+
+        /** {@inheritDoc} */
+        @Override public String getProxyUsername() {
+            return S3CheckpointSpi.this.getProxyUsername();
+        }
+    }
+}
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiMBean.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiMBean.java
new file mode 100644
index 0000000..edbda93
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiMBean.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
+
+import org.apache.ignite.mxbean.MXBeanDescription;
+import org.apache.ignite.spi.IgniteSpiManagementMBean;
+
+/**
+ * Management bean for {@link S3CheckpointSpi}.
+ * <p>
+ * Exposes read-only configuration attributes. The AWS secret key and HTTP proxy password
+ * are intentionally not part of this interface so they are never published over JMX.
+ */
+@MXBeanDescription("MBean that provides access to S3 checkpoint SPI configuration.")
+public interface S3CheckpointSpiMBean extends IgniteSpiManagementMBean {
+    /**
+     * Gets S3 bucket name to use.
+     *
+     * @return S3 bucket name to use.
+     */
+    @MXBeanDescription("S3 bucket name.")
+    public String getBucketName();
+
+    /**
+     * @return S3 bucket endpoint.
+     */
+    @MXBeanDescription("S3 bucket endpoint.")
+    public String getBucketEndpoint();
+
+    /**
+     * @return S3 server-side encryption algorithm.
+     */
+    @MXBeanDescription("S3 server-side encryption algorithm.")
+    public String getSSEAlgorithm();
+
+    /**
+     * @return S3 access key.
+     */
+    @MXBeanDescription("S3 access key.")
+    public String getAccessKey();
+
+    /**
+     * @return HTTP proxy host.
+     */
+    @MXBeanDescription("HTTP proxy host.")
+    public String getProxyHost();
+
+    /**
+     * @return HTTP proxy port.
+     */
+    @MXBeanDescription("HTTP proxy port.")
+    public int getProxyPort();
+
+    /**
+     * @return HTTP proxy user name.
+     */
+    @MXBeanDescription("HTTP proxy user name.")
+    public String getProxyUsername();
+}
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/S3TimeData.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/S3TimeData.java
new file mode 100644
index 0000000..6e0ab20
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/S3TimeData.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
+
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+/**
+ * Internal holder pairing a checkpoint key with its expiration timestamp.
+ * Used by the timeout worker to locate and purge obsolete checkpoint files.
+ */
+class S3TimeData {
+    /** Expiration timestamp of the checkpoint. */
+    private long expTime;
+
+    /** Key of the checkpoint this entry refers to. */
+    private final String key;
+
+    /**
+     * Creates new instance of checkpoint time information.
+     *
+     * @param expTime Checkpoint expiration time (must be non-negative).
+     * @param key Key of checkpoint.
+     */
+    S3TimeData(long expTime, String key) {
+        assert expTime >= 0;
+
+        this.expTime = expTime;
+        this.key = key;
+    }
+
+    /**
+     * Gets checkpoint expiration time.
+     *
+     * @return Expire time.
+     */
+    long getExpireTime() {
+        return expTime;
+    }
+
+    /**
+     * Sets checkpoint expiration time.
+     *
+     * @param expTime Checkpoint time-to-live value (must be non-negative).
+     */
+    void setExpireTime(long expTime) {
+        assert expTime >= 0;
+
+        this.expTime = expTime;
+    }
+
+    /**
+     * Gets checkpoint key.
+     *
+     * @return Checkpoint key.
+     */
+    String getKey() {
+        return key;
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(S3TimeData.class, this);
+    }
+}
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/package-info.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/package-info.java
new file mode 100644
index 0000000..312abb6
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/checkpoint/s3/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Contains S3-based implementation for checkpoint SPI.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/TcpDiscoveryAlbIpFinder.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/TcpDiscoveryAlbIpFinder.java
new file mode 100644
index 0000000..316ae54
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/TcpDiscoveryAlbIpFinder.java
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ignite.spi.discovery.tcp.ipfinder.elb;
+
+import java.net.InetSocketAddress;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+import com.amazonaws.auth.AWSCredentialsProvider;
+import com.amazonaws.services.ec2.AmazonEC2;
+import com.amazonaws.services.ec2.AmazonEC2ClientBuilder;
+import com.amazonaws.services.ec2.model.DescribeInstancesRequest;
+import com.amazonaws.services.ec2.model.Instance;
+import com.amazonaws.services.ec2.model.Reservation;
+import com.amazonaws.services.elasticloadbalancingv2.AmazonElasticLoadBalancing;
+import com.amazonaws.services.elasticloadbalancingv2.AmazonElasticLoadBalancingClientBuilder;
+import com.amazonaws.services.elasticloadbalancingv2.model.DescribeTargetHealthRequest;
+import com.amazonaws.services.elasticloadbalancingv2.model.TargetDescription;
+import com.amazonaws.services.elasticloadbalancingv2.model.TargetHealthDescription;
+import org.apache.http.conn.util.InetAddressUtils;
+import org.apache.ignite.spi.IgniteSpiConfiguration;
+import org.apache.ignite.spi.IgniteSpiException;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAdapter;
+
+import static com.amazonaws.util.StringUtils.isNullOrEmpty;
+
+/**
+ * AWS Application load balancer(ALB) based IP finder.
+ *
+ * <p>
+ *     For information about Amazon Application load balancer visit:
+ *     <a href="https://docs.aws.amazon.com/elasticloadbalancing/latest/application/introduction.html">aws.amazon.com</a>.
+ * </p>
+ *
+ * <h1 class="header">Configuration</h1>
+ * <h2 class="header">Mandatory</h2>
+ * <ul>
+ *     <li>{@link #setCredentialsProvider(AWSCredentialsProvider)}</li>
+ *     <li>Application load balancer target group ARN name (see {@link #setTargetGrpARN(String)})</li>
+ *     <li>Application load balancer region (see {@link #setRegion(String)})</li>
+ * </ul>
+ *
+ * <p> The finder will fetch all nodes connected under an Application load balancer and share with its peers for cluster
+ * awareness.</p>
+ *
+ * <p> Note that using AWS Application load balancer service will result in charges to your AWS account.</p>
+ *
+ * <p>
+ * Choose another implementation of {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder} for local
+ * or home network tests.</p>
+ *
+ * <p> Note that this finder is shared.</p>
+ *
+ * <p> Note that this finder can only be used on AWS EC2 instances that belong on a Load Balancer based auto scaling group.</p>
+ *
+ * @see TcpDiscoveryElbIpFinder
+ */
+public class TcpDiscoveryAlbIpFinder extends TcpDiscoveryIpFinderAdapter {
+    /** */
+    private AmazonElasticLoadBalancing amazonELBClient;
+
+    /** */
+    private String targetGrpARN;
+
+    /** */
+    private AmazonEC2 amazonEC2Client;
+
+    /** */
+    private AWSCredentialsProvider credsProvider;
+
+    /** */
+    private String region;
+
+    /**
+     * Creates Application load balancer IP finder instance.
+     */
+    public TcpDiscoveryAlbIpFinder() {
+        setShared(true);
+    }
+
+    /** {@inheritDoc} */
+    @Override public Collection<InetSocketAddress> getRegisteredAddresses() throws IgniteSpiException {
+        initClients();
+
+        DescribeTargetHealthRequest req = new DescribeTargetHealthRequest().withTargetGroupArn(targetGrpARN);
+
+        List<TargetHealthDescription> desc = amazonELBClient.describeTargetHealth(req).getTargetHealthDescriptions();
+
+        // instance ips
+        List<String> instanceIps = new LinkedList<>();
+        // instance ids
+        List<String> instanceIds = new LinkedList<>();
+
+        // Fetch the ids of instances in the given ARN via target health
+        for (TargetHealthDescription targetHealthDesc : desc) {
+            TargetDescription target = targetHealthDesc.getTarget();
+            String targetId = target.getId();
+
+            // divide the target ids into ips and instance ids
+            if (isIPAddress(targetId))
+                instanceIps.add(targetId);
+            else
+                instanceIds.add(targetId);
+        }
+
+        DescribeInstancesRequest descInstReq = new DescribeInstancesRequest().withInstanceIds(instanceIds);
+
+        List<Reservation> reservations = amazonEC2Client.describeInstances(descInstReq).getReservations();
+
+        // Convert instance ids to instance ips
+        for (Reservation reservation : reservations) {
+            List<Instance> instances = reservation.getInstances();
+
+            for (Instance instance : instances) {
+                String ip = instance.getPrivateIpAddress();
+                instanceIps.add(ip);
+            }
+        }
+
+        List<InetSocketAddress> addrs = new LinkedList<>();
+
+        for (String ip : instanceIps) {
+            InetSocketAddress addr = new InetSocketAddress(ip, 0);
+            addrs.add(addr);
+        }
+
+        return addrs;
+    }
+
+    /**
+     * Checks if the given id is a valid IP address
+     *
+     * @param id ip to be checked.
+     */
+    private boolean isIPAddress(String id) {
+        return InetAddressUtils.isIPv4Address(id) ||
+            InetAddressUtils.isIPv4MappedIPv64Address(id) ||
+            InetAddressUtils.isIPv6Address(id) ||
+            InetAddressUtils.isIPv6HexCompressedAddress(id) ||
+            InetAddressUtils.isIPv6StdAddress(id);
+    }
+
+    /**
+     * Initializing the IP finder.
+     */
+    private void initClients() {
+        if (credsProvider == null || isNullOrEmpty(targetGrpARN) || isNullOrEmpty(region))
+            throw new IgniteSpiException("One or more configuration parameters are invalid [setCredentialsProvider=" +
+                credsProvider + ", setRegion=" + region + ", setTargetGroupARN=" +
+                targetGrpARN + "]");
+
+        if (amazonEC2Client == null)
+            amazonEC2Client = AmazonEC2ClientBuilder.standard().withRegion(region).withCredentials(credsProvider)
+                .build();
+
+        if (amazonELBClient == null)
+            amazonELBClient = AmazonElasticLoadBalancingClientBuilder.standard().withRegion(region)
+                .withCredentials(credsProvider).build();
+    }
+
+    /**
+     * Sets AWS Application Load Balancer's target group ARN. For details refer to Amazon API reference.
+     *
+     * @param targetGrpARN Target group ARN attached to an AWS Application Load Balancer.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public void setTargetGrpARN(String targetGrpARN) {
+        this.targetGrpARN = targetGrpARN;
+    }
+
+    /**
+     * Sets AWS Application Load Balancer's region.
+     *
+     * For details refer to Amazon API reference.
+     *
+     * @param region AWS Elastic Load Balancer region (e.g: us-east-1)
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public void setRegion(String region) {
+        this.region = region;
+    }
+
+    /**
+     * Sets AWS credentials provider.
+     *
+     * For details refer to Amazon API reference.
+     *
+     * @param credsProvider AWS credentials provider.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public void setCredentialsProvider(AWSCredentialsProvider credsProvider) {
+        this.credsProvider = credsProvider;
+    }
+
+    /** {@inheritDoc} */
+    @Override public void registerAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
+        //No-op, Application load balancer will take care of registration.
+    }
+
+    /** {@inheritDoc} */
+    @Override public void unregisterAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
+        // No-op, Application load balancer will take care of this process.
+    }
+}
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/TcpDiscoveryElbIpFinder.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/TcpDiscoveryElbIpFinder.java
new file mode 100644
index 0000000..7a7891d
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/TcpDiscoveryElbIpFinder.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ignite.spi.discovery.tcp.ipfinder.elb;
+
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import com.amazonaws.auth.AWSCredentialsProvider;
+import com.amazonaws.services.ec2.AmazonEC2;
+import com.amazonaws.services.ec2.AmazonEC2ClientBuilder;
+import com.amazonaws.services.ec2.model.DescribeInstancesRequest;
+import com.amazonaws.services.ec2.model.Reservation;
+import com.amazonaws.services.elasticloadbalancing.AmazonElasticLoadBalancing;
+import com.amazonaws.services.elasticloadbalancing.AmazonElasticLoadBalancingClientBuilder;
+import com.amazonaws.services.elasticloadbalancing.model.DescribeLoadBalancersRequest;
+import com.amazonaws.services.elasticloadbalancing.model.Instance;
+import com.amazonaws.services.elasticloadbalancing.model.LoadBalancerDescription;
+import org.apache.ignite.spi.IgniteSpiConfiguration;
+import org.apache.ignite.spi.IgniteSpiException;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAdapter;
+
+import static com.amazonaws.util.StringUtils.isNullOrEmpty;
+
+/**
+ * AWS Classic load balancer based IP finder.
+ *
+ * <p>
+ *     For information about Amazon Classic load balancers visit:
+ *     <a href="https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/introduction.html">aws.amazon.com</a>.
+ * </p>
+ *
+ * <h1 class="header">Configuration</h1>
+ * <h2 class="header">Mandatory</h2>
+ * <ul>
+ *     <li>{@link #setCredentialsProvider(AWSCredentialsProvider)}</li>
+ *     <li>Classic load balancer name (see {@link #setLoadBalancerName(String)})</li>
+ *     <li>Classic load balancer region (see {@link #setRegion(String)})</li>
+ * </ul>
+ *
+ * <p> The finder will fetch all nodes connected under an Classic load balancer and share with its peers for cluster
+ * awareness.</p>
+ *
+ * <p> Note that using AWS Classic load balancer service will result in charges to your AWS account.</p>
+ *
+ * <p>
+ * Choose another implementation of {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder}
+ * for local or home network tests.</p>
+ *
+ * <p> Note that this finder is shared.</p>
+ *
+ * <p> Note that this finder can only be used on AWS EC2 instances that belong on a Load Balancer based auto scaling group.</p>
+ *
+ * @see TcpDiscoveryAlbIpFinder
+ */
+public class TcpDiscoveryElbIpFinder extends TcpDiscoveryIpFinderAdapter {
+    /** */
+    private AmazonElasticLoadBalancing amazonELBClient;
+
+    /** */
+    private AmazonEC2 amazonEC2Client;
+
+    /** */
+    private AWSCredentialsProvider credsProvider;
+
+    /** */
+    private String region;
+
+    /** */
+    private String loadBalancerName;
+
+    /**
+     * Creates Classic load balancer IP finder instance.
+     */
+    public TcpDiscoveryElbIpFinder() {
+        setShared(true);
+    }
+
+    /** {@inheritDoc} */
+    @Override public Collection<InetSocketAddress> getRegisteredAddresses() throws IgniteSpiException {
+        initClients();
+
+        List<String> instanceIds = new ArrayList<>();
+
+        DescribeLoadBalancersRequest req = new DescribeLoadBalancersRequest().withLoadBalancerNames(loadBalancerName);
+
+        List<LoadBalancerDescription> descs = amazonELBClient.describeLoadBalancers(req).getLoadBalancerDescriptions();
+
+        for (LoadBalancerDescription desc : descs) {
+            for (Instance instance : desc.getInstances())
+                instanceIds.add(instance.getInstanceId());
+        }
+
+        DescribeInstancesRequest instReq = new DescribeInstancesRequest().withInstanceIds(instanceIds);
+
+        List<Reservation> reservations = amazonEC2Client.describeInstances(instReq).getReservations();
+
+        List<InetSocketAddress> addrs = new ArrayList<>();
+
+        for (Reservation reservation : reservations) {
+            List<com.amazonaws.services.ec2.model.Instance> instances = reservation.getInstances();
+
+            for (com.amazonaws.services.ec2.model.Instance instance : instances)
+                addrs.add(new InetSocketAddress(instance.getPrivateIpAddress(), 0));
+        }
+
+        return addrs;
+    }
+
+    /**
+     * Initializing the IP finder.
+     */
+    private void initClients() {
+        if (credsProvider == null || isNullOrEmpty(loadBalancerName) || isNullOrEmpty(region))
+            throw new IgniteSpiException("One or more configuration parameters are invalid [setCredentialsProvider=" +
+                credsProvider + ", setRegion=" + region + ", setLoadBalancerName=" +
+                loadBalancerName + "]");
+
+        if (amazonEC2Client == null)
+            amazonEC2Client = AmazonEC2ClientBuilder.standard().withRegion(region).withCredentials(credsProvider)
+                .build();
+
+        if (amazonELBClient == null)
+            amazonELBClient = AmazonElasticLoadBalancingClientBuilder.standard().withRegion(region)
+                .withCredentials(credsProvider).build();
+    }
+
+    /**
+     * Sets AWS Classic load balancer name which nodes are plugged under it. For details refer to Amazon API
+     * reference.
+     *
+     * @param loadBalancerName AWS Classic load balancer name.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public void setLoadBalancerName(String loadBalancerName) {
+        this.loadBalancerName = loadBalancerName;
+    }
+
+    /**
+     * Sets Classic load balancer's region.
+     *
+     * For details refer to Amazon API reference.
+     *
+     * @param region AWS Classic load balancer region (i.e: us-east-1)
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public void setRegion(String region) {
+        this.region = region;
+    }
+
+    /**
+     * Sets AWS credentials provider.
+     *
+     * For details refer to Amazon API reference.
+     *
+     * @param credsProvider AWS credentials provider.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public void setCredentialsProvider(AWSCredentialsProvider credsProvider) {
+        this.credsProvider = credsProvider;
+    }
+
+    /** {@inheritDoc} */
+    @Override public void registerAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
+        //No-op, Classic load balancer will take care of registration.
+    }
+
+    /** {@inheritDoc} */
+    @Override public void unregisterAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
+        // No-op, Classic load balancer will take care of this process.
+    }
+}
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/package-info.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/package-info.java
new file mode 100644
index 0000000..9612104
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Contains AWS ELB-based IP finder.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.elb;
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinder.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinder.java
new file mode 100644
index 0000000..5eecc02
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinder.java
@@ -0,0 +1,501 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3;
+
+import java.io.ByteArrayInputStream;
+import java.net.InetSocketAddress;
+import java.nio.charset.StandardCharsets;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.StringTokenizer;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.regex.Pattern;
+import com.amazonaws.AmazonClientException;
+import com.amazonaws.ClientConfiguration;
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.AWSCredentialsProvider;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3Client;
+import com.amazonaws.services.s3.model.ObjectListing;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.S3ObjectSummary;
+import org.apache.commons.codec.binary.Base32;
+import org.apache.ignite.IgniteLogger;
+import org.apache.ignite.internal.IgniteInterruptedCheckedException;
+import org.apache.ignite.internal.util.tostring.GridToStringExclude;
+import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.internal.util.typedef.internal.S;
+import org.apache.ignite.internal.util.typedef.internal.SB;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.resources.LoggerResource;
+import org.apache.ignite.spi.IgniteSpiConfiguration;
+import org.apache.ignite.spi.IgniteSpiException;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAdapter;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt.EncryptionService;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * AWS S3-based IP finder.
+ * <p>
+ * For information about Amazon S3 visit <a href="http://aws.amazon.com">aws.amazon.com</a>.
+ * <h1 class="header">Configuration</h1>
+ * <h2 class="header">Mandatory</h2>
+ * <ul>
+ * <li>AWS credentials (see {@link #setAwsCredentials(AWSCredentials)} and
+ * {@link #setAwsCredentialsProvider(AWSCredentialsProvider)}</li>
+ * <li>Bucket name (see {@link #setBucketName(String)})</li>
+ * </ul>
+ * <h2 class="header">Optional</h2>
+ * <ul>
+ * <li>Client configuration (see {@link #setClientConfiguration(ClientConfiguration)})</li>
+ * <li>Shared flag (see {@link #setShared(boolean)})</li>
+ * <li>Bucket endpoint (see {@link #setBucketEndpoint(String)})</li>
+ * <li>Server side encryption algorithm (see {@link #setSSEAlgorithm(String)})</li>
+ * <li>Key prefix for the node addresses (see {@link #setKeyPrefix(String)})</li>
+ * <li>Client side encryption service (see {@link #setEncryptionService(EncryptionService)})</li>
+ * </ul>
+ * <p>
+ * The finder will create S3 bucket with configured name. The bucket will contain entries named like the following:
+ * {@code 192.168.1.136#1001}.
+ * <p>
+ * Note that storing data in AWS S3 service will result in charges to your AWS account. Choose another implementation of
+ * {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder} for local or home network tests.
+ * <p>
+ * Note that this finder is shared by default (see {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder#isShared()}.
+ */
+public class TcpDiscoveryS3IpFinder extends TcpDiscoveryIpFinderAdapter {
+    /** Delimiter to use in S3 entries name. */
+    private static final String DELIM = "#";
+
+    /** Entry content. */
+    private static final byte[] ENTRY_CONTENT = new byte[] {1};
+
+    /** Entry metadata. */
+    @GridToStringExclude
+    private final ObjectMetadata objMetadata = new ObjectMetadata();
+
+    /** Grid logger. */
+    @LoggerResource
+    private IgniteLogger log;
+
+    /** Client to interact with S3 storage. */
+    @GridToStringExclude
+    private AmazonS3 s3;
+
+    /** Bucket name. */
+    private String bucketName;
+
+    /** Bucket endpoint. */
+    @Nullable private String bucketEndpoint;
+
+    /** Server side encryption algorithm. */
+    @Nullable private String sseAlg;
+
+    /** Sub-folder name to write node addresses. */
+    @Nullable private String keyPrefix;
+
+    /** Encryption service. **/
+    @Nullable private EncryptionService encryptionSvc;
+
+    /** Init guard. */
+    @GridToStringExclude
+    private final AtomicBoolean initGuard = new AtomicBoolean();
+
+    /** Init latch. */
+    @GridToStringExclude
+    private final CountDownLatch initLatch = new CountDownLatch(1);
+
+    /** Amazon client configuration. */
+    private ClientConfiguration cfg;
+
+    /** AWS Credentials. */
+    @GridToStringExclude
+    private AWSCredentials cred;
+
+    /** AWS Credentials. */
+    @GridToStringExclude
+    private AWSCredentialsProvider credProvider;
+
    /**
     * Creates the S3 IP finder. The finder is shared by default
     * (see {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder#isShared()}).
     */
    public TcpDiscoveryS3IpFinder() {
        setShared(true);
    }
+
+    /** {@inheritDoc} */
+    @Override public Collection<InetSocketAddress> getRegisteredAddresses() throws IgniteSpiException {
+        initClient();
+
+        Collection<InetSocketAddress> addrs = new LinkedList<>();
+
+        try {
+            ObjectListing list = keyPrefix == null ? s3.listObjects(bucketName) : s3.listObjects(bucketName, keyPrefix);
+
+            while (true) {
+                for (S3ObjectSummary sum : list.getObjectSummaries()) {
+                    InetSocketAddress addr = addr(sum);
+
+                    if (addr != null)
+                        addrs.add(addr);
+                }
+
+                if (list.isTruncated())
+                    list = s3.listNextBatchOfObjects(list);
+                else
+                    break;
+            }
+        }
+        catch (AmazonClientException e) {
+            throw new IgniteSpiException("Failed to list objects in the bucket: " + bucketName, e);
+        }
+
+        return addrs;
+    }
+
+    /**
+     * Parses the S3 key to return the ip and addresses.
+     *
+     * @param sum S3 Object summary.
+     */
+    private InetSocketAddress addr(S3ObjectSummary sum) {
+        String key = sum.getKey();
+        String addr = key;
+
+        if (keyPrefix != null)
+            addr = key.replaceFirst(Pattern.quote(keyPrefix), "");
+
+        if (encryptionSvc != null) {
+            byte[] encBytes = new Base32().decode(addr.getBytes(StandardCharsets.UTF_8));
+            byte[] decBytes = encryptionSvc.decrypt(encBytes);
+            addr = new String(decBytes, StandardCharsets.UTF_8).replaceAll("=", "");
+        }
+
+        StringTokenizer st = new StringTokenizer(addr, DELIM);
+
+        if (st.countTokens() != 2)
+            U.error(log, "Failed to parse S3 entry due to invalid format: " + addr);
+        else {
+            String addrStr = st.nextToken();
+            String portStr = st.nextToken();
+
+            int port = -1;
+
+            try {
+                port = Integer.parseInt(portStr);
+            }
+            catch (NumberFormatException e) {
+                U.error(log, "Failed to parse port for S3 entry: " + addr, e);
+            }
+
+            if (port != -1)
+                try {
+                    return new InetSocketAddress(addrStr, port);
+                }
+                catch (IllegalArgumentException e) {
+                    U.error(log, "Failed to parse port for S3 entry: " + addr, e);
+                }
+        }
+
+        return null;
+    }
+
+    /** {@inheritDoc} */
+    @Override public void registerAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
+        assert !F.isEmpty(addrs);
+
+        initClient();
+
+        for (InetSocketAddress addr : addrs) {
+            String key = key(addr);
+
+            try {
+                s3.putObject(bucketName, key, new ByteArrayInputStream(ENTRY_CONTENT), objMetadata);
+            }
+            catch (AmazonClientException e) {
+                throw new IgniteSpiException("Failed to put entry [bucketName=" + bucketName +
+                    ", entry=" + key + ']', e);
+            }
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public void unregisterAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
+        assert !F.isEmpty(addrs);
+
+        initClient();
+
+        for (InetSocketAddress addr : addrs) {
+            String key = key(addr);
+
+            try {
+                s3.deleteObject(bucketName, key);
+            }
+            catch (AmazonClientException e) {
+                throw new IgniteSpiException("Failed to delete entry [bucketName=" + bucketName +
+                    ", entry=" + key + ']', e);
+            }
+        }
+    }
+
+    /**
+     * Gets S3 key for provided address.
+     *
+     * @param addr Node address.
+     * @return Key.
+     */
+    private String key(InetSocketAddress addr) {
+        assert addr != null;
+
+        SB sb = new SB();
+
+        if (keyPrefix != null)
+            sb.a(keyPrefix);
+
+        String addrStr = addr.getAddress().getHostAddress();
+
+        if (encryptionSvc != null) {
+            String addrPort = new SB()
+                .a(addrStr)
+                .a(DELIM)
+                .a(addr.getPort()).toString();
+
+            byte[] encBytes = encryptionSvc.encrypt(addrPort.getBytes(StandardCharsets.UTF_8));
+            byte[] base32Bytes = new Base32().encode(encBytes);
+            String encStr = new String(base32Bytes, StandardCharsets.UTF_8).replaceAll("=", "");
+
+            sb.a(encStr);
+        }
+        else
+            sb.a(addrStr)
+                .a(DELIM)
+                .a(addr.getPort());
+
+        return sb.toString();
+    }
+
    /**
     * Lazily initializes the Amazon S3 client and ensures the configured bucket exists,
     * creating it when missing. Thread-safe: the first caller performs the initialization
     * while concurrent callers block on a latch until it completes.
     *
     * @throws org.apache.ignite.spi.IgniteSpiException If configuration is invalid, the bucket
     *      cannot be created, or the thread is interrupted while waiting.
     */
    private void initClient() throws IgniteSpiException {
        // Only the first thread to flip the guard performs the initialization.
        if (initGuard.compareAndSet(false, true))
            try {
                if (cred == null && credProvider == null)
                    throw new IgniteSpiException("AWS credentials are not set.");

                if (cfg == null)
                    U.warn(log, "Amazon client configuration is not set (will use default).");

                if (F.isEmpty(bucketName))
                    throw new IgniteSpiException("Bucket name is null or empty (provide bucket name and restart).");

                // Entries all share the same metadata: a fixed one-byte body, optionally SSE-encrypted.
                objMetadata.setContentLength(ENTRY_CONTENT.length);

                if (!F.isEmpty(sseAlg))
                    objMetadata.setSSEAlgorithm(sseAlg);

                s3 = createAmazonS3Client();

                if (!s3.doesBucketExist(bucketName)) {
                    try {
                        s3.createBucket(bucketName);

                        if (log.isDebugEnabled())
                            log.debug("Created S3 bucket: " + bucketName);

                        // Bucket creation is eventually consistent; poll until it becomes visible.
                        while (!s3.doesBucketExist(bucketName))
                            try {
                                U.sleep(200);
                            }
                            catch (IgniteInterruptedCheckedException e) {
                                throw new IgniteSpiException("Thread has been interrupted.", e);
                            }
                    }
                    catch (AmazonClientException e) {
                        // Another node may have created the bucket concurrently; only fail if it
                        // still does not exist. s3 is reset to null so waiters see the failure.
                        if (!s3.doesBucketExist(bucketName)) {
                            s3 = null;

                            throw new IgniteSpiException("Failed to create bucket: " + bucketName, e);
                        }
                    }
                }
            }
            finally {
                // Release waiters regardless of outcome; they detect failure via s3 == null.
                initLatch.countDown();
            }
        else {
            // Another thread is (or was) initializing: wait for it to finish.
            try {
                U.await(initLatch);
            }
            catch (IgniteInterruptedCheckedException e) {
                throw new IgniteSpiException("Thread has been interrupted.", e);
            }

            if (s3 == null)
                throw new IgniteSpiException("Ip finder has not been initialized properly.");
        }
    }
+
+    /**
+     * Instantiates {@code AmazonS3Client} instance.
+     *
+     * @return Client instance to use to connect to AWS.
+     */
+    AmazonS3Client createAmazonS3Client() {
+        AmazonS3Client cln = cfg != null
+            ? (cred != null ? new AmazonS3Client(cred, cfg) : new AmazonS3Client(credProvider, cfg))
+            : (cred != null ? new AmazonS3Client(cred) : new AmazonS3Client(credProvider));
+
+        if (!F.isEmpty(bucketEndpoint))
+            cln.setEndpoint(bucketEndpoint);
+
+        return cln;
+    }
+
+    /**
+     * Sets bucket name for IP finder.
+     *
+     * @param bucketName Bucket name.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public TcpDiscoveryS3IpFinder setBucketName(String bucketName) {
+        this.bucketName = bucketName;
+
+        return this;
+    }
+
+    /**
+     * Sets bucket endpoint for IP finder. If the endpoint is not set then IP finder will go to each region to find a
+     * corresponding bucket. For information about possible endpoint names visit
+     * <a href="http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region">docs.aws.amazon.com</a>.
+     *
+     * @param bucketEndpoint Bucket endpoint, for example, s3.us-east-2.amazonaws.com.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = true)
+    public TcpDiscoveryS3IpFinder setBucketEndpoint(String bucketEndpoint) {
+        this.bucketEndpoint = bucketEndpoint;
+
+        return this;
+    }
+
+    /**
+     * Sets server-side encryption algorithm for Amazon S3-managed encryption keys. For information about possible
+     * S3-managed encryption keys visit
+     * <a href="http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingServerSideEncryption.html">docs.aws.amazon.com</a>.
+     *
+     * @param sseAlg Server-side encryption algorithm, for example, AES256 or SSES3.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = true)
+    public TcpDiscoveryS3IpFinder setSSEAlgorithm(String sseAlg) {
+        this.sseAlg = sseAlg;
+
+        return this;
+    }
+
+    /**
+     * Sets Amazon client configuration.
+     * <p>
+     * For details refer to Amazon S3 API reference.
+     *
+     * @param cfg Amazon client configuration.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = true)
+    public TcpDiscoveryS3IpFinder setClientConfiguration(ClientConfiguration cfg) {
+        this.cfg = cfg;
+
+        return this;
+    }
+
+    /**
+     * Sets encryption service for client side node address encryption.
+     *
+     * @param encryptionSvc Encryption service .
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = true)
+    public TcpDiscoveryS3IpFinder setEncryptionService(EncryptionService encryptionSvc) {
+        this.encryptionSvc = encryptionSvc;
+
+        return this;
+    }
+
+    /**
+     * Sets AWS credentials. Either use {@link #setAwsCredentialsProvider(AWSCredentialsProvider)} or this one.
+     * <p>
+     * For details refer to Amazon S3 API reference.
+     *
+     * @param cred AWS credentials.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public TcpDiscoveryS3IpFinder setAwsCredentials(AWSCredentials cred) {
+        this.cred = cred;
+
+        return this;
+    }
+
+    /**
+     * Sets AWS credentials provider. Either use {@link #setAwsCredentials(AWSCredentials)} or this one.
+     * <p>
+     * For details refer to Amazon S3 API reference.
+     *
+     * @param credProvider AWS credentials provider.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public TcpDiscoveryS3IpFinder setAwsCredentialsProvider(AWSCredentialsProvider credProvider) {
+        this.credProvider = credProvider;
+
+        return this;
+    }
+
    /**
     * This can be thought of as the sub-folder within the bucket that will hold the node addresses.
     * <p>
     * For details visit
     * <a href="https://docs.aws.amazon.com/AmazonS3/latest/dev/ListingKeysHierarchy.html">docs.aws.amazon.com</a>.
     *
     * @param keyPrefix Key prefix (sub-folder) under which node addresses are stored.
     * @return {@code this} for chaining.
     */
    @IgniteSpiConfiguration(optional = true)
    public TcpDiscoveryS3IpFinder setKeyPrefix(String keyPrefix) {
        this.keyPrefix = keyPrefix;

        return this;
    }
+
+    /** {@inheritDoc} */
+    @Override public TcpDiscoveryS3IpFinder setShared(boolean shared) {
+        super.setShared(shared);
+
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(TcpDiscoveryS3IpFinder.class, this, "super", super.toString());
+    }
+}
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/AsymmetricKeyEncryptionService.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/AsymmetricKeyEncryptionService.java
new file mode 100644
index 0000000..f85a906
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/AsymmetricKeyEncryptionService.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt;
+
+import java.security.Key;
+import java.security.KeyPair;
+import javax.crypto.BadPaddingException;
+import javax.crypto.Cipher;
+import javax.crypto.IllegalBlockSizeException;
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.internal.util.IgniteUtils;
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+/**
+ * Provides an implementation of asymmetric encryption to encrypt/decrypt the data.
+ */
+public class AsymmetricKeyEncryptionService implements EncryptionService {
+    /** Public key. */
+    private Key publicKey;
+
+    /** Private key. */
+    private Key privateKey;
+
+    /** Encryption service. */
+    private Cipher encCipher;
+
+    /** Decryption service. */
+    private Cipher decCipher;
+
+    /**
+     * Set the public private key pair.
+     *
+     * @param keyPair Key pair of Public and Private key.
+     */
+    public void setKeyPair(KeyPair keyPair) {
+        if (keyPair.getPublic() == null)
+            throw new IgniteException("Public key was not set / was set to null.");
+
+        if (keyPair.getPrivate() == null)
+            throw new IgniteException("Private key was not set / was set to null.");
+
+        publicKey = keyPair.getPublic();
+        privateKey = keyPair.getPrivate();
+    }
+
+    /** {@inheritDoc} */
+    @Override public void init() throws IgniteException {
+        if (privateKey == null)
+            throw new IgniteException("Private key was not set / was set to null.");
+
+        if (publicKey == null)
+            throw new IgniteException("Public key was not set / was set to null.");
+
+        encCipher = IgniteUtils.createCipher(privateKey, Cipher.ENCRYPT_MODE);
+        decCipher = IgniteUtils.createCipher(publicKey, Cipher.DECRYPT_MODE);
+    }
+
+    /** {@inheritDoc} */
+    @Override public byte[] encrypt(byte[] data) {
+        if (data == null)
+            throw new IgniteException("Parameter data cannot be null");
+
+        if (encCipher == null)
+            throw new IgniteException("The init() method was not called.");
+
+        try {
+            return encCipher.doFinal(data);
+        }
+        catch (IllegalBlockSizeException | BadPaddingException e) {
+            throw new IgniteException(e);
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public byte[] decrypt(byte[] data) {
+        if (data == null)
+            throw new IgniteException("Parameter data cannot be null");
+
+        if (decCipher == null)
+            throw new IgniteException("The init() method was not called.");
+
+        try {
+            return decCipher.doFinal(data);
+        }
+        catch (IllegalBlockSizeException | BadPaddingException e) {
+            throw new IgniteException(e);
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(AsymmetricKeyEncryptionService.class, this, "super", super.toString());
+    }
+}
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/AwsKmsEncryptionService.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/AwsKmsEncryptionService.java
new file mode 100644
index 0000000..aa878b4
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/AwsKmsEncryptionService.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt;
+
+import java.util.List;
+import com.amazonaws.ClientConfiguration;
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.AWSStaticCredentialsProvider;
+import com.amazonaws.encryptionsdk.AwsCrypto;
+import com.amazonaws.encryptionsdk.CryptoResult;
+import com.amazonaws.encryptionsdk.kms.KmsMasterKey;
+import com.amazonaws.encryptionsdk.kms.KmsMasterKeyProvider;
+import com.amazonaws.regions.Region;
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+/**
+ * Provides an implementation of AWS KMS to encrypt/decrypt the data.
+ */
+public class AwsKmsEncryptionService implements EncryptionService {
+    /** KMS Key id. */
+    private String keyId;
+
+    /** AWS Region. */
+    private Region region;
+
+    /** AWS Credentials to access the key. */
+    private AWSCredentials creds;
+
+    /** AWS Client conf. */
+    private ClientConfiguration clientConf = new ClientConfiguration();
+
+    /** Kms master key provider. */
+    private KmsMasterKeyProvider prov;
+
+    /** Aws crypto. */
+    private AwsCrypto crypto;
+
+    /**
+     * Set the KMS key id used to encrypt/decrypt the data.
+     *
+     * @param keyId Key id.
+     * @return {@code this} for chaining.
+     */
+    public AwsKmsEncryptionService setKeyId(String keyId) {
+        this.keyId = keyId;
+
+        return this;
+    }
+
+    /**
+     * AWS region.
+     *
+     * @param region Region.
+     * @return {@code this} for chaining.
+     */
+    public AwsKmsEncryptionService setRegion(Region region) {
+        this.region = region;
+
+        return this;
+    }
+
+    /**
+     * AWS credentials.
+     *
+     * @param creds Aws Credentials.
+     * @return {@code this} for chaining.
+     */
+    public AwsKmsEncryptionService setCredentials(AWSCredentials creds) {
+        this.creds = creds;
+
+        return this;
+    }
+
+    /**
+     * AWS client configuration.
+     *
+     * @param clientConf Client conf.
+     * @return {@code this} for chaining.
+     */
+    public AwsKmsEncryptionService setClientConf(ClientConfiguration clientConf) {
+        this.clientConf = clientConf;
+
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override public void init() {
+        if (creds == null || region == null || keyId == null || keyId.trim().isEmpty())
+            throw new IgniteException(String.format("At-least one of the required parameters " +
+                "[creds = %s, region = %s, keyId = %s] is invalid.", creds, region, keyId));
+
+        crypto = createClient();
+
+        prov = createKmsMasterKeyProvider();
+    }
+
+    /** {@inheritDoc} */
+    @Override public byte[] encrypt(byte[] data) {
+        if (crypto == null || prov == null)
+            throw new IgniteException("The init() method was not called.");
+
+        return crypto.encryptData(prov, data).getResult();
+    }
+
+    /** {@inheritDoc} */
+    @Override public byte[] decrypt(byte[] data) {
+        if (crypto == null || prov == null)
+            throw new IgniteException("The init() method was not called.");
+
+        CryptoResult<byte[], KmsMasterKey> decryptRes = crypto.decryptData(prov, data);
+
+        List<String> keyIds = decryptRes.getMasterKeyIds();
+
+        if (keyIds != null && !keyIds.contains(keyId))
+            throw new IgniteException("Wrong KMS key ID!");
+
+        return decryptRes.getResult();
+    }
+
+    /**
+     * @return An instance of {@link AwsCrypto}.
+     */
+    AwsCrypto createClient() {
+        return crypto = new AwsCrypto();
+    }
+
+    /**
+     * @return An instance of {@link KmsMasterKeyProvider}.
+     */
+    KmsMasterKeyProvider createKmsMasterKeyProvider() {
+        return new KmsMasterKeyProvider(new AWSStaticCredentialsProvider(creds), region, clientConf, keyId);
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(AwsKmsEncryptionService.class, this, "super", super.toString());
+    }
+}
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/EncryptionService.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/EncryptionService.java
new file mode 100644
index 0000000..296dc34
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/EncryptionService.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt;
+
/**
 * An implementation of this interface provides the ability to encrypt and decrypt
 * data, e.g. node addresses handled by the S3 IP finder.
 */
public interface EncryptionService {
    /**
     * For initialization operations. Must be called before the {@link EncryptionService#encrypt(byte[])} and {@link
     * EncryptionService#decrypt(byte[])} are used.
     */
    public void init();

    /**
     * Encrypt the input data.
     *
     * @param data Data bytes to be encrypted; must not be {@code null}.
     * @return The encrypted data bytes.
     * @throws org.apache.ignite.IgniteException If {@code data} is {@code null} or {@link #init()}
     *      was not called first (behavior of the bundled implementations).
     */
    public byte[] encrypt(byte[] data);

    /**
     * Decrypt the input data.
     *
     * @param data Encrypted data; must not be {@code null}.
     * @return Decrypted result.
     * @throws org.apache.ignite.IgniteException If {@code data} is {@code null} or {@link #init()}
     *      was not called first (behavior of the bundled implementations).
     */
    public byte[] decrypt(byte[] data);
}
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/SymmetricKeyEncryptionService.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/SymmetricKeyEncryptionService.java
new file mode 100644
index 0000000..89f0402
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/SymmetricKeyEncryptionService.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt;
+
+import java.security.Key;
+import javax.crypto.BadPaddingException;
+import javax.crypto.Cipher;
+import javax.crypto.IllegalBlockSizeException;
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.internal.util.IgniteUtils;
+import org.apache.ignite.internal.util.typedef.internal.S;
+
+/**
+ * Service to encrypt data using symmetric key encryption.
+ */
+public class SymmetricKeyEncryptionService implements EncryptionService {
+    /** Secret key. */
+    private Key secretKey;
+
+    /** Cipher, to be used for encryption. */
+    private Cipher encCipher;
+
+    /** Cipher, to be used for decryption. */
+    private Cipher decCipher;
+
+    /**
+     * The key used to encrypt and decrypt the data.
+     *
+     * @param secretKey Secret key.
+     * @return {@code this} for chaining.
+     */
+    public SymmetricKeyEncryptionService setSecretKey(Key secretKey) {
+        this.secretKey = secretKey;
+
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override public void init() throws IgniteException {
+        if (secretKey == null)
+            throw new IgniteException("Secret key was not set / was set to null.");
+
+        encCipher = IgniteUtils.createCipher(secretKey, Cipher.ENCRYPT_MODE);
+        decCipher = IgniteUtils.createCipher(secretKey, Cipher.DECRYPT_MODE);
+    }
+
+    /** {@inheritDoc} */
+    @Override public byte[] encrypt(byte[] data) {
+        if (data == null)
+            throw new IgniteException("Parameter [data] cannot be null");
+
+        if (encCipher == null)
+            throw new IgniteException("The init() method was not called.");
+
+        try {
+            return encCipher.doFinal(data);
+        }
+        catch (IllegalBlockSizeException | BadPaddingException e) {
+            throw new IgniteException(e);
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public byte[] decrypt(byte[] data) {
+        if (data == null)
+            throw new IgniteException("Parameter [data] cannot be null");
+
+        if (decCipher == null)
+            throw new IgniteException("The init() method was not called.");
+
+        try {
+            return decCipher.doFinal(data);
+        }
+        catch (BadPaddingException | IllegalBlockSizeException e) {
+            throw new IgniteException(e);
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(SymmetricKeyEncryptionService.class, this, "super", super.toString());
+    }
+}
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/package-info.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/package-info.java
new file mode 100644
index 0000000..ec64be2
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. --> Contains Encryption services.
+ */
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt;
diff --git a/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/package-info.java b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/package-info.java
new file mode 100644
index 0000000..347c5fd
--- /dev/null
+++ b/modules/aws-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Contains AWS S3-based IP finder.
+ */
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3;
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointManagerSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointManagerSelfTest.java
new file mode 100644
index 0000000..02a62cc
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointManagerSelfTest.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
+
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.BasicAWSCredentials;
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.internal.managers.checkpoint.GridCheckpointManagerAbstractSelfTest;
+import org.apache.ignite.testsuites.IgniteS3TestSuite;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Checkpoint manager test using {@link S3CheckpointSpi}.
+ */
+public class S3CheckpointManagerSelfTest extends GridCheckpointManagerAbstractSelfTest {
+    /** {@inheritDoc} */
+    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
+        assertTrue("Unexpected Ignite instance name: " + igniteInstanceName, igniteInstanceName.contains("s3"));
+
+        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
+
+        S3CheckpointSpi spi = new S3CheckpointSpi();
+
+        AWSCredentials cred = new BasicAWSCredentials(IgniteS3TestSuite.getAccessKey(),
+            IgniteS3TestSuite.getSecretKey());
+
+        spi.setAwsCredentials(cred);
+
+        spi.setBucketNameSuffix(S3CheckpointSpiSelfTest.getBucketNameSuffix());
+
+        cfg.setCheckpointSpi(spi);
+
+        return cfg;
+    }
+
+    /**
+     * @throws Exception Thrown if any exception occurs.
+     */
+    @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+    @Test
+    public void testS3Based() throws Exception {
+        retries = 6;
+
+        doTest("s3");
+    }
+
+    /**
+     * @throws Exception Thrown if any exception occurs.
+     */
+    @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+    @Test
+    public void testMultiNodeS3Based() throws Exception {
+        retries = 6;
+
+        doMultiNodeTest("s3");
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiConfigSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiConfigSelfTest.java
new file mode 100644
index 0000000..766b013
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiConfigSelfTest.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
+
+import org.apache.ignite.testframework.junits.spi.GridSpiAbstractConfigTest;
+import org.apache.ignite.testframework.junits.spi.GridSpiTest;
+import org.junit.Test;
+
/**
 * Grid S3 checkpoint SPI config self test.
 */
@GridSpiTest(spi = S3CheckpointSpi.class, group = "Checkpoint SPI")
public class S3CheckpointSpiConfigSelfTest extends GridSpiAbstractConfigTest<S3CheckpointSpi> {
    /**
     * Verifies that setting the mandatory 'awsCredentials' property to {@code null} is rejected.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testNegativeConfig() throws Exception {
        checkNegativeSpiProperty(new S3CheckpointSpi(), "awsCredentials", null);
    }
}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiSelfTest.java
new file mode 100644
index 0000000..9b51da2
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiSelfTest.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.concurrent.ThreadLocalRandom;
+import com.amazonaws.AmazonClientException;
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3Client;
+import com.amazonaws.services.s3.model.ObjectListing;
+import com.amazonaws.services.s3.model.S3ObjectSummary;
+import org.apache.ignite.GridTestIoUtils;
+import org.apache.ignite.IgniteCheckedException;
+import org.apache.ignite.internal.IgniteInterruptedCheckedException;
+import org.apache.ignite.internal.util.lang.GridAbsClosure;
+import org.apache.ignite.internal.util.lang.GridAbsClosureX;
+import org.apache.ignite.spi.IgniteSpiException;
+import org.apache.ignite.spi.checkpoint.GridCheckpointTestState;
+import org.apache.ignite.testframework.GridTestUtils;
+import org.apache.ignite.testframework.junits.spi.GridSpiAbstractTest;
+import org.apache.ignite.testframework.junits.spi.GridSpiTest;
+import org.apache.ignite.testsuites.IgniteS3TestSuite;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Grid S3 checkpoint SPI self test.
+ */
+@GridSpiTest(spi = S3CheckpointSpi.class, group = "Checkpoint SPI")
+@Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+public class S3CheckpointSpiSelfTest extends GridSpiAbstractTest<S3CheckpointSpi> {
+    /** Number of checkpoints saved/loaded/removed in each test. */
+    private static final int CHECK_POINT_COUNT = 10;
+
+    /** Common prefix for checkpoint keys used by the tests. */
+    private static final String KEY_PREFIX = "testCheckpoint";
+
+    /** {@inheritDoc} */
+    @Override protected void spiConfigure(S3CheckpointSpi spi) throws Exception {
+        AWSCredentials cred = new BasicAWSCredentials(IgniteS3TestSuite.getAccessKey(),
+            IgniteS3TestSuite.getSecretKey());
+
+        spi.setAwsCredentials(cred);
+
+        spi.setBucketNameSuffix(getBucketNameSuffix());
+
+        super.spiConfigure(spi);
+    }
+
+    /**
+     * Removes all checkpoint objects from the test S3 bucket. @throws Exception If error.
+     */
+    @Override protected void afterSpiStopped() throws Exception {
+        AWSCredentials cred = new BasicAWSCredentials(IgniteS3TestSuite.getAccessKey(),
+            IgniteS3TestSuite.getSecretKey());
+
+        AmazonS3 s3 = new AmazonS3Client(cred);
+
+        String bucketName = S3CheckpointSpi.BUCKET_NAME_PREFIX + "unit-test-bucket";
+
+        try {
+            ObjectListing list = s3.listObjects(bucketName);
+
+            while (true) {
+                for (S3ObjectSummary sum : list.getObjectSummaries())
+                    s3.deleteObject(bucketName, sum.getKey());
+
+                if (list.isTruncated())
+                    list = s3.listNextBatchOfObjects(list);
+                else
+                    break;
+            }
+        }
+        catch (AmazonClientException e) {
+            throw new IgniteSpiException("Failed to read checkpoint bucket: " + bucketName, e);
+        }
+    }
+
+    /**
+     * @throws Exception Thrown in case of any errors.
+     */
+    @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+    @Test
+    public void testSaveLoadRemoveWithoutExpire() throws Exception {
+        String dataPrefix = "Test check point data ";
+
+        // Save states.
+        for (int i = 0; i < CHECK_POINT_COUNT; i++) {
+            GridCheckpointTestState state = new GridCheckpointTestState(dataPrefix + i);
+
+            getSpi().saveCheckpoint(KEY_PREFIX + i, GridTestIoUtils.serializeJdk(state), 0, true);
+        }
+
+        // Load and check states.
+        for (int i = 0; i < CHECK_POINT_COUNT; i++) {
+            final String key = KEY_PREFIX + i;
+
+            assertWithRetries(new GridAbsClosureX() {
+                @Override public void applyx() throws IgniteCheckedException {
+                    assertNotNull("Missing checkpoint: " + key,
+                        getSpi().loadCheckpoint(key));
+                }
+            });
+
+            // Doing it again as pulling value from repeated assertion is tricky,
+            // and all assertions below shouldn't be retried in case of failure.
+            byte[] serState = getSpi().loadCheckpoint(key);
+
+            GridCheckpointTestState state = GridTestIoUtils.deserializeJdk(serState);
+
+            assertNotNull("Can't load checkpoint state for key: " + key, state);
+            assertEquals("Invalid state loaded [expected='" + dataPrefix + i + "', received='" + state.getData() + "']",
+                dataPrefix + i, state.getData());
+        }
+
+        // Remove states.
+        for (int i = 0; i < CHECK_POINT_COUNT; i++) {
+            final String key = KEY_PREFIX + i;
+
+            assertWithRetries(new GridAbsClosureX() {
+                @Override public void applyx() throws IgniteCheckedException {
+                    assertTrue(getSpi().removeCheckpoint(key));
+                }
+            });
+        }
+
+        // Check that states were removed.
+        for (int i = 0; i < CHECK_POINT_COUNT; i++) {
+            final String key = KEY_PREFIX + i;
+
+            assertWithRetries(new GridAbsClosureX() {
+                @Override public void applyx() throws IgniteCheckedException {
+                    assertNull(getSpi().loadCheckpoint(key));
+                }
+            });
+        }
+    }
+
+    /**
+     * @throws Exception Thrown in case of any errors.
+     */
+    @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+    @Test
+    public void testSaveWithExpire() throws Exception {
+        // Save states.
+        for (int i = 0; i < CHECK_POINT_COUNT; i++) {
+            GridCheckpointTestState state = new GridCheckpointTestState("Test check point data " + i + '.');
+
+            getSpi().saveCheckpoint(KEY_PREFIX + i, GridTestIoUtils.serializeJdk(state), 1, true);
+        }
+
+        // For small expiration intervals there is no guarantee that the state will be removed.
+        Thread.sleep(100);
+
+        // Check that states were removed.
+        for (int i = 0; i < CHECK_POINT_COUNT; i++) {
+            final String key = KEY_PREFIX + i;
+
+            assertWithRetries(new GridAbsClosureX() {
+                @Override public void applyx() throws IgniteCheckedException {
+                    assertNull("Checkpoint state should not be loaded with key: " + key,
+                        getSpi().loadCheckpoint(key));
+                }
+            });
+        }
+    }
+
+    /**
+     * @throws Exception Thrown in case of any errors.
+     */
+    @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+    @Test
+    public void testDuplicates() throws Exception {
+        int idx1 = 1;
+        int idx2 = 2;
+
+        GridCheckpointTestState state1 = new GridCheckpointTestState(Integer.toString(idx1));
+        GridCheckpointTestState state2 = new GridCheckpointTestState(Integer.toString(idx2));
+
+        getSpi().saveCheckpoint(KEY_PREFIX, GridTestIoUtils.serializeJdk(state1), 0, true);
+        getSpi().saveCheckpoint(KEY_PREFIX, GridTestIoUtils.serializeJdk(state2), 0, true);
+
+        assertWithRetries(new GridAbsClosureX() {
+            @Override public void applyx() throws IgniteCheckedException {
+                assertNotNull(getSpi().loadCheckpoint(KEY_PREFIX));
+            }
+        });
+
+        byte[] serState = getSpi().loadCheckpoint(KEY_PREFIX);
+
+        GridCheckpointTestState state = GridTestIoUtils.deserializeJdk(serState);
+
+        assertNotNull(state);
+        assertEquals(state2, state);
+
+        // Remove.
+        getSpi().removeCheckpoint(KEY_PREFIX);
+
+        assertWithRetries(new GridAbsClosureX() {
+            @Override public void applyx() throws IgniteCheckedException {
+                assertNull(getSpi().loadCheckpoint(KEY_PREFIX));
+            }
+        });
+    }
+
+    /**
+     * Wrapper around {@link GridTestUtils#retryAssert(org.apache.ignite.IgniteLogger, int, long, GridAbsClosure)}.
+     * Provides s3-specific timeouts.
+     * @param assertion Closure with assertion inside.
+     * @throws org.apache.ignite.internal.IgniteInterruptedCheckedException If was interrupted.
+     */
+    private void assertWithRetries(GridAbsClosureX assertion) throws IgniteInterruptedCheckedException {
+        GridTestUtils.retryAssert(log, 6, 5000, assertion);
+    }
+
+    /**
+     * Gets a bucket name suffix.
+     * The bucket name suffix should be unique per host to allow parallel test runs on one bucket.
+     * Please note that the final bucket name should not exceed 63 chars.
+     *
+     * @return Bucket name suffix.
+     */
+    static String getBucketNameSuffix() {
+        String bucketNameSuffix;
+        try {
+            bucketNameSuffix = IgniteS3TestSuite.getBucketName(
+                "unit-test-" + InetAddress.getLocalHost().getHostName().toLowerCase());
+        }
+        catch (UnknownHostException e) {
+            bucketNameSuffix = IgniteS3TestSuite.getBucketName(
+                "unit-test-rnd-" + ThreadLocalRandom.current().nextInt(100));
+        }
+
+        return bucketNameSuffix;
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiStartStopBucketEndpointSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiStartStopBucketEndpointSelfTest.java
new file mode 100644
index 0000000..2e1bd9b
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiStartStopBucketEndpointSelfTest.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
+
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.BasicAWSCredentials;
+import org.apache.ignite.spi.GridSpiStartStopAbstractTest;
+import org.apache.ignite.testframework.junits.spi.GridSpiTest;
+import org.apache.ignite.testsuites.IgniteS3TestSuite;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Grid S3 checkpoint SPI start/stop self test with an explicitly configured bucket endpoint.
+ */
+@GridSpiTest(spi = S3CheckpointSpi.class, group = "Checkpoint SPI")
+public class S3CheckpointSpiStartStopBucketEndpointSelfTest extends GridSpiStartStopAbstractTest<S3CheckpointSpi> {
+    /** {@inheritDoc} */
+    @Override protected void spiConfigure(S3CheckpointSpi spi) throws Exception {
+        AWSCredentials cred = new BasicAWSCredentials(IgniteS3TestSuite.getAccessKey(),
+            IgniteS3TestSuite.getSecretKey());
+
+        spi.setAwsCredentials(cred);
+        spi.setBucketNameSuffix(S3CheckpointSpiSelfTest.getBucketNameSuffix() + "-e");
+        spi.setBucketEndpoint("s3.us-east-2.amazonaws.com");
+
+        super.spiConfigure(spi);
+    }
+
+    /** {@inheritDoc} */
+    @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+    @Test
+    @Override public void testStartStop() throws Exception {
+        super.testStartStop();
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiStartStopSSEAlgorithmSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiStartStopSSEAlgorithmSelfTest.java
new file mode 100644
index 0000000..916c79b
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiStartStopSSEAlgorithmSelfTest.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
+
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.BasicAWSCredentials;
+import org.apache.ignite.spi.GridSpiStartStopAbstractTest;
+import org.apache.ignite.testframework.junits.spi.GridSpiTest;
+import org.apache.ignite.testsuites.IgniteS3TestSuite;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Grid S3 checkpoint SPI start/stop self test with a server-side encryption (SSE) algorithm configured.
+ */
+@GridSpiTest(spi = S3CheckpointSpi.class, group = "Checkpoint SPI")
+public class S3CheckpointSpiStartStopSSEAlgorithmSelfTest extends GridSpiStartStopAbstractTest<S3CheckpointSpi> {
+    /** {@inheritDoc} */
+    @Override protected void spiConfigure(S3CheckpointSpi spi) throws Exception {
+        AWSCredentials cred = new BasicAWSCredentials(IgniteS3TestSuite.getAccessKey(),
+            IgniteS3TestSuite.getSecretKey());
+
+        spi.setAwsCredentials(cred);
+        spi.setBucketNameSuffix(S3CheckpointSpiSelfTest.getBucketNameSuffix());
+        spi.setSSEAlgorithm("AES256");
+
+        super.spiConfigure(spi);
+    }
+
+    /** {@inheritDoc} */
+    @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+    @Test
+    @Override public void testStartStop() throws Exception {
+        super.testStartStop();
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiStartStopSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiStartStopSelfTest.java
new file mode 100644
index 0000000..eadefaf
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointSpiStartStopSelfTest.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
+
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.BasicAWSCredentials;
+import org.apache.ignite.spi.GridSpiStartStopAbstractTest;
+import org.apache.ignite.testframework.junits.spi.GridSpiTest;
+import org.apache.ignite.testsuites.IgniteS3TestSuite;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Grid S3 checkpoint SPI start/stop self test with default bucket settings.
+ */
+@GridSpiTest(spi = S3CheckpointSpi.class, group = "Checkpoint SPI")
+public class S3CheckpointSpiStartStopSelfTest extends GridSpiStartStopAbstractTest<S3CheckpointSpi> {
+    /** {@inheritDoc} */
+    @Override protected void spiConfigure(S3CheckpointSpi spi) throws Exception {
+        AWSCredentials cred = new BasicAWSCredentials(IgniteS3TestSuite.getAccessKey(),
+            IgniteS3TestSuite.getSecretKey());
+
+        spi.setAwsCredentials(cred);
+
+        spi.setBucketNameSuffix(S3CheckpointSpiSelfTest.getBucketNameSuffix());
+
+        super.spiConfigure(spi);
+    }
+
+    /** {@inheritDoc} */
+    @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+    @Test
+    @Override public void testStartStop() throws Exception {
+        super.testStartStop();
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3SessionCheckpointSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3SessionCheckpointSelfTest.java
new file mode 100644
index 0000000..43c0e26
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3SessionCheckpointSelfTest.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
+
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.BasicAWSCredentials;
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.session.GridSessionCheckpointAbstractSelfTest;
+import org.apache.ignite.session.GridSessionCheckpointSelfTest;
+import org.apache.ignite.testsuites.IgniteS3TestSuite;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Grid session checkpoint self test using {@link S3CheckpointSpi}.
+ */
+public class S3SessionCheckpointSelfTest extends GridSessionCheckpointAbstractSelfTest {
+    /**
+     * Verifies task session checkpoints through the S3 checkpoint SPI. @throws Exception If failed.
+     */
+    @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+    @Test
+    public void testS3Checkpoint() throws Exception {
+        IgniteConfiguration cfg = getConfiguration();
+
+        S3CheckpointSpi spi = new S3CheckpointSpi();
+
+        AWSCredentials cred = new BasicAWSCredentials(IgniteS3TestSuite.getAccessKey(),
+            IgniteS3TestSuite.getSecretKey());
+
+        spi.setAwsCredentials(cred);
+
+        spi.setBucketNameSuffix(S3CheckpointSpiSelfTest.getBucketNameSuffix());
+
+        cfg.setCheckpointSpi(spi);
+
+        GridSessionCheckpointSelfTest.spi = spi;
+
+        checkCheckpoints(cfg);
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/package-info.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/package-info.java
new file mode 100644
index 0000000..e403a5e
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/checkpoint/s3/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Tests for the S3-based checkpoint SPI implementation.
+ * Contains internal tests or test related classes and interfaces.
+ */
+
+package org.apache.ignite.spi.checkpoint.s3;
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/TcpDiscoveryAlbIpFinderSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/TcpDiscoveryAlbIpFinderSelfTest.java
new file mode 100644
index 0000000..c9581c9
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/TcpDiscoveryAlbIpFinderSelfTest.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.elb;
+
+import org.apache.ignite.spi.IgniteSpiException;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAbstractSelfTest;
+import org.junit.Test;
+
+/**
+ * Tests {@link TcpDiscoveryAlbIpFinder} configuration validation.
+ */
+public class TcpDiscoveryAlbIpFinderSelfTest extends TcpDiscoveryIpFinderAbstractSelfTest<TcpDiscoveryAlbIpFinder> {
+    /**
+     * Constructor.
+     *
+     * @throws Exception If any error occurs.
+     */
+    public TcpDiscoveryAlbIpFinderSelfTest() throws Exception {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTest() throws Exception {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override protected TcpDiscoveryAlbIpFinder ipFinder() throws Exception {
+        return null;
+    }
+
+    /** {@inheritDoc} */
+    @Test
+    @Override public void testIpFinder() throws Exception {
+        TcpDiscoveryAlbIpFinder ipFinder = new TcpDiscoveryAlbIpFinder();
+
+        ipFinder.setRegion(null);
+
+        try {
+            ipFinder.getRegisteredAddresses();
+        }
+        catch (IgniteSpiException e) {
+            assertTrue(e.getMessage().startsWith("One or more configuration parameters are invalid"));
+        }
+
+        ipFinder = new TcpDiscoveryAlbIpFinder();
+
+        ipFinder.setTargetGrpARN(null);
+
+        try {
+            ipFinder.getRegisteredAddresses();
+        }
+        catch (IgniteSpiException e) {
+            assertTrue(e.getMessage().startsWith("One or more configuration parameters are invalid"));
+        }
+
+        ipFinder = new TcpDiscoveryAlbIpFinder();
+
+        ipFinder.setCredentialsProvider(null);
+
+        try {
+            ipFinder.getRegisteredAddresses();
+        }
+        catch (IgniteSpiException e) {
+            assertTrue(e.getMessage().startsWith("One or more configuration parameters are invalid"));
+        }
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/TcpDiscoveryElbIpFinderSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/TcpDiscoveryElbIpFinderSelfTest.java
new file mode 100644
index 0000000..312f065
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/TcpDiscoveryElbIpFinderSelfTest.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.elb;
+
+import org.apache.ignite.spi.IgniteSpiException;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAbstractSelfTest;
+import org.junit.Test;
+
+/**
+ * Tests {@link TcpDiscoveryElbIpFinder} configuration validation.
+ */
+public class TcpDiscoveryElbIpFinderSelfTest extends TcpDiscoveryIpFinderAbstractSelfTest<TcpDiscoveryElbIpFinder> {
+    /**
+     * Constructor.
+     *
+     * @throws Exception If any error occurs.
+     */
+    public TcpDiscoveryElbIpFinderSelfTest() throws Exception {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTest() throws Exception {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override protected TcpDiscoveryElbIpFinder ipFinder() throws Exception {
+        return null;
+    }
+
+    /** {@inheritDoc} */
+    @Test
+    @Override public void testIpFinder() throws Exception {
+        TcpDiscoveryElbIpFinder ipFinder = new TcpDiscoveryElbIpFinder();
+
+        ipFinder.setRegion(null);
+
+        try {
+            ipFinder.getRegisteredAddresses();
+        }
+        catch (IgniteSpiException e) {
+            assertTrue(e.getMessage().startsWith("One or more configuration parameters are invalid"));
+        }
+
+        ipFinder = new TcpDiscoveryElbIpFinder();
+
+        ipFinder.setLoadBalancerName(null);
+
+        try {
+            ipFinder.getRegisteredAddresses();
+        }
+        catch (IgniteSpiException e) {
+            assertTrue(e.getMessage().startsWith("One or more configuration parameters are invalid"));
+        }
+
+        ipFinder = new TcpDiscoveryElbIpFinder();
+
+        ipFinder.setCredentialsProvider(null);
+
+        try {
+            ipFinder.getRegisteredAddresses();
+        }
+        catch (IgniteSpiException e) {
+            assertTrue(e.getMessage().startsWith("One or more configuration parameters are invalid"));
+        }
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/package-info.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/package-info.java
new file mode 100644
index 0000000..dffd886
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/elb/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Tests for the AWS ELB/ALB-based TCP discovery IP finders.
+ * Contains internal tests or test related classes and interfaces.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.elb;
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderAbstractSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderAbstractSelfTest.java
new file mode 100644
index 0000000..685a8d6
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderAbstractSelfTest.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.Collection;
+import java.util.concurrent.ThreadLocalRandom;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAbstractSelfTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt.EncryptionService;
+import org.apache.ignite.testsuites.IgniteS3TestSuite;
+import org.jetbrains.annotations.Nullable;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Abstract TcpDiscoveryS3IpFinder to test with different ways of setting AWS credentials.
+ */
+public abstract class TcpDiscoveryS3IpFinderAbstractSelfTest
+    extends TcpDiscoveryIpFinderAbstractSelfTest<TcpDiscoveryS3IpFinder> {
+    /** Bucket endpoint. Left {@code null} to use the default AWS endpoint. */
+    @Nullable protected String bucketEndpoint;
+
+    /** Server-side encryption algorithm for Amazon S3-managed encryption keys. */
+    @Nullable protected String SSEAlgorithm;
+
+    /** Key prefix of the address. */
+    @Nullable protected String keyPrefix;
+
+    /** Encryption service. */
+    @Nullable protected EncryptionService encryptionSvc;
+
+    /**
+     * Constructor.
+     *
+     * @throws Exception If any error occurs.
+     */
+    TcpDiscoveryS3IpFinderAbstractSelfTest() throws Exception {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override protected TcpDiscoveryS3IpFinder ipFinder() throws Exception {
+        TcpDiscoveryS3IpFinder finder = new TcpDiscoveryS3IpFinder();
+
+        injectLogger(finder);
+
+        assert finder.isShared() : "Ip finder should be shared by default.";
+
+        setAwsCredentials(finder);
+        setBucketEndpoint(finder);
+        setBucketName(finder);
+        setSSEAlgorithm(finder);
+        setKeyPrefix(finder);
+        setEncryptionService(finder);
+
+        // Unregister addresses left over from previous runs; give S3 a few attempts to report an empty listing.
+        for (int i = 0; i < 5; i++) {
+            Collection<InetSocketAddress> addrs = finder.getRegisteredAddresses();
+
+            if (!addrs.isEmpty())
+                finder.unregisterAddresses(addrs);
+            else
+                return finder;
+
+            U.sleep(1000);
+        }
+
+        if (!finder.getRegisteredAddresses().isEmpty())
+            throw new Exception("Failed to initialize IP finder.");
+
+        return finder;
+    }
+
+    /** {@inheritDoc} */
+    @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420")
+    @Test
+    @Override public void testIpFinder() throws Exception {
+        super.testIpFinder();
+    }
+
+    /**
+     * Set AWS credentials into the provided {@code finder}.
+     *
+     * @param finder finder credentials to set into
+     */
+    protected abstract void setAwsCredentials(TcpDiscoveryS3IpFinder finder);
+
+    /**
+     * Set Bucket endpoint into the provided {@code finder}.
+     *
+     * @param finder finder endpoint to set into.
+     */
+    private void setBucketEndpoint(TcpDiscoveryS3IpFinder finder) {
+        finder.setBucketEndpoint(bucketEndpoint);
+    }
+
+    /**
+     * Set server-side encryption algorithm for Amazon S3-managed encryption keys into the provided {@code finder}.
+     *
+     * @param finder finder encryption algorithm to set into.
+     */
+    private void setSSEAlgorithm(TcpDiscoveryS3IpFinder finder) {
+        finder.setSSEAlgorithm(SSEAlgorithm);
+    }
+
+    /**
+     * Set Bucket name into the provided {@code finder}.
+     *
+     * @param finder finder bucket name to set into.
+     */
+    protected void setBucketName(TcpDiscoveryS3IpFinder finder) {
+        finder.setBucketName(getBucketName());
+    }
+
+    /**
+     * Set the ip address key prefix into the provided {@code finder}.
+     *
+     * @param finder finder key prefix to set into.
+     */
+    protected void setKeyPrefix(TcpDiscoveryS3IpFinder finder) {
+        finder.setKeyPrefix(keyPrefix);
+    }
+
+    /**
+     * Set encryption service into the provided {@code finder}.
+     *
+     * @param finder finder encryption service to set into.
+     */
+    protected void setEncryptionService(TcpDiscoveryS3IpFinder finder) {
+        finder.setEncryptionService(encryptionSvc);
+    }
+
+    /**
+     * Gets Bucket name. Bucket name should be unique for the host to parallel test run on one bucket. Please note that
+     * the final bucket name should not exceed 63 chars.
+     *
+     * @return Bucket name.
+     */
+    static String getBucketName() {
+        String bucketName;
+        try {
+            bucketName = IgniteS3TestSuite.getBucketName(
+                "ip-finder-unit-test-" + InetAddress.getLocalHost().getHostName().toLowerCase());
+        }
+        catch (UnknownHostException e) {
+            // Host name unavailable - fall back to a random suffix so the bucket name is still (likely) unique.
+            bucketName = IgniteS3TestSuite.getBucketName(
+                "ip-finder-unit-test-rnd-" + ThreadLocalRandom.current().nextInt(100));
+        }
+
+        return bucketName;
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderAwsCredentialsProviderSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderAwsCredentialsProviderSelfTest.java
new file mode 100644
index 0000000..dc3aa58
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderAwsCredentialsProviderSelfTest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3;
+
+import com.amazonaws.auth.AWSStaticCredentialsProvider;
+import com.amazonaws.auth.BasicAWSCredentials;
+import org.apache.ignite.testsuites.IgniteS3TestSuite;
+import org.junit.Test;
+
+/**
+ * TcpDiscoveryS3IpFinder test using AWS credentials provider.
+ */
+public class TcpDiscoveryS3IpFinderAwsCredentialsProviderSelfTest extends TcpDiscoveryS3IpFinderAbstractSelfTest {
+    /**
+     * Constructor.
+     *
+     * @throws Exception If any error occurs.
+     */
+    public TcpDiscoveryS3IpFinderAwsCredentialsProviderSelfTest() throws Exception {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void setAwsCredentials(TcpDiscoveryS3IpFinder finder) {
+        finder.setAwsCredentialsProvider(new AWSStaticCredentialsProvider(
+            new BasicAWSCredentials(IgniteS3TestSuite.getAccessKey(), IgniteS3TestSuite.getSecretKey())));
+    }
+
+    /** {@inheritDoc} */
+    @Test
+    @Override public void testIpFinder() throws Exception {
+        super.testIpFinder();
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderAwsCredentialsSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderAwsCredentialsSelfTest.java
new file mode 100644
index 0000000..64dfc7b
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderAwsCredentialsSelfTest.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3;
+
+import com.amazonaws.auth.BasicAWSCredentials;
+import org.apache.ignite.testsuites.IgniteS3TestSuite;
+import org.junit.Test;
+
+/**
+ * TcpDiscoveryS3IpFinder test using AWS credentials.
+ */
+public class TcpDiscoveryS3IpFinderAwsCredentialsSelfTest extends TcpDiscoveryS3IpFinderAbstractSelfTest {
+    /**
+     * Constructor.
+     *
+     * @throws Exception If any error occurs.
+     */
+    public TcpDiscoveryS3IpFinderAwsCredentialsSelfTest() throws Exception {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void setAwsCredentials(TcpDiscoveryS3IpFinder finder) {
+        finder.setAwsCredentials(new BasicAWSCredentials(IgniteS3TestSuite.getAccessKey(),
+            IgniteS3TestSuite.getSecretKey()));
+    }
+
+    /** {@inheritDoc} */
+    @Test
+    @Override public void testIpFinder() throws Exception {
+        super.testIpFinder();
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderBucketEndpointSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderBucketEndpointSelfTest.java
new file mode 100644
index 0000000..9b280e8
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderBucketEndpointSelfTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3;
+
+import com.amazonaws.auth.BasicAWSCredentials;
+import org.apache.ignite.testsuites.IgniteS3TestSuite;
+import org.junit.Test;
+
+/**
+ * TcpDiscoveryS3IpFinder tests bucket endpoint for IP finder.
+ * For information about possible endpoint names visit
+ * <a href="http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region">docs.aws.amazon.com</a>.
+ */
+public class TcpDiscoveryS3IpFinderBucketEndpointSelfTest extends TcpDiscoveryS3IpFinderAbstractSelfTest {
+    /**
+     * Constructor. Selects an explicit regional bucket endpoint.
+     *
+     * @throws Exception If any error occurs.
+     */
+    public TcpDiscoveryS3IpFinderBucketEndpointSelfTest() throws Exception {
+        bucketEndpoint = "s3.us-east-2.amazonaws.com";
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void setAwsCredentials(TcpDiscoveryS3IpFinder finder) {
+        finder.setAwsCredentials(new BasicAWSCredentials(IgniteS3TestSuite.getAccessKey(),
+            IgniteS3TestSuite.getSecretKey()));
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void setBucketName(TcpDiscoveryS3IpFinder finder) {
+        // Use a dedicated "-e" suffixed bucket so this test does not clash with the other S3 IP finder tests.
+        // The base implementation is intentionally not invoked: it would only set a name that is overwritten here.
+        finder.setBucketName(getBucketName() + "-e");
+    }
+
+    /** {@inheritDoc} */
+    @Test
+    @Override public void testIpFinder() throws Exception {
+        super.testIpFinder();
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderClientSideEncryptionSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderClientSideEncryptionSelfTest.java
new file mode 100644
index 0000000..73cc3fc
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderClientSideEncryptionSelfTest.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3;
+
+import com.amazonaws.auth.BasicAWSCredentials;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.client.DummyS3Client;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt.EncryptionService;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt.MockEncryptionService;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+/**
+ * TcpDiscoveryS3IpFinder tests client side encryption for S3 IP finder.
+ */
+public class TcpDiscoveryS3IpFinderClientSideEncryptionSelfTest extends TcpDiscoveryS3IpFinderAbstractSelfTest {
+    /**
+     * Constructor.
+     *
+     * @throws Exception If any error occurs.
+     */
+    public TcpDiscoveryS3IpFinderClientSideEncryptionSelfTest() throws Exception {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void setAwsCredentials(TcpDiscoveryS3IpFinder finder) {
+        // No real AWS account is needed: the S3 client is replaced with a dummy in ipFinder().
+        finder.setAwsCredentials(new BasicAWSCredentials("dummy", "dummy"));
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void setEncryptionService(TcpDiscoveryS3IpFinder finder) {
+        EncryptionService encryptionSvc = MockEncryptionService.instance();
+        encryptionSvc.init();
+        finder.setEncryptionService(encryptionSvc);
+    }
+
+    /** {@inheritDoc} */
+    @Override protected TcpDiscoveryS3IpFinder ipFinder() {
+        // Spy the finder so the AWS S3 client can be substituted with an in-memory dummy implementation.
+        TcpDiscoveryS3IpFinder ipFinder = Mockito.spy(new TcpDiscoveryS3IpFinder());
+
+        Mockito.doReturn(new DummyS3Client()).when(ipFinder).createAmazonS3Client();
+
+        setAwsCredentials(ipFinder);
+        setBucketName(ipFinder);
+        setKeyPrefix(ipFinder);
+        setEncryptionService(ipFinder);
+
+        return ipFinder;
+    }
+
+    /** {@inheritDoc} */
+    @Test
+    @Override public void testIpFinder() throws Exception {
+        injectLogger(finder);
+
+        assert finder.isShared() : "Ip finder should be shared by default.";
+
+        super.testIpFinder();
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderKeyPrefixSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderKeyPrefixSelfTest.java
new file mode 100644
index 0000000..c99d06a
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderKeyPrefixSelfTest.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3;
+
+import com.amazonaws.auth.BasicAWSCredentials;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.client.DummyS3Client;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+/**
+ * TcpDiscoveryS3IpFinder tests key prefix for IP finder. For information about key prefix visit:
+ * <a href="https://docs.aws.amazon.com/AmazonS3/latest/dev/ListingKeysHierarchy.html">docs.aws.amazon.com</a>.
+ */
+public class TcpDiscoveryS3IpFinderKeyPrefixSelfTest extends TcpDiscoveryS3IpFinderAbstractSelfTest {
+    /**
+     * Constructor.
+     *
+     * @throws Exception If any error occurs.
+     */
+    public TcpDiscoveryS3IpFinderKeyPrefixSelfTest() throws Exception {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void setAwsCredentials(TcpDiscoveryS3IpFinder finder) {
+        // No real AWS account is needed: the S3 client is replaced with a dummy in ipFinder().
+        finder.setAwsCredentials(new BasicAWSCredentials("dummy", "dummy"));
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void setKeyPrefix(TcpDiscoveryS3IpFinder finder) {
+        finder.setKeyPrefix("/test/key/prefix");
+    }
+
+    /** {@inheritDoc} */
+    @Override protected TcpDiscoveryS3IpFinder ipFinder() throws Exception {
+        // Spy the finder so the AWS S3 client can be substituted with an in-memory dummy implementation.
+        TcpDiscoveryS3IpFinder ipFinder = Mockito.spy(new TcpDiscoveryS3IpFinder());
+
+        Mockito.doReturn(new DummyS3Client()).when(ipFinder).createAmazonS3Client();
+
+        setAwsCredentials(ipFinder);
+        setBucketName(ipFinder);
+        setKeyPrefix(ipFinder);
+
+        return ipFinder;
+    }
+
+    /** {@inheritDoc} */
+    @Test
+    @Override public void testIpFinder() throws Exception {
+        injectLogger(finder);
+
+        assert finder.isShared() : "Ip finder should be shared by default.";
+
+        super.testIpFinder();
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderSSEAlgorithmSelfTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderSSEAlgorithmSelfTest.java
new file mode 100644
index 0000000..d13c7c3
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/TcpDiscoveryS3IpFinderSSEAlgorithmSelfTest.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3;
+
+import com.amazonaws.auth.BasicAWSCredentials;
+import org.apache.ignite.testsuites.IgniteS3TestSuite;
+import org.junit.Test;
+
+/**
+ * TcpDiscoveryS3IpFinder tests server-side encryption algorithm for Amazon S3-managed encryption keys.
+ * For information about possible S3-managed encryption keys visit
+ * <a href="http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingServerSideEncryption.html">docs.aws.amazon.com</a>.
+ */
+public class TcpDiscoveryS3IpFinderSSEAlgorithmSelfTest extends TcpDiscoveryS3IpFinderAbstractSelfTest {
+    /**
+     * Constructor.
+     *
+     * @throws Exception If any error occurs.
+     */
+    public TcpDiscoveryS3IpFinderSSEAlgorithmSelfTest() throws Exception {
+        SSEAlgorithm = "AES256";
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void setAwsCredentials(TcpDiscoveryS3IpFinder finder) {
+        finder.setAwsCredentials(new BasicAWSCredentials(IgniteS3TestSuite.getAccessKey(),
+            IgniteS3TestSuite.getSecretKey()));
+    }
+
+    /** {@inheritDoc} */
+    @Test
+    @Override public void testIpFinder() throws Exception {
+        super.testIpFinder();
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/DummyObjectListing.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/DummyObjectListing.java
new file mode 100644
index 0000000..c3300a5
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/DummyObjectListing.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.client;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+import com.amazonaws.services.s3.model.ObjectListing;
+import com.amazonaws.services.s3.model.S3ObjectSummary;
+
+/**
+ * Class to simulate the functionality of {@link ObjectListing}.
+ */
+public class DummyObjectListing extends ObjectListing {
+    /** Iterator over the S3 object summaries; advanced one element per {@link #getObjectSummaries()} call. */
+    private final Iterator<S3ObjectSummary> objSummariesIter;
+
+    /**
+     * Constructor
+     *
+     * @param objSummaries Iterator over the S3 object summaries.
+     */
+    private DummyObjectListing(Iterator<S3ObjectSummary> objSummaries) {
+        this.objSummariesIter = objSummaries;
+    }
+
+    /**
+     * Creates an instance of {@link DummyObjectListing}. The object summaries are created using the given bucket name
+     * and object keys.
+     *
+     * @param bucketName AWS Bucket name.
+     * @param keys The keys in the bucket.
+     * @return Instance of this object.
+     */
+    static DummyObjectListing of(String bucketName, Set<String> keys) {
+        List<S3ObjectSummary> objSummaries = keys.stream().map(key -> {
+            S3ObjectSummary s3ObjSummary = new S3ObjectSummary();
+            s3ObjSummary.setBucketName(bucketName);
+            s3ObjSummary.setKey(key);
+            return s3ObjSummary;
+        }).collect(Collectors.toList());
+
+        return new DummyObjectListing(objSummaries.iterator());
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * <p>Returns at most one summary per call to emulate a paginated (truncated) S3 listing.
+     */
+    @Override public List<S3ObjectSummary> getObjectSummaries() {
+        if (!objSummariesIter.hasNext())
+            return Collections.emptyList();
+
+        List<S3ObjectSummary> list = new LinkedList<>();
+
+        list.add(objSummariesIter.next());
+
+        return list;
+    }
+
+    /** {@inheritDoc} */
+    @Override public boolean isTruncated() {
+        return objSummariesIter.hasNext();
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/DummyObjectListingTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/DummyObjectListingTest.java
new file mode 100644
index 0000000..834453b
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/DummyObjectListingTest.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.client;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import com.amazonaws.services.s3.model.ObjectListing;
+import com.amazonaws.services.s3.model.S3ObjectSummary;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Test;
+
+/**
+ * Class to test {@link DummyObjectListing}.
+ */
+public class DummyObjectListingTest extends GridCommonAbstractTest {
+    /**
+     * Test cases for various object listing functions for S3 bucket.
+     */
+    @Test
+    public void testDummyObjectListing() {
+        // Three keys -> the listing reports "truncated" until the last one is fetched.
+        Set<String> keys = new HashSet<>();
+
+        keys.add("/test/path/val");
+        keys.add("/test/val/test/path");
+        keys.add("/test/test/path/val");
+
+        ObjectListing objListing = DummyObjectListing.of("bucket", keys);
+
+        List<S3ObjectSummary> objSummaries = objListing.getObjectSummaries();
+
+        assertFalse("'testBucket' contains keys", objSummaries.isEmpty());
+        assertTrue("'testBucket' contains more keys to fetch", objListing.isTruncated());
+        assertTrue(keys.contains(objSummaries.get(0).getKey()));
+
+        objSummaries = objListing.getObjectSummaries();
+
+        assertFalse("'testBucket' contains keys", objSummaries.isEmpty());
+        assertTrue("'testBucket' contains more keys to fetch", objListing.isTruncated());
+        assertTrue(keys.contains(objSummaries.get(0).getKey()));
+
+        objSummaries = objListing.getObjectSummaries();
+
+        assertFalse("'testBucket' contains keys", objSummaries.isEmpty());
+        assertFalse("'testBucket' does not contain anymore keys", objListing.isTruncated());
+        assertTrue(keys.contains(objSummaries.get(0).getKey()));
+
+        // An empty bucket yields an empty, non-truncated listing.
+        objListing = DummyObjectListing.of("bucket", new HashSet<>());
+
+        objSummaries = objListing.getObjectSummaries();
+
+        assertTrue("'testBucket' does not contains keys", objSummaries.isEmpty());
+        assertFalse("'testBucket' does not contain anymore keys", objListing.isTruncated());
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/DummyS3Client.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/DummyS3Client.java
new file mode 100644
index 0000000..350675a
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/DummyS3Client.java
@@ -0,0 +1,1085 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.client;
+
+import java.io.File;
+import java.io.InputStream;
+import java.net.URL;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+import com.amazonaws.AmazonWebServiceRequest;
+import com.amazonaws.HttpMethod;
+import com.amazonaws.SdkClientException;
+import com.amazonaws.regions.Region;
+import com.amazonaws.services.s3.AmazonS3Client;
+import com.amazonaws.services.s3.S3ClientOptions;
+import com.amazonaws.services.s3.S3ResponseMetadata;
+import com.amazonaws.services.s3.model.AbortMultipartUploadRequest;
+import com.amazonaws.services.s3.model.AccessControlList;
+import com.amazonaws.services.s3.model.AmazonS3Exception;
+import com.amazonaws.services.s3.model.Bucket;
+import com.amazonaws.services.s3.model.BucketAccelerateConfiguration;
+import com.amazonaws.services.s3.model.BucketCrossOriginConfiguration;
+import com.amazonaws.services.s3.model.BucketLifecycleConfiguration;
+import com.amazonaws.services.s3.model.BucketLoggingConfiguration;
+import com.amazonaws.services.s3.model.BucketNotificationConfiguration;
+import com.amazonaws.services.s3.model.BucketPolicy;
+import com.amazonaws.services.s3.model.BucketReplicationConfiguration;
+import com.amazonaws.services.s3.model.BucketTaggingConfiguration;
+import com.amazonaws.services.s3.model.BucketVersioningConfiguration;
+import com.amazonaws.services.s3.model.BucketWebsiteConfiguration;
+import com.amazonaws.services.s3.model.CannedAccessControlList;
+import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest;
+import com.amazonaws.services.s3.model.CompleteMultipartUploadResult;
+import com.amazonaws.services.s3.model.CopyObjectRequest;
+import com.amazonaws.services.s3.model.CopyObjectResult;
+import com.amazonaws.services.s3.model.CopyPartRequest;
+import com.amazonaws.services.s3.model.CopyPartResult;
+import com.amazonaws.services.s3.model.CreateBucketRequest;
+import com.amazonaws.services.s3.model.DeleteBucketAnalyticsConfigurationRequest;
+import com.amazonaws.services.s3.model.DeleteBucketAnalyticsConfigurationResult;
+import com.amazonaws.services.s3.model.DeleteBucketCrossOriginConfigurationRequest;
+import com.amazonaws.services.s3.model.DeleteBucketInventoryConfigurationRequest;
+import com.amazonaws.services.s3.model.DeleteBucketInventoryConfigurationResult;
+import com.amazonaws.services.s3.model.DeleteBucketLifecycleConfigurationRequest;
+import com.amazonaws.services.s3.model.DeleteBucketMetricsConfigurationRequest;
+import com.amazonaws.services.s3.model.DeleteBucketMetricsConfigurationResult;
+import com.amazonaws.services.s3.model.DeleteBucketPolicyRequest;
+import com.amazonaws.services.s3.model.DeleteBucketReplicationConfigurationRequest;
+import com.amazonaws.services.s3.model.DeleteBucketRequest;
+import com.amazonaws.services.s3.model.DeleteBucketTaggingConfigurationRequest;
+import com.amazonaws.services.s3.model.DeleteBucketWebsiteConfigurationRequest;
+import com.amazonaws.services.s3.model.DeleteObjectRequest;
+import com.amazonaws.services.s3.model.DeleteObjectTaggingRequest;
+import com.amazonaws.services.s3.model.DeleteObjectTaggingResult;
+import com.amazonaws.services.s3.model.DeleteObjectsRequest;
+import com.amazonaws.services.s3.model.DeleteObjectsResult;
+import com.amazonaws.services.s3.model.DeleteVersionRequest;
+import com.amazonaws.services.s3.model.GeneratePresignedUrlRequest;
+import com.amazonaws.services.s3.model.GetBucketAccelerateConfigurationRequest;
+import com.amazonaws.services.s3.model.GetBucketAclRequest;
+import com.amazonaws.services.s3.model.GetBucketAnalyticsConfigurationRequest;
+import com.amazonaws.services.s3.model.GetBucketAnalyticsConfigurationResult;
+import com.amazonaws.services.s3.model.GetBucketCrossOriginConfigurationRequest;
+import com.amazonaws.services.s3.model.GetBucketInventoryConfigurationRequest;
+import com.amazonaws.services.s3.model.GetBucketInventoryConfigurationResult;
+import com.amazonaws.services.s3.model.GetBucketLifecycleConfigurationRequest;
+import com.amazonaws.services.s3.model.GetBucketLocationRequest;
+import com.amazonaws.services.s3.model.GetBucketLoggingConfigurationRequest;
+import com.amazonaws.services.s3.model.GetBucketMetricsConfigurationRequest;
+import com.amazonaws.services.s3.model.GetBucketMetricsConfigurationResult;
+import com.amazonaws.services.s3.model.GetBucketNotificationConfigurationRequest;
+import com.amazonaws.services.s3.model.GetBucketPolicyRequest;
+import com.amazonaws.services.s3.model.GetBucketReplicationConfigurationRequest;
+import com.amazonaws.services.s3.model.GetBucketTaggingConfigurationRequest;
+import com.amazonaws.services.s3.model.GetBucketVersioningConfigurationRequest;
+import com.amazonaws.services.s3.model.GetBucketWebsiteConfigurationRequest;
+import com.amazonaws.services.s3.model.GetObjectAclRequest;
+import com.amazonaws.services.s3.model.GetObjectMetadataRequest;
+import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.GetObjectTaggingRequest;
+import com.amazonaws.services.s3.model.GetObjectTaggingResult;
+import com.amazonaws.services.s3.model.GetS3AccountOwnerRequest;
+import com.amazonaws.services.s3.model.HeadBucketRequest;
+import com.amazonaws.services.s3.model.HeadBucketResult;
+import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
+import com.amazonaws.services.s3.model.InitiateMultipartUploadResult;
+import com.amazonaws.services.s3.model.ListBucketAnalyticsConfigurationsRequest;
+import com.amazonaws.services.s3.model.ListBucketAnalyticsConfigurationsResult;
+import com.amazonaws.services.s3.model.ListBucketInventoryConfigurationsRequest;
+import com.amazonaws.services.s3.model.ListBucketInventoryConfigurationsResult;
+import com.amazonaws.services.s3.model.ListBucketMetricsConfigurationsRequest;
+import com.amazonaws.services.s3.model.ListBucketMetricsConfigurationsResult;
+import com.amazonaws.services.s3.model.ListBucketsRequest;
+import com.amazonaws.services.s3.model.ListMultipartUploadsRequest;
+import com.amazonaws.services.s3.model.ListNextBatchOfObjectsRequest;
+import com.amazonaws.services.s3.model.ListNextBatchOfVersionsRequest;
+import com.amazonaws.services.s3.model.ListObjectsRequest;
+import com.amazonaws.services.s3.model.ListObjectsV2Request;
+import com.amazonaws.services.s3.model.ListObjectsV2Result;
+import com.amazonaws.services.s3.model.ListPartsRequest;
+import com.amazonaws.services.s3.model.ListVersionsRequest;
+import com.amazonaws.services.s3.model.MultipartUploadListing;
+import com.amazonaws.services.s3.model.ObjectListing;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.Owner;
+import com.amazonaws.services.s3.model.PartListing;
+import com.amazonaws.services.s3.model.PutObjectRequest;
+import com.amazonaws.services.s3.model.PutObjectResult;
+import com.amazonaws.services.s3.model.RestoreObjectRequest;
+import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.model.SetBucketAccelerateConfigurationRequest;
+import com.amazonaws.services.s3.model.SetBucketAclRequest;
+import com.amazonaws.services.s3.model.SetBucketAnalyticsConfigurationRequest;
+import com.amazonaws.services.s3.model.SetBucketAnalyticsConfigurationResult;
+import com.amazonaws.services.s3.model.SetBucketCrossOriginConfigurationRequest;
+import com.amazonaws.services.s3.model.SetBucketInventoryConfigurationRequest;
+import com.amazonaws.services.s3.model.SetBucketInventoryConfigurationResult;
+import com.amazonaws.services.s3.model.SetBucketLifecycleConfigurationRequest;
+import com.amazonaws.services.s3.model.SetBucketLoggingConfigurationRequest;
+import com.amazonaws.services.s3.model.SetBucketMetricsConfigurationRequest;
+import com.amazonaws.services.s3.model.SetBucketMetricsConfigurationResult;
+import com.amazonaws.services.s3.model.SetBucketNotificationConfigurationRequest;
+import com.amazonaws.services.s3.model.SetBucketPolicyRequest;
+import com.amazonaws.services.s3.model.SetBucketReplicationConfigurationRequest;
+import com.amazonaws.services.s3.model.SetBucketTaggingConfigurationRequest;
+import com.amazonaws.services.s3.model.SetBucketVersioningConfigurationRequest;
+import com.amazonaws.services.s3.model.SetBucketWebsiteConfigurationRequest;
+import com.amazonaws.services.s3.model.SetObjectAclRequest;
+import com.amazonaws.services.s3.model.SetObjectTaggingRequest;
+import com.amazonaws.services.s3.model.SetObjectTaggingResult;
+import com.amazonaws.services.s3.model.StorageClass;
+import com.amazonaws.services.s3.model.UploadPartRequest;
+import com.amazonaws.services.s3.model.UploadPartResult;
+import com.amazonaws.services.s3.model.VersionListing;
+import com.amazonaws.services.s3.model.analytics.AnalyticsConfiguration;
+import com.amazonaws.services.s3.model.inventory.InventoryConfiguration;
+import com.amazonaws.services.s3.model.metrics.MetricsConfiguration;
+import com.amazonaws.services.s3.waiters.AmazonS3Waiters;
+
+/**
+ * Class to simulate the functionality of {@link AmazonS3Client}.
+ */
+public final class DummyS3Client extends AmazonS3Client {
+    /** Map of Bucket names as keys and the keys as set of values. */
+    private final Map<String, Set<String>> objMap;
+
+    /**
+     * Creates a dummy client that starts with no buckets.
+     */
+    public DummyS3Client() {
+        this.objMap = new HashMap<>();
+    }
+
+    /**
+     * Constructor to add an object map with fake data.
+     *
+     * @param objMap Map of bucket names to the set of keys stored in each bucket; must not be {@code null}.
+     */
+    public DummyS3Client(Map<String, Set<String>> objMap) {
+        this.objMap = Objects.requireNonNull(objMap, "Object map cannot be null");
+    }
+
+    /** Empty Method: the dummy client has no real endpoint, so the value is ignored. */
+    @Override public void setEndpoint(String endpoint) {
+    }
+
+    /** Unsupported Operation: always throws {@link UnsupportedOperationException}. */
+    @Override public void setRegion(Region region) throws IllegalArgumentException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setS3ClientOptions(S3ClientOptions clientOptions) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void changeObjectStorageClass(String bucketName, String key, StorageClass newStorageCls)
+        throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setObjectRedirectLocation(String bucketName, String key, String newRedirectLocation)
+        throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** {@inheritDoc} */
+    @Override public ObjectListing listObjects(String bucketName) throws SdkClientException {
+        // Fails fast for unknown buckets (helper defined elsewhere in this class).
+        checkBucketExists(bucketName);
+        return DummyObjectListing.of(bucketName, objMap.get(bucketName));
+    }
+
+    /** {@inheritDoc} */
+    @Override public ObjectListing listObjects(String bucketName, String prefix) throws SdkClientException {
+        checkBucketExists(bucketName);
+
+        // Collect only the keys of this bucket that start with the requested prefix.
+        Set<String> matched = new HashSet<>();
+
+        for (String bucketKey : objMap.get(bucketName)) {
+            if (bucketKey.startsWith(prefix))
+                matched.add(bucketKey);
+        }
+
+        return DummyObjectListing.of(bucketName, matched);
+    }
+
+    /** Unsupported Operation: only the (bucket) and (bucket, prefix) overloads are simulated. */
+    @Override public ObjectListing listObjects(ListObjectsRequest listObjectsReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public ListObjectsV2Result listObjectsV2(String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public ListObjectsV2Result listObjectsV2(String bucketName,
+        String prefix) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public ListObjectsV2Result listObjectsV2(ListObjectsV2Request listObjectsV2Req) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** {@inheritDoc} */
+    @Override public ObjectListing listNextBatchOfObjects(ObjectListing previousObjListing) throws SdkClientException {
+        // Returns the same listing instance: presumably DummyObjectListing advances its own
+        // iteration state on each getObjectSummaries() call — confirm against DummyObjectListing.
+        return previousObjListing;
+    }
+
+    /** Unsupported Operation. */
+    @Override public ObjectListing listNextBatchOfObjects(ListNextBatchOfObjectsRequest listNextBatchOfObjectsReq)
+        throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation: versioning is not simulated by this dummy client. */
+    @Override public VersionListing listVersions(String bucketName, String prefix) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public VersionListing listNextBatchOfVersions(VersionListing previousVerListing) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public VersionListing listNextBatchOfVersions(ListNextBatchOfVersionsRequest listNextBatchOfVersionsReq)
+        throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public VersionListing listVersions(String bucketName, String prefix, String keyMarker, String verIdMarker,
+        String delim, Integer maxResults) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public VersionListing listVersions(ListVersionsRequest listVersionsReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public Owner getS3AccountOwner() throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public Owner getS3AccountOwner(GetS3AccountOwnerRequest getS3AccountOwnerReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** {@inheritDoc} */
+    @Override public boolean doesBucketExist(String bucketName) throws SdkClientException {
+        // A bucket "exists" iff it was registered via createBucket or the constructor-supplied map.
+        return objMap.containsKey(bucketName);
+    }
+
+    /** Unsupported Operation: always throws {@link UnsupportedOperationException}. */
+    @Override public HeadBucketResult headBucket(HeadBucketRequest headBucketReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public List<Bucket> listBuckets() throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public List<Bucket> listBuckets(ListBucketsRequest listBucketsReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public String getBucketLocation(String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public String getBucketLocation(GetBucketLocationRequest getBucketLocationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public Bucket createBucket(CreateBucketRequest createBucketReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** {@inheritDoc} */
+    @Override public Bucket createBucket(String bucketName) throws SdkClientException {
+        if (doesBucketExist(bucketName))
+            throw new AmazonS3Exception("The specified bucket already exist");
+        else {
+            objMap.put(bucketName, new HashSet<>());
+
+            return new Bucket();
+        }
+    }
+
+    /** Unsupported Operation: always throws {@link UnsupportedOperationException}. */
+    @Override public Bucket createBucket(String bucketName,
+        com.amazonaws.services.s3.model.Region region) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public Bucket createBucket(String bucketName, String region) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public AccessControlList getObjectAcl(String bucketName,
+        String key) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public AccessControlList getObjectAcl(String bucketName, String key,
+        String verId) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public AccessControlList getObjectAcl(GetObjectAclRequest getObjAclReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setObjectAcl(String bucketName, String key, AccessControlList acl) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setObjectAcl(String bucketName, String key,
+        CannedAccessControlList acl) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setObjectAcl(String bucketName, String key, String verId,
+        AccessControlList acl) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setObjectAcl(String bucketName, String key, String verId,
+        CannedAccessControlList acl) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setObjectAcl(SetObjectAclRequest setObjAclReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public AccessControlList getBucketAcl(String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketAcl(SetBucketAclRequest setBucketAclReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public AccessControlList getBucketAcl(GetBucketAclRequest getBucketAclReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketAcl(String bucketName, AccessControlList acl) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketAcl(String bucketName, CannedAccessControlList acl) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public ObjectMetadata getObjectMetadata(String bucketName, String key) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public ObjectMetadata getObjectMetadata(
+        GetObjectMetadataRequest getObjMetadataReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public S3Object getObject(String bucketName, String key) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public S3Object getObject(GetObjectRequest getObjReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public ObjectMetadata getObject(GetObjectRequest getObjReq, File destFile) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public String getObjectAsString(String bucketName, String key) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public GetObjectTaggingResult getObjectTagging(GetObjectTaggingRequest getObjTaggingReq) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public SetObjectTaggingResult setObjectTagging(SetObjectTaggingRequest setObjTaggingReq) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public DeleteObjectTaggingResult deleteObjectTagging(DeleteObjectTaggingRequest delObjTaggingReq) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucket(DeleteBucketRequest delBucketReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucket(String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public PutObjectResult putObject(PutObjectRequest putObjReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public PutObjectResult putObject(String bucketName, String key,
+        File file) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** {@inheritDoc} */
+    @Override public PutObjectResult putObject(String bucketName, String key, InputStream input,
+        ObjectMetadata metadata) throws SdkClientException {
+        checkBucketExists(bucketName);
+
+        // The dummy client only records the key; the content stream and metadata are ignored.
+        objMap.get(bucketName).add(key);
+
+        return new PutObjectResult();
+    }
+
+    /** Unsupported Operation: always throws {@link UnsupportedOperationException}. */
+    @Override public PutObjectResult putObject(String bucketName, String key,
+        String content) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public CopyObjectResult copyObject(String srcBucketName, String srcKey, String destBucketName,
+        String destKey) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public CopyObjectResult copyObject(CopyObjectRequest cpObjReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public CopyPartResult copyPart(CopyPartRequest cpPartReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /**
+     * {@inheritDoc}
+     * <p>
+     * Removes every key of the bucket that <i>contains</i> {@code key} as a substring, which
+     * lets tests delete a whole fake "directory" of keys with a single call.
+     */
+    @Override public void deleteObject(String bucketName, String key) throws SdkClientException {
+        checkBucketExists(bucketName);
+
+        // removeIf replaces the former collect-then-removeAll dance; the redundant
+        // objMap.put(...) of the same live set reference is dropped.
+        objMap.get(bucketName).removeIf(k -> k.contains(key));
+    }
+
+    /** Unsupported Operation: always throws {@link UnsupportedOperationException}. */
+    @Override public void deleteObject(DeleteObjectRequest delObjReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public DeleteObjectsResult deleteObjects(DeleteObjectsRequest delObjectsReq)
+        throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteVersion(String bucketName, String key, String verId) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteVersion(DeleteVersionRequest delVerReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketLoggingConfiguration getBucketLoggingConfiguration(
+        String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketLoggingConfiguration getBucketLoggingConfiguration(
+        GetBucketLoggingConfigurationRequest getBucketLoggingConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketLoggingConfiguration(
+        SetBucketLoggingConfigurationRequest setBucketLoggingConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketVersioningConfiguration getBucketVersioningConfiguration(
+        String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketVersioningConfiguration getBucketVersioningConfiguration(
+        GetBucketVersioningConfigurationRequest getBucketVersioningConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketVersioningConfiguration(
+        SetBucketVersioningConfigurationRequest setBucketVersioningConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketLifecycleConfiguration getBucketLifecycleConfiguration(String bucketName) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketLifecycleConfiguration getBucketLifecycleConfiguration(
+        GetBucketLifecycleConfigurationRequest getBucketLifecycleConfigurationReq) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketLifecycleConfiguration(String bucketName,
+        BucketLifecycleConfiguration bucketLifecycleConfiguration) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketLifecycleConfiguration(
+        SetBucketLifecycleConfigurationRequest setBucketLifecycleConfigurationReq) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucketLifecycleConfiguration(String bucketName) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucketLifecycleConfiguration(
+        DeleteBucketLifecycleConfigurationRequest delBucketLifecycleConfigurationReq) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketCrossOriginConfiguration getBucketCrossOriginConfiguration(String bucketName) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketCrossOriginConfiguration getBucketCrossOriginConfiguration(
+        GetBucketCrossOriginConfigurationRequest getBucketCrossOriginConfigurationReq) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketCrossOriginConfiguration(String bucketName,
+        BucketCrossOriginConfiguration bucketCrossOriginConfiguration) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketCrossOriginConfiguration(
+        SetBucketCrossOriginConfigurationRequest setBucketCrossOriginConfigurationReq) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucketCrossOriginConfiguration(String bucketName) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucketCrossOriginConfiguration(
+        DeleteBucketCrossOriginConfigurationRequest delBucketCrossOriginConfigurationReq) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketTaggingConfiguration getBucketTaggingConfiguration(String bucketName) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketTaggingConfiguration getBucketTaggingConfiguration(
+        GetBucketTaggingConfigurationRequest getBucketTaggingConfigurationReq) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketTaggingConfiguration(String bucketName,
+        BucketTaggingConfiguration bucketTaggingConfiguration) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketTaggingConfiguration(
+        SetBucketTaggingConfigurationRequest setBucketTaggingConfigurationReq) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucketTaggingConfiguration(String bucketName) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucketTaggingConfiguration(
+        DeleteBucketTaggingConfigurationRequest delBucketTaggingConfigurationReq) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketNotificationConfiguration getBucketNotificationConfiguration(
+        String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketNotificationConfiguration getBucketNotificationConfiguration(
+        GetBucketNotificationConfigurationRequest getBucketNotificationConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketNotificationConfiguration(
+        SetBucketNotificationConfigurationRequest setBucketNotificationConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketNotificationConfiguration(String bucketName,
+        BucketNotificationConfiguration bucketNotificationConfiguration) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketWebsiteConfiguration getBucketWebsiteConfiguration(
+        String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketWebsiteConfiguration getBucketWebsiteConfiguration(
+        GetBucketWebsiteConfigurationRequest getBucketWebsiteConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketWebsiteConfiguration(String bucketName,
+        BucketWebsiteConfiguration configuration) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketWebsiteConfiguration(
+        SetBucketWebsiteConfigurationRequest setBucketWebsiteConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucketWebsiteConfiguration(String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucketWebsiteConfiguration(
+        DeleteBucketWebsiteConfigurationRequest delBucketWebsiteConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketPolicy getBucketPolicy(String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketPolicy getBucketPolicy(GetBucketPolicyRequest getBucketPlcReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketPolicy(String bucketName, String plcText) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketPolicy(SetBucketPolicyRequest setBucketPlcReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucketPolicy(String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucketPolicy(DeleteBucketPolicyRequest delBucketPlcReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public URL generatePresignedUrl(String bucketName, String key,
+        Date expiration) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public URL generatePresignedUrl(String bucketName, String key, Date expiration,
+        HttpMethod mtd) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public URL generatePresignedUrl(
+        GeneratePresignedUrlRequest generatePresignedUrlReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public InitiateMultipartUploadResult initiateMultipartUpload(
+        InitiateMultipartUploadRequest req) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public UploadPartResult uploadPart(UploadPartRequest req) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public PartListing listParts(ListPartsRequest req) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void abortMultipartUpload(
+        AbortMultipartUploadRequest req) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public CompleteMultipartUploadResult completeMultipartUpload(
+        CompleteMultipartUploadRequest req) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public MultipartUploadListing listMultipartUploads(
+        ListMultipartUploadsRequest req) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public S3ResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest req) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void restoreObject(RestoreObjectRequest req) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void restoreObject(String bucketName, String key, int expirationInDays) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void enableRequesterPays(String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void disableRequesterPays(String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public boolean isRequesterPaysEnabled(String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketReplicationConfiguration(String bucketName,
+        BucketReplicationConfiguration configuration) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketReplicationConfiguration(
+        SetBucketReplicationConfigurationRequest setBucketReplicationConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketReplicationConfiguration getBucketReplicationConfiguration(
+        String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketReplicationConfiguration getBucketReplicationConfiguration(
+        GetBucketReplicationConfigurationRequest getBucketReplicationConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucketReplicationConfiguration(
+        String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void deleteBucketReplicationConfiguration(
+        DeleteBucketReplicationConfigurationRequest req) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public boolean doesObjectExist(String bucketName,
+        String objName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketAccelerateConfiguration getBucketAccelerateConfiguration(
+        String bucketName) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public BucketAccelerateConfiguration getBucketAccelerateConfiguration(
+        GetBucketAccelerateConfigurationRequest getBucketAccelerateConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketAccelerateConfiguration(String bucketName,
+        BucketAccelerateConfiguration accelerateConfiguration) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public void setBucketAccelerateConfiguration(
+        SetBucketAccelerateConfigurationRequest setBucketAccelerateConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public DeleteBucketMetricsConfigurationResult deleteBucketMetricsConfiguration(String bucketName,
+        String id) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public DeleteBucketMetricsConfigurationResult deleteBucketMetricsConfiguration(
+        DeleteBucketMetricsConfigurationRequest delBucketMetricsConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public GetBucketMetricsConfigurationResult getBucketMetricsConfiguration(String bucketName,
+        String id) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public GetBucketMetricsConfigurationResult getBucketMetricsConfiguration(
+        GetBucketMetricsConfigurationRequest getBucketMetricsConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public SetBucketMetricsConfigurationResult setBucketMetricsConfiguration(String bucketName,
+        MetricsConfiguration metricsConfiguration) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public SetBucketMetricsConfigurationResult setBucketMetricsConfiguration(
+        SetBucketMetricsConfigurationRequest setBucketMetricsConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public ListBucketMetricsConfigurationsResult listBucketMetricsConfigurations(
+        ListBucketMetricsConfigurationsRequest listBucketMetricsConfigurationsReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public DeleteBucketAnalyticsConfigurationResult deleteBucketAnalyticsConfiguration(String bucketName,
+        String id) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public DeleteBucketAnalyticsConfigurationResult deleteBucketAnalyticsConfiguration(
+        DeleteBucketAnalyticsConfigurationRequest delBucketAnalyticsConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public GetBucketAnalyticsConfigurationResult getBucketAnalyticsConfiguration(String bucketName,
+        String id) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public GetBucketAnalyticsConfigurationResult getBucketAnalyticsConfiguration(
+        GetBucketAnalyticsConfigurationRequest getBucketAnalyticsConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public SetBucketAnalyticsConfigurationResult setBucketAnalyticsConfiguration(String bucketName,
+        AnalyticsConfiguration analyticsConfiguration) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public SetBucketAnalyticsConfigurationResult setBucketAnalyticsConfiguration(
+        SetBucketAnalyticsConfigurationRequest setBucketAnalyticsConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public ListBucketAnalyticsConfigurationsResult listBucketAnalyticsConfigurations(
+        ListBucketAnalyticsConfigurationsRequest listBucketAnalyticsConfigurationsReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public DeleteBucketInventoryConfigurationResult deleteBucketInventoryConfiguration(String bucketName,
+        String id) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public DeleteBucketInventoryConfigurationResult deleteBucketInventoryConfiguration(
+        DeleteBucketInventoryConfigurationRequest delBucketInventoryConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public GetBucketInventoryConfigurationResult getBucketInventoryConfiguration(String bucketName,
+        String id) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public GetBucketInventoryConfigurationResult getBucketInventoryConfiguration(
+        GetBucketInventoryConfigurationRequest getBucketInventoryConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public SetBucketInventoryConfigurationResult setBucketInventoryConfiguration(String bucketName,
+        InventoryConfiguration inventoryConfiguration) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public SetBucketInventoryConfigurationResult setBucketInventoryConfiguration(
+        SetBucketInventoryConfigurationRequest setBucketInventoryConfigurationReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public ListBucketInventoryConfigurationsResult listBucketInventoryConfigurations(
+        ListBucketInventoryConfigurationsRequest listBucketInventoryConfigurationsReq) throws SdkClientException {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public com.amazonaws.services.s3.model.Region getRegion() {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation. */
+    @Override public String getRegionName() {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation: this dummy client never resolves object URLs; always throws {@link UnsupportedOperationException}. */
+    @Override public URL getUrl(String bucketName, String key) {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /** Unsupported Operation: waiters are not simulated by this dummy client; always throws {@link UnsupportedOperationException}. */
+    @Override public AmazonS3Waiters waiters() {
+        throw new UnsupportedOperationException("Operation not supported");
+    }
+
+    /**
+     * Verifies that the named bucket is known to this dummy client.
+     *
+     * @param bucketName Name of the bucket to look up.
+     * @throws AmazonS3Exception If the specified bucket does not exist.
+     */
+    private void checkBucketExists(String bucketName) {
+        if (doesBucketExist(bucketName))
+            return;
+
+        throw new AmazonS3Exception("The specified bucket does not exist");
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/DummyS3ClientTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/DummyS3ClientTest.java
new file mode 100644
index 0000000..4a50592
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/DummyS3ClientTest.java
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.client;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import com.amazonaws.services.s3.model.AmazonS3Exception;
+import com.amazonaws.services.s3.model.ObjectListing;
+import com.amazonaws.services.s3.model.S3ObjectSummary;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Test;
+
+/**
+ * Class to test {@link DummyS3Client}.
+ */
+public class DummyS3ClientTest extends GridCommonAbstractTest {
+    /** Instance of {@link DummyS3Client} to be used for tests. */
+    private DummyS3Client s3;
+
+    /** Holds fake key prefixes. */
+    private Set<String> fakeKeyPrefixSet;
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTest() {
+        fakeKeyPrefixSet = new HashSet<>();
+        fakeKeyPrefixSet.add("/test/path/val");
+        fakeKeyPrefixSet.add("/test/val/test/path");
+        fakeKeyPrefixSet.add("/test/test/path/val");
+
+        Map<String, Set<String>> fakeObjMap = new HashMap<>();
+
+        fakeObjMap.put("testBucket", fakeKeyPrefixSet);
+
+        s3 = new DummyS3Client(fakeObjMap);
+    }
+
+    /**
+     * Test cases to check the 'doesBucketExist' method.
+     */
+    @Test
+    public void testDoesBucketExist() {
+        assertTrue("The bucket 'testBucket' should exist", s3.doesBucketExist("testBucket"));
+        assertFalse("The bucket 'nonExistentBucket' should not exist", s3.doesBucketExist("nonExistentBucket"));
+    }
+
+    /**
+     * Test cases for various object listing functions for S3 bucket.
+     */
+    @Test
+    public void testListObjects() {
+        ObjectListing listing = s3.listObjects("testBucket");
+
+        List<S3ObjectSummary> summaries = listing.getObjectSummaries();
+
+        assertFalse("'testBucket' contains keys", summaries.isEmpty());
+        assertTrue("'testBucket' contains more keys to fetch", listing.isTruncated());
+        assertTrue(fakeKeyPrefixSet.contains(summaries.get(0).getKey()));
+
+        listing = s3.listNextBatchOfObjects(listing);
+
+        summaries = listing.getObjectSummaries();
+
+        assertFalse("'testBucket' contains keys", summaries.isEmpty());
+        assertTrue("'testBucket' contains more keys to fetch", listing.isTruncated());
+        assertTrue(fakeKeyPrefixSet.contains(summaries.get(0).getKey()));
+
+        listing = s3.listNextBatchOfObjects(listing);
+
+        summaries = listing.getObjectSummaries();
+
+        assertFalse("'testBucket' contains keys", summaries.isEmpty());
+        assertFalse("'testBucket' does not contain anymore keys", listing.isTruncated());
+        assertTrue(fakeKeyPrefixSet.contains(summaries.get(0).getKey()));
+
+        try {
+            s3.listObjects("nonExistentBucket");
+        }
+        catch (AmazonS3Exception e) {
+            assertTrue(e.getMessage().contains("The specified bucket does not exist"));
+        }
+    }
+
+    /**
+     * Test cases for various object listing functions for S3 bucket and key prefix.
+     */
+    @Test
+    public void testListObjectsWithAPrefix() {
+        ObjectListing listing = s3.listObjects("testBucket", "/test");
+
+        List<S3ObjectSummary> summaries = listing.getObjectSummaries();
+
+        assertFalse("'testBucket' must contain key with prefix '/test'", summaries.isEmpty());
+        assertTrue("'testBucket' contains more keys with prefix '/test'", listing.isTruncated());
+        assertTrue(fakeKeyPrefixSet.contains(summaries.get(0).getKey()));
+
+        listing = s3.listNextBatchOfObjects(listing);
+
+        summaries = listing.getObjectSummaries();
+
+        assertFalse("'testBucket' must contain key with prefix '/test'", summaries.isEmpty());
+        assertTrue("'testBucket' contains more keys with prefix '/test'", listing.isTruncated());
+        assertTrue(fakeKeyPrefixSet.contains(summaries.get(0).getKey()));
+
+        listing = s3.listNextBatchOfObjects(listing);
+
+        summaries = listing.getObjectSummaries();
+
+        assertFalse("'testBucket' must contain key with prefix '/test'", summaries.isEmpty());
+        assertFalse("'testBucket' does not contain anymore keys with prefix '/test'", listing.isTruncated());
+        assertTrue(fakeKeyPrefixSet.contains(summaries.get(0).getKey()));
+
+        listing = s3.listObjects("testBucket", "/test/path");
+
+        summaries = listing.getObjectSummaries();
+
+        assertFalse("'testBucket' must contain key with prefix '/test'", summaries.isEmpty());
+        assertFalse("'testBucket' does not contain anymore keys with prefix '/test/path'", listing.isTruncated());
+        assertEquals("/test/path/val", summaries.get(0).getKey());
+
+        listing = s3.listObjects("testBucket", "/non/existent/test/path");
+
+        summaries = listing.getObjectSummaries();
+
+        assertTrue("'testBucket' must not contain key with prefix '/non/existent/test/path'", summaries.isEmpty());
+        assertFalse("'testBucket' does not contain anymore keys with prefix '/non/existent/test/path'", listing.isTruncated());
+
+        try {
+            s3.listObjects("nonExistentBucket", "/test");
+        }
+        catch (AmazonS3Exception e) {
+            assertTrue(e.getMessage().contains("The specified bucket does not exist"));
+        }
+    }
+
+    /**
+     * Test case to check if a bucket is created properly.
+     */
+    @Test
+    public void testCreateBucket() {
+        s3.createBucket("testBucket1");
+
+        assertTrue("The bucket 'testBucket1' should exist", s3.doesBucketExist("testBucket1"));
+
+        try {
+            s3.createBucket("testBucket");
+        }
+        catch (AmazonS3Exception e) {
+            assertTrue(e.getMessage().contains("The specified bucket already exist"));
+        }
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/package-info.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/package-info.java
new file mode 100644
index 0000000..3d70294
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/client/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Contains internal tests or test related classes and interfaces.
+ */
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.client;
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/AsymmetricKeyEncryptionServiceTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/AsymmetricKeyEncryptionServiceTest.java
new file mode 100644
index 0000000..9f4673b
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/AsymmetricKeyEncryptionServiceTest.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt;
+
+import java.nio.charset.StandardCharsets;
+import java.security.KeyFactory;
+import java.security.KeyPair;
+import java.security.NoSuchAlgorithmException;
+import java.security.PrivateKey;
+import java.security.PublicKey;
+import java.security.spec.InvalidKeySpecException;
+import java.security.spec.PKCS8EncodedKeySpec;
+import java.security.spec.X509EncodedKeySpec;
+import java.util.Base64;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Contains tests for {@link AsymmetricKeyEncryptionService}.
+ */
+public class AsymmetricKeyEncryptionServiceTest extends GridCommonAbstractTest {
+    /** Asymmetric key encryption service. */
+    private AsymmetricKeyEncryptionService encryptionSvc;
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTest() {
+        try {
+            String algo = "RSA";
+            // Public and private key pair is generated using 'openssl'
+            String publicKeyStr = "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCbuQ7RcOtsHf2oGQ" +
+                "b//cMgfN9kS8tsn21BOAnXwkBN0LwpVXdw1SAfN6fhdJqr4Z585IgF" +
+                "EDOlimoDZ2pXHZ6NfmAot4xkioXlsX+lsSir3gMtPfJhtTFvvnvzgr" +
+                "ZGWVxu0eLBCiuhlUpYNTHlFaiD8C/Qj7eRY+tUagZRskug8QIDAQAB";
+
+            String privateKeyStr = "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAJu5D" +
+                "tFw62wd/agZBv/9wyB832RLy2yfbUE4CdfCQE3QvClVd3DVIB83p+" +
+                "F0mqvhnnzkiAUQM6WKagNnalcdno1+YCi3jGSKheWxf6WxKKveAy0" +
+                "98mG1MW++e/OCtkZZXG7R4sEKK6GVSlg1MeUVqIPwL9CPt5Fj61Rq" +
+                "BlGyS6DxAgMBAAECgYEAj+lILnqitvpIb08hzvYfnCiK8s+xIaN8f" +
+                "qdhQUo9zyw2mCRqC5aK5w6yUYNHZc1OgLFamwNMF5KBQsAR4Ix492" +
+                "1K8ch4fmqtnaD4wlx3euyH1+ZjmagzutlFHKxKOnFuoaWeWJj0RN2" +
+                "f2S3dci2Kh1hkde3PylOgOfKXmz0MfAECQQDMjqEr4KdWnAUwBFgP" +
+                "+48wQufpfWzTt2rR7lDxfWoeoo0BlIPVEgvrjmr3mwcX2/kyZK1tD" +
+                "Hf9BSTI65a9zl4hAkEAwuJ7mmd/emqXCqgIs8qsLaaNnZUfTTyzb4" +
+                "iHgFyh/FEyXeuPN/hyg3Hch2/uA+ZFW+Bc46GSSmzWK4RTJGfI0QJ" +
+                "BAI3tHBhUe+ZUxCinqu4T7SpgEYZoNrzCkwPrJRAYoyt0Pv9sqveH" +
+                "2Otr2f3H+2jrgAAd6FI0B4BvNDGPe/xfleECQHkopP+RaMeKjOyrG" +
+                "v3r+q9G5LQbiaJTIpssnlFHRc3ADTgmwpthcpAVsaziAW+bMXO1QQ" +
+                "qj4Hc0wtG7KpVvkIECQBm72Wh6od+BFeWq2iN7XiXIAgXRRvfVTuD" +
+                "KFM3vYQlszEsTI2YKcCg2Lg1oFoHn/tuRjOajNs6eWz/0BWzfuHY=";
+
+            PublicKey publicKey = KeyFactory.getInstance(algo)
+                .generatePublic(new X509EncodedKeySpec(Base64.getDecoder().decode(publicKeyStr)));
+
+            PrivateKey privateKey = KeyFactory.getInstance(algo)
+                .generatePrivate(new PKCS8EncodedKeySpec(Base64.getDecoder().decode(privateKeyStr)));
+
+            KeyPair keyPair = new KeyPair(publicKey, privateKey);
+
+            encryptionSvc = new AsymmetricKeyEncryptionService();
+            encryptionSvc.setKeyPair(keyPair);
+            encryptionSvc.init();
+        }
+        catch (NoSuchAlgorithmException | InvalidKeySpecException e) {
+            // Preserve the cause in the failure message so a broken setup is diagnosable.
+            Assert.fail("Failed to initialize the encryption service: " + e.getMessage());
+        }
+    }
+
+    /**
+     * Test encryption and decryption.
+     */
+    @Test
+    public void testEncryptDecrypt() {
+        byte[] testData = "This is some test data.".getBytes(StandardCharsets.UTF_8);
+
+        byte[] encData = encryptionSvc.encrypt(testData);
+        byte[] decData = encryptionSvc.decrypt(encData);
+
+        Assert.assertArrayEquals(testData, decData);
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/AwsKmsEncryptionServiceTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/AwsKmsEncryptionServiceTest.java
new file mode 100644
index 0000000..f5eb566
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/AwsKmsEncryptionServiceTest.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.encryptionsdk.AwsCrypto;
+import com.amazonaws.encryptionsdk.CryptoResult;
+import com.amazonaws.encryptionsdk.kms.KmsMasterKeyProvider;
+import com.amazonaws.regions.Region;
+import com.amazonaws.regions.Regions;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+/**
+ * Class to test {@link AwsKmsEncryptionService}.
+ */
+public class AwsKmsEncryptionServiceTest extends GridCommonAbstractTest {
+    /**
+     * Test encryption and decryption.
+     */
+    @Test
+    public void testEncryptDecrypt() {
+        String encKey = "12345";
+        byte[] testData = "test string".getBytes(StandardCharsets.UTF_8);
+        byte[] encTestData = "enc test string".getBytes(StandardCharsets.UTF_8);
+
+        AwsKmsEncryptionService awsKmsEncryptionSvc = Mockito.spy(new AwsKmsEncryptionService());
+        awsKmsEncryptionSvc.setKeyId(encKey)
+            .setCredentials(new BasicAWSCredentials("dummy", "dummy"))
+            .setRegion(Region.getRegion(Regions.AP_SOUTH_1));
+
+        AwsCrypto awsCrypto = Mockito.mock(AwsCrypto.class);
+        KmsMasterKeyProvider prov = Mockito.mock(KmsMasterKeyProvider.class);
+        CryptoResult encCryptoRes = Mockito.mock(CryptoResult.class);
+        CryptoResult decCryptoRes = Mockito.mock(CryptoResult.class);
+
+        Mockito.doReturn(awsCrypto).when(awsKmsEncryptionSvc).createClient();
+        Mockito.doReturn(prov).when(awsKmsEncryptionSvc).createKmsMasterKeyProvider();
+
+        awsKmsEncryptionSvc.init();
+
+        Mockito.doReturn(encCryptoRes).when(awsCrypto).encryptData(prov, testData);
+        Mockito.doReturn(encTestData).when(encCryptoRes).getResult();
+
+        Mockito.doReturn(decCryptoRes).when(awsCrypto).decryptData(prov, encTestData);
+        Mockito.doReturn(Arrays.asList(encKey)).when(decCryptoRes).getMasterKeyIds();
+        Mockito.doReturn(testData).when(decCryptoRes).getResult();
+
+        byte[] encData = awsKmsEncryptionSvc.encrypt(testData);
+        byte[] actualOutput = awsKmsEncryptionSvc.decrypt(encData);
+
+        Assert.assertArrayEquals(testData, actualOutput);
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/MockEncryptionService.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/MockEncryptionService.java
new file mode 100644
index 0000000..d5dbe44
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/MockEncryptionService.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt;
+
+import java.nio.charset.StandardCharsets;
+import javax.crypto.SecretKey;
+import javax.crypto.spec.SecretKeySpec;
+
+/**
+ * Class to provide a mock implementation of {@link EncryptionService}.
+ */
+public class MockEncryptionService implements EncryptionService {
+    /** Encryption service. */
+    private final EncryptionService encryptionSvc;
+
+    /**
+     * Constructor.
+     *
+     * @param encryptionSvc Encryption service.
+     */
+    private MockEncryptionService(EncryptionService encryptionSvc) {
+        this.encryptionSvc = encryptionSvc;
+    }
+
+    /**
+     * @return An instance of this class.
+     */
+    public static MockEncryptionService instance() {
+        SecretKey secretKey = new SecretKeySpec("0000000000000000".getBytes(StandardCharsets.UTF_8), "AES");
+        EncryptionService encryptionSvc = new SymmetricKeyEncryptionService().setSecretKey(secretKey);
+
+        encryptionSvc.init();
+
+        return new MockEncryptionService(encryptionSvc);
+    }
+
+    /** {@inheritDoc} */
+    @Override public void init() {
+        // Nothing to do
+    }
+
+    /** {@inheritDoc} */
+    @Override public byte[] encrypt(byte[] payload) {
+        return encryptionSvc.encrypt(payload);
+    }
+
+    /** {@inheritDoc} */
+    @Override public byte[] decrypt(byte[] payload) {
+        return encryptionSvc.decrypt(payload);
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/MockEncryptionServiceTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/MockEncryptionServiceTest.java
new file mode 100644
index 0000000..4be4538
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/MockEncryptionServiceTest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt;
+
+import java.nio.charset.StandardCharsets;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Class to test {@link MockEncryptionService}.
+ */
+public class MockEncryptionServiceTest extends GridCommonAbstractTest {
+    /** Mock encryption service. */
+    private MockEncryptionService mockEncryptionSvc;
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTest() {
+        mockEncryptionSvc = MockEncryptionService.instance();
+    }
+
+    /**
+     * Test if the service correctly encrypts and decrypts data.
+     */
+    @Test
+    public void testEncryptDecrypt() {
+        byte[] testStr = "test string".getBytes(StandardCharsets.UTF_8);
+
+        byte[] encData = mockEncryptionSvc.encrypt(testStr);
+        byte[] decData = mockEncryptionSvc.decrypt(encData);
+
+        Assert.assertArrayEquals(testStr, decData);
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/SymmetricKeyEncryptionServiceTest.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/SymmetricKeyEncryptionServiceTest.java
new file mode 100644
index 0000000..e3024a1
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/SymmetricKeyEncryptionServiceTest.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt;
+
+import java.nio.charset.StandardCharsets;
+import javax.crypto.SecretKey;
+import javax.crypto.spec.SecretKeySpec;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Class to test {@link SymmetricKeyEncryptionService}.
+ */
+public class SymmetricKeyEncryptionServiceTest extends GridCommonAbstractTest {
+    /** Symmetric key encryption service. */
+    private SymmetricKeyEncryptionService encryptionSvc;
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTest() {
+        byte[] key = "0000000000000000".getBytes(StandardCharsets.UTF_8);
+        SecretKey secretKey = new SecretKeySpec(key, "AES");
+
+        encryptionSvc = new SymmetricKeyEncryptionService().setSecretKey(secretKey);
+        encryptionSvc.init();
+    }
+
+    /**
+     * Test that encryption followed by decryption returns the original data.
+     */
+    @Test
+    public void testEncryptDecrypt() {
+        byte[] testData = "test string".getBytes(StandardCharsets.UTF_8);
+        byte[] encData = encryptionSvc.encrypt(testData);
+        byte[] decData = encryptionSvc.decrypt(encData);
+
+        Assert.assertArrayEquals(testData, decData);
+    }
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/package-info.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/package-info.java
new file mode 100644
index 0000000..a2cb726
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/encrypt/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. --> Contains internal tests or test related classes and interfaces.
+ */
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt;
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/package-info.java b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/package-info.java
new file mode 100644
index 0000000..ede3925
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/s3/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Contains internal tests or test related classes and interfaces.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.s3;
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/testsuites/IgniteElbTestSuite.java b/modules/aws-ext/src/test/java/org/apache/ignite/testsuites/IgniteElbTestSuite.java
new file mode 100644
index 0000000..bc9ece0
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/testsuites/IgniteElbTestSuite.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.testsuites;
+
+import org.apache.ignite.spi.discovery.tcp.ipfinder.elb.TcpDiscoveryAlbIpFinderSelfTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.elb.TcpDiscoveryElbIpFinderSelfTest;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+
+/**
+ * ELB IP finder test suite.
+ */
+@RunWith(Suite.class)
+@Suite.SuiteClasses({TcpDiscoveryElbIpFinderSelfTest.class,
+                     TcpDiscoveryAlbIpFinderSelfTest.class})
+public class IgniteElbTestSuite {
+}
diff --git a/modules/aws-ext/src/test/java/org/apache/ignite/testsuites/IgniteS3TestSuite.java b/modules/aws-ext/src/test/java/org/apache/ignite/testsuites/IgniteS3TestSuite.java
new file mode 100644
index 0000000..0b3a404
--- /dev/null
+++ b/modules/aws-ext/src/test/java/org/apache/ignite/testsuites/IgniteS3TestSuite.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.testsuites;
+
+import org.apache.ignite.spi.checkpoint.s3.S3CheckpointManagerSelfTest;
+import org.apache.ignite.spi.checkpoint.s3.S3CheckpointSpiConfigSelfTest;
+import org.apache.ignite.spi.checkpoint.s3.S3CheckpointSpiSelfTest;
+import org.apache.ignite.spi.checkpoint.s3.S3CheckpointSpiStartStopBucketEndpointSelfTest;
+import org.apache.ignite.spi.checkpoint.s3.S3CheckpointSpiStartStopSSEAlgorithmSelfTest;
+import org.apache.ignite.spi.checkpoint.s3.S3CheckpointSpiStartStopSelfTest;
+import org.apache.ignite.spi.checkpoint.s3.S3SessionCheckpointSelfTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.TcpDiscoveryS3IpFinderAwsCredentialsProviderSelfTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.TcpDiscoveryS3IpFinderAwsCredentialsSelfTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.TcpDiscoveryS3IpFinderBucketEndpointSelfTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.TcpDiscoveryS3IpFinderClientSideEncryptionSelfTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.TcpDiscoveryS3IpFinderKeyPrefixSelfTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.TcpDiscoveryS3IpFinderSSEAlgorithmSelfTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.client.DummyObjectListingTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.client.DummyS3ClientTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt.AsymmetricKeyEncryptionServiceTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt.AwsKmsEncryptionServiceTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt.MockEncryptionServiceTest;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.s3.encrypt.SymmetricKeyEncryptionServiceTest;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+
+/**
+ * S3 integration tests.
+ */
+@RunWith(Suite.class)
+@Suite.SuiteClasses({
+    // Checkpoint SPI.
+    S3CheckpointSpiConfigSelfTest.class,
+    S3CheckpointSpiSelfTest.class,
+    S3CheckpointSpiStartStopSelfTest.class,
+    S3CheckpointManagerSelfTest.class,
+    S3SessionCheckpointSelfTest.class,
+    S3CheckpointSpiStartStopBucketEndpointSelfTest.class,
+    S3CheckpointSpiStartStopSSEAlgorithmSelfTest.class,
+
+    // S3 Encryption tests.
+    MockEncryptionServiceTest.class,
+    AwsKmsEncryptionServiceTest.class,
+    SymmetricKeyEncryptionServiceTest.class,
+    AsymmetricKeyEncryptionServiceTest.class,
+
+    // S3 IP finder.
+    DummyS3ClientTest.class,
+    DummyObjectListingTest.class,
+    TcpDiscoveryS3IpFinderAwsCredentialsSelfTest.class,
+    TcpDiscoveryS3IpFinderAwsCredentialsProviderSelfTest.class,
+    TcpDiscoveryS3IpFinderBucketEndpointSelfTest.class,
+    TcpDiscoveryS3IpFinderSSEAlgorithmSelfTest.class,
+    TcpDiscoveryS3IpFinderKeyPrefixSelfTest.class,
+    TcpDiscoveryS3IpFinderClientSideEncryptionSelfTest.class,
+})
+public class IgniteS3TestSuite {
+    /**
+     * @return Access key.
+     */
+    public static String getAccessKey() {
+        return getRequiredEnvVar("test.amazon.access.key");
+    }
+
+    /**
+     * @return Secret key.
+     */
+    public static String getSecretKey() {
+        return getRequiredEnvVar("test.amazon.secret.key");
+    }
+
+    /**
+     * @param dfltBucketName Default bucket name.
+     * @return Bucket name.
+     */
+    public static String getBucketName(final String dfltBucketName) {
+        String val = System.getenv("test.s3.bucket.name");
+
+        return val == null ? dfltBucketName : val;
+    }
+
+    /**
+     * @param name Name of the environment variable.
+     * @return Environment variable value.
+     */
+    private static String getRequiredEnvVar(String name) {
+        String key = System.getenv(name);
+
+        assert key != null : String.format("Environment variable '%s' is not set", name);
+
+        return key;
+    }
+}
diff --git a/modules/azure-ext/README.txt b/modules/azure-ext/README.txt
new file mode 100644
index 0000000..54ae772
--- /dev/null
+++ b/modules/azure-ext/README.txt
@@ -0,0 +1,28 @@
+Apache Ignite Azure Module
+--------------------------
+
+Apache Ignite Azure module provides Azure Blob Storage based implementation of IP finder for TCP discovery.
+
+Importing Azure Module In Maven Project
+---------------------------------------
+
+If you are using Maven to manage dependencies of your project, you can add Azure module
+dependency like this (replace '${ignite.version}' with actual Ignite version you are
+interested in):
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+                        http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    ...
+    <dependencies>
+        ...
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-azure-ext</artifactId>
+            <version>${ignite.version}</version>
+        </dependency>
+        ...
+    </dependencies>
+    ...
+</project>
diff --git a/modules/azure-ext/licenses/apache-2.0.txt b/modules/azure-ext/licenses/apache-2.0.txt
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/modules/azure-ext/licenses/apache-2.0.txt
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/modules/azure-ext/pom.xml b/modules/azure-ext/pom.xml
new file mode 100644
index 0000000..89999c9
--- /dev/null
+++ b/modules/azure-ext/pom.xml
@@ -0,0 +1,381 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.apache.ignite</groupId>
+        <artifactId>ignite-extensions-parent</artifactId>
+        <version>1</version>
+        <relativePath>../../parent</relativePath>
+    </parent>
+
+    <artifactId>ignite-azure-ext</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+    <url>https://ignite.apache.org</url>
+
+    <properties>
+        <spring.version>5.3.8</spring.version>
+        <netty.version>4.1.66.Final</netty.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-core</artifactId>
+            <version>${ignite.version}</version>
+            <scope>compile</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-core</artifactId>
+            <version>${ignite.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-tools</artifactId>
+            <version>${ignite.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/org.jetbrains/annotations -->
+        <dependency>
+            <groupId>org.jetbrains</groupId>
+            <artifactId>annotations</artifactId>
+            <version>${jetbrains.annotations.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.azure</groupId>
+            <artifactId>azure-core-http-netty</artifactId>
+            <version>1.10.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.azure</groupId>
+            <artifactId>azure-core</artifactId>
+            <version>1.17.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.azure</groupId>
+            <artifactId>azure-storage-internal-avro</artifactId>
+            <version>12.0.5</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.azure</groupId>
+            <artifactId>azure-storage-blob</artifactId>
+            <version>12.13.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.azure</groupId>
+            <artifactId>azure-storage-common</artifactId>
+            <version>12.12.0</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core -->
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-annotations -->
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-annotations</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml -->
+        <dependency>
+            <groupId>com.fasterxml.jackson.dataformat</groupId>
+            <artifactId>jackson-dataformat-xml</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jsr310 -->
+        <dependency>
+            <groupId>com.fasterxml.jackson.datatype</groupId>
+            <artifactId>jackson-datatype-jsr310</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-jaxb-annotations -->
+        <dependency>
+            <groupId>com.fasterxml.jackson.module</groupId>
+            <artifactId>jackson-module-jaxb-annotations</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-api -->
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-api</artifactId>
+            <version>${log4j.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core -->
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-core</artifactId>
+            <version>${log4j.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-slf4j-impl -->
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-slf4j-impl</artifactId>
+            <version>${log4j.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-buffer -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-buffer</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-codec -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-codec</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-codec-dns -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-codec-dns</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-codec-http2 -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-codec-http2</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-codec-http -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-codec-http</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-codec-socks -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-codec-socks</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-common -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-common</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-handler -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-handler</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-handler-proxy -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-handler-proxy</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-resolver -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-resolver</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-resolver-dns -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-resolver-dns</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-resolver-dns-native-macos</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-tcnative-boringssl-static</artifactId>
+            <version>2.0.39.Final</version>
+        </dependency>
+
+        <!-- netty-codec-dns is already declared above; duplicate kept commented out
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-codec-dns</artifactId>
+            <version>${netty.version}</version>
+        </dependency> -->
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-transport -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-transport</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-transport-native-epoll -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-transport-native-epoll</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-transport-native-kqueue -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-transport-native-kqueue</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-transport-native-unix-common -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-transport-native-unix-common</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.netty/netty-tcnative -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-tcnative</artifactId>
+            <version>2.0.40.Final</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.reactivestreams</groupId>
+            <artifactId>reactive-streams</artifactId>
+            <version>1.0.3</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.projectreactor/reactor-core -->
+        <dependency>
+            <groupId>io.projectreactor</groupId>
+            <artifactId>reactor-core</artifactId>
+            <version>3.4.6</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/io.projectreactor.netty/reactor-netty-core -->
+        <dependency>
+            <groupId>io.projectreactor.netty</groupId>
+            <artifactId>reactor-netty-core</artifactId>
+            <version>1.0.7</version>
+        </dependency>
+
+        <dependency>
+            <groupId>io.projectreactor.netty</groupId>
+            <artifactId>reactor-netty-http</artifactId>
+            <version>1.0.7</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/com.fasterxml.woodstox/woodstox-core -->
+        <dependency>
+            <groupId>com.fasterxml.woodstox</groupId>
+            <artifactId>woodstox-core</artifactId>
+            <version>6.2.4</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/org.codehaus.woodstox/stax2-api -->
+        <dependency>
+            <groupId>org.codehaus.woodstox</groupId>
+            <artifactId>stax2-api</artifactId>
+            <version>4.2.1</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/org.reflections/reflections -->
+        <dependency>
+            <groupId>org.reflections</groupId>
+            <artifactId>reflections</artifactId>
+            <version>0.9.12</version>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-simple -->
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-simple</artifactId>
+            <version>${slf4j.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-beans</artifactId>
+            <version>${spring.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-context</artifactId>
+            <version>${spring.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-core</artifactId>
+            <version>${spring.version}</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+</project>
diff --git a/modules/azure-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/azure/TcpDiscoveryAzureBlobStoreIpFinder.java b/modules/azure-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/azure/TcpDiscoveryAzureBlobStoreIpFinder.java
new file mode 100644
index 0000000..6540909
--- /dev/null
+++ b/modules/azure-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/azure/TcpDiscoveryAzureBlobStoreIpFinder.java
@@ -0,0 +1,350 @@
+package org.apache.ignite.spi.discovery.tcp.ipfinder.azure;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.io.ByteArrayInputStream;
+import java.io.UnsupportedEncodingException;
+import java.net.InetSocketAddress;
+import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.BlobServiceClient;
+import com.azure.storage.blob.BlobServiceClientBuilder;
+import com.azure.storage.blob.models.BlobErrorCode;
+import com.azure.storage.blob.models.BlobItem;
+import com.azure.storage.blob.models.BlobStorageException;
+import com.azure.storage.blob.specialized.BlockBlobClient;
+import com.azure.storage.common.StorageSharedKeyCredential;
+import org.apache.ignite.IgniteLogger;
+import org.apache.ignite.internal.IgniteInterruptedCheckedException;
+import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.internal.util.typedef.internal.S;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.resources.LoggerResource;
+import org.apache.ignite.spi.IgniteSpiConfiguration;
+import org.apache.ignite.spi.IgniteSpiException;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAdapter;
+
+/**
+ * Azure Blob Storage based IP Finder.
+ * <p>
+ * For information about Blob Storage visit <a href="https://azure.microsoft.com/en-in/services/storage/blobs/">azure.microsoft.com</a>.
+ * <h1 class="header">Configuration</h1>
+ * <h2 class="header">Mandatory</h2>
+ * <ul>
+ *      <li>AccountName (see {@link #setAccountName(String)})</li>
+ *      <li>AccountKey (see {@link #setAccountKey(String)})</li>
+ *      <li>Account Endpoint (see {@link #setAccountEndpoint(String)})</li>
+ *      <li>Container Name (see {@link #setContainerName(String)})</li>
+ * </ul>
+ * <h2 class="header">Optional</h2>
+ * <ul>
+ *      <li>Shared flag (see {@link #setShared(boolean)})</li>
+ * </ul>
+ * <p>
+ * The finder will create a container with the provided name. The container will contain entries named
+ * like the following: {@code 192.168.1.136#1001}.
+ * <p>
+ * Note that storing data in Azure Blob Storage service will result in charges to your Azure account.
+ * Choose another implementation of {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder} for local
+ * or home network tests.
+ * <p>
+ * Note that this finder is shared by default (see {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder#isShared()}.
+ */
+public class TcpDiscoveryAzureBlobStoreIpFinder extends TcpDiscoveryIpFinderAdapter {
+    /** Default object's content: addresses are encoded in blob names, so blob bodies are empty. */
+    private static final byte[] OBJECT_CONTENT = new byte[0];
+
+    /** Grid logger. */
+    @LoggerResource
+    private IgniteLogger log;
+
+    /** Azure Blob Storage's account name. */
+    private String accountName;
+
+    /** Azure Blob Storage's account key (secret; must never be printed or logged). */
+    private String accountKey;
+
+    /** End point URL. */
+    private String endPoint;
+
+    /** Container name. */
+    private String containerName;
+
+    /** Storage credential built from the account name and key during {@link #init()}. */
+    private StorageSharedKeyCredential credential;
+
+    /** Blob service client. */
+    private BlobServiceClient blobServiceClient;
+
+    /** Blob container client; non-null only after successful {@link #init()}. */
+    private BlobContainerClient blobContainerClient;
+
+    /** Init routine guard: the winning thread performs initialization, others wait on the latch. */
+    private final AtomicBoolean initGuard = new AtomicBoolean();
+
+    /** Init routine latch. */
+    private final CountDownLatch initLatch = new CountDownLatch(1);
+
+    /**
+     * Default constructor. The finder is shared by default.
+     */
+    public TcpDiscoveryAzureBlobStoreIpFinder() {
+        setShared(true);
+    }
+
+    /** {@inheritDoc} */
+    @Override public Collection<InetSocketAddress> getRegisteredAddresses() throws IgniteSpiException {
+        init();
+
+        Collection<InetSocketAddress> addrs = new ArrayList<>();
+        Set<String> seenBlobNames = new HashSet<>();
+
+        Iterator<BlobItem> blobItemIterator = blobContainerClient.listBlobs().iterator();
+
+        while (blobItemIterator.hasNext()) {
+            BlobItem blobItem = blobItemIterator.next();
+
+            // Stop on the first repeated name: the SDK iterator may loop over the same page forever.
+            // https://github.com/Azure/azure-sdk-for-java/issues/20515
+            if (seenBlobNames.contains(blobItem.getName())) {
+                break;
+            }
+
+            try {
+                if (!blobItem.isDeleted()) {
+                    addrs.add(addrFromString(blobItem.getName()));
+                    seenBlobNames.add(blobItem.getName());
+                }
+            }
+            catch (Exception e) {
+                throw new IgniteSpiException("Failed to get content from the container: " + containerName, e);
+            }
+        }
+
+        return addrs;
+    }
+
+    /** {@inheritDoc} */
+    @Override public void registerAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
+        assert !F.isEmpty(addrs);
+
+        init();
+
+        for (InetSocketAddress addr : addrs) {
+            try {
+                // The service stores the decoded name, so listed/deleted names contain the raw '#'.
+                String key = URLEncoder.encode(keyFromAddr(addr), StandardCharsets.UTF_8.name());
+                BlockBlobClient blobClient = blobContainerClient.getBlobClient(key).getBlockBlobClient();
+
+                blobClient.upload(new ByteArrayInputStream(OBJECT_CONTENT), OBJECT_CONTENT.length);
+            }
+            catch (UnsupportedEncodingException e) {
+                throw new IgniteSpiException("Unable to encode URL due to error " + e.getMessage(), e);
+            }
+            catch (BlobStorageException e) {
+                // 409 Conflict means the blob already exists, i.e. the address is already registered.
+                if (e.getStatusCode() != 409)
+                    throw new IgniteSpiException("Failed to upload blob with exception " + e.getMessage(), e);
+            }
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public void unregisterAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
+        assert !F.isEmpty(addrs);
+
+        init();
+
+        for (InetSocketAddress addr : addrs) {
+            String key = keyFromAddr(addr);
+
+            try {
+                blobContainerClient.getBlobClient(key).delete();
+            } catch (Exception e) {
+                // Ignore spurious interrupts reported by the SDK and missing blobs (already unregistered).
+                // https://github.com/Azure/azure-sdk-for-java/issues/20551
+                if ((!(e.getMessage().contains("InterruptedException"))) || (e instanceof BlobStorageException
+                    && (((BlobStorageException)e).getErrorCode() != BlobErrorCode.BLOB_NOT_FOUND))) {
+                    throw new IgniteSpiException("Failed to delete entry [containerName=" + containerName +
+                        ", entry=" + key + ']', e);
+                }
+            }
+        }
+    }
+
+    /**
+     * Sets Azure Blob Storage Account Name.
+     * <p>
+     * For details refer to Azure Blob Storage API reference.
+     *
+     * @param accountName Account Name
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public TcpDiscoveryAzureBlobStoreIpFinder setAccountName(String accountName) {
+        this.accountName = accountName;
+
+        return this;
+    }
+
+    /**
+     * Sets Azure Blob Storage Account Key.
+     * <p>
+     * For details refer to Azure Blob Storage API reference.
+     *
+     * @param accountKey Account Key
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public TcpDiscoveryAzureBlobStoreIpFinder setAccountKey(String accountKey) {
+        this.accountKey = accountKey;
+
+        return this;
+    }
+
+    /**
+     * Sets Azure Blob Storage endpoint.
+     * <p>
+     * For details refer to Azure Blob Storage API reference.
+     *
+     * @param endPoint Endpoint for storage
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public TcpDiscoveryAzureBlobStoreIpFinder setAccountEndpoint(String endPoint) {
+        this.endPoint = endPoint;
+
+        return this;
+    }
+
+    /**
+     * Sets container name for using in the context.
+     * If the container doesn't exist Ignite will automatically create it.
+     *
+     * @param containerName Container Name.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public TcpDiscoveryAzureBlobStoreIpFinder setContainerName(String containerName) {
+        this.containerName = containerName;
+
+        return this;
+    }
+
+    /**
+     * Initializes the IP finder exactly once. The first caller performs the initialization, all
+     * concurrent callers block on the latch until it completes.
+     *
+     * @throws IgniteSpiException If any mandatory parameter is missing or initialization failed.
+     */
+    private void init() throws IgniteSpiException {
+        if (initGuard.compareAndSet(false, true)) {
+            if (accountKey == null || accountName == null || containerName == null || endPoint == null) {
+                // Deliberately report only whether the account key is set, never its value:
+                // the key is a secret and this message may end up in logs.
+                throw new IgniteSpiException(
+                        "One or more of the required parameters is not set [accountName=" +
+                                accountName + ", accountKeySet=" + (accountKey != null) + ", containerName=" +
+                                containerName + ", endPoint=" + endPoint + "]");
+            }
+
+            try {
+                credential = new StorageSharedKeyCredential(accountName, accountKey);
+                blobServiceClient = new BlobServiceClientBuilder().endpoint(endPoint).credential(credential).buildClient();
+                blobContainerClient = blobServiceClient.getBlobContainerClient(containerName);
+
+                if (!blobContainerClient.exists()) {
+                    U.warn(log, "Container doesn't exist, will create it [containerName=" + containerName + "]");
+
+                    blobContainerClient.create();
+                }
+            }
+            finally {
+                // Release waiters even on failure; they detect the failure via the null client below.
+                initLatch.countDown();
+            }
+        }
+        else {
+            try {
+                U.await(initLatch);
+            }
+            catch (IgniteInterruptedCheckedException e) {
+                throw new IgniteSpiException("Thread has been interrupted.", e);
+            }
+
+            // If the initializing thread failed before the client was built, fail fast with a clear
+            // error instead of a NullPointerException on the exists() call below.
+            if (blobContainerClient == null)
+                throw new IgniteSpiException("IpFinder has not been initialized properly");
+
+            try {
+                if (!blobContainerClient.exists())
+                    throw new IgniteSpiException("IpFinder has not been initialized properly");
+            } catch (Exception e) {
+                // Check if this is a nested exception wrapping an InterruptedException
+                // https://github.com/Azure/azure-sdk-for-java/issues/20551
+                if (!(e.getCause() instanceof InterruptedException)) {
+                    throw e;
+                }
+            }
+        }
+    }
+
+    /**
+     * Constructs a node address from bucket's key.
+     *
+     * @param key Bucket key in {@code host#port} form.
+     * @return Node address.
+     * @throws IgniteSpiException In case of error.
+     */
+    private InetSocketAddress addrFromString(String key) throws IgniteSpiException {
+        //TODO: This needs to move out to a generic helper class
+        String[] res = key.split("#");
+
+        if (res.length != 2)
+            throw new IgniteSpiException("Invalid address string: " + key);
+
+        int port;
+
+        try {
+            port = Integer.parseInt(res[1]);
+        }
+        catch (NumberFormatException ignored) {
+            throw new IgniteSpiException("Invalid port number: " + res[1]);
+        }
+
+        return new InetSocketAddress(res[0], port);
+    }
+
+    /**
+     * Constructs bucket's key from an address.
+     *
+     * @param addr Node address.
+     * @return Bucket key in {@code host#port} form.
+     */
+    private String keyFromAddr(InetSocketAddress addr) {
+        // TODO: This needs to move out to a generic helper class
+        return addr.getAddress().getHostAddress() + "#" + addr.getPort();
+    }
+
+    /** {@inheritDoc} */
+    @Override public TcpDiscoveryAzureBlobStoreIpFinder setShared(boolean shared) {
+        super.setShared(shared);
+
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(TcpDiscoveryAzureBlobStoreIpFinder.class, this);
+    }
+}
diff --git a/modules/azure-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/package-info.java b/modules/azure-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/package-info.java
new file mode 100644
index 0000000..b8410b3
--- /dev/null
+++ b/modules/azure-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Contains Azure Blob Storage IP finder.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder;
diff --git a/modules/azure-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/azure/TcpDiscoveryAzureBlobStoreIpFinderSelfTest.java b/modules/azure-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/azure/TcpDiscoveryAzureBlobStoreIpFinderSelfTest.java
new file mode 100644
index 0000000..9e8ac18
--- /dev/null
+++ b/modules/azure-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/azure/TcpDiscoveryAzureBlobStoreIpFinderSelfTest.java
@@ -0,0 +1,100 @@
+package org.apache.ignite.spi.discovery.tcp.ipfinder.azure;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.util.Collection;
+
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.BlobServiceClientBuilder;
+import com.azure.storage.common.StorageSharedKeyCredential;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAbstractSelfTest;
+import org.apache.ignite.testsuites.IgniteAzureTestSuite;
+
+/** */
+public class TcpDiscoveryAzureBlobStoreIpFinderSelfTest
+        extends TcpDiscoveryIpFinderAbstractSelfTest<TcpDiscoveryAzureBlobStoreIpFinder> {
+    /** */
+    private static String containerName;
+
+    /**
+     * Constructor.
+     *
+     * @throws Exception If any error occurs.
+     */
+    public TcpDiscoveryAzureBlobStoreIpFinderSelfTest() throws Exception {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTestsStarted() throws Exception {
+        containerName = "ip-finder-test-container-" + InetAddress.getLocalHost().getAddress()[3];
+
+        super.beforeTestsStarted();
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void afterTestsStopped() throws Exception {
+        super.afterTestsStopped();
+
+        try {
+            BlobContainerClient container =
+                new BlobServiceClientBuilder().endpoint(IgniteAzureTestSuite.getEndpoint()).credential(
+                    new StorageSharedKeyCredential(IgniteAzureTestSuite.getAccountName(),
+                        IgniteAzureTestSuite.getAccountKey())).buildClient().getBlobContainerClient(containerName);
+
+            if (container.exists())
+                container.delete();
+        }
+        catch (Exception e) {
+            log.warning("Failed to remove bucket on Azure [containerName=" + containerName + ", mes=" + e.getMessage() + ']');
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override protected TcpDiscoveryAzureBlobStoreIpFinder ipFinder() throws Exception {
+        TcpDiscoveryAzureBlobStoreIpFinder finder = new TcpDiscoveryAzureBlobStoreIpFinder();
+
+        injectLogger(finder);
+
+        assert finder.isShared() : "Ip finder must be shared by default.";
+
+        finder.setAccountName(IgniteAzureTestSuite.getAccountName());
+        finder.setAccountKey(IgniteAzureTestSuite.getAccountKey());
+        finder.setAccountEndpoint(IgniteAzureTestSuite.getEndpoint());
+
+        finder.setContainerName(containerName);
+
+        for (int i = 0; i < 5; i++) {
+            Collection<InetSocketAddress> addrs = finder.getRegisteredAddresses();
+
+            if (!addrs.isEmpty())
+                finder.unregisterAddresses(addrs);
+            else
+                return finder;
+
+            U.sleep(1000);
+        }
+
+        if (!finder.getRegisteredAddresses().isEmpty())
+            throw new Exception("Failed to initialize IP finder.");
+
+        return finder;
+    }
+}
diff --git a/modules/azure-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/azure/package-info.java b/modules/azure-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/azure/package-info.java
new file mode 100644
index 0000000..c3f6711
--- /dev/null
+++ b/modules/azure-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/azure/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * <!-- Package description. -->
+ * Contains Azure Blob Storage IP finder internal tests.
+ */
+package org.apache.ignite.spi.discovery.tcp.ipfinder.azure;
diff --git a/modules/azure-ext/src/test/java/org/apache/ignite/testsuites/IgniteAzureTestSuite.java b/modules/azure-ext/src/test/java/org/apache/ignite/testsuites/IgniteAzureTestSuite.java
new file mode 100644
index 0000000..0549568
--- /dev/null
+++ b/modules/azure-ext/src/test/java/org/apache/ignite/testsuites/IgniteAzureTestSuite.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.testsuites;
+
+import org.apache.ignite.spi.discovery.tcp.ipfinder.azure.TcpDiscoveryAzureBlobStoreIpFinderSelfTest;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+
+/**
+ * Azure integration tests.
+ */
+@RunWith(Suite.class)
+@Suite.SuiteClasses({TcpDiscoveryAzureBlobStoreIpFinderSelfTest.class})
+public class IgniteAzureTestSuite {
+    /**
+     * @return Azure Blob Storage account name to run the tests against.
+     */
+    public static String getAccountName() {
+        // NOTE(review): dots in environment variable names cannot be exported from most shells;
+        // confirm how these variables are actually set on the test agents.
+        String name = System.getenv("test.azure.account.name");
+
+        assert name != null : "Environment variable 'test.azure.account.name' is not set";
+
+        return name;
+    }
+
+    /**
+     * @return Account key used to authenticate against Azure Blob Storage.
+     */
+    public static String getAccountKey() {
+        String key = System.getenv("test.azure.account.key");
+
+        assert key != null : "Environment variable 'test.azure.account.key' is not set";
+
+        return key;
+    }
+
+    /**
+     * @return Blob service endpoint URL.
+     */
+    public static String getEndpoint() {
+        String endpoint = System.getenv("test.azure.endpoint");
+
+        assert endpoint != null : "Environment variable 'test.azure.endpoint' is not set";
+
+        return endpoint;
+    }
+}
diff --git a/modules/gce-ext/README.txt b/modules/gce-ext/README.txt
new file mode 100644
index 0000000..0963545
--- /dev/null
+++ b/modules/gce-ext/README.txt
@@ -0,0 +1,28 @@
+Apache Ignite GCE Module
+------------------------
+
+Apache Ignite GCE module provides a Google Cloud Storage based implementation of IP finder for TCP discovery.
+
+Importing GCE Module In Maven Project
+-------------------------------------
+
+If you are using Maven to manage dependencies of your project, you can add GCE module
+dependency like this (replace '${ignite.version}' with actual Ignite version you are
+interested in):
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+                        http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    ...
+    <dependencies>
+        ...
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-gce-ext</artifactId>
+            <version>${ignite.version}</version>
+        </dependency>
+        ...
+    </dependencies>
+    ...
+</project>
diff --git a/modules/gce-ext/licenses/apache-2.0.txt b/modules/gce-ext/licenses/apache-2.0.txt
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/modules/gce-ext/licenses/apache-2.0.txt
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/modules/gce-ext/pom.xml b/modules/gce-ext/pom.xml
new file mode 100644
index 0000000..666daae
--- /dev/null
+++ b/modules/gce-ext/pom.xml
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.apache.ignite</groupId>
+        <artifactId>ignite-extensions-parent</artifactId>
+        <version>1</version>
+        <relativePath>../../parent</relativePath>
+    </parent>
+
+    <artifactId>ignite-gce-ext</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+    <url>https://ignite.apache.org</url>
+
+    <properties>
+        <spring.version>5.3.8</spring.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-core</artifactId>
+            <version>${ignite.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.api-client</groupId>
+            <artifactId>google-api-client</artifactId>
+            <version>1.30.10</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>commons-codec</groupId>
+                    <artifactId>commons-codec</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.apis</groupId>
+            <artifactId>google-api-services-storage</artifactId>
+            <version>v1-rev92-1.22.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-core</artifactId>
+            <version>${ignite.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.ignite</groupId>
+            <artifactId>ignite-tools</artifactId>
+            <version>${ignite.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-beans</artifactId>
+            <version>${spring.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-context</artifactId>
+            <version>${spring.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-core</artifactId>
+            <version>${spring.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+    <build>
+        <plugins>
+
+            <plugin>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>copy-libs</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>copy-dependencies</goal>
+                        </goals>
+                        <configuration>
+                            <outputDirectory>target/libs</outputDirectory>
+                            <includeScope>runtime</includeScope>
+                            <excludeTransitive>false</excludeTransitive>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-deploy-plugin</artifactId>
+                <configuration>
+                    <skip>false</skip>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/modules/gce-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/gce/TcpDiscoveryGoogleStorageIpFinder.java b/modules/gce-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/gce/TcpDiscoveryGoogleStorageIpFinder.java
new file mode 100644
index 0000000..084783a
--- /dev/null
+++ b/modules/gce-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/gce/TcpDiscoveryGoogleStorageIpFinder.java
@@ -0,0 +1,431 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.gce;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.security.GeneralSecurityException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
+import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
+import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
+import com.google.api.client.googleapis.json.GoogleJsonResponseException;
+import com.google.api.client.http.InputStreamContent;
+import com.google.api.client.http.javanet.NetHttpTransport;
+import com.google.api.client.json.jackson2.JacksonFactory;
+import com.google.api.services.storage.Storage;
+import com.google.api.services.storage.StorageScopes;
+import com.google.api.services.storage.model.Bucket;
+import com.google.api.services.storage.model.StorageObject;
+import org.apache.ignite.IgniteLogger;
+import org.apache.ignite.internal.IgniteInterruptedCheckedException;
+import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.internal.util.typedef.internal.S;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.resources.LoggerResource;
+import org.apache.ignite.spi.IgniteSpiConfiguration;
+import org.apache.ignite.spi.IgniteSpiException;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAdapter;
+
+/**
+ * Google Cloud Storage based IP finder.
+ * <p>
+ * For information about Cloud Storage visit <a href="https://cloud.google.com/storage/">cloud.google.com</a>.
+ * <h1 class="header">Configuration</h1>
+ * <h2 class="header">Mandatory</h2>
+ * <ul>
+ *      <li>Service Account Id (see {@link #setServiceAccountId(String)})</li>
+ *      <li>Service Account P12 key file path (see {@link #setServiceAccountP12FilePath(String)})</li>
+ *      <li>Google Platform project name (see {@link #setProjectName(String)})</li>
+ *      <li>Google Storage bucket name (see {@link #setBucketName(String)})</li>
+ * </ul>
+ * <h2 class="header">Optional</h2>
+ * <ul>
+ *      <li>Shared flag (see {@link #setShared(boolean)})</li>
+ * </ul>
+ * <p>
+ * The finder will create a bucket with the provided name. The bucket will contain entries named
+ * like the following: {@code 192.168.1.136#1001}.
+ * <p>
+ * Note that storing data in Google Cloud Storage service will result in charges to your Google Cloud Platform account.
+ * Choose another implementation of {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder} for local
+ * or home network tests.
+ * <p>
+ * Note that this finder is shared by default (see {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder#isShared()}.
+ */
+public class TcpDiscoveryGoogleStorageIpFinder extends TcpDiscoveryIpFinderAdapter {
+    /** Default object's content. */
+    private static final ByteArrayInputStream OBJECT_CONTENT = new ByteArrayInputStream(new byte[0]);
+
+    /** Grid logger. */
+    @LoggerResource
+    private IgniteLogger log;
+
+    /** Google Cloud Platform's project name.*/
+    private String projectName;
+
+    /** Google Storage bucket name. */
+    private String bucketName;
+
+    /** Service account p12 private key file name. */
+    private String srvcAccountP12FilePath;
+
+    /** Service account id. */
+    private String srvcAccountId;
+
+    /** Google storage. */
+    private Storage storage;
+
+    /** Init routine guard. */
+    private final AtomicBoolean initGuard = new AtomicBoolean();
+
+    /** Init routine latch. */
+    private final CountDownLatch initLatch = new CountDownLatch(1);
+
+    /**
+     * Creates an IP finder which is shared by default (see {@link #isShared()}).
+     */
+    public TcpDiscoveryGoogleStorageIpFinder() {
+        setShared(true);
+    }
+
+    /** {@inheritDoc} */
+    @Override public Collection<InetSocketAddress> getRegisteredAddresses() throws IgniteSpiException {
+        init();
+
+        Collection<InetSocketAddress> addrs = new ArrayList<>();
+
+        try {
+            Storage.Objects.List listObjects = storage.objects().list(bucketName);
+
+            com.google.api.services.storage.model.Objects objects;
+
+            // Bucket listing is paginated: keep fetching pages until no next page token is returned.
+            do {
+                objects = listObjects.execute();
+
+                // Empty bucket (or empty page) - nothing more to collect.
+                if (objects == null || objects.getItems() == null)
+                    break;
+
+                // Object names encode host and port (e.g. "192.168.1.136#1001", see class javadoc);
+                // addrFromString() parses them back into socket addresses.
+                for (StorageObject object : objects.getItems())
+                    addrs.add(addrFromString(object.getName()));
+
+                listObjects.setPageToken(objects.getNextPageToken());
+            }
+            while (null != objects.getNextPageToken());
+        }
+        catch (Exception e) {
+            throw new IgniteSpiException("Failed to get content from the bucket: " + bucketName, e);
+        }
+
+        return addrs;
+    }
+
+    /** {@inheritDoc} */
+    @Override public void registerAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
+        assert !F.isEmpty(addrs);
+
+        init();
+
+        for (InetSocketAddress addr : addrs) {
+            // The address is fully encoded in the object name; the object itself carries no payload.
+            String key = keyFromAddr(addr);
+
+            StorageObject object = new StorageObject();
+
+            object.setBucket(bucketName);
+            object.setName(key);
+
+            // NOTE(review): OBJECT_CONTENT is a single shared stream instance. This is safe only
+            // because it is empty (available() == 0), so nothing is ever consumed from it.
+            InputStreamContent content = new InputStreamContent("application/octet-stream", OBJECT_CONTENT);
+
+            content.setLength(OBJECT_CONTENT.available());
+
+            try {
+                Storage.Objects.Insert insertObject = storage.objects().insert(bucketName, object, content);
+
+                insertObject.execute();
+            }
+            catch (Exception e) {
+                throw new IgniteSpiException("Failed to put entry [bucketName=" + bucketName +
+                    ", entry=" + key + ']', e);
+            }
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public void unregisterAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
+        assert !F.isEmpty(addrs);
+
+        init();
+
+        // Every registered address maps to a dedicated object in the bucket; remove them one by one.
+        for (InetSocketAddress addr : addrs) {
+            String objKey = keyFromAddr(addr);
+
+            try {
+                storage.objects().delete(bucketName, objKey).execute();
+            }
+            catch (Exception e) {
+                throw new IgniteSpiException("Failed to delete entry [bucketName=" + bucketName +
+                    ", entry=" + objKey + ']', e);
+            }
+        }
+    }
+
+    /**
+     * Sets Google Cloud Platform's project name.
+     * Usually this is an auto generated project number (ex. 208709979073) that can be found in the "Overview" section
+     * of Google Developer Console.
+     * <p>
+     * For details refer to Google Cloud Platform API reference.
+     *
+     * @param projectName Project name.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public TcpDiscoveryGoogleStorageIpFinder setProjectName(String projectName) {
+        this.projectName = projectName;
+
+        return this;
+    }
+
+    /**
+     * Sets Google Cloud Storage bucket name.
+     * If the bucket doesn't exist, Ignite will automatically create it. However, the name must be unique across the
+     * whole of Google Cloud Storage, and the Service Account Id (see {@link #setServiceAccountId(String)}) must be
+     * authorized to perform this operation.
+     *
+     * @param bucketName Bucket name.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public TcpDiscoveryGoogleStorageIpFinder setBucketName(String bucketName) {
+        this.bucketName = bucketName;
+
+        return this;
+    }
+
+    /**
+     * Sets a full path to the private key in PKCS12 format of the Service Account.
+     * <p>
+     * For more information please refer to
+     * <a href="https://cloud.google.com/storage/docs/authentication#service_accounts">
+     *     Service Account Authentication</a>.
+     *
+     * @param p12FileName Private key file full path.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public TcpDiscoveryGoogleStorageIpFinder setServiceAccountP12FilePath(String p12FileName) {
+        srvcAccountP12FilePath = p12FileName;
+
+        return this;
+    }
+
+    /**
+     * Sets the service account ID (typically an e-mail address).
+     * <p>
+     * For more information please refer to
+     * <a href="https://cloud.google.com/storage/docs/authentication#service_accounts">
+     *     Service Account Authentication</a>.
+     *
+     * @param id Service account ID.
+     * @return {@code this} for chaining.
+     */
+    @IgniteSpiConfiguration(optional = false)
+    public TcpDiscoveryGoogleStorageIpFinder setServiceAccountId(String id) {
+        srvcAccountId = id;
+
+        return this;
+    }
+
+    /**
+     * Lazily initializes the Storage client and ensures the configured bucket exists (creating it if absent).
+     *
+     * @throws IgniteSpiException If a required parameter is unset, authentication fails or the bucket is unavailable.
+     */
+    private void init() throws IgniteSpiException {
+        if (initGuard.compareAndSet(false, true)) { // Exactly one thread performs initialization; others wait on initLatch below.
+            if (srvcAccountId == null ||
+                srvcAccountP12FilePath == null ||
+                projectName == null ||
+                bucketName == null) {
+                throw new IgniteSpiException(
+                    "One or more of the required parameters is not set [serviceAccountId=" +
+                        srvcAccountId + ", serviceAccountP12FilePath=" + srvcAccountP12FilePath + ", projectName=" +
+                        projectName + ", bucketName=" + bucketName + "]");
+            }
+
+            try {
+                NetHttpTransport httpTransport;
+
+                try {
+                    httpTransport = GoogleNetHttpTransport.newTrustedTransport(); // HTTP transport backed by Google's trusted root certificates.
+                }
+                catch (GeneralSecurityException | IOException e) {
+                    throw new IgniteSpiException(e);
+                }
+
+                GoogleCredential cred;
+
+                try {
+                    cred = new GoogleCredential.Builder().setTransport(httpTransport)
+                        .setJsonFactory(JacksonFactory.getDefaultInstance()).setServiceAccountId(srvcAccountId)
+                        .setServiceAccountPrivateKeyFromP12File(new File(srvcAccountP12FilePath))
+                        .setServiceAccountScopes(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)).build();
+
+                }
+                catch (Exception e) {
+                    throw new IgniteSpiException("Failed to authenticate on Google Cloud Platform", e);
+                }
+
+                try {
+                    storage = new Storage.Builder(httpTransport, JacksonFactory.getDefaultInstance(), cred)
+                        .setApplicationName(projectName).build();
+                }
+                catch (Exception e) {
+                    throw new IgniteSpiException("Failed to open a storage for given project name: " + projectName, e);
+                }
+
+                boolean createBucket = false;
+
+                try {
+                    Storage.Buckets.Get getBucket = storage.buckets().get(bucketName);
+
+                    getBucket.setProjection("full"); // Request full bucket metadata (including ACLs).
+
+                    getBucket.execute(); // Throws a 404 GoogleJsonResponseException if the bucket doesn't exist yet.
+                }
+                catch (GoogleJsonResponseException e) {
+                    if (e.getStatusCode() == 404) { // Bucket is absent -> create it below.
+                        U.warn(log, "Bucket doesn't exist, will create it [bucketName=" + bucketName + "]");
+
+                        createBucket = true;
+                    }
+                    else
+                        throw new IgniteSpiException("Failed to open the bucket: " + bucketName, e);
+                }
+                catch (Exception e) {
+                    throw new IgniteSpiException("Failed to open the bucket: " + bucketName, e);
+                }
+
+                if (createBucket) {
+                    Bucket newBucket = new Bucket();
+
+                    newBucket.setName(bucketName);
+
+                    try {
+                        Storage.Buckets.Insert insertBucket = storage.buckets().insert(projectName, newBucket);
+
+                        insertBucket.setProjection("full");
+                        insertBucket.setPredefinedDefaultObjectAcl("projectPrivate"); // New objects readable by project members only.
+
+                        insertBucket.execute();
+                    }
+                    catch (Exception e) {
+                        throw new IgniteSpiException("Failed to create the bucket: " + bucketName, e);
+                    }
+                }
+            }
+            finally {
+                initLatch.countDown(); // Release waiters even on failure; they detect failure via 'storage == null' below.
+            }
+        }
+        else {
+            try {
+                U.await(initLatch);
+            }
+            catch (IgniteInterruptedCheckedException e) {
+                throw new IgniteSpiException("Thread has been interrupted.", e);
+            }
+
+            if (storage == null) // Initializer thread failed; initGuard stays set, so init is never retried.
+                throw new IgniteSpiException("IpFinder has not been initialized properly");
+        }
+    }
+
+    /**
+     * Constructs bucket's key from an address.
+     *
+     * @param addr Node address.
+     * @return Bucket key in {@code <ip>#<port>} format (inverse of {@link #addrFromString(String)}).
+     */
+    private String keyFromAddr(InetSocketAddress addr) {
+        return addr.getAddress().getHostAddress() + "#" + addr.getPort();
+    }
+
+    /**
+     * Constructs a node address from bucket's key.
+     *
+     * @param key Bucket key in the {@code <ip>#<port>} format produced by {@link #keyFromAddr(InetSocketAddress)}.
+     * @return Node address.
+     * @throws IgniteSpiException In case of error.
+     */
+    private InetSocketAddress addrFromString(String key) throws IgniteSpiException {
+        String[] res = key.split("#"); // '#' never occurs in an IP literal, so exactly two parts are expected.
+
+        if (res.length != 2)
+            throw new IgniteSpiException("Invalid address string: " + key);
+
+        int port;
+
+        try {
+            port = Integer.parseInt(res[1]);
+        }
+        catch (NumberFormatException ignored) {
+            throw new IgniteSpiException("Invalid port number: " + res[1]);
+        }
+
+        return new InetSocketAddress(res[0], port);
+    }
+
+    /**
+     * Used by TEST SUITES only. Called through reflection.
+     * Deliberately private so it never becomes part of the public SPI surface.
+     * @param bucketName Bucket to delete.
+     */
+    private void removeBucket(String bucketName) {
+        init(); // Ensure the storage client is ready; may itself throw IgniteSpiException.
+
+        try {
+            Storage.Buckets.Delete deleteBucket = storage.buckets().delete(bucketName);
+
+            deleteBucket.execute();
+        }
+        catch (Exception e) {
+            throw new IgniteSpiException("Failed to remove the bucket: " + bucketName, e);
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public TcpDiscoveryGoogleStorageIpFinder setShared(boolean shared) {
+        super.setShared(shared); // Overridden only to narrow the return type for fluent chaining.
+
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override public String toString() {
+        return S.toString(TcpDiscoveryGoogleStorageIpFinder.class, this); // Ignite's reflection-based string builder.
+    }
+}
diff --git a/modules/gce-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/gce/package-info.java b/modules/gce-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/gce/package-info.java
new file mode 100644
index 0000000..808d005
--- /dev/null
+++ b/modules/gce-ext/src/main/java/org/apache/ignite/spi/discovery/tcp/ipfinder/gce/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Contains Google Cloud Storage IP finder.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.gce;
diff --git a/modules/gce-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/gce/TcpDiscoveryGoogleStorageIpFinderSelfTest.java b/modules/gce-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/gce/TcpDiscoveryGoogleStorageIpFinderSelfTest.java
new file mode 100644
index 0000000..1b3d94c
--- /dev/null
+++ b/modules/gce-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/gce/TcpDiscoveryGoogleStorageIpFinderSelfTest.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.gce;
+
+import java.lang.reflect.Method;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.util.Collection;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAbstractSelfTest;
+import org.apache.ignite.testsuites.IgniteGCETestSuite;
+
+/**
+ * Google Cloud Storage based IP finder tests.
+ */
+public class TcpDiscoveryGoogleStorageIpFinderSelfTest
+    extends TcpDiscoveryIpFinderAbstractSelfTest<TcpDiscoveryGoogleStorageIpFinder> {
+    /** Bucket name; must be unique across the whole Google Cloud Storage platform. */
+    private static String bucketName;
+
+    /**
+     * Constructor.
+     *
+     * @throws Exception If any error occurs.
+     */
+    public TcpDiscoveryGoogleStorageIpFinderSelfTest() throws Exception {
+        // No-op.
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTestsStarted() throws Exception {
+        bucketName = "ip-finder-test-bucket-" + InetAddress.getLocalHost().getAddress()[3]; // NOTE(review): the byte is signed, so the suffix may be negative (e.g. "...--42"); harmless but '& 0xFF' would be cleaner.
+
+        super.beforeTestsStarted();
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void afterTestsStopped() throws Exception {
+        try {
+            Method method = TcpDiscoveryGoogleStorageIpFinder.class.getDeclaredMethod("removeBucket", String.class); // Private test-only hook; reflection is intentional.
+
+            method.setAccessible(true);
+
+            method.invoke(finder, bucketName);
+        }
+        catch (Exception e) {
+            log.warning("Failed to remove bucket on GCE [bucketName=" + bucketName + ", msg=" + e.getMessage() + ']');
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override protected TcpDiscoveryGoogleStorageIpFinder ipFinder() throws Exception {
+        TcpDiscoveryGoogleStorageIpFinder finder = new TcpDiscoveryGoogleStorageIpFinder();
+
+        injectLogger(finder);
+
+        assert finder.isShared() : "Ip finder must be shared by default.";
+
+        finder.setServiceAccountId(IgniteGCETestSuite.getServiceAccountId());
+        finder.setServiceAccountP12FilePath(IgniteGCETestSuite.getP12FilePath());
+        finder.setProjectName(IgniteGCETestSuite.getProjectName());
+
+        // Bucket name must be unique across the whole GCE platform.
+        finder.setBucketName(bucketName);
+
+        for (int i = 0; i < 5; i++) { // Best effort: clear any addresses left over from a previous run.
+            Collection<InetSocketAddress> addrs = finder.getRegisteredAddresses();
+
+            if (!addrs.isEmpty())
+                finder.unregisterAddresses(addrs);
+            else
+                return finder;
+
+            U.sleep(1000);
+        }
+
+        if (!finder.getRegisteredAddresses().isEmpty())
+            throw new Exception("Failed to initialize IP finder.");
+
+        return finder;
+    }
+}
diff --git a/modules/gce-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/gce/package-info.java b/modules/gce-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/gce/package-info.java
new file mode 100644
index 0000000..f3867cb
--- /dev/null
+++ b/modules/gce-ext/src/test/java/org/apache/ignite/spi/discovery/tcp/ipfinder/gce/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * Contains Google Cloud Storage IP finder internal tests.
+ */
+
+package org.apache.ignite.spi.discovery.tcp.ipfinder.gce;
diff --git a/modules/gce-ext/src/test/java/org/apache/ignite/testsuites/IgniteGCETestSuite.java b/modules/gce-ext/src/test/java/org/apache/ignite/testsuites/IgniteGCETestSuite.java
new file mode 100644
index 0000000..8d6cbe5
--- /dev/null
+++ b/modules/gce-ext/src/test/java/org/apache/ignite/testsuites/IgniteGCETestSuite.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.testsuites;
+
+import org.apache.ignite.spi.discovery.tcp.ipfinder.gce.TcpDiscoveryGoogleStorageIpFinderSelfTest;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+
+/**
+ * Google Compute Engine integration tests.
+ * Credentials are read from the environment; the checks below require the JVM '-ea' flag to fire.
+ */
+@RunWith(Suite.class)
+@Suite.SuiteClasses({TcpDiscoveryGoogleStorageIpFinderSelfTest.class})
+public class IgniteGCETestSuite {
+    /**
+     * @return Service Account Id.
+     */
+    public static String getServiceAccountId() {
+        String id = System.getenv("test.gce.account.id"); // NOTE(review): dotted names are unusual for env vars - confirm this isn't meant to be a system property.
+
+        assert id != null : "Environment variable 'test.gce.account.id' is not set";
+
+        return id;
+    }
+
+    /**
+     * @return Service Account p12 file path.
+     */
+    public static String getP12FilePath() {
+        String path = System.getenv("test.gce.p12.path"); // NOTE(review): same dotted-name concern as above.
+
+        assert path != null : "Environment variable 'test.gce.p12.path' is not set";
+
+        return path;
+    }
+
+    /**
+     * @return GCE project name.
+     */
+    public static String getProjectName() {
+        String name = System.getenv("test.gce.project.name"); // NOTE(review): same dotted-name concern as above.
+
+        assert name != null : "Environment variable 'test.gce.project.name' is not set";
+
+        return name;
+    }
+}
diff --git a/pom.xml b/pom.xml
index 92019f1..11a6c08 100644
--- a/pom.xml
+++ b/pom.xml
@@ -64,6 +64,9 @@
         <module>modules/spring-cache-ext</module>
         <module>modules/spring-session-ext</module>
         <module>modules/cdc-ext</module>
+        <module>modules/aws-ext</module>
+        <module>modules/azure-ext</module>
+        <module>modules/gce-ext</module>
     </modules>
 
     <profiles>