HADOOP-19565. Release Hadoop 3.4.2: improving release commands
diff --git a/README.md b/README.md
index 895f6ca..8cfda36 100644
--- a/README.md
+++ b/README.md
@@ -469,10 +469,22 @@
 
 # How to download and build a staged release candidate
 
-This relies on the release-info file pointing to the source directory
+This project can be used to download and validate a release created by others,
+downloading the staged artifacts and validating their signatures before
+executing some (minimal) commands against them.
+
+This relies on the relevant `release-info-` file declaring the URL to
+download the artifacts from, and the maven staging repository.
 
 ```properties
-http.source=https://dist.apache.org/repos/dist/dev/hadoop/hadoop-${hadoop.version}-RC${rc}/
+amd.src.dir=https://dist.apache.org/repos/dist/dev/hadoop/hadoop-${hadoop.version}-RC${rc}/
 ```
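+
+The ARM artifact source is declared the same way; the 3.4.2 release file,
+for example, points it at the same directory as the x86 artifacts:
+
+```properties
+arm.src.dir=${amd.src.dir}
+```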
 
 ### Targets of Relevance
@@ -490,10 +502,16 @@
 | `release.bin.commands`  | execute a series of commands against the untarred binaries |
 | `release.site.untar`    | untar the downloaded site artifact                         |
 | `release.site.validate` | perform minimal validation of the site.                    |
-| `release.arm.untar`     | untar the ARM binary file                                  |
-| `release.arm.commands`  | execute commands against the arm binaries                  |
 | `release.lean.tar`      | create a release of the x86 binary tar without the AWS SDK |
 
+
 set `check.native.binaries` to false to skip native binary checks on platforms without them
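+
+For example, to skip the native-binary checks on a platform without them
+(a sketch; ant properties can be overridden with `-D` on the command line):
+
+```bash
+ant release.bin.commands -Dcheck.native.binaries=false
+```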
 
 ### Download the Staged RC files from the Apache http servers
@@ -539,11 +557,18 @@
 ```properties
 source.compile.maven.args=-Pnative
 ```
-These are added at the end of the hard-coded arguments (`clean install -DskipTests`)
+These are added at the end of the hard-coded maven arguments (`clean install -DskipTests`).
 
 Testing is also possible through the target `release.src.test`
 
-```
+```bash
 ant release.src.test
 ```
 Again, the options set in `source.compile.maven.args` are passed down.
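+
+The maven arguments can also be supplied on the ant command line (a sketch;
+`-D` definitions take precedence over values from the property files):
+
+```bash
+ant release.src.test -Dsource.compile.maven.args=-Pnative
+```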
@@ -594,10 +619,25 @@
 is still executed, with the outcome printed (reporting a failure if
 the binaries are not present).
 
-The and build itself is successful.
+The ant build itself will succeed, even if the `checknative` command reports a failure.
 
 ## Testing ARM binaries
 
+There are ARM variants of the commands to fetch and validate the ARM binaries.
+
+| target                  | action                                                     |
+|-------------------------|------------------------------------------------------------|
+| `release.fetch.arm`     | fetch ARM artifacts                                        |
+| `gpg.arm.verify`        | verify ARM artifacts                                       |
+| `release.arm.untar`     | untar the ARM binary file                                  |
+| `release.arm.commands`  | execute commands against the ARM binaries                  |
+
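+For example, fetching and verifying before untarring (a sketch; ant runs
+the targets in the order given):
+
+```bash
+ant release.fetch.arm gpg.arm.verify
+```
+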
 ```bash
 # untars the `-aarch64.tar.gz` binary
 ant release.arm.untar
@@ -680,7 +720,6 @@
 ant cloudstore.build
 ```
 
-Note: this does not include the AWS V1 SDK `-Pextra` profile.
 
 ## Build and test Google GCS
 
@@ -747,6 +786,7 @@
 ```
 
 
+
 # After the Vote Succeeds: publishing the release
 
 ## Update the announcement and create site/email notifications
diff --git a/build.xml b/build.xml
index 9c1c220..0441213 100644
--- a/build.xml
+++ b/build.xml
@@ -218,7 +218,6 @@
       spark.version=${spark.version}
 
       cloudstore.dir=${cloudstore.dir}
-      cloudstore.profile=${cloudstore.profile}
       bigdata-interop.dir=${bigdata-interop.dir}
       hboss.dir=${hboss.dir}
       cloud-examples.dir=${cloud-examples.dir}
@@ -248,7 +247,7 @@
 
     <macrodef name="gpg-verify-file" >
       <attribute name="dir" />
-      <attribute name="file" />
+      <attribute name="name" />
       <sequential>
         <echo>Verifying GPG signature of @{dir}/@{name}</echo>
         <gpg dir="@{dir}">
@@ -485,6 +484,13 @@
     <gpgverify name="${release}-site.tar.gz"/>
     <gpgverify name="${release}.tar.gz"/>
     <gpgverify name="${release}-rat.txt"/>
+
+  </target>
+
+  <target name="gpg.arm.verify" depends="release.dir.check"
+    description="verify the downloaded arm artifacts">
+
+    <echo>Verifying GPG signatures of arm artifacts in ${release.dir}</echo>
     <gpgverify name="${arm.binary.filename}"/>
 
   </target>
@@ -940,11 +946,8 @@
       if this is done with java11, it shouldn't be released.
 
     </echo>
-    <require p="cloudstore.profile"/>
-
     <mvn dir="${cloudstore.dir}">
       <arg value="-Psnapshots-and-staging"/>
-      <arg value="-P${cloudstore.profile}"/>
       <arg value="-Dhadoop.version=${hadoop.version}"/>
       <arg value="clean"/>
       <arg value="package"/>
@@ -952,26 +955,6 @@
     </mvn>
   </target>
 
-  <!-- ========================================================= -->
-  <!-- Hadoop FS API shim WiP -->
-  <!-- ========================================================= -->
-
-
-  <target name="fsapi.test" if="fs-api-shim.dir"
-    depends="init"
-    description="Build and test fs-api-shim">
-    <echo>
-      Build the fs api shim module.
-      This MUST build against hadoop-3.2.0; the test version is
-      what we want here.
-    </echo>
-    <mvn dir="${fs-api-shim.dir}">
-      <arg value="-Psnapshots-and-staging"/>
-      <arg value="-Dhadoop.test.version=${hadoop.version}"/>
-      <arg value="clean"/>
-      <arg value="test"/>
-    </mvn>
-  </target>
 
   <!-- ========================================================= -->
   <!-- Parquet -->
@@ -1120,7 +1103,6 @@
       <download artifact="CHANGELOG.md"/>
       <download artifact="RELEASENOTES.md"/>
       <download artifact="${release}.tar.gz"/>
-      <download artifact="${release}-aarch64.tar.gz"/>
       <download artifact="${release}-site.tar.gz"/>
       <download artifact="${release}-src.tar.gz"/>
       <download artifact="${release}-rat.txt"/>
@@ -1224,6 +1206,9 @@
 
     <echo>checknative</echo>
 
+    <echo>
+      Checking for native binaries; check.native.binaries=${check.native.binaries}
+    </echo>
     <hadoopq failonerror="${check.native.binaries}">
       <arg value="checknative" />
     </hadoopq>
diff --git a/pom.xml b/pom.xml
index 9ee9681..3db252a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -29,8 +29,9 @@
     <enforced.java.version>${javac.version}</enforced.java.version>
     <maven-antrun-plugin.version>3.1.0</maven-antrun-plugin.version>
 
+    <aws-java-sdk-v2.version>2.29.52</aws-java-sdk-v2.version>
 
-    <hadoop.version>3.4.0</hadoop.version>
+    <hadoop.version>3.4.2</hadoop.version>
 
   </properties>
 
@@ -243,6 +244,18 @@
     </dependency>
 
     <dependency>
+      <groupId>software.amazon.awssdk</groupId>
+      <artifactId>bundle</artifactId>
+      <version>${aws-java-sdk-v2.version}</version>
+      <exclusions>
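+        <!-- exclude all transitive dependencies; only the bundle artifact itself is wanted -->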
+        <exclusion>
+          <groupId>*</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>4.13.2</version>
@@ -309,7 +322,7 @@
     <profile>
       <id>branch-3.4</id>
       <properties>
-        <hadoop.version>3.4.1</hadoop.version>
+        <hadoop.version>3.4.3-SNAPSHOT</hadoop.version>
       </properties>
     </profile>
 
diff --git a/src/releases/release-info-3.4.2.properties b/src/releases/release-info-3.4.2.properties
index cd1099d..a3656b2 100644
--- a/src/releases/release-info-3.4.2.properties
+++ b/src/releases/release-info-3.4.2.properties
@@ -28,5 +28,4 @@
 arm.src.dir=${amd.src.dir}
 http.source=${amd.src.dir}
 asf.staging.url=https://repository.apache.org/content/repositories/orgapachehadoop-1437
-cloudstore.profile=sdk2
 
diff --git a/src/test/java/org/apache/hadoop/validator/TestRuntimeValid.java b/src/test/java/org/apache/hadoop/validator/TestRuntimeValid.java
index 87e56ad..ec3336a 100644
--- a/src/test/java/org/apache/hadoop/validator/TestRuntimeValid.java
+++ b/src/test/java/org/apache/hadoop/validator/TestRuntimeValid.java
@@ -22,10 +22,9 @@
 
 import org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem;
 import org.apache.hadoop.fs.s3a.S3AFileSystem;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.validator.CompileFS;
 
 /**
  * Let's test that runtime.
  */
 public class TestRuntimeValid {