Merge branch 'docs/markdown-converted/4.8.1' into master
diff --git a/CHANGES.txt b/CHANGES.txt
index 8383989..5c61e56 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -177,7 +177,7 @@
 	Van Den Berghe http://apache.markmail.org/message/hafnuhq2ydhfjmi2.
 •	Lucene.Net.Store.LockVerifyServer: Read/write 1 byte instead of 1 int (4 bytes). 
 	Also, we don't need 2 streams in .NET for input/output (solution provided by Vincent Van Den Berghe).
-•	Lucene.Net.TestFramework.Store.MockDirectoryWrapper.IndexInputSlicerAnonymousInnerClassHelper: Fixed Dispose() method.
+•	Lucene.Net.TestFramework.Store.MockDirectoryWrapper.IndexInputSlicerAnonymousClass: Fixed Dispose() method.
 •	Lucene.Net.Index.IndexWriter: Fixed string formatting of numeric values in InfoStream message.
 •	Lucene.Net.Tests.Querys.CommonTermsQueryTest: Added missing TestRandomIndex() test
 •	Lucene.Net.Analysis.Common.Analysis.CharFilter.MappingCharFilterFactory: fixed escaping problem in parsing regex
diff --git a/Directory.Build.targets b/Directory.Build.targets
index d62cfbb..dc364c2 100644
--- a/Directory.Build.targets
+++ b/Directory.Build.targets
@@ -1,4 +1,4 @@
-<!--
+<!--
 
  Licensed to the Apache Software Foundation (ASF) under one
  or more contributor license agreements.  See the NOTICE file
@@ -62,7 +62,6 @@
     <DefineConstants>$(DefineConstants);FEATURE_APPDOMAIN_ISFULLYTRUSTED</DefineConstants>
     <DefineConstants>$(DefineConstants);FEATURE_ASSEMBLY_GETCALLINGASSEMBLY</DefineConstants>
     <DefineConstants>$(DefineConstants);FEATURE_BITARRAY_COPYTO</DefineConstants>
-    <DefineConstants>$(DefineConstants);FEATURE_CONCURRENTMERGESCHEDULER</DefineConstants>
     <DefineConstants>$(DefineConstants);FEATURE_CULTUREINFO_GETCULTURES</DefineConstants>
     <DefineConstants>$(DefineConstants);FEATURE_DTD_PROCESSING</DefineConstants>
     <DefineConstants>$(DefineConstants);FEATURE_FILESTREAM_LOCK</DefineConstants>
diff --git a/README.md b/README.md
index 0e4902f..b3d8fa0 100644
--- a/README.md
+++ b/README.md
@@ -120,6 +120,8 @@
 dotnet tool install lucene-cli -g --version 4.8.0-beta00012
 ```
 
+> NOTE: The version of the CLI you install should match the version of Lucene.NET you use.
+
 Once installed, you can explore the commands and options that are available by entering the command `lucene`.
 
 [lucene-cli Documentation](https://github.com/apache/lucenenet/blob/master/src/dotnet/tools/lucene-cli/docs/index.md)
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 1e44f50..232cd46 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -1,4 +1,4 @@
-# Licensed to the Apache Software Foundation (ASF) under one
+# Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
 # regarding copyright ownership.  The ASF licenses this file
@@ -49,6 +49,8 @@
 # Verbose: 'false' (Optional - set to true for verbose logging output)
 # Multiplier: '1' (Optional - the number of iterations to multiply applicable tests by)
 
+# RunX86Tests: 'false' (Optional - set to 'true' to enable x86 tests)
+
 variables:
 - name: BuildCounter
   value: $[counter(variables['VersionSuffix'],coalesce(variables['BuildCounterSeed'], 1250))]
@@ -348,8 +350,8 @@
         maximumParallelJobs: $(maximumParallelJobs)
         maximumAllowedFailures: $(maximumAllowedFailures)
 
-  - job: Test_net5_0_x86 # Only run Nightly
-    condition: and(succeeded(), ne(variables['RunTests'], 'false'), eq(variables['IsNightly'], 'true'))
+  - job: Test_net5_0_x86 # Only run Nightly or if explicitly enabled with RunX86Tests
+    condition: and(succeeded(), ne(variables['RunTests'], 'false'), or(eq(variables['IsNightly'], 'true'), eq(variables['RunX86Tests'], 'true')))
     strategy:
       matrix:
         Windows:
@@ -412,8 +414,8 @@
         maximumParallelJobs: $(maximumParallelJobs)
         maximumAllowedFailures: $(maximumAllowedFailures)
 
-  - job: Test_netcoreapp3_1_x86 # Only run Nightly
-    condition: and(succeeded(), ne(variables['RunTests'], 'false'), eq(variables['IsNightly'], 'true'))
+  - job: Test_netcoreapp3_1_x86 # Only run Nightly or if explicitly enabled with RunX86Tests
+    condition: and(succeeded(), ne(variables['RunTests'], 'false'), or(eq(variables['IsNightly'], 'true'), eq(variables['RunX86Tests'], 'true')))
     strategy:
       matrix:
         Windows:
@@ -476,8 +478,8 @@
         maximumParallelJobs: $(maximumParallelJobs)
         maximumAllowedFailures: $(maximumAllowedFailures)
 
-  - job: Test_netcoreapp2_2_x86 # Only run Nightly
-    condition: and(succeeded(), ne(variables['RunTests'], 'false'), eq(variables['IsNightly'], 'true'))
+  - job: Test_netcoreapp2_2_x86 # Only run Nightly or if explicitly enabled with RunX86Tests
+    condition: and(succeeded(), ne(variables['RunTests'], 'false'), or(eq(variables['IsNightly'], 'true'), eq(variables['RunX86Tests'], 'true')))
     strategy:
       matrix:
         Windows:
@@ -523,8 +525,8 @@
         maximumParallelJobs: 8
         maximumAllowedFailures: 2 # Maximum allowed failures for a successful build
 
-  - job: Test_net48_Windows_x86 # Only run Nightly
-    condition: and(succeeded(), ne(variables['RunTests'], 'false'), eq(variables['IsNightly'], 'true'))
+  - job: Test_net48_Windows_x86 # Only run Nightly or if explicitly enabled with RunX86Tests
+    condition: and(succeeded(), ne(variables['RunTests'], 'false'), or(eq(variables['IsNightly'], 'true'), eq(variables['RunX86Tests'], 'true')))
     displayName: 'Test net48,x86 on Windows'
     pool:
       vmImage: 'windows-2019'
diff --git a/build/Dependencies.props b/build/Dependencies.props
index 48103c5..9bc0dbd 100644
--- a/build/Dependencies.props
+++ b/build/Dependencies.props
@@ -1,4 +1,4 @@
-<!--
+<!--
 
  Licensed to the Apache Software Foundation (ASF) under one
  or more contributor license agreements.  See the NOTICE file
@@ -68,8 +68,8 @@
     <MSTestTestAdapterPackageVersion>$(MSTestTestFrameworkPackageVersion)</MSTestTestAdapterPackageVersion>
     <NETStandardLibrary20PackageVersion>2.0.3</NETStandardLibrary20PackageVersion>
     <NewtonsoftJsonPackageVersion>10.0.1</NewtonsoftJsonPackageVersion>
-    <NUnit3TestAdapterPackageVersion>3.16.1</NUnit3TestAdapterPackageVersion>
-    <NUnitPackageVersion>3.12.0</NUnitPackageVersion>
+    <NUnit3TestAdapterPackageVersion>3.17.0</NUnit3TestAdapterPackageVersion>
+    <NUnitPackageVersion>3.13.1</NUnitPackageVersion>
     <OpenNLPNETPackageVersion>1.9.1</OpenNLPNETPackageVersion>
     <SharpZipLibPackageVersion>1.1.0</SharpZipLibPackageVersion>
     <Spatial4nCorePackageVersion>0.4.1</Spatial4nCorePackageVersion>
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs
index 097ba8c..46fa322 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/CharFilter/BaseCharFilter.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+using J2N.Numerics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System.Diagnostics;
@@ -61,7 +62,7 @@
 
             while (hi >= lo)
             {
-                mid = (int)((uint)(lo + hi) >> 1);
+                mid = (lo + hi).TripleShift(1);
                 if (currentOff < offsets[mid])
                 {
                     hi = mid - 1;
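The hunk above is representative of a pattern repeated throughout this merge: the `(int)((uint)x >> n)` casts, which emulated Java's unsigned right shift (`>>>`), are replaced with J2N's `TripleShift` extension methods (hence the added `using J2N.Numerics;`). A minimal sketch of the equivalence, using an illustrative value rather than anything from this commit:

```
using J2N.Numerics; // TripleShift extension methods for int/long/short

int value = -8; // illustrative only

int viaCasts = (int)((uint)value >> 1); // old pattern: unsigned shift via uint cast
int viaJ2N = value.TripleShift(1);      // new pattern: Java's `value >>> 1`

System.Console.WriteLine(viaCasts == viaJ2N); // True
```

Both forms yield the same bit pattern for negative inputs, which is the case the plain sign-extending `>>` operator would get wrong.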
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs
index 21d966a..f9373a7 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/Hyphenation/HyphenationTree.cs
@@ -1,4 +1,5 @@
-using System;
+using J2N.Numerics;
+using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
@@ -100,7 +101,7 @@
             byte v = m_vspace[k++];
             while (v != 0)
             {
-                char c = (char)(((int)((uint)v >> 4)) - 1 + '0');
+                char c = (char)(v.TripleShift(4) - 1 + '0');
                 buf.Append(c);
                 c = (char)(v & 0x0f);
                 if (c == 0)
@@ -251,7 +252,7 @@
             byte v = m_vspace[k++];
             while (v != 0)
             {
-                char c = (char)((((int)((uint)v >> 4))) - 1);
+                char c = (char)(v.TripleShift(4) - 1);
                 buf.Append(c);
                 c = (char)(v & 0x0f);
                 if (c == 0)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
index b61ebd9..6804308 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Dictionary.cs
@@ -1,5 +1,6 @@
 using J2N;
 using J2N.Collections.Generic.Extensions;
+using J2N.Numerics;
 using J2N.Text;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
@@ -946,7 +947,7 @@
             {
                 return CharsRef.EMPTY_CHARS;
             }
-            int len = (int)((uint)b.Length >> 1);
+            int len = b.Length.TripleShift(1);
             char[] flags = new char[len];
             int upto = 0;
             int end = b.Offset + b.Length;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs
index 9388316..6cdd491 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Hunspell/Stemmer.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Analysis.Util;
+using J2N.Numerics;
+using Lucene.Net.Analysis.Util;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
@@ -197,7 +198,7 @@
                         char stripOrd = (char)(affixReader.ReadInt16() & 0xffff);
                         int condition = (char)(affixReader.ReadInt16() & 0xffff);
                         bool crossProduct = (condition & 1) == 1;
-                        condition = (int)((uint)condition >> 1);
+                        condition = condition.TripleShift(1);
                         char append = (char)(affixReader.ReadInt16() & 0xffff);
 
                         bool compatible;
@@ -266,7 +267,7 @@
                         char stripOrd = (char)(affixReader.ReadInt16() & 0xffff);
                         int condition = (char)(affixReader.ReadInt16() & 0xffff);
                         bool crossProduct = (condition & 1) == 1;
-                        condition = (int)((uint)condition >> 1);
+                        condition = condition.TripleShift(1);
                         char append = (char)(affixReader.ReadInt16() & 0xffff);
 
                         bool compatible;
@@ -368,7 +369,7 @@
             affixReader.SkipBytes(2); // strip
             int condition = (char)(affixReader.ReadInt16() & 0xffff);
             bool crossProduct = (condition & 1) == 1;
-            condition = (int)((uint)condition >> 1);
+            condition = condition.TripleShift(1);
             char append = (char)(affixReader.ReadInt16() & 0xffff);
 
             List<CharsRef> stems = new List<CharsRef>();
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PrefixAndSuffixAwareTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PrefixAndSuffixAwareTokenFilter.cs
index 76125c6..65f3808 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PrefixAndSuffixAwareTokenFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Miscellaneous/PrefixAndSuffixAwareTokenFilter.cs
@@ -31,15 +31,15 @@
         public PrefixAndSuffixAwareTokenFilter(TokenStream prefix, TokenStream input, TokenStream suffix) 
             : base(suffix)
         {
-            prefix = new PrefixAwareTokenFilterAnonymousInnerClassHelper(this, prefix, input);
-            this.suffix = new PrefixAwareTokenFilterAnonymousInnerClassHelper2(this, prefix, suffix);
+            prefix = new PrefixAwareTokenFilterAnonymousClass(this, prefix, input);
+            this.suffix = new PrefixAwareTokenFilterAnonymousClass2(this, prefix, suffix);
         }
 
-        private sealed class PrefixAwareTokenFilterAnonymousInnerClassHelper : PrefixAwareTokenFilter
+        private sealed class PrefixAwareTokenFilterAnonymousClass : PrefixAwareTokenFilter
         {
             private readonly PrefixAndSuffixAwareTokenFilter outerInstance;
 
-            public PrefixAwareTokenFilterAnonymousInnerClassHelper(PrefixAndSuffixAwareTokenFilter outerInstance, TokenStream prefix, TokenStream input) 
+            public PrefixAwareTokenFilterAnonymousClass(PrefixAndSuffixAwareTokenFilter outerInstance, TokenStream prefix, TokenStream input) 
                 : base(prefix, input)
             {
                 this.outerInstance = outerInstance;
@@ -51,11 +51,11 @@
             }
         }
 
-        private sealed class PrefixAwareTokenFilterAnonymousInnerClassHelper2 : PrefixAwareTokenFilter
+        private sealed class PrefixAwareTokenFilterAnonymousClass2 : PrefixAwareTokenFilter
         {
             private readonly PrefixAndSuffixAwareTokenFilter outerInstance;
 
-            public PrefixAwareTokenFilterAnonymousInnerClassHelper2(PrefixAndSuffixAwareTokenFilter outerInstance, TokenStream prefix, TokenStream suffix) 
+            public PrefixAwareTokenFilterAnonymousClass2(PrefixAndSuffixAwareTokenFilter outerInstance, TokenStream prefix, TokenStream suffix) 
                 : base(prefix, suffix)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenFilter.cs
index 157fed6..0c5deef 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/NGram/NGramTokenFilter.cs
@@ -106,14 +106,14 @@
             }
             else
             {
-                posIncAtt = new PositionIncrementAttributeAnonymousInnerClassHelper();
-                posLenAtt = new PositionLengthAttributeAnonymousInnerClassHelper();
+                posIncAtt = new PositionIncrementAttributeAnonymousClass();
+                posLenAtt = new PositionLengthAttributeAnonymousClass();
             }
             termAtt = AddAttribute<ICharTermAttribute>();
             offsetAtt = AddAttribute<IOffsetAttribute>();
         }
 
-        private class PositionIncrementAttributeAnonymousInnerClassHelper : PositionIncrementAttribute
+        private class PositionIncrementAttributeAnonymousClass : PositionIncrementAttribute
         {
             public override int PositionIncrement
             {
@@ -122,7 +122,7 @@
             }
         }
 
-        private class PositionLengthAttributeAnonymousInnerClassHelper : PositionLengthAttribute
+        private class PositionLengthAttributeAnonymousClass : PositionLengthAttribute
         {
             public override int PositionLength
             {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
index 09d202b..f66cd8c 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
@@ -259,9 +259,9 @@
             }
         }
 
-        private static readonly SinkFilter ACCEPT_ALL_FILTER = new SinkFilterAnonymousInnerClassHelper();
+        private static readonly SinkFilter ACCEPT_ALL_FILTER = new SinkFilterAnonymousClass();
 
-        private class SinkFilterAnonymousInnerClassHelper : SinkFilter
+        private class SinkFilterAnonymousClass : SinkFilter
         {
             public override bool Accept(AttributeSource source)
             {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicAnalyzer.cs
index 6137851..b99a2f9 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/ClassicAnalyzer.cs
@@ -104,16 +104,16 @@
             TokenStream tok = new ClassicFilter(src);
             tok = new LowerCaseFilter(m_matchVersion, tok);
             tok = new StopFilter(m_matchVersion, tok, m_stopwords);
-            return new TokenStreamComponentsAnonymousInnerClassHelper(this, src, tok);
+            return new TokenStreamComponentsAnonymousClass(this, src, tok);
         }
 
-        private class TokenStreamComponentsAnonymousInnerClassHelper : TokenStreamComponents
+        private class TokenStreamComponentsAnonymousClass : TokenStreamComponents
         {
             private readonly ClassicAnalyzer outerInstance;
 
             private readonly ClassicTokenizer src;
 
-            public TokenStreamComponentsAnonymousInnerClassHelper(ClassicAnalyzer outerInstance, ClassicTokenizer src, TokenStream tok)
+            public TokenStreamComponentsAnonymousClass(ClassicAnalyzer outerInstance, ClassicTokenizer src, TokenStream tok)
                 : base(src, tok)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardAnalyzer.cs
index 8475d58..b5b9b9e 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/StandardAnalyzer.cs
@@ -105,16 +105,16 @@
             TokenStream tok = new StandardFilter(m_matchVersion, src);
             tok = new LowerCaseFilter(m_matchVersion, tok);
             tok = new StopFilter(m_matchVersion, tok, m_stopwords);
-            return new TokenStreamComponentsAnonymousInnerClassHelper(this, src, tok);
+            return new TokenStreamComponentsAnonymousClass(this, src, tok);
         }
 
-        private class TokenStreamComponentsAnonymousInnerClassHelper : TokenStreamComponents
+        private class TokenStreamComponentsAnonymousClass : TokenStreamComponents
         {
             private readonly StandardAnalyzer outerInstance;
 
             private readonly StandardTokenizer src;
 
-            public TokenStreamComponentsAnonymousInnerClassHelper(StandardAnalyzer outerInstance, StandardTokenizer src, TokenStream tok)
+            public TokenStreamComponentsAnonymousClass(StandardAnalyzer outerInstance, StandardTokenizer src, TokenStream tok)
                 : base(src, tok)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailAnalyzer.cs b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailAnalyzer.cs
index 8d2f8a5..a42fe82 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailAnalyzer.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Standard/UAX29URLEmailAnalyzer.cs
@@ -95,16 +95,16 @@
             TokenStream tok = new StandardFilter(m_matchVersion, src);
             tok = new LowerCaseFilter(m_matchVersion, tok);
             tok = new StopFilter(m_matchVersion, tok, m_stopwords);
-            return new TokenStreamComponentsAnonymousInnerClassHelper(this, src, tok);
+            return new TokenStreamComponentsAnonymousClass(this, src, tok);
         }
 
-        private class TokenStreamComponentsAnonymousInnerClassHelper : TokenStreamComponents
+        private class TokenStreamComponentsAnonymousClass : TokenStreamComponents
         {
             private readonly UAX29URLEmailAnalyzer outerInstance;
 
             private readonly UAX29URLEmailTokenizer src;
 
-            public TokenStreamComponentsAnonymousInnerClassHelper(UAX29URLEmailAnalyzer outerInstance, UAX29URLEmailTokenizer src, TokenStream tok)
+            public TokenStreamComponentsAnonymousClass(UAX29URLEmailAnalyzer outerInstance, UAX29URLEmailTokenizer src, TokenStream tok)
                 : base(src, tok)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs
index faa5c59..50622b7 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/SynonymFilter.cs
@@ -1,4 +1,5 @@
 using J2N;
+using J2N.Numerics;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
@@ -493,7 +494,7 @@
 
             int code = bytesReader.ReadVInt32();
             bool keepOrig = (code & 0x1) == 0;
-            int count = (int)((uint)code >> 1);
+            int count = code.TripleShift(1);
             //System.out.println("  addOutput count=" + count + " keepOrig=" + keepOrig);
             for (int outputIDX = 0; outputIDX < count; outputIDX++)
             {
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayIterator.cs b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayIterator.cs
index 3a66d5b..7f014cd 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayIterator.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Util/CharArrayIterator.cs
@@ -129,10 +129,10 @@
         /// </summary>
         public static CharArrayIterator NewSentenceInstance()
         {
-            return new CharArrayIteratorAnonymousInnerClassHelper2();
+            return new CharArrayIteratorAnonymousClass2();
         }
 
-        private class CharArrayIteratorAnonymousInnerClassHelper2 : CharArrayIterator
+        private class CharArrayIteratorAnonymousClass2 : CharArrayIterator
         {
             // no bugs
             protected override char JreBugWorkaround(char ch)
@@ -147,10 +147,10 @@
         /// </summary>
         public static CharArrayIterator NewWordInstance()
         {
-            return new CharArrayIteratorAnonymousInnerClassHelper4();
+            return new CharArrayIteratorAnonymousClass4();
         }
 
-        private class CharArrayIteratorAnonymousInnerClassHelper4 : CharArrayIterator
+        private class CharArrayIteratorAnonymousClass4 : CharArrayIterator
         {
             // no bugs
             protected override char JreBugWorkaround(char ch)
diff --git a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs
index aa74389..5368b4d 100644
--- a/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs
+++ b/src/Lucene.Net.Analysis.ICU/Analysis/Icu/Segmentation/ICUTokenizerFactory.cs
@@ -112,14 +112,14 @@
             }
             else
             {
-                config = new DefaultICUTokenizerConfigAnonymousHelper(cjkAsWords, myanmarAsWords, tailored, loader);
+                config = new DefaultICUTokenizerConfigAnonymousClass(cjkAsWords, myanmarAsWords, tailored, loader);
             }
         }
 
-        private class DefaultICUTokenizerConfigAnonymousHelper : DefaultICUTokenizerConfig
+        private class DefaultICUTokenizerConfigAnonymousClass : DefaultICUTokenizerConfig
         {
             private readonly BreakIterator[] breakers;
-            public DefaultICUTokenizerConfigAnonymousHelper(bool cjkAsWords, bool myanmarAsWords, IDictionary<int, string> tailored, IResourceLoader loader)
+            public DefaultICUTokenizerConfigAnonymousClass(bool cjkAsWords, bool myanmarAsWords, IDictionary<int, string> tailored, IResourceLoader loader)
                 : base(cjkAsWords, myanmarAsWords)
             {
                 breakers = new BreakIterator[1 + UChar.GetIntPropertyMaxValue(UProperty.Script)];
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs
index eb3c754..9ca9d5d 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/BinaryDictionary.cs
@@ -1,5 +1,6 @@
 using J2N;
 using J2N.IO;
+using J2N.Numerics;
 using Lucene.Net.Codecs;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
@@ -120,7 +121,7 @@
                         targetMapOffsets[sourceId] = ofs;
                         sourceId++;
                     }
-                    accum += (int)((uint)val) >> 1;
+                    accum += val.TripleShift(1);
                     targetMap[ofs] = accum;
                 }
                 if (sourceId + 1 != targetMapOffsets.Length)
@@ -222,12 +223,12 @@
 
         public virtual int GetLeftId(int wordId)
         {
-            return (short)((ushort)buffer.GetInt16(wordId)) >> 3;
+            return buffer.GetInt16(wordId).TripleShift(3);
         }
 
         public virtual int GetRightId(int wordId)
         {
-            return (short)((ushort)buffer.GetInt16(wordId)) >> 3;
+            return buffer.GetInt16(wordId).TripleShift(3);
         }
 
         public virtual int GetWordCost(int wordId)
@@ -241,7 +242,7 @@
             {
                 int offset = BaseFormOffset(wordId);
                 int data = buffer.Get(offset++) & 0xff;
-                int prefix = (int)((uint)data) >> 4;
+                int prefix = data.TripleShift(4);
                 int suffix = data & 0xF;
                 char[] text = new char[prefix + suffix];
                 System.Array.Copy(surfaceForm, off, text, 0, prefix);
@@ -263,7 +264,7 @@
             {
                 int offset = ReadingOffset(wordId);
                 int readingData = buffer.Get(offset++) & 0xff;
-                return ReadString(offset, (int)((uint)readingData) >> 1, (readingData & 1) == 1);
+                return ReadString(offset, readingData.TripleShift(1), (readingData & 1) == 1);
             }
             else
             {
@@ -296,7 +297,7 @@
             {
                 int offset = PronunciationOffset(wordId);
                 int pronunciationData = buffer.Get(offset++) & 0xff;
-                return ReadString(offset, (int)((uint)pronunciationData) >> 1, (pronunciationData & 1) == 1);
+                return ReadString(offset, pronunciationData.TripleShift(1), (pronunciationData & 1) == 1);
             }
             else
             {
@@ -346,7 +347,7 @@
                 }
                 else
                 {
-                    readingLength = (int)((uint)readingData) >> 1;
+                    readingLength = readingData.TripleShift(1);
                 }
                 return offset + readingLength;
             }
diff --git a/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs b/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs
index 2d41024..3845a77 100644
--- a/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs
+++ b/src/Lucene.Net.Analysis.Kuromoji/Dict/ConnectionCosts.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Codecs;
+using J2N.Numerics;
+using Lucene.Net.Codecs;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
 using System;
@@ -52,7 +53,7 @@
                     for (int i = 0; i < a.Length; i++)
                     {
                         int raw = @in.ReadVInt32();
-                        accum += ((int)((uint)raw) >> 1) ^ -(raw & 1);
+                        accum += raw.TripleShift(1) ^ -(raw & 1);
                         a[i] = (short)accum;
                     }
                 }
diff --git a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
index 23eb7ec..05c6633 100644
--- a/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
+++ b/src/Lucene.Net.Analysis.Phonetic/Language/Bm/Rule.cs
@@ -398,12 +398,12 @@
             }
         }
 
-        private class RuleAnonymousHelper : Rule
+        private class RuleAnonymousClass : Rule
         {
             private readonly int myLine;
             private readonly string loc;
 
-            public RuleAnonymousHelper(string pat, string lCon, string rCon, IPhonemeExpr ph, int cLine, string location)
+            public RuleAnonymousClass(string pat, string lCon, string rCon, IPhonemeExpr ph, int cLine, string location)
                 : base(pat, lCon, rCon, ph)
             {
                 this.myLine = cLine;
@@ -497,7 +497,7 @@
                                         string rCon = StripQuotes(parts[2]);
                                         IPhonemeExpr ph = ParsePhonemeExpr(StripQuotes(parts[3]));
                                         int cLine = currentLine;
-                                        Rule r = new RuleAnonymousHelper(pat, lCon, rCon, ph, cLine, location);
+                                        Rule r = new RuleAnonymousClass(pat, lCon, rCon, ph, cLine, location);
 
                                         string patternKey = r.pattern.Substring(0, 1 - 0);
                                         if (!lines.TryGetValue(patternKey, out IList<Rule> rules) || rules == null)
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/PathNode.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/PathNode.cs
index cda6cac..6b60c1b 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/PathNode.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/PathNode.cs
@@ -1,4 +1,5 @@
 // lucene version compatibility level: 4.8.1
+using J2N.Numerics;
 using System;
 
 namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
@@ -53,7 +54,7 @@
             result = prime * result + PreNode;
             long temp;
             temp = J2N.BitConversion.DoubleToInt64Bits(Weight);
-            result = prime * result + (int)(temp ^ (int)((uint)temp >> 32));
+            result = prime * result + (int)(temp ^ temp.TripleShift(32));
             return result;
         }
 
diff --git a/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegTokenPair.cs b/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegTokenPair.cs
index d554b41..8a6ebdd 100644
--- a/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegTokenPair.cs
+++ b/src/Lucene.Net.Analysis.SmartCn/Hhmm/SegTokenPair.cs
@@ -1,4 +1,5 @@
 // lucene version compatibility level: 4.8.1
+using J2N.Numerics;
 using Lucene.Net.Support;
 
 namespace Lucene.Net.Analysis.Cn.Smart.Hhmm
@@ -65,7 +66,7 @@
             result = prime * result + To;
             long temp;
             temp = J2N.BitConversion.DoubleToInt64Bits(Weight);
-            result = prime * result + (int)(temp ^ (int)((uint)temp >> 32));
+            result = prime * result + (int)(temp ^ temp.TripleShift(32));
             return result;
         }
 
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs
index 07f2a19..c210d59 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/DemoHTMLParser.cs
@@ -61,7 +61,7 @@
                 parser.SetFeature(TagSoup.Parser.NAMESPACES_FEATURE, true);
 
                 StringBuilder title = new StringBuilder(), body = new StringBuilder();
-                DefaultHandler handler = new DefaultHandlerAnonymousHelper(this, title, body);
+                DefaultHandler handler = new DefaultHandlerAnonymousClass(this, title, body);
 
                 parser.ContentHandler = handler;
                 parser.ErrorHandler = handler;
@@ -74,7 +74,7 @@
                 this.body = body.ToString();
             }
 
-            private class DefaultHandlerAnonymousHelper : DefaultHandler
+            private class DefaultHandlerAnonymousClass : DefaultHandler
             {
                 private int inBODY = 0, inHEAD = 0, inTITLE = 0, suppressed = 0;
 
@@ -82,7 +82,7 @@
                 private readonly StringBuilder title;
                 private readonly StringBuilder body;
 
-                public DefaultHandlerAnonymousHelper(Parser outerInstance, StringBuilder title, StringBuilder body)
+                public DefaultHandlerAnonymousClass(Parser outerInstance, StringBuilder title, StringBuilder body)
                 {
                     this.outerInstance = outerInstance;
                     this.title = title;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialDocMaker.cs b/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialDocMaker.cs
index 8953a47..d7ae442 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialDocMaker.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Feeds/SpatialDocMaker.cs
@@ -68,7 +68,7 @@
         protected virtual SpatialStrategy MakeSpatialStrategy(Config config)
         {
             //A Map view of Config that prefixes keys with "spatial."
-            var configMap = new DictionaryAnonymousHelper(config);
+            var configMap = new DictionaryAnonymousClass(config);
 
             SpatialContext ctx = SpatialContextFactory.MakeSpatialContext(configMap /*, null*/); // LUCENENET TODO: What is this extra param?
 
@@ -77,10 +77,10 @@
             return MakeSpatialStrategy(config, configMap, ctx);
         }
 
-        private class DictionaryAnonymousHelper : Dictionary<string, string>
+        private class DictionaryAnonymousClass : Dictionary<string, string>
         {
             private readonly Config config;
-            public DictionaryAnonymousHelper(Config config)
+            public DictionaryAnonymousClass(Config config)
             {
                 this.config = config;
             }
@@ -96,7 +96,7 @@
             //A factory for the prefix tree grid
             SpatialPrefixTree grid = SpatialPrefixTreeFactory.MakeSPT(configMap, /*null,*/ ctx); // LUCENENET TODO: What is this extra param?
 
-            RecursivePrefixTreeStrategy strategy = new RecursivePrefixTreeStrategyAnonymousHelper(grid, SPATIAL_FIELD, config);
+            RecursivePrefixTreeStrategy strategy = new RecursivePrefixTreeStrategyAnonymousClass(grid, SPATIAL_FIELD, config);
 
             int prefixGridScanLevel = config.Get("query.spatial.prefixGridScanLevel", -4);
             if (prefixGridScanLevel < 0)
@@ -108,9 +108,9 @@
             return strategy;
         }
 
-        private class RecursivePrefixTreeStrategyAnonymousHelper : RecursivePrefixTreeStrategy
+        private class RecursivePrefixTreeStrategyAnonymousClass : RecursivePrefixTreeStrategy
         {
-            public RecursivePrefixTreeStrategyAnonymousHelper(SpatialPrefixTree grid, string fieldName, Config config)
+            public RecursivePrefixTreeStrategyAnonymousClass(SpatialPrefixTree grid, string fieldName, Config config)
                 : base(grid, fieldName)
             {
                 this.m_pointsOnly = config.Get("spatial.docPointsOnly", false);
@@ -142,17 +142,17 @@
             double plusMinus = config.Get(configKeyPrefix + "radiusDegreesRandPlusMinus", 0.0);
             bool bbox = config.Get(configKeyPrefix + "bbox", false);
 
-            return new ShapeConverterAnonymousHelper(spatialStrategy, radiusDegrees, plusMinus, bbox);
+            return new ShapeConverterAnonymousClass(spatialStrategy, radiusDegrees, plusMinus, bbox);
         }
 
-        private class ShapeConverterAnonymousHelper : IShapeConverter
+        private class ShapeConverterAnonymousClass : IShapeConverter
         {
             private readonly SpatialStrategy spatialStrategy;
             private readonly double radiusDegrees;
             private readonly double plusMinus;
             private readonly bool bbox;
 
-            public ShapeConverterAnonymousHelper(SpatialStrategy spatialStrategy, double radiusDegrees, double plusMinus, bool bbox)
+            public ShapeConverterAnonymousClass(SpatialStrategy spatialStrategy, double radiusDegrees, double plusMinus, bool bbox)
             {
                 this.spatialStrategy = spatialStrategy;
                 this.radiusDegrees = radiusDegrees;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/CreateIndexTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/CreateIndexTask.cs
index 53a68a5..af63cfa 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/CreateIndexTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/CreateIndexTask.cs
@@ -111,14 +111,7 @@
 
             string mergeScheduler = config.Get("merge.scheduler",
                                                      "Lucene.Net.Index.ConcurrentMergeScheduler, Lucene.Net");
-#if !FEATURE_CONCURRENTMERGESCHEDULER
-            // LUCENENET specific - hack to get our TaskMergeScheduler
-            // when a ConcurrentMergeScheduler is requested.
-            if (mergeScheduler.Contains(".ConcurrentMergeScheduler,"))
-            {
-                mergeScheduler = "Lucene.Net.Index.TaskMergeScheduler, Lucene.Net";
-            }
-#endif
+
             Type mergeSchedulerType = Type.GetType(mergeScheduler);
             if (mergeSchedulerType == null)
             {
@@ -141,15 +134,9 @@
 
                 if (mergeScheduler.Equals("Lucene.Net.Index.ConcurrentMergeScheduler", StringComparison.Ordinal))
                 {
-#if FEATURE_CONCURRENTMERGESCHEDULER
                     ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler)iwConf.MergeScheduler;
                     int maxThreadCount = config.Get("concurrent.merge.scheduler.max.thread.count", ConcurrentMergeScheduler.DEFAULT_MAX_THREAD_COUNT);
                     int maxMergeCount = config.Get("concurrent.merge.scheduler.max.merge.count", ConcurrentMergeScheduler.DEFAULT_MAX_MERGE_COUNT);
-#else
-                    TaskMergeScheduler cms = (TaskMergeScheduler)iwConf.MergeScheduler;
-                    int maxThreadCount = config.Get("concurrent.merge.scheduler.max.thread.count", 1);
-                    int maxMergeCount = config.Get("concurrent.merge.scheduler.max.merge.count", 2);
-#endif
                     cms.SetMaxMergesAndThreads(maxMergeCount, maxThreadCount);
                 }
             }
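This hunk drops the LUCENENET-specific fallback to `TaskMergeScheduler`, which was only needed on targets built without `FEATURE_CONCURRENTMERGESCHEDULER` (the define removed from Directory.Build.targets above); `ConcurrentMergeScheduler` is now compiled for all targets. A hedged sketch of the resulting configuration path, using only calls that appear in this task (the scheduler limits shown are illustrative, not values from this commit):

```
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Index;
using Lucene.Net.Util;

var analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48);
var iwConf = new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer);

// ConcurrentMergeScheduler is the upstream default; when it is the configured
// scheduler, its merge/thread limits can be capped as CreateIndexTask does.
if (iwConf.MergeScheduler is ConcurrentMergeScheduler cms)
{
    cms.SetMaxMergesAndThreads(6, 2); // (maxMergeCount, maxThreadCount) - illustrative limits
}
```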
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetHighlightTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetHighlightTask.cs
index 21b1619..19a5af5 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetHighlightTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetHighlightTask.cs
@@ -94,15 +94,15 @@
         {
             m_highlighter = new Highlighter(new SimpleHTMLFormatter(), new QueryScorer(q));
             m_highlighter.MaxDocCharsToAnalyze = m_maxDocCharsToAnalyze;
-            return new BenchmarkHighlighterAnonymousHelper(this, m_highlighter);
+            return new BenchmarkHighlighterAnonymousClass(this, m_highlighter);
         }
 
-        private class BenchmarkHighlighterAnonymousHelper : BenchmarkHighlighter
+        private class BenchmarkHighlighterAnonymousClass : BenchmarkHighlighter
         {
             private readonly SearchTravRetHighlightTask outerInstance;
             private readonly Highlighter highlighter;
 
-            public BenchmarkHighlighterAnonymousHelper(SearchTravRetHighlightTask outerInstance, Highlighter highlighter)
+            public BenchmarkHighlighterAnonymousClass(SearchTravRetHighlightTask outerInstance, Highlighter highlighter)
             {
                 this.outerInstance = outerInstance;
                 this.highlighter = highlighter;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetVectorHighlightTask.cs b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetVectorHighlightTask.cs
index 365c152..b898bc0 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetVectorHighlightTask.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Tasks/SearchTravRetVectorHighlightTask.cs
@@ -98,15 +98,15 @@
         {
             m_highlighter = new FastVectorHighlighter(false, false);
             Query myq = q;
-            return new BenchmarkHighlighterAnonymousHelper(this, m_highlighter, myq);
+            return new BenchmarkHighlighterAnonymousClass(this, m_highlighter, myq);
         }
 
-        private class BenchmarkHighlighterAnonymousHelper : BenchmarkHighlighter
+        private class BenchmarkHighlighterAnonymousClass : BenchmarkHighlighter
         {
             private readonly SearchTravRetVectorHighlightTask outerInstance;
             private readonly FastVectorHighlighter highlighter;
             private readonly Query myq;
-            public BenchmarkHighlighterAnonymousHelper(SearchTravRetVectorHighlightTask outerInstance, FastVectorHighlighter highlighter, Query myq)
+            public BenchmarkHighlighterAnonymousClass(SearchTravRetVectorHighlightTask outerInstance, FastVectorHighlighter highlighter, Query myq)
             {
                 this.outerInstance = outerInstance;
                 this.highlighter = highlighter;
diff --git a/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs b/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs
index 257c662..90a08fe 100644
--- a/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs
+++ b/src/Lucene.Net.Benchmark/ByTask/Utils/AnalyzerFactory.cs
@@ -51,14 +51,14 @@
 
         public Analyzer Create()
         {
-            return new AnalyzerAnonymousHelper(this);
+            return new AnalyzerAnonymousClass(this);
         }
 
-        private sealed class AnalyzerAnonymousHelper : Analyzer
+        private sealed class AnalyzerAnonymousClass : Analyzer
         {
             private readonly AnalyzerFactory outerInstance;
 
-            public AnalyzerAnonymousHelper(AnalyzerFactory outerInstance)
+            public AnalyzerAnonymousClass(AnalyzerFactory outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Benchmark/Quality/Utils/DocNameExtractor.cs b/src/Lucene.Net.Benchmark/Quality/Utils/DocNameExtractor.cs
index ee9bdec..395d2bf 100644
--- a/src/Lucene.Net.Benchmark/Quality/Utils/DocNameExtractor.cs
+++ b/src/Lucene.Net.Benchmark/Quality/Utils/DocNameExtractor.cs
@@ -49,17 +49,17 @@
         public virtual string DocName(IndexSearcher searcher, int docid)
         {
             IList<string> name = new List<string>();
-            searcher.IndexReader.Document(docid, new StoredFieldVisitorAnonymousHelper(this, name));
+            searcher.IndexReader.Document(docid, new StoredFieldVisitorAnonymousClass(this, name));
 
             return name.Count > 0 ? name[0] : null;
         }
 
-        private class StoredFieldVisitorAnonymousHelper : StoredFieldVisitor
+        private class StoredFieldVisitorAnonymousClass : StoredFieldVisitor
         {
             private readonly DocNameExtractor outerInstance;
             private readonly IList<string> name;
 
-            public StoredFieldVisitorAnonymousHelper(DocNameExtractor outerInstance, IList<string> name)
+            public StoredFieldVisitorAnonymousClass(DocNameExtractor outerInstance, IList<string> name)
             {
                 this.outerInstance = outerInstance;
                 this.name = name;
diff --git a/src/Lucene.Net.Classification/ClassificationResult.cs b/src/Lucene.Net.Classification/ClassificationResult.cs
index 8744efb..e90f178 100644
--- a/src/Lucene.Net.Classification/ClassificationResult.cs
+++ b/src/Lucene.Net.Classification/ClassificationResult.cs
@@ -1,4 +1,4 @@
-namespace Lucene.Net.Classification
+namespace Lucene.Net.Classification
 {
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -24,8 +24,8 @@
     public class ClassificationResult<T>
     {
 
-        private readonly T _assignedClass;
-        private readonly double _score;
+        private readonly T assignedClass;
+        private readonly double score;
 
         /// <summary>
         /// Constructor
@@ -34,19 +34,19 @@
         /// </summary>
         public ClassificationResult(T assignedClass, double score) 
         {
-            _assignedClass = assignedClass;
-            _score = score;
+            this.assignedClass = assignedClass;
+            this.score = score;
         }
 
         /// <summary>
         /// retrieve the result class
         /// @return a <typeparamref name="T"/> representing an assigned class
         /// </summary>
-        public virtual T AssignedClass => _assignedClass;
+        public virtual T AssignedClass => assignedClass;
 
         /// <summary>
         /// Gets a <see cref="double"/> representing a result score.
         /// </summary>
-        public virtual double Score => _score;
+        public virtual double Score => score;
     }
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Classification/KNearestNeighborClassifier.cs b/src/Lucene.Net.Classification/KNearestNeighborClassifier.cs
index 9f198e5..b6970bd 100644
--- a/src/Lucene.Net.Classification/KNearestNeighborClassifier.cs
+++ b/src/Lucene.Net.Classification/KNearestNeighborClassifier.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Analysis;
+using Lucene.Net.Analysis;
 using Lucene.Net.Index;
 using Lucene.Net.Queries.Mlt;
 using Lucene.Net.Search;
@@ -34,21 +34,21 @@
     public class KNearestNeighborClassifier : IClassifier<BytesRef>
     {
 
-        private MoreLikeThis _mlt;
-        private string[] _textFieldNames;
-        private string _classFieldName;
-        private IndexSearcher _indexSearcher;
-        private readonly int _k;
-        private Query _query;
+        private MoreLikeThis mlt;
+        private string[] textFieldNames;
+        private string classFieldName;
+        private IndexSearcher indexSearcher;
+        private readonly int k;
+        private Query query;
 
-        private readonly int _minDocsFreq; // LUCENENET: marked readonly
-        private readonly int _minTermFreq; // LUCENENET: marked readonly
+        private readonly int minDocsFreq; // LUCENENET: marked readonly
+        private readonly int minTermFreq; // LUCENENET: marked readonly
 
         /// <summary>Create a <see cref="IClassifier{T}"/> using kNN algorithm</summary>
         /// <param name="k">the number of neighbors to analyze as an <see cref="int"/></param>
         public KNearestNeighborClassifier(int k)
         {
-            _k = k;
+            this.k = k;
         }
 
         /// <summary>Create a <see cref="IClassifier{T}"/> using kNN algorithm</summary>
@@ -57,9 +57,9 @@
         /// <param name="minTermFreq">the minimum number of term frequency for MLT to be set with <see cref="MoreLikeThis.MinTermFreq"/></param>
         public KNearestNeighborClassifier(int k, int minDocsFreq, int minTermFreq)
         {
-            _k = k;
-            _minDocsFreq = minDocsFreq;
-            _minTermFreq = minTermFreq;
+            this.k = k;
+            this.minDocsFreq = minDocsFreq;
+            this.minTermFreq = minTermFreq;
         }
 
         /// <summary>
@@ -69,23 +69,23 @@
         /// <returns>a <see cref="ClassificationResult{BytesRef}"/> holding assigned class of type <see cref="BytesRef"/> and score</returns>
         public virtual ClassificationResult<BytesRef> AssignClass(string text)
         {
-            if (_mlt == null)
+            if (mlt == null)
             {
                 throw new IOException("You must first call Classifier#train");
             }
 
             BooleanQuery mltQuery = new BooleanQuery();
-            foreach (string textFieldName in _textFieldNames)
+            foreach (string textFieldName in textFieldNames)
             {
-                mltQuery.Add(new BooleanClause(_mlt.Like(new StringReader(text), textFieldName), Occur.SHOULD));
+                mltQuery.Add(new BooleanClause(mlt.Like(new StringReader(text), textFieldName), Occur.SHOULD));
             }
-            Query classFieldQuery = new WildcardQuery(new Term(_classFieldName, "*"));
+            Query classFieldQuery = new WildcardQuery(new Term(classFieldName, "*"));
             mltQuery.Add(new BooleanClause(classFieldQuery, Occur.MUST));
-            if (_query != null)
+            if (query != null)
             {
-                mltQuery.Add(_query, Occur.MUST);
+                mltQuery.Add(query, Occur.MUST);
             }
-            TopDocs topDocs = _indexSearcher.Search(mltQuery, _k);
+            TopDocs topDocs = indexSearcher.Search(mltQuery, k);
             return SelectClassFromNeighbors(topDocs);
         }
 
@@ -96,7 +96,7 @@
 
             foreach (ScoreDoc scoreDoc in topDocs.ScoreDocs)
             {
-                BytesRef cl = new BytesRef(_indexSearcher.Doc(scoreDoc.Doc).GetField(_classFieldName).GetStringValue());
+                BytesRef cl = new BytesRef(indexSearcher.Doc(scoreDoc.Doc).GetField(classFieldName).GetStringValue());
                 if (classCounts.TryGetValue(cl, out int value))
                 {
                     classCounts[cl] = value + 1;
@@ -117,7 +117,7 @@
                     assignedClass = (BytesRef)entry.Key.Clone();
                 }
             }
-            double score = max / (double) _k;
+            double score = max / (double) k;
             return new ClassificationResult<BytesRef>(assignedClass, score);
         }
 
@@ -152,21 +152,21 @@
         /// <param name="textFieldNames">the names of the fields to be used to compare documents</param>
         public virtual void Train(AtomicReader atomicReader, string[] textFieldNames, string classFieldName, Analyzer analyzer, Query query)
         {
-            _textFieldNames = textFieldNames;
-            _classFieldName = classFieldName;
-            _mlt = new MoreLikeThis(atomicReader);
-            _mlt.Analyzer = analyzer;
-            _mlt.FieldNames = _textFieldNames;
-            _indexSearcher = new IndexSearcher(atomicReader);
-            if (_minDocsFreq > 0)
+            this.textFieldNames = textFieldNames;
+            this.classFieldName = classFieldName;
+            mlt = new MoreLikeThis(atomicReader);
+            mlt.Analyzer = analyzer;
+            mlt.FieldNames = this.textFieldNames;
+            indexSearcher = new IndexSearcher(atomicReader);
+            if (minDocsFreq > 0)
             {
-                _mlt.MinDocFreq = _minDocsFreq;
+                mlt.MinDocFreq = minDocsFreq;
             }
-            if (_minTermFreq > 0)
+            if (minTermFreq > 0)
             {
-                _mlt.MinTermFreq = _minTermFreq;
+                mlt.MinTermFreq = minTermFreq;
             }
-            _query = query;
+            this.query = query;
         }
     }
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Classification/SimpleNaiveBayesClassifier.cs b/src/Lucene.Net.Classification/SimpleNaiveBayesClassifier.cs
index 98f60d5..9945efc 100644
--- a/src/Lucene.Net.Classification/SimpleNaiveBayesClassifier.cs
+++ b/src/Lucene.Net.Classification/SimpleNaiveBayesClassifier.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Analysis;
+using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
@@ -33,13 +33,13 @@
     /// </summary>
     public class SimpleNaiveBayesClassifier : IClassifier<BytesRef>
     {
-        private AtomicReader _atomicReader;
-        private string[] _textFieldNames;
-        private string _classFieldName;
-        private int _docsWithClassSize;
-        private Analyzer _analyzer;
-        private IndexSearcher _indexSearcher;
-        private Query _query;
+        private AtomicReader atomicReader;
+        private string[] textFieldNames;
+        private string classFieldName;
+        private int docsWithClassSize;
+        private Analyzer analyzer;
+        private IndexSearcher indexSearcher;
+        private Query query;
 
         /// <summary>
         /// Creates a new NaiveBayes classifier.
@@ -81,30 +81,30 @@
         /// <param name="textFieldNames">the names of the fields to be used to compare documents</param>
         public virtual void Train(AtomicReader atomicReader, string[] textFieldNames, string classFieldName, Analyzer analyzer, Query query)
         {
-            _atomicReader = atomicReader;
-            _indexSearcher = new IndexSearcher(_atomicReader);
-            _textFieldNames = textFieldNames;
-            _classFieldName = classFieldName;
-            _analyzer = analyzer;
-            _query = query;
-            _docsWithClassSize = CountDocsWithClass();
+            this.atomicReader = atomicReader;
+            indexSearcher = new IndexSearcher(this.atomicReader);
+            this.textFieldNames = textFieldNames;
+            this.classFieldName = classFieldName;
+            this.analyzer = analyzer;
+            this.query = query;
+            docsWithClassSize = CountDocsWithClass();
         }
 
         private int CountDocsWithClass() 
         {
-            int docCount = MultiFields.GetTerms(_atomicReader, _classFieldName).DocCount;
+            int docCount = MultiFields.GetTerms(atomicReader, classFieldName).DocCount;
             if (docCount == -1) 
             { // in case codec doesn't support getDocCount
                 TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
                 BooleanQuery q = new BooleanQuery
                 {
-                    new BooleanClause(new WildcardQuery(new Term(_classFieldName, WildcardQuery.WILDCARD_STRING.ToString())), Occur.MUST)
+                    new BooleanClause(new WildcardQuery(new Term(classFieldName, WildcardQuery.WILDCARD_STRING.ToString())), Occur.MUST)
                 };
-                if (_query != null) 
+                if (query != null) 
                 {
-                    q.Add(_query, Occur.MUST);
+                    q.Add(query, Occur.MUST);
                 }
-                _indexSearcher.Search(q, totalHitCountCollector);
+                indexSearcher.Search(q, totalHitCountCollector);
                 docCount = totalHitCountCollector.TotalHits;
             }
             return docCount;
@@ -113,8 +113,8 @@
         private string[] TokenizeDoc(string doc)
         {
             ICollection<string> result = new LinkedList<string>();
-            foreach (string textFieldName in _textFieldNames) {
-                TokenStream tokenStream = _analyzer.GetTokenStream(textFieldName, new StringReader(doc));
+            foreach (string textFieldName in textFieldNames) {
+                TokenStream tokenStream = analyzer.GetTokenStream(textFieldName, new StringReader(doc));
                 try 
                 {
                     ICharTermAttribute charTermAttribute = tokenStream.AddAttribute<ICharTermAttribute>();
@@ -142,14 +142,14 @@
         /// <returns>a <see cref="ClassificationResult{BytesRef}"/> holding assigned class of type <see cref="BytesRef"/> and score</returns>
         public virtual ClassificationResult<BytesRef> AssignClass(string inputDocument) 
         {
-            if (_atomicReader == null) 
+            if (atomicReader == null) 
             {
                 throw new IOException("You must first call Classifier#train");
             }
             double max = - double.MaxValue;
             BytesRef foundClass = new BytesRef();
 
-            Terms terms = MultiFields.GetTerms(_atomicReader, _classFieldName);
+            Terms terms = MultiFields.GetTerms(atomicReader, classFieldName);
             TermsEnum termsEnum = terms.GetEnumerator();
             BytesRef next;
             string[] tokenizedDoc = TokenizeDoc(inputDocument);
@@ -181,7 +181,7 @@
                 double num = hits + 1; // +1 is added because of add 1 smoothing
 
                 // den : for the whole dictionary, count the no of times a word appears in documents of class c (+|V|)
-                double den = GetTextTermFreqForClass(c) + _docsWithClassSize;
+                double den = GetTextTermFreqForClass(c) + docsWithClassSize;
 
                 // P(w|c) = num/den
                 double wordProbability = num / den;
@@ -195,13 +195,13 @@
         private double GetTextTermFreqForClass(BytesRef c)
         {
             double avgNumberOfUniqueTerms = 0;
-            foreach (string textFieldName in _textFieldNames) 
+            foreach (string textFieldName in textFieldNames) 
             {
-                Terms terms = MultiFields.GetTerms(_atomicReader, textFieldName);
+                Terms terms = MultiFields.GetTerms(atomicReader, textFieldName);
                 long numPostings = terms.SumDocFreq; // number of term/doc pairs
                 avgNumberOfUniqueTerms += numPostings / (double) terms.DocCount; // avg # of unique terms per doc
             }
-            int docsWithC = _atomicReader.DocFreq(new Term(_classFieldName, c));
+            int docsWithC = atomicReader.DocFreq(new Term(classFieldName, c));
             return avgNumberOfUniqueTerms * docsWithC; // avg # of unique terms in text fields per doc * # docs with c
         }
 
@@ -209,29 +209,29 @@
         {
             BooleanQuery booleanQuery = new BooleanQuery();
             BooleanQuery subQuery = new BooleanQuery();
-            foreach (string textFieldName in _textFieldNames) 
+            foreach (string textFieldName in textFieldNames) 
             {
                 subQuery.Add(new BooleanClause(new TermQuery(new Term(textFieldName, word)), Occur.SHOULD));
             }
             booleanQuery.Add(new BooleanClause(subQuery, Occur.MUST));
-            booleanQuery.Add(new BooleanClause(new TermQuery(new Term(_classFieldName, c)), Occur.MUST));
-            if (_query != null) 
+            booleanQuery.Add(new BooleanClause(new TermQuery(new Term(classFieldName, c)), Occur.MUST));
+            if (query != null) 
             {
-                booleanQuery.Add(_query, Occur.MUST);
+                booleanQuery.Add(query, Occur.MUST);
             }
             TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
-            _indexSearcher.Search(booleanQuery, totalHitCountCollector);
+            indexSearcher.Search(booleanQuery, totalHitCountCollector);
             return totalHitCountCollector.TotalHits;
         }
 
         private double CalculateLogPrior(BytesRef currentClass)
         {
-            return Math.Log((double) DocCount(currentClass)) - Math.Log(_docsWithClassSize);
+            return Math.Log((double) DocCount(currentClass)) - Math.Log(docsWithClassSize);
         }
 
         private int DocCount(BytesRef countedClass) 
         {
-            return _atomicReader.DocFreq(new Term(_classFieldName, countedClass));
+            return atomicReader.DocFreq(new Term(classFieldName, countedClass));
         }
     }   
 }
\ No newline at end of file
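
The SimpleNaiveBayesClassifier hunks above feed the usual naive Bayes score: CalculateLogPrior contributes Math.Log(DocCount(c)) - Math.Log(docsWithClassSize), and each word contributes the log of an add-1 smoothed probability, num/den = (hits + 1) / (termFreqForClass(c) + docsWithClassSize). A minimal standalone sketch of that arithmetic, using illustrative names rather than the Lucene.NET API:

    using System;

    static class NaiveBayesScoreSketch
    {
        // hitsPerWord[i]   : docs of class c that contain word i ("hits" in the hunk)
        // termFreqForClass : GetTextTermFreqForClass(c)
        // docsWithClass    : DocCount(c); docsWithClassSize : docs that carry any class label
        public static double LogScore(int[] hitsPerWord, double termFreqForClass,
                                      int docsWithClass, int docsWithClassSize)
        {
            double logPrior = Math.Log(docsWithClass) - Math.Log(docsWithClassSize);
            double logLikelihood = 0;
            foreach (int hits in hitsPerWord)
            {
                double num = hits + 1;                              // add-1 smoothing
                double den = termFreqForClass + docsWithClassSize;  // denominator as in the hunk
                logLikelihood += Math.Log(num / den);               // log P(w|c)
            }
            return logPrior + logLikelihood;
        }
    }

The class with the highest such score wins, which is what the max/foundClass bookkeeping in AssignClass tracks.
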
diff --git a/src/Lucene.Net.Classification/Utils/DatasetSplitter.cs b/src/Lucene.Net.Classification/Utils/DatasetSplitter.cs
index 702e363..c1a09e2 100644
--- a/src/Lucene.Net.Classification/Utils/DatasetSplitter.cs
+++ b/src/Lucene.Net.Classification/Utils/DatasetSplitter.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Analysis;
+using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
@@ -33,8 +33,8 @@
     public class DatasetSplitter
     {
 
-        private readonly double _crossValidationRatio;
-        private readonly double _testRatio;
+        private readonly double crossValidationRatio;
+        private readonly double testRatio;
 
         /// <summary>
         /// Create a <see cref="DatasetSplitter"/> by giving test and cross validation IDXs sizes
@@ -43,8 +43,8 @@
         /// <param name="crossValidationRatio">the ratio of the original index to be used for the c.v. IDX as a <see cref="double"/> between 0.0 and 1.0</param>
         public DatasetSplitter(double testRatio, double crossValidationRatio)
         {
-            this._crossValidationRatio = crossValidationRatio;
-            this._testRatio = testRatio;
+            this.crossValidationRatio = crossValidationRatio;
+            this.testRatio = testRatio;
         }
 
         /// <summary>
@@ -127,11 +127,11 @@
                     }
 
                     // add it to one of the IDXs
-                    if (b % 2 == 0 && testWriter.MaxDoc < size * _testRatio)
+                    if (b % 2 == 0 && testWriter.MaxDoc < size * testRatio)
                     {
                         testWriter.AddDocument(doc);
                     }
-                    else if (cvWriter.MaxDoc < size * _crossValidationRatio)
+                    else if (cvWriter.MaxDoc < size * crossValidationRatio)
                     {
                         cvWriter.AddDocument(doc);
                     }
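
The DatasetSplitter change only renames the ratio fields, but those ratios bound how many documents land in each output index: a document goes to the test IDX while testWriter.MaxDoc < size * testRatio, to the cross-validation IDX while cvWriter.MaxDoc < size * crossValidationRatio, and to the training IDX otherwise. A quick arithmetic check with assumed values (illustrative only, upper bounds implied by DatasetSplitter(0.2, 0.1) on a 1,000-doc index):

    int size = 1000;
    double testRatio = 0.2;              // ctor arg "testRatio"
    double crossValidationRatio = 0.1;   // ctor arg "crossValidationRatio"
    int maxTestDocs = (int)(size * testRatio);             // at most 200 docs in the test IDX
    int maxCvDocs   = (int)(size * crossValidationRatio);  // at most 100 docs in the c.v. IDX
    int restForTraining = size - maxTestDocs - maxCvDocs;  // the rest (here 700) stay in the training IDX
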
diff --git a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
index 4e36e23..361f11e 100644
--- a/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
+++ b/src/Lucene.Net.Codecs/BlockTerms/FixedGapTermsIndexReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
@@ -195,7 +196,7 @@
 
                 while (hi >= lo)
                 {
-                    int mid = (int)(((uint)(lo + hi)) >> 1);
+                    int mid = (lo + hi).TripleShift(1);
 
                     long offset2 = fieldIndex.termOffsets.Get(mid);
                     int length2 = (int)(fieldIndex.termOffsets.Get(1 + mid) - offset2);
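
This commit repeatedly swaps the unsigned-shift idiom (int)((uint)(lo + hi) >> 1) for J2N's TripleShift extension (the C# counterpart of Java's >>> operator), adding using J2N.Numerics; where needed. Both spellings compute the same overflow-safe binary-search midpoint; a small equivalence sketch, assuming only that the J2N package referenced by the commit is available:

    using System;
    using J2N.Numerics; // TripleShift extension, as added at the top of the files above

    class MidpointSketch
    {
        static void Main()
        {
            int lo = 2_000_000_000, hi = 2_100_000_000;              // lo + hi wraps past int.MaxValue
            int viaCasts = unchecked((int)((uint)(lo + hi) >> 1));   // idiom removed on the '-' lines
            int viaShift = unchecked(lo + hi).TripleShift(1);        // idiom added on the '+' lines
            Console.WriteLine(viaCasts == viaShift);                 // True; both equal 2,050,000,000
        }
    }
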
diff --git a/src/Lucene.Net.Codecs/DiskDV/DiskDocValuesFormat.cs b/src/Lucene.Net.Codecs/DiskDV/DiskDocValuesFormat.cs
index a5a5949..57a37b5 100644
--- a/src/Lucene.Net.Codecs/DiskDV/DiskDocValuesFormat.cs
+++ b/src/Lucene.Net.Codecs/DiskDV/DiskDocValuesFormat.cs
@@ -38,12 +38,12 @@
 
         public override DocValuesConsumer FieldsConsumer(SegmentWriteState state)
         {
-            return new Lucene45DocValuesConsumerAnonymousHelper(state);
+            return new Lucene45DocValuesConsumerAnonymousClass(state);
         }
 
-        private class Lucene45DocValuesConsumerAnonymousHelper : Lucene45DocValuesConsumer
+        private class Lucene45DocValuesConsumerAnonymousClass : Lucene45DocValuesConsumer
         {
-            public Lucene45DocValuesConsumerAnonymousHelper(SegmentWriteState state)
+            public Lucene45DocValuesConsumerAnonymousClass(SegmentWriteState state)
                 : base(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION)
             {
             }
diff --git a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs
index 357ef3d..ab70dc0 100644
--- a/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs
+++ b/src/Lucene.Net.Codecs/IntBlock/FixedIntBlockIndexInput.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Codecs.Sep;
+using J2N.Numerics;
+using Lucene.Net.Codecs.Sep;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
 using System.Diagnostics;
@@ -164,12 +165,12 @@
                     if ((uptoDelta & 1) == 1)
                     {
                         // same block
-                        upto += (int)((uint)uptoDelta >> 1);
+                        upto += uptoDelta.TripleShift(1);
                     }
                     else
                     {
                         // new block
-                        upto = (int)((uint)uptoDelta >> 1);
+                        upto = uptoDelta.TripleShift(1);
                         fp += indexIn.ReadVInt64();
                     }
                 }
diff --git a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs
index 23b3770..af67ee5 100644
--- a/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs
+++ b/src/Lucene.Net.Codecs/IntBlock/VariableIntBlockIndexInput.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Codecs.Sep;
+using J2N.Numerics;
+using Lucene.Net.Codecs.Sep;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
@@ -193,12 +194,12 @@
                     if ((uptoDelta & 1) == 1)
                     {
                         // same block
-                        upto += (int)((uint)uptoDelta >> 1);
+                        upto += uptoDelta.TripleShift(1);
                     }
                     else
                     {
                         // new block
-                        upto = (int)((uint)uptoDelta >> 1);
+                        upto = uptoDelta.TripleShift(1);
                         fp += indexIn.ReadVInt64();
                     }
                 }
diff --git a/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs b/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs
index 66538ee..ee63af2 100644
--- a/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs
+++ b/src/Lucene.Net.Codecs/Memory/DirectDocValuesConsumer.cs
@@ -139,7 +139,7 @@
                 switch (byteWidth)
                 {
                     case 1:
-                        data.WriteByte((byte)(sbyte) v);
+                        data.WriteByte((byte) v);
                         break;
                     case 2:
                         data.WriteInt16((short) v);
@@ -295,7 +295,7 @@
             // First write docToOrdCounts, except we "aggregate" the
             // counts so they turn into addresses, and add a final
             // value = the total aggregate:
-            AddNumericFieldValues(field, new EnumerableAnonymousInnerClassHelper(docToOrdCount));
+            AddNumericFieldValues(field, new EnumerableAnonymousClass(docToOrdCount));
 
             // Write ordinals for all docs, appended into one big
             // numerics:
@@ -305,11 +305,11 @@
             AddBinaryFieldValues(field, values);
         }
 
-        private class EnumerableAnonymousInnerClassHelper : IEnumerable<long?>
+        private class EnumerableAnonymousClass : IEnumerable<long?>
         {
             private readonly IEnumerable<long?> _docToOrdCount;
 
-            public EnumerableAnonymousInnerClassHelper(IEnumerable<long?> docToOrdCount)
+            public EnumerableAnonymousClass(IEnumerable<long?> docToOrdCount)
             {
                 _docToOrdCount = docToOrdCount;
             }
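
The DirectDocValuesConsumer hunk drops the intermediate sbyte cast from data.WriteByte((byte)(sbyte) v) (SimpleTextDocValuesWriter gets the same cleanup further down). With v a long, both forms keep only the low 8 bits in an unchecked context, so the byte written is identical; a quick check, purely illustrative:

    long v = -3;                                     // any value; a negative one shows the narrowing
    byte viaDoubleCast = unchecked((byte)(sbyte)v);  // old form from the '-' line
    byte viaSingleCast = unchecked((byte)v);         // new form from the '+' line
    System.Console.WriteLine(viaDoubleCast == viaSingleCast && viaSingleCast == 0xFD); // True
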
diff --git a/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs b/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs
index 9028f83..41eaef9 100644
--- a/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs
+++ b/src/Lucene.Net.Codecs/Memory/DirectDocValuesProducer.cs
@@ -234,7 +234,7 @@
                         data.ReadBytes(values, 0, entry.count);
                         ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
                         // LUCENENET: IMPORTANT - some bytes are negative here, so we need to pass as sbyte
-                        return new NumericDocValuesAnonymousInnerClassHelper((sbyte[])(Array)values);
+                        return new NumericDocValuesAnonymousClass((sbyte[])(Array)values);
                     }
 
                 case 2:
@@ -245,7 +245,7 @@
                             values[i] = data.ReadInt16();
                         }
                         ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
-                        return new NumericDocValuesAnonymousInnerClassHelper2(values);
+                        return new NumericDocValuesAnonymousClass2(values);
                     }
 
                 case 4:
@@ -256,7 +256,7 @@
                             values[i] = data.ReadInt32();
                         }
                         ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
-                        return new NumericDocValuesAnonymousInnerClassHelper3(values);
+                        return new NumericDocValuesAnonymousClass3(values);
                     }
 
                 case 8:
@@ -267,7 +267,7 @@
                             values[i] = data.ReadInt64();
                         }
                         ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
-                        return new NumericDocValuesAnonymousInnerClassHelper4(values);
+                        return new NumericDocValuesAnonymousClass4(values);
                     }
 
                 default:
@@ -275,11 +275,11 @@
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues
+        private class NumericDocValuesAnonymousClass : NumericDocValues
         {
             private readonly sbyte[] values;
 
-            public NumericDocValuesAnonymousInnerClassHelper(sbyte[] values)
+            public NumericDocValuesAnonymousClass(sbyte[] values)
             {
                 this.values = values;
             }
@@ -290,11 +290,11 @@
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper2 : NumericDocValues
+        private class NumericDocValuesAnonymousClass2 : NumericDocValues
         {
             private readonly short[] values;
 
-            public NumericDocValuesAnonymousInnerClassHelper2(short[] values)
+            public NumericDocValuesAnonymousClass2(short[] values)
             {
                 this.values = values;
             }
@@ -305,11 +305,11 @@
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper3 : NumericDocValues
+        private class NumericDocValuesAnonymousClass3 : NumericDocValues
         {
             private readonly int[] values;
 
-            public NumericDocValuesAnonymousInnerClassHelper3(int[] values)
+            public NumericDocValuesAnonymousClass3(int[] values)
             {
                 this.values = values;
             }
@@ -320,11 +320,11 @@
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper4 : NumericDocValues
+        private class NumericDocValuesAnonymousClass4 : NumericDocValues
         {
             private readonly long[] values;
 
-            public NumericDocValuesAnonymousInnerClassHelper4(long[] values)
+            public NumericDocValuesAnonymousClass4(long[] values)
             {
                 this.values = values;
             }
@@ -365,15 +365,15 @@
 
             ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(bytes) + RamUsageEstimator.SizeOf(address));
 
-            return new BinaryDocValuesAnonymousInnerClassHelper(bytes, address);
+            return new BinaryDocValuesAnonymousClass(bytes, address);
         }
 
-        private class BinaryDocValuesAnonymousInnerClassHelper : BinaryDocValues
+        private class BinaryDocValuesAnonymousClass : BinaryDocValues
         {
             private readonly byte[] bytes;
             private readonly int[] address;
 
-            public BinaryDocValuesAnonymousInnerClassHelper(byte[] bytes, int[] address)
+            public BinaryDocValuesAnonymousClass(byte[] bytes, int[] address)
             {
                 this.bytes = bytes;
                 this.address = address;
@@ -407,16 +407,16 @@
             NumericDocValues docToOrd = LoadNumeric(entry.docToOrd);
             BinaryDocValues values = LoadBinary(entry.values);
 
-            return new SortedDocValuesAnonymousInnerClassHelper(entry, docToOrd, values);
+            return new SortedDocValuesAnonymousClass(entry, docToOrd, values);
         }
 
-        private class SortedDocValuesAnonymousInnerClassHelper : SortedDocValues
+        private class SortedDocValuesAnonymousClass : SortedDocValues
         {
             private readonly SortedEntry entry;
             private readonly NumericDocValues docToOrd;
             private readonly BinaryDocValues values;
 
-            public SortedDocValuesAnonymousInnerClassHelper(SortedEntry entry, NumericDocValues docToOrd, BinaryDocValues values)
+            public SortedDocValuesAnonymousClass(SortedEntry entry, NumericDocValues docToOrd, BinaryDocValues values)
             {
                 this.entry = entry;
                 this.docToOrd = docToOrd;
@@ -458,18 +458,18 @@
                 var values = instance.values;
 
                 // Must make a new instance since the iterator has state:
-                return new RandomAccessOrdsAnonymousInnerClassHelper(entry, docToOrdAddress, ords, values);
+                return new RandomAccessOrdsAnonymousClass(entry, docToOrdAddress, ords, values);
             }
         }
 
-        private class RandomAccessOrdsAnonymousInnerClassHelper : RandomAccessOrds
+        private class RandomAccessOrdsAnonymousClass : RandomAccessOrds
         {
             private readonly SortedSetEntry entry;
             private readonly NumericDocValues docToOrdAddress;
             private readonly NumericDocValues ords;
             private readonly BinaryDocValues values;
 
-            public RandomAccessOrdsAnonymousInnerClassHelper(SortedSetEntry entry, NumericDocValues docToOrdAddress, NumericDocValues ords, BinaryDocValues values)
+            public RandomAccessOrdsAnonymousClass(SortedSetEntry entry, NumericDocValues docToOrdAddress, NumericDocValues ords, BinaryDocValues values)
             {
                 this.entry = entry;
                 this.docToOrdAddress = docToOrdAddress;
diff --git a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
index c31aa8e..3402074 100644
--- a/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
+++ b/src/Lucene.Net.Codecs/Memory/DirectPostingsFormat.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+using J2N.Numerics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Support;
 using System;
@@ -903,7 +904,7 @@
 
                     while (low <= high)
                     {
-                        int mid = (int) ((uint) (low + high) >> 1);
+                        int mid = (low + high).TripleShift(1);
                         int cmp = outerInstance.Compare(mid, term);
                         if (cmp < 0)
                         {
@@ -1479,7 +1480,7 @@
                                     skipUpto = 0;
                                     goto nextTermContinue;
                                 }
-                                int mid = (int)((uint)(low + high) >> 1);
+                                int mid = (low + high).TripleShift(1);
                                 int cmp = (outerInstance.termBytes[outerInstance.termOffsets[mid] + stateUpto] & 0xFF) -
                                           targetLabel;
                                 // if (DEBUG) {
@@ -2336,7 +2337,7 @@
                         break;
                     }
 
-                    int mid = (int) ((uint) (low + high) >> 1);
+                    int mid = (low + high).TripleShift(1);
                     int cmp = docIDs[mid] - target;
                     //System.out.println("    bsearch low=" + low + " high=" + high+ ": docIDs[" + mid + "]=" + docIDs[mid]);
 
@@ -2542,7 +2543,7 @@
                         break;
                     }
 
-                    int mid = (int) ((uint) (low + high) >> 1);
+                    int mid = (low + high).TripleShift(1);
                     int cmp = docIDs[mid] - target;
                     //System.out.println("    bsearch low=" + low + " high=" + high+ ": docIDs[" + mid + "]=" + docIDs[mid]);
 
diff --git a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs
index b7b19d3..e361514 100644
--- a/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs
+++ b/src/Lucene.Net.Codecs/Memory/FSTOrdTermsReader.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+using J2N.Numerics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
@@ -366,7 +367,7 @@
                         int code = statsReader.ReadVInt32();
                         if (outerInstance.HasFreqs)
                         {
-                            docFreq[i] = ((int)((uint)code >> 1));
+                            docFreq[i] = code.TripleShift(1);
                             if ((code & 1) == 1)
                             {
                                 totalTermFreq[i] = docFreq[i];
diff --git a/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs b/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs
index f072365..570858a 100644
--- a/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs
+++ b/src/Lucene.Net.Codecs/Memory/FSTTermOutputs.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+using J2N.Numerics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System.Diagnostics;
 
@@ -309,7 +310,7 @@
             int bit0 = bits & 1;
             int bit1 = bits & 2;
             int bit2 = bits & 4;
-            var bytesSize = ((int) ((uint) bits >> 3));
+            var bytesSize = bits.TripleShift(3);
             if (bit1 > 0 && bytesSize == 0) // determine extra length
             {
                 bytesSize = input.ReadVInt32();
@@ -331,7 +332,7 @@
                 int code = input.ReadVInt32();
                 if (_hasPos)
                 {
-                    totalTermFreq = docFreq = (int) ((uint) code >> 1);
+                    totalTermFreq = docFreq = code.TripleShift(1);
                     if ((code & 1) == 0)
                     {
                         totalTermFreq += input.ReadVInt64();
diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs
index 5776bcf..bd67a5b 100644
--- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs
+++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesConsumer.cs
@@ -403,18 +403,18 @@
             IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
         {
             // write the ordinals as a binary field
-            AddBinaryField(field, new IterableAnonymousInnerClassHelper(docToOrdCount, ords));
+            AddBinaryField(field, new IterableAnonymousClass(docToOrdCount, ords));
 
             // write the values as FST
             WriteFST(field, values);
         }
 
-        private class IterableAnonymousInnerClassHelper : IEnumerable<BytesRef>
+        private class IterableAnonymousClass : IEnumerable<BytesRef>
         {
             private readonly IEnumerable<long?> _docToOrdCount;
             private readonly IEnumerable<long?> _ords;
 
-            public IterableAnonymousInnerClassHelper(IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
+            public IterableAnonymousClass(IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
             {
                 _docToOrdCount = docToOrdCount;
                 _ords = ords;
diff --git a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs
index a1c2618..82aaaf5 100644
--- a/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs
+++ b/src/Lucene.Net.Codecs/Memory/MemoryDocValuesProducer.cs
@@ -251,7 +251,7 @@
                     var ordsReader = PackedInt32s.GetReaderNoHeader(data, PackedInt32s.Format.ById(formatID),
                         entry.packedIntsVersion, maxDoc, bitsPerValue);
                     ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(decode) + ordsReader.RamBytesUsed());
-                    return new NumericDocValuesAnonymousInnerClassHelper(decode, ordsReader);
+                    return new NumericDocValuesAnonymousClass(decode, ordsReader);
                 case DELTA_COMPRESSED:
                     int blockSize = data.ReadVInt32();
                     var reader = new BlockPackedReader(data, entry.packedIntsVersion, blockSize, maxDoc,
@@ -263,7 +263,7 @@
                     data.ReadBytes(bytes, 0, bytes.Length);
                     ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(bytes));
                     // LUCENENET: IMPORTANT - some bytes are negative here, so we need to pass as sbyte
-                    return new NumericDocValuesAnonymousInnerClassHelper2((sbyte[])(Array)bytes);
+                    return new NumericDocValuesAnonymousClass2((sbyte[])(Array)bytes);
                 case GCD_COMPRESSED:
                     long min = data.ReadInt64();
                     long mult = data.ReadInt64();
@@ -271,18 +271,18 @@
                     var quotientReader = new BlockPackedReader(data, entry.packedIntsVersion,
                         quotientBlockSize, maxDoc, false);
                     ramBytesUsed.AddAndGet(quotientReader.RamBytesUsed());
-                    return new NumericDocValuesAnonymousInnerClassHelper3(min, mult, quotientReader);
+                    return new NumericDocValuesAnonymousClass3(min, mult, quotientReader);
                 default:
                     throw new InvalidOperationException();
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues
+        private class NumericDocValuesAnonymousClass : NumericDocValues
         {
             private readonly long[] decode;
             private readonly PackedInt32s.Reader ordsReader;
 
-            public NumericDocValuesAnonymousInnerClassHelper(long[] decode, PackedInt32s.Reader ordsReader)
+            public NumericDocValuesAnonymousClass(long[] decode, PackedInt32s.Reader ordsReader)
             {
                 this.decode = decode;
                 this.ordsReader = ordsReader;
@@ -294,11 +294,11 @@
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper2 : NumericDocValues
+        private class NumericDocValuesAnonymousClass2 : NumericDocValues
         {
             private readonly sbyte[] bytes;
 
-            public NumericDocValuesAnonymousInnerClassHelper2(sbyte[] bytes)
+            public NumericDocValuesAnonymousClass2(sbyte[] bytes)
             {
                 this.bytes = bytes;
             }
@@ -309,13 +309,13 @@
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper3 : NumericDocValues
+        private class NumericDocValuesAnonymousClass3 : NumericDocValues
         {
             private readonly long min;
             private readonly long mult;
             private readonly BlockPackedReader quotientReader;
 
-            public NumericDocValuesAnonymousInnerClassHelper3(long min, long mult,
+            public NumericDocValuesAnonymousClass3(long min, long mult,
                 BlockPackedReader quotientReader)
             {
                 this.min = min;
@@ -353,7 +353,7 @@
             {
                 int fixedLength = entry.minLength;
                 ramBytesUsed.AddAndGet(bytes.RamBytesUsed());
-                return new BinaryDocValuesAnonymousInnerClassHelper(bytesReader, fixedLength);
+                return new BinaryDocValuesAnonymousClass(bytesReader, fixedLength);
             }
             else
             {
@@ -361,16 +361,16 @@
                 var addresses = new MonotonicBlockPackedReader(data, entry.packedIntsVersion,
                     entry.blockSize, maxDoc, false);
                 ramBytesUsed.AddAndGet(bytes.RamBytesUsed() + addresses.RamBytesUsed());
-                return new BinaryDocValuesAnonymousInnerClassHelper2(bytesReader, addresses);
+                return new BinaryDocValuesAnonymousClass2(bytesReader, addresses);
             }
         }
 
-        private class BinaryDocValuesAnonymousInnerClassHelper : BinaryDocValues
+        private class BinaryDocValuesAnonymousClass : BinaryDocValues
         {
             private readonly PagedBytes.Reader bytesReader;
             private readonly int fixedLength;
 
-            public BinaryDocValuesAnonymousInnerClassHelper(PagedBytes.Reader bytesReader, int fixedLength)
+            public BinaryDocValuesAnonymousClass(PagedBytes.Reader bytesReader, int fixedLength)
             {
                 this.bytesReader = bytesReader;
                 this.fixedLength = fixedLength;
@@ -382,12 +382,12 @@
             }
         }
 
-        private class BinaryDocValuesAnonymousInnerClassHelper2 : BinaryDocValues
+        private class BinaryDocValuesAnonymousClass2 : BinaryDocValues
         {
             private readonly PagedBytes.Reader bytesReader;
             private readonly MonotonicBlockPackedReader addresses;
 
-            public BinaryDocValuesAnonymousInnerClassHelper2(PagedBytes.Reader bytesReader, MonotonicBlockPackedReader addresses)
+            public BinaryDocValuesAnonymousClass2(PagedBytes.Reader bytesReader, MonotonicBlockPackedReader addresses)
             {
                 this.bytesReader = bytesReader;
                 this.addresses = addresses;
@@ -429,11 +429,11 @@
             var scratchInts = new Int32sRef();
             var fstEnum = new BytesRefFSTEnum<long?>(fst);
 
-            return new SortedDocValuesAnonymousInnerClassHelper(entry, docToOrd, fst, @in, firstArc, scratchArc,
+            return new SortedDocValuesAnonymousClass(entry, docToOrd, fst, @in, firstArc, scratchArc,
                 scratchInts, fstEnum);
         }
 
-        private class SortedDocValuesAnonymousInnerClassHelper : SortedDocValues
+        private class SortedDocValuesAnonymousClass : SortedDocValues
         {
             private readonly MemoryDocValuesProducer.FSTEntry entry;
             private readonly NumericDocValues docToOrd;
@@ -444,7 +444,7 @@
             private readonly Int32sRef scratchInts;
             private readonly BytesRefFSTEnum<long?> fstEnum;
 
-            public SortedDocValuesAnonymousInnerClassHelper(FSTEntry fstEntry,
+            public SortedDocValuesAnonymousClass(FSTEntry fstEntry,
                 NumericDocValues numericDocValues, FST<long?> fst1, FST.BytesReader @in, FST.Arc<long?> arc, FST.Arc<long?> scratchArc1,
                 Int32sRef intsRef, BytesRefFSTEnum<long?> bytesRefFstEnum)
             {
@@ -542,11 +542,11 @@
             var fstEnum = new BytesRefFSTEnum<long?>(fst);
             var @ref = new BytesRef();
             var input = new ByteArrayDataInput();
-            return new SortedSetDocValuesAnonymousInnerClassHelper(entry, docToOrds, fst, @in, firstArc,
+            return new SortedSetDocValuesAnonymousClass(entry, docToOrds, fst, @in, firstArc,
                 scratchArc, scratchInts, fstEnum, @ref, input);
         }
 
-        private class SortedSetDocValuesAnonymousInnerClassHelper : SortedSetDocValues
+        private class SortedSetDocValuesAnonymousClass : SortedSetDocValues
         {
             private readonly MemoryDocValuesProducer.FSTEntry entry;
             private readonly BinaryDocValues docToOrds;
@@ -561,7 +561,7 @@
 
             private long currentOrd;
 
-            public SortedSetDocValuesAnonymousInnerClassHelper(FSTEntry fstEntry, BinaryDocValues binaryDocValues, FST<long?> fst1,
+            public SortedSetDocValuesAnonymousClass(FSTEntry fstEntry, BinaryDocValues binaryDocValues, FST<long?> fst1,
                 FST.BytesReader @in, FST.Arc<long?> arc, FST.Arc<long?> scratchArc1, Int32sRef intsRef, BytesRefFSTEnum<long?> bytesRefFstEnum,
                 BytesRef @ref, ByteArrayDataInput byteArrayDataInput)
             {
diff --git a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs
index a53cfb4..9d51b0e 100644
--- a/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs
+++ b/src/Lucene.Net.Codecs/Memory/MemoryPostingsFormat.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+using J2N.Numerics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Util.Fst;
 using System;
@@ -334,16 +335,16 @@
                 }
             }
 
-            return new FieldsConsumerAnonymousInnerClassHelper(this, @out);
+            return new FieldsConsumerAnonymousClass(this, @out);
         }
 
-        private class FieldsConsumerAnonymousInnerClassHelper : FieldsConsumer
+        private class FieldsConsumerAnonymousClass : FieldsConsumer
         {
             private readonly MemoryPostingsFormat outerInstance;
 
             private readonly IndexOutput @out;
 
-            public FieldsConsumerAnonymousInnerClassHelper(MemoryPostingsFormat outerInstance, IndexOutput @out)
+            public FieldsConsumerAnonymousClass(MemoryPostingsFormat outerInstance, IndexOutput @out)
             {
                 this.outerInstance = outerInstance;
                 this.@out = @out;
@@ -438,7 +439,7 @@
                     else
                     {
                         int code = @in.ReadVInt32();
-                        accum += (int)((uint)code >> 1);
+                        accum += code.TripleShift(1);
                         //System.out.println("  docID=" + accum + " code=" + code);
                         if ((code & 1) != 0)
                         {
@@ -599,7 +600,7 @@
                     docUpto++;
 
                     int code = @in.ReadVInt32();
-                    accum += (int)((uint)code >> 1);
+                    accum += code.TripleShift(1);
                     if ((code & 1) != 0)
                     {
                         freq = 1;
@@ -665,7 +666,7 @@
                 else
                 {
                     int code = @in.ReadVInt32();
-                    pos += (int)((uint)code >> 1);
+                    pos += code.TripleShift(1);
                     if ((code & 1) != 0)
                     {
                         payloadLength = @in.ReadVInt32();
@@ -683,7 +684,7 @@
                         // new offset length
                         offsetLength = @in.ReadVInt32();
                     }
-                    startOffset += (int)((uint)offsetCode >> 1);
+                    startOffset += offsetCode.TripleShift(1);
                 }
 
                 if (storePayloads)
@@ -975,14 +976,14 @@
                 @in.Dispose();
             }
 
-            return new FieldsProducerAnonymousInnerClassHelper(fields);
+            return new FieldsProducerAnonymousClass(fields);
         }
 
-        private class FieldsProducerAnonymousInnerClassHelper : FieldsProducer
+        private class FieldsProducerAnonymousClass : FieldsProducer
         {
             private readonly IDictionary<string, TermsReader> _fields;
 
-            public FieldsProducerAnonymousInnerClassHelper(IDictionary<string, TermsReader> fields)
+            public FieldsProducerAnonymousClass(IDictionary<string, TermsReader> fields)
             {
                 _fields = fields;
             }
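
Many of the TripleShift rewrites in MemoryPostingsFormat (and in PulsingPostingsReader just below) decode the same packed-vInt trick: the high bits of code carry a delta to add to the running accumulator, while the low bit is a flag; for doc codes the flag means "freq == 1", so code.TripleShift(1) recovers the delta and (code & 1) decides whether a separate freq value follows. A minimal decode helper under those assumptions, not the actual reader classes:

    using J2N.Numerics;

    static class PackedDocCodeSketch
    {
        // Decodes one packed code as read in the hunks above: low bit = "freq is 1" flag,
        // high bits = delta to add to the running doc-ID accumulator.
        public static (int Delta, bool FreqIsOne) Decode(int code)
        {
            int delta = code.TripleShift(1);   // same value as (int)((uint)code >> 1)
            bool freqIsOne = (code & 1) != 0;  // when false, the freq is read as its own vInt
            return (delta, freqIsOne);
        }
    }
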
diff --git a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs
index 7c46920..dc1e2b3 100644
--- a/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs
+++ b/src/Lucene.Net.Codecs/Pulsing/PulsingPostingsReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using J2N.Runtime.CompilerServices;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
@@ -380,7 +381,7 @@
                     }
                     else
                     {
-                        _accum += (int)((uint)code >> 1); ; // shift off low bit
+                        _accum += code.TripleShift(1); // shift off low bit
                         _freq = (code & 1) != 0 ? 1 : _postings.ReadVInt32();
 
                         // LUCENENET specific - to avoid boxing, changed from CompareTo() to IndexOptionsComparer.Compare()
@@ -519,7 +520,7 @@
                     }
 
                     var code = _postings.ReadVInt32();
-                    _accum += (int)((uint)code >> 1); // shift off low bit 
+                    _accum += code.TripleShift(1); // shift off low bit 
                     _freq = (code & 1) != 0 ? 1 : _postings.ReadVInt32();
                     _posPending = _freq;
                     _startOffset = _storeOffsets ? 0 : -1; // always return -1 if no offsets are stored
@@ -557,7 +558,7 @@
                     {
                         _payloadLength = _postings.ReadVInt32();
                     }
-                    _position += (int)((uint)code >> 1);
+                    _position += code.TripleShift(1);
                     _payloadRetrieved = false;
                 }
                 else
@@ -573,7 +574,7 @@
                         // new offset length
                         _offsetLength = _postings.ReadVInt32();
                     }
-                    _startOffset += (int)((uint)offsetCode >> 1);
+                    _startOffset += offsetCode.TripleShift(1);
                 }
 
                 return _position;
diff --git a/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs b/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs
index c87d3c3..3c525bf 100644
--- a/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs
+++ b/src/Lucene.Net.Codecs/Sep/SepPostingsReader.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+using J2N.Numerics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
@@ -691,7 +692,7 @@
                         payloadLength = posReader.Next();
                         if (Debugging.AssertsEnabled) Debugging.Assert(payloadLength >= 0);
                     }
-                    position += (int)(((uint)code) >> 1);
+                    position += code.TripleShift(1);
                     pendingPayloadBytes += payloadLength;
                     payloadPending = payloadLength > 0;
                 }
diff --git a/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs b/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs
index 3f38673..2c9bedf 100644
--- a/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs
+++ b/src/Lucene.Net.Codecs/Sep/SepSkipListReader.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+using J2N.Numerics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
@@ -209,7 +210,7 @@
                     payloadLength[level] = skipStream.ReadVInt32();
                 }
                 //delta >>>= 1;
-                delta = (int)((uint)delta >> 1);
+                delta = delta.TripleShift(1);
             }
             else
             {
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
index f9c1a64..9e1515d 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesReader.cs
@@ -156,10 +156,10 @@
             var @in = (IndexInput)data.Clone();
             var scratch = new BytesRef();
             
-            return new NumericDocValuesAnonymousInnerClassHelper(this, field, @in, scratch);
+            return new NumericDocValuesAnonymousClass(this, field, @in, scratch);
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues
+        private class NumericDocValuesAnonymousClass : NumericDocValues
         {
             private readonly SimpleTextDocValuesReader _outerInstance;
 
@@ -167,7 +167,7 @@
             private readonly IndexInput _input;
             private readonly BytesRef _scratch;
 
-            public NumericDocValuesAnonymousInnerClassHelper(SimpleTextDocValuesReader outerInstance,
+            public NumericDocValuesAnonymousClass(SimpleTextDocValuesReader outerInstance,
                 OneField field, IndexInput input, BytesRef scratch)
             {
                 _outerInstance = outerInstance;
@@ -206,10 +206,10 @@
             var field = fields[fieldInfo.Name];
             var input = (IndexInput)data.Clone();
             var scratch = new BytesRef();
-            return new BitsAnonymousInnerClassHelper(this, field, input, scratch);
+            return new BitsAnonymousClass(this, field, input, scratch);
         }
 
-        private class BitsAnonymousInnerClassHelper : IBits
+        private class BitsAnonymousClass : IBits
         {
             private readonly SimpleTextDocValuesReader _outerInstance;
 
@@ -217,7 +217,7 @@
             private readonly IndexInput _input;
             private readonly BytesRef _scratch;
 
-            public BitsAnonymousInnerClassHelper(SimpleTextDocValuesReader outerInstance,
+            public BitsAnonymousClass(SimpleTextDocValuesReader outerInstance,
                 OneField field, IndexInput @in, BytesRef scratch)
             {
                 _outerInstance = outerInstance;
@@ -251,10 +251,10 @@
             var input = (IndexInput)data.Clone();
             var scratch = new BytesRef();
 
-            return new BinaryDocValuesAnonymousInnerClassHelper(this, field, input, scratch);
+            return new BinaryDocValuesAnonymousClass(this, field, input, scratch);
         }
 
-        private class BinaryDocValuesAnonymousInnerClassHelper : BinaryDocValues
+        private class BinaryDocValuesAnonymousClass : BinaryDocValues
         {
             private readonly SimpleTextDocValuesReader _outerInstance;
 
@@ -262,7 +262,7 @@
             private readonly IndexInput _input;
             private readonly BytesRef _scratch;
 
-            public BinaryDocValuesAnonymousInnerClassHelper(SimpleTextDocValuesReader outerInstance, OneField field,
+            public BinaryDocValuesAnonymousClass(SimpleTextDocValuesReader outerInstance, OneField field,
                 IndexInput input, BytesRef scratch)
             {
                 _outerInstance = outerInstance;
@@ -312,10 +312,10 @@
             var input = (IndexInput)data.Clone();
             var scratch = new BytesRef();
 
-            return new BitsAnonymousInnerClassHelper2(this, field, input, scratch);
+            return new BitsAnonymousClass2(this, field, input, scratch);
         }
 
-        private class BitsAnonymousInnerClassHelper2 : IBits
+        private class BitsAnonymousClass2 : IBits
         {
             private readonly SimpleTextDocValuesReader _outerInstance;
 
@@ -323,7 +323,7 @@
             private readonly IndexInput _input;
             private readonly BytesRef _scratch;
 
-            public BitsAnonymousInnerClassHelper2(SimpleTextDocValuesReader outerInstance, OneField field,
+            public BitsAnonymousClass2(SimpleTextDocValuesReader outerInstance, OneField field,
                 IndexInput input, BytesRef scratch)
             {
                 _outerInstance = outerInstance;
@@ -375,10 +375,10 @@
             var input = (IndexInput)data.Clone();
             var scratch = new BytesRef();
 
-            return new SortedDocValuesAnonymousInnerClassHelper(this, field, input, scratch);
+            return new SortedDocValuesAnonymousClass(this, field, input, scratch);
         }
 
-        private class SortedDocValuesAnonymousInnerClassHelper : SortedDocValues
+        private class SortedDocValuesAnonymousClass : SortedDocValues
         {
             private readonly SimpleTextDocValuesReader _outerInstance;
 
@@ -386,7 +386,7 @@
             private readonly IndexInput _input;
             private readonly BytesRef _scratch;
 
-            public SortedDocValuesAnonymousInnerClassHelper(SimpleTextDocValuesReader outerInstance,
+            public SortedDocValuesAnonymousClass(SimpleTextDocValuesReader outerInstance,
                 OneField field, IndexInput input, BytesRef scratch)
             {
                 _outerInstance = outerInstance;
@@ -476,10 +476,10 @@
             var input = (IndexInput) data.Clone();
             var scratch = new BytesRef();
             
-            return new SortedSetDocValuesAnonymousInnerClassHelper(this, field, input, scratch);
+            return new SortedSetDocValuesAnonymousClass(this, field, input, scratch);
         }
 
-        private class SortedSetDocValuesAnonymousInnerClassHelper : SortedSetDocValues
+        private class SortedSetDocValuesAnonymousClass : SortedSetDocValues
         {
             private readonly SimpleTextDocValuesReader _outerInstance;
 
@@ -487,7 +487,7 @@
             private readonly IndexInput _input;
             private readonly BytesRef _scratch;
 
-            public SortedSetDocValuesAnonymousInnerClassHelper(SimpleTextDocValuesReader outerInstance,
+            public SortedSetDocValuesAnonymousClass(SimpleTextDocValuesReader outerInstance,
                 OneField field, IndexInput input, BytesRef scratch)
             {
                 _outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs
index 3d5cca8..9739692 100644
--- a/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs
+++ b/src/Lucene.Net.Codecs/SimpleText/SimpleTextDocValuesWriter.cs
@@ -192,7 +192,7 @@
                 // pad to fit
                 for (int i = length; i < maxLength; i++)
                 {
-                    data.WriteByte((byte)(sbyte) ' ');
+                    data.WriteByte((byte) ' ');
                 }
                 SimpleTextUtil.WriteNewline(data);
                 SimpleTextUtil.Write(data, value == null ? "F" : "T", scratch);
diff --git a/src/Lucene.Net.Facet/DrillDownQuery.cs b/src/Lucene.Net.Facet/DrillDownQuery.cs
index f7fca27..9754c7f 100644
--- a/src/Lucene.Net.Facet/DrillDownQuery.cs
+++ b/src/Lucene.Net.Facet/DrillDownQuery.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Facet/DrillSideways.cs b/src/Lucene.Net.Facet/DrillSideways.cs
index 80d0509..9a8b9f5 100644
--- a/src/Lucene.Net.Facet/DrillSideways.cs
+++ b/src/Lucene.Net.Facet/DrillSideways.cs
@@ -1,4 +1,5 @@
-using J2N.Collections.Generic.Extensions;
+// Lucene version compatibility level 4.8.1
+using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Facet.SortedSet;
 using Lucene.Net.Facet.Taxonomy;
diff --git a/src/Lucene.Net.Facet/DrillSidewaysQuery.cs b/src/Lucene.Net.Facet/DrillSidewaysQuery.cs
index bab7ed3..123c90f 100644
--- a/src/Lucene.Net.Facet/DrillSidewaysQuery.cs
+++ b/src/Lucene.Net.Facet/DrillSidewaysQuery.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 using System;
 
 namespace Lucene.Net.Facet
@@ -103,17 +104,17 @@
                 }
             }
 
-            return new WeightAnonymousInnerClassHelper(this, baseWeight, drillDowns);
+            return new WeightAnonymousClass(this, baseWeight, drillDowns);
         }
 
-        private class WeightAnonymousInnerClassHelper : Weight
+        private class WeightAnonymousClass : Weight
         {
             private readonly DrillSidewaysQuery outerInstance;
 
             private readonly Weight baseWeight;
             private readonly object[] drillDowns;
 
-            public WeightAnonymousInnerClassHelper(DrillSidewaysQuery outerInstance, Weight baseWeight, object[] drillDowns)
+            public WeightAnonymousClass(DrillSidewaysQuery outerInstance, Weight baseWeight, object[] drillDowns)
             {
                 this.outerInstance = outerInstance;
                 this.baseWeight = baseWeight;
diff --git a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
index ff8dd1f..53694bb 100644
--- a/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
+++ b/src/Lucene.Net.Facet/DrillSidewaysScorer.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+// Lucene version compatibility level 4.8.1 + LUCENE-6001
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
diff --git a/src/Lucene.Net.Facet/FacetField.cs b/src/Lucene.Net.Facet/FacetField.cs
index 20bf4cc..a34f34d 100644
--- a/src/Lucene.Net.Facet/FacetField.cs
+++ b/src/Lucene.Net.Facet/FacetField.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 using System;
 using System.Diagnostics.CodeAnalysis;
 
diff --git a/src/Lucene.Net.Facet/FacetResult.cs b/src/Lucene.Net.Facet/FacetResult.cs
index f5911f8..c5c0d8e 100644
--- a/src/Lucene.Net.Facet/FacetResult.cs
+++ b/src/Lucene.Net.Facet/FacetResult.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 using System;
 using System.Diagnostics.CodeAnalysis;
 using System.Globalization;
diff --git a/src/Lucene.Net.Facet/Facets.cs b/src/Lucene.Net.Facet/Facets.cs
index 50b5263..64df1ae 100644
--- a/src/Lucene.Net.Facet/Facets.cs
+++ b/src/Lucene.Net.Facet/Facets.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+// Lucene version compatibility level 4.8.1
+using System.Collections.Generic;
 
 namespace Lucene.Net.Facet
 {
diff --git a/src/Lucene.Net.Facet/FacetsCollector.cs b/src/Lucene.Net.Facet/FacetsCollector.cs
index ab07ebe..2c2d723 100644
--- a/src/Lucene.Net.Facet/FacetsCollector.cs
+++ b/src/Lucene.Net.Facet/FacetsCollector.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
@@ -128,12 +129,12 @@
         /// </summary>
         protected virtual Docs CreateDocs(int maxDoc)
         {
-            return new DocsAnonymousInnerClassHelper(maxDoc);
+            return new DocsAnonymousClass(maxDoc);
         }
 
-        private class DocsAnonymousInnerClassHelper : Docs
+        private class DocsAnonymousClass : Docs
         {
-            public DocsAnonymousInnerClassHelper(int maxDoc)
+            public DocsAnonymousClass(int maxDoc)
             {
                 bits = new FixedBitSet(maxDoc);
             }
diff --git a/src/Lucene.Net.Facet/FacetsConfig.cs b/src/Lucene.Net.Facet/FacetsConfig.cs
index 8a430d0..735157e 100644
--- a/src/Lucene.Net.Facet/FacetsConfig.cs
+++ b/src/Lucene.Net.Facet/FacetsConfig.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Concurrent;
diff --git a/src/Lucene.Net.Facet/LabelAndValue.cs b/src/Lucene.Net.Facet/LabelAndValue.cs
index 8c7eb4c..a839540 100644
--- a/src/Lucene.Net.Facet/LabelAndValue.cs
+++ b/src/Lucene.Net.Facet/LabelAndValue.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 using System.Globalization;
 
 namespace Lucene.Net.Facet
diff --git a/src/Lucene.Net.Facet/MultiFacets.cs b/src/Lucene.Net.Facet/MultiFacets.cs
index cb4e57d..fe86597 100644
--- a/src/Lucene.Net.Facet/MultiFacets.cs
+++ b/src/Lucene.Net.Facet/MultiFacets.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 using System.Collections.Generic;
 
 namespace Lucene.Net.Facet
diff --git a/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs b/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
index 0c80dab..06667da 100644
--- a/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
+++ b/src/Lucene.Net.Facet/RandomSamplingFacetsCollector.cs
@@ -1,4 +1,6 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using J2N.Numerics;
+using System;
 using System.Collections.Generic;
 using System.IO;
 
@@ -70,7 +72,7 @@
             public virtual long RandomInt64()
             {
                 x ^= (x << 21);
-                x ^= ((long)((ulong)x >> 35));
+                x ^= (x.TripleShift(35));
                 x ^= (x << 4);
                 return x;
             }
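
The RandomSamplingFacetsCollector hunk updates the middle step of its xorshift-style pseudo-random generator: the 64-bit state is mixed with x ^= x << 21, then a logical right shift by 35, then x ^= x << 4, and x.TripleShift(35) is the C# spelling of that unsigned shift on a long. A self-contained sketch of the same update, with an assumed seed:

    using System;
    using J2N.Numerics;

    class XorShift64Sketch
    {
        private long x = 0x6A09E667F3BCC909L; // illustrative non-zero seed

        public long NextInt64()
        {
            x ^= x << 21;
            x ^= x.TripleShift(35); // logical (unsigned) right shift, as in the hunk above
            x ^= x << 4;
            return x;
        }

        static void Main()
        {
            var rng = new XorShift64Sketch();
            for (int i = 0; i < 3; i++) Console.WriteLine(rng.NextInt64());
        }
    }
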
diff --git a/src/Lucene.Net.Facet/Range/DoubleRange.cs b/src/Lucene.Net.Facet/Range/DoubleRange.cs
index 767aed1..606415f 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRange.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRange.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Search;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Search;
 using Lucene.Net.Support;
 using System;
 
@@ -122,17 +123,17 @@
 
         public override Filter GetFilter(Filter fastMatchFilter, ValueSource valueSource)
         {
-            return new FilterAnonymousInnerClassHelper(this, fastMatchFilter, valueSource);
+            return new FilterAnonymousClass(this, fastMatchFilter, valueSource);
         }
 
-        private class FilterAnonymousInnerClassHelper : Filter
+        private class FilterAnonymousClass : Filter
         {
             private readonly DoubleRange outerInstance;
 
             private readonly Filter fastMatchFilter;
             private readonly ValueSource valueSource;
 
-            public FilterAnonymousInnerClassHelper(DoubleRange outerInstance, Filter fastMatchFilter, ValueSource valueSource)
+            public FilterAnonymousClass(DoubleRange outerInstance, Filter fastMatchFilter, ValueSource valueSource)
             {
                 this.outerInstance = outerInstance;
                 this.fastMatchFilter = fastMatchFilter;
@@ -176,19 +177,19 @@
                     fastMatchBits = null;
                 }
 
-                return new DocIdSetAnonymousInnerClassHelper(this, acceptDocs, values, maxDoc, fastMatchBits);
+                return new DocIdSetAnonymousClass(this, acceptDocs, values, maxDoc, fastMatchBits);
             }
 
-            private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+            private class DocIdSetAnonymousClass : DocIdSet
             {
-                private readonly FilterAnonymousInnerClassHelper outerInstance;
+                private readonly FilterAnonymousClass outerInstance;
 
                 private readonly IBits acceptDocs;
                 private readonly FunctionValues values;
                 private readonly int maxDoc;
                 private readonly IBits fastMatchBits;
 
-                public DocIdSetAnonymousInnerClassHelper(FilterAnonymousInnerClassHelper outerInstance, IBits acceptDocs, FunctionValues values, int maxDoc, IBits fastMatchBits)
+                public DocIdSetAnonymousClass(FilterAnonymousClass outerInstance, IBits acceptDocs, FunctionValues values, int maxDoc, IBits fastMatchBits)
                 {
                     this.outerInstance = outerInstance;
                     this.acceptDocs = acceptDocs;
@@ -197,13 +198,13 @@
                     this.fastMatchBits = fastMatchBits;
                 }
 
-                public override IBits Bits => new BitsAnonymousInnerClassHelper(this);
+                public override IBits Bits => new BitsAnonymousClass(this);
 
-                private class BitsAnonymousInnerClassHelper : IBits
+                private class BitsAnonymousClass : IBits
                 {
-                    private readonly DocIdSetAnonymousInnerClassHelper outerInstance;
+                    private readonly DocIdSetAnonymousClass outerInstance;
 
-                    public BitsAnonymousInnerClassHelper(DocIdSetAnonymousInnerClassHelper outerInstance)
+                    public BitsAnonymousClass(DocIdSetAnonymousClass outerInstance)
                     {
                         this.outerInstance = outerInstance;
                     }
diff --git a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
index fe95867..4a5f18f 100644
--- a/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/DoubleRangeFacetCounts.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 
diff --git a/src/Lucene.Net.Facet/Range/LongRange.cs b/src/Lucene.Net.Facet/Range/LongRange.cs
index 3ed9d02..026cb0d 100644
--- a/src/Lucene.Net.Facet/Range/LongRange.cs
+++ b/src/Lucene.Net.Facet/Range/LongRange.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 using System;
 
 namespace Lucene.Net.Facet.Range
@@ -116,17 +117,17 @@
 
         public override Filter GetFilter(Filter fastMatchFilter, ValueSource valueSource)
         {
-            return new FilterAnonymousInnerClassHelper(this, fastMatchFilter, valueSource);
+            return new FilterAnonymousClass(this, fastMatchFilter, valueSource);
         }
 
-        private class FilterAnonymousInnerClassHelper : Filter
+        private class FilterAnonymousClass : Filter
         {
             private readonly Int64Range outerInstance;
 
             private readonly Filter fastMatchFilter;
             private readonly ValueSource valueSource;
 
-            public FilterAnonymousInnerClassHelper(Int64Range outerInstance, Filter fastMatchFilter, ValueSource valueSource)
+            public FilterAnonymousClass(Int64Range outerInstance, Filter fastMatchFilter, ValueSource valueSource)
             {
                 this.outerInstance = outerInstance;
                 this.fastMatchFilter = fastMatchFilter;
@@ -171,19 +172,19 @@
                     fastMatchBits = null;
                 }
 
-                return new DocIdSetAnonymousInnerClassHelper(this, acceptDocs, values, maxDoc, fastMatchBits);
+                return new DocIdSetAnonymousClass(this, acceptDocs, values, maxDoc, fastMatchBits);
             }
 
-            private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+            private class DocIdSetAnonymousClass : DocIdSet
             {
-                private readonly FilterAnonymousInnerClassHelper outerInstance;
+                private readonly FilterAnonymousClass outerInstance;
 
                 private readonly IBits acceptDocs;
                 private readonly FunctionValues values;
                 private readonly int maxDoc;
                 private readonly IBits fastMatchBits;
 
-                public DocIdSetAnonymousInnerClassHelper(FilterAnonymousInnerClassHelper outerInstance, IBits acceptDocs, FunctionValues values, int maxDoc, IBits fastMatchBits)
+                public DocIdSetAnonymousClass(FilterAnonymousClass outerInstance, IBits acceptDocs, FunctionValues values, int maxDoc, IBits fastMatchBits)
                 {
                     this.outerInstance = outerInstance;
                     this.acceptDocs = acceptDocs;
@@ -193,13 +194,13 @@
                 }
 
 
-                public override IBits Bits => new BitsAnonymousInnerClassHelper(this);
+                public override IBits Bits => new BitsAnonymousClass(this);
 
-                private class BitsAnonymousInnerClassHelper : IBits
+                private class BitsAnonymousClass : IBits
                 {
-                    private readonly DocIdSetAnonymousInnerClassHelper outerInstance;
+                    private readonly DocIdSetAnonymousClass outerInstance;
 
-                    public BitsAnonymousInnerClassHelper(DocIdSetAnonymousInnerClassHelper outerInstance)
+                    public BitsAnonymousClass(DocIdSetAnonymousClass outerInstance)
                     {
                         this.outerInstance = outerInstance;
                     }
diff --git a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
index 7fa886c..2495c85 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeCounter.cs
@@ -1,6 +1,7 @@
-using Lucene.Net.Diagnostics;
+// Lucene version compatibility level 4.8.1
+using J2N.Numerics;
+using Lucene.Net.Diagnostics;
 using System.Collections.Generic;
-using System.Diagnostics;
 using System.Text;
 
 namespace Lucene.Net.Facet.Range
@@ -178,7 +179,7 @@
             int hi = boundaries.Length - 1;
             while (true)
             {
-                int mid = (int)((uint)(lo + hi) >> 1);
+                int mid = (lo + hi).TripleShift(1);
                 //System.out.println("  cycle lo=" + lo + " hi=" + hi + " mid=" + mid + " boundary=" + boundaries[mid] + " to " + boundaries[mid+1]);
                 if (v <= boundaries[mid])
                 {
@@ -261,7 +262,7 @@
             }
             else
             {
-                int mid = (int)((uint)(start + end) >> 1);
+                int mid = (start + end).TripleShift(1);
                 Int64RangeNode left = Split(start, mid, elementaryIntervals);
                 Int64RangeNode right = Split(mid, end, elementaryIntervals);
                 return new Int64RangeNode(left.start, right.end, left, right, -1);
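> Editor's note: the hunks above swap the manual `(int)((uint)x >> 1)` pattern for J2N's `TripleShift`, which stands in for Java's `>>>` (logical right shift). A minimal sketch of the equivalence follows; the class and method names are illustrative only and are not part of the patch.

```csharp
// Minimal sketch (assumes the J2N.Numerics.TripleShift extension used by the patch).
using System;
using J2N.Numerics;

internal static class TripleShiftDemo
{
    internal static void Main()
    {
        int lo = 1, hi = int.MaxValue;
        int sum = unchecked(lo + hi);            // wraps to int.MinValue: sign bit set

        int oldStyle = (int)((uint)sum >> 1);    // the pattern being replaced
        int newStyle = sum.TripleShift(1);       // the J2N replacement

        Console.WriteLine(oldStyle == newStyle); // True: both shift in a zero sign bit
    }
}
```

For a midpoint computation like `(lo + hi) >>> 1`, the logical shift keeps the result correct even when the intermediate sum overflows into a negative `int`, which an arithmetic `>> 1` would not.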
diff --git a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
index 8afbe15..485f1c6 100644
--- a/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/LongRangeFacetCounts.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
 
diff --git a/src/Lucene.Net.Facet/Range/Range.cs b/src/Lucene.Net.Facet/Range/Range.cs
index db620be..745d7fe 100644
--- a/src/Lucene.Net.Facet/Range/Range.cs
+++ b/src/Lucene.Net.Facet/Range/Range.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 
 namespace Lucene.Net.Facet.Range
 {
diff --git a/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs b/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
index 0e84cee..bd44fa8 100644
--- a/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Range/RangeFacetCounts.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 using System.Collections.Generic;
 
 namespace Lucene.Net.Facet.Range
diff --git a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
index 235d2ee..3d7c3cc 100644
--- a/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
+++ b/src/Lucene.Net.Facet/SortedSet/DefaultSortedSetDocValuesReaderState.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Index;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
index 0ee82f2..14ae716 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetCounts.cs
@@ -1,4 +1,5 @@
-using J2N.Text;
+// Lucene version compatibility level 4.8.1
+using J2N.Text;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
index 6d387ac..c5c0e40 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesFacetField.cs
@@ -1,4 +1,6 @@
-namespace Lucene.Net.Facet.SortedSet
+// Lucene version compatibility level 4.8.1
+
+namespace Lucene.Net.Facet.SortedSet
 {
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
diff --git a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
index 4442924..4eb7bcc 100644
--- a/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
+++ b/src/Lucene.Net.Facet/SortedSet/SortedSetDocValuesReaderState.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+// Lucene version compatibility level 4.8.1
+using System.Collections.Generic;
 
 namespace Lucene.Net.Facet.SortedSet
 {
diff --git a/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
index 9723efd..d277361 100644
--- a/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/AssociationFacetField.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 using System;
 using System.Diagnostics.CodeAnalysis;
 
diff --git a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
index 6055162..7ff6132 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CachedOrdinalsReader.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 using System;
 using System.Diagnostics.CodeAnalysis;
 using System.Runtime.CompilerServices;
@@ -116,14 +117,14 @@
         public override OrdinalsSegmentReader GetReader(AtomicReaderContext context)
         {
             CachedOrds cachedOrds = GetCachedOrds(context);
-            return new OrdinalsSegmentReaderAnonymousInnerClassHelper(cachedOrds);
+            return new OrdinalsSegmentReaderAnonymousClass(cachedOrds);
         }
 
-        private class OrdinalsSegmentReaderAnonymousInnerClassHelper : OrdinalsSegmentReader
+        private class OrdinalsSegmentReaderAnonymousClass : OrdinalsSegmentReader
         {
             private readonly CachedOrds cachedOrds;
 
-            public OrdinalsSegmentReaderAnonymousInnerClassHelper(CachedOrds cachedOrds)
+            public OrdinalsSegmentReaderAnonymousClass(CachedOrds cachedOrds)
             {
                 this.cachedOrds = cachedOrds;
             }
diff --git a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
index 64d2f55..46c4309 100644
--- a/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/CategoryPath.cs
@@ -1,4 +1,5 @@
-using J2N.Text;
+// Lucene version compatibility level 4.8.1
+using J2N.Text;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/Consts.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/Consts.cs
index 3660ff0..39c991f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/Consts.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/Consts.cs
@@ -1,4 +1,6 @@
-namespace Lucene.Net.Facet.Taxonomy.Directory
+// Lucene version compatibility level 4.8.1
+
+namespace Lucene.Net.Facet.Taxonomy.Directory
 {
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
index 2986afa..0de5a71 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyReader.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Text;
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
index 25fd4b8..c45a3f5 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/DirectoryTaxonomyWriter.cs
@@ -1,4 +1,5 @@
-using J2N.Threading.Atomic;
+// Lucene version compatibility level 4.8.1
+using J2N.Threading.Atomic;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
@@ -8,9 +9,7 @@
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
-using System.Diagnostics;
 using System.IO;
-using System.Threading;
 
 namespace Lucene.Net.Facet.Taxonomy.Directory
 {
diff --git a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
index dd5030e..7c4e716 100644
--- a/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/Directory/TaxonomyIndexArrays.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Support;
 using System;
diff --git a/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
index 46ab317..41359d9 100644
--- a/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/DocValuesOrdinalsReader.cs
@@ -1,4 +1,6 @@
-namespace Lucene.Net.Facet.Taxonomy
+// Lucene version compatibility level 4.8.1
+
+namespace Lucene.Net.Facet.Taxonomy
 {
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -57,16 +59,16 @@
 
             BinaryDocValues values = values0;
 
-            return new OrdinalsSegmentReaderAnonymousInnerClassHelper(this, values);
+            return new OrdinalsSegmentReaderAnonymousClass(this, values);
         }
 
-        private class OrdinalsSegmentReaderAnonymousInnerClassHelper : OrdinalsSegmentReader
+        private class OrdinalsSegmentReaderAnonymousClass : OrdinalsSegmentReader
         {
             private readonly DocValuesOrdinalsReader outerInstance;
 
             private readonly BinaryDocValues values;
 
-            public OrdinalsSegmentReaderAnonymousInnerClassHelper(DocValuesOrdinalsReader outerInstance, BinaryDocValues values)
+            public OrdinalsSegmentReaderAnonymousClass(DocValuesOrdinalsReader outerInstance, BinaryDocValues values)
             {
                 this.outerInstance = outerInstance;
                 this.values = values;
@@ -107,7 +109,7 @@
             while (offset < upto)
             {
                 byte b = buf.Bytes[offset++];
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     ordinals.Int32s[ordinals.Length] = ((value << 7) | b) + prev;
                     value = 0;
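> Editor's note: the `// LUCENENET: Optimized equivalent` comment above can be checked exhaustively, since a C# `byte` ranges over 0..255 and the test is simply "is the high (VInt continuation) bit clear?". A small verification sketch, with illustrative names not present in the patch:

```csharp
// Minimal sketch: "(sbyte)b >= 0" and "b <= sbyte.MaxValue" agree for every byte value.
using System;

internal static class HighBitCheckDemo
{
    internal static void Main()
    {
        for (int i = 0; i <= byte.MaxValue; i++)
        {
            byte b = (byte)i;
            bool oldTest = (sbyte)b >= 0;        // the pattern being replaced
            bool newTest = b <= sbyte.MaxValue;  // the LUCENENET replacement
            if (oldTest != newTest)
                Console.WriteLine($"Mismatch at {i}"); // never printed
        }
        Console.WriteLine("Equivalent for all 256 byte values");
    }
}
```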
diff --git a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
index 9f6b20f..0d6343f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FacetLabel.cs
@@ -1,4 +1,5 @@
-using J2N.Text;
+// Lucene version compatibility level 4.8.1
+using J2N.Text;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
diff --git a/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs b/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
index e16e53a..54deaf0 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FastTaxonomyFacetCounts.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+// Lucene version compatibility level 4.8.1
+using System.Collections.Generic;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
@@ -81,7 +82,7 @@
                     while (offset < end)
                     {
                         byte b = bytes[offset++];
-                        if ((sbyte)b >= 0)
+                        if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                         {
                             prev = ord = ((ord << 7) | b) + prev;
                             ++m_values[ord];
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
index 374f8a6..93eb176 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatAssociationFacetField.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 using System.Globalization;
 
 namespace Lucene.Net.Facet.Taxonomy
diff --git a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
index 15e8233..cbe9fe1 100644
--- a/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/FloatTaxonomyFacets.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
diff --git a/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs b/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
index 9e13e03..31cea93 100644
--- a/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/IntAssociationFacetField.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
diff --git a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
index 409dde9..a801681 100644
--- a/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/IntTaxonomyFacets.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 using System.Collections.Generic;
 
 namespace Lucene.Net.Facet.Taxonomy
diff --git a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
index d1e3e59..cdc7230 100644
--- a/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/LRUHashMap.cs
@@ -1,4 +1,5 @@
-using J2N.Collections.Concurrent;
+// Lucene version compatibility level 4.8.1
+using J2N.Collections.Concurrent;
 using System;
 using System.Collections;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs b/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs
index b390813..c73a770 100644
--- a/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/OrdinalsReader.cs
@@ -1,4 +1,6 @@
-namespace Lucene.Net.Facet.Taxonomy
+// Lucene version compatibility level 4.8.1
+
+namespace Lucene.Net.Facet.Taxonomy
 {
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
diff --git a/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs b/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
index 64b8dc9..d4dbd3f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/ParallelTaxonomyArrays.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 using System.Diagnostics.CodeAnalysis;
 
 namespace Lucene.Net.Facet.Taxonomy
diff --git a/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs b/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
index 44394a8..1b60962 100644
--- a/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/PrintTaxonomyStats.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 using System.IO;
 
 namespace Lucene.Net.Facet.Taxonomy
diff --git a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
index 1427f15..8a5f82d 100644
--- a/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/SearcherTaxonomyManager.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Search;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Search;
 using System;
 
 namespace Lucene.Net.Facet.Taxonomy
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
index 0014366..bc8f9f0 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetCounts.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+// Lucene version compatibility level 4.8.1
+using System.Collections.Generic;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
index 72d35ac..06733fc 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumFloatAssociations.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+// Lucene version compatibility level 4.8.1
+using System.Collections.Generic;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
index cb4d17f..b30370f 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumIntAssociations.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+// Lucene version compatibility level 4.8.1
+using System.Collections.Generic;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
index 3d8c6b0..46ef005 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacetSumValueSource.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 using System.Collections;
 using System.Collections.Generic;
 using System.IO;
@@ -166,14 +167,14 @@
                 {
                     throw new ThreadStateException("scores are missing; be sure to pass keepScores=true to FacetsCollector");
                 }
-                return new DoubleDocValuesAnonymousInnerClassHelper(this, scorer);
+                return new DoubleDocValuesAnonymousClass(this, scorer);
             }
 
-            private class DoubleDocValuesAnonymousInnerClassHelper : DoubleDocValues
+            private class DoubleDocValuesAnonymousClass : DoubleDocValues
             {
                 private readonly Scorer scorer;
 
-                public DoubleDocValuesAnonymousInnerClassHelper(ScoreValueSource outerInstance, Scorer scorer)
+                public DoubleDocValuesAnonymousClass(ScoreValueSource outerInstance, Scorer scorer)
                     : base(outerInstance)
                 {
                     this.scorer = scorer;
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
index c0ae7b2..1205c65 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyFacets.cs
@@ -1,4 +1,5 @@
-using J2N.Text;
+// Lucene version compatibility level 4.8.1
+using J2N.Text;
 using System;
 using System.Collections.Generic;
 
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
index d3aee8f..3a82084 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyReader.cs
@@ -1,4 +1,5 @@
-using J2N.Threading.Atomic;
+// Lucene version compatibility level 4.8.1
+using J2N.Threading.Atomic;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
index e4edd4c..89d41fd 100644
--- a/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/TaxonomyWriter.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 using System.Collections.Generic;
 
 namespace Lucene.Net.Facet.Taxonomy
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
index 4a2a00f..8dd51fd 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CategoryPathUtils.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
index c8412b6..a4db7e9 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CharBlockArray.cs
@@ -1,4 +1,5 @@
-using J2N.Text;
+// Lucene version compatibility level 4.8.1
+using J2N.Text;
 using Lucene.Net.Support.IO;
 using System;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
index e505ea0..3699a45 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/Cl2oTaxonomyWriterCache.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 using System.Threading;
 
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
index 9ebc1c9..d4cf618 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CollisionMap.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 using System.Collections;
 using System.Collections.Generic;
 
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
index 824b42d..4b766e9 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/CompactLabelToOrdinal.cs
@@ -1,4 +1,6 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using J2N.Numerics;
+using System;
 using System.IO;
 using System.Runtime.Serialization;
 
@@ -311,10 +313,8 @@
         {
             int hash = label.GetHashCode();
 
-#pragma warning disable IDE0054 // Use compound assignment
-            hash = hash ^ (((int)((uint)hash >> 20)) ^ ((int)((uint)hash >> 12)));
-            hash = hash ^ ((int)((uint)hash >> 7)) ^ ((int)((uint)hash >> 4));
-#pragma warning restore IDE0054 // Use compound assignment
+            hash = hash ^ hash.TripleShift(20) ^ hash.TripleShift(12);
+            hash = hash ^ hash.TripleShift(7) ^ hash.TripleShift(4);
 
             return hash;
 
@@ -323,10 +323,8 @@
         internal static int StringHashCode(CharBlockArray labelRepository, int offset)
         {
             int hash = CategoryPathUtils.HashCodeOfSerialized(labelRepository, offset);
-#pragma warning disable IDE0054 // Use compound assignment
-            hash = hash ^ (((int)((uint)hash >> 20)) ^ ((int)((uint)hash >> 12)));
-            hash = hash ^ ((int)((uint)hash >> 7)) ^ ((int)((uint)hash >> 4));
-#pragma warning restore IDE0054 // Use compound assignment
+            hash = hash ^ hash.TripleShift(20) ^ hash.TripleShift(12);
+            hash = hash ^ hash.TripleShift(7) ^ hash.TripleShift(4);
             return hash;
         }
 
@@ -449,8 +447,8 @@
                     }
                     // Now that we've hashed the components of the label, do the
                     // final part of the hash algorithm.
-                    hash = hash ^ (((int)((uint)hash >> 20)) ^ ((int)((uint)hash >> 12)));
-                    hash = hash ^ ((int)((uint)hash >> 7)) ^ ((int)((uint)hash >> 4));
+                    hash = hash ^ hash.TripleShift(20) ^ hash.TripleShift(12);
+                    hash = hash ^ hash.TripleShift(7) ^ hash.TripleShift(4);
                     // Add the label, and let's keep going
                     l2o.AddLabelOffset(hash, cid, lastStartOffset);
                     cid++;
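> Editor's note: the three hunks above all rewrite the same hash-spreading step, folding the upper bits of a hash code into the lower bits (the supplemental hash older `java.util.HashMap` versions used) so power-of-two table sizes still distribute keys well. A standalone sketch of that step, with illustrative names not taken from the patch:

```csharp
// Minimal sketch of the spreading function, assuming J2N's TripleShift extension.
using System;
using J2N.Numerics;

internal static class SpreadHashDemo
{
    internal static int Spread(int hash)
    {
        hash = hash ^ hash.TripleShift(20) ^ hash.TripleShift(12);
        hash = hash ^ hash.TripleShift(7) ^ hash.TripleShift(4);
        return hash;
    }

    internal static void Main()
    {
        Console.WriteLine(Spread("Lucene".GetHashCode()));
    }
}
```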
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
index ca522a7..20529c6 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LabelToOrdinal.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
index e187a80..484fc3a 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/LruTaxonomyWriterCache.cs
@@ -1,4 +1,5 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 
 namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameHashIntCacheLRU.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameHashIntCacheLRU.cs
index bdfb4d9..e54b721 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameHashIntCacheLRU.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameHashIntCacheLRU.cs
@@ -1,4 +1,6 @@
-namespace Lucene.Net.Facet.Taxonomy.WriterCache
+// Lucene version compatibility level 4.8.1
+
+namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
index 25beaf6..f927262 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/NameIntCacheLRU.cs
@@ -1,4 +1,5 @@
-using J2N.Collections.Concurrent;
+// Lucene version compatibility level 4.8.1
+using J2N.Collections.Concurrent;
 using System;
 using System.Collections.Concurrent;
 using System.Collections.Generic;
diff --git a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
index 1f7e712..093c11d 100644
--- a/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
+++ b/src/Lucene.Net.Facet/Taxonomy/WriterCache/TaxonomyWriterCache.cs
@@ -1,22 +1,24 @@
-namespace Lucene.Net.Facet.Taxonomy.WriterCache
+// Lucene version compatibility level 4.8.1
+using System;
+
+namespace Lucene.Net.Facet.Taxonomy.WriterCache
 {
-    using System;
     /*
-* Licensed to the Apache Software Foundation (ASF) under one or more
-* contributor license agreements.  See the NOTICE file distributed with
-* this work for additional information regarding copyright ownership.
-* The ASF licenses this file to You under the Apache License, Version 2.0
-* (the "License"); you may not use this file except in compliance with
-* the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
 
     using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
 
diff --git a/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs b/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
index 8761aa9..61c35a3 100644
--- a/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
+++ b/src/Lucene.Net.Facet/TopOrdAndFloatQueue.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Util;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Util;
 using System;
 using System.Runtime.InteropServices;
 
diff --git a/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs b/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
index e95ad97..f63040c 100644
--- a/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
+++ b/src/Lucene.Net.Facet/TopOrdAndIntQueue.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Util;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Util;
 
 namespace Lucene.Net.Facet
 {
diff --git a/src/Lucene.Net.Highlighter/Highlight/SimpleSpanFragmenter.cs b/src/Lucene.Net.Highlighter/Highlight/SimpleSpanFragmenter.cs
index c8b460b..d93530f 100644
--- a/src/Lucene.Net.Highlighter/Highlight/SimpleSpanFragmenter.cs
+++ b/src/Lucene.Net.Highlighter/Highlight/SimpleSpanFragmenter.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Analysis;
+using J2N.Numerics;
+using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
 using System.Collections.Generic;
 
@@ -83,7 +84,7 @@
             }
 
             bool isNewFrag = offsetAtt.EndOffset >= (fragmentSize * currentNumFrags)
-                && (textSize - offsetAtt.EndOffset) >= (int)((uint)fragmentSize >> 1);
+                && (textSize - offsetAtt.EndOffset) >= fragmentSize.TripleShift(1);
 
 
             if (isNewFrag)
diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
index 9af5ed8..fc83d5c 100644
--- a/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
+++ b/src/Lucene.Net.Highlighter/PostingsHighlight/MultiTermHighlighting.cs
@@ -89,7 +89,7 @@
             {
                 if (aq.Field.Equals(field, StringComparison.Ordinal))
                 {
-                    list.Add(new CharacterRunAutomatonToStringAnonymousHelper(aq.Automaton, () => aq.ToString()));
+                    list.Add(new CharacterRunAutomatonToStringAnonymousClass(aq.Automaton, () => aq.ToString()));
                 }
             }
             else if (query is PrefixQuery pq)
@@ -97,7 +97,7 @@
                 Term prefix = pq.Prefix;
                 if (prefix.Field.Equals(field, StringComparison.Ordinal))
                 {
-                    list.Add(new CharacterRunAutomatonToStringAnonymousHelper(
+                    list.Add(new CharacterRunAutomatonToStringAnonymousClass(
                         BasicOperations.Concatenate(BasicAutomata.MakeString(prefix.Text()), BasicAutomata.MakeAnyString()),
                         () => pq.ToString()));
                 }
@@ -122,7 +122,7 @@
                         Automaton prefix = BasicAutomata.MakeString(UnicodeUtil.NewString(termText, 0, prefixLength));
                         automaton = BasicOperations.Concatenate(prefix, automaton);
                     }
-                    list.Add(new CharacterRunAutomatonToStringAnonymousHelper(automaton, () => fq.ToString()));
+                    list.Add(new CharacterRunAutomatonToStringAnonymousClass(automaton, () => fq.ToString()));
                 }
             }
             else if (query is TermRangeQuery tq)
@@ -130,17 +130,17 @@
                 if (tq.Field.Equals(field, StringComparison.Ordinal))
                 {
                     // this is *not* an automaton, but its very simple
-                    list.Add(new SimpleCharacterRunAutomatonAnonymousHelper(BasicAutomata.MakeEmpty(), tq));
+                    list.Add(new SimpleCharacterRunAutomatonAnonymousClass(BasicAutomata.MakeEmpty(), tq));
                 }
             }
             return list.ToArray(/*new CharacterRunAutomaton[list.size()]*/);
         }
 
-        internal class CharacterRunAutomatonToStringAnonymousHelper : CharacterRunAutomaton
+        private class CharacterRunAutomatonToStringAnonymousClass : CharacterRunAutomaton
         {
             private readonly Func<string> toStringMethod;
 
-            public CharacterRunAutomatonToStringAnonymousHelper(Automaton a, Func<string> toStringMethod)
+            public CharacterRunAutomatonToStringAnonymousClass(Automaton a, Func<string> toStringMethod)
                 : base(a)
             {
                 this.toStringMethod = toStringMethod;
@@ -152,7 +152,7 @@
             }
         }
 
-        internal class SimpleCharacterRunAutomatonAnonymousHelper : CharacterRunAutomaton
+        private class SimpleCharacterRunAutomatonAnonymousClass : CharacterRunAutomaton
         {
             private readonly CharsRef lowerBound;
             private readonly CharsRef upperBound;
@@ -163,7 +163,7 @@
             private static readonly IComparer<CharsRef> comparer = CharsRef.UTF16SortedAsUTF8Comparer; // LUCENENET specific - made static
 #pragma warning restore 612, 618
 
-            public SimpleCharacterRunAutomatonAnonymousHelper(Automaton a, TermRangeQuery tq)
+            public SimpleCharacterRunAutomatonAnonymousClass(Automaton a, TermRangeQuery tq)
                 : base(a)
             {
                 if (tq.LowerTerm == null)
@@ -230,16 +230,16 @@
             // would only serve to make this method less bogus.
             // instead, we always return freq() = Integer.MAX_VALUE and let PH terminate based on offset...
 
-            return new DocsAndPositionsEnumAnonymousHelper(ts, matchers, charTermAtt, offsetAtt);
+            return new DocsAndPositionsEnumAnonymousClass(ts, matchers, charTermAtt, offsetAtt);
         }
 
-        internal class DocsAndPositionsEnumAnonymousHelper : DocsAndPositionsEnum
+        private class DocsAndPositionsEnumAnonymousClass : DocsAndPositionsEnum
         {
             private readonly CharacterRunAutomaton[] matchers;
             private readonly ICharTermAttribute charTermAtt;
             private readonly IOffsetAttribute offsetAtt;
 
-            public DocsAndPositionsEnumAnonymousHelper(
+            public DocsAndPositionsEnumAnonymousClass(
                 TokenStream ts, CharacterRunAutomaton[] matchers, ICharTermAttribute charTermAtt, IOffsetAttribute offsetAtt)
             {
                 this.matchers = matchers;
diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs
index fa0f236..33b7035 100644
--- a/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs
+++ b/src/Lucene.Net.Highlighter/PostingsHighlight/Passage.cs
@@ -64,13 +64,13 @@
             numMatches++;
         }
 
-        internal class InPlaceMergeSorterAnonymousHelper : InPlaceMergeSorter
+        private class InPlaceMergeSorterAnonymousClass : InPlaceMergeSorter
         {
             private readonly int[] starts;
             private readonly int[] ends;
             private readonly BytesRef[] terms;
 
-            public InPlaceMergeSorterAnonymousHelper(int[] starts, int[] ends, BytesRef[] terms)
+            public InPlaceMergeSorterAnonymousClass(int[] starts, int[] ends, BytesRef[] terms)
             {
                 this.starts = starts;
                 this.ends = ends;
@@ -103,7 +103,7 @@
             int[] starts = matchStarts;
             int[] ends = matchEnds;
             BytesRef[] terms = matchTerms;
-            new InPlaceMergeSorterAnonymousHelper(starts, ends, terms)
+            new InPlaceMergeSorterAnonymousClass(starts, ends, terms)
                 .Sort(0, numMatches);
         }
 
diff --git a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
index 1ec4c65..316dd52 100644
--- a/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
+++ b/src/Lucene.Net.Highlighter/PostingsHighlight/PostingsHighlighter.cs
@@ -333,11 +333,11 @@
             return snippets;
         }
 
-        internal class InPlaceMergeSorterAnonymousHelper : InPlaceMergeSorter
+        private class InPlaceMergeSorterAnonymousClass : InPlaceMergeSorter
         {
             private readonly string[] fields;
             private readonly int[] maxPassages;
-            public InPlaceMergeSorterAnonymousHelper(string[] fields, int[] maxPassages)
+            public InPlaceMergeSorterAnonymousClass(string[] fields, int[] maxPassages)
             {
                 this.fields = fields;
                 this.maxPassages = maxPassages;
@@ -407,7 +407,7 @@
 
             // sort for sequential io
             ArrayUtil.TimSort(docids);
-            new InPlaceMergeSorterAnonymousHelper(fields, maxPassages).Sort(0, fields.Length);
+            new InPlaceMergeSorterAnonymousClass(fields, maxPassages).Sort(0, fields.Length);
 
             // pull stored data:
             IList<string[]> contents = LoadFieldValues(searcher, fields, docids, maxLength);
@@ -819,13 +819,13 @@
             }
         }
 
-        private static readonly DocsAndPositionsEnum EMPTY = new DocsAndPositionsEnumAnonymousHelper();
+        private static readonly DocsAndPositionsEnum EMPTY = new DocsAndPositionsEnumAnonymousClass();
 
         /// <summary>
         /// we rewrite against an empty indexreader: as we don't want things like
         /// rangeQueries that don't summarize the document
         /// </summary>
-        private class DocsAndPositionsEnumAnonymousHelper : DocsAndPositionsEnum
+        private class DocsAndPositionsEnumAnonymousClass : DocsAndPositionsEnum
         {
             public override int NextPosition()
             {
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs
index 16eeb0d..6cfe11a 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/BaseFragmentsBuilder.cs
@@ -151,16 +151,16 @@
         {
             // according to javadoc, doc.getFields(fieldName) cannot be used with lazy loaded field???
             List<Field> fields = new List<Field>();
-            reader.Document(docId, new GetFieldsStoredFieldsVisitorAnonymousHelper(fields, fieldName));
+            reader.Document(docId, new GetFieldsStoredFieldsVisitorAnonymousClass(fields, fieldName));
 
             return fields.ToArray(/*new Field[fields.size()]*/);
         }
 
-        internal class GetFieldsStoredFieldsVisitorAnonymousHelper : StoredFieldVisitor
+        private class GetFieldsStoredFieldsVisitorAnonymousClass : StoredFieldVisitor
         {
             private readonly IList<Field> fields;
             private readonly string fieldName;
-            public GetFieldsStoredFieldsVisitorAnonymousHelper(IList<Field> fields, string fieldName)
+            public GetFieldsStoredFieldsVisitorAnonymousClass(IList<Field> fields, string fieldName)
             {
                 this.fields = fields;
                 this.fieldName = fieldName;
diff --git a/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs b/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs
index 692cd6d..e2afbce 100644
--- a/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs
+++ b/src/Lucene.Net.Highlighter/VectorHighlight/FieldPhraseList.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Support;
+using J2N.Numerics;
+using Lucene.Net.Support;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
@@ -415,20 +416,10 @@
                 result = prime * result + StartOffset;
                 result = prime * result + EndOffset;
                 long b = J2N.BitConversion.DoubleToInt64Bits(Boost);
-                result = prime * result + (int)(b ^ TripleShift(b, 32));
+                result = prime * result + (int)(b ^ b.TripleShift(32));
                 return result;
             }
 
-            // LUCENENET NOTE: For some reason the standard way of correcting the >>>
-            // operator (int)((uint)b >> 32) didn't work here. Got this solution from http://stackoverflow.com/a/6625912
-            // and it works just like in Java.
-            private static long TripleShift(long n, int s)
-            {
-                if (n >= 0)
-                    return n >> s;
-                return (n >> s) + (2 << ~s);
-            }
-
             public override bool Equals(object obj)
             {
                 if (this == obj)
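> Editor's note: the hunk above also removes the local `TripleShift(long, int)` workaround, since `GetHashCode()` can now fold a `double` boost through J2N directly. A hedged sketch of that fold, using only calls that appear in the patch (`J2N.BitConversion.DoubleToInt64Bits`, `TripleShift`); the wrapper names are illustrative:

```csharp
// Minimal sketch: reinterpret a double as 64 bits and XOR the high word into the low word.
using System;
using J2N.Numerics;

internal static class BoostHashDemo
{
    internal static int FoldDoubleBits(double boost)
    {
        long b = J2N.BitConversion.DoubleToInt64Bits(boost);
        return (int)(b ^ b.TripleShift(32)); // same fold used in the hunk above
    }

    internal static void Main()
    {
        Console.WriteLine(FoldDoubleBits(1.0));
        Console.WriteLine(FoldDoubleBits(2.5));
    }
}
```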
diff --git a/src/Lucene.Net.Join/FakeScorer.cs b/src/Lucene.Net.Join/FakeScorer.cs
index 90e24ff..a089ccc 100644
--- a/src/Lucene.Net.Join/FakeScorer.cs
+++ b/src/Lucene.Net.Join/FakeScorer.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Search;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Search;
 using System;
 using System.Collections.Generic;
 
diff --git a/src/Lucene.Net.Join/FixedBitSetCachingWrapperFilter.cs b/src/Lucene.Net.Join/FixedBitSetCachingWrapperFilter.cs
index 25ea5cf..ebfa668 100644
--- a/src/Lucene.Net.Join/FixedBitSetCachingWrapperFilter.cs
+++ b/src/Lucene.Net.Join/FixedBitSetCachingWrapperFilter.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Util;
 
diff --git a/src/Lucene.Net.Join/JoinUtil.cs b/src/Lucene.Net.Join/JoinUtil.cs
index d360825..6034516 100644
--- a/src/Lucene.Net.Join/JoinUtil.cs
+++ b/src/Lucene.Net.Join/JoinUtil.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Search;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Search;
 using System;
 using System.IO;
 
diff --git a/src/Lucene.Net.Join/ScoreMode.cs b/src/Lucene.Net.Join/ScoreMode.cs
index ccf2232..bbac796 100644
--- a/src/Lucene.Net.Join/ScoreMode.cs
+++ b/src/Lucene.Net.Join/ScoreMode.cs
@@ -1,4 +1,6 @@
-namespace Lucene.Net.Join
+// Lucene version compatibility level 4.8.1
+
+namespace Lucene.Net.Join
 {
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
diff --git a/src/Lucene.Net.Join/TermsCollector.cs b/src/Lucene.Net.Join/TermsCollector.cs
index add6980..e744438 100644
--- a/src/Lucene.Net.Join/TermsCollector.cs
+++ b/src/Lucene.Net.Join/TermsCollector.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Util;
 
diff --git a/src/Lucene.Net.Join/TermsIncludingScoreQuery.cs b/src/Lucene.Net.Join/TermsIncludingScoreQuery.cs
index c28ca38..7725045 100644
--- a/src/Lucene.Net.Join/TermsIncludingScoreQuery.cs
+++ b/src/Lucene.Net.Join/TermsIncludingScoreQuery.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Util;
 using System;
@@ -116,16 +117,16 @@
         public override Weight CreateWeight(IndexSearcher searcher)
         {
             Weight originalWeight = _originalQuery.CreateWeight(searcher);
-            return new WeightAnonymousInnerClassHelper(this, originalWeight);
+            return new WeightAnonymousClass(this, originalWeight);
         }
 
-        private class WeightAnonymousInnerClassHelper : Weight
+        private class WeightAnonymousClass : Weight
         {
             private readonly TermsIncludingScoreQuery outerInstance;
 
             private readonly Weight originalWeight;
 
-            public WeightAnonymousInnerClassHelper(TermsIncludingScoreQuery outerInstance, Weight originalWeight)
+            public WeightAnonymousClass(TermsIncludingScoreQuery outerInstance, Weight originalWeight)
             {
                 this.outerInstance = outerInstance;
                 this.originalWeight = originalWeight;
diff --git a/src/Lucene.Net.Join/TermsQuery.cs b/src/Lucene.Net.Join/TermsQuery.cs
index 3d4f770..b2b3022 100644
--- a/src/Lucene.Net.Join/TermsQuery.cs
+++ b/src/Lucene.Net.Join/TermsQuery.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+// Lucene version compatibility level 4.8.1
+using System.Collections.Generic;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Util;
diff --git a/src/Lucene.Net.Join/TermsWithScoreCollector.cs b/src/Lucene.Net.Join/TermsWithScoreCollector.cs
index 13798dd..e79bc43 100644
--- a/src/Lucene.Net.Join/TermsWithScoreCollector.cs
+++ b/src/Lucene.Net.Join/TermsWithScoreCollector.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Support;
 using Lucene.Net.Util;
diff --git a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs
index ad0a6d3..31f3538 100644
--- a/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs
+++ b/src/Lucene.Net.Join/ToChildBlockJoinQuery.cs
@@ -1,10 +1,10 @@
-using Lucene.Net.Diagnostics;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
-using System.Diagnostics;
 
 namespace Lucene.Net.Join
 {
diff --git a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
index 79a4e7e..5a5db84 100644
--- a/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
+++ b/src/Lucene.Net.Join/ToParentBlockJoinCollector.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Search.Grouping;
@@ -7,7 +8,6 @@
 using System;
 using System.Collections.Concurrent;
 using System.Collections.Generic;
-using System.Diagnostics;
 using System.IO;
 
 namespace Lucene.Net.Join
diff --git a/src/Lucene.Net.Join/ToParentBlockJoinFieldComparator.cs b/src/Lucene.Net.Join/ToParentBlockJoinFieldComparator.cs
index 07843d4..54ea12b 100644
--- a/src/Lucene.Net.Join/ToParentBlockJoinFieldComparator.cs
+++ b/src/Lucene.Net.Join/ToParentBlockJoinFieldComparator.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Util;
 using System;
diff --git a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
index 611e26b..2517f90 100644
--- a/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
+++ b/src/Lucene.Net.Join/ToParentBlockJoinQuery.cs
@@ -1,10 +1,10 @@
-using Lucene.Net.Diagnostics;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Util;
 using System;
 using System.Collections.Generic;
-using System.Diagnostics;
 
 namespace Lucene.Net.Join
 {
diff --git a/src/Lucene.Net.Join/ToParentBlockJoinSortField.cs b/src/Lucene.Net.Join/ToParentBlockJoinSortField.cs
index 430c329..b12d30b 100644
--- a/src/Lucene.Net.Join/ToParentBlockJoinSortField.cs
+++ b/src/Lucene.Net.Join/ToParentBlockJoinSortField.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Search;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Search;
 
 namespace Lucene.Net.Join
 {
diff --git a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs
index c418ea2..6ff3c01 100644
--- a/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs
+++ b/src/Lucene.Net.Memory/MemoryIndex.MemoryIndexReader.cs
@@ -1,4 +1,5 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Search;
 using Lucene.Net.Search.Similarities;
@@ -106,14 +107,14 @@
 
                 public override IEnumerator<string> GetEnumerator()
                 {
-                    return new IteratorAnonymousInnerClassHelper(this);
+                    return new IteratorAnonymousClass(this);
                 }
 
-                private class IteratorAnonymousInnerClassHelper : IEnumerator<string>
+                private class IteratorAnonymousClass : IEnumerator<string>
                 {
                     private readonly MemoryFields outerInstance;
 
-                    public IteratorAnonymousInnerClassHelper(MemoryFields outerInstance)
+                    public IteratorAnonymousClass(MemoryFields outerInstance)
                     {
                         this.outerInstance = outerInstance;
                         upto = -1;
@@ -161,17 +162,17 @@
                         Info info = outerInstance.GetInfo(i);
                         info.SortTerms();
 
-                        return new TermsAnonymousInnerClassHelper(this, info);
+                        return new TermsAnonymousClass(this, info);
                     }
                 }
 
-                private class TermsAnonymousInnerClassHelper : Terms
+                private class TermsAnonymousClass : Terms
                 {
                     private readonly MemoryFields outerInstance;
 
                     private readonly MemoryIndex.Info info;
 
-                    public TermsAnonymousInnerClassHelper(MemoryFields outerInstance, MemoryIndex.Info info)
+                    public TermsAnonymousClass(MemoryFields outerInstance, MemoryIndex.Info info)
                     {
                         this.outerInstance = outerInstance;
                         this.info = info;
@@ -235,7 +236,7 @@
                     int mid; // LUCENENET: IDE0059: Remove unnecessary value assignment
                     while (low <= high)
                     {
-                        mid = (int)((uint)(low + high) >> 1);
+                        mid = (low + high).TripleShift(1);
                         hash.Get(ords[mid], bytesRef);
                         int cmp = comparer.Compare(bytesRef, b);
                         if (cmp < 0)
diff --git a/src/Lucene.Net.Memory/MemoryIndex.cs b/src/Lucene.Net.Memory/MemoryIndex.cs
index eaba454..f792302 100644
--- a/src/Lucene.Net.Memory/MemoryIndex.cs
+++ b/src/Lucene.Net.Memory/MemoryIndex.cs
@@ -277,12 +277,12 @@
                 throw new ArgumentException("keywords must not be null");
             }
 
-            return new TokenStreamAnonymousInnerClassHelper<T>(keywords);
+            return new TokenStreamAnonymousClass<T>(keywords);
         }
 
-        private sealed class TokenStreamAnonymousInnerClassHelper<T> : TokenStream
+        private sealed class TokenStreamAnonymousClass<T> : TokenStream
         {
-            public TokenStreamAnonymousInnerClassHelper(ICollection<T> keywords)
+            public TokenStreamAnonymousClass(ICollection<T> keywords)
             {
                 iter = keywords.GetEnumerator();
                 start = 0;
@@ -317,7 +317,7 @@
             }
 
             /// <summary>
-            /// Releases resources used by the <see cref="TokenStreamAnonymousInnerClassHelper{T}"/> and
+            /// Releases resources used by the <see cref="TokenStreamAnonymousClass{T}"/> and
             /// if overridden in a derived class, optionally releases unmanaged resources.
             /// </summary>
             /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources;
@@ -557,7 +557,7 @@
             try
             {
                 float[] scores = new float[1]; // inits to 0.0f (no match)
-                searcher.Search(query, new CollectorAnonymousInnerClassHelper(scores));
+                searcher.Search(query, new CollectorAnonymousClass(scores));
                 float score = scores[0];
                 return score;
             } // can never happen (RAMDirectory)
@@ -584,11 +584,11 @@
             }
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly float[] scores;
 
-            public CollectorAnonymousInnerClassHelper(float[] scores)
+            public CollectorAnonymousClass(float[] scores)
             {
                 this.scores = scores;
             }
diff --git a/src/Lucene.Net.Misc/Index/Sorter/BlockJoinComparatorSource.cs b/src/Lucene.Net.Misc/Index/Sorter/BlockJoinComparatorSource.cs
index cf41032..136b67f 100644
--- a/src/Lucene.Net.Misc/Index/Sorter/BlockJoinComparatorSource.cs
+++ b/src/Lucene.Net.Misc/Index/Sorter/BlockJoinComparatorSource.cs
@@ -95,11 +95,11 @@
 
             // NOTE: we could return parent ID as value but really our sort "value" is more complex...
             // So we throw UOE for now. At the moment you really should only use this at indexing time.
-            return new FieldComparerAnonymousInnerClassHelper(this, parentSlots,
+            return new FieldComparerAnonymousClass(this, parentSlots,
                 childSlots, parentReverseMul, parentComparers, childReverseMul, childComparers);
         }
 
-        private class FieldComparerAnonymousInnerClassHelper : FieldComparer<int?>
+        private class FieldComparerAnonymousClass : FieldComparer<int?>
         {
             private readonly BlockJoinComparerSource outerInstance;
 
@@ -110,7 +110,7 @@
             private readonly int[] childReverseMul;
             private readonly FieldComparer[] childComparers;
 
-            public FieldComparerAnonymousInnerClassHelper(BlockJoinComparerSource outerInstance,
+            public FieldComparerAnonymousClass(BlockJoinComparerSource outerInstance,
                 int[] parentSlots, int[] childSlots, int[] parentReverseMul, FieldComparer[] parentComparers,
                 int[] childReverseMul, FieldComparer[] childComparers)
             {
diff --git a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs
index a6abc79..ddc77a0 100644
--- a/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs
+++ b/src/Lucene.Net.Misc/Index/Sorter/Sorter.cs
@@ -209,16 +209,16 @@
             }
             oldToNew.Freeze();
 
-            return new DocMapAnonymousInnerClassHelper(maxDoc, newToOld, oldToNew);
+            return new DocMapAnonymousClass(maxDoc, newToOld, oldToNew);
         }
 
-        private class DocMapAnonymousInnerClassHelper : Sorter.DocMap
+        private class DocMapAnonymousClass : Sorter.DocMap
         {
             private readonly int maxDoc;
             private readonly MonotonicAppendingInt64Buffer newToOld;
             private readonly MonotonicAppendingInt64Buffer oldToNew;
 
-            public DocMapAnonymousInnerClassHelper(int maxDoc, MonotonicAppendingInt64Buffer newToOld, MonotonicAppendingInt64Buffer oldToNew)
+            public DocMapAnonymousClass(int maxDoc, MonotonicAppendingInt64Buffer newToOld, MonotonicAppendingInt64Buffer oldToNew)
             {
                 this.maxDoc = maxDoc;
                 this.newToOld = newToOld;
@@ -266,16 +266,16 @@
                 comparers[i].SetNextReader(reader.AtomicContext);
                 comparers[i].SetScorer(FAKESCORER);
             }
-            DocComparer comparer = new DocComparerAnonymousInnerClassHelper(reverseMul, comparers);
+            DocComparer comparer = new DocComparerAnonymousClass(reverseMul, comparers);
             return Sort(reader.MaxDoc, comparer);
         }
 
-        private class DocComparerAnonymousInnerClassHelper : DocComparer
+        private class DocComparerAnonymousClass : DocComparer
         {
             private readonly int[] reverseMul;
             private readonly FieldComparer[] comparers;
 
-            public DocComparerAnonymousInnerClassHelper(int[] reverseMul, FieldComparer[] comparers)
+            public DocComparerAnonymousClass(int[] reverseMul, FieldComparer[] comparers)
             {
                 this.reverseMul = reverseMul;
                 this.comparers = comparers;
@@ -321,11 +321,11 @@
             return ID;
         }
 
-        internal static readonly Scorer FAKESCORER = new ScorerAnonymousInnerClassHelper();
+        internal static readonly Scorer FAKESCORER = new ScorerAnonymousClass();
 
-        private class ScorerAnonymousInnerClassHelper : Scorer
+        private class ScorerAnonymousClass : Scorer
         {
-            public ScorerAnonymousInnerClassHelper() 
+            public ScorerAnonymousClass() 
                 : base(null)
             {
             }
diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs
index 4bfefda..ff93a6d 100644
--- a/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs
+++ b/src/Lucene.Net.Misc/Index/Sorter/SortingAtomicReader.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+using J2N.Numerics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Search;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
@@ -116,16 +117,16 @@
                 {
                     return null;
                 }
-                return new BitsAnonymousInnerClassHelper(this, liveDocs);
+                return new BitsAnonymousClass(this, liveDocs);
             }
 
-            private class BitsAnonymousInnerClassHelper : IBits
+            private class BitsAnonymousClass : IBits
             {
                 private readonly SortingTermsEnum outerInstance;
 
                 private readonly IBits liveDocs;
 
-                public BitsAnonymousInnerClassHelper(SortingTermsEnum outerInstance, IBits liveDocs)
+                public BitsAnonymousClass(SortingTermsEnum outerInstance, IBits liveDocs)
                 {
                     this.outerInstance = outerInstance;
                     this.liveDocs = liveDocs;
@@ -709,7 +710,7 @@
             public override int NextPosition()
             {
                 int token = postingInput.ReadVInt32();
-                pos += (int)((uint)token >> 1);
+                pos += token.TripleShift(1);
                 if (storeOffsets)
                 {
                     startOffset = endOffset + postingInput.ReadVInt32();
diff --git a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs
index ceae5b6..65a40d0 100644
--- a/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs
+++ b/src/Lucene.Net.Misc/Index/Sorter/SortingMergePolicy.cs
@@ -136,17 +136,17 @@
                 }
                 if (Debugging.AssertsEnabled) Debugging.Assert(mergeState.DocMaps.Length == 1); // we returned a singleton reader
                 MonotonicAppendingInt64Buffer deletes = GetDeletes(unsortedReaders);
-                return new DocMapAnonymousInnerClassHelper(this, mergeState, deletes);
+                return new DocMapAnonymousClass(this, mergeState, deletes);
             }
 
-            private class DocMapAnonymousInnerClassHelper : MergePolicy.DocMap
+            private class DocMapAnonymousClass : MergePolicy.DocMap
             {
                 private readonly SortingOneMerge outerInstance;
 
                 private readonly MergeState mergeState;
                 private readonly MonotonicAppendingInt64Buffer deletes;
 
-                public DocMapAnonymousInnerClassHelper(SortingOneMerge outerInstance, MergeState mergeState, MonotonicAppendingInt64Buffer deletes)
+                public DocMapAnonymousClass(SortingOneMerge outerInstance, MergeState mergeState, MonotonicAppendingInt64Buffer deletes)
                 {
                     this.outerInstance = outerInstance;
                     this.mergeState = mergeState;
diff --git a/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs b/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs
index 23fb938..75f9cd6 100644
--- a/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs
+++ b/src/Lucene.Net.Misc/Util/Fst/UpToTwoPositiveIntOutputs.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+using J2N.Numerics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
 using System;
 using System.Runtime.CompilerServices;
@@ -95,7 +96,7 @@
 
             public override int GetHashCode()
             {
-                return (int)((first ^ ((long)((ulong)first >> 32))) ^ (second ^ (second >> 32)));
+                return (int)((first ^ (first.TripleShift(32))) ^ (second ^ (second >> 32)));
             }
         }
 
@@ -239,7 +240,7 @@
             if ((code & 1) == 0)
             {
                 // single long
-                long v = (long)((ulong)code >> 1);
+                long v = code.TripleShift(1);
                 if (v == 0)
                 {
                     return NO_OUTPUT;
@@ -252,7 +253,7 @@
             else
             {
                 // two longs
-                long first = (long)((ulong)code >> 1);
+                long first = code.TripleShift(1);
                 long second = @in.ReadVInt64();
                 return new TwoInt64s(first, second);
             }
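
Note on the shift rewrites above: the hunks in this and the surrounding files add `using J2N.Numerics;` and replace the manual unsigned right-shift idiom (cast to `uint`/`ulong`, shift, cast back) with the `TripleShift` extension method, which mirrors Java's `>>>` operator. The two spellings are bit-for-bit equivalent; the following is a minimal, self-contained sketch of that equivalence (illustrative only, with hypothetical values — not code from this changeset):

```csharp
// Sketch: TripleShift (J2N.Numerics) performs an unsigned right shift,
// matching the (int)((uint)x >> n) / (long)((ulong)x >> n) idioms it replaces.
using System;
using J2N.Numerics;

public static class TripleShiftSketch
{
    public static void Main()
    {
        int token = -123456789;
        int viaCast = (int)((uint)token >> 1);     // old idiom on the removed lines
        int viaTripleShift = token.TripleShift(1); // new idiom on the added lines
        Console.WriteLine(viaCast == viaTripleShift); // True

        long bits = -987654321012345678L;
        long foldedViaCast = bits ^ (long)((ulong)bits >> 32);
        long foldedViaTripleShift = bits ^ bits.TripleShift(32);
        Console.WriteLine(foldedViaCast == foldedViaTripleShift); // True
    }
}
```
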
diff --git a/src/Lucene.Net.Queries/BoostingQuery.cs b/src/Lucene.Net.Queries/BoostingQuery.cs
index c191150..7a6c142 100644
--- a/src/Lucene.Net.Queries/BoostingQuery.cs
+++ b/src/Lucene.Net.Queries/BoostingQuery.cs
@@ -53,32 +53,32 @@
 
         public override Query Rewrite(IndexReader reader)
         {
-            return new BooleanQueryAnonymousInnerClassHelper(this)
+            return new BooleanQueryAnonymousClass(this)
             {
                 { match, Occur.MUST },
                 { context, Occur.SHOULD }
             };
         }
 
-        private class BooleanQueryAnonymousInnerClassHelper : BooleanQuery
+        private class BooleanQueryAnonymousClass : BooleanQuery
         {
             private readonly BoostingQuery outerInstance;
 
-            public BooleanQueryAnonymousInnerClassHelper(BoostingQuery outerInstance)
+            public BooleanQueryAnonymousClass(BoostingQuery outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
 
             public override Weight CreateWeight(IndexSearcher searcher)
             {
-                return new BooleanWeightAnonymousInnerClassHelper(this, searcher);
+                return new BooleanWeightAnonymousClass(this, searcher);
             }
 
-            private class BooleanWeightAnonymousInnerClassHelper : BooleanWeight
+            private class BooleanWeightAnonymousClass : BooleanWeight
             {
-                private readonly BooleanQueryAnonymousInnerClassHelper outerInstance;
+                private readonly BooleanQueryAnonymousClass outerInstance;
 
-                public BooleanWeightAnonymousInnerClassHelper(BooleanQueryAnonymousInnerClassHelper outerInstance, IndexSearcher searcher)
+                public BooleanWeightAnonymousClass(BooleanQueryAnonymousClass outerInstance, IndexSearcher searcher)
                     : base(outerInstance, searcher, false)
                 {
                     this.outerInstance = outerInstance;
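
The renames in this file are representative of the whole changeset: types standing in for Java anonymous inner classes drop the `...AnonymousInnerClassHelper` suffix in favor of `...AnonymousClass`, while the pattern itself — a named private nested class whose constructor captures the enclosing instance in an `outerInstance` field — is unchanged. A minimal sketch of that pattern, using hypothetical types rather than Lucene.NET code:

```csharp
// Sketch of the naming convention only; OuterType and the comparer below are
// hypothetical examples, not types from Lucene.NET.
using System.Collections.Generic;

public class OuterType
{
    private readonly bool descending;

    public OuterType(bool descending) => this.descending = descending;

    public IComparer<int> CreateComparer() => new ComparerAnonymousClass(this);

    // Under the old convention this type would have been named
    // ComparerAnonymousInnerClassHelper; only the name changes in this diff.
    private class ComparerAnonymousClass : IComparer<int>
    {
        private readonly OuterType outerInstance; // captured enclosing instance

        public ComparerAnonymousClass(OuterType outerInstance)
        {
            this.outerInstance = outerInstance;
        }

        public int Compare(int x, int y)
            => outerInstance.descending ? y.CompareTo(x) : x.CompareTo(y);
    }
}
```

For example, `new OuterType(descending: true).CreateComparer()` would yield a comparer that sorts in reverse, with the nested class reaching back into the enclosing instance exactly as the renamed classes in this diff do.
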
diff --git a/src/Lucene.Net.Queries/Function/BoostedQuery.cs b/src/Lucene.Net.Queries/Function/BoostedQuery.cs
index db79ea9..844d02d 100644
--- a/src/Lucene.Net.Queries/Function/BoostedQuery.cs
+++ b/src/Lucene.Net.Queries/Function/BoostedQuery.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+using J2N.Numerics;
+using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Util;
 using System.Collections;
@@ -216,9 +217,9 @@
         public override int GetHashCode()
         {
             int h = q.GetHashCode();
-            h ^= (h << 17) | ((int)((uint)h >> 16));
+            h ^= (h << 17) | (h.TripleShift(16));
             h += boostVal.GetHashCode();
-            h ^= (h << 8) | ((int)((uint)h >> 25));
+            h ^= (h << 8) | (h.TripleShift(25));
             h += J2N.BitConversion.SingleToInt32Bits(Boost);
             return h;
         }
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/ByteFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/ByteFieldSource.cs
index 9628298..5279ffe 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/ByteFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/ByteFieldSource.cs
@@ -53,15 +53,15 @@
         {
             FieldCache.Bytes arr = m_cache.GetBytes(readerContext.AtomicReader, m_field, parser, false);
 
-            return new FunctionValuesAnonymousInnerClassHelper(this, arr);
+            return new FunctionValuesAnonymousClass(this, arr);
         }
 
-        private class FunctionValuesAnonymousInnerClassHelper : FunctionValues
+        private class FunctionValuesAnonymousClass : FunctionValues
         {
             private readonly ByteFieldSource outerInstance;
             private readonly FieldCache.Bytes arr;
 
-            public FunctionValuesAnonymousInnerClassHelper(ByteFieldSource outerInstance, FieldCache.Bytes arr)
+            public FunctionValuesAnonymousClass(ByteFieldSource outerInstance, FieldCache.Bytes arr)
             {
                 this.outerInstance = outerInstance;
                 this.arr = arr;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/BytesRefFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/BytesRefFieldSource.cs
index 2e137c9..1903796 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/BytesRefFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/BytesRefFieldSource.cs
@@ -42,22 +42,22 @@
             {
                 BinaryDocValues binaryValues = Search.FieldCache.DEFAULT.GetTerms(readerContext.AtomicReader, m_field, true);
                 IBits docsWithField = Search.FieldCache.DEFAULT.GetDocsWithField(readerContext.AtomicReader, m_field);
-                return new FunctionValuesAnonymousInnerClassHelper(this, binaryValues, docsWithField);
+                return new FunctionValuesAnonymousClass(this, binaryValues, docsWithField);
             }
             else
             {
-                return new DocTermsIndexDocValuesAnonymousInnerClassHelper(this, this, readerContext, m_field);
+                return new DocTermsIndexDocValuesAnonymousClass(this, this, readerContext, m_field);
             }
         }
 
-        private class FunctionValuesAnonymousInnerClassHelper : FunctionValues
+        private class FunctionValuesAnonymousClass : FunctionValues
         {
             private readonly BytesRefFieldSource outerInstance;
 
             private readonly BinaryDocValues binaryValues;
             private readonly IBits docsWithField;
 
-            public FunctionValuesAnonymousInnerClassHelper(BytesRefFieldSource outerInstance, BinaryDocValues binaryValues, IBits docsWithField)
+            public FunctionValuesAnonymousClass(BytesRefFieldSource outerInstance, BinaryDocValues binaryValues, IBits docsWithField)
             {
                 this.outerInstance = outerInstance;
                 this.binaryValues = binaryValues;
@@ -93,11 +93,11 @@
             }
         }
 
-        private class DocTermsIndexDocValuesAnonymousInnerClassHelper : DocTermsIndexDocValues
+        private class DocTermsIndexDocValuesAnonymousClass : DocTermsIndexDocValues
         {
             private readonly BytesRefFieldSource outerInstance;
 
-            public DocTermsIndexDocValuesAnonymousInnerClassHelper(BytesRefFieldSource outerInstance, BytesRefFieldSource @this, AtomicReaderContext readerContext, string field)
+            public DocTermsIndexDocValuesAnonymousClass(BytesRefFieldSource outerInstance, BytesRefFieldSource @this, AtomicReaderContext readerContext, string field)
                 : base(@this, readerContext, field)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/ConstValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/ConstValueSource.cs
index d448348..607bd57 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/ConstValueSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/ConstValueSource.cs
@@ -42,14 +42,14 @@
 
         public override FunctionValues GetValues(IDictionary context, AtomicReaderContext readerContext)
         {
-            return new SingleDocValuesAnonymousInnerClassHelper(this, this);
+            return new SingleDocValuesAnonymousClass(this, this);
         }
 
-        private class SingleDocValuesAnonymousInnerClassHelper : SingleDocValues
+        private class SingleDocValuesAnonymousClass : SingleDocValues
         {
             private readonly ConstValueSource outerInstance;
 
-            public SingleDocValuesAnonymousInnerClassHelper(ConstValueSource outerInstance, ConstValueSource @this)
+            public SingleDocValuesAnonymousClass(ConstValueSource outerInstance, ConstValueSource @this)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/DefFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/DefFunction.cs
index 40cf6b9..b12c4c8 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/DefFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/DefFunction.cs
@@ -39,12 +39,12 @@
 
         public override FunctionValues GetValues(IDictionary fcontext, AtomicReaderContext readerContext)
         {
-            return new ValuesAnonymousInnerClassHelper(this, ValsArr(m_sources, fcontext, readerContext));
+            return new ValuesAnonymousClass(this, ValsArr(m_sources, fcontext, readerContext));
         }
 
-        private class ValuesAnonymousInnerClassHelper : Values
+        private class ValuesAnonymousClass : Values
         {
-            public ValuesAnonymousInnerClassHelper(DefFunction outerInstance, FunctionValues[] valsArr)
+            public ValuesAnonymousClass(DefFunction outerInstance, FunctionValues[] valsArr)
                 : base(outerInstance, valsArr)
             {
                 upto = valsArr.Length - 1;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/DoubleConstValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/DoubleConstValueSource.cs
index 9353af8..f7b8d02 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/DoubleConstValueSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/DoubleConstValueSource.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+using J2N.Numerics;
+using Lucene.Net.Index;
 using Lucene.Net.Queries.Function.DocValues;
 using System;
 using System.Collections;
@@ -46,14 +47,14 @@
 
         public override FunctionValues GetValues(IDictionary context, AtomicReaderContext readerContext)
         {
-            return new DoubleDocValuesAnonymousInnerClassHelper(this, this);
+            return new DoubleDocValuesAnonymousClass(this, this);
         }
 
-        private class DoubleDocValuesAnonymousInnerClassHelper : DoubleDocValues
+        private class DoubleDocValuesAnonymousClass : DoubleDocValues
         {
             private readonly DoubleConstValueSource outerInstance;
 
-            public DoubleDocValuesAnonymousInnerClassHelper(DoubleConstValueSource outerInstance, DoubleConstValueSource @this)
+            public DoubleDocValuesAnonymousClass(DoubleConstValueSource outerInstance, DoubleConstValueSource @this)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
@@ -107,7 +108,7 @@
         public override int GetHashCode()
         {
             long bits = J2N.BitConversion.DoubleToRawInt64Bits(constant);
-            return (int)(bits ^ ((long)((ulong)bits >> 32)));
+            return (int)(bits ^ (bits.TripleShift(32)));
         }
 
         public override bool Equals(object o)
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/DoubleFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/DoubleFieldSource.cs
index d721773..a823e52 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/DoubleFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/DoubleFieldSource.cs
@@ -52,15 +52,15 @@
         {
             var arr = m_cache.GetDoubles(readerContext.AtomicReader, m_field, m_parser, true);
             var valid = m_cache.GetDocsWithField(readerContext.AtomicReader, m_field);
-            return new DoubleDocValuesAnonymousInnerClassHelper(this, arr, valid);
+            return new DoubleDocValuesAnonymousClass(this, arr, valid);
         }
 
-        private class DoubleDocValuesAnonymousInnerClassHelper : DoubleDocValues
+        private class DoubleDocValuesAnonymousClass : DoubleDocValues
         {
             private readonly FieldCache.Doubles arr;
             private readonly IBits valid;
 
-            public DoubleDocValuesAnonymousInnerClassHelper(DoubleFieldSource @this, FieldCache.Doubles arr, IBits valid)
+            public DoubleDocValuesAnonymousClass(DoubleFieldSource @this, FieldCache.Doubles arr, IBits valid)
                 : base(@this)
             {
                 this.arr = arr;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/DualFloatFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/DualFloatFunction.cs
index d46128e..fc69e47 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/DualFloatFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/DualFloatFunction.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+using J2N.Numerics;
+using Lucene.Net.Index;
 using Lucene.Net.Queries.Function.DocValues;
 using Lucene.Net.Search;
 using System.Collections;
@@ -53,17 +54,17 @@
         {
             FunctionValues aVals = m_a.GetValues(context, readerContext);
             FunctionValues bVals = m_b.GetValues(context, readerContext);
-            return new SingleDocValuesAnonymousInnerClassHelper(this, this, aVals, bVals);
+            return new SingleDocValuesAnonymousClass(this, this, aVals, bVals);
         }
 
-        private class SingleDocValuesAnonymousInnerClassHelper : SingleDocValues
+        private class SingleDocValuesAnonymousClass : SingleDocValues
         {
             private readonly DualSingleFunction outerInstance;
 
             private readonly FunctionValues aVals;
             private readonly FunctionValues bVals;
 
-            public SingleDocValuesAnonymousInnerClassHelper(DualSingleFunction outerInstance, DualSingleFunction @this, FunctionValues aVals, FunctionValues bVals)
+            public SingleDocValuesAnonymousClass(DualSingleFunction outerInstance, DualSingleFunction @this, FunctionValues aVals, FunctionValues bVals)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
@@ -94,9 +95,9 @@
         public override int GetHashCode()
         {
             int h = m_a.GetHashCode();
-            h ^= (h << 13) | ((int)((uint)h >> 20));
+            h ^= (h << 13) | (h.TripleShift(20));
             h += m_b.GetHashCode();
-            h ^= (h << 23) | ((int)((uint)h >> 10));
+            h ^= (h << 23) | (h.TripleShift(10));
             h += Name.GetHashCode();
             return h;
         }
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs
index b95cf8d..c1dc8ef 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/EnumFieldSource.cs
@@ -110,17 +110,17 @@
             var arr = m_cache.GetInt32s(readerContext.AtomicReader, m_field, parser, true);
             var valid = m_cache.GetDocsWithField(readerContext.AtomicReader, m_field);
 
-            return new Int32DocValuesAnonymousInnerClassHelper(this, this, arr, valid);
+            return new Int32DocValuesAnonymousClass(this, this, arr, valid);
         }
 
-        private class Int32DocValuesAnonymousInnerClassHelper : Int32DocValues
+        private class Int32DocValuesAnonymousClass : Int32DocValues
         {
             private readonly EnumFieldSource outerInstance;
 
             private readonly FieldCache.Int32s arr;
             private readonly IBits valid;
 
-            public Int32DocValuesAnonymousInnerClassHelper(EnumFieldSource outerInstance, EnumFieldSource @this, FieldCache.Int32s arr, IBits valid)
+            public Int32DocValuesAnonymousClass(EnumFieldSource outerInstance, EnumFieldSource @this, FieldCache.Int32s arr, IBits valid)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/FloatFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/FloatFieldSource.cs
index ffb4f8c..f09a127 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/FloatFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/FloatFieldSource.cs
@@ -54,15 +54,15 @@
         {
             var arr = m_cache.GetSingles(readerContext.AtomicReader, m_field, m_parser, true);
             var valid = m_cache.GetDocsWithField(readerContext.AtomicReader, m_field);
-            return new SingleDocValuesAnonymousInnerClassHelper(this, arr, valid);
+            return new SingleDocValuesAnonymousClass(this, arr, valid);
         }
 
-        private class SingleDocValuesAnonymousInnerClassHelper : SingleDocValues
+        private class SingleDocValuesAnonymousClass : SingleDocValues
         {
             private readonly FieldCache.Singles arr;
             private readonly IBits valid;
 
-            public SingleDocValuesAnonymousInnerClassHelper(SingleFieldSource @this, FieldCache.Singles arr, IBits valid)
+            public SingleDocValuesAnonymousClass(SingleFieldSource @this, FieldCache.Singles arr, IBits valid)
                 : base(@this)
             {
                 this.arr = arr;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/IfFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/IfFunction.cs
index af095f3..2c13413 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/IfFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/IfFunction.cs
@@ -45,16 +45,16 @@
             FunctionValues trueVals = trueSource.GetValues(context, readerContext);
             FunctionValues falseVals = falseSource.GetValues(context, readerContext);
 
-            return new FunctionValuesAnonymousInnerClassHelper(ifVals, trueVals, falseVals);
+            return new FunctionValuesAnonymousClass(ifVals, trueVals, falseVals);
         }
 
-        private class FunctionValuesAnonymousInnerClassHelper : FunctionValues
+        private class FunctionValuesAnonymousClass : FunctionValues
         {
             private readonly FunctionValues ifVals;
             private readonly FunctionValues trueVals;
             private readonly FunctionValues falseVals;
 
-            public FunctionValuesAnonymousInnerClassHelper(FunctionValues ifVals, FunctionValues trueVals, FunctionValues falseVals)
+            public FunctionValuesAnonymousClass(FunctionValues ifVals, FunctionValues trueVals, FunctionValues falseVals)
             {
                 this.ifVals = ifVals;
                 this.trueVals = trueVals;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/IntFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/IntFieldSource.cs
index 24a3989..ac11ed0 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/IntFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/IntFieldSource.cs
@@ -57,17 +57,17 @@
             FieldCache.Int32s arr = m_cache.GetInt32s(readerContext.AtomicReader, m_field, parser, true);
             IBits valid = m_cache.GetDocsWithField(readerContext.AtomicReader, m_field);
 
-            return new Int32DocValuesAnonymousInnerClassHelper(this, this, arr, valid);
+            return new Int32DocValuesAnonymousClass(this, this, arr, valid);
         }
 
-        private class Int32DocValuesAnonymousInnerClassHelper : Int32DocValues
+        private class Int32DocValuesAnonymousClass : Int32DocValues
         {
             private readonly Int32FieldSource outerInstance;
 
             private readonly FieldCache.Int32s arr;
             private readonly IBits valid;
 
-            public Int32DocValuesAnonymousInnerClassHelper(Int32FieldSource outerInstance, Int32FieldSource @this, FieldCache.Int32s arr, IBits valid)
+            public Int32DocValuesAnonymousClass(Int32FieldSource outerInstance, Int32FieldSource @this, FieldCache.Int32s arr, IBits valid)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/JoinDocFreqValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/JoinDocFreqValueSource.cs
index cc6288d..9054c0d 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/JoinDocFreqValueSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/JoinDocFreqValueSource.cs
@@ -54,17 +54,17 @@
             Terms t = MultiFields.GetTerms(top, m_qfield);
             TermsEnum termsEnum = t == null ? TermsEnum.EMPTY : t.GetEnumerator();
 
-            return new Int32DocValuesAnonymousInnerClassHelper(this, this, terms, termsEnum);
+            return new Int32DocValuesAnonymousClass(this, this, terms, termsEnum);
         }
 
-        private class Int32DocValuesAnonymousInnerClassHelper : Int32DocValues
+        private class Int32DocValuesAnonymousClass : Int32DocValues
         {
             private readonly JoinDocFreqValueSource outerInstance;
 
             private readonly BinaryDocValues terms;
             private readonly TermsEnum termsEnum;
 
-            public Int32DocValuesAnonymousInnerClassHelper(JoinDocFreqValueSource outerInstance, JoinDocFreqValueSource @this, BinaryDocValues terms, TermsEnum termsEnum)
+            public Int32DocValuesAnonymousClass(JoinDocFreqValueSource outerInstance, JoinDocFreqValueSource @this, BinaryDocValues terms, TermsEnum termsEnum)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/LinearFloatFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/LinearFloatFunction.cs
index 64ff15a..2a08aa6 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/LinearFloatFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/LinearFloatFunction.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+using J2N.Numerics;
+using Lucene.Net.Index;
 using Lucene.Net.Queries.Function.DocValues;
 using Lucene.Net.Search;
 using System.Collections;
@@ -52,15 +53,15 @@
         public override FunctionValues GetValues(IDictionary context, AtomicReaderContext readerContext)
         {
             FunctionValues vals = m_source.GetValues(context, readerContext);
-            return new SingleDocValuesAnonymousInnerClassHelper(this, this, vals);
+            return new SingleDocValuesAnonymousClass(this, this, vals);
         }
 
-        private class SingleDocValuesAnonymousInnerClassHelper : SingleDocValues
+        private class SingleDocValuesAnonymousClass : SingleDocValues
         {
             private readonly LinearSingleFunction outerInstance;
             private readonly FunctionValues vals;
 
-            public SingleDocValuesAnonymousInnerClassHelper(LinearSingleFunction outerInstance, LinearSingleFunction @this, FunctionValues vals)
+            public SingleDocValuesAnonymousClass(LinearSingleFunction outerInstance, LinearSingleFunction @this, FunctionValues vals)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
@@ -93,9 +94,9 @@
         public override int GetHashCode()
         {
             int h = J2N.BitConversion.SingleToInt32Bits(m_slope);
-            h = ((int)((uint)h >> 2)) | (h << 30);
+            h = (h.TripleShift(2)) | (h << 30);
             h += J2N.BitConversion.SingleToInt32Bits(m_intercept);
-            h ^= (h << 14) | ((int)((uint)h >> 19));
+            h ^= (h << 14) | (h.TripleShift(19));
             return h + m_source.GetHashCode();
         }
 
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/LiteralValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/LiteralValueSource.cs
index 9de611d..2dd7b38 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/LiteralValueSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/LiteralValueSource.cs
@@ -44,14 +44,14 @@
         public override FunctionValues GetValues(IDictionary context, AtomicReaderContext readerContext)
         {
 
-            return new StrDocValuesAnonymousInnerClassHelper(this, this);
+            return new StrDocValuesAnonymousClass(this, this);
         }
 
-        private class StrDocValuesAnonymousInnerClassHelper : StrDocValues
+        private class StrDocValuesAnonymousClass : StrDocValues
         {
             private readonly LiteralValueSource outerInstance;
 
-            public StrDocValuesAnonymousInnerClassHelper(LiteralValueSource outerInstance, LiteralValueSource @this)
+            public StrDocValuesAnonymousClass(LiteralValueSource outerInstance, LiteralValueSource @this)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/LongFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/LongFieldSource.cs
index b9882db..3d6653c 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/LongFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/LongFieldSource.cs
@@ -80,17 +80,17 @@
         {
             var arr = m_cache.GetInt64s(readerContext.AtomicReader, m_field, m_parser, true);
             var valid = m_cache.GetDocsWithField(readerContext.AtomicReader, m_field);
-            return new Int64DocValuesAnonymousInnerClassHelper(this, this, arr, valid);
+            return new Int64DocValuesAnonymousClass(this, this, arr, valid);
         }
 
-        private class Int64DocValuesAnonymousInnerClassHelper : Int64DocValues
+        private class Int64DocValuesAnonymousClass : Int64DocValues
         {
             private readonly Int64FieldSource outerInstance;
 
             private readonly FieldCache.Int64s arr;
             private readonly IBits valid;
 
-            public Int64DocValuesAnonymousInnerClassHelper(Int64FieldSource outerInstance, Int64FieldSource @this, FieldCache.Int64s arr, IBits valid)
+            public Int64DocValuesAnonymousClass(Int64FieldSource outerInstance, Int64FieldSource @this, FieldCache.Int64s arr, IBits valid)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/MultiBoolFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/MultiBoolFunction.cs
index ff5a787..19bf4fe 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/MultiBoolFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/MultiBoolFunction.cs
@@ -51,16 +51,16 @@
                 vals[i++] = source.GetValues(context, readerContext);
             }
 
-            return new BoolDocValuesAnonymousInnerClassHelper(this, this, vals);
+            return new BoolDocValuesAnonymousClass(this, this, vals);
         }
 
-        private class BoolDocValuesAnonymousInnerClassHelper : BoolDocValues
+        private class BoolDocValuesAnonymousClass : BoolDocValues
         {
             private readonly MultiBoolFunction outerInstance;
 
             private readonly FunctionValues[] vals;
 
-            public BoolDocValuesAnonymousInnerClassHelper(MultiBoolFunction outerInstance, MultiBoolFunction @this, FunctionValues[] vals)
+            public BoolDocValuesAnonymousClass(MultiBoolFunction outerInstance, MultiBoolFunction @this, FunctionValues[] vals)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/MultiFloatFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/MultiFloatFunction.cs
index 156eda4..aaa0bbb 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/MultiFloatFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/MultiFloatFunction.cs
@@ -73,16 +73,16 @@
                 valsArr[i] = m_sources[i].GetValues(context, readerContext);
             }
 
-            return new SingleDocValuesAnonymousInnerClassHelper(this, this, valsArr);
+            return new SingleDocValuesAnonymousClass(this, this, valsArr);
         }
 
-        private class SingleDocValuesAnonymousInnerClassHelper : SingleDocValues
+        private class SingleDocValuesAnonymousClass : SingleDocValues
         {
             private readonly MultiSingleFunction outerInstance;
 
             private readonly FunctionValues[] valsArr;
 
-            public SingleDocValuesAnonymousInnerClassHelper(MultiSingleFunction outerInstance, MultiSingleFunction @this, FunctionValues[] valsArr)
+            public SingleDocValuesAnonymousClass(MultiSingleFunction outerInstance, MultiSingleFunction @this, FunctionValues[] valsArr)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/NormValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/NormValueSource.cs
index 2e0dac1..d8e6deb 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/NormValueSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/NormValueSource.cs
@@ -68,15 +68,15 @@
                 return new ConstDoubleDocValues(0.0, this);
             }
 
-            return new SingleDocValuesAnonymousInnerClassHelper(this, similarity, norms);
+            return new SingleDocValuesAnonymousClass(this, similarity, norms);
         }
 
-        private class SingleDocValuesAnonymousInnerClassHelper : SingleDocValues
+        private class SingleDocValuesAnonymousClass : SingleDocValues
         {
             private readonly TFIDFSimilarity similarity;
             private readonly NumericDocValues norms;
 
-            public SingleDocValuesAnonymousInnerClassHelper(NormValueSource @this, TFIDFSimilarity similarity, NumericDocValues norms)
+            public SingleDocValuesAnonymousClass(NormValueSource @this, TFIDFSimilarity similarity, NumericDocValues norms)
                 : base(@this)
             {
                 this.similarity = similarity;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/OrdFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/OrdFieldSource.cs
index c43c7bd..8bdc8d9 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/OrdFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/OrdFieldSource.cs
@@ -66,15 +66,15 @@
             IndexReader topReader = ReaderUtil.GetTopLevelContext(readerContext).Reader;
             AtomicReader r = SlowCompositeReaderWrapper.Wrap(topReader);
             SortedDocValues sindex = FieldCache.DEFAULT.GetTermsIndex(r, m_field);
-            return new Int32DocValuesAnonymousInnerClassHelper(this, off, sindex);
+            return new Int32DocValuesAnonymousClass(this, off, sindex);
         }
 
-        private sealed class Int32DocValuesAnonymousInnerClassHelper : Int32DocValues
+        private sealed class Int32DocValuesAnonymousClass : Int32DocValues
         {
             private readonly int off;
             private readonly SortedDocValues sindex;
 
-            public Int32DocValuesAnonymousInnerClassHelper(OrdFieldSource @this, int off, SortedDocValues sindex)
+            public Int32DocValuesAnonymousClass(OrdFieldSource @this, int off, SortedDocValues sindex)
                 : base(@this)
             {
                 this.off = off;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/RangeMapFloatFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/RangeMapFloatFunction.cs
index ccc7fd1..03389c6 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/RangeMapFloatFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/RangeMapFloatFunction.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+using J2N.Numerics;
+using Lucene.Net.Index;
 using Lucene.Net.Queries.Function.DocValues;
 using Lucene.Net.Search;
 using System.Collections;
@@ -62,10 +63,10 @@
             FunctionValues vals = m_source.GetValues(context, readerContext);
             FunctionValues targets = m_target.GetValues(context, readerContext);
             FunctionValues defaults = (this.m_defaultVal == null) ? null : m_defaultVal.GetValues(context, readerContext);
-            return new SingleDocValuesAnonymousInnerClassHelper(this, this, vals, targets, defaults);
+            return new SingleDocValuesAnonymousClass(this, this, vals, targets, defaults);
         }
 
-        private class SingleDocValuesAnonymousInnerClassHelper : SingleDocValues
+        private class SingleDocValuesAnonymousClass : SingleDocValues
         {
             private readonly RangeMapSingleFunction outerInstance;
 
@@ -73,7 +74,7 @@
             private readonly FunctionValues targets;
             private readonly FunctionValues defaults;
 
-            public SingleDocValuesAnonymousInnerClassHelper(RangeMapSingleFunction outerInstance, RangeMapSingleFunction @this, FunctionValues vals, FunctionValues targets, FunctionValues defaults)
+            public SingleDocValuesAnonymousClass(RangeMapSingleFunction outerInstance, RangeMapSingleFunction @this, FunctionValues vals, FunctionValues targets, FunctionValues defaults)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
@@ -104,9 +105,9 @@
         public override int GetHashCode()
         {
             int h = m_source.GetHashCode();
-            h ^= (h << 10) | ((int)((uint)h >> 23));
+            h ^= (h << 10) | (h.TripleShift(23));
             h += J2N.BitConversion.SingleToInt32Bits(m_min);
-            h ^= (h << 14) | ((int)((uint)h >> 19));
+            h ^= (h << 14) | (h.TripleShift(19));
             h += J2N.BitConversion.SingleToInt32Bits(m_max);
             h += m_target.GetHashCode();
             if (m_defaultVal != null)
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/ReciprocalFloatFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/ReciprocalFloatFunction.cs
index 6d16c43..e5bbb32 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/ReciprocalFloatFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/ReciprocalFloatFunction.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+using J2N.Numerics;
+using Lucene.Net.Index;
 using Lucene.Net.Queries.Function.DocValues;
 using Lucene.Net.Search;
 using System;
@@ -61,15 +62,15 @@
         public override FunctionValues GetValues(IDictionary context, AtomicReaderContext readerContext)
         {
             var vals = m_source.GetValues(context, readerContext);
-            return new SingleDocValuesAnonymousInnerClassHelper(this, this, vals);
+            return new SingleDocValuesAnonymousClass(this, this, vals);
         }
 
-        private class SingleDocValuesAnonymousInnerClassHelper : SingleDocValues
+        private class SingleDocValuesAnonymousClass : SingleDocValues
         {
             private readonly ReciprocalSingleFunction outerInstance;
             private readonly FunctionValues vals;
 
-            public SingleDocValuesAnonymousInnerClassHelper(ReciprocalSingleFunction outerInstance, ReciprocalSingleFunction @this, FunctionValues vals)
+            public SingleDocValuesAnonymousClass(ReciprocalSingleFunction outerInstance, ReciprocalSingleFunction @this, FunctionValues vals)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
@@ -110,7 +111,7 @@
         public override int GetHashCode()
         {
             int h = J2N.BitConversion.SingleToInt32Bits(m_a) + J2N.BitConversion.SingleToInt32Bits(m_m);
-            h ^= (h << 13) | ((int)((uint)h >> 20));
+            h ^= (h << 13) | (h.TripleShift(20));
             return h + (J2N.BitConversion.SingleToInt32Bits(m_b)) + m_source.GetHashCode();
         }
 
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/ReverseOrdFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/ReverseOrdFieldSource.cs
index 3b54938..75c5f48 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/ReverseOrdFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/ReverseOrdFieldSource.cs
@@ -69,16 +69,16 @@
             var sindex = FieldCache.DEFAULT.GetTermsIndex(r, field);
             var end = sindex.ValueCount;
 
-            return new Int32DocValuesAnonymousInnerClassHelper(this, off, sindex, end);
+            return new Int32DocValuesAnonymousClass(this, off, sindex, end);
         }
 
-        private class Int32DocValuesAnonymousInnerClassHelper : Int32DocValues
+        private class Int32DocValuesAnonymousClass : Int32DocValues
         {
             private readonly int off;
             private readonly SortedDocValues sindex;
             private readonly int end;
 
-            public Int32DocValuesAnonymousInnerClassHelper(ReverseOrdFieldSource @this, int off, SortedDocValues sindex, int end)
+            public Int32DocValuesAnonymousClass(ReverseOrdFieldSource @this, int off, SortedDocValues sindex, int end)
                 : base(@this)
             {
                 this.off = off;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/ScaleFloatFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/ScaleFloatFunction.cs
index e711b7a..e7d5584 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/ScaleFloatFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/ScaleFloatFunction.cs
@@ -117,10 +117,10 @@
             float maxSource = scaleInfo.MaxVal;
 
             var vals = m_source.GetValues(context, readerContext);
-            return new SingleDocValuesAnonymousInnerClassHelper(this, this, scale, minSource, maxSource, vals);
+            return new SingleDocValuesAnonymousClass(this, this, scale, minSource, maxSource, vals);
         }
 
-        private class SingleDocValuesAnonymousInnerClassHelper : SingleDocValues
+        private class SingleDocValuesAnonymousClass : SingleDocValues
         {
             private readonly ScaleSingleFunction outerInstance;
 
@@ -129,7 +129,7 @@
             private readonly float maxSource;
             private readonly FunctionValues vals;
 
-            public SingleDocValuesAnonymousInnerClassHelper(ScaleSingleFunction outerInstance, ScaleSingleFunction @this, float scale, float minSource, float maxSource, FunctionValues vals)
+            public SingleDocValuesAnonymousClass(ScaleSingleFunction outerInstance, ScaleSingleFunction @this, float scale, float minSource, float maxSource, FunctionValues vals)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/ShortFieldSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/ShortFieldSource.cs
index 0bc6504..4131649 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/ShortFieldSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/ShortFieldSource.cs
@@ -54,15 +54,15 @@
         public override FunctionValues GetValues(IDictionary context, AtomicReaderContext readerContext)
         {
             var arr = m_cache.GetInt16s(readerContext.AtomicReader, m_field, parser, false);
-            return new FunctionValuesAnonymousInnerClassHelper(this, arr);
+            return new FunctionValuesAnonymousClass(this, arr);
         }
 
-        private class FunctionValuesAnonymousInnerClassHelper : FunctionValues
+        private class FunctionValuesAnonymousClass : FunctionValues
         {
             private readonly Int16FieldSource outerInstance;
             private readonly FieldCache.Int16s arr;
 
-            public FunctionValuesAnonymousInnerClassHelper(Int16FieldSource outerInstance, FieldCache.Int16s arr)
+            public FunctionValuesAnonymousClass(Int16FieldSource outerInstance, FieldCache.Int16s arr)
             {
                 this.outerInstance = outerInstance;
                 this.arr = arr;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/SimpleBoolFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/SimpleBoolFunction.cs
index 9f609c7..0fc2a94 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/SimpleBoolFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/SimpleBoolFunction.cs
@@ -45,16 +45,16 @@
         public override FunctionValues GetValues(IDictionary context, AtomicReaderContext readerContext)
         {
             FunctionValues vals = m_source.GetValues(context, readerContext);
-            return new BoolDocValuesAnonymousInnerClassHelper(this, this, vals);
+            return new BoolDocValuesAnonymousClass(this, this, vals);
         }
 
-        private class BoolDocValuesAnonymousInnerClassHelper : BoolDocValues
+        private class BoolDocValuesAnonymousClass : BoolDocValues
         {
             private readonly SimpleBoolFunction outerInstance;
 
             private readonly FunctionValues vals;
 
-            public BoolDocValuesAnonymousInnerClassHelper(SimpleBoolFunction outerInstance, SimpleBoolFunction @this, FunctionValues vals)
+            public BoolDocValuesAnonymousClass(SimpleBoolFunction outerInstance, SimpleBoolFunction @this, FunctionValues vals)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/SimpleFloatFunction.cs b/src/Lucene.Net.Queries/Function/ValueSources/SimpleFloatFunction.cs
index f30fb7f..bf0a48c 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/SimpleFloatFunction.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/SimpleFloatFunction.cs
@@ -38,15 +38,15 @@
         public override FunctionValues GetValues(IDictionary context, AtomicReaderContext readerContext)
         {
             FunctionValues vals = m_source.GetValues(context, readerContext);
-            return new SingleDocValuesAnonymousInnerClassHelper(this, this, vals);
+            return new SingleDocValuesAnonymousClass(this, this, vals);
         }
 
-        private class SingleDocValuesAnonymousInnerClassHelper : SingleDocValues
+        private class SingleDocValuesAnonymousClass : SingleDocValues
         {
             private readonly SimpleSingleFunction outerInstance;
             private readonly FunctionValues vals;
 
-            public SingleDocValuesAnonymousInnerClassHelper(SimpleSingleFunction outerInstance, SimpleSingleFunction @this, FunctionValues vals)
+            public SingleDocValuesAnonymousClass(SimpleSingleFunction outerInstance, SimpleSingleFunction @this, FunctionValues vals)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/SumTotalTermFreqValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/SumTotalTermFreqValueSource.cs
index b6d427f..60c72a2 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/SumTotalTermFreqValueSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/SumTotalTermFreqValueSource.cs
@@ -78,14 +78,14 @@
                 }
             }
             long ttf = sumTotalTermFreq;
-            context[this] = new Int64DocValuesAnonymousInnerClassHelper(this, ttf);
+            context[this] = new Int64DocValuesAnonymousClass(this, ttf);
         }
 
-        private class Int64DocValuesAnonymousInnerClassHelper : Int64DocValues
+        private class Int64DocValuesAnonymousClass : Int64DocValues
         {
             private readonly long ttf;
 
-            public Int64DocValuesAnonymousInnerClassHelper(SumTotalTermFreqValueSource @this, long ttf)
+            public Int64DocValuesAnonymousClass(SumTotalTermFreqValueSource @this, long ttf)
                 : base(@this)
             {
                 this.ttf = ttf;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/TFValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/TFValueSource.cs
index 5b243d3..000403b 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/TFValueSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/TFValueSource.cs
@@ -54,17 +54,17 @@
                 throw new NotSupportedException("requires a TFIDFSimilarity (such as DefaultSimilarity)");
             }
 
-            return new SingleDocValuesAnonymousInnerClassHelper(this, this, terms, similarity);
+            return new SingleDocValuesAnonymousClass(this, this, terms, similarity);
         }
 
-        private class SingleDocValuesAnonymousInnerClassHelper : SingleDocValues
+        private class SingleDocValuesAnonymousClass : SingleDocValues
         {
             private readonly TFValueSource outerInstance;
 
             private readonly Terms terms;
             private readonly TFIDFSimilarity similarity;
 
-            public SingleDocValuesAnonymousInnerClassHelper(TFValueSource outerInstance, TFValueSource @this, Terms terms, TFIDFSimilarity similarity)
+            public SingleDocValuesAnonymousClass(TFValueSource outerInstance, TFValueSource @this, Terms terms, TFIDFSimilarity similarity)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
@@ -101,12 +101,12 @@
 
                 if (docs == null)
                 {
-                    docs = new DocsEnumAnonymousInnerClassHelper();
+                    docs = new DocsEnumAnonymousClass();
                 }
                 atDoc = -1;
             }
 
-            private class DocsEnumAnonymousInnerClassHelper : DocsEnum
+            private class DocsEnumAnonymousClass : DocsEnum
             {
                 public override int Freq => 0;
 
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/TermFreqValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/TermFreqValueSource.cs
index 7985232..64fec78 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/TermFreqValueSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/TermFreqValueSource.cs
@@ -46,16 +46,16 @@
             Fields fields = readerContext.AtomicReader.Fields;
             Terms terms = fields.GetTerms(m_indexedField);
 
-            return new Int32DocValuesAnonymousInnerClassHelper(this, this, terms);
+            return new Int32DocValuesAnonymousClass(this, this, terms);
         }
 
-        private class Int32DocValuesAnonymousInnerClassHelper : Int32DocValues
+        private class Int32DocValuesAnonymousClass : Int32DocValues
         {
             private readonly TermFreqValueSource outerInstance;
 
             private readonly Terms terms;
 
-            public Int32DocValuesAnonymousInnerClassHelper(TermFreqValueSource outerInstance, TermFreqValueSource @this, Terms terms)
+            public Int32DocValuesAnonymousClass(TermFreqValueSource outerInstance, TermFreqValueSource @this, Terms terms)
                 : base(@this)
             {
                 this.outerInstance = outerInstance;
@@ -91,12 +91,12 @@
 
                 if (docs == null)
                 {
-                    docs = new DocsEnumAnonymousInnerClassHelper();
+                    docs = new DocsEnumAnonymousClass();
                 }
                 atDoc = -1;
             }
 
-            private class DocsEnumAnonymousInnerClassHelper : DocsEnum
+            private class DocsEnumAnonymousClass : DocsEnum
             {
                 public override int Freq => 0;
 
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/TotalTermFreqValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/TotalTermFreqValueSource.cs
index c165255..38d6213 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/TotalTermFreqValueSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/TotalTermFreqValueSource.cs
@@ -75,14 +75,14 @@
                 }
             }
             var ttf = totalTermFreq;
-            context[this] = new Int64DocValuesAnonymousInnerClassHelper(this, ttf);
+            context[this] = new Int64DocValuesAnonymousClass(this, ttf);
         }
 
-        private class Int64DocValuesAnonymousInnerClassHelper : Int64DocValues
+        private class Int64DocValuesAnonymousClass : Int64DocValues
         {
             private readonly long ttf;
 
-            public Int64DocValuesAnonymousInnerClassHelper(TotalTermFreqValueSource @this, long ttf)
+            public Int64DocValuesAnonymousClass(TotalTermFreqValueSource @this, long ttf)
                 : base(@this)
             {
                 this.ttf = ttf;
diff --git a/src/Lucene.Net.Queries/Function/ValueSources/VectorValueSource.cs b/src/Lucene.Net.Queries/Function/ValueSources/VectorValueSource.cs
index ffa314b..3417447 100644
--- a/src/Lucene.Net.Queries/Function/ValueSources/VectorValueSource.cs
+++ b/src/Lucene.Net.Queries/Function/ValueSources/VectorValueSource.cs
@@ -53,7 +53,7 @@
             {
                 var x = m_sources[0].GetValues(context, readerContext);
                 var y = m_sources[1].GetValues(context, readerContext);
-                return new FunctionValuesAnonymousInnerClassHelper(this, x, y);
+                return new FunctionValuesAnonymousClass(this, x, y);
             }
 
             var valsArr = new FunctionValues[size];
@@ -62,17 +62,17 @@
                 valsArr[i] = m_sources[i].GetValues(context, readerContext);
             }
 
-            return new FunctionValuesAnonymousInnerClassHelper2(this, valsArr);
+            return new FunctionValuesAnonymousClass2(this, valsArr);
         }
 
-        private class FunctionValuesAnonymousInnerClassHelper : FunctionValues
+        private class FunctionValuesAnonymousClass : FunctionValues
         {
             private readonly VectorValueSource outerInstance;
 
             private readonly FunctionValues x;
             private readonly FunctionValues y;
 
-            public FunctionValuesAnonymousInnerClassHelper(VectorValueSource outerInstance, FunctionValues x, FunctionValues y)
+            public FunctionValuesAnonymousClass(VectorValueSource outerInstance, FunctionValues x, FunctionValues y)
             {
                 this.outerInstance = outerInstance;
                 this.x = x;
@@ -136,12 +136,12 @@
             }
         }
 
-        private class FunctionValuesAnonymousInnerClassHelper2 : FunctionValues
+        private class FunctionValuesAnonymousClass2 : FunctionValues
         {
             private readonly VectorValueSource outerInstance;
             private readonly FunctionValues[] valsArr;
 
-            public FunctionValuesAnonymousInnerClassHelper2(VectorValueSource outerInstance, FunctionValues[] valsArr)
+            public FunctionValuesAnonymousClass2(VectorValueSource outerInstance, FunctionValues[] valsArr)
             {
                 this.outerInstance = outerInstance;
                 this.valsArr = valsArr;
diff --git a/src/Lucene.Net.Queries/TermFilter.cs b/src/Lucene.Net.Queries/TermFilter.cs
index 2ae2605..9217e5f 100644
--- a/src/Lucene.Net.Queries/TermFilter.cs
+++ b/src/Lucene.Net.Queries/TermFilter.cs
@@ -59,15 +59,15 @@
             {
                 return null;
             }
-            return new DocIdSetAnonymousInnerClassHelper(acceptDocs, termsEnum);
+            return new DocIdSetAnonymousClass(acceptDocs, termsEnum);
         }
 
-        private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+        private class DocIdSetAnonymousClass : DocIdSet
         {
             private readonly IBits acceptDocs;
             private readonly TermsEnum termsEnum;
 
-            public DocIdSetAnonymousInnerClassHelper(IBits acceptDocs, TermsEnum termsEnum)
+            public DocIdSetAnonymousClass(IBits acceptDocs, TermsEnum termsEnum)
             {
                 this.acceptDocs = acceptDocs;
                 this.termsEnum = termsEnum;
diff --git a/src/Lucene.Net.Queries/TermsFilter.cs b/src/Lucene.Net.Queries/TermsFilter.cs
index e448d9e..960b894 100644
--- a/src/Lucene.Net.Queries/TermsFilter.cs
+++ b/src/Lucene.Net.Queries/TermsFilter.cs
@@ -55,13 +55,13 @@
         /// can contain duplicate terms and multiple fields.
         /// </summary>
         public TermsFilter(IList<Term> terms)
-            : this(new FieldAndTermEnumAnonymousInnerClassHelper(terms), terms.Count)
+            : this(new FieldAndTermEnumAnonymousClass(terms), terms.Count)
         {
         }
 
-        private class FieldAndTermEnumAnonymousInnerClassHelper : FieldAndTermEnum
+        private class FieldAndTermEnumAnonymousClass : FieldAndTermEnum
         {            
-            public FieldAndTermEnumAnonymousInnerClassHelper(IList<Term> terms)
+            public FieldAndTermEnumAnonymousClass(IList<Term> terms)
             {
                 if (terms.Count == 0)
                 {
@@ -93,13 +93,13 @@
         /// a single field.
         /// </summary>
         public TermsFilter(string field, IList<BytesRef> terms)
-            : this(new FieldAndTermEnumAnonymousInnerClassHelper2(field, terms), terms.Count)
+            : this(new FieldAndTermEnumAnonymousClass2(field, terms), terms.Count)
         {
         }
 
-        private class FieldAndTermEnumAnonymousInnerClassHelper2 : FieldAndTermEnum
+        private class FieldAndTermEnumAnonymousClass2 : FieldAndTermEnum
         {
-            public FieldAndTermEnumAnonymousInnerClassHelper2(string field, IList<BytesRef> terms)
+            public FieldAndTermEnumAnonymousClass2(string field, IList<BytesRef> terms)
                 : base(field)
             {
                 if (terms.Count == 0)
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/EscapeQuerySyntaxImpl.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/EscapeQuerySyntaxImpl.cs
index 9d084db..544abe1 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/EscapeQuerySyntaxImpl.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/EscapeQuerySyntaxImpl.cs
@@ -1,4 +1,5 @@
-using J2N.Text;
+using J2N.Numerics;
+using J2N.Text;
 using Lucene.Net.QueryParsers.Flexible.Core.Messages;
 using Lucene.Net.QueryParsers.Flexible.Core.Parser;
 using Lucene.Net.QueryParsers.Flexible.Core.Util;
@@ -275,7 +276,7 @@
                 if (codePointMultiplier > 0)
                 {
                     codePoint += HexToInt32(curChar) * codePointMultiplier;
-                    codePointMultiplier = (int)((uint)codePointMultiplier >> 4);
+                    codePointMultiplier = codePointMultiplier.TripleShift(4);
                     if (codePointMultiplier == 0)
                     {
                         output[length++] = (char)codePoint;
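
Here and in several files below, the hand-rolled unsigned right shift (cast to an unsigned type, shift, cast back) is replaced with the TripleShift extension method from the newly imported J2N.Numerics namespace, which mirrors Java's >>> operator. A minimal standalone sketch of the equivalence (illustrative values only, not code from the repository):

    using J2N.Numerics; // provides TripleShift extension methods for int/long

    internal static class TripleShiftDemo
    {
        private static void Main()
        {
            int value = -16;

            // The old spelling: round-trip through uint to get a logical shift.
            int viaCasts = (int)((uint)value >> 4);

            // The replacement: J2N's helper, named after Java's ">>>" operator.
            int viaJ2N = value.TripleShift(4);

            System.Console.WriteLine(viaCasts == viaJ2N); // True
        }
    }
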
diff --git a/src/Lucene.Net.Replicator/RevisionFile.cs b/src/Lucene.Net.Replicator/RevisionFile.cs
index 4d5c563..ab5371d 100644
--- a/src/Lucene.Net.Replicator/RevisionFile.cs
+++ b/src/Lucene.Net.Replicator/RevisionFile.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using System;
 
 namespace Lucene.Net.Replicator
@@ -69,7 +70,7 @@
 
         public override int GetHashCode()
         {
-            return FileName.GetHashCode() ^ (int)(Length ^ (long)((ulong)Length >> 32));
+            return FileName.GetHashCode() ^ (int)(Length ^ Length.TripleShift(32));
         }
 
         public override string ToString()
diff --git a/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs
index 38a5123..50cd15f 100644
--- a/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs
+++ b/src/Lucene.Net.Sandbox/Queries/SortedSetSortField.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+using J2N.Numerics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Search;
 using Lucene.Net.Util;
@@ -144,11 +145,11 @@
             }
         }
 
-        internal class TermOrdValComparerAnonymousHelper : FieldComparer.TermOrdValComparer
+        private class TermOrdValComparerAnonymousClass : FieldComparer.TermOrdValComparer
         {
             private readonly SortedSetSortField outerInstance;
 
-            public TermOrdValComparerAnonymousHelper(SortedSetSortField outerInstance, int numHits)
+            public TermOrdValComparerAnonymousClass(SortedSetSortField outerInstance, int numHits)
                 : base(numHits, outerInstance.Field, outerInstance.m_missingValue == STRING_LAST)
             {
                 this.outerInstance = outerInstance;
@@ -198,7 +199,7 @@
 
         public override FieldComparer GetComparer(int numHits, int sortPos)
         {
-            return new TermOrdValComparerAnonymousHelper(this, numHits);
+            return new TermOrdValComparerAnonymousClass(this, numHits);
         }
 
         /// <summary>Wraps a <see cref="SortedSetDocValues"/> and returns the first ordinal (min)</summary>
@@ -287,7 +288,7 @@
                 }
                 else
                 {
-                    return (int)@in.OrdAt((int)((uint)(count - 1)) >> 1);
+                    return (int)@in.OrdAt((count - 1).TripleShift(1));
                 }
             }
 
@@ -324,7 +325,7 @@
                 }
                 else
                 {
-                    return (int)@in.OrdAt((int)((uint)count >> 1));
+                    return (int)@in.OrdAt(count.TripleShift(1));
                 }
             }
 
diff --git a/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs
index de1da9c..11b55bf 100644
--- a/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs
+++ b/src/Lucene.Net.Spatial/Prefix/ContainsPrefixTreeFilter.cs
@@ -305,17 +305,17 @@
                 int size = d;
                 //sort them
                 Array.Sort(docs, 0, size);
-                return new DocIdSetIteratorAnonymousHelper(size, docs);
+                return new DocIdSetIteratorAnonymousClass(size, docs);
             }
 
-            #region Nested Type: DocIdSetIteratorAnonymousHelper
+            #region Nested Type: DocIdSetIteratorAnonymousClass
 
-            private sealed class DocIdSetIteratorAnonymousHelper : DocIdSetIterator
+            private sealed class DocIdSetIteratorAnonymousClass : DocIdSetIterator
             {
                 private readonly int size;
                 private readonly int[] docs;
 
-                public DocIdSetIteratorAnonymousHelper(int size, int[] docs)
+                public DocIdSetIteratorAnonymousClass(int size, int[] docs)
                 {
                     this.size = size;
                     this.docs = docs;
diff --git a/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs
index 49aebf0..f9672c3 100644
--- a/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs
+++ b/src/Lucene.Net.Spatial/Prefix/IntersectsPrefixTreeFilter.cs
@@ -62,16 +62,16 @@
         /// <exception cref="IOException"></exception>
         public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
         {
-            return new VisitorTemplateAnonymousHelper(this, context, acceptDocs, hasIndexedLeaves).GetDocIdSet();
+            return new VisitorTemplateAnonymousClass(this, context, acceptDocs, hasIndexedLeaves).GetDocIdSet();
         }
 
-        #region Nested type: VisitorTemplateAnonymousHelper
+        #region Nested type: VisitorTemplateAnonymousClass
 
-        private sealed class VisitorTemplateAnonymousHelper : VisitorTemplate
+        private sealed class VisitorTemplateAnonymousClass : VisitorTemplate
         {
             private FixedBitSet results;
 
-            public VisitorTemplateAnonymousHelper(IntersectsPrefixTreeFilter outerInstance, AtomicReaderContext context, IBits acceptDocs, bool hasIndexedLeaves)
+            public VisitorTemplateAnonymousClass(IntersectsPrefixTreeFilter outerInstance, AtomicReaderContext context, IBits acceptDocs, bool hasIndexedLeaves)
                 : base(outerInstance, context, acceptDocs, hasIndexedLeaves)
             {
             }
diff --git a/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs b/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs
index b7da0be..c1ea3f6 100644
--- a/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs
+++ b/src/Lucene.Net.Spatial/Prefix/WithinPrefixTreeFilter.cs
@@ -140,18 +140,18 @@
         /// <exception cref="IOException"></exception>
         public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
         {
-            return new VisitorTemplateAnonymousHelper(this, context, acceptDocs, true).GetDocIdSet();
+            return new VisitorTemplateAnonymousClass(this, context, acceptDocs, true).GetDocIdSet();
         }
 
-        #region Nested type: VisitorTemplateAnonymousHelper
+        #region Nested type: VisitorTemplateAnonymousClass
 
-        private sealed class VisitorTemplateAnonymousHelper : VisitorTemplate
+        private sealed class VisitorTemplateAnonymousClass : VisitorTemplate
         {
             private FixedBitSet inside;
             private FixedBitSet outside;
             private SpatialRelation visitRelation;
 
-            public VisitorTemplateAnonymousHelper(WithinPrefixTreeFilter outerInstance, AtomicReaderContext context, 
+            public VisitorTemplateAnonymousClass(WithinPrefixTreeFilter outerInstance, AtomicReaderContext context, 
                 IBits acceptDocs, bool hasIndexedLeaves)
                 : base(outerInstance, context, acceptDocs, hasIndexedLeaves)
             {
diff --git a/src/Lucene.Net.Spatial/Serialized/SerializedDVStrategy.cs b/src/Lucene.Net.Spatial/Serialized/SerializedDVStrategy.cs
index 184ce53..485c752 100644
--- a/src/Lucene.Net.Spatial/Serialized/SerializedDVStrategy.cs
+++ b/src/Lucene.Net.Spatial/Serialized/SerializedDVStrategy.cs
@@ -67,7 +67,7 @@
                 m_ctx.BinaryCodec.WriteShape(new BinaryWriter(byteStream), shape);
 
                 //this is a hack to avoid redundant byte array copying by byteStream.toByteArray()
-                byteStream.WriteTo(new OutputStreamAnonymousHelper(bytesRef));
+                byteStream.WriteTo(new OutputStreamAnonymousClass(bytesRef));
             }
             catch (IOException e)
             {
@@ -77,11 +77,11 @@
             return new Field[] { new BinaryDocValuesField(FieldName, bytesRef) };
         }
 
-        internal class OutputStreamAnonymousHelper : MemoryStream
+        private class OutputStreamAnonymousClass : MemoryStream
         {
             private readonly BytesRef bytesRef;
 
-            public OutputStreamAnonymousHelper(BytesRef bytesRef)
+            public OutputStreamAnonymousClass(BytesRef bytesRef)
             {
                 this.bytesRef = bytesRef;
             }
@@ -144,16 +144,16 @@
 
             public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
             {
-                return new DocIdSetAnonymousHelper(this, context, acceptDocs);
+                return new DocIdSetAnonymousClass(this, context, acceptDocs);
             }
 
-            internal class DocIdSetAnonymousHelper : DocIdSet
+            private class DocIdSetAnonymousClass : DocIdSet
             {
                 private readonly PredicateValueSourceFilter outerInstance;
                 private readonly AtomicReaderContext context;
                 private readonly IBits acceptDocs;
 
-                public DocIdSetAnonymousHelper(PredicateValueSourceFilter outerInstance, AtomicReaderContext context, IBits acceptDocs)
+                public DocIdSetAnonymousClass(PredicateValueSourceFilter outerInstance, AtomicReaderContext context, IBits acceptDocs)
                 {
                     this.outerInstance = outerInstance;
                     this.context = context;
@@ -174,17 +174,17 @@
                         //null Map context -- we simply don't have one. That's ok.
                         FunctionValues predFuncValues = outerInstance.predicateValueSource.GetValues(null, context);
 
-                        return new BitsAnonymousHelper(predFuncValues, context, acceptDocs);
+                        return new BitsAnonymousClass(predFuncValues, context, acceptDocs);
                     }
                 }
 
-                internal class BitsAnonymousHelper : IBits
+                private class BitsAnonymousClass : IBits
                 {
                     private readonly FunctionValues predFuncValues;
                     private readonly AtomicReaderContext context;
                     private readonly IBits acceptDocs;
 
-                    public BitsAnonymousHelper(FunctionValues predFuncValues, AtomicReaderContext context, IBits acceptDocs)
+                    public BitsAnonymousClass(FunctionValues predFuncValues, AtomicReaderContext context, IBits acceptDocs)
                     {
                         this.predFuncValues = predFuncValues;
                         this.context = context;
@@ -240,15 +240,15 @@
             {
                 BinaryDocValues docValues = readerContext.AtomicReader.GetBinaryDocValues(fieldName);
 
-                return new FuctionValuesAnonymousHelper(this, docValues);
+                return new FuctionValuesAnonymousClass(this, docValues);
             }
 
-            internal class FuctionValuesAnonymousHelper : FunctionValues
+            private class FuctionValuesAnonymousClass : FunctionValues
             {
                 private readonly ShapeDocValueSource outerInstance;
                 private readonly BinaryDocValues docValues;
 
-                public FuctionValuesAnonymousHelper(ShapeDocValueSource outerInstance, BinaryDocValues docValues)
+                public FuctionValuesAnonymousClass(ShapeDocValueSource outerInstance, BinaryDocValues docValues)
                 {
                     this.outerInstance = outerInstance;
                     this.docValues = docValues;
diff --git a/src/Lucene.Net.Spatial/Util/DistanceToShapeValueSource.cs b/src/Lucene.Net.Spatial/Util/DistanceToShapeValueSource.cs
index caf3a38..6234830 100644
--- a/src/Lucene.Net.Spatial/Util/DistanceToShapeValueSource.cs
+++ b/src/Lucene.Net.Spatial/Util/DistanceToShapeValueSource.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index;
+using J2N.Numerics;
+using Lucene.Net.Index;
 using Lucene.Net.Queries.Function;
 using Lucene.Net.Queries.Function.DocValues;
 using Lucene.Net.Search;
@@ -68,15 +69,15 @@
         {
             FunctionValues shapeValues = shapeValueSource.GetValues(context, readerContext);
 
-            return new DoubleDocValuesAnonymousHelper(this, shapeValues);
+            return new DoubleDocValuesAnonymousClass(this, shapeValues);
         }
 
-        internal class DoubleDocValuesAnonymousHelper : DoubleDocValues
+        private class DoubleDocValuesAnonymousClass : DoubleDocValues
         {
             private readonly DistanceToShapeValueSource outerInstance;
             private readonly FunctionValues shapeValues;
 
-            public DoubleDocValuesAnonymousHelper(DistanceToShapeValueSource outerInstance, FunctionValues shapeValues)
+            public DoubleDocValuesAnonymousClass(DistanceToShapeValueSource outerInstance, FunctionValues shapeValues)
                 : base(outerInstance)
             {
                 this.outerInstance = outerInstance;
@@ -122,7 +123,7 @@
             result = shapeValueSource.GetHashCode();
             result = 31 * result + queryPoint.GetHashCode();
             temp = J2N.BitConversion.DoubleToInt64Bits(multiplier);
-            result = 31 * result + (int)(temp ^ ((long)((ulong)temp) >> 32));
+            result = 31 * result + (int)(temp ^ (temp.TripleShift(32)));
             return result;
         }
     }
diff --git a/src/Lucene.Net.Spatial/Util/ShapePredicateValueSource.cs b/src/Lucene.Net.Spatial/Util/ShapePredicateValueSource.cs
index 179aac5..6d6a178 100644
--- a/src/Lucene.Net.Spatial/Util/ShapePredicateValueSource.cs
+++ b/src/Lucene.Net.Spatial/Util/ShapePredicateValueSource.cs
@@ -69,15 +69,15 @@
         {
             FunctionValues shapeValues = shapeValuesource.GetValues(context, readerContext);
 
-            return new BoolDocValuesAnonymousHelper(this, shapeValues);
+            return new BoolDocValuesAnonymousClass(this, shapeValues);
         }
 
-        internal class BoolDocValuesAnonymousHelper : BoolDocValues
+        private class BoolDocValuesAnonymousClass : BoolDocValues
         {
             private readonly ShapePredicateValueSource outerInstance;
             private readonly FunctionValues shapeValues;
 
-            public BoolDocValuesAnonymousHelper(ShapePredicateValueSource outerInstance, FunctionValues shapeValues)
+            public BoolDocValuesAnonymousClass(ShapePredicateValueSource outerInstance, FunctionValues shapeValues)
                 : base(outerInstance)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
index eff3a8f..ef411a7 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
@@ -288,13 +288,13 @@
         }
 
         private Analyzer GetGramAnalyzer()
-            => new AnalyzerWrapperAnonymousInnerClassHelper(this, Analyzer.PER_FIELD_REUSE_STRATEGY);
+            => new AnalyzerWrapperAnonymousClass(this, Analyzer.PER_FIELD_REUSE_STRATEGY);
 
-        private class AnalyzerWrapperAnonymousInnerClassHelper : AnalyzerWrapper
+        private class AnalyzerWrapperAnonymousClass : AnalyzerWrapper
         {
             private readonly AnalyzingInfixSuggester outerInstance;
 
-            public AnalyzerWrapperAnonymousInnerClassHelper(AnalyzingInfixSuggester outerInstance, ReuseStrategy reuseStrategy)
+            public AnalyzerWrapperAnonymousClass(AnalyzingInfixSuggester outerInstance, ReuseStrategy reuseStrategy)
                 : base(reuseStrategy)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
index b4e44cc..01775fd 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingSuggester.cs
@@ -835,7 +835,7 @@
                 }
 
                 Util.Fst.Util.TopNSearcher<PairOutputs<long?, BytesRef>.Pair> searcher2;
-                searcher2 = new TopNSearcherAnonymousInnerClassHelper(this, fst, num - results.Count,
+                searcher2 = new TopNSearcherAnonymousClass(this, fst, num - results.Count,
                     num * maxAnalyzedPathsForOneInput, weightComparer, utf8Key, results);
 
                 prefixPaths = GetFullPrefixPaths(prefixPaths, lookupAutomaton, fst);
@@ -875,14 +875,14 @@
             }
         }
 
-        private class TopNSearcherAnonymousInnerClassHelper : Util.Fst.Util.TopNSearcher<PairOutputs<long?, BytesRef>.Pair>
+        private class TopNSearcherAnonymousClass : Util.Fst.Util.TopNSearcher<PairOutputs<long?, BytesRef>.Pair>
         {
             private readonly AnalyzingSuggester outerInstance;
 
             private readonly BytesRef utf8Key;
             private readonly IList<LookupResult> results;
 
-            public TopNSearcherAnonymousInnerClassHelper(
+            public TopNSearcherAnonymousClass(
                 AnalyzingSuggester outerInstance,
                 FST<PairOutputs<long?, BytesRef>.Pair> fst,
                 int topN,
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
index d7d2594..8f30e41 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/FreeTextSuggester.cs
@@ -258,16 +258,16 @@
             {
                 // TODO: use ShingleAnalyzerWrapper?
                 // Tack on ShingleFilter to the end, to generate token ngrams:
-                return new AnalyzerWrapperAnonymousInnerClassHelper(this, other.Strategy, other);
+                return new AnalyzerWrapperAnonymousClass(this, other.Strategy, other);
             }
         }
 
-        private class AnalyzerWrapperAnonymousInnerClassHelper : AnalyzerWrapper
+        private class AnalyzerWrapperAnonymousClass : AnalyzerWrapper
         {
             private readonly FreeTextSuggester outerInstance;
             private readonly Analyzer other;
 
-            public AnalyzerWrapperAnonymousInnerClassHelper(FreeTextSuggester outerInstance, ReuseStrategy reuseStrategy, Analyzer other)
+            public AnalyzerWrapperAnonymousClass(FreeTextSuggester outerInstance, ReuseStrategy reuseStrategy, Analyzer other)
                 : base(reuseStrategy)
             {
                 this.outerInstance = outerInstance;
@@ -712,7 +712,7 @@
 
                         // Must do num+seen.size() for queue depth because we may
                         // reject up to seen.size() paths in acceptResult():
-                        Util.Fst.Util.TopNSearcher<long?> searcher = new TopNSearcherAnonymousInnerClassHelper(this, fst, num, num + seen.Count, weightComparer, seen, finalLastToken);
+                        Util.Fst.Util.TopNSearcher<long?> searcher = new TopNSearcherAnonymousClass(this, fst, num, num + seen.Count, weightComparer, seen, finalLastToken);
 
                         // since this search is initialized with a single start node 
                         // it is okay to start with an empty input path here
@@ -802,14 +802,14 @@
             }
         }
 
-        private class TopNSearcherAnonymousInnerClassHelper : Util.Fst.Util.TopNSearcher<long?>
+        private class TopNSearcherAnonymousClass : Util.Fst.Util.TopNSearcher<long?>
         {
             private readonly FreeTextSuggester outerInstance;
 
             private readonly ISet<BytesRef> seen;
             private readonly BytesRef finalLastToken;
 
-            public TopNSearcherAnonymousInnerClassHelper(
+            public TopNSearcherAnonymousClass(
                 FreeTextSuggester outerInstance,
                 FST<long?> fst,
                 int num,
diff --git a/src/Lucene.Net.TestFramework/Analysis/CollationTestBase.cs b/src/Lucene.Net.TestFramework/Analysis/CollationTestBase.cs
index cabf204..83dd51a 100644
--- a/src/Lucene.Net.TestFramework/Analysis/CollationTestBase.cs
+++ b/src/Lucene.Net.TestFramework/Analysis/CollationTestBase.cs
@@ -288,7 +288,7 @@
             ThreadJob[] threads = new ThreadJob[numThreads];
             for (int i = 0; i < numThreads; i++)
             {
-                threads[i] = new ThreadAnonymousInnerClassHelper(analyzer, map);
+                threads[i] = new ThreadAnonymousClass(analyzer, map);
             }
             for (int i = 0; i < numThreads; i++)
             {
@@ -300,12 +300,12 @@
             }
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly Analyzer analyzer;
             private readonly IDictionary<string, BytesRef> map;
 
-            public ThreadAnonymousInnerClassHelper(Analyzer analyzer, IDictionary<string, BytesRef> map)
+            public ThreadAnonymousClass(Analyzer analyzer, IDictionary<string, BytesRef> map)
             {
                 this.analyzer = analyzer;
                 this.map = map;
diff --git a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs
index c2ab2a8..d5d0fb7 100644
--- a/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs
+++ b/src/Lucene.Net.TestFramework/Analysis/LookaheadTokenFilter.cs
@@ -123,7 +123,7 @@
         protected internal LookaheadTokenFilter(TokenStream input)
             : base(input)
         {
-            m_positions = new RollingBufferAnonymousInnerClassHelper(this);
+            m_positions = new RollingBufferAnonymousClass(this);
             m_posIncAtt = AddAttribute<IPositionIncrementAttribute>();
             m_posLenAtt = AddAttribute<IPositionLengthAttribute>();
             m_offsetAtt = AddAttribute<IOffsetAttribute>();
@@ -160,11 +160,11 @@
 
         protected readonly RollingBuffer<T> m_positions;
 
-        private class RollingBufferAnonymousInnerClassHelper : RollingBuffer<T>
+        private class RollingBufferAnonymousClass : RollingBuffer<T>
         {
             private readonly LookaheadTokenFilter<T> outerInstance;
 
-            public RollingBufferAnonymousInnerClassHelper(LookaheadTokenFilter<T> outerInstance)
+            public RollingBufferAnonymousClass(LookaheadTokenFilter<T> outerInstance)
                 : base(outerInstance.NewPosition)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs
index f1eef1b..b219a43 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs
@@ -28,11 +28,11 @@
     [CodecName("DummyCompressingStoredFields")]
     public class DummyCompressingCodec : CompressingCodec
     {
-        public static readonly CompressionMode DUMMY = new CompressionModeAnonymousInnerClassHelper();
+        public static readonly CompressionMode DUMMY = new CompressionModeAnonymousClass();
 
-        private class CompressionModeAnonymousInnerClassHelper : CompressionMode
+        private class CompressionModeAnonymousClass : CompressionMode
         {
-            public CompressionModeAnonymousInnerClassHelper()
+            public CompressionModeAnonymousClass()
             { }
 
             public override Compressor NewCompressor()
@@ -51,9 +51,9 @@
             }
         }
 
-        private static readonly Decompressor DUMMY_DECOMPRESSOR = new DecompressorAnonymousInnerClassHelper();
+        private static readonly Decompressor DUMMY_DECOMPRESSOR = new DecompressorAnonymousClass();
 
-        private class DecompressorAnonymousInnerClassHelper : Decompressor
+        private class DecompressorAnonymousClass : Decompressor
         {
             public override void Decompress(DataInput @in, int originalLength, int offset, int length, BytesRef bytes)
             {
@@ -73,9 +73,9 @@
             }
         }
 
-        private static readonly Compressor DUMMY_COMPRESSOR = new CompressorAnonymousInnerClassHelper();
+        private static readonly Compressor DUMMY_COMPRESSOR = new CompressorAnonymousClass();
 
-        private class CompressorAnonymousInnerClassHelper : Compressor
+        private class CompressorAnonymousClass : Compressor
         {
             public override void Compress(byte[] bytes, int off, int len, DataOutput @out)
             {
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs
index 0538bdc..7d20215 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Diagnostics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
@@ -101,7 +101,7 @@
                     {
                         // to allow null norm types we need to indicate if norms are written
                         // only in RW case
-                        output.WriteByte((byte)(sbyte)(fi.NormType == Index.DocValuesType.NONE ? 0 : 1));
+                        output.WriteByte((byte)(fi.NormType == Index.DocValuesType.NONE ? 0 : 1));
                     }
                     if (Debugging.AssertsEnabled) Debugging.Assert(fi.Attributes == null); // not used or supported
                 }
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs
index b70ab92..704e5c8 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Diagnostics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
@@ -80,11 +80,11 @@
             if (Debugging.AssertsEnabled) Debugging.Assert(field.Number > lastFieldNumber,"writing norms fields out of order {0} -> {1}", lastFieldNumber, field.Number);
             foreach (var n in values)
             {
-                if (((sbyte)(byte)(long)n) < sbyte.MinValue || ((sbyte)(byte)(long)n) > sbyte.MaxValue)
+                if (((sbyte)n) < sbyte.MinValue || ((sbyte)n) > sbyte.MaxValue)
                 {
-                    throw new NotSupportedException("3.x cannot index norms that won't fit in a byte, got: " + ((sbyte)(byte)(long)n));
+                    throw new NotSupportedException("3.x cannot index norms that won't fit in a byte, got: " + ((sbyte)n));
                 }
-                @out.WriteByte((byte)(sbyte)n);
+                @out.WriteByte((byte)n);
             }
             lastFieldNumber = field.Number;
         }
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWPostingsFormat.cs
index 26bce39..64a3b3c 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWPostingsFormat.cs
@@ -46,12 +46,12 @@
             // Whenever IW opens readers, eg for merging, we have to
             // keep terms order in UTF16:
 
-            return new Lucene3xFieldsAnonymousInnerClassHelper(state.Directory, state.FieldInfos, state.SegmentInfo, state.Context, state.TermsIndexDivisor);
+            return new Lucene3xFieldsAnonymousClass(state.Directory, state.FieldInfos, state.SegmentInfo, state.Context, state.TermsIndexDivisor);
         }
 
-        private class Lucene3xFieldsAnonymousInnerClassHelper : Lucene3xFields
+        private class Lucene3xFieldsAnonymousClass : Lucene3xFields
         {
-            public Lucene3xFieldsAnonymousInnerClassHelper(Store.Directory directory, FieldInfos fieldInfos, SegmentInfo segmentInfo, Store.IOContext context, int termsIndexDivisor)
+            public Lucene3xFieldsAnonymousClass(Store.Directory directory, FieldInfos fieldInfos, SegmentInfo segmentInfo, Store.IOContext context, int termsIndexDivisor)
                 : base(directory, fieldInfos, segmentInfo, context, termsIndexDivisor)
             {
             }
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs
index 608acc8..567cb2f 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Diagnostics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
@@ -159,7 +159,7 @@
                 }
             }
 
-            fieldsStream.WriteByte((byte)(sbyte)bits);
+            fieldsStream.WriteByte((byte)bits);
 
             if (bytes != null)
             {
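
The PreFlexRW writer changes in this area drop the redundant intermediate sbyte cast before a byte is written: in an unchecked context, casting through sbyte first and casting directly to byte produce the same bit pattern, since only the low eight bits survive either way. A minimal standalone sketch of that equivalence (illustrative value, not code from the repository):

    internal static class ByteCastDemo
    {
        private static void Main()
        {
            int bits = 0x9C; // any value whose low byte has the high bit set

            byte viaSByte = unchecked((byte)(sbyte)bits); // old double cast
            byte direct = unchecked((byte)bits);          // simplified cast

            System.Console.WriteLine(viaSByte == direct); // True: identical byte
        }
    }
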
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs
index 82ed18a..1e5e6d1 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs
@@ -32,12 +32,12 @@
 
         public override TermVectorsReader VectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
         {
-            return new Lucene3xTermVectorsReaderAnonymousInnerClassHelper(directory, segmentInfo, fieldInfos, context);
+            return new Lucene3xTermVectorsReaderAnonymousClass(directory, segmentInfo, fieldInfos, context);
         }
 
-        private class Lucene3xTermVectorsReaderAnonymousInnerClassHelper : Lucene3xTermVectorsReader
+        private class Lucene3xTermVectorsReaderAnonymousClass : Lucene3xTermVectorsReader
         {
-            public Lucene3xTermVectorsReaderAnonymousInnerClassHelper(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
+            public Lucene3xTermVectorsReaderAnonymousClass(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
                 : base(directory, segmentInfo, fieldInfos, context)
             {
             }
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs
index 0640c13..683c5ca 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Diagnostics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
@@ -442,12 +442,12 @@
             if (Debugging.AssertsEnabled) Debugging.Assert(i >= 0 && i <= short.MaxValue);
             if (i < 128)
             {
-                o.WriteByte((byte)(sbyte)i);
+                o.WriteByte((byte)i);
             }
             else
             {
-                o.WriteByte((byte)unchecked((sbyte)(0x80 | (i >> 8))));
-                o.WriteByte((byte)unchecked((sbyte)(i & 0xff)));
+                o.WriteByte((byte)(0x80 | (i >> 8)));
+                o.WriteByte((byte)(i & 0xff));
             }
         }
 
@@ -466,7 +466,7 @@
             bool anyMissing = false;
             foreach (long n in docToOrd)
             {
-                if ((long)n == -1)
+                if (n == -1)
                 {
                     anyMissing = true;
                     break;
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs
index 99563cd..8255e0b 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs
@@ -24,9 +24,9 @@
 #pragma warning disable 612, 618
     public sealed class Lucene40RWCodec : Lucene40Codec
     {
-        private readonly FieldInfosFormat fieldInfos = new Lucene40FieldInfosFormatAnonymousInnerClassHelper();
+        private readonly FieldInfosFormat fieldInfos = new Lucene40FieldInfosFormatAnonymousClass();
 
-        private class Lucene40FieldInfosFormatAnonymousInnerClassHelper : Lucene40FieldInfosFormat
+        private class Lucene40FieldInfosFormatAnonymousClass : Lucene40FieldInfosFormat
         {
             public override FieldInfosWriter FieldInfosWriter
             {
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs
index 94b7886..a29d283 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs
@@ -27,9 +27,9 @@
     public class Lucene41RWCodec : Lucene41Codec
     {
         private readonly StoredFieldsFormat fieldsFormat = new Lucene41StoredFieldsFormat();
-        private readonly FieldInfosFormat fieldInfos = new Lucene40FieldInfosFormatAnonymousInnerClassHelper();
+        private readonly FieldInfosFormat fieldInfos = new Lucene40FieldInfosFormatAnonymousClass();
 
-        private class Lucene40FieldInfosFormatAnonymousInnerClassHelper : Lucene40FieldInfosFormat
+        private class Lucene40FieldInfosFormatAnonymousClass : Lucene40FieldInfosFormat
         {
             public override FieldInfosWriter FieldInfosWriter
             {
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs
index a30c4c4..c0b3d9c 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs
@@ -345,18 +345,18 @@
         public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
         {
             // write the ordinals as a binary field
-            AddBinaryField(field, new IterableAnonymousInnerClassHelper(docToOrdCount, ords));
+            AddBinaryField(field, new IterableAnonymousClass(docToOrdCount, ords));
 
             // write the values as FST
             WriteFST(field, values);
         }
 
-        private class IterableAnonymousInnerClassHelper : IEnumerable<BytesRef>
+        private class IterableAnonymousClass : IEnumerable<BytesRef>
         {
             private readonly IEnumerable<long?> docToOrdCount;
             private readonly IEnumerable<long?> ords;
 
-            public IterableAnonymousInnerClassHelper(IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
+            public IterableAnonymousClass(IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
             {
                 this.docToOrdCount = docToOrdCount;
                 this.ords = ords;
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs
index 390edb6..36b1ecc 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs
@@ -28,9 +28,9 @@
         private readonly DocValuesFormat dv = new Lucene42RWDocValuesFormat();
         private readonly NormsFormat norms = new Lucene42NormsFormat();
 
-        private readonly FieldInfosFormat fieldInfosFormat = new Lucene42FieldInfosFormatAnonymousInnerClassHelper();
+        private readonly FieldInfosFormat fieldInfosFormat = new Lucene42FieldInfosFormatAnonymousClass();
 
-        private class Lucene42FieldInfosFormatAnonymousInnerClassHelper : Lucene42FieldInfosFormat
+        private class Lucene42FieldInfosFormatAnonymousClass : Lucene42FieldInfosFormat
         {
             public override FieldInfosWriter FieldInfosWriter
             {
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs
index 0850b5b..db0cc6c 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs
@@ -26,9 +26,9 @@
 #pragma warning disable 612, 618
     public class Lucene45RWCodec : Lucene45Codec
     {
-        private readonly FieldInfosFormat fieldInfosFormat = new Lucene42FieldInfosFormatAnonymousInnerClassHelper();
+        private readonly FieldInfosFormat fieldInfosFormat = new Lucene42FieldInfosFormatAnonymousClass();
 
-        private class Lucene42FieldInfosFormatAnonymousInnerClassHelper : Lucene42FieldInfosFormat
+        private class Lucene42FieldInfosFormatAnonymousClass : Lucene42FieldInfosFormat
         {
             public override FieldInfosWriter FieldInfosWriter
             {
diff --git a/src/Lucene.Net.TestFramework/Codecs/MissingOrdRemapper.cs b/src/Lucene.Net.TestFramework/Codecs/MissingOrdRemapper.cs
index e7da523..cf0e6c3 100644
--- a/src/Lucene.Net.TestFramework/Codecs/MissingOrdRemapper.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/MissingOrdRemapper.cs
@@ -32,21 +32,21 @@
         /// Insert an empty byte[] to the front of this enumerable.</summary>
         public static IEnumerable<BytesRef> InsertEmptyValue(IEnumerable<BytesRef> iterable)
         {
-            return new IterableAnonymousInnerClassHelper(iterable);
+            return new IterableAnonymousClass(iterable);
         }
 
-        private class IterableAnonymousInnerClassHelper : IEnumerable<BytesRef>
+        private class IterableAnonymousClass : IEnumerable<BytesRef>
         {
             private readonly IEnumerable<BytesRef> iterable;
 
-            public IterableAnonymousInnerClassHelper(IEnumerable<BytesRef> iterable)
+            public IterableAnonymousClass(IEnumerable<BytesRef> iterable)
             {
                 this.iterable = iterable;
             }
 
             public IEnumerator<BytesRef> GetEnumerator()
             {
-                return new IteratorAnonymousInnerClassHelper(this);
+                return new IteratorAnonymousClass(this);
             }
 
             IEnumerator IEnumerable.GetEnumerator()
@@ -54,9 +54,9 @@
                 return GetEnumerator();
             }
 
-            private class IteratorAnonymousInnerClassHelper : IEnumerator<BytesRef>
+            private class IteratorAnonymousClass : IEnumerator<BytesRef>
             {
-                public IteratorAnonymousInnerClassHelper(IterableAnonymousInnerClassHelper outerInstance)
+                public IteratorAnonymousClass(IterableAnonymousClass outerInstance)
                 {
                     seenEmpty = false;
                     @in = outerInstance.iterable.GetEnumerator();
@@ -100,29 +100,29 @@
         /// Remaps ord -1 to ord 0 on this enumerable. </summary>
         public static IEnumerable<long?> MapMissingToOrd0(IEnumerable<long?> iterable)
         {
-            return new IterableAnonymousInnerClassHelper2(iterable);
+            return new IterableAnonymousClass2(iterable);
         }
 
-        private class IterableAnonymousInnerClassHelper2 : IEnumerable<long?>
+        private class IterableAnonymousClass2 : IEnumerable<long?>
         {
             private readonly IEnumerable<long?> iterable;
 
-            public IterableAnonymousInnerClassHelper2(IEnumerable<long?> iterable)
+            public IterableAnonymousClass2(IEnumerable<long?> iterable)
             {
                 this.iterable = iterable;
             }
 
             public IEnumerator<long?> GetEnumerator()
             {
-                return new IteratorAnonymousInnerClassHelper2(this);
+                return new IteratorAnonymousClass2(this);
             }
 
             IEnumerator IEnumerable.GetEnumerator()
                 => GetEnumerator();
 
-            private class IteratorAnonymousInnerClassHelper2 : IEnumerator<long?>
+            private class IteratorAnonymousClass2 : IEnumerator<long?>
             {
-                public IteratorAnonymousInnerClassHelper2(IterableAnonymousInnerClassHelper2 outerInstance)
+                public IteratorAnonymousClass2(IterableAnonymousClass2 outerInstance)
                 {
                     @in = outerInstance.iterable.GetEnumerator();
                 }
@@ -160,29 +160,29 @@
         /// Remaps every ord+1 on this enumerable. </summary>
         public static IEnumerable<long?> MapAllOrds(IEnumerable<long?> iterable)
         {
-            return new IterableAnonymousInnerClassHelper3(iterable);
+            return new IterableAnonymousClass3(iterable);
         }
 
-        private class IterableAnonymousInnerClassHelper3 : IEnumerable<long?>
+        private class IterableAnonymousClass3 : IEnumerable<long?>
         {
             private readonly IEnumerable<long?> iterable;
 
-            public IterableAnonymousInnerClassHelper3(IEnumerable<long?> iterable)
+            public IterableAnonymousClass3(IEnumerable<long?> iterable)
             {
                 this.iterable = iterable;
             }
 
             public IEnumerator<long?> GetEnumerator()
             {
-                return new IteratorAnonymousInnerClassHelper3(this);
+                return new IteratorAnonymousClass3(this);
             }
 
             IEnumerator IEnumerable.GetEnumerator()
                 => GetEnumerator();
 
-            private class IteratorAnonymousInnerClassHelper3 : IEnumerator<long?>
+            private class IteratorAnonymousClass3 : IEnumerator<long?>
             {
-                public IteratorAnonymousInnerClassHelper3(IterableAnonymousInnerClassHelper3 outerInstance)
+                public IteratorAnonymousClass3(IterableAnonymousClass3 outerInstance)
                 {
                     @in = outerInstance.iterable.GetEnumerator();
                 }
diff --git a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs
index 8c72c82..5e74d7a 100644
--- a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs
@@ -69,27 +69,27 @@
 
             public override Int32IndexInput OpenInput(Directory dir, string fileName, IOContext context)
             {
-                return new FixedInt32BlockIndexInputAnonymousHelper(dir.OpenInput(fileName, context));
+                return new FixedInt32BlockIndexInputAnonymousClass(dir.OpenInput(fileName, context));
             }
 
-            private class FixedInt32BlockIndexInputAnonymousHelper : FixedInt32BlockIndexInput
+            private class FixedInt32BlockIndexInputAnonymousClass : FixedInt32BlockIndexInput
             {
-                public FixedInt32BlockIndexInputAnonymousHelper(IndexInput input)
+                public FixedInt32BlockIndexInputAnonymousClass(IndexInput input)
                     : base(input)
                 {
                 }
 
                 protected override IBlockReader GetBlockReader(IndexInput @in, int[] buffer)
                 {
-                    return new BlockReaderAnonymousHelper(@in, buffer);
+                    return new BlockReaderAnonymousClass(@in, buffer);
                 }
 
-                private class BlockReaderAnonymousHelper : FixedInt32BlockIndexInput.IBlockReader
+                private class BlockReaderAnonymousClass : FixedInt32BlockIndexInput.IBlockReader
                 {
                     private readonly IndexInput @in;
                     private readonly int[] buffer;
 
-                    public BlockReaderAnonymousHelper(IndexInput @in, int[] buffer)
+                    public BlockReaderAnonymousClass(IndexInput @in, int[] buffer)
                     {
                         this.@in = @in;
                         this.buffer = buffer;
@@ -115,7 +115,7 @@
                 bool success = false;
                 try
                 {
-                    FixedInt32BlockIndexOutputAnonymousHelper ret = new FixedInt32BlockIndexOutputAnonymousHelper(output, blockSize);
+                    FixedInt32BlockIndexOutputAnonymousClass ret = new FixedInt32BlockIndexOutputAnonymousClass(output, blockSize);
 
                     success = true;
                     return ret;
@@ -129,9 +129,9 @@
                 }
             }
 
-            private class FixedInt32BlockIndexOutputAnonymousHelper : FixedInt32BlockIndexOutput
+            private class FixedInt32BlockIndexOutputAnonymousClass : FixedInt32BlockIndexOutput
             {
-                public FixedInt32BlockIndexOutputAnonymousHelper(IndexOutput output, int blockSize)
+                public FixedInt32BlockIndexOutputAnonymousClass(IndexOutput output, int blockSize)
                     : base(output, blockSize)
                 {
                 }
diff --git a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
index 8835337..664db8e 100644
--- a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
@@ -69,30 +69,30 @@
             {
                 IndexInput input = dir.OpenInput(fileName, context);
                 int baseBlockSize = input.ReadInt32();
-                return new VariableInt32BlockIndexInputAnonymousHelper(input, baseBlockSize);
+                return new VariableInt32BlockIndexInputAnonymousClass(input, baseBlockSize);
             }
 
-            private class VariableInt32BlockIndexInputAnonymousHelper : VariableInt32BlockIndexInput
+            private class VariableInt32BlockIndexInputAnonymousClass : VariableInt32BlockIndexInput
             {
                 private readonly int baseBlockSize;
 
-                public VariableInt32BlockIndexInputAnonymousHelper(IndexInput input, int baseBlockSize)
+                public VariableInt32BlockIndexInputAnonymousClass(IndexInput input, int baseBlockSize)
                     : base(input)
                 {
                     this.baseBlockSize = baseBlockSize;
                 }
                 protected override IBlockReader GetBlockReader(IndexInput @in, int[] buffer)
                 {
-                    return new BlockReaderAnonymousHelper(@in, buffer, baseBlockSize);
+                    return new BlockReaderAnonymousClass(@in, buffer, baseBlockSize);
                 }
 
-                private class BlockReaderAnonymousHelper : IBlockReader
+                private class BlockReaderAnonymousClass : IBlockReader
                 {
                     private readonly IndexInput input;
                     private readonly int[] buffer;
                     private readonly int baseBlockSize;
 
-                    public BlockReaderAnonymousHelper(IndexInput input, int[] buffer, int baseBlockSize)
+                    public BlockReaderAnonymousClass(IndexInput input, int[] buffer, int baseBlockSize)
                     {
                         this.input = input;
                         this.buffer = buffer;
@@ -124,7 +124,7 @@
                 try
                 {
                     output.WriteInt32(baseBlockSize);
-                    VariableInt32BlockIndexOutput ret = new VariableInt32BlockIndexOutputAnonymousHelper(output, baseBlockSize);
+                    VariableInt32BlockIndexOutput ret = new VariableInt32BlockIndexOutputAnonymousClass(output, baseBlockSize);
                     success = true;
                     return ret;
                 }
@@ -137,11 +137,11 @@
                 }
             }
 
-            private class VariableInt32BlockIndexOutputAnonymousHelper : VariableInt32BlockIndexOutput
+            private class VariableInt32BlockIndexOutputAnonymousClass : VariableInt32BlockIndexOutput
             {
                 private readonly int baseBlockSize;
                 private readonly IndexOutput output;
-                public VariableInt32BlockIndexOutputAnonymousHelper(IndexOutput output, int baseBlockSize)
+                public VariableInt32BlockIndexOutputAnonymousClass(IndexOutput output, int baseBlockSize)
                     : base(output, 2 * baseBlockSize)
                 {
                     this.output = output;
diff --git a/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs
index 417c68e..c43ebc0 100644
--- a/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/MockRandom/MockRandomPostingsFormat.cs
@@ -115,11 +115,11 @@
             }
         }
 
-        private class IndexTermSelectorAnonymousHelper : VariableGapTermsIndexWriter.IndexTermSelector
+        private class IndexTermSelectorAnonymousClass : VariableGapTermsIndexWriter.IndexTermSelector
         {
             private readonly Random rand;
             private readonly int gap;
-            public IndexTermSelectorAnonymousHelper(int seed, int gap)
+            public IndexTermSelectorAnonymousClass(int seed, int gap)
             {
                 rand = new Random(seed);
                 this.gap = gap;
@@ -315,7 +315,7 @@
                             {
                                 Console.WriteLine("MockRandomCodec: random-gap terms index (max gap=" + gap + ")");
                             }
-                            selector = new IndexTermSelectorAnonymousHelper(seed2, gap);
+                            selector = new IndexTermSelectorAnonymousClass(seed2, gap);
                         }
                         indexWriter = new VariableGapTermsIndexWriter(state, selector);
                     }
diff --git a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
index aee0430..bee1234 100644
--- a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
@@ -41,13 +41,13 @@
         // For fun, test that we can override how terms are
         // sorted, and basic things still work -- this comparer
         // sorts in reversed unicode code point order:
-        private static readonly IComparer<BytesRef> reverseUnicodeComparer = new ComparerAnonymousInnerClassHelper();
+        private static readonly IComparer<BytesRef> reverseUnicodeComparer = new ComparerAnonymousClass();
 
 #pragma warning disable 659 // LUCENENET: Overrides Equals but not GetHashCode
-        private class ComparerAnonymousInnerClassHelper : IComparer<BytesRef>
+        private class ComparerAnonymousClass : IComparer<BytesRef>
 #pragma warning restore 659
         {
-            public ComparerAnonymousInnerClassHelper()
+            public ComparerAnonymousClass()
             { }
 
             public virtual int Compare(BytesRef t1, BytesRef t2)
diff --git a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
index 1b003e6..8aaccc9 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
@@ -1308,15 +1308,15 @@
 
         private void DoTestNumericsVsStoredFields(long minValue, long maxValue)
         {
-            DoTestNumericsVsStoredFields(new Int64ProducerAnonymousInnerClassHelper(minValue, maxValue));
+            DoTestNumericsVsStoredFields(new Int64ProducerAnonymousClass(minValue, maxValue));
         }
 
-        private class Int64ProducerAnonymousInnerClassHelper : Int64Producer
+        private class Int64ProducerAnonymousClass : Int64Producer
         {
             private readonly long minValue;
             private readonly long maxValue;
 
-            public Int64ProducerAnonymousInnerClassHelper(long minValue, long maxValue)
+            public Int64ProducerAnonymousClass(long minValue, long maxValue)
             {
                 this.minValue = minValue;
                 this.maxValue = maxValue;
@@ -1390,15 +1390,15 @@
 
         private void DoTestMissingVsFieldCache(long minValue, long maxValue)
         {
-            DoTestMissingVsFieldCache(new Int64ProducerAnonymousInnerClassHelper2(minValue, maxValue));
+            DoTestMissingVsFieldCache(new Int64ProducerAnonymousClass2(minValue, maxValue));
         }
 
-        private class Int64ProducerAnonymousInnerClassHelper2 : Int64Producer
+        private class Int64ProducerAnonymousClass2 : Int64Producer
         {
             private readonly long minValue;
             private readonly long maxValue;
 
-            public Int64ProducerAnonymousInnerClassHelper2(long minValue, long maxValue)
+            public Int64ProducerAnonymousClass2(long minValue, long maxValue)
             {
                 this.minValue = minValue;
                 this.maxValue = maxValue;
@@ -2752,17 +2752,17 @@
             {
                 long min = -(((long)Random.Next(1 << 30)) << 32);
                 long mul = Random.Next() & 0xFFFFFFFFL;
-                Int64Producer longs = new Int64ProducerAnonymousInnerClassHelper3(min, mul);
+                Int64Producer longs = new Int64ProducerAnonymousClass3(min, mul);
                 DoTestNumericsVsStoredFields(longs);
             }
         }
 
-        private class Int64ProducerAnonymousInnerClassHelper3 : Int64Producer
+        private class Int64ProducerAnonymousClass3 : Int64Producer
         {
             private readonly long min;
             private readonly long mul;
 
-            public Int64ProducerAnonymousInnerClassHelper3(long min, long mul)
+            public Int64ProducerAnonymousClass3(long min, long mul)
             {
                 this.min = min;
                 this.mul = mul;
@@ -2788,12 +2788,12 @@
             int numIterations = AtLeast(1);
             for (int i = 0; i < numIterations; i++)
             {
-                Int64Producer longs = new Int64ProducerAnonymousInnerClassHelper4();
+                Int64Producer longs = new Int64ProducerAnonymousClass4();
                 DoTestNumericsVsStoredFields(longs);
             }
         }
 
-        private class Int64ProducerAnonymousInnerClassHelper4 : Int64Producer
+        private class Int64ProducerAnonymousClass4 : Int64Producer
         {
             internal override long Next()
             {
@@ -3307,7 +3307,7 @@
             using CountdownEvent startingGun = new CountdownEvent(1);
             for (int i = 0; i < threads.Length; i++)
             {
-                threads[i] = new ThreadAnonymousInnerClassHelper(ir, startingGun);
+                threads[i] = new ThreadAnonymousClass(ir, startingGun);
                 threads[i].Start();
             }
             startingGun.Signal();
@@ -3317,12 +3317,12 @@
             }
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly DirectoryReader ir;
             private readonly CountdownEvent startingGun;
 
-            public ThreadAnonymousInnerClassHelper(DirectoryReader ir, CountdownEvent startingGun)
+            public ThreadAnonymousClass(DirectoryReader ir, CountdownEvent startingGun)
             {
                 this.ir = ir;
                 this.startingGun = startingGun;
@@ -3443,7 +3443,7 @@
             using CountdownEvent startingGun = new CountdownEvent(1);
             for (int i = 0; i < threads.Length; i++)
             {
-                threads[i] = new ThreadAnonymousInnerClassHelper2(ir, startingGun);
+                threads[i] = new ThreadAnonymousClass2(ir, startingGun);
                 threads[i].Start();
             }
             startingGun.Signal();
@@ -3453,12 +3453,12 @@
             }
         }
 
-        private class ThreadAnonymousInnerClassHelper2 : ThreadJob
+        private class ThreadAnonymousClass2 : ThreadJob
         {
             private readonly DirectoryReader ir;
             private readonly CountdownEvent startingGun;
 
-            public ThreadAnonymousInnerClassHelper2(DirectoryReader ir, CountdownEvent startingGun)
+            public ThreadAnonymousClass2(DirectoryReader ir, CountdownEvent startingGun)
             {
                 this.ir = ir;
                 this.startingGun = startingGun;
diff --git a/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs
index fe3d36b..b24e256 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseMergePolicyTestCase.cs
@@ -56,7 +56,7 @@
         {
             using Directory dir = NewDirectory();
             AtomicBoolean mayMerge = new AtomicBoolean(true);
-            MergeScheduler mergeScheduler = new SerialMergeSchedulerAnonymousInnerClassHelper(mayMerge);
+            MergeScheduler mergeScheduler = new SerialMergeSchedulerAnonymousClass(mayMerge);
             using IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(mergeScheduler).SetMergePolicy(NewMergePolicy()));
             writer.Config.MergePolicy.NoCFSRatio = Random.NextBoolean() ? 0 : 1;
             int numSegments = TestUtil.NextInt32(Random, 2, 20);
@@ -78,11 +78,11 @@
             }
         }
 
-        private class SerialMergeSchedulerAnonymousInnerClassHelper : SerialMergeScheduler
+        private class SerialMergeSchedulerAnonymousClass : SerialMergeScheduler
         {
             private readonly AtomicBoolean mayMerge;
 
-            public SerialMergeSchedulerAnonymousInnerClassHelper(AtomicBoolean mayMerge)
+            public SerialMergeSchedulerAnonymousClass(AtomicBoolean mayMerge)
             {
                 this.mayMerge = mayMerge;
             }
diff --git a/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
index 78ad27a..2b779e8 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
@@ -501,7 +501,7 @@
 
                 for (int i = 0; i < concurrentReads; ++i)
                 {
-                    readThreads.Add(new ThreadAnonymousInnerClassHelper(numDocs, rd, searcher, readsPerThread, ex));
+                    readThreads.Add(new ThreadAnonymousClass(numDocs, rd, searcher, readsPerThread, ex));
                 }
                 foreach (ThreadJob thread in readThreads)
                 {
@@ -518,7 +518,7 @@
             }
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly int numDocs;
             private readonly DirectoryReader rd;
@@ -527,7 +527,7 @@
             private readonly AtomicReference<Exception> ex;
             private readonly int[] queries;
 
-            public ThreadAnonymousInnerClassHelper(int numDocs, DirectoryReader rd, IndexSearcher searcher, int readsPerThread, AtomicReference<Exception> ex)
+            public ThreadAnonymousClass(int numDocs, DirectoryReader rd, IndexSearcher searcher, int readsPerThread, AtomicReference<Exception> ex)
             {
                 this.numDocs = numDocs;
                 this.rd = rd;
diff --git a/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
index 783fcb1..f923bf7 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseTermVectorsFormatTestCase.cs
@@ -920,7 +920,7 @@
                     ThreadJob[] threads = new ThreadJob[2];
                     for (int i = 0; i < threads.Length; ++i)
                     {
-                        threads[i] = new ThreadAnonymousInnerClassHelper(this, numDocs, docs, reader, exception);
+                        threads[i] = new ThreadAnonymousClass(this, numDocs, docs, reader, exception);
                     }
                     foreach (ThreadJob thread in threads)
                     {
@@ -935,7 +935,7 @@
             }
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly BaseTermVectorsFormatTestCase outerInstance;
 
@@ -944,7 +944,7 @@
             private readonly IndexReader reader;
             private readonly AtomicReference<Exception> exception;
 
-            public ThreadAnonymousInnerClassHelper(BaseTermVectorsFormatTestCase outerInstance, int numDocs, RandomDocument[] docs, IndexReader reader, AtomicReference<Exception> exception)
+            public ThreadAnonymousClass(BaseTermVectorsFormatTestCase outerInstance, int numDocs, RandomDocument[] docs, IndexReader reader, AtomicReference<Exception> exception)
             {
                 this.outerInstance = outerInstance;
                 this.numDocs = numDocs;
diff --git a/src/Lucene.Net.TestFramework/Index/FieldFilterAtomicReader.cs b/src/Lucene.Net.TestFramework/Index/FieldFilterAtomicReader.cs
index 4dbb538..8b54df3 100644
--- a/src/Lucene.Net.TestFramework/Index/FieldFilterAtomicReader.cs
+++ b/src/Lucene.Net.TestFramework/Index/FieldFilterAtomicReader.cs
@@ -71,16 +71,16 @@
 
         public override void Document(int docID, StoredFieldVisitor visitor)
         {
-            base.Document(docID, new StoredFieldVisitorAnonymousInnerClassHelper(this, visitor));
+            base.Document(docID, new StoredFieldVisitorAnonymousClass(this, visitor));
         }
 
-        private class StoredFieldVisitorAnonymousInnerClassHelper : StoredFieldVisitor
+        private class StoredFieldVisitorAnonymousClass : StoredFieldVisitor
         {
             private readonly FieldFilterAtomicReader outerInstance;
 
             private readonly StoredFieldVisitor visitor;
 
-            public StoredFieldVisitorAnonymousInnerClassHelper(FieldFilterAtomicReader outerInstance, StoredFieldVisitor visitor)
+            public StoredFieldVisitorAnonymousClass(FieldFilterAtomicReader outerInstance, StoredFieldVisitor visitor)
             {
                 this.outerInstance = outerInstance;
                 this.visitor = visitor;
diff --git a/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs b/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs
index 892bce2..fc3069e 100644
--- a/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs
+++ b/src/Lucene.Net.TestFramework/Index/RandomIndexWriter.cs
@@ -48,14 +48,14 @@
         {
             // Randomly calls Thread.yield so we mixup thread scheduling
             Random random = new Random(r.Next());
-            return MockIndexWriter(dir, conf, new TestPointAnonymousInnerClassHelper(random));
+            return MockIndexWriter(dir, conf, new TestPointAnonymousClass(random));
         }
 
-        private class TestPointAnonymousInnerClassHelper : ITestPoint
+        private class TestPointAnonymousClass : ITestPoint
         {
             private readonly Random random;
 
-            public TestPointAnonymousInnerClassHelper(Random random)
+            public TestPointAnonymousClass(Random random)
             {
                 this.random = random;
             }
@@ -191,7 +191,7 @@
                 // (but we need to clone them), and only when
                 // getReader, commit, etc. are called, we do an
                 // addDocuments?  Would be better testing.
-                IndexWriter.AddDocuments(new IterableAnonymousInnerClassHelper<IIndexableField>(doc), a);
+                IndexWriter.AddDocuments(new IterableAnonymousClass<IIndexableField>(doc), a);
             }
             else
             {
@@ -201,18 +201,18 @@
             MaybeCommit();
         }
 
-        private class IterableAnonymousInnerClassHelper<IndexableField> : IEnumerable<IEnumerable<IndexableField>>
+        private class IterableAnonymousClass<IndexableField> : IEnumerable<IEnumerable<IndexableField>>
         {
             private readonly IEnumerable<IndexableField> doc;
 
-            public IterableAnonymousInnerClassHelper(IEnumerable<IndexableField> doc)
+            public IterableAnonymousClass(IEnumerable<IndexableField> doc)
             {
                 this.doc = doc;
             }
 
             public IEnumerator<IEnumerable<IndexableField>> GetEnumerator()
             {
-                return new IteratorAnonymousInnerClassHelper(this);
+                return new IteratorAnonymousClass(this);
             }
 
             IEnumerator IEnumerable.GetEnumerator()
@@ -220,11 +220,11 @@
                 return GetEnumerator();
             }
 
-            private class IteratorAnonymousInnerClassHelper : IEnumerator<IEnumerable<IndexableField>>
+            private class IteratorAnonymousClass : IEnumerator<IEnumerable<IndexableField>>
             {
-                private readonly IterableAnonymousInnerClassHelper<IndexableField> outerInstance;
+                private readonly IterableAnonymousClass<IndexableField> outerInstance;
 
-                public IteratorAnonymousInnerClassHelper(IterableAnonymousInnerClassHelper<IndexableField> outerInstance)
+                public IteratorAnonymousClass(IterableAnonymousClass<IndexableField> outerInstance)
                 {
                     this.outerInstance = outerInstance;
                 }
@@ -294,7 +294,7 @@
         {
             if (r.Next(5) == 3)
             {
-                IndexWriter.UpdateDocuments(t, new IterableAnonymousInnerClassHelper2(doc));
+                IndexWriter.UpdateDocuments(t, new IterableAnonymousClass2(doc));
             }
             else
             {
@@ -303,28 +303,28 @@
             MaybeCommit();
         }
 
-        private class IterableAnonymousInnerClassHelper2 : IEnumerable<IEnumerable<IIndexableField>>
+        private class IterableAnonymousClass2 : IEnumerable<IEnumerable<IIndexableField>>
         {
             private readonly IEnumerable<IIndexableField> doc;
 
-            public IterableAnonymousInnerClassHelper2(IEnumerable<IIndexableField> doc)
+            public IterableAnonymousClass2(IEnumerable<IIndexableField> doc)
             {
                 this.doc = doc;
             }
 
             public IEnumerator<IEnumerable<IIndexableField>> GetEnumerator()
             {
-                return new IteratorAnonymousInnerClassHelper2(this);
+                return new IteratorAnonymousClass2(this);
             }
 
             IEnumerator IEnumerable.GetEnumerator() 
                 => GetEnumerator();
 
-            private class IteratorAnonymousInnerClassHelper2 : IEnumerator<IEnumerable<IIndexableField>>
+            private class IteratorAnonymousClass2 : IEnumerator<IEnumerable<IIndexableField>>
             {
-                private readonly IterableAnonymousInnerClassHelper2 outerInstance;
+                private readonly IterableAnonymousClass2 outerInstance;
 
-                public IteratorAnonymousInnerClassHelper2(IterableAnonymousInnerClassHelper2 outerInstance)
+                public IteratorAnonymousClass2(IterableAnonymousClass2 outerInstance)
                 {
                     this.outerInstance = outerInstance;
                 }
diff --git a/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs b/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs
index bf23258..be5d616 100644
--- a/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/ThreadedIndexingAndSearchingTestCase.cs
@@ -141,7 +141,7 @@
             ThreadJob[] threads = new ThreadJob[numThreads];
             for (int thread = 0; thread < numThreads; thread++)
             {
-                threads[thread] = new ThreadAnonymousInnerClassHelper(this, docs, stopTime, delIDs, delPackIDs, allSubDocs);
+                threads[thread] = new ThreadAnonymousClass(this, docs, stopTime, delIDs, delPackIDs, allSubDocs);
                 threads[thread].IsBackground = (true);
                 threads[thread].Start();
             }
@@ -149,7 +149,7 @@
             return threads;
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly ThreadedIndexingAndSearchingTestCase outerInstance;
 
@@ -159,7 +159,7 @@
             private readonly ISet<string> delPackIDs;
             private readonly ConcurrentQueue<SubDocs> allSubDocs;
 
-            public ThreadAnonymousInnerClassHelper(ThreadedIndexingAndSearchingTestCase outerInstance, LineFileDocs docs, long stopTime, ISet<string> delIDs, ISet<string> delPackIDs, ConcurrentQueue<SubDocs> allSubDocs)
+            public ThreadAnonymousClass(ThreadedIndexingAndSearchingTestCase outerInstance, LineFileDocs docs, long stopTime, ISet<string> delIDs, ISet<string> delPackIDs, ConcurrentQueue<SubDocs> allSubDocs)
             {
                 this.outerInstance = outerInstance;
                 this.docs = docs;
@@ -415,7 +415,7 @@
             // TODO: we should enrich this to do more interesting searches
             for (int thread = 0; thread < searchThreads.Length; thread++)
             {
-                searchThreads[thread] = new ThreadAnonymousInnerClassHelper2(this, stopTime, totHits, totTermCount);
+                searchThreads[thread] = new ThreadAnonymousClass2(this, stopTime, totHits, totTermCount);
                 searchThreads[thread].IsBackground = (true);
                 searchThreads[thread].Start();
             }
@@ -431,7 +431,7 @@
             }
         }
 
-        private class ThreadAnonymousInnerClassHelper2 : ThreadJob
+        private class ThreadAnonymousClass2 : ThreadJob
         {
             private readonly ThreadedIndexingAndSearchingTestCase outerInstance;
 
@@ -439,7 +439,7 @@
             private readonly AtomicInt32 totHits;
             private readonly AtomicInt32 totTermCount;
 
-            public ThreadAnonymousInnerClassHelper2(ThreadedIndexingAndSearchingTestCase outerInstance, long stopTimeMS, AtomicInt32 totHits, AtomicInt32 totTermCount)
+            public ThreadAnonymousClass2(ThreadedIndexingAndSearchingTestCase outerInstance, long stopTimeMS, AtomicInt32 totHits, AtomicInt32 totTermCount)
             {
                 this.outerInstance = outerInstance;
                 this.stopTimeMS = stopTimeMS;
@@ -602,11 +602,11 @@
                 }
             }
 
-            conf.SetMergedSegmentWarmer(new IndexReaderWarmerAnonymousInnerClassHelper(this));
+            conf.SetMergedSegmentWarmer(new IndexReaderWarmerAnonymousClass(this));
 
             if (Verbose)
             {
-                conf.SetInfoStream(new PrintStreamInfoStreamAnonymousInnerClassHelper(Console.Out));
+                conf.SetInfoStream(new PrintStreamInfoStreamAnonymousClass(Console.Out));
             }
             m_writer = new IndexWriter(m_dir, conf);
             TestUtil.ReduceOpenFiles(m_writer);
@@ -795,11 +795,11 @@
             }
         }
 
-        private class IndexReaderWarmerAnonymousInnerClassHelper : IndexWriter.IndexReaderWarmer
+        private class IndexReaderWarmerAnonymousClass : IndexWriter.IndexReaderWarmer
         {
             private readonly ThreadedIndexingAndSearchingTestCase outerInstance;
 
-            public IndexReaderWarmerAnonymousInnerClassHelper(ThreadedIndexingAndSearchingTestCase outerInstance)
+            public IndexReaderWarmerAnonymousClass(ThreadedIndexingAndSearchingTestCase outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -842,9 +842,9 @@
             }
         }
 
-        private class PrintStreamInfoStreamAnonymousInnerClassHelper : TextWriterInfoStream
+        private class PrintStreamInfoStreamAnonymousClass : TextWriterInfoStream
         {
-            public PrintStreamInfoStreamAnonymousInnerClassHelper(TextWriter @out)
+            public PrintStreamInfoStreamAnonymousClass(TextWriter @out)
                 : base(@out)
             {
             }
diff --git a/src/Lucene.Net.TestFramework/Search/AssertingIndexSearcher.cs b/src/Lucene.Net.TestFramework/Search/AssertingIndexSearcher.cs
index d753f1c..ef44f4a 100644
--- a/src/Lucene.Net.TestFramework/Search/AssertingIndexSearcher.cs
+++ b/src/Lucene.Net.TestFramework/Search/AssertingIndexSearcher.cs
@@ -60,12 +60,12 @@
         public override Weight CreateNormalizedWeight(Query query)
         {
             Weight w = base.CreateNormalizedWeight(query);
-            return new AssertingWeightAnonymousInnerClassHelper(random, w);
+            return new AssertingWeightAnonymousClass(random, w);
         }
 
-        private class AssertingWeightAnonymousInnerClassHelper : AssertingWeight
+        private class AssertingWeightAnonymousClass : AssertingWeight
         {
-            public AssertingWeightAnonymousInnerClassHelper(Random random, Weight w)
+            public AssertingWeightAnonymousClass(Random random, Weight w)
                 : base(random, w)
             {
             }
diff --git a/src/Lucene.Net.TestFramework/Search/QueryUtils.cs b/src/Lucene.Net.TestFramework/Search/QueryUtils.cs
index 7331687..083db1c 100644
--- a/src/Lucene.Net.TestFramework/Search/QueryUtils.cs
+++ b/src/Lucene.Net.TestFramework/Search/QueryUtils.cs
@@ -74,7 +74,7 @@
 
             // test that a class check is done so that no exception is thrown
             // in the implementation of equals()
-            Query whacky = new QueryAnonymousInnerClassHelper();
+            Query whacky = new QueryAnonymousClass();
             whacky.Boost = q.Boost;
             CheckUnequal(q, whacky);
 
@@ -82,9 +82,9 @@
             Assert.IsFalse(q.Equals(null));
         }
 
-        private class QueryAnonymousInnerClassHelper : Query
+        private class QueryAnonymousClass : Query
         {
-            public QueryAnonymousInnerClassHelper()
+            public QueryAnonymousClass()
             {
             }
 
@@ -351,7 +351,7 @@
                 const float maxDiff = 1e-5f;
                 AtomicReader[] lastReader = new AtomicReader[] { null };
 
-                s.Search(q, new CollectorAnonymousInnerClassHelper(
+                s.Search(q, new CollectorAnonymousClass(
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                     luceneTestCase,
 #endif
@@ -380,7 +380,7 @@
             }
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
             private readonly LuceneTestCase luceneTestCase;
@@ -395,7 +395,7 @@
             private readonly float maxDiff;
             private readonly AtomicReader[] lastReader;
 
-            public CollectorAnonymousInnerClassHelper(
+            public CollectorAnonymousClass(
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                 LuceneTestCase luceneTestCase,
 #endif                
@@ -515,7 +515,7 @@
             int[] lastDoc = new int[] { -1 };
             AtomicReader[] lastReader = new AtomicReader[] { null };
             IList<AtomicReaderContext> context = s.TopReaderContext.Leaves;
-            s.Search(q, new CollectorAnonymousInnerClassHelper2(
+            s.Search(q, new CollectorAnonymousClass2(
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                 luceneTestCase,
 #endif
@@ -542,7 +542,7 @@
             }
         }
 
-        private class CollectorAnonymousInnerClassHelper2 : ICollector
+        private class CollectorAnonymousClass2 : ICollector
         {
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
             private readonly LuceneTestCase luceneTestCase;
@@ -554,7 +554,7 @@
             private readonly AtomicReader[] lastReader;
             private readonly IList<AtomicReaderContext> context;
 
-            public CollectorAnonymousInnerClassHelper2(
+            public CollectorAnonymousClass2(
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION                
                 LuceneTestCase luceneTestCase, 
  #endif                
diff --git a/src/Lucene.Net.TestFramework/Store/BaseDirectoryTestCase.cs b/src/Lucene.Net.TestFramework/Store/BaseDirectoryTestCase.cs
index 0bc7aff..62c4526 100644
--- a/src/Lucene.Net.TestFramework/Store/BaseDirectoryTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Store/BaseDirectoryTestCase.cs
@@ -472,7 +472,7 @@
             });
         }
 
-        //        private class ListAllThread : ThreadClass
+        //        private class ListAllThread : ThreadJob
         //        {
         //            private readonly BaseDirectoryTestCase outerInstance;
         //            private readonly Directory dir;
@@ -516,7 +516,7 @@
         //            }
         //        }
 
-        //        private class ListAllThread2 : ThreadClass
+        //        private class ListAllThread2 : ThreadJob
         //        {
         //            private readonly BaseDirectoryTestCase outerInstance;
         //            private readonly Directory dir;
@@ -590,8 +590,8 @@
         //                }
 
         //                AtomicBoolean stop = new AtomicBoolean();
-        //                ThreadClass writer = new ListAllThread(this, dir, stop);
-        //                ThreadClass reader = new ListAllThread2(this, dir, stop);
+        //                ThreadJob writer = new ListAllThread(this, dir, stop);
+        //                ThreadJob reader = new ListAllThread2(this, dir, stop);
 
         //                reader.Start();
         //                writer.Start();
diff --git a/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs b/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs
index a7f51d3..41dd014 100644
--- a/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs
+++ b/src/Lucene.Net.TestFramework/Store/MockDirectoryWrapper.cs
@@ -1227,19 +1227,19 @@
             }
 
             IndexInputSlicer delegateHandle = m_input.CreateSlicer(name, context);
-            IndexInputSlicer handle = new IndexInputSlicerAnonymousInnerClassHelper(this, name, delegateHandle);
+            IndexInputSlicer handle = new IndexInputSlicerAnonymousClass(this, name, delegateHandle);
             AddFileHandle(handle, name, Handle.Slice);
             return handle;
         }
 
-        private class IndexInputSlicerAnonymousInnerClassHelper : IndexInputSlicer
+        private class IndexInputSlicerAnonymousClass : IndexInputSlicer
         {
             private readonly MockDirectoryWrapper outerInstance;
 
             private readonly string name;
             private readonly IndexInputSlicer delegateHandle;
 
-            public IndexInputSlicerAnonymousInnerClassHelper(MockDirectoryWrapper outerInstance, string name, IndexInputSlicer delegateHandle)
+            public IndexInputSlicerAnonymousClass(MockDirectoryWrapper outerInstance, string name, IndexInputSlicer delegateHandle)
             {
                 this.outerInstance = outerInstance;
                 this.name = name;
diff --git a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
index 6a458a4..c5eb737 100644
--- a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
@@ -1563,22 +1563,9 @@
             {
                 int maxThreadCount = TestUtil.NextInt32(Random, 1, 4);
                 int maxMergeCount = TestUtil.NextInt32(Random, maxThreadCount, maxThreadCount + 4);
-                IConcurrentMergeScheduler mergeScheduler;
-
-#if !FEATURE_CONCURRENTMERGESCHEDULER
-                mergeScheduler = new TaskMergeScheduler();
-#else
-                //if (Rarely(random))
-                //{
-                //    mergeScheduler = new TaskMergeScheduler();
-                //}
-                //else
-                {
-                    mergeScheduler = new ConcurrentMergeScheduler();
-                }
-#endif
-                mergeScheduler.SetMaxMergesAndThreads(maxMergeCount, maxThreadCount);
-                c.SetMergeScheduler(mergeScheduler);
+                IConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+                cms.SetMaxMergesAndThreads(maxMergeCount, maxThreadCount);
+                c.SetMergeScheduler(cms);
             }
             if (random.NextBoolean())
             {
@@ -2425,7 +2412,7 @@
                     {
                         Console.WriteLine("NOTE: newSearcher using ExecutorService with " + threads + " threads");
                     }
-                    //r.AddReaderClosedListener(new ReaderClosedListenerAnonymousInnerClassHelper(ex)); // LUCENENET TODO: Implement event (see the commented ReaderClosedListenerAnonymousInnerClassHelper class near the bottom of this file)
+                    //r.AddReaderClosedListener(new ReaderClosedListenerAnonymousClass(ex)); // LUCENENET TODO: Implement event (see the commented ReaderClosedListenerAnonymousClass class near the bottom of this file)
                 }
                 IndexSearcher ret;
                 if (wrapWithAssertions)
@@ -3549,34 +3536,6 @@
             }
         }
 
-        /// <summary>
-        /// Contains a list of the Func&lt;IConcurrentMergeSchedulers&gt; to be tested.
-        /// Delegate method allows them to be created on their target thread instead of the test thread
-        /// and also ensures a separate instance is created in each case (which can affect the result of the test).
-        /// <para/>
-        /// The <see cref="TaskMergeScheduler"/> is only rarely included.
-        /// <para/>
-        /// LUCENENET specific for injection into tests (i.e. using NUnit.Framework.ValueSourceAttribute)
-        /// </summary>
-        public static class ConcurrentMergeSchedulerFactories
-        {
-            public static IList<Func<IConcurrentMergeScheduler>> Values
-            {
-                get
-                {
-                    var schedulerFactories = new List<Func<IConcurrentMergeScheduler>>();
-#if FEATURE_CONCURRENTMERGESCHEDULER
-                    schedulerFactories.Add(() => new ConcurrentMergeScheduler());
-                    //if (Rarely())
-                    //    schedulerFactories.Add(() => new TaskMergeScheduler());
-#else
-                    schedulerFactories.Add(() => new TaskMergeScheduler());
-#endif
-                    return schedulerFactories;
-                }
-            }
-        }
-
         internal static void LogNativeFSFactoryDebugInfo()
         {
             // LUCENENET specific - log the current locking strategy used and HResult values
@@ -3680,11 +3639,11 @@
         }
     }
 
-    //internal class ReaderClosedListenerAnonymousInnerClassHelper : IndexReader.IReaderClosedListener
+    //private class ReaderClosedListenerAnonymousClass : IndexReader.IReaderClosedListener
     //{
     //    private TaskScheduler ex;
 
-    //    public ReaderClosedListenerAnonymousInnerClassHelper(TaskScheduler ex)
+    //    public ReaderClosedListenerAnonymousClass(TaskScheduler ex)
     //    {
     //        this.ex = ex;
     //    }
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleAssertionsRequired.cs b/src/Lucene.Net.TestFramework/Util/TestRuleAssertionsRequired.cs
index fa25854..436d752 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleAssertionsRequired.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleAssertionsRequired.cs
@@ -35,17 +35,17 @@
     {
       public override Statement Apply(Statement @base, Description description)
       {
-        return new StatementAnonymousInnerClassHelper(this, @base, description);
+        return new StatementAnonymousClass(this, @base, description);
       }
 
-      private class StatementAnonymousInnerClassHelper : Statement
+      private class StatementAnonymousClass : Statement
       {
           private readonly TestRuleAssertionsRequired OuterInstance;
 
           private Statement @base;
           private Description Description;
 
-          public StatementAnonymousInnerClassHelper(TestRuleAssertionsRequired outerInstance, Statement @base, Description description)
+          public StatementAnonymousClass(TestRuleAssertionsRequired outerInstance, Statement @base, Description description)
           {
               this.OuterInstance = outerInstance;
               this.@base = @base;
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleFieldCacheSanity.cs b/src/Lucene.Net.TestFramework/Util/TestRuleFieldCacheSanity.cs
index 136566e..fc9a350 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleFieldCacheSanity.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleFieldCacheSanity.cs
@@ -50,17 +50,17 @@
 
       public override Statement Apply(Statement s, Description d)
       {
-        return new StatementAnonymousInnerClassHelper(this, s, d);
+        return new StatementAnonymousClass(this, s, d);
       }
 
-      private class StatementAnonymousInnerClassHelper : Statement
+      private class StatementAnonymousClass : Statement
       {
           private readonly TestRuleFieldCacheSanity OuterInstance;
 
           private Statement s;
           private Description d;
 
-          public StatementAnonymousInnerClassHelper(TestRuleFieldCacheSanity outerInstance, Statement s, Description d)
+          public StatementAnonymousClass(TestRuleFieldCacheSanity outerInstance, Statement s, Description d)
           {
               this.OuterInstance = outerInstance;
               this.s = s;
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleIgnoreAfterMaxFailures.cs b/src/Lucene.Net.TestFramework/Util/TestRuleIgnoreAfterMaxFailures.cs
index daa6a40..64ae2bf 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleIgnoreAfterMaxFailures.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleIgnoreAfterMaxFailures.cs
@@ -57,16 +57,16 @@
 
       public override Statement Apply(Statement s, Description d)
       {
-        return new StatementAnonymousInnerClassHelper(this, s);
+        return new StatementAnonymousClass(this, s);
       }
 
-      private class StatementAnonymousInnerClassHelper : Statement
+      private class StatementAnonymousClass : Statement
       {
           private readonly TestRuleIgnoreAfterMaxFailures OuterInstance;
 
           private Statement s;
 
-          public StatementAnonymousInnerClassHelper(TestRuleIgnoreAfterMaxFailures outerInstance, Statement s)
+          public StatementAnonymousClass(TestRuleIgnoreAfterMaxFailures outerInstance, Statement s)
           {
               this.OuterInstance = outerInstance;
               this.s = s;
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleIgnoreTestSuites.cs b/src/Lucene.Net.TestFramework/Util/TestRuleIgnoreTestSuites.cs
index d13eeca..c2c40bb 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleIgnoreTestSuites.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleIgnoreTestSuites.cs
@@ -51,17 +51,17 @@
 
       public override Statement Apply(Statement s, Description d)
       {
-        return new StatementAnonymousInnerClassHelper(this, s, d);
+        return new StatementAnonymousClass(this, s, d);
       }
 
-      private class StatementAnonymousInnerClassHelper : Statement
+      private class StatementAnonymousClass : Statement
       {
           private readonly TestRuleIgnoreTestSuites OuterInstance;
 
           private Statement s;
           private Description d;
 
-          public StatementAnonymousInnerClassHelper(TestRuleIgnoreTestSuites outerInstance, Statement s, Description d)
+          public StatementAnonymousClass(TestRuleIgnoreTestSuites outerInstance, Statement s, Description d)
           {
               this.OuterInstance = outerInstance;
               this.s = s;
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleMarkFailure.cs b/src/Lucene.Net.TestFramework/Util/TestRuleMarkFailure.cs
index 776a42d..5aa27e6 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleMarkFailure.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleMarkFailure.cs
@@ -44,16 +44,16 @@
 
       public override Statement Apply(Statement s, Description d)
       {
-        return new StatementAnonymousInnerClassHelper(this, s);
+        return new StatementAnonymousClass(this, s);
       }
 
-      private class StatementAnonymousInnerClassHelper : Statement
+      private class StatementAnonymousClass : Statement
       {
           private readonly TestRuleMarkFailure OuterInstance;
 
           private Statement s;
 
-          public StatementAnonymousInnerClassHelper(TestRuleMarkFailure outerInstance, Statement s)
+          public StatementAnonymousClass(TestRuleMarkFailure outerInstance, Statement s)
           {
               this.OuterInstance = outerInstance;
               this.s = s;
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs b/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
index 43e6aab..0c833a5 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleSetupAndRestoreClassEnv.cs
@@ -242,7 +242,7 @@
                     dvFormat = DocValuesFormat.ForName(LuceneTestCase.TestDocValuesFormat);
                 }
 
-                codec = new Lucene46CodecAnonymousInnerClassHelper(format, dvFormat);
+                codec = new Lucene46CodecAnonymousClass(format, dvFormat);
             }
             else if ("SimpleText".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) 
                 || ("random".Equals(LuceneTestCase.TestCodec, StringComparison.Ordinal) && randomVal == 9 && LuceneTestCase.Rarely(random) && !ShouldAvoidCodec("SimpleText")))
@@ -315,12 +315,12 @@
             }
         }
 
-        private class Lucene46CodecAnonymousInnerClassHelper : Lucene46Codec
+        private class Lucene46CodecAnonymousClass : Lucene46Codec
         {
             private readonly PostingsFormat format;
             private readonly DocValuesFormat dvFormat;
 
-            public Lucene46CodecAnonymousInnerClassHelper(PostingsFormat format, DocValuesFormat dvFormat)
+            public Lucene46CodecAnonymousClass(PostingsFormat format, DocValuesFormat dvFormat)
             {
                 this.format = format;
                 this.dvFormat = dvFormat;
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleSetupTeardownChained.cs b/src/Lucene.Net.TestFramework/Util/TestRuleSetupTeardownChained.cs
index 19c728f..6bf74fc 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleSetupTeardownChained.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleSetupTeardownChained.cs
@@ -43,16 +43,16 @@
 
       public override Statement Apply(Statement @base, Description description)
       {
-        return new StatementAnonymousInnerClassHelper(this, @base);
+        return new StatementAnonymousClass(this, @base);
       }
 
-      private class StatementAnonymousInnerClassHelper : Statement
+      private class StatementAnonymousClass : Statement
       {
           private readonly TestRuleSetupTeardownChained OuterInstance;
 
           private Statement @base;
 
-          public StatementAnonymousInnerClassHelper(TestRuleSetupTeardownChained outerInstance, Statement @base)
+          public StatementAnonymousClass(TestRuleSetupTeardownChained outerInstance, Statement @base)
           {
               this.OuterInstance = outerInstance;
               this.@base = @base;
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleStoreClassName.cs b/src/Lucene.Net.TestFramework/Util/TestRuleStoreClassName.cs
index b837d10..0c7e5cd 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleStoreClassName.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleStoreClassName.cs
@@ -41,17 +41,17 @@
           throw new ArgumentException("this is a @ClassRule (applies to suites only).");
         }
 
-        return new StatementAnonymousInnerClassHelper(this, s, d);
+        return new StatementAnonymousClass(this, s, d);
       }
 
-      private class StatementAnonymousInnerClassHelper : Statement
+      private class StatementAnonymousClass : Statement
       {
           private readonly TestRuleStoreClassName OuterInstance;
 
           private Statement s;
           private Description d;
 
-          public StatementAnonymousInnerClassHelper(TestRuleStoreClassName outerInstance, Statement s, Description d)
+          public StatementAnonymousClass(TestRuleStoreClassName outerInstance, Statement s, Description d)
           {
               this.OuterInstance = outerInstance;
               this.s = s;
diff --git a/src/Lucene.Net.TestFramework/Util/TestRuleThreadAndTestName.cs b/src/Lucene.Net.TestFramework/Util/TestRuleThreadAndTestName.cs
index 79d2420..61a8d13 100644
--- a/src/Lucene.Net.TestFramework/Util/TestRuleThreadAndTestName.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestRuleThreadAndTestName.cs
@@ -43,17 +43,17 @@
 
       public override Statement Apply(Statement @base, Description description)
       {
-        return new StatementAnonymousInnerClassHelper(this, @base, description);
+        return new StatementAnonymousClass(this, @base, description);
       }
 
-      private class StatementAnonymousInnerClassHelper : Statement
+      private class StatementAnonymousClass : Statement
       {
           private readonly TestRuleThreadAndTestName OuterInstance;
 
           private Statement @base;
           private Description Description;
 
-          public StatementAnonymousInnerClassHelper(TestRuleThreadAndTestName outerInstance, Statement @base, Description description)
+          public StatementAnonymousClass(TestRuleThreadAndTestName outerInstance, Statement @base, Description description)
           {
               this.OuterInstance = outerInstance;
               this.@base = @base;
diff --git a/src/Lucene.Net.TestFramework/Util/TestSecurityManager.cs b/src/Lucene.Net.TestFramework/Util/TestSecurityManager.cs
index 50fc1b0..a4fd088 100644
--- a/src/Lucene.Net.TestFramework/Util/TestSecurityManager.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestSecurityManager.cs
@@ -51,19 +51,19 @@
       /// <exception cref="SecurityException"> if the caller of this method is not the test runner itself. </exception>
       public override void CheckExit(int status)
       {
-        AccessController.doPrivileged(new PrivilegedActionAnonymousInnerClassHelper(this, status));
+        AccessController.doPrivileged(new PrivilegedActionAnonymousClass(this, status));
 
         // we passed the stack check, delegate to super, so default policy can still deny permission:
         base.CheckExit(status);
       }
 
-      private class PrivilegedActionAnonymousInnerClassHelper : PrivilegedAction<Void>
+      private class PrivilegedActionAnonymousClass : PrivilegedAction<Void>
       {
           private readonly TestSecurityManager OuterInstance;
 
           private int Status;
 
-          public PrivilegedActionAnonymousInnerClassHelper(TestSecurityManager outerInstance, int status)
+          public PrivilegedActionAnonymousClass(TestSecurityManager outerInstance, int status)
           {
               this.OuterInstance = outerInstance;
               this.Status = status;
diff --git a/src/Lucene.Net.TestFramework/Util/TestUtil.cs b/src/Lucene.Net.TestFramework/Util/TestUtil.cs
index eb8728a..1dcc4ad 100644
--- a/src/Lucene.Net.TestFramework/Util/TestUtil.cs
+++ b/src/Lucene.Net.TestFramework/Util/TestUtil.cs
@@ -876,14 +876,14 @@
             {
                 Console.WriteLine("forcing postings format to:" + format);
             }
-            return new Lucene46CodecAnonymousInnerClassHelper(format);
+            return new Lucene46CodecAnonymousClass(format);
         }
 
-        private class Lucene46CodecAnonymousInnerClassHelper : Lucene46Codec
+        private class Lucene46CodecAnonymousClass : Lucene46Codec
         {
             private readonly PostingsFormat format;
 
-            public Lucene46CodecAnonymousInnerClassHelper(PostingsFormat format)
+            public Lucene46CodecAnonymousClass(PostingsFormat format)
             {
                 this.format = format;
             }
@@ -908,14 +908,14 @@
             {
                 Console.WriteLine("forcing docvalues format to:" + format);
             }
-            return new Lucene46CodecAnonymousInnerClassHelper2(format);
+            return new Lucene46CodecAnonymousClass2(format);
         }
 
-        private class Lucene46CodecAnonymousInnerClassHelper2 : Lucene46Codec
+        private class Lucene46CodecAnonymousClass2 : Lucene46Codec
         {
             private readonly DocValuesFormat format;
 
-            public Lucene46CodecAnonymousInnerClassHelper2(DocValuesFormat format)
+            public Lucene46CodecAnonymousClass2(DocValuesFormat format)
             {
                 this.format = format;
             }
@@ -1024,15 +1024,15 @@
         public static void AssertAttributeReflection(Attribute att, IDictionary<string, object> reflectedValues)
         {
             IDictionary<string, object> map = new JCG.Dictionary<string, object>();
-            att.ReflectWith(new AttributeReflectorAnonymousInnerClassHelper(map));
+            att.ReflectWith(new AttributeReflectorAnonymousClass(map));
             Assert.AreEqual(reflectedValues, map, aggressive: false, "Reflection does not produce same map");
         }
 
-        private class AttributeReflectorAnonymousInnerClassHelper : IAttributeReflector
+        private class AttributeReflectorAnonymousClass : IAttributeReflector
         {
             private readonly IDictionary<string, object> map;
 
-            public AttributeReflectorAnonymousInnerClassHelper(IDictionary<string, object> map)
+            public AttributeReflectorAnonymousClass(IDictionary<string, object> map)
             {
                 this.map = map;
             }
@@ -1278,7 +1278,7 @@
             {
                 case 5:
                 case 4:
-                    return new RandomAccessFilterStrategyAnonymousInnerClassHelper();
+                    return new RandomAccessFilterStrategyAnonymousClass();
 
                 case 3:
                     return FilteredQuery.RANDOM_ACCESS_FILTER_STRATEGY;
@@ -1297,7 +1297,7 @@
             }
         }
 
-        private class RandomAccessFilterStrategyAnonymousInnerClassHelper : FilteredQuery.RandomAccessFilterStrategy
+        private class RandomAccessFilterStrategyAnonymousClass : FilteredQuery.RandomAccessFilterStrategy
         {
             protected override bool UseRandomAccess(IBits bits, int firstFilterDoc)
             {
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestBugInSomething.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestBugInSomething.cs
index e9365b8..dab7c1b 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestBugInSomething.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestBugInSomething.cs
@@ -61,11 +61,11 @@
             CheckAnalysisConsistency(Random, a, false, "wmgddzunizdomqyj");
         }
 
-        internal CharFilter wrappedStream = new CharFilterAnonymousInnerClassHelper(new StringReader("bogus"));
+        internal CharFilter wrappedStream = new CharFilterAnonymousClass(new StringReader("bogus"));
 
-        private sealed class CharFilterAnonymousInnerClassHelper : CharFilter
+        private sealed class CharFilterAnonymousClass : CharFilter
         {
-            public CharFilterAnonymousInnerClassHelper(StringReader java) : base(java)
+            public CharFilterAnonymousClass(StringReader java) : base(java)
             {
             }
 
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs
index 072d41d..43b0365 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestFactories.cs
@@ -189,11 +189,11 @@
         }
 
         // some silly classes just so we can use checkRandomData
-        private readonly TokenizerFactory assertingTokenizer = new AnonymousInnerClassHelperTokenizerFactory(new Dictionary<string, string>());
+        private readonly TokenizerFactory assertingTokenizer = new TokenizerFactoryAnonymousClass(new Dictionary<string, string>());
 
-        private sealed class AnonymousInnerClassHelperTokenizerFactory : TokenizerFactory
+        private sealed class TokenizerFactoryAnonymousClass : TokenizerFactory
         {
-            public AnonymousInnerClassHelperTokenizerFactory(IDictionary<string, string> java) : base(java)
+            public TokenizerFactoryAnonymousClass(IDictionary<string, string> java) : base(java)
             {
             }
 
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
index 0d67b47..656dba5 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
@@ -68,11 +68,11 @@
             bool Apply(T o);
         }
 
-        private static readonly IPredicate<object[]> ALWAYS = new PredicateAnonymousInnerClassHelper();
+        private static readonly IPredicate<object[]> ALWAYS = new PredicateAnonymousClass();
 
-        private class PredicateAnonymousInnerClassHelper : IPredicate<object[]>
+        private class PredicateAnonymousClass : IPredicate<object[]>
         {
-            public PredicateAnonymousInnerClassHelper()
+            public PredicateAnonymousClass()
             {
             }
 
@@ -95,9 +95,9 @@
             try
             {
                 brokenConstructors[typeof(LimitTokenCountFilter).GetConstructor(new Type[] { typeof(TokenStream), typeof(int) })] = ALWAYS;
-                brokenConstructors[typeof(LimitTokenCountFilter).GetConstructor(new Type[] { typeof(TokenStream), typeof(int), typeof(bool) })] = new PredicateAnonymousInnerClassHelper2();
+                brokenConstructors[typeof(LimitTokenCountFilter).GetConstructor(new Type[] { typeof(TokenStream), typeof(int), typeof(bool) })] = new PredicateAnonymousClass2();
                 brokenConstructors[typeof(LimitTokenPositionFilter).GetConstructor(new Type[] { typeof(TokenStream), typeof(int) })] = ALWAYS;
-                brokenConstructors[typeof(LimitTokenPositionFilter).GetConstructor(new Type[] { typeof(TokenStream), typeof(int), typeof(bool) })] = new PredicateAnonymousInnerClassHelper3();
+                brokenConstructors[typeof(LimitTokenPositionFilter).GetConstructor(new Type[] { typeof(TokenStream), typeof(int), typeof(bool) })] = new PredicateAnonymousClass3();
                 foreach (Type c in new Type[] {
                     // TODO: can we promote some of these to be only
                     // offsets offenders?
@@ -166,9 +166,9 @@
             allowedCharFilterArgs.Add(typeof(TextReader));
         }
 
-        private class PredicateAnonymousInnerClassHelper2 : IPredicate<object[]>
+        private class PredicateAnonymousClass2 : IPredicate<object[]>
         {
-            public PredicateAnonymousInnerClassHelper2()
+            public PredicateAnonymousClass2()
             {
             }
 
@@ -179,9 +179,9 @@
             }
         }
 
-        private class PredicateAnonymousInnerClassHelper3 : IPredicate<object[]>
+        private class PredicateAnonymousClass3 : IPredicate<object[]>
         {
-            public PredicateAnonymousInnerClassHelper3()
+            public PredicateAnonymousClass3()
             {
             }
 
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestRemoveDuplicatesTokenFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestRemoveDuplicatesTokenFilter.cs
index e903892..e01e9ea 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestRemoveDuplicatesTokenFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Miscellaneous/TestRemoveDuplicatesTokenFilter.cs
@@ -45,16 +45,16 @@
         public virtual void TestDups(string expected, params Token[] tokens)
         {
             IEnumerator<Token> toks = ((IEnumerable<Token>)tokens).GetEnumerator();
-            TokenStream ts = new RemoveDuplicatesTokenFilter((new TokenStreamAnonymousInnerClassHelper(toks)));
+            TokenStream ts = new RemoveDuplicatesTokenFilter((new TokenStreamAnonymousClass(toks)));
 
             AssertTokenStreamContents(ts, Regex.Split(expected, "\\s").TrimEnd());
         }
 
-        private sealed class TokenStreamAnonymousInnerClassHelper : TokenStream
+        private sealed class TokenStreamAnonymousClass : TokenStream
         {
             private readonly IEnumerator<Token> toks;
 
-            public TokenStreamAnonymousInnerClassHelper(IEnumerator<Token> toks)
+            public TokenStreamAnonymousClass(IEnumerator<Token> toks)
             {
                 this.toks = toks;
                 termAtt = AddAttribute<ICharTermAttribute>();
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/NGram/NGramTokenizerTest.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/NGram/NGramTokenizerTest.cs
index 9cbe369..42c7669 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/NGram/NGramTokenizerTest.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/NGram/NGramTokenizerTest.cs
@@ -183,7 +183,7 @@
             {
                 offsets[i + 1] = offsets[i] + Character.CharCount(codePoints[i]);
             }
-            TokenStream grams = new NGramTokenizerAnonymousInnerClassHelper(TEST_VERSION_CURRENT, new StringReader(s), minGram, maxGram, edgesOnly, nonTokenChars);
+            TokenStream grams = new NGramTokenizerAnonymousClass(TEST_VERSION_CURRENT, new StringReader(s), minGram, maxGram, edgesOnly, nonTokenChars);
             ICharTermAttribute termAtt = grams.AddAttribute<ICharTermAttribute>();
             IPositionIncrementAttribute posIncAtt = grams.AddAttribute<IPositionIncrementAttribute>();
             IPositionLengthAttribute posLenAtt = grams.AddAttribute<IPositionLengthAttribute>();
@@ -221,11 +221,11 @@
             assertEquals(s.Length, offsetAtt.EndOffset);
         }
 
-        private sealed class NGramTokenizerAnonymousInnerClassHelper : NGramTokenizer
+        private sealed class NGramTokenizerAnonymousClass : NGramTokenizer
         {
             private readonly string nonTokenChars;
 
-            public NGramTokenizerAnonymousInnerClassHelper(LuceneVersion TEST_VERSION_CURRENT, StringReader java, int minGram, int maxGram, bool edgesOnly, string nonTokenChars)
+            public NGramTokenizerAnonymousClass(LuceneVersion TEST_VERSION_CURRENT, StringReader java, int minGram, int maxGram, bool edgesOnly, string nonTokenChars)
                   : base(TEST_VERSION_CURRENT, java, minGram, maxGram, edgesOnly)
             {
                 this.nonTokenChars = nonTokenChars;
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Sinks/TestTeeSinkTokenFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Sinks/TestTeeSinkTokenFilter.cs
index 2d52d22..33502a8 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Sinks/TestTeeSinkTokenFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Sinks/TestTeeSinkTokenFilter.cs
@@ -59,9 +59,9 @@
             }
         }
 
-        internal static readonly TeeSinkTokenFilter.SinkFilter theFilter = new SinkFilterAnonymousInnerClassHelper();
+        internal static readonly TeeSinkTokenFilter.SinkFilter theFilter = new SinkFilterAnonymousClass();
 
-        private sealed class SinkFilterAnonymousInnerClassHelper : TeeSinkTokenFilter.SinkFilter
+        private sealed class SinkFilterAnonymousClass : TeeSinkTokenFilter.SinkFilter
         {
             public override bool Accept(AttributeSource a)
             {
@@ -70,9 +70,9 @@
             }
         }
 
-        internal static readonly TeeSinkTokenFilter.SinkFilter dogFilter = new SinkFilterAnonymousInnerClassHelper2();
+        internal static readonly TeeSinkTokenFilter.SinkFilter dogFilter = new SinkFilterAnonymousClass2();
 
-        private sealed class SinkFilterAnonymousInnerClassHelper2 : TeeSinkTokenFilter.SinkFilter
+        private sealed class SinkFilterAnonymousClass2 : TeeSinkTokenFilter.SinkFilter
         {
             public override bool Accept(AttributeSource a)
             {
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMap.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMap.cs
index 46f2482..b229e07 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMap.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Synonym/TestSynonymMap.cs
@@ -280,7 +280,7 @@
             IDictionary<string, string> args = new Dictionary<string, string>();
             args["synonyms"] = "something.txt";
             SlowSynonymFilterFactory ff = new SlowSynonymFilterFactory(args);
-            ff.Inform(new ResourceLoaderAnonymousInnerClassHelper());
+            ff.Inform(new ResourceLoaderAnonymousClass());
 
             SlowSynonymMap synMap = ff.SynonymMap;
             assertEquals(2, synMap.Submap.size());
@@ -290,7 +290,7 @@
             AssertTokIncludes(synMap, "b", "b");
         }
 
-        internal sealed class ResourceLoaderAnonymousInnerClassHelper : IResourceLoader
+        private sealed class ResourceLoaderAnonymousClass : IResourceLoader
         {
             public T NewInstance<T>(string cname)
             {
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestAnalysisSPILoader.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestAnalysisSPILoader.cs
index 2e9089a..b782958 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestAnalysisSPILoader.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestAnalysisSPILoader.cs
@@ -30,12 +30,12 @@
 
         private IDictionary<string, string> VersionArgOnly()
         {
-            return new HashMapAnonymousInnerClassHelper();
+            return new HashMapAnonymousClass();
         }
 
-        private sealed class HashMapAnonymousInnerClassHelper : Dictionary<string, string>
+        private sealed class HashMapAnonymousClass : Dictionary<string, string>
         {
-            public HashMapAnonymousInnerClassHelper()
+            public HashMapAnonymousClass()
             {
                 this["luceneMatchVersion"] = TEST_VERSION_CURRENT.ToString();
             }
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestBufferedCharFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestBufferedCharFilter.cs
index cc9cd0e..309553d 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestBufferedCharFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestBufferedCharFilter.cs
@@ -261,14 +261,14 @@
             assertTrue(new BufferedCharFilter(new StringReader(new string(new char[5], 1, 0)), 2).Read() == -1);
         }
 
-        private sealed class ReaderAnonymousInnerClassHelper : CharFilter
+        private sealed class ReaderAnonymousClass : CharFilter
         {
             private const int SIZE = 2;
             private int pos = 0;
 
             private readonly char[] contents = new char[SIZE];
 
-            public ReaderAnonymousInnerClassHelper()
+            public ReaderAnonymousClass()
                 : base(null)
             { }
 
@@ -435,7 +435,7 @@
                 fail("Exception during read test");
             }
 
-            BufferedCharFilter bufin = new BufferedCharFilter(new ReaderAnonymousInnerClassHelper());
+            BufferedCharFilter bufin = new BufferedCharFilter(new ReaderAnonymousClass());
 
             //BufferedCharFilter bufin = new BufferedCharFilter(new Reader() {
             //            int size = 2, pos = 0;
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs
index da3e608..4493c32 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs
@@ -121,7 +121,7 @@
         [Test]
         public virtual void TestCrossPlaneNormalization()
         {
-            var analyzer = new AnalyzerAnonymousInnerClassHelper();
+            var analyzer = new AnalyzerAnonymousClass();
             var num = 1000 * RandomMultiplier;
             for (var i = 0; i < num; i++)
             {
@@ -151,17 +151,17 @@
             CheckRandomData(Random, analyzer, num);
         }
 
-        private sealed class AnalyzerAnonymousInnerClassHelper : Analyzer
+        private sealed class AnalyzerAnonymousClass : Analyzer
         {
             protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
-                Tokenizer tokenizer = new LetterTokenizerAnonymousInnerClassHelper(TEST_VERSION_CURRENT, reader);
+                Tokenizer tokenizer = new LetterTokenizerAnonymousClass(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);
             }
 
-            private sealed class LetterTokenizerAnonymousInnerClassHelper : LetterTokenizer
+            private sealed class LetterTokenizerAnonymousClass : LetterTokenizer
             {
-                public LetterTokenizerAnonymousInnerClassHelper(LuceneVersion TEST_VERSION_CURRENT, TextReader reader)
+                public LetterTokenizerAnonymousClass(LuceneVersion TEST_VERSION_CURRENT, TextReader reader)
                     : base(TEST_VERSION_CURRENT, reader)
                 {
                 }
@@ -184,7 +184,7 @@
         [Test]
         public virtual void TestCrossPlaneNormalization2()
         {
-            var analyzer = new AnalyzerAnonymousInnerClassHelper2();
+            var analyzer = new AnalyzerAnonymousClass2();
             var num = 1000 * RandomMultiplier;
             for (var i = 0; i < num; i++)
             {
@@ -214,17 +214,17 @@
             CheckRandomData(Random, analyzer, num);
         }
 
-        private sealed class AnalyzerAnonymousInnerClassHelper2 : Analyzer
+        private sealed class AnalyzerAnonymousClass2 : Analyzer
         {
             protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
-                Tokenizer tokenizer = new LetterTokenizerAnonymousInnerClassHelper2(TEST_VERSION_CURRENT, reader);
+                Tokenizer tokenizer = new LetterTokenizerAnonymousClass2(TEST_VERSION_CURRENT, reader);
                 return new TokenStreamComponents(tokenizer, tokenizer);
             }
 
-            private sealed class LetterTokenizerAnonymousInnerClassHelper2 : LetterTokenizer
+            private sealed class LetterTokenizerAnonymousClass2 : LetterTokenizer
             {
-                public LetterTokenizerAnonymousInnerClassHelper2(LuceneVersion TEST_VERSION_CURRENT, TextReader reader)
+                public LetterTokenizerAnonymousClass2(LuceneVersion TEST_VERSION_CURRENT, TextReader reader)
                     : base(TEST_VERSION_CURRENT, reader)
                 {
                 }
@@ -252,15 +252,15 @@
         [LuceneNetSpecific]
         public virtual void TestSurrogates()
         {
-            var analyzer = new AnalyzerAnonymousInnerClassHelper3();
+            var analyzer = new AnalyzerAnonymousClass3();
 
             AssertAnalyzesTo(analyzer, "bar 123" + (char)55404 + (char)56321 + "34 5te 987", new string[] { "123𫀁34", "5", "987" });
             AssertAnalyzesTo(analyzer, "787 " + (char)55297 + (char)56388 + "6" + (char)55404 + (char)56321 + " art true 734", new string[] { "787", "𐑄6𫀁", "734" });
         }
 
-        private sealed class AnalyzerAnonymousInnerClassHelper3 : Analyzer
+        private sealed class AnalyzerAnonymousClass3 : Analyzer
         {
-            public AnalyzerAnonymousInnerClassHelper3()
+            public AnalyzerAnonymousClass3()
             { }
 
             protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestRollingCharBuffer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestRollingCharBuffer.cs
index 807b368..04a24d0 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestRollingCharBuffer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestRollingCharBuffer.cs
@@ -28,7 +28,16 @@
         [Test]
         public virtual void Test()
         {
-            var ITERS = AtLeast(1000);
+            int ITERS;
+
+            // LUCENENET specific: NUnit will crash with an OOM if we run the full test
+            // with verbosity enabled, so the number of iterations is halved to keep it
+            // from crashing.
+            if (Verbose)
+                ITERS = AtLeast(500);
+            else
+                ITERS = AtLeast(1000);
+
 
             var buffer = new RollingCharBuffer();
 
diff --git a/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/Segmentation/TestICUTokenizer.cs b/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/Segmentation/TestICUTokenizer.cs
index 73c21ac..23b6f21 100644
--- a/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/Segmentation/TestICUTokenizer.cs
+++ b/src/Lucene.Net.Tests.Analysis.ICU/Analysis/Icu/Segmentation/TestICUTokenizer.cs
@@ -351,11 +351,11 @@
             ts.End();
         }
 
-        private class ThreadAnonymousHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly CountdownEvent startingGun;
 
-            public ThreadAnonymousHelper(CountdownEvent startingGun)
+            public ThreadAnonymousClass(CountdownEvent startingGun)
             {
                 this.startingGun = startingGun;
             }
@@ -399,10 +399,10 @@
         {
             int numThreads = 8;
             CountdownEvent startingGun = new CountdownEvent(1);
-            ThreadAnonymousHelper[] threads = new ThreadAnonymousHelper[numThreads];
+            ThreadAnonymousClass[] threads = new ThreadAnonymousClass[numThreads];
             for (int i = 0; i < threads.Length; i++)
             {
-                threads[i] = new ThreadAnonymousHelper(startingGun);
+                threads[i] = new ThreadAnonymousClass(startingGun);
 
                 threads[i].Start();
             }
diff --git a/src/Lucene.Net.Tests.Benchmark/ByTask/Tasks/CountingHighlighterTestTask.cs b/src/Lucene.Net.Tests.Benchmark/ByTask/Tasks/CountingHighlighterTestTask.cs
index 678339f..2aea5df 100644
--- a/src/Lucene.Net.Tests.Benchmark/ByTask/Tasks/CountingHighlighterTestTask.cs
+++ b/src/Lucene.Net.Tests.Benchmark/ByTask/Tasks/CountingHighlighterTestTask.cs
@@ -46,11 +46,11 @@
             return document;
         }
 
-        private class BenchmarkHighlighterAnonymousHelper : BenchmarkHighlighter
+        private class BenchmarkHighlighterAnonymousClass : BenchmarkHighlighter
         {
             private readonly CountingHighlighterTestTask outerInstance;
             private readonly Highlighter highlighter;
-            public BenchmarkHighlighterAnonymousHelper(CountingHighlighterTestTask outerInstance, Highlighter highlighter)
+            public BenchmarkHighlighterAnonymousClass(CountingHighlighterTestTask outerInstance, Highlighter highlighter)
             {
                 this.outerInstance = outerInstance;
                 this.highlighter = highlighter;
@@ -68,7 +68,7 @@
         protected override BenchmarkHighlighter GetBenchmarkHighlighter(Query q)
         {
             m_highlighter = new Highlighter(new SimpleHTMLFormatter(), new QueryScorer(q));
-            return new BenchmarkHighlighterAnonymousHelper(this, m_highlighter);
+            return new BenchmarkHighlighterAnonymousClass(this, m_highlighter);
             //        return new BenchmarkHighlighter() {
             //  @Override
             //  public int doHighlight(IndexReader reader, int doc, String field, Document document, Analyzer analyzer, String text) 
diff --git a/src/Lucene.Net.Tests.Benchmark/ByTask/Tasks/WriteLineDocTaskTest.cs b/src/Lucene.Net.Tests.Benchmark/ByTask/Tasks/WriteLineDocTaskTest.cs
index b937246..9e994aa 100644
--- a/src/Lucene.Net.Tests.Benchmark/ByTask/Tasks/WriteLineDocTaskTest.cs
+++ b/src/Lucene.Net.Tests.Benchmark/ByTask/Tasks/WriteLineDocTaskTest.cs
@@ -368,10 +368,10 @@
                 br.Dispose();
             }
         }
-        private class ThreadAnonymousHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly WriteLineDocTask wldt;
-            public ThreadAnonymousHelper(string name, WriteLineDocTask wldt)
+            public ThreadAnonymousClass(string name, WriteLineDocTask wldt)
                 : base(name)
             {
                 this.IsDebug = true;
@@ -401,7 +401,7 @@
             {
                 for (int i = 0; i < threads.Length; i++)
                 {
-                    threads[i] = new ThreadAnonymousHelper("t" + i, wldt);
+                    threads[i] = new ThreadAnonymousClass("t" + i, wldt);
                 }
 
                 foreach (ThreadJob t in threads) t.Start();
diff --git a/src/Lucene.Net.Tests.Classification/Support/TestApiConsistency.cs b/src/Lucene.Net.Tests.Classification/Support/TestApiConsistency.cs
index 8ce487b..d60e6c2 100644
--- a/src/Lucene.Net.Tests.Classification/Support/TestApiConsistency.cs
+++ b/src/Lucene.Net.Tests.Classification/Support/TestApiConsistency.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Attributes;
+using Lucene.Net.Attributes;
 using Lucene.Net.Util;
 using NUnit.Framework;
 using System;
@@ -111,7 +111,7 @@
         public override void TestForPublicMembersContainingNonNetNumeric(Type typeFromTargetAssembly)
         {
             base.TestForPublicMembersContainingNonNetNumeric(typeFromTargetAssembly);
-        }        
+        }
 
         [Test, LuceneNetSpecific]
         [TestCase(typeof(Lucene.Net.Classification.KNearestNeighborClassifier))]
@@ -129,12 +129,12 @@
 
         // LUCENENET NOTE: This test is only for identifying members who were changed from
         // ICollection, IList or ISet to IEnumerable during the port (that should be changed back)
-        //[Test, LuceneNetSpecific]
-        //[TestCase(typeof(Lucene.Net.Classification.KNearestNeighborClassifier))]
-        //public override void TestForMembersAcceptingOrReturningIEnumerable(Type typeFromTargetAssembly)
-        //{
-        //    base.TestForMembersAcceptingOrReturningIEnumerable(typeFromTargetAssembly);
-        //}
+        [Test, LuceneNetSpecific]
+        [TestCase(typeof(Lucene.Net.Classification.KNearestNeighborClassifier))]
+        public override void TestForMembersAcceptingOrReturningIEnumerable(Type typeFromTargetAssembly)
+        {
+            base.TestForMembersAcceptingOrReturningIEnumerable(typeFromTargetAssembly);
+        }
 
         [Test, LuceneNetSpecific]
         [TestCase(typeof(Lucene.Net.Classification.KNearestNeighborClassifier))]
diff --git a/src/Lucene.Net.Tests.Classification/Utils/DataSplitterTest.cs b/src/Lucene.Net.Tests.Classification/Utils/DataSplitterTest.cs
index 0f7d9ec..30ed545 100644
--- a/src/Lucene.Net.Tests.Classification/Utils/DataSplitterTest.cs
+++ b/src/Lucene.Net.Tests.Classification/Utils/DataSplitterTest.cs
@@ -1,4 +1,4 @@
-using System;
+using System;
 using System.Text;
 using Lucene.Net.Analysis;
 using Lucene.Net.Classification.Utils;
@@ -32,24 +32,24 @@
      */
     public class DataSplitterTest : Util.LuceneTestCase
     {
-        private AtomicReader _originalIndex;
-        private RandomIndexWriter _indexWriter;
-        private Directory _dir;
+        private AtomicReader originalIndex;
+        private RandomIndexWriter indexWriter;
+        private Directory dir;
 
-        private String _textFieldName = "text";
-        private String _classFieldName = "class";
-        private String _idFieldName = "id";
+        private String textFieldName = "text";
+        private String classFieldName = "class";
+        private String idFieldName = "id";
 
         [SetUp]
         public override void SetUp()
         {
             base.SetUp();
-            _dir = NewDirectory();
-            _indexWriter = new RandomIndexWriter(
+            dir = NewDirectory();
+            indexWriter = new RandomIndexWriter(
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                 this,
 #endif
-                Random, _dir, new MockAnalyzer(Random));
+                Random, dir, new MockAnalyzer(Random));
 
             FieldType ft = new FieldType(TextField.TYPE_STORED);
             ft.StoreTermVectors = true;
@@ -62,38 +62,38 @@
             for (int i = 0; i < 100; i++)
             {
                 doc = new Document();
-                doc.Add(new Field(_idFieldName, Random.toString(), ft));
-                doc.Add(new Field(_textFieldName, new StringBuilder(Random.toString()).append(Random.toString()).append(
+                doc.Add(new Field(idFieldName, Random.toString(), ft));
+                doc.Add(new Field(textFieldName, new StringBuilder(Random.toString()).append(Random.toString()).append(
                     Random.toString()).toString(), ft));
-                doc.Add(new Field(_classFieldName, Random.toString(), ft));
-                _indexWriter.AddDocument(doc, analyzer);
+                doc.Add(new Field(classFieldName, Random.toString(), ft));
+                indexWriter.AddDocument(doc, analyzer);
             }
 
-            _indexWriter.Commit();
+            indexWriter.Commit();
 
-            _originalIndex = SlowCompositeReaderWrapper.Wrap(_indexWriter.GetReader());
+            originalIndex = SlowCompositeReaderWrapper.Wrap(indexWriter.GetReader());
         }
 
         [TearDown]
         public override void TearDown()
         {
-            _originalIndex.Dispose();
-            _indexWriter.Dispose();
-            _dir.Dispose();
+            originalIndex.Dispose();
+            indexWriter.Dispose();
+            dir.Dispose();
             base.TearDown();
         }
 
         [Test]
         public void TestSplitOnAllFields()
         {
-            AssertSplit(_originalIndex, 0.1, 0.1);
+            AssertSplit(originalIndex, 0.1, 0.1);
         }
 
 
         [Test]
         public void TestSplitOnSomeFields()
         {
-            AssertSplit(_originalIndex, 0.2, 0.35, _idFieldName, _textFieldName);
+            AssertSplit(originalIndex, 0.2, 0.35, idFieldName, textFieldName);
         }
 
         public static void AssertSplit(AtomicReader originalIndex, double testRatio, double crossValidationRatio, params string[] fieldNames)
diff --git a/src/Lucene.Net.Tests.Codecs/IntBlock/TestVariableIntBlockPostingsFormat.cs b/src/Lucene.Net.Tests.Codecs/IntBlock/TestVariableIntBlockPostingsFormat.cs
index e9fd23a..6079e8a 100644
--- a/src/Lucene.Net.Tests.Codecs/IntBlock/TestVariableIntBlockPostingsFormat.cs
+++ b/src/Lucene.Net.Tests.Codecs/IntBlock/TestVariableIntBlockPostingsFormat.cs
@@ -21,10 +21,6 @@
      * limitations under the License.
      */
 
-    //using MockVariableIntBlockPostingsFormat = org.apache.lucene.codecs.mockintblock.MockVariableIntBlockPostingsFormat;
-    //using BasePostingsFormatTestCase = org.apache.lucene.index.BasePostingsFormatTestCase;
-    //using TestUtil = org.apache.lucene.util.TestUtil;
-    //using TestUtil = org.apache.lucene.util.TestUtil;
 
     /// <summary>
     /// Basic tests for VariableIntBlock
diff --git a/src/Lucene.Net.Tests.Codecs/Memory/TestFSTOrdPostingsFormat.cs b/src/Lucene.Net.Tests.Codecs/Memory/TestFSTOrdPostingsFormat.cs
index 3523216..3fef554 100644
--- a/src/Lucene.Net.Tests.Codecs/Memory/TestFSTOrdPostingsFormat.cs
+++ b/src/Lucene.Net.Tests.Codecs/Memory/TestFSTOrdPostingsFormat.cs
@@ -33,67 +33,5 @@
             return codec;
         }
 
-
-        #region BasePostingsFormatTestCase
-        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
-        // context in Visual Studio. This fixes that with the minimum amount of code necessary
-        // to run them in the correct context without duplicating all of the tests.
-
-        [Test]
-        public override void TestDocsOnly()
-        {
-            base.TestDocsOnly();
-        }
-
-        [Test]
-        public override void TestDocsAndFreqs()
-        {
-            base.TestDocsAndFreqs();
-        }
-
-        [Test]
-        public override void TestDocsAndFreqsAndPositions()
-        {
-            base.TestDocsAndFreqsAndPositions();
-        }
-
-        [Test]
-        public override void TestDocsAndFreqsAndPositionsAndPayloads()
-        {
-            base.TestDocsAndFreqsAndPositionsAndPayloads();
-        }
-
-        [Test]
-        public override void TestDocsAndFreqsAndPositionsAndOffsets()
-        {
-            base.TestDocsAndFreqsAndPositionsAndOffsets();
-        }
-
-        [Test]
-        public override void TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads()
-        {
-            base.TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads();
-        }
-
-        [Test]
-        public override void TestRandom()
-        {
-            base.TestRandom();
-        }
-
-        #endregion
-
-        #region BaseIndexFileFormatTestCase
-        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
-        // context in Visual Studio. This fixes that with the minimum amount of code necessary
-        // to run them in the correct context without duplicating all of the tests.
-
-        [Test]
-        public override void TestMergeStability()
-        {
-            base.TestMergeStability();
-        }
-
-        #endregion
     }
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Codecs/Pulsing/Test10KPulsings.cs b/src/Lucene.Net.Tests.Codecs/Pulsing/Test10KPulsings.cs
index 19fd045..50aad55 100644
--- a/src/Lucene.Net.Tests.Codecs/Pulsing/Test10KPulsings.cs
+++ b/src/Lucene.Net.Tests.Codecs/Pulsing/Test10KPulsings.cs
@@ -68,7 +68,7 @@
             Field field = NewField("field", "", ft);
             document.Add(field);
 
-            //NumberFormat df = new DecimalFormat("00000", new DecimalFormatSymbols(Locale.ROOT));
+            //NumberFormat df = new DecimalFormat("00000", new DecimalFormatSymbols(Locale.ROOT)); // LUCENENET specific: Use .ToString() formatting instead
 
             for (int i = 0; i < 10050; i++)
             {
diff --git a/src/Lucene.Net.Tests.Codecs/SimpleText/TestSimpleTextPostingsFormat.cs b/src/Lucene.Net.Tests.Codecs/SimpleText/TestSimpleTextPostingsFormat.cs
index f6c20dd..7038854 100644
--- a/src/Lucene.Net.Tests.Codecs/SimpleText/TestSimpleTextPostingsFormat.cs
+++ b/src/Lucene.Net.Tests.Codecs/SimpleText/TestSimpleTextPostingsFormat.cs
@@ -23,7 +23,7 @@
     /// Tests SimpleText's postings
     /// </summary>
     [Nightly] // please figure out why I am so horrendously slow!
-    public class TestSimpleTextPostingsFormat : BasePostingsFormatTestCase // please figure out why I am so horrendously slow!
+    public class TestSimpleTextPostingsFormat : BasePostingsFormatTestCase
     {
         private readonly Codec codec = new SimpleTextCodec();
 
diff --git a/src/Lucene.Net.Tests.Codecs/SimpleText/TestSimpleTextStoredFieldsFormat.cs b/src/Lucene.Net.Tests.Codecs/SimpleText/TestSimpleTextStoredFieldsFormat.cs
index be751b3..5f44527 100644
--- a/src/Lucene.Net.Tests.Codecs/SimpleText/TestSimpleTextStoredFieldsFormat.cs
+++ b/src/Lucene.Net.Tests.Codecs/SimpleText/TestSimpleTextStoredFieldsFormat.cs
@@ -29,6 +29,7 @@
             return new SimpleTextCodec();
         }
 
+
         [Deadlock][Timeout(600000)]
         public override void TestConcurrentReads()
         {
diff --git a/src/Lucene.Net.Tests.Facet/AssertingSubDocsAtOnceCollector.cs b/src/Lucene.Net.Tests.Facet/AssertingSubDocsAtOnceCollector.cs
index a81f7e1..a8c1e3d 100644
--- a/src/Lucene.Net.Tests.Facet/AssertingSubDocsAtOnceCollector.cs
+++ b/src/Lucene.Net.Tests.Facet/AssertingSubDocsAtOnceCollector.cs
@@ -1,8 +1,9 @@
-using System.Collections.Generic;
+// Lucene version compatibility level 4.8.1
+using System;
+using System.Collections.Generic;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -20,19 +21,17 @@
      * limitations under the License.
      */
 
-
     using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
     using ICollector = Lucene.Net.Search.ICollector;
     using ChildScorer = Lucene.Net.Search.Scorer.ChildScorer;
     using Scorer = Lucene.Net.Search.Scorer;
-    using System;
+    
     /// <summary>
     /// Verifies in collect() that all child subScorers are on
     ///  the collected doc. 
     /// </summary>
     internal class AssertingSubDocsAtOnceCollector : ICollector
     {
-
         // TODO: allow wrapping another Collector
 
         internal IList<Scorer> allScorers;
@@ -70,5 +69,4 @@
 
         public virtual bool AcceptsDocsOutOfOrder => false;
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
index b0d4a31..6befeff 100644
--- a/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
+++ b/src/Lucene.Net.Tests.Facet/FacetTestCase.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Diagnostics;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using NUnit.Framework;
 using System;
@@ -8,7 +9,6 @@
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -45,6 +45,7 @@
         {
             return GetTaxonomyFacetCounts(taxoReader, config, c, FacetsConfig.DEFAULT_INDEX_FIELD_NAME);
         }
+
         public virtual Facets GetTaxonomyFacetCounts(TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector c, string indexFieldName)
         {
             Facets facets;
@@ -153,9 +154,9 @@
                 {
                     if (numInRow > 1)
                     {
-                        Array.Sort(labelValues, i - numInRow, i - (i - numInRow), Comparer<LabelAndValue>.Create((a,b)=> {
+                        Array.Sort(labelValues, i - numInRow, i - (i - numInRow), Comparer<LabelAndValue>.Create((a,b) => {
                             if (Debugging.AssertsEnabled) Debugging.Assert((double)a.Value == (double)b.Value);
-                            return (new BytesRef(a.Label)).CompareTo(new BytesRef(b.Label));
+                            return new BytesRef(a.Label).CompareTo(new BytesRef(b.Label));
                         }));
                     }
                     numInRow = 1;
@@ -170,7 +171,7 @@
         
         protected internal virtual void SortLabelValues(List<LabelAndValue> labelValues)
         {
-            labelValues.Sort(Comparer<LabelAndValue>.Create((a,b)=> {
+            labelValues.Sort(Comparer<LabelAndValue>.Create((a,b) => {
                 if ((double)a.Value > (double)b.Value)
                 {
                     return -1;
@@ -181,7 +182,7 @@
                 }
                 else
                 {
-                    return (new BytesRef(a.Label)).CompareTo(new BytesRef(b.Label));
+                    return new BytesRef(a.Label).CompareTo(new BytesRef(b.Label));
                 }
             }));
         }
@@ -206,14 +207,14 @@
             }));
         }
 
-        protected internal virtual void AssertFloatValuesEquals(IList<FacetResult> a, IList<FacetResult> b)
+        protected virtual void AssertFloatValuesEquals(IList<FacetResult> a, IList<FacetResult> b)
         {
             Assert.AreEqual(a.Count, b.Count);
             float lastValue = float.PositiveInfinity;
             IDictionary<string, FacetResult> aByDim = new Dictionary<string, FacetResult>();
             for (int i = 0; i < a.Count; i++)
             {
-                Assert.True((float)a[i].Value <= lastValue);
+                Assert.IsTrue((float)a[i].Value <= lastValue);
                 lastValue = (float)a[i].Value;
                 aByDim[a[i].Dim] = a[i];
             }
@@ -222,7 +223,7 @@
             for (int i = 0; i < b.Count; i++)
             {
                 bByDim[b[i].Dim] = b[i];
-                Assert.True((float)b[i].Value <= lastValue);
+                Assert.IsTrue((float)b[i].Value <= lastValue);
                 lastValue = (float)b[i].Value;
             }
             foreach (string dim in aByDim.Keys)
@@ -231,10 +232,10 @@
             }
         }
 
-        protected internal virtual void AssertFloatValuesEquals(FacetResult a, FacetResult b)
+        protected virtual void AssertFloatValuesEquals(FacetResult a, FacetResult b)
         {
             Assert.AreEqual(a.Dim, b.Dim);
-            Assert.True(Arrays.Equals(a.Path, b.Path));
+            Assert.IsTrue(Arrays.Equals(a.Path, b.Path));
             Assert.AreEqual(a.ChildCount, b.ChildCount);
             Assert.AreEqual((float)a.Value, (float)b.Value, (float)a.Value / 1e5);
             Assert.AreEqual(a.LabelValues.Length, b.LabelValues.Length);
@@ -245,5 +246,4 @@
             }
         }
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
index 537dada..be755e8 100644
--- a/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
+++ b/src/Lucene.Net.Tests.Facet/Range/TestRangeFacetCounts.cs
@@ -1,4 +1,5 @@
-using J2N.Threading.Atomic;
+// Lucene version compatibility level 4.8.1
+using J2N.Threading.Atomic;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Search;
 using NUnit.Framework;
@@ -96,7 +97,12 @@
             IndexSearcher s = NewSearcher(r);
             s.Search(new MatchAllDocsQuery(), fc);
 
-            Facets facets = new Int64RangeFacetCounts("field", fc, new Int64Range("less than 10", 0L, true, 10L, false), new Int64Range("less than or equal to 10", 0L, true, 10L, true), new Int64Range("over 90", 90L, false, 100L, false), new Int64Range("90 or above", 90L, true, 100L, false), new Int64Range("over 1000", 1000L, false, long.MaxValue, true));
+            Facets facets = new Int64RangeFacetCounts("field", fc,
+                new Int64Range("less than 10", 0L, true, 10L, false),
+                new Int64Range("less than or equal to 10", 0L, true, 10L, true),
+                new Int64Range("over 90", 90L, false, 100L, false),
+                new Int64Range("90 or above", 90L, true, 100L, false),
+                new Int64Range("over 1000", 1000L, false, long.MaxValue, true));
 
             FacetResult result = facets.GetTopChildren(10, "field");
 
@@ -111,7 +117,7 @@
         {
             try
             {
-                new Int64Range("useless", 7, true, 6, true);
+                _ = new Int64Range("useless", 7, true, 6, true);
                 fail("did not hit expected exception");
             }
             catch (ArgumentException)
@@ -120,7 +126,7 @@
             }
             try
             {
-                new Int64Range("useless", 7, true, 7, false);
+                _ = new Int64Range("useless", 7, true, 7, false);
                 fail("did not hit expected exception");
             }
             catch (ArgumentException)
@@ -129,7 +135,7 @@
             }
             try
             {
-                new DoubleRange("useless", 7.0, true, 6.0, true);
+                _ = new DoubleRange("useless", 7.0, true, 6.0, true);
                 fail("did not hit expected exception");
             }
             catch (ArgumentException)
@@ -138,7 +144,7 @@
             }
             try
             {
-                new DoubleRange("useless", 7.0, true, 7.0, false);
+                _ = new DoubleRange("useless", 7.0, true, 7.0, false);
                 fail("did not hit expected exception");
             }
             catch (ArgumentException)
@@ -174,7 +180,13 @@
             IndexSearcher s = NewSearcher(r);
             s.Search(new MatchAllDocsQuery(), fc);
 
-            Facets facets = new Int64RangeFacetCounts("field", fc, new Int64Range("min", long.MinValue, true, long.MinValue, true), new Int64Range("max", long.MaxValue, true, long.MaxValue, true), new Int64Range("all0", long.MinValue, true, long.MaxValue, true), new Int64Range("all1", long.MinValue, false, long.MaxValue, true), new Int64Range("all2", long.MinValue, true, long.MaxValue, false), new Int64Range("all3", long.MinValue, false, long.MaxValue, false));
+            Facets facets = new Int64RangeFacetCounts("field", fc,
+                new Int64Range("min", long.MinValue, true, long.MinValue, true),
+                new Int64Range("max", long.MaxValue, true, long.MaxValue, true),
+                new Int64Range("all0", long.MinValue, true, long.MaxValue, true),
+                new Int64Range("all1", long.MinValue, false, long.MaxValue, true),
+                new Int64Range("all2", long.MinValue, true, long.MaxValue, false),
+                new Int64Range("all3", long.MinValue, false, long.MaxValue, false));
 
             FacetResult result = facets.GetTopChildren(10, "field");
             Assert.AreEqual("dim=field path=[] value=3 childCount=6\n  min (1)\n  max (1)\n  all0 (3)\n  all1 (2)\n  all2 (2)\n  all3 (1)\n", result.ToString());
@@ -210,7 +222,11 @@
             IndexSearcher s = NewSearcher(r);
             s.Search(new MatchAllDocsQuery(), fc);
 
-            Facets facets = new Int64RangeFacetCounts("field", fc, new Int64Range("0-10", 0L, true, 10L, true), new Int64Range("10-20", 10L, true, 20L, true), new Int64Range("20-30", 20L, true, 30L, true), new Int64Range("30-40", 30L, true, 40L, true));
+            Facets facets = new Int64RangeFacetCounts("field", fc,
+                new Int64Range("0-10", 0L, true, 10L, true),
+                new Int64Range("10-20", 10L, true, 20L, true),
+                new Int64Range("20-30", 20L, true, 30L, true),
+                new Int64Range("30-40", 30L, true, 40L, true));
 
             FacetResult result = facets.GetTopChildren(10, "field");
             Assert.AreEqual("dim=field path=[] value=41 childCount=4\n  0-10 (11)\n  10-20 (11)\n  20-30 (11)\n  30-40 (11)\n", result.ToString());
@@ -221,7 +237,7 @@
 
         /// <summary>
         /// Tests single request that mixes Range and non-Range
-        ///  faceting, with DrillSideways and taxonomy. 
+        /// faceting, with <see cref="DrillSideways"/> and taxonomy.
         /// </summary>
         [Test]
         public virtual void TestMixedRangeAndNonRangeTaxonomy()
@@ -267,7 +283,7 @@
                 Console.WriteLine("TEST: searcher=" + s);
             }
 
-            DrillSideways ds = new DrillSidewaysAnonymousInnerClassHelper(this, s, config, tr);
+            DrillSideways ds = new DrillSidewaysAnonymousClass(this, s, config, tr);
 
             // First search, no drill downs:
             DrillDownQuery ddq = new DrillDownQuery(config);
@@ -275,7 +291,8 @@
 
             Assert.AreEqual(100, dsr.Hits.TotalHits);
             Assert.AreEqual("dim=dim path=[] value=100 childCount=2\n  b (75)\n  a (25)\n", dsr.Facets.GetTopChildren(10, "dim").ToString());
-            Assert.AreEqual("dim=field path=[] value=21 childCount=5\n  less than 10 (10)\n  less than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n", dsr.Facets.GetTopChildren(10, "field").ToString());
+            Assert.AreEqual("dim=field path=[] value=21 childCount=5\n  less than 10 (10)\n  less than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+                dsr.Facets.GetTopChildren(10, "field").ToString());
 
             // Second search, drill down on dim=b:
             ddq = new DrillDownQuery(config);
@@ -284,7 +301,8 @@
 
             Assert.AreEqual(75, dsr.Hits.TotalHits);
             Assert.AreEqual("dim=dim path=[] value=100 childCount=2\n  b (75)\n  a (25)\n", dsr.Facets.GetTopChildren(10, "dim").ToString());
-            Assert.AreEqual("dim=field path=[] value=16 childCount=5\n  less than 10 (7)\n  less than or equal to 10 (8)\n  over 90 (7)\n  90 or above (8)\n  over 1000 (0)\n", dsr.Facets.GetTopChildren(10, "field").ToString());
+            Assert.AreEqual("dim=field path=[] value=16 childCount=5\n  less than 10 (7)\n  less than or equal to 10 (8)\n  over 90 (7)\n  90 or above (8)\n  over 1000 (0)\n",
+                dsr.Facets.GetTopChildren(10, "field").ToString());
 
             // Third search, drill down on "less than or equal to 10":
             ddq = new DrillDownQuery(config);
@@ -293,15 +311,16 @@
 
             Assert.AreEqual(11, dsr.Hits.TotalHits);
             Assert.AreEqual("dim=dim path=[] value=11 childCount=2\n  b (8)\n  a (3)\n", dsr.Facets.GetTopChildren(10, "dim").ToString());
-            Assert.AreEqual("dim=field path=[] value=21 childCount=5\n  less than 10 (10)\n  less than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n", dsr.Facets.GetTopChildren(10, "field").ToString());
+            Assert.AreEqual("dim=field path=[] value=21 childCount=5\n  less than 10 (10)\n  less than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+                dsr.Facets.GetTopChildren(10, "field").ToString());
             IOUtils.Dispose(tw, tr, td, w, r, d);
         }
 
-        private class DrillSidewaysAnonymousInnerClassHelper : DrillSideways
+        private class DrillSidewaysAnonymousClass : DrillSideways
         {
             private readonly TestRangeFacetCounts outerInstance;
 
-            public DrillSidewaysAnonymousInnerClassHelper(TestRangeFacetCounts outerInstance, IndexSearcher s, FacetsConfig config, TaxonomyReader tr)
+            public DrillSidewaysAnonymousClass(TestRangeFacetCounts outerInstance, IndexSearcher s, FacetsConfig config, TaxonomyReader tr)
                 : base(s, config, tr)
             {
                 this.outerInstance = outerInstance;
@@ -328,7 +347,12 @@
                 }
 
                 IDictionary<string, Facets> byDim = new Dictionary<string, Facets>();
-                byDim["field"] = new Int64RangeFacetCounts("field", fieldFC, new Int64Range("less than 10", 0L, true, 10L, false), new Int64Range("less than or equal to 10", 0L, true, 10L, true), new Int64Range("over 90", 90L, false, 100L, false), new Int64Range("90 or above", 90L, true, 100L, false), new Int64Range("over 1000", 1000L, false, long.MaxValue, false));
+                byDim["field"] = new Int64RangeFacetCounts("field", fieldFC,
+                    new Int64Range("less than 10", 0L, true, 10L, false),
+                    new Int64Range("less than or equal to 10", 0L, true, 10L, true),
+                    new Int64Range("over 90", 90L, false, 100L, false),
+                    new Int64Range("90 or above", 90L, true, 100L, false),
+                    new Int64Range("over 1000", 1000L, false, long.MaxValue, false));
                 byDim["dim"] = outerInstance.GetTaxonomyFacetCounts(m_taxoReader, m_config, dimFC);
                 return new MultiFacets(byDim, null);
             }
@@ -360,9 +384,15 @@
 
             IndexSearcher s = NewSearcher(r);
             s.Search(new MatchAllDocsQuery(), fc);
-            Facets facets = new DoubleRangeFacetCounts("field", fc, new DoubleRange("less than 10", 0.0, true, 10.0, false), new DoubleRange("less than or equal to 10", 0.0, true, 10.0, true), new DoubleRange("over 90", 90.0, false, 100.0, false), new DoubleRange("90 or above", 90.0, true, 100.0, false), new DoubleRange("over 1000", 1000.0, false, double.PositiveInfinity, false));
+            Facets facets = new DoubleRangeFacetCounts("field", fc,
+                new DoubleRange("less than 10", 0.0, true, 10.0, false),
+                new DoubleRange("less than or equal to 10", 0.0, true, 10.0, true),
+                new DoubleRange("over 90", 90.0, false, 100.0, false),
+                new DoubleRange("90 or above", 90.0, true, 100.0, false),
+                new DoubleRange("over 1000", 1000.0, false, double.PositiveInfinity, false));
 
-            Assert.AreEqual("dim=field path=[] value=21 childCount=5\n  less than 10 (10)\n  less than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n", facets.GetTopChildren(10, "field").ToString());
+            Assert.AreEqual("dim=field path=[] value=21 childCount=5\n  less than 10 (10)\n  less than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+                facets.GetTopChildren(10, "field").ToString());
 
             IOUtils.Dispose(w, r, d);
         }
@@ -392,9 +422,15 @@
             IndexSearcher s = NewSearcher(r);
             s.Search(new MatchAllDocsQuery(), fc);
 
-            Facets facets = new DoubleRangeFacetCounts("field", new SingleFieldSource("field"), fc, new DoubleRange("less than 10", 0.0f, true, 10.0f, false), new DoubleRange("less than or equal to 10", 0.0f, true, 10.0f, true), new DoubleRange("over 90", 90.0f, false, 100.0f, false), new DoubleRange("90 or above", 90.0f, true, 100.0f, false), new DoubleRange("over 1000", 1000.0f, false, double.PositiveInfinity, false));
+            Facets facets = new DoubleRangeFacetCounts("field", new SingleFieldSource("field"), fc,
+                new DoubleRange("less than 10", 0.0f, true, 10.0f, false),
+                new DoubleRange("less than or equal to 10", 0.0f, true, 10.0f, true),
+                new DoubleRange("over 90", 90.0f, false, 100.0f, false),
+                new DoubleRange("90 or above", 90.0f, true, 100.0f, false),
+                new DoubleRange("over 1000", 1000.0f, false, double.PositiveInfinity, false));
 
-            Assert.AreEqual("dim=field path=[] value=21 childCount=5\n  less than 10 (10)\n  less than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n", facets.GetTopChildren(10, "field").ToString());
+            Assert.AreEqual("dim=field path=[] value=21 childCount=5\n  less than 10 (10)\n  less than or equal to 10 (11)\n  over 90 (9)\n  90 or above (10)\n  over 1000 (0)\n",
+                facets.GetTopChildren(10, "field").ToString());
 
             IOUtils.Dispose(w, r, d);
         }
@@ -1028,9 +1064,15 @@
 
             IndexSearcher s = NewSearcher(r);
             s.Search(new MatchAllDocsQuery(), fc);
-            Facets facets = new Int64RangeFacetCounts("field", fc, new Int64Range("less than 10", 0L, true, 10L, false), new Int64Range("less than or equal to 10", 0L, true, 10L, true), new Int64Range("over 90", 90L, false, 100L, false), new Int64Range("90 or above", 90L, true, 100L, false), new Int64Range("over 1000", 1000L, false, long.MaxValue, false));
+            Facets facets = new Int64RangeFacetCounts("field", fc,
+                new Int64Range("less than 10", 0L, true, 10L, false),
+                new Int64Range("less than or equal to 10", 0L, true, 10L, true),
+                new Int64Range("over 90", 90L, false, 100L, false),
+                new Int64Range("90 or above", 90L, true, 100L, false),
+                new Int64Range("over 1000", 1000L, false, long.MaxValue, false));
 
-            Assert.AreEqual("dim=field path=[] value=16 childCount=5\n  less than 10 (8)\n  less than or equal to 10 (8)\n  over 90 (8)\n  90 or above (8)\n  over 1000 (0)\n", facets.GetTopChildren(10, "field").ToString());
+            Assert.AreEqual("dim=field path=[] value=16 childCount=5\n  less than 10 (8)\n  less than or equal to 10 (8)\n  over 90 (8)\n  90 or above (8)\n  over 1000 (0)\n",
+                facets.GetTopChildren(10, "field").ToString());
 
             IOUtils.Dispose(w, r, d);
         }
@@ -1053,7 +1095,7 @@
             // Test wants 3 docs in one segment:
             writer.ForceMerge(1);
 
-            var vs = new ValueSourceAnonymousInnerClassHelper(this, doc);
+            var vs = new ValueSourceAnonymousClass(this, doc);
 
             FacetsConfig config = new FacetsConfig();
 
@@ -1063,14 +1105,21 @@
             IndexSearcher s = NewSearcher(r);
             s.Search(new MatchAllDocsQuery(), fc);
 
-            DoubleRange[] ranges = new DoubleRange[] { new DoubleRange("< 1", 0.0, true, 1.0, false), new DoubleRange("< 2", 0.0, true, 2.0, false), new DoubleRange("< 5", 0.0, true, 5.0, false), new DoubleRange("< 10", 0.0, true, 10.0, false), new DoubleRange("< 20", 0.0, true, 20.0, false), new DoubleRange("< 50", 0.0, true, 50.0, false) };
+            DoubleRange[] ranges = new DoubleRange[] {
+                new DoubleRange("< 1", 0.0, true, 1.0, false),
+                new DoubleRange("< 2", 0.0, true, 2.0, false),
+                new DoubleRange("< 5", 0.0, true, 5.0, false),
+                new DoubleRange("< 10", 0.0, true, 10.0, false),
+                new DoubleRange("< 20", 0.0, true, 20.0, false),
+                new DoubleRange("< 50", 0.0, true, 50.0, false)
+            };
 
             Filter fastMatchFilter;
             AtomicBoolean filterWasUsed = new AtomicBoolean();
             if (Random.NextBoolean())
             {
                 // Sort of silly:
-                fastMatchFilter = new CachingWrapperFilterAnonymousInnerClassHelper(this, new QueryWrapperFilter(new MatchAllDocsQuery()), filterWasUsed);
+                fastMatchFilter = new CachingWrapperFilterAnonymousClass(this, new QueryWrapperFilter(new MatchAllDocsQuery()), filterWasUsed);
             }
             else
             {
@@ -1084,8 +1133,9 @@
 
             Facets facets = new DoubleRangeFacetCounts("field", vs, fc, fastMatchFilter, ranges);
 
-            Assert.AreEqual("dim=field path=[] value=3 childCount=6\n  < 1 (0)\n  < 2 (1)\n  < 5 (3)\n  < 10 (3)\n  < 20 (3)\n  < 50 (3)\n", facets.GetTopChildren(10, "field").ToString());
-            Assert.True(fastMatchFilter == null || filterWasUsed);
+            Assert.AreEqual("dim=field path=[] value=3 childCount=6\n  < 1 (0)\n  < 2 (1)\n  < 5 (3)\n  < 10 (3)\n  < 20 (3)\n  < 50 (3)\n",
+                facets.GetTopChildren(10, "field").ToString());
+            Assert.IsTrue(fastMatchFilter == null || filterWasUsed);
 
             DrillDownQuery ddq = new DrillDownQuery(config);
             ddq.Add("field", ranges[1].GetFilter(fastMatchFilter, vs));
@@ -1094,23 +1144,24 @@
             Assert.AreEqual(1, s.Search(ddq, 10).TotalHits);
 
             // Test drill-sideways after drill-down
-            DrillSideways ds = new DrillSidewaysAnonymousInnerClassHelper2(this, s, config, (TaxonomyReader)null, vs, ranges, fastMatchFilter);
+            DrillSideways ds = new DrillSidewaysAnonymousClass2(this, s, config, (TaxonomyReader)null, vs, ranges, fastMatchFilter);
 
 
             DrillSidewaysResult dsr = ds.Search(ddq, 10);
             Assert.AreEqual(1, dsr.Hits.TotalHits);
-            Assert.AreEqual("dim=field path=[] value=3 childCount=6\n  < 1 (0)\n  < 2 (1)\n  < 5 (3)\n  < 10 (3)\n  < 20 (3)\n  < 50 (3)\n", dsr.Facets.GetTopChildren(10, "field").ToString());
+            Assert.AreEqual("dim=field path=[] value=3 childCount=6\n  < 1 (0)\n  < 2 (1)\n  < 5 (3)\n  < 10 (3)\n  < 20 (3)\n  < 50 (3)\n",
+                dsr.Facets.GetTopChildren(10, "field").ToString());
 
             IOUtils.Dispose(r, writer, dir);
         }
 
-        private class ValueSourceAnonymousInnerClassHelper : ValueSource
+        private class ValueSourceAnonymousClass : ValueSource
         {
             private readonly TestRangeFacetCounts outerInstance;
 
-            private Document doc;
+            private readonly Document doc;
 
-            public ValueSourceAnonymousInnerClassHelper(TestRangeFacetCounts outerInstance, Document doc)
+            public ValueSourceAnonymousClass(TestRangeFacetCounts outerInstance, Document doc)
             {
                 this.outerInstance = outerInstance;
                 this.doc = doc;
@@ -1118,14 +1169,14 @@
 
             public override FunctionValues GetValues(IDictionary ignored, AtomicReaderContext ignored2)
             {
-                return new DoubleDocValuesAnonymousInnerClassHelper(this);
+                return new DoubleDocValuesAnonymousClass(this);
             }
 
-            private class DoubleDocValuesAnonymousInnerClassHelper : DoubleDocValues
+            private class DoubleDocValuesAnonymousClass : DoubleDocValues
             {
-                private readonly ValueSourceAnonymousInnerClassHelper outerInstance;
+                private readonly ValueSourceAnonymousClass outerInstance;
 
-                public DoubleDocValuesAnonymousInnerClassHelper(ValueSourceAnonymousInnerClassHelper outerInstance)
+                public DoubleDocValuesAnonymousClass(ValueSourceAnonymousClass outerInstance)
                     : base(null)
                 {
                     this.outerInstance = outerInstance;
@@ -1154,13 +1205,13 @@
 
         }
 
-        private class CachingWrapperFilterAnonymousInnerClassHelper : CachingWrapperFilter
+        private class CachingWrapperFilterAnonymousClass : CachingWrapperFilter
         {
             private readonly TestRangeFacetCounts outerInstance;
 
-            private AtomicBoolean filterWasUsed;
+            private readonly AtomicBoolean filterWasUsed;
 
-            public CachingWrapperFilterAnonymousInnerClassHelper(TestRangeFacetCounts outerInstance, QueryWrapperFilter org, AtomicBoolean filterWasUsed)
+            public CachingWrapperFilterAnonymousClass(TestRangeFacetCounts outerInstance, QueryWrapperFilter org, AtomicBoolean filterWasUsed)
                 : base(org)
             {
                 this.outerInstance = outerInstance;
@@ -1176,16 +1227,16 @@
             }
         }
 
-        private class DrillSidewaysAnonymousInnerClassHelper2 : DrillSideways
+        private class DrillSidewaysAnonymousClass2 : DrillSideways
         {
             private readonly TestRangeFacetCounts outerInstance;
 
-            private ValueSource vs;
-            private Lucene.Net.Facet.Range.DoubleRange[] ranges;
-            private Filter fastMatchFilter;
+            private readonly ValueSource vs;
+            private readonly DoubleRange[] ranges;
+            private readonly Filter fastMatchFilter;
 
 
-            public DrillSidewaysAnonymousInnerClassHelper2(TestRangeFacetCounts outerInstance, IndexSearcher indexSearcher, FacetsConfig facetsConfig, TaxonomyReader org, ValueSource valueSource, DoubleRange[] doubleRanges, Filter filter)
+            public DrillSidewaysAnonymousClass2(TestRangeFacetCounts outerInstance, IndexSearcher indexSearcher, FacetsConfig facetsConfig, TaxonomyReader org, ValueSource valueSource, DoubleRange[] doubleRanges, Filter filter)
                 : base(indexSearcher, facetsConfig, org)
             {
                 this.outerInstance = outerInstance;
@@ -1204,5 +1255,4 @@
             protected override bool ScoreSubDocsAtOnce => Random.NextBoolean();
         }
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/SlowRAMDirectory.cs b/src/Lucene.Net.Tests.Facet/SlowRAMDirectory.cs
index b9631ee..6dc1b89 100644
--- a/src/Lucene.Net.Tests.Facet/SlowRAMDirectory.cs
+++ b/src/Lucene.Net.Tests.Facet/SlowRAMDirectory.cs
@@ -1,10 +1,9 @@
-using System;
+// Lucene version compatibility level 4.8.1
+using System;
 using System.Threading;
-using Lucene.Net.Randomized.Generators;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -23,9 +22,9 @@
      */
 
 
-    using IOContext = Lucene.Net.Store.IOContext;
     using IndexInput = Lucene.Net.Store.IndexInput;
     using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
     using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 
     /// <summary>
@@ -34,15 +33,14 @@
     // TODO: move to test-framework and sometimes use in tests?
     public class SlowRAMDirectory : RAMDirectory
     {
-
         private const int IO_SLEEP_THRESHOLD = 50;
 
         internal Random random;
         private int sleepMillis;
 
-        public virtual int SleepMillis
+        public virtual void SetSleepMillis(int sleepMillis)
         {
-            set => this.sleepMillis = value;
+            this.sleepMillis = sleepMillis;
         }
 
         public SlowRAMDirectory(int sleepMillis, Random random)
@@ -110,9 +108,9 @@
         {
             private readonly SlowRAMDirectory outerInstance;
 
-            internal IndexInput ii;
-            internal int numRead = 0;
-            internal Random rand;
+            private readonly IndexInput ii;
+            private int numRead = 0;
+            private readonly Random rand;
 
             public SlowIndexInput(SlowRAMDirectory outerInstance, IndexInput ii)
                 : base("SlowIndexInput(" + ii + ")")
@@ -159,26 +157,28 @@
                     ii.Dispose();
                 }
             }
+
             public override bool Equals(object o)
             {
                 return ii.Equals(o);
             }
+
             public override long GetFilePointer()
             {
                 return ii.GetFilePointer();
             }
 
-            public override void Seek(long pos)
-            {
-                ii.Seek(pos);
-            }
-
-
             public override int GetHashCode()
             {
                 return ii.GetHashCode();
             }
+
             public override long Length => ii.Length;
+
+            public override void Seek(long pos)
+            {
+                ii.Seek(pos);
+            }
         }
 
         /// <summary>
@@ -189,10 +189,9 @@
         {
             private readonly SlowRAMDirectory outerInstance;
 
-
-            internal IndexOutput io;
-            internal int numWrote;
-            internal readonly Random rand;
+            private readonly IndexOutput io;
+            private int numWrote;
+            private readonly Random rand;
 
             public SlowIndexOutput(SlowRAMDirectory outerInstance, IndexOutput io)
             {
@@ -223,6 +222,12 @@
                 io.WriteBytes(b, offset, length);
             }
 
+            [Obsolete]
+            public override void Seek(long pos)
+            {
+                io.Seek(pos);
+            }
+
             protected override void Dispose(bool disposing)
             {
                 if (disposing)
@@ -230,24 +235,24 @@
                     io.Dispose();
                 }
             }
+
             public override void Flush()
             {
                 io.Flush();
             }
+
             public override long GetFilePointer()
             {
                 return io.GetFilePointer();
             }
 
-            [Obsolete]
-            public override void Seek(long pos)
+            public override long Length
             {
-                io.Seek(pos);
+                get => io.Length;
+                set => throw new InvalidOperationException("Length is readonly");
             }
 
             public override long Checksum => io.Checksum;
         }
-
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs b/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
index b235b47..69fb35a 100644
--- a/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
+++ b/src/Lucene.Net.Tests.Facet/SortedSet/TestSortedSetDocValuesFacets.cs
@@ -1,4 +1,5 @@
-using NUnit.Framework;
+// Lucene version compatibility level 4.8.1
+using NUnit.Framework;
 using System;
 using System.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
@@ -48,7 +49,6 @@
         [Test]
         public virtual void TestBasic()
         {
-
             AssumeTrue("Test requires SortedSetDV support", DefaultCodecSupportsSortedSet);
             Directory dir = NewDirectory();
 
@@ -138,7 +138,7 @@
 
             try
             {
-                new SortedSetDocValuesFacetCounts(state, c);
+                _ = new SortedSetDocValuesFacetCounts(state, c);
                 fail("did not hit expected exception");
             }
             catch (InvalidOperationException)
@@ -410,5 +410,4 @@
             IOUtils.Dispose(w, searcher.IndexReader, indexDir, taxoDir);
         }
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs
index 18c7b05..96b0892 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestAddTaxonomy.cs
@@ -1,4 +1,5 @@
-using J2N.Threading;
+// Lucene version compatibility level 4.8.1
+using J2N.Threading;
 using J2N.Threading.Atomic;
 using NUnit.Framework;
 using System;
@@ -46,7 +47,7 @@
                 ThreadJob[] addThreads = new ThreadJob[4];
                 for (int j = 0; j < addThreads.Length; j++)
                 {
-                    addThreads[j] = new ThreadAnonymousInnerClassHelper(this, range, numCats, tw);
+                    addThreads[j] = new ThreadAnonymousClass(this, range, numCats, tw);
                 }
 
                 foreach (ThreadJob t in addThreads)
@@ -71,7 +72,7 @@
             IOUtils.Dispose(dirs);
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestAddTaxonomy outerInstance;
 
@@ -79,7 +80,7 @@
             private AtomicInt32 numCats;
             private DirectoryTaxonomyWriter tw;
 
-            public ThreadAnonymousInnerClassHelper(TestAddTaxonomy outerInstance, int range, AtomicInt32 numCats, DirectoryTaxonomyWriter tw)
+            public ThreadAnonymousClass(TestAddTaxonomy outerInstance, int range, AtomicInt32 numCats, DirectoryTaxonomyWriter tw)
             {
                 this.outerInstance = outerInstance;
                 this.range = range;
@@ -127,7 +128,7 @@
 
             // validate taxo sizes
             int srcSize = srcTR.Count;
-            Assert.True(destSize >= srcSize, "destination taxonomy expected to be larger than source; dest=" + destSize + " src=" + srcSize);
+            Assert.IsTrue(destSize >= srcSize, "destination taxonomy expected to be larger than source; dest=" + destSize + " src=" + srcSize);
 
             // validate that all source categories exist in destination, and their
             // ordinals are as expected.
@@ -135,7 +136,7 @@
             {
                 FacetLabel cp = srcTR.GetPath(j);
                 int destOrdinal = destTr.GetOrdinal(cp);
-                Assert.True(destOrdinal > 0, cp + " not found in destination");
+                Assert.IsTrue(destOrdinal > 0, cp + " not found in destination");
                 Assert.AreEqual(destOrdinal, map[j]);
             }
         }
@@ -150,7 +151,7 @@
             destTW.Commit();
 
             Directory src = NewDirectory();
-            (new DirectoryTaxonomyWriter(src)).Dispose(); // create an empty taxonomy
+            new DirectoryTaxonomyWriter(src).Dispose(); // create an empty taxonomy
 
             IOrdinalMap map = randomOrdinalMap();
             destTW.AddTaxonomy(src, map);
@@ -200,7 +201,8 @@
             int numTests = AtLeast(3);
             for (int i = 0; i < numTests; i++)
             {
-                Dotest(TestUtil.NextInt32(random, 2, 100), TestUtil.NextInt32(random, 100, 1000));
+                Dotest(TestUtil.NextInt32(random, 2, 100),
+                    TestUtil.NextInt32(random, 100, 1000));
             }
         }
 
@@ -248,7 +250,7 @@
             // again, in parallel -- in the end, no duplicate categories should exist.
             Directory dest = NewDirectory();
             var destTw = new DirectoryTaxonomyWriter(dest);
-            var t = new ThreadAnonymousInnerClassHelper2(this, numCategories, destTw);
+            var t = new ThreadAnonymousClass2(this, numCategories, destTw);
             t.Start();
 
             IOrdinalMap map = new MemoryOrdinalMap();
@@ -265,21 +267,21 @@
             for (int i = 1; i < dtr.Count; i++)
             {
                 FacetLabel cat = dtr.GetPath(i);
-                Assert.True(categories.Add(cat), "category " + cat + " already existed");
+                Assert.IsTrue(categories.Add(cat), "category " + cat + " already existed");
             }
             dtr.Dispose();
 
             IOUtils.Dispose(src, dest);
         }
 
-        private class ThreadAnonymousInnerClassHelper2 : ThreadJob
+        private class ThreadAnonymousClass2 : ThreadJob
         {
             private readonly TestAddTaxonomy outerInstance;
 
-            private int numCategories;
-            private Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter destTW;
+            private readonly int numCategories;
+            private readonly DirectoryTaxonomyWriter destTW;
 
-            public ThreadAnonymousInnerClassHelper2(TestAddTaxonomy outerInstance, int numCategories, Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter destTW)
+            public ThreadAnonymousClass2(TestAddTaxonomy outerInstance, int numCategories, DirectoryTaxonomyWriter destTW)
             {
                 this.outerInstance = outerInstance;
                 this.numCategories = numCategories;
@@ -302,7 +304,5 @@
                 }
             }
         }
-
     }
-
 }
\ No newline at end of file
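
> Editor's note: several hunks in this file rename `ThreadAnonymousInnerClassHelper` to `ThreadAnonymousClass` and mark its captured state `readonly`. This is the standard substitute for a Java anonymous inner class when porting to C#: a private nested class that receives the outer instance and captured locals through its constructor. A hedged sketch of the shape, using plain `System.Threading` rather than J2N and entirely hypothetical names:

```csharp
using System.Threading;

public class OuterExample
{
    private int completed;

    public void RunWorkers(int count)
    {
        var threads = new Thread[count];
        for (int i = 0; i < count; i++)
        {
            var worker = new WorkerAnonymousClass(this, iterations: 10);
            threads[i] = new Thread(worker.Run);
            threads[i].Start();
        }
        foreach (var t in threads) t.Join();
    }

    // Named nested class standing in for what Java would express as an anonymous class.
    private class WorkerAnonymousClass
    {
        private readonly OuterExample outerInstance; // replaces Java's implicit outer reference
        private readonly int iterations;             // replaces the captured local

        public WorkerAnonymousClass(OuterExample outerInstance, int iterations)
        {
            this.outerInstance = outerInstance;
            this.iterations = iterations;
        }

        public void Run()
        {
            for (int i = 0; i < iterations; i++)
            {
                Interlocked.Increment(ref outerInstance.completed);
            }
        }
    }
}
```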
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
index e194468..9a2f56d 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestConcurrentFacetedIndexing.cs
@@ -1,4 +1,5 @@
-using J2N.Threading.Atomic;
+// Lucene version compatibility level 4.8.1
+using J2N.Threading.Atomic;
 using System;
 using System.Collections.Concurrent;
 using System.IO;
@@ -40,11 +41,11 @@
 
         // A No-Op ITaxonomyWriterCache which always discards all given categories, and
         // always returns true in put(), to indicate some cache entries were cleared.
-        private static ITaxonomyWriterCache NO_OP_CACHE = new TaxonomyWriterCacheAnonymousInnerClassHelper();
+        private static ITaxonomyWriterCache NO_OP_CACHE = new TaxonomyWriterCacheAnonymousClass();
 
-        private class TaxonomyWriterCacheAnonymousInnerClassHelper : ITaxonomyWriterCache
+        private class TaxonomyWriterCacheAnonymousClass : ITaxonomyWriterCache
         {
-            public TaxonomyWriterCacheAnonymousInnerClassHelper()
+            public TaxonomyWriterCacheAnonymousClass()
             {
             }
 
@@ -118,7 +119,7 @@
 
             for (int i = 0; i < indexThreads.Length; i++)
             {
-                indexThreads[i] = new ThreadAnonymousInnerClassHelper(this, numDocs, values, iw, tw, config);
+                indexThreads[i] = new ThreadAnonymousClass(this, numDocs, values, iw, tw, config);
             }
 
             foreach (ThreadJob t in indexThreads)
@@ -148,7 +149,7 @@
             foreach (string cat in values.Keys)
             {
                 FacetLabel cp = new FacetLabel(FacetsConfig.StringToPath(cat));
-                Assert.True(tr.GetOrdinal(cp) > 0, "category not found " + cp);
+                Assert.IsTrue(tr.GetOrdinal(cp) > 0, "category not found " + cp);
                 int level = cp.Length;
                 int parentOrd = 0; // for root, parent is always virtual ROOT (ord=0)
                 FacetLabel path = null;
@@ -164,17 +165,17 @@
             IOUtils.Dispose(tw, iw, tr, taxoDir, indexDir);
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestConcurrentFacetedIndexing outerInstance;
 
-            private AtomicInt32 numDocs;
-            private ConcurrentDictionary<string, string> values;
-            private IndexWriter iw;
-            private Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter tw;
-            private FacetsConfig config;
+            private readonly AtomicInt32 numDocs;
+            private readonly ConcurrentDictionary<string, string> values;
+            private readonly IndexWriter iw;
+            private readonly DirectoryTaxonomyWriter tw;
+            private readonly FacetsConfig config;
 
-            public ThreadAnonymousInnerClassHelper(TestConcurrentFacetedIndexing outerInstance, AtomicInt32 numDocs, ConcurrentDictionary<string, string> values, IndexWriter iw, Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter tw, FacetsConfig config)
+            public ThreadAnonymousClass(TestConcurrentFacetedIndexing outerInstance, AtomicInt32 numDocs, ConcurrentDictionary<string, string> values, IndexWriter iw, DirectoryTaxonomyWriter tw, FacetsConfig config)
             {
                 this.outerInstance = outerInstance;
                 this.numDocs = numDocs;
@@ -218,7 +219,5 @@
                 }
             }
         }
-
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
index 61df174..e76dc9d 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyReader.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index.Extensions;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Index.Extensions;
 using NUnit.Framework;
 using System;
 using System.Globalization;
@@ -50,7 +51,7 @@
             ltr.Dispose();
 
             // should not fail as we IncRef() before close
-            var tmpSie = ltr.Count;
+            var _ = ltr.Count;
             ltr.DecRef();
 
             dir.Dispose();
@@ -65,8 +66,8 @@
             ltw.Dispose();
 
             var ltr = new DirectoryTaxonomyReader(dir);
-            (ltr).Dispose();
-            (ltr).Dispose(); // no exception should be thrown
+            ltr.Dispose();
+            ltr.Dispose(); // no exception should be thrown
 
             dir.Dispose();
         }
@@ -87,15 +88,15 @@
                 ltw.Commit();
 
                 ltr = new DirectoryTaxonomyReader(dir);
-                Assert.Null(TaxonomyReader.OpenIfChanged(ltr), "Nothing has changed");
+                Assert.IsNull(TaxonomyReader.OpenIfChanged(ltr), "Nothing has changed");
 
                 ltw.AddCategory(new FacetLabel("b"));
                 ltw.Commit();
 
                 DirectoryTaxonomyReader newtr = TaxonomyReader.OpenIfChanged(ltr);
-                Assert.NotNull(newtr, "changes were committed");
-                Assert.Null(TaxonomyReader.OpenIfChanged(newtr), "Nothing has changed");
-                (newtr).Dispose();
+                Assert.IsNotNull(newtr, "changes were committed");
+                Assert.IsNull(TaxonomyReader.OpenIfChanged(newtr), "Nothing has changed");
+                newtr.Dispose();
             }
             finally
             {
@@ -115,7 +116,7 @@
             ltr.Dispose();
             try
             {
-                var tmpSize = ltr.Count;
+                var _ = ltr.Count;
                 fail("An ObjectDisposedException should have been thrown here");
             }
             catch (ObjectDisposedException)
@@ -139,7 +140,7 @@
         {
             doTestReadRecreatedTaxonomy(Random, false);
         }
-        
+
         private void doTestReadRecreatedTaxonomy(Random random, bool closeReader)
         {
             Directory dir = null;
@@ -182,7 +183,7 @@
                     else
                     {
                         var newtr = TaxonomyReader.OpenIfChanged(tr);
-                        Assert.NotNull(newtr);
+                        Assert.IsNotNull(newtr);
                         tr.Dispose();
                         tr = newtr;
                     }
@@ -191,7 +192,7 @@
             }
             finally
             {
-                IOUtils.Dispose(tr as DirectoryTaxonomyReader, tw, dir);
+                IOUtils.Dispose(tr, tw, dir);
             }
         }
 
@@ -213,7 +214,7 @@
             taxoWriter.AddCategory(new FacetLabel("a", "b"));
             taxoWriter.Commit();
             var newtr = TaxonomyReader.OpenIfChanged(taxoReader);
-            Assert.NotNull(newtr);
+            Assert.IsNotNull(newtr);
             taxoReader.Dispose();
             taxoReader = newtr;
             Assert.AreEqual(1, taxoReader.RefCount, "wrong refCount");
@@ -229,7 +230,7 @@
             // test openIfChanged() when the taxonomy contains many segments
             Directory dir = NewDirectory();
 
-            DirectoryTaxonomyWriter writer = new DirectoryTaxonomyWriterAnonymousInnerClassHelper(this, dir);
+            DirectoryTaxonomyWriter writer = new DirectoryTaxonomyWriterAnonymousClass(this, dir);
             var reader = new DirectoryTaxonomyReader(writer);
 
             int numRounds = Random.Next(10) + 10;
@@ -243,7 +244,7 @@
                 }
                 numCategories += numCats + 1; // one for round-parent
                 var newtr = TaxonomyReader.OpenIfChanged(reader);
-                Assert.NotNull(newtr);
+                Assert.IsNotNull(newtr);
                 reader.Dispose();
                 reader = newtr;
 
@@ -264,11 +265,11 @@
             dir.Dispose();
         }
 
-        private class DirectoryTaxonomyWriterAnonymousInnerClassHelper : DirectoryTaxonomyWriter
+        private class DirectoryTaxonomyWriterAnonymousClass : DirectoryTaxonomyWriter
         {
             private readonly TestDirectoryTaxonomyReader outerInstance;
 
-            public DirectoryTaxonomyWriterAnonymousInnerClassHelper(TestDirectoryTaxonomyReader outerInstance, Directory dir)
+            public DirectoryTaxonomyWriterAnonymousClass(TestDirectoryTaxonomyReader outerInstance, Directory dir)
                 : base(dir)
             {
                 this.outerInstance = outerInstance;
@@ -293,13 +294,14 @@
 
             // hold onto IW to forceMerge
             // note how we don't close it, since DTW will close it.
-            IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(new LogByteSizeMergePolicy()));
+            IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+                .SetMergePolicy(new LogByteSizeMergePolicy()));
 
             // LUCENENET: We need to set the index writer before the constructor of the base class is called
             // because the DirectoryTaxonomyWriter class constructor is the consumer of the OpenIndexWriter method.
             // The only option seems to be to set it statically before creating the instance.
-            DirectoryTaxonomyWriterAnonymousInnerClassHelper2.iw = iw;
-            var writer = new DirectoryTaxonomyWriterAnonymousInnerClassHelper2(dir);
+            DirectoryTaxonomyWriterAnonymousClass2.iw = iw;
+            var writer = new DirectoryTaxonomyWriterAnonymousClass2(dir);
 
             var reader = new DirectoryTaxonomyReader(writer);
             Assert.AreEqual(1, reader.Count);
@@ -312,7 +314,7 @@
 
             // now calling openIfChanged should trip on the bug
             var newtr = TaxonomyReader.OpenIfChanged(reader);
-            Assert.NotNull(newtr);
+            Assert.IsNotNull(newtr);
             reader.Dispose();
             reader = newtr;
             Assert.AreEqual(2, reader.Count);
@@ -323,17 +325,17 @@
             dir.Dispose();
         }
 
-        private class DirectoryTaxonomyWriterAnonymousInnerClassHelper2 : DirectoryTaxonomyWriter
+        private class DirectoryTaxonomyWriterAnonymousClass2 : DirectoryTaxonomyWriter
         {
             internal static IndexWriter iw = null;
 
-            public DirectoryTaxonomyWriterAnonymousInnerClassHelper2(Directory dir) 
+            public DirectoryTaxonomyWriterAnonymousClass2(Directory dir)
                 : base(dir)
             {
             }
 
-            protected override IndexWriter OpenIndexWriter(Directory directory, IndexWriterConfig config) 
-            {   
+            protected override IndexWriter OpenIndexWriter(Directory directory, IndexWriterConfig config)
+            {
                 return iw;
             }
         }
@@ -354,8 +356,8 @@
             // LUCENENET: We need to set the index writer before the constructor of the base class is called
             // because the DirectoryTaxonomyWriter class constructor is the consumer of the OpenIndexWriter method.
             // The only option seems to be to set it statically before creating the instance.
-            DirectoryTaxonomyWriterAnonymousInnerClassHelper3.iw = iw;
-            DirectoryTaxonomyWriter writer = new DirectoryTaxonomyWriterAnonymousInnerClassHelper3(dir);
+            DirectoryTaxonomyWriterAnonymousClass3.iw = iw;
+            DirectoryTaxonomyWriter writer = new DirectoryTaxonomyWriterAnonymousClass3(dir);
 
 
             // add a category so that the following DTR open will cause a flush and 
@@ -371,7 +373,7 @@
 
             // now calling openIfChanged should trip on the wrong assert in ParentArray's ctor
             var newtr = TaxonomyReader.OpenIfChanged(reader);
-            Assert.NotNull(newtr);
+            Assert.IsNotNull(newtr);
             reader.Dispose();
             reader = newtr;
             Assert.AreEqual(2, reader.Count);
@@ -382,11 +384,11 @@
             dir.Dispose();
         }
 
-        private class DirectoryTaxonomyWriterAnonymousInnerClassHelper3 : DirectoryTaxonomyWriter
+        private class DirectoryTaxonomyWriterAnonymousClass3 : DirectoryTaxonomyWriter
         {
             internal static IndexWriter iw;
 
-            public DirectoryTaxonomyWriterAnonymousInnerClassHelper3(Directory dir)
+            public DirectoryTaxonomyWriterAnonymousClass3(Directory dir)
                 : base(dir)
             {
             }
@@ -419,7 +421,7 @@
             writer.Dispose();
 
             DirectoryTaxonomyReader r2 = TaxonomyReader.OpenIfChanged(r1);
-            Assert.NotNull(r2);
+            Assert.IsNotNull(r2);
 
             // fill r2's caches
             Assert.AreEqual(1, r2.GetOrdinal(cp_b));
@@ -433,8 +435,8 @@
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, r2.GetOrdinal(cp_a));
             Assert.AreEqual(cp_b, r2.GetPath(1));
 
-            (r2).Dispose();
-            (r1).Dispose();
+            r2.Dispose();
+            r1.Dispose();
             dir.Dispose();
         }
 
@@ -467,7 +469,7 @@
                 }
 
                 DirectoryTaxonomyReader r2 = TaxonomyReader.OpenIfChanged(r1);
-                Assert.NotNull(r2);
+                Assert.IsNotNull(r2);
 
                 // add r2's categories to the caches
                 Assert.AreEqual(2, r2.GetOrdinal(cp_b));
@@ -475,10 +477,10 @@
 
                 // check that r1 doesn't see cp_b
                 Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, r1.GetOrdinal(cp_b));
-                Assert.Null(r1.GetPath(2));
+                Assert.IsNull(r1.GetPath(2));
 
-                (r1).Dispose();
-                (r2).Dispose();
+                r1.Dispose();
+                r2.Dispose();
                 writer.Dispose();
                 dir.Dispose();
             }
@@ -520,7 +522,7 @@
                 }
 
                 DirectoryTaxonomyReader r2 = TaxonomyReader.OpenIfChanged(r1);
-                Assert.NotNull(r2);
+                Assert.IsNotNull(r2);
 
                 // fill r2's caches
                 Assert.AreEqual(1, r2.GetOrdinal(cp_b));
@@ -534,8 +536,8 @@
                 Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, r2.GetOrdinal(cp_a));
                 Assert.AreEqual(cp_b, r2.GetPath(1));
 
-                (r2).Dispose();
-                (r1).Dispose();
+                r2.Dispose();
+                r1.Dispose();
                 writer.Dispose();
                 dir.Dispose();
             }
@@ -594,7 +596,7 @@
                 it.MoveNext();
                 FacetLabel root = taxoReader.GetPath(it.Current);
                 Assert.AreEqual(1, root.Length);
-                Assert.True(roots.Remove(root.Components[0]));
+                Assert.IsTrue(roots.Remove(root.Components[0]));
             }
             Assert.AreEqual(false, it.MoveNext());
 
@@ -620,7 +622,5 @@
 
             dir.Dispose();
         }
-
     }
-
 }
\ No newline at end of file
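
> Editor's note: the LUCENENET comments above describe why the test sets `iw` statically before constructing the taxonomy writer: the base-class constructor calls the virtual `OpenIndexWriter`, which runs before any derived constructor body can store an instance field. A hedged, self-contained sketch of that constraint; `BaseWriter`, `Resource`, and `Open` are hypothetical stand-ins, not Lucene.NET APIs.

```csharp
using System;

public class Resource { }

public abstract class BaseWriter
{
    protected BaseWriter()
    {
        // Runs before any derived-class constructor body has executed.
        Handle = Open();
    }

    public Resource Handle { get; }

    protected abstract Resource Open();
}

public class PresetWriter : BaseWriter
{
    // Must be static: an instance field assigned from a constructor
    // parameter would still be null when the base ctor calls Open().
    internal static Resource preset;

    protected override Resource Open() => preset;
}

public static class PresetWriterUsage
{
    public static void Demo()
    {
        var resource = new Resource();
        PresetWriter.preset = resource;   // set statically before creating the instance
        var writer = new PresetWriter();  // base ctor calls Open() and receives the preset
        Console.WriteLine(ReferenceEquals(writer.Handle, resource)); // True
    }
}
```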
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
index 43fffcc..c6d6241 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/Directory/TestDirectoryTaxonomyWriter.cs
@@ -1,4 +1,5 @@
-using J2N.Threading;
+// Lucene version compatibility level 4.8.1
+using J2N.Threading;
 using J2N.Threading.Atomic;
 using Lucene.Net.Attributes;
 using NUnit.Framework;
@@ -51,9 +52,9 @@
 
         // A No-Op ITaxonomyWriterCache which always discards all given categories, and
         // always returns true in put(), to indicate some cache entries were cleared.
-        private static readonly ITaxonomyWriterCache NO_OP_CACHE = new TaxonomyWriterCacheAnonymousInnerClassHelper();
+        private static readonly ITaxonomyWriterCache NO_OP_CACHE = new TaxonomyWriterCacheAnonymousClass();
 
-        private class TaxonomyWriterCacheAnonymousInnerClassHelper : ITaxonomyWriterCache
+        private class TaxonomyWriterCacheAnonymousClass : ITaxonomyWriterCache
         {
             public virtual void Dispose()
             {
@@ -81,7 +82,7 @@
             // commit() wasn't called.
             Directory dir = NewDirectory();
             var ltw = new DirectoryTaxonomyWriter(dir, OpenMode.CREATE_OR_APPEND, NO_OP_CACHE);
-            Assert.False(DirectoryReader.IndexExists(dir));
+            Assert.IsFalse(DirectoryReader.IndexExists(dir));
             ltw.Commit(); // first commit, so that an index will be created
             ltw.AddCategory(new FacetLabel("a"));
 
@@ -107,8 +108,8 @@
             var r = DirectoryReader.Open(dir);
             Assert.AreEqual(3, r.NumDocs, "2 categories plus root should have been committed to the underlying directory");
             var readUserCommitData = r.IndexCommit.UserData;
-            Assert.True("1 2 3".Equals(readUserCommitData["testing"], StringComparison.Ordinal), "wrong value extracted from commit data");
-            Assert.NotNull(DirectoryTaxonomyWriter.INDEX_EPOCH + " not found in commitData", readUserCommitData[DirectoryTaxonomyWriter.INDEX_EPOCH]);
+            Assert.IsTrue("1 2 3".Equals(readUserCommitData["testing"], StringComparison.Ordinal), "wrong value extracted from commit data");
+            Assert.IsNotNull(DirectoryTaxonomyWriter.INDEX_EPOCH + " not found in commitData", readUserCommitData[DirectoryTaxonomyWriter.INDEX_EPOCH]);
             r.Dispose();
 
             // open DirTaxoWriter again and commit, INDEX_EPOCH should still exist
@@ -116,8 +117,6 @@
             // that the taxonomy index has been recreated.
             taxoWriter = new DirectoryTaxonomyWriter(dir, OpenMode.CREATE_OR_APPEND, NO_OP_CACHE);
             taxoWriter.AddCategory(new FacetLabel("c")); // add a category so that commit will happen
-
-
             taxoWriter.SetCommitData(new Dictionary<string, string>()
             {
                 {"just", "data"}
@@ -125,12 +124,12 @@
             taxoWriter.Commit();
 
             // verify taxoWriter.getCommitData()
-            Assert.NotNull(DirectoryTaxonomyWriter.INDEX_EPOCH + " not found in taoxWriter.commitData", taxoWriter.CommitData[DirectoryTaxonomyWriter.INDEX_EPOCH]);
+            Assert.IsTrue(taxoWriter.CommitData.ContainsKey(DirectoryTaxonomyWriter.INDEX_EPOCH), DirectoryTaxonomyWriter.INDEX_EPOCH + " not found in taxoWriter.commitData");
             taxoWriter.Dispose();
 
             r = DirectoryReader.Open(dir);
             readUserCommitData = r.IndexCommit.UserData;
-            Assert.NotNull(DirectoryTaxonomyWriter.INDEX_EPOCH + " not found in commitData", readUserCommitData[DirectoryTaxonomyWriter.INDEX_EPOCH]);
+            Assert.IsTrue(readUserCommitData.ContainsKey(DirectoryTaxonomyWriter.INDEX_EPOCH), DirectoryTaxonomyWriter.INDEX_EPOCH + " not found in commitData");
             r.Dispose();
 
             dir.Dispose();
@@ -162,9 +161,9 @@
         {
             // Tests rollback with OpenMode.CREATE
             Directory dir = NewDirectory();
-            (new DirectoryTaxonomyWriter(dir)).Dispose();
+            new DirectoryTaxonomyWriter(dir).Dispose();
             Assert.AreEqual(1, getEpoch(dir));
-            (new DirectoryTaxonomyWriter(dir, OpenMode.CREATE)).Rollback();
+            new DirectoryTaxonomyWriter(dir, OpenMode.CREATE).Rollback();
             Assert.AreEqual(1, getEpoch(dir));
 
             dir.Dispose();
@@ -217,7 +216,7 @@
             var newtr = TaxonomyReader.OpenIfChanged(taxoReader);
             taxoReader.Dispose();
             taxoReader = newtr;
-            Assert.AreEqual(1, Convert.ToInt32(taxoReader.CommitUserData[DirectoryTaxonomyWriter.INDEX_EPOCH]));
+            Assert.AreEqual(1, Convert.ToInt32(taxoReader.CommitUserData[DirectoryTaxonomyWriter.INDEX_EPOCH], CultureInfo.InvariantCulture));
 
             // now recreate the taxonomy, and check that the epoch is preserved after opening DirTW again.
             taxoWriter.Dispose();
@@ -232,7 +231,7 @@
             newtr = TaxonomyReader.OpenIfChanged(taxoReader);
             taxoReader.Dispose();
             taxoReader = newtr;
-            Assert.AreEqual(2, Convert.ToInt32(taxoReader.CommitUserData[DirectoryTaxonomyWriter.INDEX_EPOCH]));
+            Assert.AreEqual(2, Convert.ToInt32(taxoReader.CommitUserData[DirectoryTaxonomyWriter.INDEX_EPOCH], CultureInfo.InvariantCulture));
 
             taxoReader.Dispose();
             dir.Dispose();
@@ -246,22 +245,23 @@
             Directory dir = NewDirectory();
 
             // create an empty index first, so that DirTaxoWriter initializes indexEpoch to 1.
-            (new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null))).Dispose();
+            new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null)).Dispose();
 
             var taxoWriter = new DirectoryTaxonomyWriter(dir, OpenMode.CREATE_OR_APPEND, NO_OP_CACHE);
             taxoWriter.Dispose();
 
             var taxoReader = new DirectoryTaxonomyReader(dir);
-            Assert.AreEqual(1, Convert.ToInt32(taxoReader.CommitUserData[DirectoryTaxonomyWriter.INDEX_EPOCH]));
-            Assert.Null(TaxonomyReader.OpenIfChanged(taxoReader));
-            (taxoReader).Dispose();
+            Assert.AreEqual(1, Convert.ToInt32(taxoReader.CommitUserData[DirectoryTaxonomyWriter.INDEX_EPOCH], CultureInfo.InvariantCulture));
+            Assert.IsNull(TaxonomyReader.OpenIfChanged(taxoReader));
+            taxoReader.Dispose();
 
             dir.Dispose();
         }
 
         [Test]
         [Slow]
-        [Deadlock][Timeout(1200000)]
+        [Deadlock]
+        [Timeout(1200000)]
         public virtual void TestConcurrency()
         {
             int ncats = AtLeast(100000); // add many categories
@@ -296,7 +296,7 @@
             ThreadJob[] addThreads = new ThreadJob[AtLeast(4)];
             for (int z = 0; z < addThreads.Length; z++)
             {
-                addThreads[z] = new ThreadAnonymousInnerClassHelper(range, numCats, values, tw);
+                addThreads[z] = new ThreadAnonymousClass(range, numCats, values, tw);
             }
 
             foreach (var t in addThreads)
@@ -328,7 +328,7 @@
             foreach (string cat in values.Keys)
             {
                 FacetLabel cp = new FacetLabel(FacetsConfig.StringToPath(cat));
-                Assert.True(dtr.GetOrdinal(cp) > 0, "category not found " + cp);
+                Assert.IsTrue(dtr.GetOrdinal(cp) > 0, "category not found " + cp);
                 int level = cp.Length;
                 int parentOrd = 0; // for root, parent is always virtual ROOT (ord=0)
                 FacetLabel path /*= new FacetLabel()*/;
@@ -344,14 +344,14 @@
             IOUtils.Dispose(dtr, dir);
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly int range;
             private readonly AtomicInt32 numCats;
             private readonly ConcurrentDictionary<string, string> values;
             private readonly DirectoryTaxonomyWriter tw;
 
-            public ThreadAnonymousInnerClassHelper(int range, AtomicInt32 numCats, ConcurrentDictionary<string, string> values, DirectoryTaxonomyWriter tw)
+            public ThreadAnonymousClass(int range, AtomicInt32 numCats, ConcurrentDictionary<string, string> values, DirectoryTaxonomyWriter tw)
             {
                 this.range = range;
                 this.numCats = numCats;
@@ -373,7 +373,7 @@
                             Convert.ToString(value / 100000, CultureInfo.InvariantCulture),
                             Convert.ToString(value, CultureInfo.InvariantCulture));
                         int ord = tw.AddCategory(cp);
-                        Assert.True(tw.GetParent(ord) != -1, "invalid parent for ordinal " + ord + ", category " + cp);
+                        Assert.IsTrue(tw.GetParent(ord) != -1, "invalid parent for ordinal " + ord + ", category " + cp);
                         string l1 = FacetsConfig.PathToString(cp.Components, 1);
                         string l2 = FacetsConfig.PathToString(cp.Components, 2);
                         string l3 = FacetsConfig.PathToString(cp.Components, 3);
@@ -395,7 +395,7 @@
         {
             SegmentInfos infos = new SegmentInfos();
             infos.Read(taxoDir);
-            return Convert.ToInt64(infos.UserData[DirectoryTaxonomyWriter.INDEX_EPOCH]);
+            return Convert.ToInt64(infos.UserData[DirectoryTaxonomyWriter.INDEX_EPOCH], CultureInfo.InvariantCulture);
         }
 
         [Test]
@@ -424,13 +424,13 @@
 
             // add the same category again -- it should not receive the same ordinal !
             int newOrdB = taxoWriter.AddCategory(new FacetLabel("b"));
-            Assert.AreNotSame(ordB, newOrdB, "new ordinal cannot be the original ordinal");
+            Assert.AreNotEqual(ordB, newOrdB, "new ordinal cannot be the original ordinal"); // LUCENENET specific: Changed to AreNotEqual instead of AreNotSame (boxing)
             Assert.AreEqual(2, newOrdB, "ordinal should have been 2 since only one category was added by replaceTaxonomy");
 
             taxoWriter.Dispose();
 
             long newEpoch = getEpoch(dir);
-            Assert.True(origEpoch < newEpoch, "index epoch should have been updated after replaceTaxonomy");
+            Assert.IsTrue(origEpoch < newEpoch, "index epoch should have been updated after replaceTaxonomy");
 
             dir.Dispose();
             input.Dispose();
@@ -446,7 +446,7 @@
             DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(dir, OpenMode.CREATE, NO_OP_CACHE);
             int o1 = taxoWriter.AddCategory(new FacetLabel("a"));
             int o2 = taxoWriter.AddCategory(new FacetLabel("a"));
-            Assert.True(o1 == o2, "ordinal for same category that is added twice should be the same !");
+            Assert.IsTrue(o1 == o2, "ordinal for same category that is added twice should be the same !");
             taxoWriter.Dispose();
             dir.Dispose();
         }
@@ -576,7 +576,5 @@
             srcTaxoDir.Dispose();
             targetTaxoDir.Dispose();
         }
-
     }
-
 }
\ No newline at end of file
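
> Editor's note: the hunks above add `CultureInfo.InvariantCulture` to the `Convert.ToInt32`/`Convert.ToInt64` calls that parse epoch values from commit data. Numeric strings written with the invariant culture should be parsed with it as well, so the round trip does not depend on the machine's locale. A minimal sketch with a hypothetical value:

```csharp
using System;
using System.Globalization;

public static class InvariantParsingExample
{
    public static void Demo()
    {
        // Value written the way the commit data would be: invariant culture.
        string epoch = 1234.ToString(CultureInfo.InvariantCulture); // "1234"

        // Pinning the culture on the parse keeps the round trip deterministic,
        // regardless of the thread's current culture settings.
        long parsed = Convert.ToInt64(epoch, CultureInfo.InvariantCulture);

        Console.WriteLine(parsed); // 1234
    }
}
```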
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestCachedOrdinalsReader.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestCachedOrdinalsReader.cs
index b9d65b9..f23cd73 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestCachedOrdinalsReader.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestCachedOrdinalsReader.cs
@@ -1,4 +1,5 @@
-using J2N.Threading;
+// Lucene version compatibility level 4.8.1
+using J2N.Threading;
 using NUnit.Framework;
 using System;
 using System.IO;
@@ -6,7 +7,6 @@
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -60,7 +60,7 @@
             ThreadJob[] threads = new ThreadJob[3];
             for (int i = 0; i < threads.Length; i++)
             {
-                threads[i] = new ThreadAnonymousInnerClassHelper(this, "CachedOrdsThread-" + i, reader, ordsReader);
+                threads[i] = new ThreadAnonymousClass(this, "CachedOrdsThread-" + i, reader, ordsReader);
             }
 
             long ramBytesUsed = 0;
@@ -81,15 +81,15 @@
             IOUtils.Dispose(writer, taxoWriter, reader, indexDir, taxoDir);
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestCachedOrdinalsReader outerInstance;
 
-            private DirectoryReader reader;
-            private Lucene.Net.Facet.Taxonomy.CachedOrdinalsReader ordsReader;
+            private readonly DirectoryReader reader;
+            private readonly CachedOrdinalsReader ordsReader;
 
-            public ThreadAnonymousInnerClassHelper(TestCachedOrdinalsReader outerInstance, string CachedOrdsThread, DirectoryReader reader, Lucene.Net.Facet.Taxonomy.CachedOrdinalsReader ordsReader)
-                : base("CachedOrdsThread-")
+            public ThreadAnonymousClass(TestCachedOrdinalsReader outerInstance, string threadName, DirectoryReader reader, CachedOrdinalsReader ordsReader)
+                : base(threadName)
             {
                 this.outerInstance = outerInstance;
                 this.reader = reader;
@@ -112,5 +112,4 @@
             }
         }
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestFacetLabel.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestFacetLabel.cs
index b47f13c..59960e0 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestFacetLabel.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestFacetLabel.cs
@@ -1,15 +1,12 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 using NUnit.Framework;
 using System;
+using System.Globalization;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using SortedSetDocValuesFacetField = Lucene.Net.Facet.SortedSet.SortedSetDocValuesFacetField;
-    using TestUtil = Lucene.Net.Util.TestUtil;
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -26,6 +23,11 @@
      * See the License for the specific language governing permissions and
      * limitations under the License.
      */
+
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using SortedSetDocValuesFacetField = Lucene.Net.Facet.SortedSet.SortedSetDocValuesFacetField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
     [TestFixture]
     public class TestFacetLabel : FacetTestCase
     {
@@ -33,20 +35,20 @@
         [Test]
         public virtual void TestBasic()
         {
-            Assert.AreEqual(0, (new FacetLabel()).Length);
-            Assert.AreEqual(1, (new FacetLabel("hello")).Length);
-            Assert.AreEqual(2, (new FacetLabel("hello", "world")).Length);
+            Assert.AreEqual(0, new FacetLabel().Length);
+            Assert.AreEqual(1, new FacetLabel("hello").Length);
+            Assert.AreEqual(2, new FacetLabel("hello", "world").Length);
         }
 
         [Test]
         public virtual void TestToString()
         {
             // When the category is empty, we expect an empty string
-            Assert.AreEqual("FacetLabel: []", (new FacetLabel()).ToString());
+            Assert.AreEqual("FacetLabel: []", new FacetLabel().ToString());
             // one category
-            Assert.AreEqual("FacetLabel: [hello]", (new FacetLabel("hello")).ToString());
+            Assert.AreEqual("FacetLabel: [hello]", new FacetLabel("hello").ToString());
             // more than one category
-            Assert.AreEqual("FacetLabel: [hello, world]", (new FacetLabel("hello", "world")).ToString());
+            Assert.AreEqual("FacetLabel: [hello, world]", new FacetLabel("hello", "world").ToString());
         }
 
         [Test]
@@ -55,12 +57,12 @@
             string[] components = new string[AtLeast(10)];
             for (int i = 0; i < components.Length; i++)
             {
-                components[i] = Convert.ToString(i);
+                components[i] = Convert.ToString(i, CultureInfo.InvariantCulture);
             }
             FacetLabel cp = new FacetLabel(components);
             for (int i = 0; i < components.Length; i++)
             {
-                Assert.AreEqual(i, Convert.ToInt32(cp.Components[i]));
+                Assert.AreEqual(i, Convert.ToInt32(cp.Components[i], CultureInfo.InvariantCulture));
             }
         }
 
@@ -109,25 +111,25 @@
         public virtual void TestEquals()
         {
             Assert.AreEqual(new FacetLabel(), new FacetLabel());
-            Assert.False((new FacetLabel()).Equals(new FacetLabel("hi")));
-            Assert.False((new FacetLabel()).Equals(Convert.ToInt32(3)));
+            Assert.IsFalse(new FacetLabel().Equals(new FacetLabel("hi")));
+            Assert.IsFalse(new FacetLabel().Equals(Convert.ToInt32(3)));
             Assert.AreEqual(new FacetLabel("hello", "world"), new FacetLabel("hello", "world"));
         }
 
         [Test]
         public virtual void TestHashCode()
         {
-            Assert.AreEqual((new FacetLabel()).GetHashCode(), (new FacetLabel()).GetHashCode());
-            Assert.False((new FacetLabel()).GetHashCode() == (new FacetLabel("hi")).GetHashCode());
-            Assert.AreEqual((new FacetLabel("hello", "world")).GetHashCode(), (new FacetLabel("hello", "world")).GetHashCode());
+            Assert.AreEqual(new FacetLabel().GetHashCode(), new FacetLabel().GetHashCode());
+            Assert.IsFalse(new FacetLabel().GetHashCode() == new FacetLabel("hi").GetHashCode());
+            Assert.AreEqual(new FacetLabel("hello", "world").GetHashCode(), new FacetLabel("hello", "world").GetHashCode());
         }
 
         [Test]
         public virtual void TestLongHashCode()
         {
-            Assert.AreEqual((new FacetLabel()).Int64HashCode(), (new FacetLabel()).Int64HashCode());
-            Assert.False((new FacetLabel()).Int64HashCode() == (new FacetLabel("hi")).Int64HashCode());
-            Assert.AreEqual((new FacetLabel("hello", "world")).Int64HashCode(), (new FacetLabel("hello", "world")).Int64HashCode());
+            Assert.AreEqual(new FacetLabel().Int64HashCode(), new FacetLabel().Int64HashCode());
+            Assert.IsFalse(new FacetLabel().Int64HashCode() == new FacetLabel("hi").Int64HashCode());
+            Assert.AreEqual(new FacetLabel("hello", "world").Int64HashCode(), new FacetLabel("hello", "world").Int64HashCode());
         }
 
         [Test]
@@ -146,38 +148,37 @@
             Assert.AreEqual(0, pother.CompareTo(p));
             Assert.AreEqual(0, p.CompareTo(pother));
             pother = new FacetLabel();
-            Assert.True(pother.CompareTo(p) < 0);
-            Assert.True(p.CompareTo(pother) > 0);
+            Assert.IsTrue(pother.CompareTo(p) < 0);
+            Assert.IsTrue(p.CompareTo(pother) > 0);
             pother = new FacetLabel("a", "b_", "c", "d");
-            Assert.True(pother.CompareTo(p) > 0);
-            Assert.True(p.CompareTo(pother) < 0);
+            Assert.IsTrue(pother.CompareTo(p) > 0);
+            Assert.IsTrue(p.CompareTo(pother) < 0);
             pother = new FacetLabel("a", "b", "c");
-            Assert.True(pother.CompareTo(p) < 0);
-            Assert.True(p.CompareTo(pother) > 0);
+            Assert.IsTrue(pother.CompareTo(p) < 0);
+            Assert.IsTrue(p.CompareTo(pother) > 0);
             pother = new FacetLabel("a", "b", "c", "e");
-            Assert.True(pother.CompareTo(p) > 0);
-            Assert.True(p.CompareTo(pother) < 0);
+            Assert.IsTrue(pother.CompareTo(p) > 0);
+            Assert.IsTrue(p.CompareTo(pother) < 0);
         }
 
         [Test]
         public virtual void TestEmptyNullComponents()
         {
             // LUCENE-4724: CategoryPath should not allow empty or null components
-            string[][] components_tests = new string[][]
-        {
-            new string[] {"", "test"},
-            new string[] {"test", ""},
-            new string[] {"test", "", "foo"},
-            new string[] {null, "test"},
-            new string[] {"test", null},
-            new string[] {"test", null, "foo"}
-        };
+            string[][] components_tests = new string[][] {
+                new string[] {"", "test"}, // empty in the beginning
+                new string[] {"test", ""}, // empty in the end
+                new string[] {"test", "", "foo"}, // empty in the middle
+                new string[] {null, "test"}, // null at the beginning
+                new string[] {"test", null}, // null in the end
+                new string[] {"test", null, "foo"} // null in the middle
+            };
 
             foreach (string[] components in components_tests)
             {
                 try
                 {
-                    Assert.NotNull(new FacetLabel(components));
+                    Assert.IsNotNull(new FacetLabel(components));
                     fail("empty or null components should not be allowed: " + Arrays.ToString(components));
                 }
                 catch (ArgumentException)
@@ -186,7 +187,7 @@
                 }
                 try
                 {
-                    new FacetField("dim", components);
+                    _ = new FacetField("dim", components);
                     fail("empty or null components should not be allowed: " + Arrays.ToString(components));
                 }
                 catch (ArgumentException)
@@ -195,7 +196,7 @@
                 }
                 try
                 {
-                    new AssociationFacetField(new BytesRef(), "dim", components);
+                    _ = new AssociationFacetField(new BytesRef(), "dim", components);
                     fail("empty or null components should not be allowed: " + Arrays.ToString(components));
                 }
                 catch (ArgumentException)
@@ -204,7 +205,7 @@
                 }
                 try
                 {
-                    new Int32AssociationFacetField(17, "dim", components);
+                    _ = new Int32AssociationFacetField(17, "dim", components);
                     fail("empty or null components should not be allowed: " + Arrays.ToString(components));
                 }
                 catch (ArgumentException)
@@ -213,7 +214,7 @@
                 }
                 try
                 {
-                    new SingleAssociationFacetField(17.0f, "dim", components);
+                    _ = new SingleAssociationFacetField(17.0f, "dim", components);
                     fail("empty or null components should not be allowed: " + Arrays.ToString(components));
                 }
                 catch (ArgumentException)
@@ -223,7 +224,7 @@
             }
             try
             {
-                new FacetField(null, new string[] { "abc" });
+                _ = new FacetField(null, new string[] { "abc" });
                 fail("empty or null components should not be allowed");
             }
             catch (ArgumentException)
@@ -232,7 +233,7 @@
             }
             try
             {
-                new FacetField("", new string[] { "abc" });
+                _ = new FacetField("", new string[] { "abc" });
                 fail("empty or null components should not be allowed");
             }
             catch (ArgumentException)
@@ -241,7 +242,7 @@
             }
             try
             {
-                new Int32AssociationFacetField(17, null, new string[] { "abc" });
+                _ = new Int32AssociationFacetField(17, null, new string[] { "abc" });
                 fail("empty or null components should not be allowed");
             }
             catch (ArgumentException)
@@ -250,7 +251,7 @@
             }
             try
             {
-                new Int32AssociationFacetField(17, "", new string[] { "abc" });
+                _ = new Int32AssociationFacetField(17, "", new string[] { "abc" });
                 fail("empty or null components should not be allowed");
             }
             catch (ArgumentException)
@@ -259,7 +260,7 @@
             }
             try
             {
-                new SingleAssociationFacetField(17.0f, null, new string[] { "abc" });
+                _ = new SingleAssociationFacetField(17.0f, null, new string[] { "abc" });
                 fail("empty or null components should not be allowed");
             }
             catch (ArgumentException)
@@ -268,7 +269,7 @@
             }
             try
             {
-                new SingleAssociationFacetField(17.0f, "", new string[] { "abc" });
+                _ = new SingleAssociationFacetField(17.0f, "", new string[] { "abc" });
                 fail("empty or null components should not be allowed");
             }
             catch (ArgumentException)
@@ -277,7 +278,7 @@
             }
             try
             {
-                new AssociationFacetField(new BytesRef(), null, new string[] { "abc" });
+                _ = new AssociationFacetField(new BytesRef(), null, new string[] { "abc" });
                 fail("empty or null components should not be allowed");
             }
             catch (ArgumentException)
@@ -286,7 +287,7 @@
             }
             try
             {
-                new AssociationFacetField(new BytesRef(), "", new string[] { "abc" });
+                _ = new AssociationFacetField(new BytesRef(), "", new string[] { "abc" });
                 fail("empty or null components should not be allowed");
             }
             catch (ArgumentException)
@@ -295,7 +296,7 @@
             }
             try
             {
-                new SortedSetDocValuesFacetField(null, "abc");
+                _ = new SortedSetDocValuesFacetField(null, "abc");
                 fail("empty or null components should not be allowed");
             }
             catch (ArgumentException)
@@ -304,7 +305,7 @@
             }
             try
             {
-                new SortedSetDocValuesFacetField("", "abc");
+                _ = new SortedSetDocValuesFacetField("", "abc");
                 fail("empty or null components should not be allowed");
             }
             catch (ArgumentException)
@@ -313,7 +314,7 @@
             }
             try
             {
-                new SortedSetDocValuesFacetField("dim", null);
+                _ = new SortedSetDocValuesFacetField("dim", null);
                 fail("empty or null components should not be allowed");
             }
             catch (ArgumentException)
@@ -322,7 +323,7 @@
             }
             try
             {
-                new SortedSetDocValuesFacetField("dim", "");
+                _ = new SortedSetDocValuesFacetField("dim", "");
                 fail("empty or null components should not be allowed");
             }
             catch (ArgumentException)
@@ -348,7 +349,7 @@
 
             try
             {
-                Assert.NotNull(new FacetLabel("dim", bigComp));
+                Assert.IsNotNull(new FacetLabel("dim", bigComp));
                 fail("long paths should not be allowed; len=" + bigComp.Length);
             }
             catch (ArgumentException)
@@ -357,5 +358,4 @@
             }
         }
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
index d73c3a2..20e75c9 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestLRUHashMap.cs
@@ -1,9 +1,9 @@
-using NUnit.Framework;
+// Lucene version compatibility level 4.8.1
+using NUnit.Framework;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -21,7 +21,6 @@
      * limitations under the License.
      */
 
-    using Lucene.Net.Facet.Taxonomy;
     [TestFixture]
     public class TestLRUHashMap : FacetTestCase
     {
@@ -41,32 +40,31 @@
             Assert.AreEqual(3, lru.Count);
             lru.Put("four", "Shalom");
             Assert.AreEqual(3, lru.Count);
-            Assert.NotNull(lru.Get("three"));
-            Assert.NotNull(lru.Get("two"));
-            Assert.NotNull(lru.Get("four"));
-            Assert.Null(lru.Get("one"));
+            Assert.IsNotNull(lru.Get("three"));
+            Assert.IsNotNull(lru.Get("two"));
+            Assert.IsNotNull(lru.Get("four"));
+            Assert.IsNull(lru.Get("one"));
             lru.Put("five", "Yo!");
             Assert.AreEqual(3, lru.Count);
-            Assert.Null(lru.Get("three")); // three was last used, so it got removed
-            Assert.NotNull(lru.Get("five"));
+            Assert.IsNull(lru.Get("three")); // three was last used, so it got removed
+            Assert.IsNotNull(lru.Get("five"));
             lru.Get("four");
             lru.Put("six", "hi");
             lru.Put("seven", "hey dude");
             Assert.AreEqual(3, lru.Count);
-            Assert.Null(lru.Get("one"));
-            Assert.Null(lru.Get("two"));
-            Assert.Null(lru.Get("three"));
-            Assert.NotNull(lru.Get("four"));
-            Assert.Null(lru.Get("five"));
-            Assert.NotNull(lru.Get("six"));
-            Assert.NotNull(lru.Get("seven"));
+            Assert.IsNull(lru.Get("one"));
+            Assert.IsNull(lru.Get("two"));
+            Assert.IsNull(lru.Get("three"));
+            Assert.IsNotNull(lru.Get("four"));
+            Assert.IsNull(lru.Get("five"));
+            Assert.IsNotNull(lru.Get("six"));
+            Assert.IsNotNull(lru.Get("seven"));
 
             // LUCENENET specific tests to ensure Put is implemented correctly
-            Assert.Null(lru.Put("ten", "oops"));
+            Assert.IsNull(lru.Put("ten", "oops"));
             assertEquals("oops", lru.Put("ten", "not oops"));
             assertEquals("not oops", lru.Put("ten", "new value"));
             assertEquals("new value", lru.Put("ten", "new value2"));
         }
     }
-
 }
\ No newline at end of file
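
> Editor's note: the test above exercises the LRU semantics of the facet taxonomy's bounded map: `Get` marks an entry as recently used, `Put` returns the previous value for an existing key (or null), and inserting past the capacity evicts the least recently used entry. A hedged sketch of those semantics (not the Lucene.NET implementation), built on `Dictionary` plus `LinkedList`:

```csharp
using System.Collections.Generic;

public class LruMapSketch<TKey, TValue> where TValue : class
{
    private readonly int capacity;
    private readonly Dictionary<TKey, LinkedListNode<KeyValuePair<TKey, TValue>>> map = new();
    private readonly LinkedList<KeyValuePair<TKey, TValue>> order = new(); // front = most recently used

    public LruMapSketch(int capacity) => this.capacity = capacity;

    public int Count => map.Count;

    public TValue Get(TKey key)
    {
        if (!map.TryGetValue(key, out var node)) return null;
        order.Remove(node);
        order.AddFirst(node); // touch: move to the most-recently-used position
        return node.Value.Value;
    }

    public TValue Put(TKey key, TValue value)
    {
        if (map.TryGetValue(key, out var node))
        {
            TValue previous = node.Value.Value;
            order.Remove(node);
            var replacement = new LinkedListNode<KeyValuePair<TKey, TValue>>(new(key, value));
            order.AddFirst(replacement);
            map[key] = replacement;
            return previous; // existing key: report the value that was replaced
        }

        if (map.Count == capacity)
        {
            var oldest = order.Last; // least recently used
            order.RemoveLast();
            map.Remove(oldest.Value.Key);
        }

        var added = new LinkedListNode<KeyValuePair<TKey, TValue>>(new(key, value));
        order.AddFirst(added);
        map[key] = added;
        return null; // new key: nothing was replaced
    }
}
```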
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
index 9717ba5..5fad91a 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestSearcherTaxonomyManager.cs
@@ -1,4 +1,5 @@
-using J2N.Threading;
+// Lucene version compatibility level 4.8.1
+using J2N.Threading;
 using J2N.Threading.Atomic;
 using Lucene.Net.Attributes;
 using Lucene.Net.Index.Extensions;
@@ -45,16 +46,14 @@
     [TestFixture]
     public class TestSearcherTaxonomyManager : FacetTestCase
     {
-
         private class IndexerThread : ThreadJob
         {
-
-            internal IndexWriter w;
-            internal FacetsConfig config;
-            internal ITaxonomyWriter tw;
-            internal ReferenceManager<SearcherAndTaxonomy> mgr;
-            internal int ordLimit;
-            internal AtomicBoolean stop;
+            private readonly IndexWriter w;
+            private readonly FacetsConfig config;
+            private readonly ITaxonomyWriter tw;
+            private readonly ReferenceManager<SearcherAndTaxonomy> mgr;
+            private readonly int ordLimit;
+            private readonly AtomicBoolean stop;
 
             public IndexerThread(IndexWriter w, FacetsConfig config, ITaxonomyWriter tw, ReferenceManager<SearcherAndTaxonomy> mgr, int ordLimit, AtomicBoolean stop)
             {
@@ -165,7 +164,7 @@
 
             var mgr = new SearcherTaxonomyManager(w, true, null, tw);
 
-            var reopener = new ThreadAnonymousInnerClassHelper(stop, mgr);
+            var reopener = new ThreadAnonymousClass(stop, mgr);
 
             reopener.Name = "reopener";
             reopener.Start();
@@ -188,8 +187,8 @@
                         if (pair.Searcher.IndexReader.NumDocs > 0)
                         {
                             //System.out.println(pair.taxonomyReader.getSize());
-                            Assert.True(result.ChildCount > 0);
-                            Assert.True(result.LabelValues.Length > 0);
+                            Assert.IsTrue(result.ChildCount > 0);
+                            Assert.IsTrue(result.LabelValues.Length > 0);
                         }
 
                         //if (VERBOSE) {
@@ -216,12 +215,12 @@
             IOUtils.Dispose(mgr, tw, w, taxoDir, dir);
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly AtomicBoolean stop;
             private readonly SearcherTaxonomyManager mgr;
 
-            public ThreadAnonymousInnerClassHelper(AtomicBoolean stop, SearcherTaxonomyManager mgr)
+            public ThreadAnonymousClass(AtomicBoolean stop, SearcherTaxonomyManager mgr)
             {
                 this.stop = stop;
                 this.mgr = mgr;
@@ -298,8 +297,8 @@
                         if (pair.Searcher.IndexReader.NumDocs > 0)
                         {
                             //System.out.println(pair.taxonomyReader.getSize());
-                            Assert.True(result.ChildCount > 0);
-                            Assert.True(result.LabelValues.Length > 0);
+                            Assert.IsTrue(result.ChildCount > 0);
+                            Assert.IsTrue(result.LabelValues.Length > 0);
                         }
 
                         //if (VERBOSE) {
@@ -400,7 +399,5 @@
 
             IOUtils.Dispose(mgr, tw, w, taxoDir, indexDir);
         }
-
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
index 634bf97..2a8bc3e 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyCombined.cs
@@ -1,4 +1,5 @@
-using J2N.Threading;
+// Lucene version compatibility level 4.8.1
+using J2N.Threading;
 using J2N.Threading.Atomic;
 using Lucene.Net.Support;
 using NUnit.Framework;
@@ -11,11 +12,6 @@
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-    using DirectoryTaxonomyReader = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyReader;
-    using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
-    using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException;
-    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -33,6 +29,10 @@
      * limitations under the License.
      */
 
+    using DirectoryTaxonomyReader = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyReader;
+    using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
+    using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 
     [TestFixture]
     [SuppressCodecs("SimpleText", "Lucene3x")]
@@ -43,21 +43,20 @@
         ///  The following categories will be added to the taxonomy by
         ///  fillTaxonomy(), and tested by all tests below:
         /// </summary>
-        private static readonly string[][] categories = new string[][]
-      {
-          new string[] {"Author", "Tom Clancy"},
-          new string[] {"Author", "Richard Dawkins"},
-          new string[] {"Author", "Richard Adams"},
-          new string[] {"Price", "10", "11"},
-          new string[] {"Price", "10", "12"},
-          new string[] {"Price", "20", "27"},
-          new string[] {"Date", "2006", "05"},
-          new string[] {"Date", "2005"},
-          new string[] {"Date", "2006"},
-          new string[] {"Subject", "Nonfiction", "Children", "Animals"},
-          new string[] {"Author", "Stephen Jay Gould"},
-          new string[] {"Author", "\u05e0\u05d3\u05d1\u3042\u0628"}
-      };
+        private static readonly string[][] categories = new string[][] {
+            new string[] {"Author", "Tom Clancy"},
+            new string[] {"Author", "Richard Dawkins"},
+            new string[] {"Author", "Richard Adams"},
+            new string[] {"Price", "10", "11"},
+            new string[] {"Price", "10", "12"},
+            new string[] {"Price", "20", "27"},
+            new string[] {"Date", "2006", "05"},
+            new string[] {"Date", "2005"},
+            new string[] {"Date", "2006"},
+            new string[] {"Subject", "Nonfiction", "Children", "Animals"},
+            new string[] {"Author", "Stephen Jay Gould"},
+            new string[] {"Author", "\u05e0\u05d3\u05d1\u3042\u0628"}
+        };
 
         /// <summary>
         ///  When adding the above categories with ITaxonomyWriter.AddCategory(), 
@@ -66,8 +65,7 @@
         ///  not tested - rather, just the last component, the ordinal, is returned
         ///  and tested.
         /// </summary>
-        private static readonly int[][] ExpectedPaths =
-        {
+        private static readonly int[][] ExpectedPaths = {
             new int[] {1, 2},
             new int[] {1, 3},
             new int[] {1, 4},
@@ -87,7 +85,29 @@
         ///  generated categories, with increasing ordinals (note how parent
         ///  categories are be added automatically when subcategories are added).
         /// </summary>
-        private static readonly string[][] ExpectedCategories = new string[][] { new string[] { }, new string[] { "Author" }, new string[] { "Author", "Tom Clancy" }, new string[] { "Author", "Richard Dawkins" }, new string[] { "Author", "Richard Adams" }, new string[] { "Price" }, new string[] { "Price", "10" }, new string[] { "Price", "10", "11" }, new string[] { "Price", "10", "12" }, new string[] { "Price", "20" }, new string[] { "Price", "20", "27" }, new string[] { "Date" }, new string[] { "Date", "2006" }, new string[] { "Date", "2006", "05" }, new string[] { "Date", "2005" }, new string[] { "Subject" }, new string[] { "Subject", "Nonfiction" }, new string[] { "Subject", "Nonfiction", "Children" }, new string[] { "Subject", "Nonfiction", "Children", "Animals" }, new string[] { "Author", "Stephen Jay Gould" }, new string[] { "Author", "\u05e0\u05d3\u05d1\u3042\u0628" } };
+        private static readonly string[][] ExpectedCategories = new string[][] {
+            new string[] { },
+            new string[] { "Author" },
+            new string[] { "Author", "Tom Clancy" },
+            new string[] { "Author", "Richard Dawkins" },
+            new string[] { "Author", "Richard Adams" },
+            new string[] { "Price" },
+            new string[] { "Price", "10" },
+            new string[] { "Price", "10", "11" },
+            new string[] { "Price", "10", "12" },
+            new string[] { "Price", "20" },
+            new string[] { "Price", "20", "27" },
+            new string[] { "Date" },
+            new string[] { "Date", "2006" },
+            new string[] { "Date", "2006", "05" },
+            new string[] { "Date", "2005" },
+            new string[] { "Subject" },
+            new string[] { "Subject", "Nonfiction" },
+            new string[] { "Subject", "Nonfiction", "Children" },
+            new string[] { "Subject", "Nonfiction", "Children", "Animals" },
+            new string[] { "Author", "Stephen Jay Gould" },
+            new string[] { "Author", "\u05e0\u05d3\u05d1\u3042\u0628" }
+        };
 
         /// <summary>
         ///  fillTaxonomy adds the categories in the categories[] array, and asserts
@@ -370,13 +390,14 @@
                 FacetLabel category = tr.GetPath(i);
                 if (!expectedCategory.Equals(category))
                 {
-                    fail("For ordinal " + i + " expected category " + Showcat(expectedCategory) + ", but got " + Showcat(category));
+                    fail("For ordinal " + i + " expected category " +
+                        Showcat(expectedCategory) + ", but got " + Showcat(category));
                 }
             }
             //  (also test invalid ordinals:)
-            Assert.Null(tr.GetPath(-1));
-            Assert.Null(tr.GetPath(tr.Count));
-            Assert.Null(tr.GetPath(TaxonomyReader.INVALID_ORDINAL));
+            Assert.IsNull(tr.GetPath(-1));
+            Assert.IsNull(tr.GetPath(tr.Count));
+            Assert.IsNull(tr.GetPath(TaxonomyReader.INVALID_ORDINAL));
 
             // test TaxonomyReader.GetOrdinal():
             for (int i = 1; i < ExpectedCategories.Length; i++)
@@ -385,7 +406,8 @@
                 int ordinal = tr.GetOrdinal(new FacetLabel(ExpectedCategories[i]));
                 if (expectedOrdinal != ordinal)
                 {
-                    fail("For category " + Showcat(ExpectedCategories[i]) + " expected ordinal " + expectedOrdinal + ", but got " + ordinal);
+                    fail("For category " + Showcat(ExpectedCategories[i]) + " expected ordinal " +
+                        expectedOrdinal + ", but got " + ordinal);
                 }
             }
             // (also test invalid categories:)
@@ -430,12 +452,15 @@
                 FacetLabel parent = tr.GetPath(parentOrdinal);
                 if (parent is null)
                 {
-                    fail("Parent of " + ordinal + " is " + parentOrdinal + ", but this is not a valid category.");
+                    fail("Parent of " + ordinal + " is " + parentOrdinal +
+                        ", but this is not a valid category.");
                 }
                 // verify that the parent is indeed my parent, according to the strings
                 if (!me.Subpath(me.Length - 1).Equals(parent))
                 {
-                    fail("Got parent " + parentOrdinal + " for ordinal " + ordinal + " but categories are " + Showcat(parent) + " and " + Showcat(me) + " respectively.");
+                    fail("Got parent " + parentOrdinal + " for ordinal " + ordinal +
+                        " but categories are " + Showcat(parent) + " and " + Showcat(me) +
+                        " respectively.");
                 }
             }
 
@@ -502,13 +527,16 @@
                 FacetLabel parent = tr.GetPath(parentOrdinal);
                 if (parent is null)
                 {
-                    fail("Parent of " + ordinal + " is " + parentOrdinal + ", but this is not a valid category.");
+                    fail("Parent of " + ordinal + " is " + parentOrdinal
+                        + ", but this is not a valid category.");
                 }
                 // verify that the parent is indeed my parent, according to the
                 // strings
                 if (!me.Subpath(me.Length - 1).Equals(parent))
                 {
-                    fail("Got parent " + parentOrdinal + " for ordinal " + ordinal + " but categories are " + Showcat(parent) + " and " + Showcat(me) + " respectively.");
+                    fail("Got parent " + parentOrdinal + " for ordinal " + ordinal
+                        + " but categories are " + Showcat(parent) + " and "
+                        + Showcat(me) + " respectively.");
                 }
             }
 
@@ -649,7 +677,7 @@
             // (it can also be INVALID_ORDINAL, which is lower than any ordinal)
             for (int i = 0; i < tr.Count; i++)
             {
-                Assert.True(olderSiblingArray[i] < i, "olderSiblingArray[" + i + "] should be <" + i);
+                Assert.IsTrue(olderSiblingArray[i] < i, "olderSiblingArray[" + i + "] should be <" + i);
             }
 
             // test that the "older sibling" of every category is indeed a sibling
@@ -727,8 +755,8 @@
             Assert.AreEqual(3, tr.Count);
             Assert.AreEqual(3, ca.Siblings.Length);
             Assert.AreEqual(3, ca.Children.Length);
-            Assert.True(Arrays.Equals(new int[] { 1, 2, -1 }, ca.Children));
-            Assert.True(Arrays.Equals(new int[] { -1, -1, -1 }, ca.Siblings));
+            Assert.IsTrue(Arrays.Equals(new int[] { 1, 2, -1 }, ca.Children));
+            Assert.IsTrue(Arrays.Equals(new int[] { -1, -1, -1 }, ca.Siblings));
             tw.AddCategory(new FacetLabel("hi", "ho"));
             tw.AddCategory(new FacetLabel("hello"));
             tw.Commit();
@@ -740,15 +768,15 @@
             Assert.AreEqual(3, ca.Children.Length);
             // After the refresh, things change:
             var newtr = TaxonomyReader.OpenIfChanged(tr);
-            Assert.NotNull(newtr);
+            Assert.IsNotNull(newtr);
             tr.Dispose();
             tr = newtr;
             ca = tr.ParallelTaxonomyArrays;
             Assert.AreEqual(5, tr.Count);
             Assert.AreEqual(5, ca.Siblings.Length);
             Assert.AreEqual(5, ca.Children.Length);
-            Assert.True(Arrays.Equals(new int[] { 4, 3, -1, -1, -1 }, ca.Children));
-            Assert.True(Arrays.Equals(new int[] { -1, -1, -1, 2, 1 }, ca.Siblings));
+            Assert.IsTrue(Arrays.Equals(new int[] { 4, 3, -1, -1, -1 }, ca.Children));
+            Assert.IsTrue(Arrays.Equals(new int[] { -1, -1, -1, 2, 1 }, ca.Siblings));
             tw.Dispose();
             tr.Dispose();
             indexDir.Dispose();
@@ -781,7 +809,7 @@
             twBase.Dispose();
 
             var newTaxoReader = TaxonomyReader.OpenIfChanged(trBase);
-            Assert.NotNull(newTaxoReader);
+            Assert.IsNotNull(newTaxoReader);
             trBase.Dispose();
             trBase = newTaxoReader;
 
@@ -807,7 +835,6 @@
             tw.AddCategory(abPath);
             tw.Commit();
 
-            
             var tr = new DirectoryTaxonomyReader(indexDir);
             for (int i = 0; i < numCategories; i++)
             {
@@ -817,15 +844,14 @@
             }
             tw.Dispose();
 
-            
             var stop = new AtomicBoolean(false);
             Exception[] error = new Exception[] { null };
             int[] retrieval = new int[] { 0 };
 
-            var thread = new ThreadAnonymousInnerClassHelper(this, abPath, abOrd, abYoungChildBase1, abYoungChildBase2, retry, tr, stop, error, retrieval);
+            var thread = new ThreadAnonymousClass(this, abPath, abOrd, abYoungChildBase1, abYoungChildBase2, retry, tr, stop, error, retrieval);
             thread.Start();
 
-            indexDir.SleepMillis = 1; // some delay for refresh
+            indexDir.SetSleepMillis(1); // some delay for refresh
             var newTaxoReader = TaxonomyReader.OpenIfChanged(tr);
             if (newTaxoReader != null)
             {
@@ -834,26 +860,26 @@
 
             stop.Value = true;
             thread.Join();
-            Assert.Null(error[0], "Unexpcted exception at retry " + retry + " retrieval " + retrieval[0] + ": \n" + stackTraceStr(error[0]));
+            Assert.IsNull(error[0], "Unexpcted exception at retry " + retry + " retrieval " + retrieval[0] + ": \n" + stackTraceStr(error[0]));
 
             tr.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestTaxonomyCombined outerInstance;
 
-            private Lucene.Net.Facet.Taxonomy.FacetLabel abPath;
-            private int abOrd;
-            private int abYoungChildBase1;
-            private int abYoungChildBase2;
-            private int retry;
-            private DirectoryTaxonomyReader tr;
-            private AtomicBoolean stop;
-            private Exception[] error;
-            private int[] retrieval;
+            private readonly FacetLabel abPath;
+            private readonly int abOrd;
+            private readonly int abYoungChildBase1;
+            private readonly int abYoungChildBase2;
+            private readonly int retry;
+            private readonly DirectoryTaxonomyReader tr;
+            private readonly AtomicBoolean stop;
+            private readonly Exception[] error;
+            private readonly int[] retrieval;
 
-            public ThreadAnonymousInnerClassHelper(TestTaxonomyCombined outerInstance, Lucene.Net.Facet.Taxonomy.FacetLabel abPath, int abOrd, int abYoungChildBase1, int abYoungChildBase2, int retry, DirectoryTaxonomyReader tr, AtomicBoolean stop, Exception[] error, int[] retrieval)
+            public ThreadAnonymousClass(TestTaxonomyCombined outerInstance, FacetLabel abPath, int abOrd, int abYoungChildBase1, int abYoungChildBase2, int retry, DirectoryTaxonomyReader tr, AtomicBoolean stop, Exception[] error, int[] retrieval)
                 : base("Child Arrays Verifier")
             {
                 this.outerInstance = outerInstance;
@@ -878,7 +904,7 @@
                     while (!stop)
                     {
                         int lastOrd = tr.ParallelTaxonomyArrays.Parents.Length - 1;
-                        Assert.NotNull(tr.GetPath(lastOrd), "path of last-ord " + lastOrd + " is not found!");
+                        Assert.IsNotNull(tr.GetPath(lastOrd), "path of last-ord " + lastOrd + " is not found!");
                         AssertChildrenArrays(tr.ParallelTaxonomyArrays, retry, retrieval[0]++);
                         Thread.Sleep(10);// don't starve refresh()'s CPU, which sleeps every 50 bytes for 1 ms
                     }
@@ -893,7 +919,10 @@
             private void AssertChildrenArrays(ParallelTaxonomyArrays ca, int retry, int retrieval)
             {
                 int abYoungChild = ca.Children[abOrd];
-                Assert.True(abYoungChildBase1 == abYoungChild || abYoungChildBase2 == ca.Children[abOrd], "Retry " + retry + ": retrieval: " + retrieval + ": wrong youngest child for category " + abPath + " (ord=" + abOrd + ") - must be either " + abYoungChildBase1 + " or " + abYoungChildBase2 + " but was: " + abYoungChild);
+                Assert.IsTrue(abYoungChildBase1 == abYoungChild ||
+                    abYoungChildBase2 == ca.Children[abOrd],
+                    "Retry " + retry + ": retrieval: " + retrieval + ": wrong youngest child for category " + abPath + " (ord=" + abOrd +
+                    ") - must be either " + abYoungChildBase1 + " or " + abYoungChildBase2 + " but was: " + abYoungChild);
             }
         }
 
@@ -939,12 +968,12 @@
             Assert.AreEqual(1, tr.Count); // the empty taxonomy has size 1 (the root)
             tw.AddCategory(new FacetLabel("Author"));
             Assert.AreEqual(1, tr.Count); // still root only...
-            Assert.Null(TaxonomyReader.OpenIfChanged(tr)); // this is not enough, because tw.Commit() hasn't been done yet
+            Assert.IsNull(TaxonomyReader.OpenIfChanged(tr)); // this is not enough, because tw.Commit() hasn't been done yet
             Assert.AreEqual(1, tr.Count); // still root only...
             tw.Commit();
             Assert.AreEqual(1, tr.Count); // still root only...
             var newTaxoReader = TaxonomyReader.OpenIfChanged(tr);
-            Assert.NotNull(newTaxoReader);
+            Assert.IsNotNull(newTaxoReader);
             tr.Dispose();
             tr = newTaxoReader;
 
@@ -968,7 +997,7 @@
             int dawkins = 2;
             tw.Commit();
             newTaxoReader = TaxonomyReader.OpenIfChanged(tr);
-            Assert.NotNull(newTaxoReader);
+            Assert.IsNotNull(newTaxoReader);
             tr.Dispose();
             tr = newTaxoReader;
             int[] parents = tr.ParallelTaxonomyArrays.Parents;
@@ -998,7 +1027,7 @@
             // before commit and refresh, no change:
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.GetOrdinal(author));
             Assert.AreEqual(1, tr.Count); // still root only...
-            Assert.Null(TaxonomyReader.OpenIfChanged(tr)); // this is not enough, because tw.Commit() hasn't been done yet
+            Assert.IsNull(TaxonomyReader.OpenIfChanged(tr)); // this is not enough, because tw.Commit() hasn't been done yet
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.GetOrdinal(author));
             Assert.AreEqual(1, tr.Count); // still root only...
             tw.Commit();
@@ -1006,7 +1035,7 @@
             Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.GetOrdinal(author));
             Assert.AreEqual(1, tr.Count); // still root only...
             var newTaxoReader = TaxonomyReader.OpenIfChanged(tr);
-            Assert.NotNull(newTaxoReader);
+            Assert.IsNotNull(newTaxoReader);
             tr.Dispose();
             tr = newTaxoReader;
             Assert.AreEqual(1, tr.GetOrdinal(author));
@@ -1037,7 +1066,7 @@
             // We expect to get a LockObtainFailedException.
             try
             {
-                Assert.Null(new DirectoryTaxonomyWriter(indexDir));
+                Assert.IsNull(new DirectoryTaxonomyWriter(indexDir));
                 fail("should have failed to write in locked directory");
             }
             catch (LockObtainFailedException)
@@ -1052,7 +1081,7 @@
             tw2.Dispose();
             // See that the writer indeed wrote:
             var newtr = TaxonomyReader.OpenIfChanged(tr);
-            Assert.NotNull(newtr);
+            Assert.IsNotNull(newtr);
             tr.Dispose();
             tr = newtr;
             Assert.AreEqual(3, tr.GetOrdinal(new FacetLabel("hey")));
@@ -1078,7 +1107,8 @@
                 int expectedOrdinal = ExpectedPaths[i][ExpectedPaths[i].Length - 1];
                 if (ordinal != expectedOrdinal)
                 {
-                    fail("For category " + Showcat(categories[i]) + " expected ordinal " + expectedOrdinal + ", but got " + ordinal);
+                    fail("For category " + Showcat(categories[i]) + " expected ordinal " +
+                        expectedOrdinal + ", but got " + ordinal);
                 }
                 for (int j = ExpectedPaths[i].Length - 2; j >= 0; j--)
                 {
@@ -1086,7 +1116,9 @@
                     expectedOrdinal = ExpectedPaths[i][j];
                     if (ordinal != expectedOrdinal)
                     {
-                        fail("For category " + Showcat(categories[i]) + " expected ancestor level " + (ExpectedPaths[i].Length - 1 - j) + " was " + expectedOrdinal + ", but got " + ordinal);
+                        fail("For category " + Showcat(categories[i]) + " expected ancestor level " +
+                            (ExpectedPaths[i].Length - 1 - j) + " was " + expectedOrdinal +
+                            ", but got " + ordinal);
                     }
                 }
             }
@@ -1105,7 +1137,9 @@
                     int expectedOrdinal = ExpectedPaths[i][j];
                     if (ordinal != expectedOrdinal)
                     {
-                        fail("For category " + Showcat(categories[i]) + " expected ancestor level " + (ExpectedPaths[i].Length - 1 - j) + " was " + expectedOrdinal + ", but got " + ordinal);
+                        fail("For category " + Showcat(categories[i]) + " expected ancestor level " +
+                            (ExpectedPaths[i].Length - 1 - j) + " was " + expectedOrdinal +
+                            ", but got " + ordinal);
                     }
                 }
                 Assert.AreEqual(TaxonomyReader.ROOT_ORDINAL, tw.GetParent(ExpectedPaths[i][0]));
@@ -1167,9 +1201,9 @@
             FacetLabel cp = new FacetLabel("a");
             writer.AddCategory(cp);
             var newReader = TaxonomyReader.OpenIfChanged(reader);
-            Assert.NotNull(newReader, "expected a new instance");
+            Assert.IsNotNull(newReader, "expected a new instance");
             Assert.AreEqual(2, newReader.Count);
-            Assert.AreNotSame(TaxonomyReader.INVALID_ORDINAL, newReader.GetOrdinal(cp));
+            Assert.AreNotEqual(TaxonomyReader.INVALID_ORDINAL, newReader.GetOrdinal(cp)); // LUCENENET specific: Changed from AreNotSame to AreNotEqual (boxing)
             reader.Dispose();
             reader = newReader;
 
@@ -1183,5 +1217,4 @@
         //  using the same object (simulating threads) or different objects
         //  (simulating processes).
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs
index 0b5cab6..87913a1 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs
@@ -1,10 +1,10 @@
-using NUnit.Framework;
+// Lucene version compatibility level 4.8.1
+using NUnit.Framework;
 using System;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -39,7 +39,6 @@
     [TestFixture]
     public class TestTaxonomyFacetAssociations : FacetTestCase
     {
-
         private static Store.Directory dir;
         private static IndexReader reader;
         private static Store.Directory taxoDir;
@@ -297,7 +296,5 @@
             Assert.AreEqual(100, (int)facets.GetSpecificValue("int", "a"), "Wrong count for category 'a'!");
             Assert.AreEqual(150, (int)facets.GetSpecificValue("int", "b"), "Wrong count for category 'b'!");
         }
-
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
index a5988dd..afc86a5 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Index.Extensions;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Index.Extensions;
 using Lucene.Net.Support.IO;
 using NUnit.Framework;
 using System;
@@ -12,7 +13,6 @@
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -131,7 +131,7 @@
 
             Assert.AreEqual(1, facets.GetSpecificValue("Author", "Lisa"));
 
-            Assert.Null(facets.GetTopChildren(10, "Non exitent dim"));
+            Assert.IsNull(facets.GetTopChildren(10, "Non exitent dim"));
 
             // Smoke test PrintTaxonomyStats:
             string result;
@@ -143,12 +143,12 @@
                 }
                 result = bos.ToString();
             }
-            Assert.True(result.IndexOf("/Author: 4 immediate children; 5 total categories", StringComparison.Ordinal) != -1);
-            Assert.True(result.IndexOf("/Publish Date: 3 immediate children; 12 total categories", StringComparison.Ordinal) != -1);
+            Assert.IsTrue(result.IndexOf("/Author: 4 immediate children; 5 total categories", StringComparison.Ordinal) != -1);
+            Assert.IsTrue(result.IndexOf("/Publish Date: 3 immediate children; 12 total categories", StringComparison.Ordinal) != -1);
             // Make sure at least a few nodes of the tree came out:
-            Assert.True(result.IndexOf("  /1999", StringComparison.Ordinal) != -1);
-            Assert.True(result.IndexOf("  /2012", StringComparison.Ordinal) != -1);
-            Assert.True(result.IndexOf("      /20", StringComparison.Ordinal) != -1);
+            Assert.IsTrue(result.IndexOf("  /1999", StringComparison.Ordinal) != -1);
+            Assert.IsTrue(result.IndexOf("  /2012", StringComparison.Ordinal) != -1);
+            Assert.IsTrue(result.IndexOf("      /20", StringComparison.Ordinal) != -1);
 
             IOUtils.Dispose(writer, taxoWriter, searcher.IndexReader, taxoReader, taxoDir, dir);
         }
@@ -267,7 +267,7 @@
 
             // Ask for top 10 labels for any dims that have counts:
             IList<FacetResult> results = facets.GetAllDims(10);
-            Assert.True(results.Count == 0);
+            Assert.IsTrue(results.Count == 0);
 
             try
             {
@@ -298,7 +298,7 @@
             Store.Directory dir = NewDirectory();
             Store.Directory taxoDir = NewDirectory();
             IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
-            iwc.SetSimilarity(new PerFieldSimilarityWrapperAnonymousInnerClassHelper(this));
+            iwc.SetSimilarity(new PerFieldSimilarityWrapperAnonymousClass(this));
             ITaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
             RandomIndexWriter writer = new RandomIndexWriter(Random, dir, iwc);
             FacetsConfig config = new FacetsConfig();
@@ -310,17 +310,17 @@
             IOUtils.Dispose(writer, taxoWriter, dir, taxoDir);
         }
 
-        private class PerFieldSimilarityWrapperAnonymousInnerClassHelper : PerFieldSimilarityWrapper
+        private class PerFieldSimilarityWrapperAnonymousClass : PerFieldSimilarityWrapper
         {
             private readonly TestTaxonomyFacetCounts outerInstance;
 
-            public PerFieldSimilarityWrapperAnonymousInnerClassHelper(TestTaxonomyFacetCounts outerInstance)
+            public PerFieldSimilarityWrapperAnonymousClass(TestTaxonomyFacetCounts outerInstance)
             {
                 this.outerInstance = outerInstance;
                 sim = new DefaultSimilarity();
             }
 
-            internal readonly Similarity sim;
+            private readonly Similarity sim;
 
             public override Similarity Get(string name)
             {
@@ -890,5 +890,4 @@
             IOUtils.Dispose(w, tw, searcher.IndexReader, tr, indexDir, taxoDir);
         }
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
index 33c1388..fc89050 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetCounts2.cs
@@ -1,4 +1,6 @@
-using J2N.Collections.Generic.Extensions;
+// Lucene version compatibility level 4.8.1
+using J2N.Collections.Generic.Extensions;
+using Lucene.Net.Index;
 using Lucene.Net.Support;
 using NUnit.Framework;
 using System;
@@ -8,7 +10,6 @@
 
 namespace Lucene.Net.Facet.Taxonomy
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -26,7 +27,6 @@
      * limitations under the License.
      */
 
-
     using DirectoryReader = Lucene.Net.Index.DirectoryReader;
     using DirectoryTaxonomyReader = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyReader;
     using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
@@ -147,17 +147,14 @@
             }
         }
 
-        private static FacetsConfig Config
+        private static FacetsConfig GetConfig()
         {
-            get
-            {
-                FacetsConfig config = new FacetsConfig();
-                config.SetMultiValued("A", true);
-                config.SetMultiValued("B", true);
-                config.SetRequireDimCount("B", true);
-                config.SetHierarchical("D", true);
-                return config;
-            }
+            FacetsConfig config = new FacetsConfig();
+            config.SetMultiValued("A", true);
+            config.SetMultiValued("B", true);
+            config.SetRequireDimCount("B", true);
+            config.SetHierarchical("D", true);
+            return config;
         }
 
         private static void IndexDocsNoFacets(IndexWriter indexWriter)
@@ -176,7 +173,7 @@
         {
             Random random = Random;
             int numDocs = AtLeast(random, 2);
-            FacetsConfig config = Config;
+            FacetsConfig config = GetConfig();
             for (int i = 0; i < numDocs; i++)
             {
                 Document doc = new Document();
@@ -190,7 +187,7 @@
         {
             Random random = Random;
             int numDocs = AtLeast(random, 2);
-            FacetsConfig config = Config;
+            FacetsConfig config = GetConfig();
             for (int i = 0; i < numDocs; i++)
             {
                 Document doc = new Document();
@@ -205,7 +202,7 @@
         {
             Random random = Random;
             int numDocs = AtLeast(random, 2);
-            FacetsConfig config = Config;
+            FacetsConfig config = GetConfig();
             for (int i = 0; i < numDocs; i++)
             {
                 Document doc = new Document();
@@ -262,7 +259,7 @@
             // 4. Segment w/ categories, but only some results
 
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
-            //conf.MergePolicy = NoMergePolicy.INSTANCE; // prevent merges, so we can control the index segments
+            conf.MergePolicy = NoMergePolicy.COMPOUND_FILES; // prevent merges, so we can control the index segments
             IndexWriter indexWriter = new IndexWriter(indexDir, conf);
             ITaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
 
@@ -295,7 +292,7 @@
             FacetsCollector sfc = new FacetsCollector();
             TermQuery q = new TermQuery(A);
             searcher.Search(q, sfc);
-            Facets facets = GetTaxonomyFacetCounts(taxoReader, Config, sfc);
+            Facets facets = GetTaxonomyFacetCounts(taxoReader, GetConfig(), sfc);
             FacetResult result = facets.GetTopChildren(NUM_CHILDREN_CP_A, CP_A);
             Assert.AreEqual(-1, (int)result.Value);
             foreach (LabelAndValue labelValue in result.LabelValues)
@@ -322,7 +319,7 @@
             FacetsCollector sfc = new FacetsCollector();
             searcher.Search(new MatchAllDocsQuery(), sfc);
 
-            Facets facets = GetTaxonomyFacetCounts(taxoReader, Config, sfc);
+            Facets facets = GetTaxonomyFacetCounts(taxoReader, GetConfig(), sfc);
 
             FacetResult result = facets.GetTopChildren(NUM_CHILDREN_CP_A, CP_A);
             Assert.AreEqual(-1, (int)result.Value);
@@ -330,7 +327,7 @@
             foreach (LabelAndValue labelValue in result.LabelValues)
             {
                 Assert.AreEqual(allExpectedCounts[CP_A + "/" + labelValue.Label].GetValueOrDefault(), labelValue.Value);
-                Assert.True((int)labelValue.Value <= prevValue, "wrong sort order of sub results: labelValue.value=" + labelValue.Value + " prevValue=" + prevValue);
+                Assert.IsTrue((int)labelValue.Value <= prevValue, "wrong sort order of sub results: labelValue.value=" + labelValue.Value + " prevValue=" + prevValue);
                 prevValue = (int)labelValue.Value;
             }
 
@@ -340,7 +337,7 @@
             foreach (LabelAndValue labelValue in result.LabelValues)
             {
                 Assert.AreEqual(allExpectedCounts[CP_B + "/" + labelValue.Label].GetValueOrDefault(), labelValue.Value);
-                Assert.True((int)labelValue.Value <= prevValue, "wrong sort order of sub results: labelValue.value=" + labelValue.Value + " prevValue=" + prevValue);
+                Assert.IsTrue((int)labelValue.Value <= prevValue, "wrong sort order of sub results: labelValue.value=" + labelValue.Value + " prevValue=" + prevValue);
                 prevValue = (int)labelValue.Value;
             }
 
@@ -357,7 +354,7 @@
             FacetsCollector sfc = new FacetsCollector();
             searcher.Search(new MatchAllDocsQuery(), sfc);
 
-            Facets facets = GetTaxonomyFacetCounts(taxoReader, Config, sfc);
+            Facets facets = GetTaxonomyFacetCounts(taxoReader, GetConfig(), sfc);
 
             FacetResult result = facets.GetTopChildren(int.MaxValue, CP_A);
             Assert.AreEqual(-1, (int)result.Value);
@@ -385,7 +382,7 @@
             var sfc = new FacetsCollector();
             searcher.Search(new MatchAllDocsQuery(), sfc);
 
-            Facets facets = GetTaxonomyFacetCounts(taxoReader, Config, sfc);
+            Facets facets = GetTaxonomyFacetCounts(taxoReader, GetConfig(), sfc);
 
             FacetResult result = facets.GetTopChildren(NUM_CHILDREN_CP_C, CP_C);
             Assert.AreEqual(allExpectedCounts[CP_C].GetValueOrDefault(), result.Value);
@@ -403,5 +400,4 @@
             IOUtils.Dispose(indexReader, taxoReader);
         }
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
index 2e95b28..d56e83a 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetSumValueSource.cs
@@ -1,10 +1,12 @@
-using Lucene.Net.Diagnostics;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Diagnostics;
 using NUnit.Framework;
 using System;
 using System.Collections;
 using System.Collections.Generic;
 using System.Globalization;
 using System.IO;
+using System.Runtime.CompilerServices;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
 
@@ -67,7 +69,6 @@
         [Test]
         public virtual void TestBasic()
         {
-
             Store.Directory dir = NewDirectory();
             Store.Directory taxoDir = NewDirectory();
 
@@ -210,7 +211,6 @@
         [Test]
         public virtual void TestWrongIndexFieldName()
         {
-
             Store.Directory dir = NewDirectory();
             Store.Directory taxoDir = NewDirectory();
 
@@ -247,7 +247,7 @@
 
             // Ask for top 10 labels for any dims that have counts:
             IList<FacetResult> results = facets.GetAllDims(10);
-            Assert.True(results.Count == 0);
+            Assert.IsTrue(results.Count == 0);
 
             try
             {
@@ -360,7 +360,7 @@
             DirectoryReader r = DirectoryReader.Open(iw, true);
             DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
 
-            ValueSource valueSource = new ValueSourceAnonymousInnerClassHelper(this);
+            ValueSource valueSource = new ValueSourceAnonymousClass(this);
 
             FacetsCollector fc = new FacetsCollector(true);
             // score documents by their 'price' field - makes asserting the correct counts for the categories easier
@@ -373,11 +373,11 @@
             IOUtils.Dispose(taxoWriter, iw, taxoReader, taxoDir, r, indexDir);
         }
 
-        private class ValueSourceAnonymousInnerClassHelper : ValueSource
+        private class ValueSourceAnonymousClass : ValueSource
         {
             private readonly TestTaxonomyFacetSumValueSource outerInstance;
 
-            public ValueSourceAnonymousInnerClassHelper(TestTaxonomyFacetSumValueSource outerInstance)
+            public ValueSourceAnonymousClass(TestTaxonomyFacetSumValueSource outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -386,16 +386,16 @@
             {
                 Scorer scorer = (Scorer)context["scorer"];
                 if (Debugging.AssertsEnabled) Debugging.Assert(scorer != null);
-                return new DoubleDocValuesAnonymousInnerClassHelper(this, scorer);
+                return new DoubleDocValuesAnonymousClass(this, scorer);
             }
 
-            private class DoubleDocValuesAnonymousInnerClassHelper : DoubleDocValues
+            private class DoubleDocValuesAnonymousClass : DoubleDocValues
             {
-                private readonly ValueSourceAnonymousInnerClassHelper outerInstance;
+                private readonly ValueSourceAnonymousClass outerInstance;
 
-                private Scorer scorer;
+                private readonly Scorer scorer;
 
-                public DoubleDocValuesAnonymousInnerClassHelper(ValueSourceAnonymousInnerClassHelper outerInstance, Scorer scorer)
+                public DoubleDocValuesAnonymousClass(ValueSourceAnonymousClass outerInstance, Scorer scorer)
                     : base(null) //todo: value source
                 {
                     this.outerInstance = outerInstance;
@@ -423,7 +423,7 @@
             }
             public override int GetHashCode()
             {
-                throw new NotImplementedException();
+                return RuntimeHelpers.GetHashCode(this);
             }
 
             public override string GetDescription()
@@ -617,5 +617,4 @@
             IOUtils.Dispose(w, tw, searcher.IndexReader, tr, indexDir, taxoDir);
         }
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs
index c487c5f..e268fad 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCharBlockArray.cs
@@ -1,4 +1,5 @@
-using NUnit.Framework;
+// Lucene version compatibility level 4.8.1
+using NUnit.Framework;
 using System.IO;
 using System.Text;
 using Assert = Lucene.Net.TestFramework.Assert;
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs
index eaa271a..b8d013a 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/WriterCache/TestCompactLabelToOrdinal.cs
@@ -1,4 +1,6 @@
-using NUnit.Framework;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Attributes;
+using NUnit.Framework;
 using System;
 using System.Collections.Generic;
 using System.IO;
@@ -155,6 +157,7 @@
         /// LUCENENET specific test similar to TestL2O without any randomness, useful for debugging
         /// </summary>
         [Test]
+        [LuceneNetSpecific]
         public virtual void TestL2OBasic()
         {
             LabelToOrdinal map = new LabelToOrdinalMap();
diff --git a/src/Lucene.Net.Tests.Facet/TestDrillDownQuery.cs b/src/Lucene.Net.Tests.Facet/TestDrillDownQuery.cs
index e2b5be7..3824e10 100644
--- a/src/Lucene.Net.Tests.Facet/TestDrillDownQuery.cs
+++ b/src/Lucene.Net.Tests.Facet/TestDrillDownQuery.cs
@@ -1,10 +1,10 @@
-using NUnit.Framework;
+// Lucene version compatibility level 4.8.1
+using NUnit.Framework;
 using System;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -47,14 +47,12 @@
     [TestFixture]
     public class TestDrillDownQuery : FacetTestCase
     {
-
         private static IndexReader reader;
         private static DirectoryTaxonomyReader taxo;
         private static Directory dir;
         private static Directory taxoDir;
         private static FacetsConfig config;
 
-      
         [OneTimeTearDown]
         public override void AfterClass() // LUCENENET specific - renamed from AfterClassDrillDownQueryTest() to ensure calling order
         {
@@ -266,10 +264,10 @@
             var q = new DrillDownQuery(config, new MatchAllDocsQuery());
             q.Add("a");
 
-            var clone = q.Clone() as DrillDownQuery;
-            Assert.NotNull(clone);
+            var clone = (DrillDownQuery)q.Clone();
+            Assert.IsNotNull(clone);
             clone.Add("b");
-            Assert.False(q.ToString().Equals(clone.ToString(), StringComparison.Ordinal), "query wasn't cloned: source=" + q + " clone=" + clone);
+            Assert.IsFalse(q.ToString().Equals(clone.ToString(), StringComparison.Ordinal), "query wasn't cloned: source=" + q + " clone=" + clone);
         }
 
         [Test]
@@ -281,5 +279,4 @@
             Assert.AreSame(@base, rewrite);
         }
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
index 97be3a1..a00f51b 100644
--- a/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
+++ b/src/Lucene.Net.Tests.Facet/TestDrillSideways.cs
@@ -1,4 +1,5 @@
-using J2N.Collections.Generic.Extensions;
+// Lucene version compatibility level 4.8.1 + LUCENE-6001
+using J2N.Collections.Generic.Extensions;
 using J2N.Text;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Search;
@@ -6,6 +7,7 @@
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
+using System.Globalization;
 using System.Linq;
 using Assert = Lucene.Net.TestFramework.Assert;
 using Console = Lucene.Net.Util.SystemConsole;
@@ -13,7 +15,6 @@
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -31,7 +32,6 @@
      * limitations under the License.
      */
 
-
     using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
     using BytesRef = Lucene.Net.Util.BytesRef;
     using DefaultSortedSetDocValuesReaderState = Lucene.Net.Facet.SortedSet.DefaultSortedSetDocValuesReaderState;
@@ -177,7 +177,7 @@
             // published once:
             Assert.AreEqual("dim=Author path=[] value=5 childCount=4\n  Lisa (2)\n  Bob (1)\n  Susan (1)\n  Frank (1)\n", r.Facets.GetTopChildren(10, "Author").ToString());
 
-            Assert.True(r.Facets is MultiFacets);
+            Assert.IsTrue(r.Facets is MultiFacets);
             IList<FacetResult> allResults = r.Facets.GetAllDims(10);
             Assert.AreEqual(2, allResults.Count);
             Assert.AreEqual("dim=Author path=[] value=5 childCount=4\n  Lisa (2)\n  Bob (1)\n  Susan (1)\n  Frank (1)\n", allResults[0].ToString());
@@ -218,8 +218,8 @@
             ddq.Add("Foobar", "Baz");
             r = ds.Search(null, ddq, 10);
             Assert.AreEqual(0, r.Hits.TotalHits);
-            Assert.Null(r.Facets.GetTopChildren(10, "Publish Date"));
-            Assert.Null(r.Facets.GetTopChildren(10, "Foobar"));
+            Assert.IsNull(r.Facets.GetTopChildren(10, "Publish Date"));
+            Assert.IsNull(r.Facets.GetTopChildren(10, "Foobar"));
 
             // Test drilling down on valid term or'd with invalid term:
             ddq = new DrillDownQuery(config);
@@ -252,8 +252,8 @@
             r = ds.Search(null, ddq, 10);
 
             Assert.AreEqual(0, r.Hits.TotalHits);
-            Assert.Null(r.Facets.GetTopChildren(10, "Publish Date"));
-            Assert.Null(r.Facets.GetTopChildren(10, "Author"));
+            Assert.IsNull(r.Facets.GetTopChildren(10, "Publish Date"));
+            Assert.IsNull(r.Facets.GetTopChildren(10, "Author"));
             IOUtils.Dispose(searcher.IndexReader, taxoReader, writer, taxoWriter, dir, taxoDir);
         }
 
@@ -369,7 +369,7 @@
 
             DrillDownQuery ddq = new DrillDownQuery(config);
             ddq.Add("dim", "a");
-            DrillSidewaysResult r = (new DrillSideways(searcher, config, taxoReader)).Search(null, ddq, 10);
+            DrillSidewaysResult r = new DrillSideways(searcher, config, taxoReader).Search(null, ddq, 10);
 
             Assert.AreEqual(3, r.Hits.TotalHits);
             Assert.AreEqual("dim=dim path=[] value=6 childCount=4\n  a (3)\n  b (1)\n  c (1)\n  d (1)\n", r.Facets.GetTopChildren(10, "dim").ToString());
@@ -442,7 +442,6 @@
         [Test]
         public virtual void TestRandom()
         {
-
             bool canUseDV = DefaultCodecSupportsSortedSet;
 
             while (aChance == 0.0)
@@ -543,7 +542,7 @@
             Directory td = NewDirectory();
 
             IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
-            iwc.SetInfoStream((InfoStream)InfoStream.NO_OUTPUT);
+            iwc.SetInfoStream(InfoStream.NO_OUTPUT);
             var w = new RandomIndexWriter(Random, d, iwc);
             var tw = new DirectoryTaxonomyWriter(td, OpenMode.CREATE);
             FacetsConfig config = new FacetsConfig();
@@ -664,7 +663,6 @@
 
             for (int iter = 0; iter < numIters; iter++)
             {
-
                 string contentToken = Random.Next(30) == 17 ? null : randomContentToken(true);
                 int numDrillDown = TestUtil.NextInt32(Random, 1, Math.Min(4, numDims));
                 if (Verbose)
@@ -755,7 +753,7 @@
                     {
                         Console.WriteLine("  only-even filter");
                     }
-                    filter = new FilterAnonymousInnerClassHelper(this);
+                    filter = new FilterAnonymousClass(this);
                 }
                 else
                 {
@@ -765,7 +763,7 @@
                 // Verify docs are always collected in order.  If we
                 // had an AssertingScorer it could catch it when
                 // Weight.scoresDocsOutOfOrder lies!:
-                (new DrillSideways(s, config, tr)).Search(ddq, new CollectorAnonymousInnerClassHelper(this, s));
+                new DrillSideways(s, config, tr).Search(ddq, new CollectorAnonymousClass(this));
 
                 // Also separately verify that DS respects the
                 // scoreSubDocsAtOnce method, to ensure that all
@@ -776,8 +774,8 @@
                     // drill-down values, because in that case it's
                     // easily possible for one of the DD terms to be on
                     // a future docID:
-                    new DrillSidewaysAnonymousInnerClassHelper(this, s, config, tr)
-                    .Search(ddq, new AssertingSubDocsAtOnceCollector());
+                    new DrillSidewaysAnonymousClass(this, s, config, tr)
+                        .Search(ddq, new AssertingSubDocsAtOnceCollector());
                 }
 
                 TestFacetResult expected = slowDrillSidewaysSearch(s, docs, contentToken, drillDowns, dimValues, filter);
@@ -790,7 +788,7 @@
                 }
                 else
                 {
-                    ds = new DrillSidewaysAnonymousInnerClassHelper2(this, s, config, tr, drillDowns);
+                    ds = new DrillSidewaysAnonymousClass2(this, s, config, tr);
                 }
 
                 // Retrieve all facets:
@@ -821,11 +819,11 @@
             IOUtils.Dispose(r, tr, w, tw, d, td);
         }
 
-        private class FilterAnonymousInnerClassHelper : Filter
+        private class FilterAnonymousClass : Filter
         {
             private readonly TestDrillSideways outerInstance;
 
-            public FilterAnonymousInnerClassHelper(TestDrillSideways outerInstance)
+            public FilterAnonymousClass(TestDrillSideways outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -846,16 +844,13 @@
             }
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly TestDrillSideways outerInstance;
 
-            private IndexSearcher s;
-
-            public CollectorAnonymousInnerClassHelper(TestDrillSideways outerInstance, IndexSearcher s)
+            public CollectorAnonymousClass(TestDrillSideways outerInstance)
             {
                 this.outerInstance = outerInstance;
-                this.s = s;
             }
 
             internal int lastDocID;
@@ -878,11 +873,11 @@
             public virtual bool AcceptsDocsOutOfOrder => false;
         }
 
-        private class DrillSidewaysAnonymousInnerClassHelper : DrillSideways
+        private class DrillSidewaysAnonymousClass : DrillSideways
         {
             private readonly TestDrillSideways outerInstance;
 
-            public DrillSidewaysAnonymousInnerClassHelper(TestDrillSideways outerInstance, IndexSearcher s, Lucene.Net.Facet.FacetsConfig config, TaxonomyReader tr)
+            public DrillSidewaysAnonymousClass(TestDrillSideways outerInstance, IndexSearcher s, FacetsConfig config, TaxonomyReader tr)
                 : base(s, config, tr)
             {
                 this.outerInstance = outerInstance;
@@ -891,17 +886,14 @@
             protected override bool ScoreSubDocsAtOnce => true;
         }
 
-        private class DrillSidewaysAnonymousInnerClassHelper2 : DrillSideways
+        private class DrillSidewaysAnonymousClass2 : DrillSideways
         {
             private readonly TestDrillSideways outerInstance;
 
-            private string[][] drillDowns;
-
-            public DrillSidewaysAnonymousInnerClassHelper2(TestDrillSideways outerInstance, IndexSearcher s, Lucene.Net.Facet.FacetsConfig config, TaxonomyReader tr, string[][] drillDowns)
+            public DrillSidewaysAnonymousClass2(TestDrillSideways outerInstance, IndexSearcher s, FacetsConfig config, TaxonomyReader tr)
                 : base(s, config, tr)
             {
                 this.outerInstance = outerInstance;
-                this.drillDowns = drillDowns;
             }
 
             protected override Facets BuildFacetsResult(FacetsCollector drillDowns, FacetsCollector[] drillSideways, string[] drillSidewaysDims)
@@ -924,7 +916,6 @@
                 {
                     return new MultiFacets(drillSidewaysFacets, drillDownFacets);
                 }
-
             }
         }
 
@@ -987,7 +978,7 @@
 
             // Naive (on purpose, to reduce bug in tester/gold):
             // sort all ids, then return top N slice:
-            new InPlaceMergeSorterAnonymousInnerClassHelper(this, counts, values, ids).Sort(0, ids.Length);
+            new InPlaceMergeSorterAnonymousClass(this, counts, values, ids).Sort(0, ids.Length);
 
             if (topN > ids.Length)
             {
@@ -1009,15 +1000,15 @@
             return topNIDs;
         }
 
-        private class InPlaceMergeSorterAnonymousInnerClassHelper : InPlaceMergeSorter
+        private class InPlaceMergeSorterAnonymousClass : InPlaceMergeSorter
         {
             private readonly TestDrillSideways outerInstance;
 
-            private int[] counts;
-            private string[] values;
-            private int[] ids;
+            private readonly int[] counts;
+            private readonly string[] values;
+            private readonly int[] ids;
 
-            public InPlaceMergeSorterAnonymousInnerClassHelper(TestDrillSideways outerInstance, int[] counts, string[] values, int[] ids)
+            public InPlaceMergeSorterAnonymousClass(TestDrillSideways outerInstance, int[] counts, string[] values, int[] ids)
             {
                 this.outerInstance = outerInstance;
                 this.counts = counts;
@@ -1049,7 +1040,7 @@
                 else
                 {
                     // ... then by label ascending:
-                    return (new BytesRef(values[ids[i]])).CompareTo(new BytesRef(values[ids[j]]));
+                    return new BytesRef(values[ids[i]]).CompareTo(new BytesRef(values[ids[j]]));
                 }
             }
 
@@ -1078,7 +1069,7 @@
                 {
                     continue;
                 }
-                if (onlyEven != null & (Convert.ToInt32(doc.id) & 1) != 0)
+                if (onlyEven != null & (Convert.ToInt32(doc.id, CultureInfo.InvariantCulture) & 1) != 0)
                 {
                     continue;
                 }
@@ -1195,7 +1186,7 @@
                 }
                 Assert.AreEqual(expected.Hits[i].id, s.Doc(actual.Hits.ScoreDocs[i].Doc).Get("id"));
                 // Score should be IDENTICAL:
-                Assert.AreEqual(scores[expected.Hits[i].id], actual.Hits.ScoreDocs[i].Score);
+                Assert.AreEqual(scores[expected.Hits[i].id].GetValueOrDefault(), actual.Hits.ScoreDocs[i].Score, 0.0f);
             }
 
             for (int dim = 0; dim < expected.Counts.Length; dim++)
@@ -1289,13 +1280,13 @@
                         string value = dimValues[dim][i];
                         if (expected.Counts[dim][i] != 0)
                         {
-                            Assert.True(actualValues.ContainsKey(value));
+                            Assert.IsTrue(actualValues.ContainsKey(value));
                             Assert.AreEqual(expected.Counts[dim][i], (int)actualValues[value]);
                             setCount++;
                         }
                         else
                         {
-                            Assert.False(actualValues.ContainsKey(value));
+                            Assert.IsFalse(actualValues.ContainsKey(value));
                         }
                     }
                     Assert.AreEqual(setCount, actualValues.Count);
diff --git a/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs b/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs
index 855a09b..105661c 100644
--- a/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs
+++ b/src/Lucene.Net.Tests.Facet/TestFacetsConfig.cs
@@ -1,10 +1,10 @@
-using Lucene.Net.Support;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Support;
 using NUnit.Framework;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -103,16 +103,16 @@
         [Test]
         public virtual void TestCustomDefault()
         {
-            FacetsConfig config = new FacetsConfigAnonymousInnerClassHelper(this);
+            FacetsConfig config = new FacetsConfigAnonymousClass(this);
 
-            Assert.True(config.GetDimConfig("foobar").IsHierarchical);
+            Assert.IsTrue(config.GetDimConfig("foobar").IsHierarchical);
         }
 
-        private class FacetsConfigAnonymousInnerClassHelper : FacetsConfig
+        private class FacetsConfigAnonymousClass : FacetsConfig
         {
             private readonly TestFacetsConfig outerInstance;
 
-            public FacetsConfigAnonymousInnerClassHelper(TestFacetsConfig outerInstance)
+            public FacetsConfigAnonymousClass(TestFacetsConfig outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -128,5 +128,4 @@
             }
         }
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Facet/TestMultipleIndexFields.cs b/src/Lucene.Net.Tests.Facet/TestMultipleIndexFields.cs
index 352e39b..8f5b498 100644
--- a/src/Lucene.Net.Tests.Facet/TestMultipleIndexFields.cs
+++ b/src/Lucene.Net.Tests.Facet/TestMultipleIndexFields.cs
@@ -1,10 +1,10 @@
-using System.Collections.Generic;
+// Lucene version compatibility level 4.8.1
+using System.Collections.Generic;
 using NUnit.Framework;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Facet
 {
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -45,16 +45,23 @@
     public class TestMultipleIndexFields : FacetTestCase
     {
 
-        private static readonly FacetField[] CATEGORIES = new FacetField[] { new FacetField("Author", "Mark Twain"), new FacetField("Author", "Stephen King"), new FacetField("Author", "Kurt Vonnegut"), new FacetField("Band", "Rock & Pop", "The Beatles"), new FacetField("Band", "Punk", "The Ramones"), new FacetField("Band", "Rock & Pop", "U2"), new FacetField("Band", "Rock & Pop", "REM"), new FacetField("Band", "Rock & Pop", "Dave Matthews Band"), new FacetField("Composer", "Bach") };
+        private static readonly FacetField[] CATEGORIES = new FacetField[] {
+            new FacetField("Author", "Mark Twain"),
+            new FacetField("Author", "Stephen King"),
+            new FacetField("Author", "Kurt Vonnegut"),
+            new FacetField("Band", "Rock & Pop", "The Beatles"),
+            new FacetField("Band", "Punk", "The Ramones"),
+            new FacetField("Band", "Rock & Pop", "U2"),
+            new FacetField("Band", "Rock & Pop", "REM"),
+            new FacetField("Band", "Rock & Pop", "Dave Matthews Band"),
+            new FacetField("Composer", "Bach")
+        };
 
-        private FacetsConfig Config
+        private FacetsConfig GetConfig()
         {
-            get
-            {
-                FacetsConfig config = new FacetsConfig();
-                config.SetHierarchical("Band", true);
-                return config;
-            }
+            FacetsConfig config = new FacetsConfig();
+            config.SetHierarchical("Band", true);
+            return config;
         }
 
         [Test]
@@ -64,10 +71,11 @@
             Directory taxoDir = NewDirectory();
 
             // create and open an index writer
-            var iw = new RandomIndexWriter(Random, indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)));
+            var iw = new RandomIndexWriter(Random, indexDir, NewIndexWriterConfig(
+                TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)));
             // create and open a taxonomy writer
             var tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
-            var config = Config;
+            var config = GetConfig();
 
             seedIndex(tw, iw, config);
 
@@ -97,11 +105,12 @@
             Directory taxoDir = NewDirectory();
 
             // create and open an index writer
-            RandomIndexWriter iw = new RandomIndexWriter(Random, indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)));
+            RandomIndexWriter iw = new RandomIndexWriter(Random, indexDir, NewIndexWriterConfig(
+                TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)));
             // create and open a taxonomy writer
             var tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
 
-            FacetsConfig config = Config;
+            FacetsConfig config = GetConfig();
             config.SetIndexFieldName("Author", "$author");
             seedIndex(tw, iw, config);
 
@@ -136,11 +145,12 @@
             Directory taxoDir = NewDirectory();
 
             // create and open an index writer
-            RandomIndexWriter iw = new RandomIndexWriter(Random, indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)));
+            RandomIndexWriter iw = new RandomIndexWriter(Random, indexDir, NewIndexWriterConfig(
+                TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)));
             // create and open a taxonomy writer
             var tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
 
-            FacetsConfig config = Config;
+            FacetsConfig config = GetConfig();
             config.SetIndexFieldName("Band", "$music");
             config.SetIndexFieldName("Composer", "$music");
             seedIndex(tw, iw, config);
@@ -192,11 +202,12 @@
             Directory taxoDir = NewDirectory();
 
             // create and open an index writer
-            var iw = new RandomIndexWriter(Random, indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)));
+            var iw = new RandomIndexWriter(Random, indexDir, NewIndexWriterConfig(
+                TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)));
             // create and open a taxonomy writer
             var tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
 
-            FacetsConfig config = Config;
+            FacetsConfig config = GetConfig();
             config.SetIndexFieldName("Band", "$bands");
             config.SetIndexFieldName("Composer", "$composers");
             seedIndex(tw, iw, config);
@@ -233,11 +244,12 @@
             Directory taxoDir = NewDirectory();
 
             // create and open an index writer
-            RandomIndexWriter iw = new RandomIndexWriter(Random, indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)));
+            RandomIndexWriter iw = new RandomIndexWriter(Random, indexDir, NewIndexWriterConfig(
+                TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)));
             // create and open a taxonomy writer
             ITaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
 
-            FacetsConfig config = Config;
+            FacetsConfig config = GetConfig();
             config.SetIndexFieldName("Band", "$music");
             config.SetIndexFieldName("Composer", "$music");
             config.SetIndexFieldName("Author", "$literature");
diff --git a/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs b/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs
index 5575c31..4f3f6e0 100644
--- a/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs
+++ b/src/Lucene.Net.Tests.Facet/TestRandomSamplingFacetsCollector.cs
@@ -1,26 +1,11 @@
-using NUnit.Framework;
+// Lucene version compatibility level 4.8.1
+using NUnit.Framework;
 using System;
 using System.Globalization;
 using Assert = Lucene.Net.TestFramework.Assert;
 
 namespace Lucene.Net.Facet
 {
-
-    using Directory = Lucene.Net.Store.Directory;
-    using DirectoryTaxonomyReader = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyReader;
-    using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
-    using Document = Lucene.Net.Documents.Document;
-    using FastTaxonomyFacetCounts = Lucene.Net.Facet.Taxonomy.FastTaxonomyFacetCounts;
-    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using MatchingDocs = Lucene.Net.Facet.FacetsCollector.MatchingDocs;
-    using MultiCollector = Lucene.Net.Search.MultiCollector;
-    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
-    using Store = Lucene.Net.Documents.Field.Store;
-    using StringField = Lucene.Net.Documents.StringField;
-    using Term = Lucene.Net.Index.Term;
-    using TermQuery = Lucene.Net.Search.TermQuery;
-
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
      * contributor license agreements.  See the NOTICE file distributed with
@@ -38,9 +23,23 @@
      * limitations under the License.
      */
 
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryTaxonomyReader = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyReader;
+    using DirectoryTaxonomyWriter = Lucene.Net.Facet.Taxonomy.Directory.DirectoryTaxonomyWriter;
+    using Document = Lucene.Net.Documents.Document;
+    using FastTaxonomyFacetCounts = Lucene.Net.Facet.Taxonomy.FastTaxonomyFacetCounts;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using MatchingDocs = Lucene.Net.Facet.FacetsCollector.MatchingDocs;
+    using MultiCollector = Lucene.Net.Search.MultiCollector;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Store = Lucene.Net.Documents.Field.Store;
+    using StringField = Lucene.Net.Documents.StringField;
+    using Term = Lucene.Net.Index.Term;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+
     public class TestRandomSamplingFacetsCollector : FacetTestCase
     {
-
         [Test]
         public virtual void TestRandomSampling()
         {
@@ -52,7 +51,7 @@
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                 this,
 #endif
-                Util.LuceneTestCase.Random, dir);
+                Random, dir);
 
             FacetsConfig config = new FacetsConfig();
 
@@ -78,7 +77,7 @@
             searcher.Search(new TermQuery(new Term("EvenOdd", "NeverMatches")), collectRandomZeroResults);
 
             // There should be no divisions by zero and no null result
-            Assert.NotNull(collectRandomZeroResults.GetMatchingDocs());
+            Assert.IsNotNull(collectRandomZeroResults.GetMatchingDocs());
 
             // There should be no results at all
             foreach (MatchingDocs doc in collectRandomZeroResults.GetMatchingDocs())
@@ -119,9 +118,9 @@
 
             // we should have five children, but there is a small chance we have fewer.
             // (see above).
-            Assert.True(random10Result.ChildCount <= maxNumChildren);
+            Assert.IsTrue(random10Result.ChildCount <= maxNumChildren);
             // there should be one child at least.
-            Assert.True(random10Result.ChildCount >= 1);
+            Assert.IsTrue(random10Result.ChildCount >= 1);
 
             // now calculate some statistics to determine if the sampled result is 'ok'.
             // because random sampling is used, the results will vary each time.
@@ -146,12 +145,10 @@
 
             // the average should be in the range and the standard deviation should not
             // be too great
-            Assert.True(sigma < 200);
-            Assert.True(targetMu - 3 * sigma < mu && mu < targetMu + 3 * sigma);
+            Assert.IsTrue(sigma < 200);
+            Assert.IsTrue(targetMu - 3 * sigma < mu && mu < targetMu + 3 * sigma);
 
             IOUtils.Dispose(searcher.IndexReader, taxoReader, dir, taxoDir);
         }
-
     }
-
 }
\ No newline at end of file
diff --git a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterPhraseTest.cs b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterPhraseTest.cs
index 1485728..8bb2ef4 100644
--- a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterPhraseTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterPhraseTest.cs
@@ -110,7 +110,7 @@
                     new SpanTermQuery(new Term(FIELD, "fox")),
                     new SpanTermQuery(new Term(FIELD, "jumped")) }, 0, true);
                 FixedBitSet bitset = new FixedBitSet(indexReader.MaxDoc);
-                indexSearcher.Search(phraseQuery, new ConcurrentSpanCollectorAnonymousHelper(this, bitset));
+                indexSearcher.Search(phraseQuery, new ConcurrentSpanCollectorAnonymousClass(this, bitset));
 
                 assertEquals(1, bitset.Cardinality());
                 int maxDoc = indexReader.MaxDoc;
@@ -135,11 +135,11 @@
             }
         }
 
-        internal class ConcurrentSpanCollectorAnonymousHelper : ICollector
+        private class ConcurrentSpanCollectorAnonymousClass : ICollector
         {
             private readonly HighlighterPhraseTest outerInstance;
             private readonly FixedBitSet bitset;
-            public ConcurrentSpanCollectorAnonymousHelper(HighlighterPhraseTest outerInstance, FixedBitSet bitset)
+            public ConcurrentSpanCollectorAnonymousClass(HighlighterPhraseTest outerInstance, FixedBitSet bitset)
             {
                 this.outerInstance = outerInstance;
                 this.bitset = bitset;
diff --git a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs
index 66d2194..b6b666e 100644
--- a/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/Highlight/HighlighterTest.cs
@@ -131,7 +131,7 @@
             assertEquals("<B>This</B> piece of text refers to Kennedy at the beginning then has a longer piece of text that is <B>very</B>", fragment);
         }
 
-        internal class TestHighlightUnknowQueryAnonymousHelper : Query
+        private class TestHighlightUnknowQueryAnonymousClass : Query
         {
             public override Query Rewrite(IndexReader reader)
             {
@@ -161,7 +161,7 @@
         [Test]
         public void TestHighlightUnknowQueryAfterRewrite()
         {
-            Query query = new TestHighlightUnknowQueryAnonymousHelper();
+            Query query = new TestHighlightUnknowQueryAnonymousClass();
 
             Analyzer analyzer = new MockAnalyzer(Random, MockTokenizer.SIMPLE, true);
 
@@ -1510,7 +1510,7 @@
             // for
             // highlighting but scores a single fragment for selection
 
-            Highlighter highlighter = new Highlighter(this, new SimpleHTMLEncoder(), new TestEncodingScorerAnonymousHelper(this));
+            Highlighter highlighter = new Highlighter(this, new SimpleHTMLEncoder(), new TestEncodingScorerAnonymousClass(this));
 
             highlighter.TextFragmenter = (new SimpleFragmenter(2000));
             TokenStream tokenStream = analyzer.GetTokenStream(FIELD_NAME, rawDocContent);
@@ -1543,11 +1543,11 @@
             assertEquals("XHTML Encoding should have worked:", rawDocContent, decodedSnippet);
         }
 
-        internal class TestEncodingScorerAnonymousHelper : IScorer
+        private class TestEncodingScorerAnonymousClass : IScorer
         {
             private readonly HighlighterTest outerInstance;
 
-            public TestEncodingScorerAnonymousHelper(HighlighterTest outerInstance)
+            public TestEncodingScorerAnonymousClass(HighlighterTest outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -1624,13 +1624,13 @@
         protected TokenStream getTS2()
         {
             // String s = "Hi-Speed10 foo";
-            return new TS2TokenStreamAnonymousHelper();
+            return new TS2TokenStreamAnonymousClass();
         }
 
 
-        protected sealed class TS2TokenStreamAnonymousHelper : TokenStream
+        private sealed class TS2TokenStreamAnonymousClass : TokenStream
         {
-            public TS2TokenStreamAnonymousHelper()
+            public TS2TokenStreamAnonymousClass()
             {
                 termAtt = AddAttribute<ICharTermAttribute>();
                 posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
@@ -1687,12 +1687,12 @@
         protected TokenStream getTS2a()
         {
             // String s = "Hi-Speed10 foo";
-            return new TS2aTokenStreamAnonymousHelper();
+            return new TS2aTokenStreamAnonymousClass();
         }
 
-        protected sealed class TS2aTokenStreamAnonymousHelper : TokenStream
+        private sealed class TS2aTokenStreamAnonymousClass : TokenStream
         {
-            public TS2aTokenStreamAnonymousHelper()
+            public TS2aTokenStreamAnonymousClass()
             {
                 termAtt = AddAttribute<ICharTermAttribute>();
                 posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
diff --git a/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs b/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs
index 7269b53..039564c 100644
--- a/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/Highlight/OffsetLimitTokenFilterTest.cs
@@ -51,10 +51,10 @@
             AssertTokenStreamContents(filter, new String[] {"short", "toolong",
                 "evenmuchlongertext"});
 
-            CheckOneTerm(new AnalyzerAnonymousHelper(), "llenges", "llenges");
+            CheckOneTerm(new AnalyzerAnonymousClass(), "llenges", "llenges");
         }
 
-        internal class AnalyzerAnonymousHelper : Analyzer
+        private class AnalyzerAnonymousClass : Analyzer
         {
             protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
diff --git a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FastVectorHighlighterTest.cs b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FastVectorHighlighterTest.cs
index 5b0320b..e2acd5a 100644
--- a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FastVectorHighlighterTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FastVectorHighlighterTest.cs
@@ -596,7 +596,7 @@
                   token("red", 0, 0, 3)
                 ), matched));
 
-            Analyzer analyzer = new AnalyzerWrapperAnonymousHelper();
+            Analyzer analyzer = new AnalyzerWrapperAnonymousClass();
 
             Directory dir = NewDirectory();
             IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
@@ -642,12 +642,12 @@
             dir.Dispose();
         }
 
-        internal class AnalyzerWrapperAnonymousHelper : AnalyzerWrapper
+        private class AnalyzerWrapperAnonymousClass : AnalyzerWrapper
         {
             IDictionary<String, Analyzer> fieldAnalyzers = new JCG.SortedDictionary<String, Analyzer>(StringComparer.Ordinal);
 
 #pragma warning disable 612, 618 // LUCENENET NOTE: Class calls obsolete (default) constructor
-            public AnalyzerWrapperAnonymousHelper()
+            public AnalyzerWrapperAnonymousClass()
             {
                 fieldAnalyzers.Put("field", new MockAnalyzer(Random, MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET));
                 fieldAnalyzers.Put("field_exact", new MockAnalyzer(Random));
diff --git a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldQueryTest.cs b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldQueryTest.cs
index 24626a4..41d11fe 100644
--- a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldQueryTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/FieldQueryTest.cs
@@ -990,7 +990,7 @@
             phraseCandidate.Add(new TermInfo("defg", 0, 12, 0, 1));
             assertNotNull(fq.SearchPhrase(F, phraseCandidate));
         }
-        internal class TestStopRewriteQueryAnonymousHelper : Query
+        private class TestStopRewriteQueryAnonymousClass : Query
         {
             public override string ToString(string field)
             {
@@ -1001,13 +1001,13 @@
         [Test]
         public void TestStopRewrite()
         {
-            Query q = new TestStopRewriteQueryAnonymousHelper();
+            Query q = new TestStopRewriteQueryAnonymousClass();
             make1d1fIndex("a");
             assertNotNull(reader);
             new FieldQuery(q, reader, true, true);
         }
 
-        internal class TestFlattenFilteredQueryFilterAnonymousHelper : Filter
+        private class TestFlattenFilteredQueryFilterAnonymousClass : Filter
         {
             public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
             {
@@ -1019,7 +1019,7 @@
         public void TestFlattenFilteredQuery()
         {
             initBoost();
-            Query query = new FilteredQuery(pqF("A"), new TestFlattenFilteredQueryFilterAnonymousHelper());
+            Query query = new FilteredQuery(pqF("A"), new TestFlattenFilteredQueryFilterAnonymousClass());
             query.Boost = (boost);
             FieldQuery fq = new FieldQuery(query, true, true);
             ISet<Query> flatQueries = new JCG.HashSet<Query>();
diff --git a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/IndexTimeSynonymTest.cs b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/IndexTimeSynonymTest.cs
index 7f36ba1..5cc625b 100644
--- a/src/Lucene.Net.Tests.Highlighter/VectorHighlight/IndexTimeSynonymTest.cs
+++ b/src/Lucene.Net.Tests.Highlighter/VectorHighlight/IndexTimeSynonymTest.cs
@@ -323,11 +323,11 @@
             return token;
         }
 
-        internal sealed class TokenizerAnonymousHelper : Tokenizer
+        private sealed class TokenizerAnonymousClass : Tokenizer
         {
             private readonly Token[] tokens;
 
-            public TokenizerAnonymousHelper(AttributeFactory factory, TextReader reader, Token[] tokens)
+            public TokenizerAnonymousClass(AttributeFactory factory, TextReader reader, Token[] tokens)
                 : base(factory, reader)
             {
                 reusableToken = AddAttribute<ICharTermAttribute>();
@@ -362,7 +362,7 @@
 
             protected internal override TokenStreamComponents CreateComponents(String fieldName, TextReader reader)
             {
-                Tokenizer ts = new TokenizerAnonymousHelper(Token.TOKEN_ATTRIBUTE_FACTORY, reader, tokens);
+                Tokenizer ts = new TokenizerAnonymousClass(Token.TOKEN_ATTRIBUTE_FACTORY, reader, tokens);
                 return new TokenStreamComponents(ts);
             }
         }
diff --git a/src/Lucene.Net.Tests.Join/TestBlockJoin.cs b/src/Lucene.Net.Tests.Join/TestBlockJoin.cs
index ac4199d..d7e2fe9 100644
--- a/src/Lucene.Net.Tests.Join/TestBlockJoin.cs
+++ b/src/Lucene.Net.Tests.Join/TestBlockJoin.cs
@@ -1,4 +1,5 @@
-using J2N.Collections.Generic.Extensions;
+// Lucene version compatibility level 4.8.1
+using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
 using Lucene.Net.Documents.Extensions;
@@ -12,6 +13,7 @@
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
+using System.Globalization;
 using System.Linq;
 using System.Text;
 using Console = Lucene.Net.Util.SystemConsole;
@@ -355,7 +357,8 @@
 
 
             TermQuery us = new TermQuery(new Term("country", "United States"));
-            assertEquals("@ US we have java and ruby", 2, s.Search(new ToChildBlockJoinQuery(us, parentsFilter, Random.NextBoolean()), 10).TotalHits);
+            assertEquals("@ US we have java and ruby", 2,
+                s.Search(new ToChildBlockJoinQuery(us, parentsFilter, Random.NextBoolean()), 10).TotalHits);
 
             assertEquals("java skills in US", 1, s.Search(new ToChildBlockJoinQuery(us, parentsFilter, Random.NextBoolean()), Skill("java"), 10).TotalHits);
 
@@ -438,7 +441,11 @@
             w.Commit();
             IndexSearcher s = NewSearcher(DirectoryReader.Open(dir));
 
-            ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(NumericRangeQuery.NewInt32Range("year", 1990, 2010, true, true), new FixedBitSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("docType", "resume")))), ScoreMode.Total);
+            ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(
+                NumericRangeQuery.NewInt32Range("year", 1990, 2010, true, true),
+                new FixedBitSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("docType", "resume")))),
+                ScoreMode.Total
+            );
 
             TopDocs topDocs = s.Search(q, 10);
             assertEquals(2, topDocs.TotalHits);
@@ -465,7 +472,6 @@
 
         private string[][] GetRandomFields(int maxUniqueValues)
         {
-
             string[][] fields = new string[TestUtil.NextInt32(Random, 2, 4)][];
             for (int fieldID = 0; fieldID < fields.Length; fieldID++)
             {
@@ -531,7 +537,7 @@
             Directory joinDir = NewDirectory();
 
             int numParentDocs = TestUtil.NextInt32(Random, 100 * RandomMultiplier, 300 * RandomMultiplier);
-            //final int numParentDocs = 30;
+            //int numParentDocs = 30;
 
             // Values for parent fields:
             string[][] parentFields = GetRandomFields(numParentDocs / 2);
@@ -693,7 +699,8 @@
                 if (Random.Next(3) == 2)
                 {
                     int childFieldID = Random.Next(childFields.Length);
-                    childQuery = new TermQuery(new Term("child" + childFieldID, childFields[childFieldID][Random.Next(childFields[childFieldID].Length)]));
+                    childQuery = new TermQuery(new Term("child" + childFieldID,
+                        childFields[childFieldID][Random.Next(childFields[childFieldID].Length)]));
                 }
                 else if (Random.Next(3) == 2)
                 {
@@ -727,7 +734,8 @@
 
                     bq.Add(new TermQuery(RandomChildTerm(childFields[0])), Occur.MUST);
                     int childFieldID = TestUtil.NextInt32(Random, 1, childFields.Length - 1);
-                    bq.Add(new TermQuery(new Term("child" + childFieldID, childFields[childFieldID][Random.Next(childFields[childFieldID].Length)])), Random.NextBoolean() ? Occur.MUST : Occur.MUST_NOT);
+                    bq.Add(new TermQuery(new Term("child" + childFieldID, childFields[childFieldID][Random.Next(childFields[childFieldID].Length)])),
+                        Random.NextBoolean() ? Occur.MUST : Occur.MUST_NOT);
                 }
 
                 int x = Random.Next(4);
@@ -910,7 +918,7 @@
                     {
                         Explanation explanation = joinS.Explain(childJoinQuery, hit.Doc);
                         Document document = joinS.Doc(hit.Doc - 1);
-                        int childId = Convert.ToInt32(document.Get("childID"));
+                        int childId = Convert.ToInt32(document.Get("childID"), CultureInfo.InvariantCulture);
                         assertTrue(explanation.IsMatch);
                         assertEquals(hit.Score, explanation.Value, 0.0f);
                         assertEquals(string.Format("Score based on child doc range from {0} to {1}", hit.Doc - 1 - childId, hit.Doc - 1), explanation.Description);
@@ -1060,7 +1068,8 @@
                 // Search join index:
                 if (Verbose)
                 {
-                    Console.WriteLine("TEST: run top down join query=" + childJoinQuery2 + " filter=" + childJoinFilter2 + " sort=" + childSort2);
+                    Console.WriteLine("TEST: run top down join query=" + childJoinQuery2 +
+                        " filter=" + childJoinFilter2 + " sort=" + childSort2);
                 }
                 TopDocs joinResults2 = joinS.Search(childJoinQuery2, childJoinFilter2, joinR.NumDocs, childSort2);
                 if (Verbose)
@@ -1093,7 +1102,8 @@
                 ScoreDoc joinHit = joinResults.ScoreDocs[hitCount];
                 Document doc1 = r.Document(hit.Doc);
                 Document doc2 = joinR.Document(joinHit.Doc);
-                assertEquals("hit " + hitCount + " differs", doc1.Get("childID"), doc2.Get("childID"));
+                assertEquals("hit " + hitCount + " differs",
+                    doc1.Get("childID"), doc2.Get("childID"));
                 // don't compare scores -- they are expected to differ
 
 
@@ -1306,7 +1316,6 @@
         [Test]
         public void TestGetTopGroups()
         {
-
             Directory dir = NewDirectory();
             RandomIndexWriter w = new RandomIndexWriter(
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
@@ -1350,7 +1359,7 @@
             getTopGroupsResults[0] = c.GetTopGroups(childJoinQuery, null, 0, 10, 0, true);
             getTopGroupsResults[1] = c.GetTopGroupsWithAllChildDocs(childJoinQuery, null, 0, 0, true);
 
-            foreach (TopGroups<int> results in getTopGroupsResults)
+            foreach (ITopGroups<int> results in getTopGroupsResults)
             {
                 assertFalse(float.IsNaN(results.MaxScore));
                 assertEquals(2, results.TotalGroupedHitCount);
@@ -1369,7 +1378,7 @@
                 {
                     Document childDoc = s.Doc(scoreDoc.Doc);
                     assertEquals("java", childDoc.Get("skill"));
-                    int year = Convert.ToInt32(childDoc.Get("year"));
+                    int year = Convert.ToInt32(childDoc.Get("year"), CultureInfo.InvariantCulture);
                     assertTrue(year >= 2006 && year <= 2011);
                 }
             }
@@ -1393,7 +1402,7 @@
             {
                 Document childDoc = s.Doc(scoreDoc.Doc);
                 assertEquals("java", childDoc.Get("skill"));
-                int year = Convert.ToInt32(childDoc.Get("year"));
+                int year = Convert.ToInt32(childDoc.Get("year"), CultureInfo.InvariantCulture);
                 assertTrue(year >= 2006 && year <= 2011);
             }
 
diff --git a/src/Lucene.Net.Tests.Join/TestBlockJoinSorting.cs b/src/Lucene.Net.Tests.Join/TestBlockJoinSorting.cs
index 7a9b3bf..38aa343 100644
--- a/src/Lucene.Net.Tests.Join/TestBlockJoinSorting.cs
+++ b/src/Lucene.Net.Tests.Join/TestBlockJoinSorting.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Analysis;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
 using Lucene.Net.Index.Extensions;
@@ -34,7 +35,8 @@
         public void TestNestedSorting()
         {
             Directory dir = NewDirectory();
-            RandomIndexWriter w = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
+            RandomIndexWriter w = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT,
+                new MockAnalyzer(Random)).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
 
             IList<Document> docs = new List<Document>();
             Document document = new Document();
@@ -248,7 +250,10 @@
 
             // Sort by field descending, order last, sort filter (filter_1:T)
             childFilter = new QueryWrapperFilter(new TermQuery((new Term("filter_1", "T"))));
-            query = new ToParentBlockJoinQuery(new FilteredQuery(new MatchAllDocsQuery(), childFilter), new FixedBitSetCachingWrapperFilter(parentFilter), ScoreMode.None);
+            query = new ToParentBlockJoinQuery(
+                new FilteredQuery(new MatchAllDocsQuery(), childFilter),
+                new FixedBitSetCachingWrapperFilter(parentFilter),
+                ScoreMode.None);
             sortField = new ToParentBlockJoinSortField("field2", SortFieldType.STRING, true, Wrap(parentFilter), Wrap(childFilter));
             sort = new Sort(sortField);
             topDocs = searcher.Search(query, 5, sort);
diff --git a/src/Lucene.Net.Tests.Join/TestBlockJoinValidation.cs b/src/Lucene.Net.Tests.Join/TestBlockJoinValidation.cs
index 48c894d..9d7f6e6 100644
--- a/src/Lucene.Net.Tests.Join/TestBlockJoinValidation.cs
+++ b/src/Lucene.Net.Tests.Join/TestBlockJoinValidation.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Analysis;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
 using Lucene.Net.Join;
@@ -31,7 +32,6 @@
 
     public class TestBlockJoinValidation : LuceneTestCase
     {
-
         public const int AMOUNT_OF_SEGMENTS = 5;
         public const int AMOUNT_OF_PARENT_DOCS = 10;
         public const int AMOUNT_OF_CHILD_DOCS = 5;
@@ -60,13 +60,6 @@
             parentsFilter = new FixedBitSetCachingWrapperFilter(new QueryWrapperFilter(new WildcardQuery(new Term("parent", "*"))));
         }
 
-        [TearDown]
-        public override void TearDown()
-        {
-            indexReader.Dispose();
-            directory.Dispose();
-        }
-
         [Test]
         public void TestNextDocValidationForToParentBjq()
         {
@@ -126,6 +119,13 @@
             StringAssert.Contains(ToChildBlockJoinQuery.INVALID_QUERY_MESSAGE, ex.Message);
         }
 
+        [TearDown]
+        public override void TearDown()
+        {
+            indexReader.Dispose();
+            directory.Dispose();
+        }
+
         private IList<Document> CreateDocsForSegment(int segmentNumber)
         {
             IList<IList<Document>> blocks = new List<IList<Document>>(AMOUNT_OF_PARENT_DOCS);
@@ -185,7 +185,7 @@
         private static Query CreateChildrenQueryWithOneParent(int childNumber)
         {
             TermQuery childQuery = new TermQuery(new Term("child", CreateFieldValue(childNumber)));
-            Query randomParentQuery = new TermQuery(new Term("id", CreateFieldValue(RandomParentId)));
+            Query randomParentQuery = new TermQuery(new Term("id", CreateFieldValue(GetRandomParentId())));
             BooleanQuery childrenQueryWithRandomParent = new BooleanQuery();
             childrenQueryWithRandomParent.Add(new BooleanClause(childQuery, Occur.SHOULD));
             childrenQueryWithRandomParent.Add(new BooleanClause(randomParentQuery, Occur.SHOULD));
@@ -195,19 +195,19 @@
         private static Query CreateParentsQueryWithOneChild(int randomChildNumber)
         {
             BooleanQuery childQueryWithRandomParent = new BooleanQuery();
-            Query parentsQuery = new TermQuery(new Term("parent", CreateFieldValue(RandomParentNumber)));
+            Query parentsQuery = new TermQuery(new Term("parent", CreateFieldValue(GetRandomParentNumber())));
             childQueryWithRandomParent.Add(new BooleanClause(parentsQuery, Occur.SHOULD));
             childQueryWithRandomParent.Add(new BooleanClause(RandomChildQuery(randomChildNumber), Occur.SHOULD));
             return childQueryWithRandomParent;
         }
 
-        private static int RandomParentId => Random.Next(AMOUNT_OF_PARENT_DOCS*AMOUNT_OF_SEGMENTS);
+        private static int GetRandomParentId() => Random.Next(AMOUNT_OF_PARENT_DOCS*AMOUNT_OF_SEGMENTS);
 
-        private static int RandomParentNumber => Random.Next(AMOUNT_OF_PARENT_DOCS);
+        private static int GetRandomParentNumber() => Random.Next(AMOUNT_OF_PARENT_DOCS);
 
         private static Query RandomChildQuery(int randomChildNumber)
         {
-            return new TermQuery(new Term("id", CreateFieldValue(RandomParentId, randomChildNumber)));
+            return new TermQuery(new Term("id", CreateFieldValue(GetRandomParentId(), randomChildNumber)));
         }
 
         private static int GetRandomChildNumber(int notLessThan)
diff --git a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs
index 29f7f8d..9d0b606 100644
--- a/src/Lucene.Net.Tests.Join/TestJoinUtil.cs
+++ b/src/Lucene.Net.Tests.Join/TestJoinUtil.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Analysis;
+// Lucene version compatibility level 4.8.1
+using Lucene.Net.Analysis;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
@@ -10,6 +11,7 @@
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
+using System.Globalization;
 using Console = Lucene.Net.Util.SystemConsole;
 using JCG = J2N.Collections.Generic;
 
@@ -129,7 +131,7 @@
         public void TestOverflowTermsWithScoreCollectorRandom()
         {
             var scoreModeLength = Enum.GetNames(typeof(ScoreMode)).Length;
-            Test300spartans(Random.NextBoolean(), (ScoreMode) Random.Next(scoreModeLength));
+            Test300spartans(Random.NextBoolean(), (ScoreMode)Random.Next(scoreModeLength));
         }
 
         protected virtual void Test300spartans(bool multipleValues, ScoreMode scoreMode)
@@ -249,13 +251,13 @@
             bq.Add(joinQuery, Occur.SHOULD);
             bq.Add(new TermQuery(new Term("id", "3")), Occur.SHOULD);
 
-            indexSearcher.Search(bq, new CollectorAnonymousInnerClassHelper());
+            indexSearcher.Search(bq, new CollectorAnonymousClass());
 
             indexSearcher.IndexReader.Dispose();
             dir.Dispose();
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             internal bool sawFive;
 
@@ -463,7 +465,7 @@
                     FixedBitSet actualResult = new FixedBitSet(indexSearcher.IndexReader.MaxDoc);
                     TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.Create(10, false);
                     indexSearcher.Search(joinQuery,
-                        new CollectorAnonymousInnerClassHelper2(scoreDocsInOrder, actualResult,
+                        new CollectorAnonymousClass2(scoreDocsInOrder, actualResult,
                             topScoreDocCollector));
                     // Asserting bit set...
                     if (Verbose)
@@ -502,8 +504,8 @@
                     {
                         if (Verbose)
                         {
-                            string.Format("Expected doc: {0} | Actual doc: {1}\n", expectedTopDocs.ScoreDocs[i].Doc, actualTopDocs.ScoreDocs[i].Doc);
-                            string.Format("Expected score: {0} | Actual score: {1}\n", expectedTopDocs.ScoreDocs[i].Score, actualTopDocs.ScoreDocs[i].Score);
+                            Console.WriteLine(string.Format(CultureInfo.InvariantCulture, "Expected doc: {0} | Actual doc: {1}\n", expectedTopDocs.ScoreDocs[i].Doc, actualTopDocs.ScoreDocs[i].Doc));
+                            Console.WriteLine(string.Format(CultureInfo.InvariantCulture, "Expected score: {0} | Actual score: {1}\n", expectedTopDocs.ScoreDocs[i].Score, actualTopDocs.ScoreDocs[i].Score));
                         }
                         assertEquals(expectedTopDocs.ScoreDocs[i].Doc, actualTopDocs.ScoreDocs[i].Doc);
                         assertEquals(expectedTopDocs.ScoreDocs[i].Score, actualTopDocs.ScoreDocs[i].Score, 0.0f);
@@ -516,13 +518,13 @@
             }
         }
 
-        private class CollectorAnonymousInnerClassHelper2 : ICollector
+        private class CollectorAnonymousClass2 : ICollector
         {
             private bool scoreDocsInOrder;
             private FixedBitSet actualResult;
             private TopScoreDocCollector topScoreDocCollector;
 
-            public CollectorAnonymousInnerClassHelper2(bool scoreDocsInOrder,
+            public CollectorAnonymousClass2(bool scoreDocsInOrder,
                 FixedBitSet actualResult,
                 TopScoreDocCollector topScoreDocCollector)
             {
@@ -554,8 +556,7 @@
             public virtual bool AcceptsDocsOutOfOrder => scoreDocsInOrder;
         }
         
-        private IndexIterationContext CreateContext(int nDocs, RandomIndexWriter writer, bool multipleValuesPerDocument,
-            bool scoreDocsInOrder)
+        private IndexIterationContext CreateContext(int nDocs, RandomIndexWriter writer, bool multipleValuesPerDocument, bool scoreDocsInOrder)
         {
             return CreateContext(nDocs, writer, writer, multipleValuesPerDocument, scoreDocsInOrder);
         }
@@ -564,7 +565,7 @@
             bool multipleValuesPerDocument, bool scoreDocsInOrder)
         {
             IndexIterationContext context = new IndexIterationContext();
-            int numRandomValues = nDocs/2;
+            int numRandomValues = nDocs / 2;
             context.RandomUniqueValues = new string[numRandomValues];
             ISet<string> trackSet = new JCG.HashSet<string>();
             context.RandomFrom = new bool[numRandomValues];
@@ -585,7 +586,7 @@
             RandomDoc[] docs = new RandomDoc[nDocs];
             for (int i = 0; i < nDocs; i++)
             {
-                string id = Convert.ToString(i);
+                string id = Convert.ToString(i, CultureInfo.InvariantCulture);
                 int randomI = Random.Next(context.RandomUniqueValues.Length);
                 string value = context.RandomUniqueValues[randomI];
                 Document document = new Document();
@@ -678,12 +679,12 @@
                 if (multipleValuesPerDocument)
                 {
                     fromSearcher.Search(new TermQuery(new Term("value", uniqueRandomValue)),
-                        new CollectorAnonymousInnerClassHelper3(fromField, joinValueToJoinScores));
+                        new CollectorAnonymousClass3(fromField, joinValueToJoinScores));
                 }
                 else
                 {
                     fromSearcher.Search(new TermQuery(new Term("value", uniqueRandomValue)),
-                        new CollectorAnonymousInnerClassHelper4(fromField, joinValueToJoinScores));
+                        new CollectorAnonymousClass4(fromField, joinValueToJoinScores));
                 }
 
                 IDictionary<int, JoinScore> docToJoinScore = new Dictionary<int, JoinScore>();
@@ -708,9 +709,7 @@
                                     docsEnum = termsEnum.Docs(slowCompositeReader.LiveDocs, docsEnum, DocsFlags.NONE);
                                     JoinScore joinScore = joinValueToJoinScores[joinValue];
 
-                                    for (int doc = docsEnum.NextDoc();
-                                        doc != DocIdSetIterator.NO_MORE_DOCS;
-                                        doc = docsEnum.NextDoc())
+                                    for (int doc = docsEnum.NextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = docsEnum.NextDoc())
                                     {
                                         // First encountered join value determines the score.
                                         // Something to keep in mind for many-to-many relations.
@@ -726,14 +725,14 @@
                     else
                     {
                         toSearcher.Search(new MatchAllDocsQuery(),
-                            new CollectorAnonymousInnerClassHelper5(toField, joinValueToJoinScores,
+                            new CollectorAnonymousClass5(toField, joinValueToJoinScores,
                                 docToJoinScore));
                     }
                 }
                 else
                 {
                     toSearcher.Search(new MatchAllDocsQuery(),
-                        new CollectorAnonymousInnerClassHelper6(toField, joinValueToJoinScores,
+                        new CollectorAnonymousClass6(toField, joinValueToJoinScores,
                             docToJoinScore));
                 }
                 queryVals[uniqueRandomValue] = docToJoinScore;
@@ -745,12 +744,12 @@
             return context;
         }
 
-        private class CollectorAnonymousInnerClassHelper3 : ICollector
+        private class CollectorAnonymousClass3 : ICollector
         {
             private readonly string fromField;
             private readonly IDictionary<BytesRef, JoinScore> joinValueToJoinScores;
 
-            public CollectorAnonymousInnerClassHelper3(string fromField,
+            public CollectorAnonymousClass3(string fromField,
                 IDictionary<BytesRef, JoinScore> joinValueToJoinScores)
             {
                 this.fromField = fromField;
@@ -791,12 +790,12 @@
             public virtual bool AcceptsDocsOutOfOrder => false;
         }
 
-        private class CollectorAnonymousInnerClassHelper4 : ICollector
+        private class CollectorAnonymousClass4 : ICollector
         {
             private readonly string fromField;
             private readonly IDictionary<BytesRef, JoinScore> joinValueToJoinScores;
 
-            public CollectorAnonymousInnerClassHelper4(string fromField,
+            public CollectorAnonymousClass4(string fromField,
                 IDictionary<BytesRef, JoinScore> joinValueToJoinScores)
             {
                 this.fromField = fromField;
@@ -840,7 +839,7 @@
             public virtual bool AcceptsDocsOutOfOrder => false;
         }
 
-        private class CollectorAnonymousInnerClassHelper5 : ICollector
+        private class CollectorAnonymousClass5 : ICollector
         {
             private readonly string toField;
             private readonly IDictionary<BytesRef, JoinScore> joinValueToJoinScores;
@@ -850,7 +849,7 @@
             private readonly BytesRef scratch = new BytesRef();
             private int docBase;
 
-            public CollectorAnonymousInnerClassHelper5(
+            public CollectorAnonymousClass5(
                 string toField, IDictionary<BytesRef, JoinScore> joinValueToJoinScores, 
                 IDictionary<int, JoinScore> docToJoinScore)
             {
@@ -893,7 +892,7 @@
             }
         }
 
-        private class CollectorAnonymousInnerClassHelper6 : ICollector
+        private class CollectorAnonymousClass6 : ICollector
         {
             private readonly string toField;
             private readonly IDictionary<BytesRef, JoinScore> joinValueToJoinScores;
@@ -903,7 +902,7 @@
             private int docBase;
             private readonly BytesRef spare = new BytesRef();
 
-            public CollectorAnonymousInnerClassHelper6(
+            public CollectorAnonymousClass6(
                 string toField, 
                 IDictionary<BytesRef, JoinScore> joinValueToJoinScores, 
                 IDictionary<int, JoinScore> docToJoinScore)
@@ -936,7 +935,9 @@
             }
         }
 
-        private TopDocs CreateExpectedTopDocs(string queryValue, bool from, ScoreMode scoreMode,
+        private TopDocs CreateExpectedTopDocs(string queryValue,
+            bool from,
+            ScoreMode scoreMode,
             IndexIterationContext context)
         {
             var hitsToJoinScores = @from
@@ -1071,7 +1072,7 @@
                     case ScoreMode.Total:
                         return total;
                     case ScoreMode.Avg:
-                        return total/count;
+                        return total / count;
                     case ScoreMode.Max:
                         return maxScore;
                 }
diff --git a/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs b/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs
index 163f82e..16805f3 100644
--- a/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs
+++ b/src/Lucene.Net.Tests.Queries/TestCustomScoreQuery.cs
@@ -101,12 +101,12 @@
 
             protected override CustomScoreProvider GetCustomScoreProvider(AtomicReaderContext context)
             {
-                return new CustomScoreProviderAnonymousInnerClassHelper(context);
+                return new CustomScoreProviderAnonymousClass(context);
             }
 
-            private class CustomScoreProviderAnonymousInnerClassHelper : CustomScoreProvider
+            private class CustomScoreProviderAnonymousClass : CustomScoreProvider
             {
-                public CustomScoreProviderAnonymousInnerClassHelper(AtomicReaderContext context) : base(context)
+                public CustomScoreProviderAnonymousClass(AtomicReaderContext context) : base(context)
                 {
                 }
 
@@ -141,12 +141,12 @@
 
             protected override CustomScoreProvider GetCustomScoreProvider(AtomicReaderContext context)
             {
-                return new CustomScoreProviderAnonymousInnerClassHelper(context);
+                return new CustomScoreProviderAnonymousClass(context);
             }
 
-            private class CustomScoreProviderAnonymousInnerClassHelper : CustomScoreProvider
+            private class CustomScoreProviderAnonymousClass : CustomScoreProvider
             {
-                public CustomScoreProviderAnonymousInnerClassHelper(AtomicReaderContext context) : base(context)
+                public CustomScoreProviderAnonymousClass(AtomicReaderContext context) : base(context)
                 {
                 }
 
@@ -192,14 +192,14 @@
             protected override CustomScoreProvider GetCustomScoreProvider(AtomicReaderContext context)
             {
                 FieldCache.Int32s values = FieldCache.DEFAULT.GetInt32s(context.AtomicReader, INT_FIELD, false);
-                return new CustomScoreProviderAnonymousInnerClassHelper(context, values);
+                return new CustomScoreProviderAnonymousClass(context, values);
             }
             
-            private class CustomScoreProviderAnonymousInnerClassHelper : CustomScoreProvider
+            private class CustomScoreProviderAnonymousClass : CustomScoreProvider
             {
                 private FieldCache.Int32s values;
 
-                public CustomScoreProviderAnonymousInnerClassHelper(AtomicReaderContext context, FieldCache.Int32s values) : base(context)
+                public CustomScoreProviderAnonymousClass(AtomicReaderContext context, FieldCache.Int32s values) : base(context)
                 {
                     this.values = values;
                 }
diff --git a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs
index 0adf72e..cd8e9f2 100644
--- a/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs
+++ b/src/Lucene.Net.Tests.Replicator/IndexAndTaxonomyReplicationClientTest.cs
@@ -317,14 +317,14 @@
             ISourceDirectoryFactory @in = sourceDirFactory;
             AtomicInt32 failures = new AtomicInt32(AtLeast(10));
 
-            sourceDirFactory = new SourceDirectoryFactoryAnonymousInnerClass(this, @in, failures);
+            sourceDirFactory = new SourceDirectoryFactoryAnonymousClass(this, @in, failures);
             handler = new IndexAndTaxonomyReplicationHandler(handlerIndexDir, handlerTaxoDir, () =>
             {
                 if (Random.NextDouble() < 0.2 && failures > 0)
                     throw new Exception("random exception from callback");
                 return null;
             });
-            client = new ReplicationClientAnonymousInnerClass(this, replicator, handler, @in, failures);
+            client = new ReplicationClientAnonymousClass(this, replicator, handler, @in, failures);
             client.StartUpdateThread(10, "indexAndTaxo");
 
             Directory baseHandlerIndexDir = handlerIndexDir.Delegate;
@@ -345,7 +345,7 @@
             handlerTaxoDir.RandomIOExceptionRateOnOpen = (0.0);
         }
 
-        private class SourceDirectoryFactoryAnonymousInnerClass : ISourceDirectoryFactory
+        private class SourceDirectoryFactoryAnonymousClass : ISourceDirectoryFactory
         {
             private long clientMaxSize = 100, handlerIndexMaxSize = 100, handlerTaxoMaxSize = 100;
             private double clientExRate = 1.0, handlerIndexExRate = 1.0, handlerTaxoExRate = 1.0;
@@ -354,7 +354,7 @@
             private readonly ISourceDirectoryFactory @in;
             private readonly AtomicInt32 failures;
 
-            public SourceDirectoryFactoryAnonymousInnerClass(IndexAndTaxonomyReplicationClientTest test, ISourceDirectoryFactory @in, AtomicInt32 failures)
+            public SourceDirectoryFactoryAnonymousClass(IndexAndTaxonomyReplicationClientTest test, ISourceDirectoryFactory @in, AtomicInt32 failures)
             {
                 this.test = test;
                 this.@in = @in;
@@ -417,12 +417,12 @@
 
 
 
-        private class ReplicationClientAnonymousInnerClass : ReplicationClient
+        private class ReplicationClientAnonymousClass : ReplicationClient
         {
             private readonly IndexAndTaxonomyReplicationClientTest test;
             private readonly AtomicInt32 failures;
 
-            public ReplicationClientAnonymousInnerClass(IndexAndTaxonomyReplicationClientTest test, IReplicator replicator, IReplicationHandler handler, ISourceDirectoryFactory factory, AtomicInt32 failures)
+            public ReplicationClientAnonymousClass(IndexAndTaxonomyReplicationClientTest test, IReplicator replicator, IReplicationHandler handler, ISourceDirectoryFactory factory, AtomicInt32 failures)
                 : base(replicator, handler, factory)
             {
                 this.test = test;
diff --git a/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs b/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs
index 6b4624c..3a37774 100644
--- a/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs
+++ b/src/Lucene.Net.Tests.Replicator/IndexReplicationClientTest.cs
@@ -1,4 +1,4 @@
-using J2N.Threading.Atomic;
+using J2N.Threading.Atomic;
 using Lucene.Net.Attributes;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Documents;
@@ -247,14 +247,14 @@
             AtomicInt32 failures = new AtomicInt32(AtLeast(10));
 
             // wrap sourceDirFactory to return a MockDirWrapper so we can simulate errors
-            sourceDirFactory = new SourceDirectoryFactoryAnonymousInnerClass(this, @in, failures);
+            sourceDirFactory = new SourceDirectoryFactoryAnonymousClass(this, @in, failures);
             handler = new IndexReplicationHandler(handlerDir, () =>
             {
                 if (Random.NextDouble() < 0.2 && failures > 0)
                     throw new Exception("random exception from callback");
                 return null;
             });
-            client = new ReplicationClientAnonymousInnerClass(this, replicator, handler, sourceDirFactory, failures);
+            client = new ReplicationClientAnonymousClass(this, replicator, handler, sourceDirFactory, failures);
             client.StartUpdateThread(10, "index");
 
             Directory baseHandlerDir = handlerDir.Delegate;
@@ -272,7 +272,7 @@
             handlerDir.RandomIOExceptionRateOnOpen = 0.0;
         }
 
-        private class SourceDirectoryFactoryAnonymousInnerClass : ISourceDirectoryFactory
+        private class SourceDirectoryFactoryAnonymousClass : ISourceDirectoryFactory
         {
             private long clientMaxSize = 100, handlerMaxSize = 100;
             private double clientExRate = 1.0, handlerExRate = 1.0;
@@ -281,7 +281,7 @@
             private readonly ISourceDirectoryFactory @in;
             private readonly AtomicInt32 failures;
 
-            public SourceDirectoryFactoryAnonymousInnerClass(IndexReplicationClientTest test, ISourceDirectoryFactory @in, AtomicInt32 failures)
+            public SourceDirectoryFactoryAnonymousClass(IndexReplicationClientTest test, ISourceDirectoryFactory @in, AtomicInt32 failures)
             {
                 this.test = test;
                 this.@in = @in;
@@ -327,12 +327,12 @@
             }
         }
 
-        private class ReplicationClientAnonymousInnerClass : ReplicationClient
+        private class ReplicationClientAnonymousClass : ReplicationClient
         {
             private readonly IndexReplicationClientTest test;
             private readonly AtomicInt32 failures;
 
-            public ReplicationClientAnonymousInnerClass(IndexReplicationClientTest test, IReplicator replicator, IReplicationHandler handler, ISourceDirectoryFactory factory, AtomicInt32 failures)
+            public ReplicationClientAnonymousClass(IndexReplicationClientTest test, IReplicator replicator, IReplicationHandler handler, ISourceDirectoryFactory factory, AtomicInt32 failures)
                 : base(replicator, handler, factory)
             {
                 this.test = test;
diff --git a/src/Lucene.Net.Tests.Spatial/QueryEqualsHashCodeTest.cs b/src/Lucene.Net.Tests.Spatial/QueryEqualsHashCodeTest.cs
index 68a67ae..3b27e83 100644
--- a/src/Lucene.Net.Tests.Spatial/QueryEqualsHashCodeTest.cs
+++ b/src/Lucene.Net.Tests.Spatial/QueryEqualsHashCodeTest.cs
@@ -52,11 +52,11 @@
             }
         }
 
-        private class ObjGeneratorQueryAnonymousHelper : ObjGenerator
+        private class ObjGeneratorQueryAnonymousClass : ObjGenerator
         {
             private readonly SpatialStrategy strategy;
 
-            public ObjGeneratorQueryAnonymousHelper(SpatialStrategy strategy)
+            public ObjGeneratorQueryAnonymousClass(SpatialStrategy strategy)
             {
                 this.strategy = strategy;
             }
@@ -67,11 +67,11 @@
             }
         }
 
-        private class ObjGeneratorFilterAnonymousHelper : ObjGenerator
+        private class ObjGeneratorFilterAnonymousClass : ObjGenerator
         {
             private readonly SpatialStrategy strategy;
 
-            public ObjGeneratorFilterAnonymousHelper(SpatialStrategy strategy)
+            public ObjGeneratorFilterAnonymousClass(SpatialStrategy strategy)
             {
                 this.strategy = strategy;
             }
@@ -82,11 +82,11 @@
             }
         }
 
-        private class ObjGeneratorDistanceValueSourceAnonymousHelper : ObjGenerator
+        private class ObjGeneratorDistanceValueSourceAnonymousClass : ObjGenerator
         {
             private readonly SpatialStrategy strategy;
 
-            public ObjGeneratorDistanceValueSourceAnonymousHelper(SpatialStrategy strategy)
+            public ObjGeneratorDistanceValueSourceAnonymousClass(SpatialStrategy strategy)
             {
                 this.strategy = strategy;
             }
@@ -101,9 +101,9 @@
         {
             SpatialArgs args1 = MakeArgs1();
             SpatialArgs args2 = MakeArgs2();
-            TestEqualsHashcode(args1, args2, new ObjGeneratorQueryAnonymousHelper(strategy));
-            TestEqualsHashcode(args1, args2, new ObjGeneratorFilterAnonymousHelper(strategy));
-            TestEqualsHashcode(args1, args2, new ObjGeneratorDistanceValueSourceAnonymousHelper(strategy));
+            TestEqualsHashcode(args1, args2, new ObjGeneratorQueryAnonymousClass(strategy));
+            TestEqualsHashcode(args1, args2, new ObjGeneratorFilterAnonymousClass(strategy));
+            TestEqualsHashcode(args1, args2, new ObjGeneratorDistanceValueSourceAnonymousClass(strategy));
         }
 
         private void TestEqualsHashcode(SpatialArgs args1, SpatialArgs args2, ObjGenerator generator)
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs
index 6e3da5a..2231119 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs
@@ -1038,7 +1038,9 @@
             {
                 suggester.Add(new BytesRef(key), null, 10, null);
             }
+#pragma warning disable CS0168 // Variable is declared but never used
             catch (IOException e)
+#pragma warning restore CS0168 // Variable is declared but never used
             {
                 fail("Could not build suggest dictionary correctly");
             }
diff --git a/src/Lucene.Net.Tests.TestFramework/Analysis/TestMockAnalyzer.cs b/src/Lucene.Net.Tests.TestFramework/Analysis/TestMockAnalyzer.cs
index fde5e30..9777d1a 100644
--- a/src/Lucene.Net.Tests.TestFramework/Analysis/TestMockAnalyzer.cs
+++ b/src/Lucene.Net.Tests.TestFramework/Analysis/TestMockAnalyzer.cs
@@ -303,10 +303,10 @@
             }
         }
 
-        private class AnalyzerWrapperAnonymousHelper : AnalyzerWrapper
+        private class AnalyzerWrapperAnonymousClass : AnalyzerWrapper
         {
             private readonly Analyzer @delegate;
-            public AnalyzerWrapperAnonymousHelper(Analyzer @delegate)
+            public AnalyzerWrapperAnonymousClass(Analyzer @delegate)
                 : base(@delegate.Strategy)
             {
                 this.@delegate = @delegate;
@@ -329,7 +329,7 @@
             Random random = Random;
 
             Analyzer @delegate = new MockAnalyzer(random);
-            Analyzer a = new AnalyzerWrapperAnonymousHelper(@delegate);
+            Analyzer a = new AnalyzerWrapperAnonymousClass(@delegate);
 
 
             CheckOneTerm(a, "abc", "aabc");
diff --git a/src/Lucene.Net.Tests/Analysis/TestCachingTokenFilter.cs b/src/Lucene.Net.Tests/Analysis/TestCachingTokenFilter.cs
index b96902e..87a208c 100644
--- a/src/Lucene.Net.Tests/Analysis/TestCachingTokenFilter.cs
+++ b/src/Lucene.Net.Tests/Analysis/TestCachingTokenFilter.cs
@@ -48,7 +48,7 @@
 #endif
                 Random, dir);
             Document doc = new Document();
-            TokenStream stream = new TokenStreamAnonymousInnerClassHelper(this);
+            TokenStream stream = new TokenStreamAnonymousClass(this);
 
             stream = new CachingTokenFilter(stream);
 
@@ -87,11 +87,11 @@
             dir.Dispose();
         }
 
-        private class TokenStreamAnonymousInnerClassHelper : TokenStream
+        private class TokenStreamAnonymousClass : TokenStream
         {
             private TestCachingTokenFilter outerInstance;
 
-            public TokenStreamAnonymousInnerClassHelper(TestCachingTokenFilter outerInstance)
+            public TokenStreamAnonymousClass(TestCachingTokenFilter outerInstance)
             {
                 InitMembers(outerInstance);
             }
diff --git a/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs b/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs
index 77d857f..f49f63f 100644
--- a/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs
+++ b/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs
@@ -273,18 +273,18 @@
             Random random = Random;
 
             Analyzer @delegate = new MockAnalyzer(random);
-            Analyzer a = new AnalyzerWrapperAnonymousInnerClassHelper(this, @delegate.Strategy, @delegate);
+            Analyzer a = new AnalyzerWrapperAnonymousClass(this, @delegate.Strategy, @delegate);
 
             CheckOneTerm(a, "abc", "aabc");
         }
 
-        private class AnalyzerWrapperAnonymousInnerClassHelper : AnalyzerWrapper
+        private class AnalyzerWrapperAnonymousClass : AnalyzerWrapper
         {
             private readonly TestMockAnalyzer outerInstance;
 
             private Analyzer @delegate;
 
-            public AnalyzerWrapperAnonymousInnerClassHelper(TestMockAnalyzer outerInstance, ReuseStrategy getReuseStrategy, Analyzer @delegate)
+            public AnalyzerWrapperAnonymousClass(TestMockAnalyzer outerInstance, ReuseStrategy getReuseStrategy, Analyzer @delegate)
                 : base(getReuseStrategy)
             {
                 this.outerInstance = outerInstance;
@@ -314,7 +314,7 @@
             int positionGap = Random.Next(1000);
             int offsetGap = Random.Next(1000);
             Analyzer @delegate = new MockAnalyzer(Random);
-            Analyzer a = new AnalyzerWrapperAnonymousInnerClassHelper2(this, @delegate.Strategy, positionGap, offsetGap, @delegate);
+            Analyzer a = new AnalyzerWrapperAnonymousClass2(this, @delegate.Strategy, positionGap, offsetGap, @delegate);
 
             RandomIndexWriter writer = new RandomIndexWriter(
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
@@ -352,7 +352,7 @@
             writer.IndexWriter.Directory.Dispose();
         }
 
-        private class AnalyzerWrapperAnonymousInnerClassHelper2 : AnalyzerWrapper
+        private class AnalyzerWrapperAnonymousClass2 : AnalyzerWrapper
         {
             private readonly TestMockAnalyzer outerInstance;
 
@@ -360,7 +360,7 @@
             private int offsetGap;
             private Analyzer @delegate;
 
-            public AnalyzerWrapperAnonymousInnerClassHelper2(TestMockAnalyzer outerInstance, ReuseStrategy getReuseStrategy, int positionGap, int offsetGap, Analyzer @delegate)
+            public AnalyzerWrapperAnonymousClass2(TestMockAnalyzer outerInstance, ReuseStrategy getReuseStrategy, int positionGap, int offsetGap, Analyzer @delegate)
                 : base(getReuseStrategy)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Tests/Analysis/TokenAttributes/TestCharTermAttributeImpl.cs b/src/Lucene.Net.Tests/Analysis/TokenAttributes/TestCharTermAttributeImpl.cs
index 1612f67..de32a2e 100644
--- a/src/Lucene.Net.Tests/Analysis/TokenAttributes/TestCharTermAttributeImpl.cs
+++ b/src/Lucene.Net.Tests/Analysis/TokenAttributes/TestCharTermAttributeImpl.cs
@@ -339,17 +339,17 @@
 
             // finally use a completely custom ICharSequence that is not caught by instanceof checks
             const string longTestString = "012345678901234567890123456789";
-            t.Append(new CharSequenceAnonymousInnerClassHelper(this, longTestString));
+            t.Append(new CharSequenceAnonymousClass(this, longTestString));
             Assert.AreEqual("4567890123456" + longTestString, t.ToString());
         }
 
-        private class CharSequenceAnonymousInnerClassHelper : ICharSequence
+        private class CharSequenceAnonymousClass : ICharSequence
         {
             private readonly TestCharTermAttributeImpl outerInstance;
 
             private string longTestString;
 
-            public CharSequenceAnonymousInnerClassHelper(TestCharTermAttributeImpl outerInstance, string longTestString)
+            public CharSequenceAnonymousClass(TestCharTermAttributeImpl outerInstance, string longTestString)
             {
                 this.outerInstance = outerInstance;
                 this.longTestString = longTestString;
diff --git a/src/Lucene.Net.Tests/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs b/src/Lucene.Net.Tests/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs
index 0a7d861..50eb109 100644
--- a/src/Lucene.Net.Tests/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs
+++ b/src/Lucene.Net.Tests/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using J2N.Text;
 using Lucene.Net.Randomized.Generators;
 using NUnit.Framework;
@@ -35,7 +36,7 @@
             for (; ; )
             {
                 int token = compressed[off++] & 0xFF;
-                int literalLen = (int)((uint)token >> 4);
+                int literalLen = token.TripleShift(4);
                 if (literalLen == 0x0F)
                 {
                     while (compressed[off] == 0xFF)
@@ -79,7 +80,7 @@
                 if (decompressedOff + matchLen < decompressed.Length - LZ4.LAST_LITERALS)
                 {
                     bool moreCommonBytes = decompressed[decompressedOff + matchLen] == decompressed[decompressedOff - matchDec + matchLen];
-                    bool nextSequenceHasLiterals = ((int)((uint)(compressed[off] & 0xFF) >> 4)) != 0;
+                    bool nextSequenceHasLiterals = (compressed[off] & 0xFF).TripleShift(4) != 0;
                     Assert.IsTrue(!moreCommonBytes || !nextSequenceHasLiterals);
                 }
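
Alongside the rename work, this hunk swaps the cast-based unsigned shift for the `TripleShift` extension brought in by the new `using J2N.Numerics;` directive. Both forms compute Java's `>>>` on a signed int. A self-contained check of the equivalence, using a local stand-in that appears to have the same shape as the J2N extension:

```csharp
using System;

public static class TripleShiftDemo
{
    // Local stand-in with (what appears to be) the same shape as J2N's extension method:
    // a logical (unsigned) right shift on a signed int, i.e. Java's >>>.
    public static int TripleShiftLocal(this int value, int count) => (int)((uint)value >> count);

    public static void Main()
    {
        int token = 0xF3;                          // a byte read as `compressed[off++] & 0xFF`
        int oldStyle = (int)((uint)token >> 4);    // the pattern being replaced
        int newStyle = token.TripleShiftLocal(4);  // what token.TripleShift(4) evaluates to

        Console.WriteLine(oldStyle == newStyle);   // True
        Console.WriteLine(newStyle);               // 15, the 0x0F literal-length nibble
    }
}
```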
 
diff --git a/src/Lucene.Net.Tests/Codecs/Compressing/TestCompressingStoredFieldsFormat.cs b/src/Lucene.Net.Tests/Codecs/Compressing/TestCompressingStoredFieldsFormat.cs
index b96ecc2..fe210da 100644
--- a/src/Lucene.Net.Tests/Codecs/Compressing/TestCompressingStoredFieldsFormat.cs
+++ b/src/Lucene.Net.Tests/Codecs/Compressing/TestCompressingStoredFieldsFormat.cs
@@ -64,7 +64,7 @@
             Document invalidDoc = new Document();
             FieldType fieldType = new FieldType();
             fieldType.IsStored = true;
-            invalidDoc.Add(new FieldAnonymousInnerClassHelper(this, fieldType));
+            invalidDoc.Add(new FieldAnonymousClass(this, fieldType));
 
             try
             {
@@ -90,11 +90,11 @@
             }
         }
 
-        private class FieldAnonymousInnerClassHelper : Field
+        private class FieldAnonymousClass : Field
         {
             private readonly TestCompressingStoredFieldsFormat outerInstance;
 
-            public FieldAnonymousInnerClassHelper(TestCompressingStoredFieldsFormat outerInstance, FieldType fieldType)
+            public FieldAnonymousClass(TestCompressingStoredFieldsFormat outerInstance, FieldType fieldType)
                 : base("invalid", fieldType)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs
index 3379c03..cf3abc6 100644
--- a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs
+++ b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs
@@ -88,7 +88,7 @@
             IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
             DocValuesFormat fast = DocValuesFormat.ForName("Lucene45");
             DocValuesFormat slow = DocValuesFormat.ForName("SimpleText");
-            iwc.SetCodec(new Lucene46CodecAnonymousInnerClassHelper(this, fast, slow));
+            iwc.SetCodec(new Lucene46CodecAnonymousClass(this, fast, slow));
             IndexWriter iwriter = new IndexWriter(directory, iwc);
             Document doc = new Document();
             string longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm";
@@ -125,14 +125,14 @@
             directory.Dispose();
         }
 
-        private class Lucene46CodecAnonymousInnerClassHelper : Lucene46Codec
+        private class Lucene46CodecAnonymousClass : Lucene46Codec
         {
             private readonly TestPerFieldDocValuesFormat outerInstance;
 
             private readonly DocValuesFormat fast;
             private readonly DocValuesFormat slow;
 
-            public Lucene46CodecAnonymousInnerClassHelper(TestPerFieldDocValuesFormat outerInstance, DocValuesFormat fast, DocValuesFormat slow)
+            public Lucene46CodecAnonymousClass(TestPerFieldDocValuesFormat outerInstance, DocValuesFormat fast, DocValuesFormat slow)
             {
                 this.outerInstance = outerInstance;
                 this.fast = fast;
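
The anonymous codec in this test exists to route doc values for different fields to different formats, holding the `fast` Lucene45 format and the human-readable SimpleText format for that purpose. A sketch of the same idea as a named subclass, assuming Lucene46Codec exposes the usual per-field hook `GetDocValuesFormatForField(string)`; the field name checked below is purely illustrative:

```csharp
using Lucene.Net.Codecs;
using Lucene.Net.Codecs.Lucene46;

// Assumes Lucene46Codec exposes a virtual GetDocValuesFormatForField(string) hook;
// the "dv_slow" field name is illustrative, not taken from the test.
internal sealed class PerFieldDocValuesCodec : Lucene46Codec
{
    private readonly DocValuesFormat fast = DocValuesFormat.ForName("Lucene45");
    private readonly DocValuesFormat slow = DocValuesFormat.ForName("SimpleText");

    public override DocValuesFormat GetDocValuesFormatForField(string field)
    {
        // Route one field to the human-readable format, everything else to the default binary one.
        return field == "dv_slow" ? slow : fast;
    }
}
```

It would be attached the same way the test does it, via `iwc.SetCodec(new PerFieldDocValuesCodec())`.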
diff --git a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldPostingsFormat2.cs b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldPostingsFormat2.cs
index 5e6c3a3..c1fde4f 100644
--- a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldPostingsFormat2.cs
+++ b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldPostingsFormat2.cs
@@ -283,15 +283,15 @@
         [Test]
         public virtual void TestSameCodecDifferentInstance()
         {
-            Codec codec = new Lucene46CodecAnonymousInnerClassHelper(this);
+            Codec codec = new Lucene46CodecAnonymousClass(this);
             DoTestMixedPostings(codec);
         }
 
-        private class Lucene46CodecAnonymousInnerClassHelper : Lucene46Codec
+        private class Lucene46CodecAnonymousClass : Lucene46Codec
         {
             private readonly TestPerFieldPostingsFormat2 outerInstance;
 
-            public Lucene46CodecAnonymousInnerClassHelper(TestPerFieldPostingsFormat2 outerInstance)
+            public Lucene46CodecAnonymousClass(TestPerFieldPostingsFormat2 outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -316,15 +316,15 @@
         [Test]
         public virtual void TestSameCodecDifferentParams()
         {
-          Codec codec = new Lucene46CodecAnonymousInnerClassHelper2(this);
+          Codec codec = new Lucene46CodecAnonymousClass2(this);
           DoTestMixedPostings(codec);
         }
 
-        private class Lucene46CodecAnonymousInnerClassHelper2 : Lucene46Codec
+        private class Lucene46CodecAnonymousClass2 : Lucene46Codec
         {
             private readonly TestPerFieldPostingsFormat2 outerInstance;
 
-            public Lucene46CodecAnonymousInnerClassHelper2(TestPerFieldPostingsFormat2 outerInstance)
+            public Lucene46CodecAnonymousClass2(TestPerFieldPostingsFormat2 outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs b/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs
index 9d2ef61..f400c47 100644
--- a/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Store;
 using NUnit.Framework;
@@ -42,7 +42,7 @@
     {
         // indexes Integer.MAX_VALUE docs with a fixed binary field
         [Test]
-        public virtual void TestFixedBinary([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestFixedBinary()
         {
             BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BFixedBinary"));
             if (dir is MockDirectoryWrapper)
@@ -52,7 +52,7 @@
             var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                             .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                             .SetRAMBufferSizeMB(256.0)
-                            .SetMergeScheduler(newScheduler())
+                            .SetMergeScheduler(new ConcurrentMergeScheduler())
                             .SetMergePolicy(NewLogMergePolicy(false, 10))
                             .SetOpenMode(OpenMode.CREATE);
             IndexWriter w = new IndexWriter(dir, config);
@@ -108,7 +108,7 @@
 
         // indexes Integer.MAX_VALUE docs with a variable binary field
         [Test]
-        public virtual void TestVariableBinary([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestVariableBinary()
         {
             BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BVariableBinary"));
             if (dir is MockDirectoryWrapper)
@@ -119,7 +119,7 @@
             var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                             .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                             .SetRAMBufferSizeMB(256.0)
-                            .SetMergeScheduler(newScheduler())
+                            .SetMergeScheduler(new ConcurrentMergeScheduler())
                             .SetMergePolicy(NewLogMergePolicy(false, 10))
                             .SetOpenMode(OpenMode.CREATE);
             IndexWriter w = new IndexWriter(dir, config);
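
The Test2B* and Test4GB* changes in this area all follow the same shape: the NUnit `[ValueSource]` parameter that injected a merge-scheduler factory is dropped, and a ConcurrentMergeScheduler is constructed inline. A sketch of the resulting configuration; every call appears in the hunks above except the merge policy, where a plain LogDocMergePolicy stands in for the `NewLogMergePolicy(false, 10)` test-framework helper so the sketch compiles on its own:

```csharp
using Lucene.Net.Analysis;
using Lucene.Net.Index;
using Lucene.Net.Util;

// Version and analyzer are parameters because the tests supply TEST_VERSION_CURRENT and MockAnalyzer
// from the test framework; the merge policy below is a stand-in for NewLogMergePolicy(false, 10).
public static class SchedulerConfigSketch
{
    public static IndexWriterConfig Build(LuceneVersion version, Analyzer analyzer)
    {
        return new IndexWriterConfig(version, analyzer)
            .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
            .SetRAMBufferSizeMB(256.0)
            .SetMergeScheduler(new ConcurrentMergeScheduler()) // constructed inline, no factory indirection
            .SetMergePolicy(new LogDocMergePolicy())
            .SetOpenMode(OpenMode.CREATE);
    }
}
```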
diff --git a/src/Lucene.Net.Tests/Index/Test2BNumericDocValues.cs b/src/Lucene.Net.Tests/Index/Test2BNumericDocValues.cs
index 252b31f..6458705 100644
--- a/src/Lucene.Net.Tests/Index/Test2BNumericDocValues.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BNumericDocValues.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Store;
 using NUnit.Framework;
@@ -40,7 +40,7 @@
     {
         // indexes Integer.MAX_VALUE docs with an increasing dv field
         [Test]
-        public virtual void TestNumerics([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestNumerics()
         {
             BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BNumerics"));
             if (dir is MockDirectoryWrapper)
@@ -48,8 +48,13 @@
                 ((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
             }
 
-            IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
-           .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetRAMBufferSizeMB(256.0).SetMergeScheduler(newScheduler()).SetMergePolicy(NewLogMergePolicy(false, 10)).SetOpenMode(OpenMode.CREATE));
+            IndexWriter w = new IndexWriter(dir,
+                new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+                .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                .SetRAMBufferSizeMB(256.0)
+                .SetMergeScheduler(new ConcurrentMergeScheduler())
+                .SetMergePolicy(NewLogMergePolicy(false, 10))
+                .SetOpenMode(OpenMode.CREATE));
 
             Document doc = new Document();
             NumericDocValuesField dvField = new NumericDocValuesField("dv", 0);
diff --git a/src/Lucene.Net.Tests/Index/Test2BPositions.cs b/src/Lucene.Net.Tests/Index/Test2BPositions.cs
index f56a056..aeed283 100644
--- a/src/Lucene.Net.Tests/Index/Test2BPositions.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BPositions.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Store;
@@ -46,7 +46,7 @@
     {
         [Ignore("Very slow. Enable manually by removing Ignore.")]
         [Test]
-        public virtual void Test([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void Test()
         {
             BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BPositions"));
             if (dir is MockDirectoryWrapper)
@@ -54,8 +54,13 @@
                 ((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
             }
 
-            IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
-           .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetRAMBufferSizeMB(256.0).SetMergeScheduler(newScheduler()).SetMergePolicy(NewLogMergePolicy(false, 10)).SetOpenMode(OpenMode.CREATE));
+            IndexWriter w = new IndexWriter(dir,
+                new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+                .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                .SetRAMBufferSizeMB(256.0)
+                .SetMergeScheduler(new ConcurrentMergeScheduler())
+                .SetMergePolicy(NewLogMergePolicy(false, 10))
+                .SetOpenMode(OpenMode.CREATE));
 
             MergePolicy mp = w.Config.MergePolicy;
             if (mp is LogByteSizeMergePolicy)
diff --git a/src/Lucene.Net.Tests/Index/Test2BPostings.cs b/src/Lucene.Net.Tests/Index/Test2BPostings.cs
index 09a6fe5..19640a9 100644
--- a/src/Lucene.Net.Tests/Index/Test2BPostings.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BPostings.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Store;
@@ -46,7 +46,7 @@
         [Test]
         [Nightly]
         [Ignore("LUCENENET specific - takes too long to run on Azure DevOps")]
-        public virtual void Test([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void Test()
         {
             BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BPostings"));
             if (dir is MockDirectoryWrapper)
@@ -57,7 +57,7 @@
             var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                             .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                             .SetRAMBufferSizeMB(256.0)
-                            .SetMergeScheduler(newScheduler())
+                            .SetMergeScheduler(new ConcurrentMergeScheduler())
                             .SetMergePolicy(NewLogMergePolicy(false, 10))
                             .SetOpenMode(OpenMode.CREATE);
 
diff --git a/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs b/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
index cdad4d6..ee3cd39 100644
--- a/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Store;
@@ -50,7 +50,7 @@
     {
         [Ignore("Very slow. Enable manually by removing Ignore.")]
         [Test]
-        public virtual void Test([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void Test()
         {
             BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BPostingsBytes1"));
             if (dir is MockDirectoryWrapper)
@@ -61,7 +61,7 @@
             var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                             .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                             .SetRAMBufferSizeMB(256.0)
-                            .SetMergeScheduler(newScheduler())
+                            .SetMergeScheduler(new ConcurrentMergeScheduler())
                             .SetMergePolicy(NewLogMergePolicy(false, 10))
                             .SetOpenMode(OpenMode.CREATE);
             IndexWriter w = new IndexWriter(dir, config);
diff --git a/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs b/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
index 881a4f8..5af76e5 100644
--- a/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Store;
 using NUnit.Framework;
@@ -40,7 +40,7 @@
     {
         // indexes Integer.MAX_VALUE docs with a fixed binary field
         [Test]
-        public virtual void TestFixedSorted([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestFixedSorted()
         {
             BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BFixedSorted"));
             if (dir is MockDirectoryWrapper)
@@ -51,7 +51,7 @@
             IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                                 .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                                 .SetRAMBufferSizeMB(256.0)
-                                .SetMergeScheduler(newScheduler())
+                                .SetMergeScheduler(new ConcurrentMergeScheduler())
                                 .SetMergePolicy(NewLogMergePolicy(false, 10))
                                 .SetOpenMode(OpenMode.CREATE));
 
@@ -102,7 +102,7 @@
 
         // indexes Integer.MAX_VALUE docs with a fixed binary field
         [Test]
-        public virtual void Test2BOrds([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void Test2BOrds()
         {
             BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BOrds"));
             if (dir is MockDirectoryWrapper)
@@ -113,7 +113,7 @@
             var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                             .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                             .SetRAMBufferSizeMB(256.0)
-                            .SetMergeScheduler(newScheduler())
+                            .SetMergeScheduler(new ConcurrentMergeScheduler())
                             .SetMergePolicy(NewLogMergePolicy(false, 10))
                             .SetOpenMode(OpenMode.CREATE);
             IndexWriter w = new IndexWriter(dir, config);
diff --git a/src/Lucene.Net.Tests/Index/Test2BTerms.cs b/src/Lucene.Net.Tests/Index/Test2BTerms.cs
index a60a541..3398ad2 100644
--- a/src/Lucene.Net.Tests/Index/Test2BTerms.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BTerms.cs
@@ -1,4 +1,4 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Documents;
@@ -164,7 +164,7 @@
 
         [Ignore("Very slow. Enable manually by removing Ignore.")]
         [Test]
-        public virtual void Test2BTerms_Mem([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void Test2BTerms_Mem()
         {
             if ("Lucene3x".Equals(Codec.Default.Name, StringComparison.Ordinal))
             {
@@ -190,7 +190,7 @@
                 IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                                            .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                                            .SetRAMBufferSizeMB(256.0)
-                                           .SetMergeScheduler(newScheduler())
+                                           .SetMergeScheduler(new ConcurrentMergeScheduler())
                                            .SetMergePolicy(NewLogMergePolicy(false, 10))
                                            .SetOpenMode(OpenMode.CREATE));
 
diff --git a/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs b/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
index 1a89d4c..731fcd4 100644
--- a/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
+++ b/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Randomized.Generators;
 using Lucene.Net.Store;
@@ -47,7 +47,7 @@
         [Test]
         [Nightly]
         [Timeout(1200000)]
-        public virtual void Test([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void Test()
         {
             // LUCENENET specific - disable the test if not 64 bit
             AssumeTrue("This test consumes too much RAM be run on x86.", Constants.RUNTIME_IS_64BIT);
@@ -58,7 +58,7 @@
             var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                             .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                             .SetRAMBufferSizeMB(256.0)
-                            .SetMergeScheduler(newScheduler())
+                            .SetMergeScheduler(new ConcurrentMergeScheduler())
                             .SetMergePolicy(NewLogMergePolicy(false, 10))
                             .SetOpenMode(OpenMode.CREATE);
             IndexWriter w = new IndexWriter(dir, config);
diff --git a/src/Lucene.Net.Tests/Index/TestAddIndexes.cs b/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
index 4411103..395eb13 100644
--- a/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
+++ b/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
@@ -671,7 +671,7 @@
             {
                 for (int i = 0; i < NUM_THREADS; i++)
                 {
-                    threads[i] = new ThreadAnonymousInnerClassHelper(this, numIter);
+                    threads[i] = new ThreadAnonymousClass(this, numIter);
                 }
 
                 for (int i = 0; i < NUM_THREADS; i++)
@@ -680,12 +680,12 @@
                 }
             }
 
-            private class ThreadAnonymousInnerClassHelper : ThreadJob
+            private class ThreadAnonymousClass : ThreadJob
             {
                 private readonly RunAddIndexesThreads outerInstance;
                 private readonly int numIter;
 
-                public ThreadAnonymousInnerClassHelper(RunAddIndexesThreads outerInstance, int numIter)
+                public ThreadAnonymousClass(RunAddIndexesThreads outerInstance, int numIter)
                 {
                     this.outerInstance = outerInstance;
                     this.numIter = numIter;
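
The thread helpers renamed here, and in the TestBagOf* hunks further down, all follow one pattern: a private nested class derives from J2N's ThreadJob, captured state goes in through the constructor, and the work happens in an overridden Run(). A self-contained sketch of that pattern, assuming ThreadJob exposes the Thread-like Run/Start/Join surface these tests rely on; the CountdownEvent mirrors the `startingGun` used by the TestBagOf* tests:

```csharp
using System;
using System.Threading;
using J2N.Threading; // ThreadJob, the Thread-like base the anonymous worker classes derive from

// Assumes ThreadJob exposes the Run/Start/Join surface these tests use; the loop body is a
// placeholder for the AddIndexes/AddDocument calls the real workers make.
internal sealed class WorkerAnonymousClass : ThreadJob
{
    private readonly CountdownEvent startingGun;
    private readonly int iterations;

    public WorkerAnonymousClass(CountdownEvent startingGun, int iterations)
    {
        this.startingGun = startingGun;
        this.iterations = iterations;
    }

    public override void Run()
    {
        startingGun.Wait(); // block until the test fires the starting gun
        for (int i = 0; i < iterations; i++)
        {
            // real tests call writer.AddIndexes(...) or iw.AddDocument(...) here
        }
    }
}

internal static class WorkerDemo
{
    public static void Main()
    {
        var gun = new CountdownEvent(1);
        var workers = new WorkerAnonymousClass[4];
        for (int i = 0; i < workers.Length; i++)
        {
            workers[i] = new WorkerAnonymousClass(gun, iterations: 10);
            workers[i].Start();
        }
        gun.Signal();                       // release every worker at once
        foreach (var w in workers) w.Join();
        Console.WriteLine("all workers done");
    }
}
```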
diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
index eb895bd..d37a8f2 100644
--- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
+++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
@@ -1,5 +1,6 @@
-using J2N;
+using J2N;
 using J2N.Collections.Generic.Extensions;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index.Extensions;
 using NUnit.Framework;
@@ -504,7 +505,7 @@
                     int id = Convert.ToInt32(reader.Document(i).Get("id"));
                     Assert.AreEqual(id, dvByte.Get(i));
 
-                    sbyte[] bytes = new sbyte[] { (sbyte)((int)((uint)id >> 24)), (sbyte)((int)((uint)id >> 16)), (sbyte)((int)((uint)id >> 8)), (sbyte)id };
+                    sbyte[] bytes = new sbyte[] { (sbyte)(id.TripleShift(24)), (sbyte)(id.TripleShift(16)), (sbyte)(id.TripleShift(8)), (sbyte)id };
                     BytesRef expectedRef = new BytesRef((byte[])(Array)bytes);
                     BytesRef scratch = new BytesRef();
 
@@ -710,7 +711,7 @@
             doc.Add(new Int64Field("trieLong", (long)id, Field.Store.NO));
             // add docvalues fields
             doc.Add(new NumericDocValuesField("dvByte", (sbyte)id));
-            sbyte[] bytes = new sbyte[] { (sbyte)((int)((uint)id >> 24)), (sbyte)((int)((uint)id >> 16)), (sbyte)((int)((uint)id >> 8)), (sbyte)id };
+            sbyte[] bytes = new sbyte[] { (sbyte)(id.TripleShift(24)), (sbyte)(id.TripleShift(16)), (sbyte)(id.TripleShift(8)), (sbyte)id };
             BytesRef @ref = new BytesRef((byte[])(Array)bytes);
             doc.Add(new BinaryDocValuesField("dvBytesDerefFixed", @ref));
             doc.Add(new BinaryDocValuesField("dvBytesDerefVar", @ref));
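
The TripleShift calls introduced here slice the doc id into the four big-endian bytes that make up the expected BinaryDocValues payload. A small self-contained check of that decomposition, written with the logical-shift arithmetic TripleShift performs:

```csharp
using System;

public static class IdToBytesDemo
{
    // Same arithmetic as the TripleShift-based decomposition above: big-endian bytes of the id.
    public static sbyte[] ToBigEndianSBytes(int id) => new sbyte[]
    {
        (sbyte)((uint)id >> 24),
        (sbyte)((uint)id >> 16),
        (sbyte)((uint)id >> 8),
        (sbyte)id,
    };

    public static void Main()
    {
        sbyte[] bytes = ToBigEndianSBytes(0x0A0B0C0D);
        Console.WriteLine(string.Join(", ", bytes)); // 10, 11, 12, 13
    }
}
```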
diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
index eda9e29..5a8604a 100644
--- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
+++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
@@ -1,4 +1,5 @@
-using J2N;
+using J2N;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index.Extensions;
 using NUnit.Framework;
@@ -473,7 +474,7 @@
                     int id = Convert.ToInt32(reader.Document(i).Get("id"));
                     Assert.AreEqual(id, dvByte.Get(i));
 
-                    sbyte[] bytes = new sbyte[] { (sbyte)((int)((uint)id >> 24)), (sbyte)((int)((uint)id >> 16)), (sbyte)((int)((uint)id >> 8)), (sbyte)id };
+                    sbyte[] bytes = new sbyte[] { (sbyte)(id.TripleShift(24)), (sbyte)(id.TripleShift(16)), (sbyte)(id.TripleShift(8)), (sbyte)id };
                     BytesRef expectedRef = new BytesRef((byte[])(Array)bytes);
                     BytesRef scratch = new BytesRef();
 
@@ -671,7 +672,7 @@
             doc.Add(new Int64Field("trieLong", (long)id, Field.Store.NO));
             // add docvalues fields
             doc.Add(new NumericDocValuesField("dvByte", (sbyte)id));
-            sbyte[] bytes = new sbyte[] { (sbyte)((int)((uint)id >> 24)), (sbyte)((int)((uint)id >> 16)), (sbyte)((int)((uint)id >> 8)), (sbyte)id };
+            sbyte[] bytes = new sbyte[] { (sbyte)(id.TripleShift(24)), (sbyte)(id.TripleShift(16)), (sbyte)(id.TripleShift(8)), (sbyte)id };
             BytesRef @ref = new BytesRef((byte[])(Array)bytes);
             doc.Add(new BinaryDocValuesField("dvBytesDerefFixed", @ref));
             doc.Add(new BinaryDocValuesField("dvBytesDerefVar", @ref));
diff --git a/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs b/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
index fbe7193..8e36ab4 100644
--- a/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
+++ b/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
@@ -125,7 +125,7 @@
                 Document document = new Document();
                 Field field = new Field("field", "", fieldType);
                 document.Add(field);
-                threads[threadID] = new ThreadAnonymousInnerClassHelper(this, numTerms, maxTermsPerDoc, postings, iw, startingGun, threadRandom, document, field);
+                threads[threadID] = new ThreadAnonymousClass(this, numTerms, maxTermsPerDoc, postings, iw, startingGun, threadRandom, document, field);
                 threads[threadID].Start();
             }
             startingGun.Signal();
@@ -155,7 +155,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestBagOfPositions outerInstance;
 
@@ -168,7 +168,7 @@
             private readonly Document document;
             private readonly Field field;
 
-            public ThreadAnonymousInnerClassHelper(TestBagOfPositions outerInstance, int numTerms, int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun, Random threadRandom, Document document, Field field)
+            public ThreadAnonymousClass(TestBagOfPositions outerInstance, int numTerms, int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun, Random threadRandom, Document document, Field field)
             {
                 this.outerInstance = outerInstance;
                 this.numTerms = numTerms;
diff --git a/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs b/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
index e26a81b..656764f 100644
--- a/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
+++ b/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
@@ -101,7 +101,7 @@
 
             for (int threadID = 0; threadID < threadCount; threadID++)
             {
-                threads[threadID] = new ThreadAnonymousInnerClassHelper(this, maxTermsPerDoc, postings, iw, startingGun);
+                threads[threadID] = new ThreadAnonymousClass(this, maxTermsPerDoc, postings, iw, startingGun);
                 threads[threadID].Start();
             }
             startingGun.Signal();
@@ -137,7 +137,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestBagOfPostings outerInstance;
 
@@ -146,7 +146,7 @@
             private readonly RandomIndexWriter iw;
             private readonly CountdownEvent startingGun;
 
-            public ThreadAnonymousInnerClassHelper(TestBagOfPostings outerInstance, int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun)
+            public ThreadAnonymousClass(TestBagOfPostings outerInstance, int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun)
             {
                 this.outerInstance = outerInstance;
                 this.maxTermsPerDoc = maxTermsPerDoc;
diff --git a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
index a38cdd8..6148f47 100644
--- a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
+++ b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
@@ -1,4 +1,5 @@
-using J2N;
+using J2N;
+using J2N.Numerics;
 using J2N.Threading;
 using J2N.Threading.Atomic;
 using Lucene.Net.Attributes;
@@ -90,7 +91,7 @@
             while ((value & ~0x7FL) != 0L)
             {
                 bytes.Bytes[bytes.Length++] = unchecked((byte)((value & 0x7FL) | 0x80L));
-                value = (long)((ulong)value >> 7);
+                value = value.TripleShift(7);
             }
             bytes.Bytes[bytes.Length++] = (byte)value;
             //    System.err.println("[" + Thread.currentThread().getName() + "] value=" + orig + ", bytes=" + bytes);
@@ -673,7 +674,7 @@
         {
             Directory dir = NewDirectory();
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
-            conf.SetCodec(new Lucene46CodecAnonymousInnerClassHelper(this));
+            conf.SetCodec(new Lucene46CodecAnonymousClass(this));
             IndexWriter writer = new IndexWriter(dir, conf);
 
             Document doc = new Document();
@@ -704,11 +705,11 @@
             dir.Dispose();
         }
 
-        private class Lucene46CodecAnonymousInnerClassHelper : Lucene46Codec
+        private class Lucene46CodecAnonymousClass : Lucene46Codec
         {
             private readonly TestBinaryDocValuesUpdates outerInstance;
 
-            public Lucene46CodecAnonymousInnerClassHelper(TestBinaryDocValuesUpdates outerInstance)
+            public Lucene46CodecAnonymousClass(TestBinaryDocValuesUpdates outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -1245,7 +1246,7 @@
             {
                 string f = "f" + i;
                 string cf = "cf" + i;
-                threads[i] = new ThreadAnonymousInnerClassHelper(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf);
+                threads[i] = new ThreadAnonymousClass(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf);
             }
 
             foreach (ThreadJob t in threads)
@@ -1285,7 +1286,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestBinaryDocValuesUpdates outerInstance;
 
@@ -1296,7 +1297,7 @@
             private readonly string f;
             private readonly string cf;
 
-            public ThreadAnonymousInnerClassHelper(TestBinaryDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf)
+            public ThreadAnonymousClass(TestBinaryDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf)
                 : base(str)
             {
                 this.outerInstance = outerInstance;
@@ -1460,7 +1461,7 @@
             Directory dir = NewDirectory();
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
             conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // disable merges to simplify test assertions.
-            conf.SetCodec(new Lucene46CodecAnonymousInnerClassHelper2(this));
+            conf.SetCodec(new Lucene46CodecAnonymousClass2(this));
             IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
             Document doc = new Document();
             doc.Add(new StringField("id", "d0", Store.NO));
@@ -1470,7 +1471,7 @@
             writer.Dispose();
 
             // change format
-            conf.SetCodec(new Lucene46CodecAnonymousInnerClassHelper3(this));
+            conf.SetCodec(new Lucene46CodecAnonymousClass3(this));
             writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
             doc = new Document();
             doc.Add(new StringField("id", "d1", Store.NO));
@@ -1493,11 +1494,11 @@
             dir.Dispose();
         }
 
-        private class Lucene46CodecAnonymousInnerClassHelper2 : Lucene46Codec
+        private class Lucene46CodecAnonymousClass2 : Lucene46Codec
         {
             private readonly TestBinaryDocValuesUpdates outerInstance;
 
-            public Lucene46CodecAnonymousInnerClassHelper2(TestBinaryDocValuesUpdates outerInstance)
+            public Lucene46CodecAnonymousClass2(TestBinaryDocValuesUpdates outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -1508,11 +1509,11 @@
             }
         }
 
-        private class Lucene46CodecAnonymousInnerClassHelper3 : Lucene46Codec
+        private class Lucene46CodecAnonymousClass3 : Lucene46Codec
         {
             private readonly TestBinaryDocValuesUpdates outerInstance;
 
-            public Lucene46CodecAnonymousInnerClassHelper3(TestBinaryDocValuesUpdates outerInstance)
+            public Lucene46CodecAnonymousClass3(TestBinaryDocValuesUpdates outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
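
Besides the codec and thread renames, the value-encoding helper near the top of this file now writes its 7-bit variable-length encoding with `value.TripleShift(7)` instead of the `(long)((ulong)value >> 7)` cast. A self-contained version of that encoder, with the logical shift written out:

```csharp
using System;
using System.Collections.Generic;

// Low 7 bits per byte, high bit set on every byte except the last; TripleShift(7) on a long
// is the logical shift written out here as (long)((ulong)value >> 7).
public static class VLongDemo
{
    public static byte[] Encode(long value)
    {
        var bytes = new List<byte>();
        while ((value & ~0x7FL) != 0L)          // more than 7 significant bits remain
        {
            bytes.Add(unchecked((byte)((value & 0x7FL) | 0x80L)));
            value = (long)((ulong)value >> 7);  // what value.TripleShift(7) computes
        }
        bytes.Add((byte)value);
        return bytes.ToArray();
    }

    public static void Main()
    {
        Console.WriteLine(BitConverter.ToString(Encode(300))); // AC-02
        Console.WriteLine(BitConverter.ToString(Encode(5)));   // 05
    }
}
```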
diff --git a/src/Lucene.Net.Tests/Index/TestCodecs.cs b/src/Lucene.Net.Tests/Index/TestCodecs.cs
index 8c9e94b..f38f82e 100644
--- a/src/Lucene.Net.Tests/Index/TestCodecs.cs
+++ b/src/Lucene.Net.Tests/Index/TestCodecs.cs
@@ -112,7 +112,7 @@
                 this.omitTF = omitTF;
                 this.storePayloads = storePayloads;
                 // TODO: change this test to use all three
-                fieldInfo = fieldInfos.AddOrUpdate(name, new IndexableFieldTypeAnonymousInnerClassHelper(this, omitTF));
+                fieldInfo = fieldInfos.AddOrUpdate(name, new IndexableFieldTypeAnonymousClass(this, omitTF));
                 if (storePayloads)
                 {
                     fieldInfo.SetStorePayloads();
@@ -126,12 +126,12 @@
                 Array.Sort(terms);
             }
 
-            private class IndexableFieldTypeAnonymousInnerClassHelper : IIndexableFieldType
+            private class IndexableFieldTypeAnonymousClass : IIndexableFieldType
             {
                 private readonly FieldData outerInstance;
                 private readonly bool omitTF;
 
-                public IndexableFieldTypeAnonymousInnerClassHelper(FieldData outerInstance, bool omitTF)
+                public IndexableFieldTypeAnonymousClass(FieldData outerInstance, bool omitTF)
                 {
                     this.outerInstance = outerInstance;
                     this.omitTF = omitTF;
diff --git a/src/Lucene.Net.Tests/Index/TestCompoundFile.cs b/src/Lucene.Net.Tests/Index/TestCompoundFile.cs
index 7c53e49..b56f0ae 100644
--- a/src/Lucene.Net.Tests/Index/TestCompoundFile.cs
+++ b/src/Lucene.Net.Tests/Index/TestCompoundFile.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Store;
 using NUnit.Framework;
 using System;
@@ -310,7 +310,7 @@
             IndexOutput os = fsdir.CreateOutput(file, IOContext.DEFAULT);
             for (int i = 0; i < 2000; i++)
             {
-                os.WriteByte((byte)(sbyte)i);
+                os.WriteByte((byte)i);
             }
             os.Dispose();
 
@@ -632,7 +632,7 @@
             var largeBuf = new byte[2048];
             for (int i = 0; i < largeBuf.Length; i++)
             {
-                largeBuf[i] = (byte)unchecked((sbyte)(new Random(1).NextDouble() * 256));
+                largeBuf[i] = (byte)(new Random(1).NextDouble() * 256);
             }
 
             long currentPos = os.GetFilePointer();
@@ -823,7 +823,7 @@
             for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
             {
                 IndexOutput @out = d.CreateOutput("file." + fileIdx, NewIOContext(Random));
-                @out.WriteByte((byte)(sbyte)fileIdx);
+                @out.WriteByte((byte)fileIdx);
                 @out.Dispose();
             }
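
The WriteByte changes in this file drop an intermediate sbyte cast: in the default unchecked context, `(byte)(sbyte)i` and `(byte)i` keep the same low 8 bits, so the extra hop never changes what is written. A quick self-contained check over the same range the test writes:

```csharp
using System;

public static class ByteCastDemo
{
    public static void Main()
    {
        for (int i = 0; i < 2000; i++)          // the same range the compound-file test writes
        {
            byte viaSByte = (byte)(sbyte)i;     // the old double cast
            byte direct = (byte)i;              // the new single cast
            if (viaSByte != direct)
            {
                Console.WriteLine($"mismatch at {i}");
                return;
            }
        }
        Console.WriteLine("identical for every value");
    }
}
```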
 
diff --git a/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs b/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs
index 3da3ece..bb9ac45 100644
--- a/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs
+++ b/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs
@@ -1,5 +1,4 @@
-#if FEATURE_CONCURRENTMERGESCHEDULER
-using J2N.Threading.Atomic;
+using J2N.Threading.Atomic;
 using Lucene.Net.Attributes;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
@@ -297,7 +296,7 @@
                 Console.WriteLine("TEST: maxMergeCount=" + maxMergeCount + " maxMergeThreads=" + maxMergeThreads);
             }
 
-            ConcurrentMergeScheduler cms = new ConcurrentMergeSchedulerAnonymousInnerClassHelper(this, maxMergeCount, enoughMergesWaiting, runningMergeCount, failed);
+            ConcurrentMergeScheduler cms = new ConcurrentMergeSchedulerAnonymousClass(this, maxMergeCount, enoughMergesWaiting, runningMergeCount, failed);
             cms.SetMaxMergesAndThreads(maxMergeCount, maxMergeThreads);
             iwc.SetMergeScheduler(cms);
             iwc.SetMaxBufferedDocs(2);
@@ -321,7 +320,7 @@
             dir.Dispose();
         }
 
-        private class ConcurrentMergeSchedulerAnonymousInnerClassHelper : ConcurrentMergeScheduler
+        private class ConcurrentMergeSchedulerAnonymousClass : ConcurrentMergeScheduler
         {
             private readonly TestConcurrentMergeScheduler outerInstance;
 
@@ -330,7 +329,7 @@
             private readonly AtomicInt32 runningMergeCount;
             private readonly AtomicBoolean failed;
 
-            public ConcurrentMergeSchedulerAnonymousInnerClassHelper(TestConcurrentMergeScheduler outerInstance, int maxMergeCount, CountdownEvent enoughMergesWaiting, AtomicInt32 runningMergeCount, AtomicBoolean failed)
+            public ConcurrentMergeSchedulerAnonymousClass(TestConcurrentMergeScheduler outerInstance, int maxMergeCount, CountdownEvent enoughMergesWaiting, AtomicInt32 runningMergeCount, AtomicBoolean failed)
             {
                 this.outerInstance = outerInstance;
                 this.maxMergeCount = maxMergeCount;
@@ -480,5 +479,4 @@
             assertTrue(exceptionHit);
         }
     }
-}
-#endif
\ No newline at end of file
+}
\ No newline at end of file
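
With the surrounding `#if FEATURE_CONCURRENTMERGESCHEDULER` guard removed, TestConcurrentMergeScheduler now compiles unconditionally, and the renamed anonymous scheduler is still throttled and attached to the writer config the same way. A sketch of that wiring using only the calls visible in the hunk above (SetMaxMergesAndThreads, SetMergeScheduler, SetMaxBufferedDocs); the directory, analyzer, and version are passed in because the test's own choices come from the test framework:

```csharp
using Lucene.Net.Analysis;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

// Uses only calls visible in the hunk above; directory, analyzer and version are parameters
// because the test's own choices (MockAnalyzer, NewDirectory) come from the test framework.
public static class ThrottledMergesSketch
{
    public static IndexWriter Open(Directory dir, Analyzer analyzer, LuceneVersion version)
    {
        var cms = new ConcurrentMergeScheduler();
        cms.SetMaxMergesAndThreads(3, 2); // maxMergeCount, then maxMergeThreads, as in the test

        var iwc = new IndexWriterConfig(version, analyzer);
        iwc.SetMergeScheduler(cms);
        iwc.SetMaxBufferedDocs(2);        // flush tiny segments so merges actually queue up
        return new IndexWriter(dir, iwc);
    }
}
```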
diff --git a/src/Lucene.Net.Tests/Index/TestCrash.cs b/src/Lucene.Net.Tests/Index/TestCrash.cs
index 37cd69e..e079493 100644
--- a/src/Lucene.Net.Tests/Index/TestCrash.cs
+++ b/src/Lucene.Net.Tests/Index/TestCrash.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using NUnit.Framework;
 using System;
@@ -35,19 +35,19 @@
     [TestFixture]
     public class TestCrash : LuceneTestCase
     {
-        private IndexWriter InitIndex(Func<IConcurrentMergeScheduler> newScheduler, Random random, bool initialCommit)
+        private IndexWriter InitIndex(Random random, bool initialCommit)
         {
-            return InitIndex(newScheduler, random, NewMockDirectory(random), initialCommit);
+            return InitIndex(random, NewMockDirectory(random), initialCommit);
         }
 
-        private IndexWriter InitIndex(Func<IConcurrentMergeScheduler> newScheduler, Random random, MockDirectoryWrapper dir, bool initialCommit)
+        private IndexWriter InitIndex(Random random, MockDirectoryWrapper dir, bool initialCommit)
         {
             dir.SetLockFactory(NoLockFactory.GetNoLockFactory());
 
             IndexWriter writer = new IndexWriter(dir,
                 NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
                 .SetMaxBufferedDocs(10)
-                .SetMergeScheduler(newScheduler()));
+                .SetMergeScheduler(new ConcurrentMergeScheduler()));
 
             IConcurrentMergeScheduler scheduler = writer.Config.MergeScheduler as IConcurrentMergeScheduler;
             if (scheduler != null)
@@ -82,13 +82,12 @@
         }
 
         [Test]
-        public virtual void TestCrashWhileIndexing(
-            [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestCrashWhileIndexing()
         {
             // this test relies on being able to open a reader before any commit
             // happened, so we must create an initial commit just to allow that, but
             // before any documents were added.
-            IndexWriter writer = InitIndex(newScheduler, Random, true);
+            IndexWriter writer = InitIndex(Random, true);
             MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
 
             // We create leftover files because merging could be
@@ -116,15 +115,13 @@
         }
 
         [Test]
-        public virtual void TestWriterAfterCrash(
-            [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler1,
-            [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler2)
+        public virtual void TestWriterAfterCrash()
         {
             // this test relies on being able to open a reader before any commit
             // happened, so we must create an initial commit just to allow that, but
             // before any documents were added.
             Console.WriteLine("TEST: initIndex");
-            IndexWriter writer = InitIndex(newScheduler1, Random, true);
+            IndexWriter writer = InitIndex(Random, true);
             Console.WriteLine("TEST: done initIndex");
             MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
 
@@ -135,7 +132,7 @@
             dir.PreventDoubleWrite = false;
             Console.WriteLine("TEST: now crash");
             Crash(writer);
-            writer = InitIndex(newScheduler2, Random, dir, false);
+            writer = InitIndex(Random, dir, false);
             writer.Dispose();
 
             IndexReader reader = DirectoryReader.Open(dir);
@@ -157,11 +154,9 @@
         }
 
         [Test]
-        public virtual void TestCrashAfterReopen(
-            [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler1,
-            [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler2)
+        public virtual void TestCrashAfterReopen()
         {
-            IndexWriter writer = InitIndex(newScheduler1, Random, false);
+            IndexWriter writer = InitIndex(Random, false);
             MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
 
             // We create leftover files because merging could be
@@ -169,7 +164,7 @@
             dir.AssertNoUnreferencedFilesOnClose = false;
 
             writer.Dispose();
-            writer = InitIndex(newScheduler2, Random, dir, false);
+            writer = InitIndex(Random, dir, false);
             Assert.AreEqual(314, writer.MaxDoc);
             Crash(writer);
 
@@ -201,10 +196,9 @@
         }
 
         [Test]
-        public virtual void TestCrashAfterClose(
-            [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestCrashAfterClose()
         {
-            IndexWriter writer = InitIndex(newScheduler, Random, false);
+            IndexWriter writer = InitIndex(Random, false);
             MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
 
             writer.Dispose();
@@ -224,10 +218,9 @@
         }
 
         [Test]
-        public virtual void TestCrashAfterCloseNoWait(
-            [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestCrashAfterCloseNoWait()
         {
-            IndexWriter writer = InitIndex(newScheduler, Random, false);
+            IndexWriter writer = InitIndex(Random, false);
             MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
 
             writer.Dispose(false);
diff --git a/src/Lucene.Net.Tests/Index/TestDirectoryReader.cs b/src/Lucene.Net.Tests/Index/TestDirectoryReader.cs
index b88b65b..dbdf310 100644
--- a/src/Lucene.Net.Tests/Index/TestDirectoryReader.cs
+++ b/src/Lucene.Net.Tests/Index/TestDirectoryReader.cs
@@ -1110,7 +1110,7 @@
             writer.Commit();
             DirectoryReader reader = writer.GetReader();
             int[] closeCount = new int[1];
-            IndexReader.IReaderClosedListener listener = new ReaderClosedListenerAnonymousInnerClassHelper(this, reader, closeCount);
+            IndexReader.IReaderClosedListener listener = new ReaderClosedListenerAnonymousClass(this, reader, closeCount);
 
             reader.AddReaderClosedListener(listener);
 
@@ -1129,14 +1129,14 @@
             dir.Dispose();
         }
 
-        private class ReaderClosedListenerAnonymousInnerClassHelper : IndexReader.IReaderClosedListener
+        private class ReaderClosedListenerAnonymousClass : IndexReader.IReaderClosedListener
         {
             private readonly TestDirectoryReader outerInstance;
 
             private readonly DirectoryReader reader;
             private readonly int[] closeCount;
 
-            public ReaderClosedListenerAnonymousInnerClassHelper(TestDirectoryReader outerInstance, DirectoryReader reader, int[] closeCount)
+            public ReaderClosedListenerAnonymousClass(TestDirectoryReader outerInstance, DirectoryReader reader, int[] closeCount)
             {
                 this.outerInstance = outerInstance;
                 this.reader = reader;
diff --git a/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs b/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs
index 578649f..a921184 100644
--- a/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs
+++ b/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs
@@ -51,23 +51,23 @@
             Directory dir1 = NewDirectory();
 
             CreateIndex(Random, dir1, false);
-            PerformDefaultTests(new TestReopenAnonymousInnerClassHelper(this, dir1));
+            PerformDefaultTests(new TestReopenAnonymousClass(this, dir1));
             dir1.Dispose();
 
             Directory dir2 = NewDirectory();
 
             CreateIndex(Random, dir2, true);
-            PerformDefaultTests(new TestReopenAnonymousInnerClassHelper2(this, dir2));
+            PerformDefaultTests(new TestReopenAnonymousClass2(this, dir2));
             dir2.Dispose();
         }
 
-        private class TestReopenAnonymousInnerClassHelper : TestReopen
+        private class TestReopenAnonymousClass : TestReopen
         {
             private readonly TestDirectoryReaderReopen outerInstance;
 
             private Directory dir1;
 
-            public TestReopenAnonymousInnerClassHelper(TestDirectoryReaderReopen outerInstance, Directory dir1)
+            public TestReopenAnonymousClass(TestDirectoryReaderReopen outerInstance, Directory dir1)
             {
                 this.outerInstance = outerInstance;
                 this.dir1 = dir1;
@@ -84,13 +84,13 @@
             }
         }
 
-        private class TestReopenAnonymousInnerClassHelper2 : TestReopen
+        private class TestReopenAnonymousClass2 : TestReopen
         {
             private readonly TestDirectoryReaderReopen outerInstance;
 
             private readonly Directory dir2;
 
-            public TestReopenAnonymousInnerClassHelper2(TestDirectoryReaderReopen outerInstance, Directory dir2)
+            public TestReopenAnonymousClass2(TestDirectoryReaderReopen outerInstance, Directory dir2)
             {
                 this.outerInstance = outerInstance;
                 this.dir2 = dir2;
@@ -248,7 +248,7 @@
             writer.ForceMerge(1);
             writer.Dispose();
 
-            TestReopen test = new TestReopenAnonymousInnerClassHelper3(this, dir, n);
+            TestReopen test = new TestReopenAnonymousClass3(this, dir, n);
 
             IList<ReaderCouple> readers = new SynchronizedList<ReaderCouple>();
             DirectoryReader firstReader = DirectoryReader.Open(dir);
@@ -276,11 +276,11 @@
 
                 if (i < 4 || (i >= 10 && i < 14) || i > 18)
                 {
-                    task = new ReaderThreadTaskAnonymousInnerClassHelper(this, test, readers, readersToClose, r, index);
+                    task = new ReaderThreadTaskAnonymousClass(this, test, readers, readersToClose, r, index);
                 }
                 else
                 {
-                    task = new ReaderThreadTaskAnonymousInnerClassHelper2(this, readers);
+                    task = new ReaderThreadTaskAnonymousClass2(this, readers);
                 }
 
                 threads[i] = new ReaderThread(task);
@@ -332,14 +332,14 @@
             dir.Dispose();
         }
 
-        private class TestReopenAnonymousInnerClassHelper3 : TestReopen
+        private class TestReopenAnonymousClass3 : TestReopen
         {
             private readonly TestDirectoryReaderReopen outerInstance;
 
             private readonly Directory dir;
             private readonly int n;
 
-            public TestReopenAnonymousInnerClassHelper3(TestDirectoryReaderReopen outerInstance, Directory dir, int n)
+            public TestReopenAnonymousClass3(TestDirectoryReaderReopen outerInstance, Directory dir, int n)
             {
                 this.outerInstance = outerInstance;
                 this.dir = dir;
@@ -359,7 +359,7 @@
             }
         }
 
-        private class ReaderThreadTaskAnonymousInnerClassHelper : ReaderThreadTask
+        private class ReaderThreadTaskAnonymousClass : ReaderThreadTask
         {
             private readonly TestDirectoryReaderReopen outerInstance;
 
@@ -369,7 +369,7 @@
             private readonly DirectoryReader r;
             private readonly int index;
 
-            public ReaderThreadTaskAnonymousInnerClassHelper(TestDirectoryReaderReopen outerInstance, Lucene.Net.Index.TestDirectoryReaderReopen.TestReopen test, IList<ReaderCouple> readers, ISet<DirectoryReader> readersToClose, DirectoryReader r, int index)
+            public ReaderThreadTaskAnonymousClass(TestDirectoryReaderReopen outerInstance, Lucene.Net.Index.TestDirectoryReaderReopen.TestReopen test, IList<ReaderCouple> readers, ISet<DirectoryReader> readersToClose, DirectoryReader r, int index)
             {
                 this.outerInstance = outerInstance;
                 this.test = test;
@@ -426,13 +426,13 @@
             }
         }
 
-        private class ReaderThreadTaskAnonymousInnerClassHelper2 : ReaderThreadTask
+        private class ReaderThreadTaskAnonymousClass2 : ReaderThreadTask
         {
             private readonly TestDirectoryReaderReopen outerInstance;
 
             private readonly IList<ReaderCouple> readers;
 
-            public ReaderThreadTaskAnonymousInnerClassHelper2(TestDirectoryReaderReopen outerInstance, IList<ReaderCouple> readers)
+            public ReaderThreadTaskAnonymousClass2(TestDirectoryReaderReopen outerInstance, IList<ReaderCouple> readers)
             {
                 this.outerInstance = outerInstance;
                 this.readers = readers;
diff --git a/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs b/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
index 68b9738..1ff88a5 100644
--- a/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
@@ -60,7 +60,7 @@
                 Tokenizer tokenizer = new MockTokenizer(input);
                 if (fieldName.Equals("distinctiveFieldName", StringComparison.Ordinal))
                 {
-                    TokenFilter tosser = new TokenFilterAnonymousInnerClassHelper(this, tokenizer);
+                    TokenFilter tosser = new TokenFilterAnonymousClass(this, tokenizer);
                     return new TokenStreamComponents(tokenizer, tosser);
                 }
                 else
@@ -69,11 +69,11 @@
                 }
             }
 
-            private class TokenFilterAnonymousInnerClassHelper : TokenFilter
+            private class TokenFilterAnonymousClass : TokenFilter
             {
                 private readonly ThrowingAnalyzer outerInstance;
 
-                public TokenFilterAnonymousInnerClassHelper(ThrowingAnalyzer outerInstance, Tokenizer tokenizer)
+                public TokenFilterAnonymousClass(ThrowingAnalyzer outerInstance, Tokenizer tokenizer)
                     : base(tokenizer)
                 {
                     this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs b/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs
index cc77cf6..1a5da9c 100644
--- a/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs
@@ -580,7 +580,7 @@
                 Document doc = new Document();
                 doc.Add(field);
 
-                threads[i] = new ThreadAnonymousInnerClassHelper(this, w, startingGun, hitExc, doc);
+                threads[i] = new ThreadAnonymousClass(this, w, startingGun, hitExc, doc);
                 threads[i].Start();
             }
 
@@ -595,7 +595,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestDocValuesIndexing outerInstance;
 
@@ -604,7 +604,7 @@
             private readonly AtomicBoolean hitExc;
             private readonly Document doc;
 
-            public ThreadAnonymousInnerClassHelper(TestDocValuesIndexing outerInstance, IndexWriter w, CountdownEvent startingGun, AtomicBoolean hitExc, Document doc)
+            public ThreadAnonymousClass(TestDocValuesIndexing outerInstance, IndexWriter w, CountdownEvent startingGun, AtomicBoolean hitExc, Document doc)
             {
                 this.outerInstance = outerInstance;
                 this.w = w;
diff --git a/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs b/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
index 719500a..d02467a 100644
--- a/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
@@ -82,7 +82,7 @@
             for (int t = 0; t < numThreads; t++)
             {
                 Random threadRandom = new Random(Random.Next());
-                ThreadJob thread = new ThreadAnonymousInnerClassHelper(this, numbers, binary, sorted, numDocs, ar, startingGun, threadRandom);
+                ThreadJob thread = new ThreadAnonymousClass(this, numbers, binary, sorted, numDocs, ar, startingGun, threadRandom);
                 thread.Start();
                 threads.Add(thread);
             }
@@ -98,7 +98,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestDocValuesWithThreads outerInstance;
 
@@ -110,7 +110,7 @@
             private readonly CountdownEvent startingGun;
             private readonly Random threadRandom;
 
-            public ThreadAnonymousInnerClassHelper(TestDocValuesWithThreads outerInstance, IList<long?> numbers, IList<BytesRef> binary, IList<BytesRef> sorted, int numDocs, AtomicReader ar, CountdownEvent startingGun, Random threadRandom)
+            public ThreadAnonymousClass(TestDocValuesWithThreads outerInstance, IList<long?> numbers, IList<BytesRef> binary, IList<BytesRef> sorted, int numDocs, AtomicReader ar, CountdownEvent startingGun, Random threadRandom)
             {
                 this.outerInstance = outerInstance;
                 this.numbers = numbers;
@@ -255,7 +255,7 @@
             ThreadJob[] threads = new ThreadJob[NUM_THREADS];
             for (int thread = 0; thread < NUM_THREADS; thread++)
             {
-                threads[thread] = new ThreadAnonymousInnerClassHelper2(random, docValues, sr, END_TIME);
+                threads[thread] = new ThreadAnonymousClass2(random, docValues, sr, END_TIME);
                 threads[thread].Start();
             }
 
@@ -268,14 +268,14 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper2 : ThreadJob
+        private class ThreadAnonymousClass2 : ThreadJob
         {
             private readonly Random random;
             private readonly IList<BytesRef> docValues;
             private readonly AtomicReader sr;
             private readonly long endTime;
 
-            public ThreadAnonymousInnerClassHelper2(Random random, IList<BytesRef> docValues, AtomicReader sr, long endTime)
+            public ThreadAnonymousClass2(Random random, IList<BytesRef> docValues, AtomicReader sr, long endTime)
             {
                 this.random = random;
                 this.docValues = docValues;
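
Several of the threaded tests touched above share the same coordination idiom: a `CountdownEvent` acts as a starting gun so that every worker thread starts hitting the reader or writer at the same instant. Below is a self-contained sketch of that idiom using plain `System.Threading.Thread`; the tests themselves use `J2N.Threading.ThreadJob`, which plays the same role as `java.lang.Thread` in the upstream code.

```csharp
using System;
using System.Threading;

public static class StartingGunDemo
{
    public static void Main()
    {
        const int numThreads = 4;
        using CountdownEvent startingGun = new CountdownEvent(1);
        Thread[] threads = new Thread[numThreads];

        for (int i = 0; i < numThreads; i++)
        {
            int id = i; // capture a stable copy of the loop variable
            threads[i] = new Thread(() =>
            {
                startingGun.Wait();                  // all workers block here...
                Console.WriteLine($"worker {id} running");
            });
            threads[i].Start();
        }

        startingGun.Signal();                        // ...and are released together
        foreach (Thread t in threads) t.Join();
    }
}
```
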
diff --git a/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs b/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs
index d8c57f0..316f393 100644
--- a/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs
@@ -117,7 +117,7 @@
         [Test]
         public virtual void TestPositionIncrementGap()
         {
-            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper();
+            Analyzer analyzer = new AnalyzerAnonymousClass();
 
             IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
 
@@ -140,7 +140,7 @@
             reader.Dispose();
         }
 
-        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        private class AnalyzerAnonymousClass : Analyzer
         {
             protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
@@ -156,7 +156,7 @@
         [Test]
         public virtual void TestTokenReuse()
         {
-            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper2(this);
+            Analyzer analyzer = new AnalyzerAnonymousClass2(this);
 
             IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
 
@@ -182,11 +182,11 @@
             reader.Dispose();
         }
 
-        private class AnalyzerAnonymousInnerClassHelper2 : Analyzer
+        private class AnalyzerAnonymousClass2 : Analyzer
         {
             private readonly TestDocumentWriter outerInstance;
 
-            public AnalyzerAnonymousInnerClassHelper2(TestDocumentWriter outerInstance)
+            public AnalyzerAnonymousClass2(TestDocumentWriter outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -194,14 +194,14 @@
             protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
                 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
-                return new TokenStreamComponents(tokenizer, new TokenFilterAnonymousInnerClassHelper(this, tokenizer));
+                return new TokenStreamComponents(tokenizer, new TokenFilterAnonymousClass(this, tokenizer));
             }
 
-            private class TokenFilterAnonymousInnerClassHelper : TokenFilter
+            private class TokenFilterAnonymousClass : TokenFilter
             {
-                private readonly AnalyzerAnonymousInnerClassHelper2 outerInstance;
+                private readonly AnalyzerAnonymousClass2 outerInstance;
 
-                public TokenFilterAnonymousInnerClassHelper(AnalyzerAnonymousInnerClassHelper2 outerInstance, Tokenizer tokenizer)
+                public TokenFilterAnonymousClass(AnalyzerAnonymousClass2 outerInstance, Tokenizer tokenizer)
                     : base(tokenizer)
                 {
                     this.outerInstance = outerInstance;
@@ -266,7 +266,7 @@
             IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
             Document doc = new Document();
 
-            doc.Add(new TextField("preanalyzed", new TokenStreamAnonymousInnerClassHelper(this)));
+            doc.Add(new TextField("preanalyzed", new TokenStreamAnonymousClass(this)));
 
             writer.AddDocument(doc);
             writer.Commit();
@@ -292,11 +292,11 @@
             reader.Dispose();
         }
 
-        private class TokenStreamAnonymousInnerClassHelper : TokenStream
+        private class TokenStreamAnonymousClass : TokenStream
         {
             private readonly TestDocumentWriter outerInstance;
 
-            public TokenStreamAnonymousInnerClassHelper(TestDocumentWriter outerInstance) 
+            public TokenStreamAnonymousClass(TestDocumentWriter outerInstance) 
             {
                 this.outerInstance = outerInstance;
                 tokens = new string[] { "term1", "term2", "term3", "term2" };
diff --git a/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs b/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
index cc68b06..fc77f34 100644
--- a/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
@@ -217,7 +217,7 @@
                 BindingFlags.NonPublic | BindingFlags.GetField | BindingFlags.Instance);
             ReentrantLock @lock = (ReentrantLock)field.GetValue(queue);
             @lock.Lock();
-            var t = new ThreadAnonymousInnerClassHelper(this, queue);
+            var t = new ThreadAnonymousClass(this, queue);
             t.Start();
             t.Join();
             @lock.Unlock();
@@ -230,13 +230,13 @@
             Assert.IsFalse(queue.AnyChanges(), "all changes applied");
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestDocumentsWriterDeleteQueue outerInstance;
 
             private DocumentsWriterDeleteQueue queue;
 
-            public ThreadAnonymousInnerClassHelper(TestDocumentsWriterDeleteQueue outerInstance, DocumentsWriterDeleteQueue queue)
+            public ThreadAnonymousClass(TestDocumentsWriterDeleteQueue outerInstance, DocumentsWriterDeleteQueue queue)
             {
                 this.outerInstance = outerInstance;
                 this.queue = queue;
diff --git a/src/Lucene.Net.Tests/Index/TestDocumentsWriterStallControl.cs b/src/Lucene.Net.Tests/Index/TestDocumentsWriterStallControl.cs
index 6faf12d..8cb184b 100644
--- a/src/Lucene.Net.Tests/Index/TestDocumentsWriterStallControl.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocumentsWriterStallControl.cs
@@ -69,7 +69,7 @@
             for (int i = 0; i < stallThreads.Length; i++)
             {
                 int stallProbability = 1 + Random.Next(10);
-                stallThreads[i] = new ThreadAnonymousInnerClassHelper(ctrl, stallProbability);
+                stallThreads[i] = new ThreadAnonymousClass(ctrl, stallProbability);
             }
             Start(stallThreads);
             long time = Environment.TickCount;
@@ -92,12 +92,12 @@
             Join(stallThreads);
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly DocumentsWriterStallControl ctrl;
             private readonly int stallProbability;
 
-            public ThreadAnonymousInnerClassHelper(DocumentsWriterStallControl ctrl, int stallProbability)
+            public ThreadAnonymousClass(DocumentsWriterStallControl ctrl, int stallProbability)
             {
                 this.ctrl = ctrl;
                 this.stallProbability = stallProbability;
@@ -401,16 +401,16 @@
             ThreadJob[] array = new ThreadJob[num];
             for (int i = 0; i < array.Length; i++)
             {
-                array[i] = new ThreadAnonymousInnerClassHelper2(ctrl);
+                array[i] = new ThreadAnonymousClass2(ctrl);
             }
             return array;
         }
 
-        private class ThreadAnonymousInnerClassHelper2 : ThreadJob
+        private class ThreadAnonymousClass2 : ThreadJob
         {
             private readonly DocumentsWriterStallControl ctrl;
 
-            public ThreadAnonymousInnerClassHelper2(DocumentsWriterStallControl ctrl)
+            public ThreadAnonymousClass2(DocumentsWriterStallControl ctrl)
             {
                 this.ctrl = ctrl;
             }
diff --git a/src/Lucene.Net.Tests/Index/TestForceMergeForever.cs b/src/Lucene.Net.Tests/Index/TestForceMergeForever.cs
index 84cb2e2..2783af2 100644
--- a/src/Lucene.Net.Tests/Index/TestForceMergeForever.cs
+++ b/src/Lucene.Net.Tests/Index/TestForceMergeForever.cs
@@ -97,7 +97,7 @@
 
             AtomicBoolean doStop = new AtomicBoolean();
             w.Config.SetMaxBufferedDocs(2);
-            ThreadJob t = new ThreadAnonymousInnerClassHelper(this, w, numStartDocs, docs, doStop);
+            ThreadJob t = new ThreadAnonymousClass(this, w, numStartDocs, docs, doStop);
             t.Start();
             w.ForceMerge(1);
             doStop.Value = true;
@@ -108,7 +108,7 @@
             docs.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestForceMergeForever outerInstance;
 
@@ -117,7 +117,7 @@
             private readonly LineFileDocs docs;
             private readonly AtomicBoolean doStop;
 
-            public ThreadAnonymousInnerClassHelper(TestForceMergeForever outerInstance, Lucene.Net.Index.TestForceMergeForever.MyIndexWriter w, int numStartDocs, LineFileDocs docs, AtomicBoolean doStop)
+            public ThreadAnonymousClass(TestForceMergeForever outerInstance, Lucene.Net.Index.TestForceMergeForever.MyIndexWriter w, int numStartDocs, LineFileDocs docs, AtomicBoolean doStop)
             {
                 this.outerInstance = outerInstance;
                 this.w = w;
diff --git a/src/Lucene.Net.Tests/Index/TestIndexCommit.cs b/src/Lucene.Net.Tests/Index/TestIndexCommit.cs
index bde46f3..95bec68 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexCommit.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexCommit.cs
@@ -33,22 +33,22 @@
             // LUCENE-2417: equals and hashCode() impl was inconsistent
             Directory dir = NewDirectory();
 
-            IndexCommit ic1 = new IndexCommitAnonymousInnerClassHelper(this, dir);
+            IndexCommit ic1 = new IndexCommitAnonymousClass(this, dir);
 
-            IndexCommit ic2 = new IndexCommitAnonymousInnerClassHelper2(this, dir);
+            IndexCommit ic2 = new IndexCommitAnonymousClass2(this, dir);
 
             Assert.AreEqual(ic1, ic2);
             Assert.AreEqual(ic1.GetHashCode(), ic2.GetHashCode(), "hash codes are not equals");
             dir.Dispose();
         }
 
-        private class IndexCommitAnonymousInnerClassHelper : IndexCommit
+        private class IndexCommitAnonymousClass : IndexCommit
         {
             private readonly TestIndexCommit outerInstance;
 
             private Directory dir;
 
-            public IndexCommitAnonymousInnerClassHelper(TestIndexCommit outerInstance, Directory dir)
+            public IndexCommitAnonymousClass(TestIndexCommit outerInstance, Directory dir)
             {
                 this.outerInstance = outerInstance;
                 this.dir = dir;
@@ -73,13 +73,13 @@
             public override int SegmentCount => 2;
         }
 
-        private class IndexCommitAnonymousInnerClassHelper2 : IndexCommit
+        private class IndexCommitAnonymousClass2 : IndexCommit
         {
             private readonly TestIndexCommit outerInstance;
 
             private Directory dir;
 
-            public IndexCommitAnonymousInnerClassHelper2(TestIndexCommit outerInstance, Directory dir)
+            public IndexCommitAnonymousClass2(TestIndexCommit outerInstance, Directory dir)
             {
                 this.outerInstance = outerInstance;
                 this.dir = dir;
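
The comment in this test references LUCENE-2417, where `Equals`/`GetHashCode` were inconsistent: two `IndexCommit` instances that compared equal could still hash differently. The sketch below is only an illustration of the invariant being asserted (hypothetical type, and it assumes a target framework that provides `System.HashCode`); it is not the `IndexCommit` implementation.

```csharp
using System;

public sealed class CommitKey : IEquatable<CommitKey>
{
    private readonly string segmentsFileName;
    private readonly long generation;

    public CommitKey(string segmentsFileName, long generation)
    {
        this.segmentsFileName = segmentsFileName;
        this.generation = generation;
    }

    public bool Equals(CommitKey other)
        => other != null
           && segmentsFileName == other.segmentsFileName
           && generation == other.generation;

    public override bool Equals(object obj) => Equals(obj as CommitKey);

    // Derived from exactly the same fields as Equals, so equal keys hash equally.
    public override int GetHashCode()
        => HashCode.Combine(segmentsFileName, generation);
}
```
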
diff --git a/src/Lucene.Net.Tests/Index/TestIndexInput.cs b/src/Lucene.Net.Tests/Index/TestIndexInput.cs
index 3cdea38..5b3c809 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexInput.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexInput.cs
@@ -1,4 +1,4 @@
-using NUnit.Framework;
+using NUnit.Framework;
 using System;
 using System.IO;
 using Assert = Lucene.Net.TestFramework.Assert;
@@ -34,7 +34,51 @@
     [TestFixture]
     public class TestIndexInput : LuceneTestCase
     {
-        internal static readonly byte[] READ_TEST_BYTES = new byte[] { unchecked((byte)(sbyte)0x80), 0x01, unchecked((byte)(sbyte)0xFF), 0x7F, unchecked((byte)(sbyte)0x80), unchecked((byte)(sbyte)0x80), 0x01, unchecked((byte)(sbyte)0x81), unchecked((byte)(sbyte)0x80), 0x01, unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), 0x07, unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), 0x0F, unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), 0x07, unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), (byte)0x7F, 0x06, (byte)'L', (byte)'u', (byte)'c', (byte)'e', (byte)'n', (byte)'e', 0x02, unchecked((byte)(sbyte)0xC2), unchecked((byte)(sbyte)0xBF), 0x0A, (byte)'L', (byte)'u', unchecked((byte)(sbyte)0xC2), unchecked((byte)(sbyte)0xBF), (byte)(sbyte)'c', (byte)'e', unchecked((byte)(sbyte)0xC2), unchecked((byte)(sbyte)0xBF), (byte)'n', (byte)'e', 0x03, unchecked((byte)(sbyte)0xE2), unchecked((byte)(sbyte)0x98), unchecked((byte)(sbyte)0xA0), 0x0C, (byte)'L', (byte)'u', unchecked((byte)(sbyte)0xE2), unchecked((byte)(sbyte)0x98), unchecked((byte)(sbyte)0xA0), (byte)'c', (byte)'e', unchecked((byte)(sbyte)0xE2), unchecked((byte)(sbyte)0x98), unchecked((byte)(sbyte)0xA0), (byte)'n', (byte)'e', 0x04, unchecked((byte)(sbyte)0xF0), unchecked((byte)(sbyte)0x9D), unchecked((byte)(sbyte)0x84), unchecked((byte)(sbyte)0x9E), 0x08, unchecked((byte)(sbyte)0xF0), unchecked((byte)(sbyte)0x9D), unchecked((byte)(sbyte)0x84), unchecked((byte)(sbyte)0x9E), unchecked((byte)(sbyte)0xF0), unchecked((byte)(sbyte)0x9D), unchecked((byte)(sbyte)0x85), unchecked((byte)(sbyte)0xA0), 0x0E, (byte)'L', (byte)'u', unchecked((byte)(sbyte)0xF0), unchecked((byte)(sbyte)0x9D), unchecked((byte)(sbyte)0x84), unchecked((byte)(sbyte)0x9E), (byte)'c', (byte)'e', unchecked((byte)(sbyte)0xF0), unchecked((byte)(sbyte)0x9D), unchecked((byte)(sbyte)0x85), unchecked((byte)(sbyte)0xA0), (byte)'n', (byte)'e', 0x01, 0x00, 0x08, (byte)'L', (byte)'u', 0x00, (byte)'c', (byte)'e', 0x00, (byte)'n', (byte)'e', unchecked((byte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), (byte)0x17, (byte)0x01, unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), unchecked((byte)(sbyte)0xFF), 0x01 };
+        internal static readonly byte[] READ_TEST_BYTES = new byte[] {
+            (byte) 0x80, 0x01,
+            (byte) 0xFF, 0x7F,
+            (byte) 0x80, (byte) 0x80, 0x01,
+            (byte) 0x81, (byte) 0x80, 0x01,
+            (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x07,
+            (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x0F,
+            (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x07,
+            (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x7F,
+            0x06, (byte) 'L', (byte) 'u', (byte) 'c', (byte) 'e', (byte) 'n', (byte) 'e',
+
+            // 2-byte UTF-8 (U+00BF "INVERTED QUESTION MARK") 
+            0x02, (byte) 0xC2, (byte) 0xBF,
+            0x0A, (byte) 'L', (byte) 'u', (byte) 0xC2, (byte) 0xBF,
+                  (byte) 'c', (byte) 'e', (byte) 0xC2, (byte) 0xBF,
+                  (byte) 'n', (byte) 'e',
+
+            // 3-byte UTF-8 (U+2620 "SKULL AND CROSSBONES") 
+            0x03, (byte) 0xE2, (byte) 0x98, (byte) 0xA0,
+            0x0C, (byte) 'L', (byte) 'u', (byte) 0xE2, (byte) 0x98, (byte) 0xA0,
+                  (byte) 'c', (byte) 'e', (byte) 0xE2, (byte) 0x98, (byte) 0xA0,
+                  (byte) 'n', (byte) 'e',
+
+            // surrogate pairs
+            // (U+1D11E "MUSICAL SYMBOL G CLEF")
+            // (U+1D160 "MUSICAL SYMBOL EIGHTH NOTE")
+            0x04, (byte) 0xF0, (byte) 0x9D, (byte) 0x84, (byte) 0x9E,
+            0x08, (byte) 0xF0, (byte) 0x9D, (byte) 0x84, (byte) 0x9E,
+                  (byte) 0xF0, (byte) 0x9D, (byte) 0x85, (byte) 0xA0,
+            0x0E, (byte) 'L', (byte) 'u',
+                  (byte) 0xF0, (byte) 0x9D, (byte) 0x84, (byte) 0x9E,
+                  (byte) 'c', (byte) 'e',
+                  (byte) 0xF0, (byte) 0x9D, (byte) 0x85, (byte) 0xA0,
+                  (byte) 'n', (byte) 'e',  
+
+            // null bytes
+            0x01, 0x00,
+            0x08, (byte) 'L', (byte) 'u', 0x00, (byte) 'c', (byte) 'e', 0x00, (byte) 'n', (byte) 'e',
+    
+            // tests for Exceptions on invalid values
+            (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x17,
+            (byte) 0x01, // guard value
+            (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF,
+            (byte) 0x01, // guard value
+        };
 
         internal static readonly int COUNT = RandomMultiplier * 65536;
         internal static int[] INTS;
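
The reformatted `READ_TEST_BYTES` fixture above opens with variable-length integer encodings (the leading entries decode to 128, 16383, 16384 and 16385), followed by length-prefixed UTF-8 strings and deliberately invalid sequences. The sketch below re-implements just the VInt wire format (7 payload bits per byte, low-order group first, high bit meaning "more bytes follow") so the leading bytes can be checked by eye; it is an illustrative re-implementation, not the actual `Lucene.Net.Store.DataOutput` code.

```csharp
using System;
using System.Collections.Generic;

public static class VIntDemo
{
    public static byte[] WriteVInt32(int value)
    {
        var bytes = new List<byte>();
        uint u = (uint)value;                       // treat as unsigned, like Lucene does
        while (u > 0x7F)
        {
            bytes.Add((byte)((u & 0x7F) | 0x80));   // 7 payload bits + continuation bit
            u >>= 7;
        }
        bytes.Add((byte)u);                         // final byte has the high bit clear
        return bytes.ToArray();
    }

    public static void Main()
    {
        // Matches the opening entries of READ_TEST_BYTES:
        Console.WriteLine(BitConverter.ToString(WriteVInt32(128)));    // 80-01
        Console.WriteLine(BitConverter.ToString(WriteVInt32(16383)));  // FF-7F
        Console.WriteLine(BitConverter.ToString(WriteVInt32(16384)));  // 80-80-01
        Console.WriteLine(BitConverter.ToString(WriteVInt32(16385)));  // 81-80-01
    }
}
```
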
diff --git a/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs b/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs
index 0a50168..57c52af 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs
@@ -44,7 +44,7 @@
                 DirectoryReader open = DirectoryReader.Open(dir);
                 bool throwOnClose = !Rarely();
                 AtomicReader wrap = SlowCompositeReaderWrapper.Wrap(open);
-                FilterAtomicReader reader = new FilterAtomicReaderAnonymousInnerClassHelper(this, wrap, throwOnClose);
+                FilterAtomicReader reader = new FilterAtomicReaderAnonymousClass(this, wrap, throwOnClose);
                 IList<IndexReader.IReaderClosedListener> listeners = new List<IndexReader.IReaderClosedListener>();
                 int listenerCount = Random.Next(20);
                 AtomicInt32 count = new AtomicInt32();
@@ -104,13 +104,13 @@
             }
         }
 
-        private class FilterAtomicReaderAnonymousInnerClassHelper : FilterAtomicReader
+        private class FilterAtomicReaderAnonymousClass : FilterAtomicReader
         {
             private readonly TestIndexReaderClose outerInstance;
 
             private bool throwOnClose;
 
-            public FilterAtomicReaderAnonymousInnerClassHelper(TestIndexReaderClose outerInstance, AtomicReader wrap, bool throwOnClose)
+            public FilterAtomicReaderAnonymousClass(TestIndexReaderClose outerInstance, AtomicReader wrap, bool throwOnClose)
                 : base(wrap)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
index 7e9ed60..2303bdd 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
@@ -1,4 +1,4 @@
-using J2N.Text;
+using J2N.Text;
 using J2N.Threading;
 using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
@@ -242,14 +242,13 @@
         }
 
         [Test]
-        public virtual void TestChangesAfterClose([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestChangesAfterClose()
         {
             Directory dir = NewDirectory();
 
             IndexWriter writer = null;
 
-            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(newScheduler());
-            writer = new IndexWriter(dir, config);
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
             AddDoc(writer);
 
             // close
@@ -1020,7 +1019,7 @@
         [Test]
         public virtual void TestNegativePositions()
         {
-            TokenStream tokens = new TokenStreamAnonymousInnerClassHelper(this);
+            TokenStream tokens = new TokenStreamAnonymousClass(this);
 
             Directory dir = NewDirectory();
             IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
@@ -1041,11 +1040,11 @@
             dir.Dispose();
         }
 
-        private class TokenStreamAnonymousInnerClassHelper : TokenStream
+        private class TokenStreamAnonymousClass : TokenStream
         {
             private readonly TestIndexWriter outerInstance;
 
-            public TokenStreamAnonymousInnerClassHelper(TestIndexWriter outerInstance)
+            public TokenStreamAnonymousClass(TestIndexWriter outerInstance)
             {
                 this.outerInstance = outerInstance;
                 termAtt = AddAttribute<ICharTermAttribute>();
@@ -2283,7 +2282,7 @@
         [Test]
         public virtual void TestDontInvokeAnalyzerForUnAnalyzedFields()
         {
-            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(this);
+            Analyzer analyzer = new AnalyzerAnonymousClass(this);
             Directory dir = NewDirectory();
             IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
             Document doc = new Document();
@@ -2302,11 +2301,11 @@
             dir.Dispose();
         }
 
-        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        private class AnalyzerAnonymousClass : Analyzer
         {
             private readonly TestIndexWriter outerInstance;
 
-            public AnalyzerAnonymousInnerClassHelper(TestIndexWriter outerInstance)
+            public AnalyzerAnonymousClass(TestIndexWriter outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -2341,7 +2340,7 @@
             {
                 // Create my own random file:
                 IndexOutput @out = dir.CreateOutput("myrandomfile", NewIOContext(Random));
-                @out.WriteByte((byte)(sbyte)42);
+                @out.WriteByte((byte)42);
                 @out.Dispose();
 
                 (new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)))).Dispose();
@@ -2432,7 +2431,7 @@
             {
                 // Create my own random file:
                 IndexOutput @out = dir.CreateOutput("_a.frq", NewIOContext(Random));
-                @out.WriteByte((byte)(sbyte)42);
+                @out.WriteByte((byte)42);
                 @out.Dispose();
 
                 (new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)))).Dispose();
@@ -2646,7 +2645,7 @@
             public virtual IEnumerator<IEnumerable<IIndexableField>> GetEnumerator()
             {
                 return docList.GetEnumerator();
-                //return new IteratorAnonymousInnerClassHelper(this, docIter);
+                //return new IteratorAnonymousClass(this, docIter);
             }
 
             System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
@@ -2655,13 +2654,13 @@
             }
 
             /*
-          private class IteratorAnonymousInnerClassHelper : IEnumerator<IEnumerable<IndexableField>>
+          private class IteratorAnonymousClass : IEnumerator<IEnumerable<IndexableField>>
           {
               private readonly RandomFailingFieldIterable outerInstance;
 
               private IEnumerator<IEnumerable<IndexableField>> DocIter;
 
-              public IteratorAnonymousInnerClassHelper(RandomFailingFieldIterable outerInstance, IEnumerator<IEnumerable<IndexableField>> docIter)
+              public IteratorAnonymousClass(RandomFailingFieldIterable outerInstance, IEnumerator<IEnumerable<IndexableField>> docIter)
               {
                   this.outerInstance = outerInstance;
                   this.DocIter = docIter;
@@ -2812,7 +2811,7 @@
             Directory dir = NewDirectory();
             IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
             SetOnce<IndexWriter> iwRef = new SetOnce<IndexWriter>();
-            iwc.SetInfoStream(new TestPointInfoStream(iwc.InfoStream, new TestPointAnonymousInnerClassHelper(this, iwRef)));
+            iwc.SetInfoStream(new TestPointInfoStream(iwc.InfoStream, new TestPointAnonymousClass(this, iwRef)));
             IndexWriter evilWriter = new IndexWriter(dir, iwc);
             iwRef.Set(evilWriter);
             for (int i = 0; i < 1000; i++)
@@ -2829,13 +2828,13 @@
             dir.Dispose();
         }
 
-        private class TestPointAnonymousInnerClassHelper : ITestPoint
+        private class TestPointAnonymousClass : ITestPoint
         {
             private readonly TestIndexWriter outerInstance;
 
             private SetOnce<IndexWriter> iwRef;
 
-            public TestPointAnonymousInnerClassHelper(TestIndexWriter outerInstance, SetOnce<IndexWriter> iwRef)
+            public TestPointAnonymousClass(TestIndexWriter outerInstance, SetOnce<IndexWriter> iwRef)
             {
                 this.outerInstance = outerInstance;
                 this.iwRef = iwRef;
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterCommit.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterCommit.cs
index 2b7a16a..33155f1 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterCommit.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterCommit.cs
@@ -346,7 +346,7 @@
             for (int i = 0; i < NUM_THREADS; i++)
             {
                 int finalI = i;
-                threads[i] = new ThreadAnonymousInnerClassHelper(dir, w, failed, endTime, finalI, NewStringField);
+                threads[i] = new ThreadAnonymousClass(dir, w, failed, endTime, finalI, NewStringField);
                 threads[i].Start();
             }
             for (int i = 0; i < NUM_THREADS; i++)
@@ -358,7 +358,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly Func<string, string, Field.Store, Field> newStringField;
             private Directory dir;
@@ -372,7 +372,7 @@
             /// This is passed in because <see cref="LuceneTestCase.NewStringField(string, string, Field.Store)"/>
             /// is no longer static.
             /// </param>
-            public ThreadAnonymousInnerClassHelper(Directory dir, RandomIndexWriter w, AtomicBoolean failed, long endTime, int finalI, Func<string, string, Field.Store, Field> newStringField)
+            public ThreadAnonymousClass(Directory dir, RandomIndexWriter w, AtomicBoolean failed, long endTime, int finalI, Func<string, string, Field.Store, Field> newStringField)
             {
                 this.newStringField = newStringField;
                 this.dir = dir;
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterConfig.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterConfig.cs
index 41e7f50..44652a4 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterConfig.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterConfig.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Util;
 using NUnit.Framework;
@@ -62,11 +62,7 @@
             Assert.AreEqual(typeof(MockAnalyzer), conf.Analyzer.GetType());
             Assert.IsNull(conf.IndexCommit);
             Assert.AreEqual(typeof(KeepOnlyLastCommitDeletionPolicy), conf.IndexDeletionPolicy.GetType());
-#if !FEATURE_CONCURRENTMERGESCHEDULER
-            Assert.AreEqual(typeof(TaskMergeScheduler), conf.MergeScheduler.GetType());
-#else
             Assert.AreEqual(typeof(ConcurrentMergeScheduler), conf.MergeScheduler.GetType());
-#endif
             Assert.AreEqual(OpenMode.CREATE_OR_APPEND, conf.OpenMode);
             // we don't need to assert this, it should be unspecified
             Assert.IsTrue(IndexSearcher.DefaultSimilarity == conf.Similarity);
@@ -296,11 +292,7 @@
             Assert.IsTrue(mergeSched.GetType() == mergeSchedClone.GetType() && (mergeSched != mergeSchedClone || mergeSched.Clone() == mergeSchedClone.Clone()));
 
             conf.SetMergeScheduler(new SerialMergeScheduler());
-#if !FEATURE_CONCURRENTMERGESCHEDULER
-            Assert.AreEqual(typeof(TaskMergeScheduler), clone.MergeScheduler.GetType());
-#else
             Assert.AreEqual(typeof(ConcurrentMergeScheduler), clone.MergeScheduler.GetType());
-#endif
         }
 
         [Test]
@@ -325,11 +317,7 @@
             }
 
             // Test MergeScheduler
-#if !FEATURE_CONCURRENTMERGESCHEDULER
-            Assert.AreEqual(typeof(TaskMergeScheduler), conf.MergeScheduler.GetType());
-#else
             Assert.AreEqual(typeof(ConcurrentMergeScheduler), conf.MergeScheduler.GetType());
-#endif
             conf.SetMergeScheduler(new SerialMergeScheduler());
             Assert.AreEqual(typeof(SerialMergeScheduler), conf.MergeScheduler.GetType());
             try
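
With the `FEATURE_CONCURRENTMERGESCHEDULER` conditional gone, `ConcurrentMergeScheduler` is asserted as the default merge scheduler on every target framework, and the parameterized `[ValueSource]` scheduler factories are removed from the tests in favor of constructing it directly. A minimal sketch of configuring a writer that way, assuming only the standard Lucene.NET 4.8 APIs already shown in this diff:

```csharp
using Lucene.Net.Analysis;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

public static class MergeSchedulerDemo
{
    // Opens a writer whose merges run on the ConcurrentMergeScheduler, mirroring
    // what the tests above now do unconditionally (explicit here, though it is
    // also the default).
    public static IndexWriter OpenWriter(Directory dir, Analyzer analyzer)
    {
        IndexWriterConfig config = new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer)
            .SetMergeScheduler(new ConcurrentMergeScheduler());
        return new IndexWriter(dir, config);
    }
}
```
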
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
index a17cd1f..3f38404 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
@@ -1,4 +1,4 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
 using J2N.Threading;
 using J2N.Threading.Atomic;
 using Lucene.Net.Analysis;
@@ -349,7 +349,7 @@
             for (int i = 0; i < numThreads; i++)
             {
                 int offset = i;
-                threads[i] = new ThreadAnonymousInnerClassHelper(this, modifier, latch, doneLatch, offset);
+                threads[i] = new ThreadAnonymousClass(this, modifier, latch, doneLatch, offset);
                 threads[i].Start();
             }
             latch.Signal();
@@ -379,7 +379,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestIndexWriterDelete outerInstance;
 
@@ -388,7 +388,7 @@
             private readonly CountdownEvent doneLatch;
             private readonly int offset;
 
-            public ThreadAnonymousInnerClassHelper(TestIndexWriterDelete outerInstance, RandomIndexWriter modifier, CountdownEvent latch, CountdownEvent doneLatch, int offset)
+            public ThreadAnonymousClass(TestIndexWriterDelete outerInstance, RandomIndexWriter modifier, CountdownEvent latch, CountdownEvent doneLatch, int offset)
             {
                 this.outerInstance = outerInstance;
                 this.modifier = modifier;
@@ -551,24 +551,22 @@
         }
 
         [Test]
-        public virtual void TestDeletesOnDiskFull(
-            [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestDeletesOnDiskFull()
         {
-            DoTestOperationsOnDiskFull(newScheduler, false);
+            DoTestOperationsOnDiskFull(false);
         }
 
         [Test]
-        public virtual void TestUpdatesOnDiskFull(
-            [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestUpdatesOnDiskFull()
         {
-            DoTestOperationsOnDiskFull(newScheduler, true);
+            DoTestOperationsOnDiskFull(true);
         }
 
         /// <summary>
         /// Make sure if modifier tries to commit but hits disk full that modifier
         /// remains consistent and usable. Similar to TestIndexReader.testDiskFull().
         /// </summary>
-        private void DoTestOperationsOnDiskFull(Func<IConcurrentMergeScheduler> newScheduler, bool updates)
+        private void DoTestOperationsOnDiskFull(bool updates)
         {
             Term searchTerm = new Term("content", "aaa");
             int START_COUNT = 157;
@@ -613,7 +611,7 @@
                 var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false))
                                 .SetMaxBufferedDocs(1000)
                                 .SetMaxBufferedDeleteTerms(1000)
-                                .SetMergeScheduler(newScheduler());
+                                .SetMergeScheduler(new ConcurrentMergeScheduler());
 
                 IConcurrentMergeScheduler scheduler = config.MergeScheduler as IConcurrentMergeScheduler;
                 if (scheduler != null)
@@ -830,7 +828,7 @@
         [Test]
         public virtual void TestErrorAfterApplyDeletes()
         {
-            Failure failure = new FailureAnonymousInnerClassHelper(this);
+            Failure failure = new FailureAnonymousClass(this);
 
             // create a couple of files
 
@@ -945,11 +943,11 @@
             dir.Dispose();
         }
 
-        private class FailureAnonymousInnerClassHelper : Failure
+        private class FailureAnonymousClass : Failure
         {
             private readonly TestIndexWriterDelete outerInstance;
 
-            public FailureAnonymousInnerClassHelper(TestIndexWriterDelete outerInstance)
+            public FailureAnonymousClass(TestIndexWriterDelete outerInstance)
             {
                 this.outerInstance = outerInstance;
                 sawMaybe = false;
@@ -1017,7 +1015,7 @@
         [Test]
         public virtual void TestErrorInDocsWriterAdd()
         {
-            Failure failure = new FailureAnonymousInnerClassHelper2(this);
+            Failure failure = new FailureAnonymousClass2(this);
 
             // create a couple of files
 
@@ -1060,11 +1058,11 @@
             dir.Dispose();
         }
 
-        private class FailureAnonymousInnerClassHelper2 : Failure
+        private class FailureAnonymousClass2 : Failure
         {
             private readonly TestIndexWriterDelete outerInstance;
 
-            public FailureAnonymousInnerClassHelper2(TestIndexWriterDelete outerInstance)
+            public FailureAnonymousClass2(TestIndexWriterDelete outerInstance)
             {
                 this.outerInstance = outerInstance;
                 failed = false;
@@ -1314,7 +1312,7 @@
             AtomicInt32 docsInSegment = new AtomicInt32();
             AtomicBoolean closing = new AtomicBoolean();
             AtomicBoolean sawAfterFlush = new AtomicBoolean();
-            IndexWriter w = new IndexWriterAnonymousInnerClassHelper(this, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetRAMBufferSizeMB(0.5).SetMaxBufferedDocs(-1).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).SetReaderPooling(false), docsInSegment, closing, sawAfterFlush);
+            IndexWriter w = new IndexWriterAnonymousClass(this, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetRAMBufferSizeMB(0.5).SetMaxBufferedDocs(-1).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).SetReaderPooling(false), docsInSegment, closing, sawAfterFlush);
             int id = 0;
             while (true)
             {
@@ -1349,7 +1347,7 @@
             dir.Dispose();
         }
 
-        private class IndexWriterAnonymousInnerClassHelper : IndexWriter
+        private class IndexWriterAnonymousClass : IndexWriter
         {
             private readonly TestIndexWriterDelete outerInstance;
 
@@ -1357,7 +1355,7 @@
             private readonly AtomicBoolean closing;
             private readonly AtomicBoolean sawAfterFlush;
 
-            public IndexWriterAnonymousInnerClassHelper(TestIndexWriterDelete outerInstance, Directory dir, IndexWriterConfig setReaderPooling, AtomicInt32 docsInSegment, AtomicBoolean closing, AtomicBoolean sawAfterFlush)
+            public IndexWriterAnonymousClass(TestIndexWriterDelete outerInstance, Directory dir, IndexWriterConfig setReaderPooling, AtomicInt32 docsInSegment, AtomicBoolean closing, AtomicBoolean sawAfterFlush)
                 : base(dir, setReaderPooling)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
index c9ca8a4..f82f31e 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
@@ -1,4 +1,4 @@
-using J2N.Threading;
+using J2N.Threading;
 using J2N.Threading.Atomic;
 using Lucene.Net.Analysis;
 using Lucene.Net.Attributes;
@@ -113,7 +113,7 @@
 
             public virtual IEnumerator<Document> GetEnumerator()
             {
-                return new IteratorAnonymousInnerClassHelper(this);
+                return new IteratorAnonymousClass(this);
             }
 
             System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
@@ -121,11 +121,11 @@
                 return GetEnumerator();
             }
 
-            private class IteratorAnonymousInnerClassHelper : IEnumerator<Document>
+            private class IteratorAnonymousClass : IEnumerator<Document>
             {
                 private readonly DocCopyIterator outerInstance;
 
-                public IteratorAnonymousInnerClassHelper(DocCopyIterator outerInstance)
+                public IteratorAnonymousClass(DocCopyIterator outerInstance)
                 {
                     this.outerInstance = outerInstance;
                 }
@@ -318,7 +318,7 @@
         }
 
         [Test]
-        public virtual void TestRandomExceptions([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestRandomExceptions()
         {
             if (Verbose)
             {
@@ -329,19 +329,9 @@
             MockAnalyzer analyzer = new MockAnalyzer(Random);
             analyzer.EnableChecks = false; // disable workflow checking as we forcefully close() in exceptional cases.
 
-
-            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
-                            .SetRAMBufferSizeMB(0.1)
-                            .SetMergeScheduler(newScheduler());
-
-            var scheduler = config.MergeScheduler as IConcurrentMergeScheduler;
-            if (scheduler != null)
-            {
-                scheduler.SetSuppressExceptions();
-            }
-            
-
-            IndexWriter writer = RandomIndexWriter.MockIndexWriter(dir, config , new TestPoint1(this));
+            IndexWriter writer = RandomIndexWriter.MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
+                .SetRAMBufferSizeMB(0.1).SetMergeScheduler(new ConcurrentMergeScheduler()) , new TestPoint1(this));
+            ((IConcurrentMergeScheduler)writer.Config.MergeScheduler).SetSuppressExceptions();
             //writer.SetMaxBufferedDocs(10);
             if (Verbose)
             {
@@ -386,24 +376,15 @@
 
         [Test]
         [Slow]
-        public virtual void TestRandomExceptionsThreads([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestRandomExceptionsThreads()
         {
             Directory dir = NewDirectory();
             MockAnalyzer analyzer = new MockAnalyzer(Random);
             analyzer.EnableChecks = false; // disable workflow checking as we forcefully close() in exceptional cases.
 
-            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
-                            .SetRAMBufferSizeMB(0.2)
-                            .SetMergeScheduler(newScheduler());
-
-            IndexWriter writer = RandomIndexWriter.MockIndexWriter(dir, config, new TestPoint1(this));
-
-            var scheduler = config.MergeScheduler as IConcurrentMergeScheduler;
-            if (scheduler != null)
-            {
-                scheduler.SetSuppressExceptions();
-            }
-
+            IndexWriter writer = RandomIndexWriter.MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
+                .SetRAMBufferSizeMB(0.2).SetMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1(this));
+            ((IConcurrentMergeScheduler)writer.Config.MergeScheduler).SetSuppressExceptions();
             //writer.SetMaxBufferedDocs(10);
             writer.Commit();
 
@@ -578,15 +559,15 @@
 
         // LUCENE-1210
         [Test]
-        public virtual void TestExceptionOnMergeInit([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestExceptionOnMergeInit()
         {
             // LUCENENET specific - disable the test if asserts are not enabled
             AssumeTrue("This test requires asserts to be enabled.", Debugging.AssertsEnabled);
 
             Directory dir = NewDirectory();
-            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy());
-
-            var cms = newScheduler();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+                .SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy());
+            var cms = new ConcurrentMergeScheduler();
             cms.SetSuppressExceptions();
             conf.SetMergeScheduler(cms);
             ((LogMergePolicy)conf.MergePolicy).MergeFactor = 2;
@@ -622,7 +603,7 @@
             {
                 MockTokenizer tokenizer = new MockTokenizer(reader2, MockTokenizer.SIMPLE, true);
                 tokenizer.EnableChecks = false; // disable workflow checking as we forcefully close() in exceptional cases.
-                return new TokenStreamComponents(tokenizer, new TokenFilterAnonymousInnerClassHelper(tokenizer));
+                return new TokenStreamComponents(tokenizer, new TokenFilterAnonymousClass(tokenizer));
             });
 
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
@@ -675,9 +656,9 @@
             dir.Dispose();
         }
 
-        private class TokenFilterAnonymousInnerClassHelper : TokenFilter
+        private class TokenFilterAnonymousClass : TokenFilter
         {
-            public TokenFilterAnonymousInnerClassHelper(MockTokenizer tokenizer)
+            public TokenFilterAnonymousClass(MockTokenizer tokenizer)
                 : base(tokenizer)
             {
                 count = 0;
@@ -907,7 +888,7 @@
                     ThreadJob[] threads = new ThreadJob[NUM_THREAD];
                     for (int t = 0; t < NUM_THREAD; t++)
                     {
-                        threads[t] = new ThreadAnonymousInnerClassHelper(NUM_ITER, writer, finalI);
+                        threads[t] = new ThreadAnonymousClass(NUM_ITER, writer, finalI);
                         threads[t].Start();
                     }
 
@@ -968,13 +949,13 @@
             }
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly int NUM_ITER;
             private readonly IndexWriter writer;
             private readonly int finalI;
 
-            public ThreadAnonymousInnerClassHelper(int NUM_ITER, IndexWriter writer, int finalI)
+            public ThreadAnonymousClass(int NUM_ITER, IndexWriter writer, int finalI)
             {
                 this.NUM_ITER = NUM_ITER;
                 this.writer = writer;
@@ -1064,7 +1045,7 @@
 
         // LUCENE-1044: test exception during sync
         [Test]
-        public virtual void TestExceptionDuringSync([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestExceptionDuringSync()
         {
             MockDirectoryWrapper dir = NewMockDirectory();
             FailOnlyInSync failure = new FailOnlyInSync();
@@ -1072,7 +1053,7 @@
 
             var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                             .SetMaxBufferedDocs(2)
-                            .SetMergeScheduler(newScheduler())
+                            .SetMergeScheduler(new ConcurrentMergeScheduler())
                             .SetMergePolicy(NewLogMergePolicy(5));
 
             IndexWriter writer = new IndexWriter(dir, config);
@@ -1188,7 +1169,7 @@
         }
 
         [Test]
-        public virtual void TestForceMergeExceptions([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestForceMergeExceptions()
         {
             Directory startDir = NewDirectory();
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy());
@@ -1208,7 +1189,7 @@
                     Console.WriteLine("TEST: iter " + i);
                 }
                 MockDirectoryWrapper dir = new MockDirectoryWrapper(Random, new RAMDirectory(startDir, NewIOContext(Random)));
-                conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(newScheduler());
+                conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new ConcurrentMergeScheduler());
                 var scheduler = conf.MergeScheduler as IConcurrentMergeScheduler;
                 if (scheduler != null)
                 {
@@ -1240,7 +1221,7 @@
         {
             AtomicBoolean thrown = new AtomicBoolean(false);
             Directory dir = NewDirectory();
-            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetInfoStream(new TOOMInfoStreamAnonymousInnerClassHelper(thrown)));
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetInfoStream(new TOOMInfoStreamAnonymousClass(thrown)));
 
             try
             {
@@ -1258,11 +1239,11 @@
             dir.Dispose();
         }
 
-        private class TOOMInfoStreamAnonymousInnerClassHelper : InfoStream
+        private class TOOMInfoStreamAnonymousClass : InfoStream
         {
             private readonly AtomicBoolean thrown;
 
-            public TOOMInfoStreamAnonymousInnerClassHelper(AtomicBoolean thrown)
+            public TOOMInfoStreamAnonymousClass(AtomicBoolean thrown)
             {
                 this.thrown = thrown;
             }
@@ -1347,7 +1328,7 @@
             IndexOutput @out = dir.CreateOutput(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1 + gen), NewIOContext(Random));
             @out.CopyBytes(@in, @in.Length - 1);
             byte b = @in.ReadByte();
-            @out.WriteByte((byte)(sbyte)(1 + b));
+            @out.WriteByte((byte)(1 + b));
             @out.Dispose();
             @in.Dispose();
 
@@ -1935,7 +1916,7 @@
                 doc = new Document();
                 // try to boost with norms omitted
                 IList<IIndexableField> list = new List<IIndexableField>();
-                list.Add(new IndexableFieldAnonymousInnerClassHelper());
+                list.Add(new IndexableFieldAnonymousClass());
                 iw.AddDocument(list);
                 Assert.Fail("didn't get any exception, boost silently discarded");
             }
@@ -1953,7 +1934,7 @@
             dir.Dispose();
         }
 
-        private class IndexableFieldAnonymousInnerClassHelper : IIndexableField
+        private class IndexableFieldAnonymousClass : IIndexableField
         {
             public string Name => "foo";
 
@@ -2051,7 +2032,7 @@
         public virtual void TestTooManyFileException()
         {
             // Create failure that throws Too many open files exception randomly
-            Failure failure = new FailureAnonymousInnerClassHelper();
+            Failure failure = new FailureAnonymousClass();
 
             MockDirectoryWrapper dir = NewMockDirectory();
             // The exception is only thrown on open input
@@ -2114,7 +2095,7 @@
             dir.Dispose();
         }
 
-        private class FailureAnonymousInnerClassHelper : Failure
+        private class FailureAnonymousClass : Failure
         {
 
             public override Failure Reset()
@@ -2148,7 +2129,7 @@
 
             MockDirectoryWrapper dir = NewMockDirectory();
             AtomicBoolean shouldFail = new AtomicBoolean();
-            dir.FailOn(new FailureAnonymousInnerClassHelper2(shouldFail));
+            dir.FailOn(new FailureAnonymousClass2(shouldFail));
 
             RandomIndexWriter w = null;
 
@@ -2165,7 +2146,7 @@
                     IMergeScheduler ms = iwc.MergeScheduler;
                     if (ms is IConcurrentMergeScheduler)
                     {
-                        IConcurrentMergeScheduler suppressFakeIOE = new ConcurrentMergeSchedulerAnonymousInnerClassHelper();
+                        IConcurrentMergeScheduler suppressFakeIOE = new ConcurrentMergeSchedulerAnonymousClass();
 
                         IConcurrentMergeScheduler cms = (IConcurrentMergeScheduler)ms;
                         suppressFakeIOE.SetMaxMergesAndThreads(cms.MaxMergeCount, cms.MaxThreadCount);
@@ -2376,11 +2357,11 @@
             dir.Dispose();
         }
 
-        private class FailureAnonymousInnerClassHelper2 : Failure
+        private class FailureAnonymousClass2 : Failure
         {
             private readonly AtomicBoolean shouldFail;
 
-            public FailureAnonymousInnerClassHelper2(AtomicBoolean shouldFail)
+            public FailureAnonymousClass2(AtomicBoolean shouldFail)
             {
                 this.shouldFail = shouldFail;
             }
@@ -2416,12 +2397,7 @@
             }
         }
 
-        private class ConcurrentMergeSchedulerAnonymousInnerClassHelper :
-#if !FEATURE_CONCURRENTMERGESCHEDULER
-            TaskMergeScheduler
-#else
-            ConcurrentMergeScheduler
-#endif
+        private class ConcurrentMergeSchedulerAnonymousClass : ConcurrentMergeScheduler
         {
             protected override void HandleMergeException(Exception exc)
             {
@@ -2443,7 +2419,7 @@
             string messageToFailOn = Random.NextBoolean() ? "rollback: done finish merges" : "rollback before checkpoint";
 
             // infostream that throws exception during rollback
-            InfoStream evilInfoStream = new TEDRInfoStreamAnonymousInnerClassHelper(messageToFailOn);
+            InfoStream evilInfoStream = new TEDRInfoStreamAnonymousClass(messageToFailOn);
 
             Directory dir = NewMockDirectory(); // we want to ensure we don't leak any locks or file handles
             IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
@@ -2491,11 +2467,11 @@
             dir.Dispose();
         }
 
-        private class TEDRInfoStreamAnonymousInnerClassHelper : InfoStream
+        private class TEDRInfoStreamAnonymousClass : InfoStream
         {
             private readonly string messageToFailOn;
 
-            public TEDRInfoStreamAnonymousInnerClassHelper(string messageToFailOn)
+            public TEDRInfoStreamAnonymousClass(string messageToFailOn)
             {
                 this.messageToFailOn = messageToFailOn;
             }
@@ -2529,7 +2505,7 @@
             for (int iter = 0; iter < numIters; iter++)
             {
                 MockDirectoryWrapper dir = NewMockDirectory();
-                dir.FailOn(new FailureAnonymousInnerClassHelper3());
+                dir.FailOn(new FailureAnonymousClass3());
 
                 IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
                 IndexWriter iw = new IndexWriter(dir, iwc);
@@ -2576,7 +2552,7 @@
             }
         }
 
-        private class FailureAnonymousInnerClassHelper3 : Failure
+        private class FailureAnonymousClass3 : Failure
         {
             public override void Eval(MockDirectoryWrapper dir)
             {
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterForceMerge.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterForceMerge.cs
index f1640f6..d0c9c07 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterForceMerge.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterForceMerge.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using NUnit.Framework;
 using System;
@@ -86,7 +86,7 @@
         }
 
         [Test]
-        public virtual void TestMaxNumSegments2([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestMaxNumSegments2()
         {
             Directory dir = NewDirectory();
 
@@ -99,7 +99,7 @@
             var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                             .SetMaxBufferedDocs(2)
                             .SetMergePolicy(ldmp)
-                            .SetMergeScheduler(newScheduler());
+                            .SetMergeScheduler(new ConcurrentMergeScheduler());
             IndexWriter writer = new IndexWriter(dir, config);
 
             for (int iter = 0; iter < 10; iter++)
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterMergePolicy.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterMergePolicy.cs
index b25f422..c04b652 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterMergePolicy.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterMergePolicy.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using NUnit.Framework;
 using System;
@@ -179,7 +179,7 @@
 
         // Test the case where a merge results in no doc at all
         [Test]
-        public virtual void TestMergeDocCount0([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestMergeDocCount0()
         {
             Directory dir = NewDirectory();
 
@@ -205,7 +205,7 @@
                 .SetOpenMode(OpenMode.APPEND)
                 .SetMaxBufferedDocs(10)
                 .SetMergePolicy(ldmp)
-                .SetMergeScheduler(newScheduler());
+                .SetMergeScheduler(new ConcurrentMergeScheduler());
             writer = new IndexWriter(dir, config);
 
             // merge factor is changed, so check invariants after all adds
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs
index eee2d87..c925740 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs
@@ -405,7 +405,7 @@
 
                     IndexWriter finalWriter = writer;
                     List<Exception> failure = new List<Exception>();
-                    ThreadJob t1 = new ThreadAnonymousInnerClassHelper(this, doc, finalWriter, failure);
+                    ThreadJob t1 = new ThreadAnonymousClass(this, doc, finalWriter, failure);
 
                     if (failure.Count > 0)
                     {
@@ -430,7 +430,7 @@
             directory.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestIndexWriterMerging outerInstance;
 
@@ -438,7 +438,7 @@
             private IndexWriter finalWriter;
             private List<Exception> failure;
 
-            public ThreadAnonymousInnerClassHelper(TestIndexWriterMerging outerInstance, Document doc, IndexWriter finalWriter, List<Exception> failure)
+            public ThreadAnonymousClass(TestIndexWriterMerging outerInstance, Document doc, IndexWriter finalWriter, List<Exception> failure)
             {
                 this.outerInstance = outerInstance;
                 this.doc = doc;
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterOnDiskFull.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterOnDiskFull.cs
index 5e89cff..cbe0089 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterOnDiskFull.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterOnDiskFull.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Codecs;
+using Lucene.Net.Codecs;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Store;
@@ -637,12 +637,12 @@
         // an IndexWriter (hit during DW.ThreadState.Init()) is
         // OK:
         [Test]
-        public virtual void TestImmediateDiskFull([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestImmediateDiskFull()
         {
             MockDirectoryWrapper dir = NewMockDirectory();
             var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                             .SetMaxBufferedDocs(2)
-                            .SetMergeScheduler(newScheduler());
+                            .SetMergeScheduler(new ConcurrentMergeScheduler());
             IndexWriter writer = new IndexWriter(dir, config);
             dir.MaxSizeInBytes = Math.Max(1, dir.GetRecomputedActualSizeInBytes());
             Document doc = new Document();
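
The other recurring change in these test files: methods that were parameterized with a `ValueSource` of merge-scheduler factories now take no parameter and construct `ConcurrentMergeScheduler` directly, since the `TaskMergeScheduler` fallback behind `FEATURE_CONCURRENTMERGESCHEDULER` is gone (see the `ConcurrentMergeSchedulerAnonymousClass` hunk above). A minimal sketch of the resulting configuration outside the test fixture follows; the class and method names are made up, and the fluent `Set*` extensions come from `Lucene.Net.Index.Extensions`, as imported in the files being patched.

```csharp
using Lucene.Net.Analysis;
using Lucene.Net.Index;
using Lucene.Net.Index.Extensions;
using Lucene.Net.Util;

internal static class MergeSchedulerConfigExample
{
    // Builds a writer config the way the simplified tests now do: the one
    // remaining scheduler is attached directly, with no factory indirection.
    public static IndexWriterConfig Build(Analyzer analyzer)
    {
        return new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer)
            .SetMaxBufferedDocs(2)
            .SetMergeScheduler(new ConcurrentMergeScheduler());
    }
}
```
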
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs
index b5250eb..fbcec42 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs
@@ -1,4 +1,4 @@
-// LUCENENET NOTE: Clearly this test is not applicable to .NET, but just 
+// LUCENENET NOTE: Clearly this test is not applicable to .NET, but just 
 // adding the file to the project for completeness.
 
 //using System;
@@ -79,7 +79,7 @@
 //                AssumeFalse("does not support PreFlex, see LUCENE-3992", Codec.Default.Name.Equals("Lucene3x", StringComparison.Ordinal));
 //                // we are the fork, setup a crashing thread
 //                int crashTime = TestUtil.NextInt(Random(), 3000, 4000);
-//                ThreadClass t = new ThreadAnonymousInnerClassHelper(this, crashTime);
+//                ThreadJob t = new ThreadAnonymousClass(this, crashTime);
 //                t.Priority = ThreadPriority.Highest;
 //                t.Start();
 //                // run the test until we crash.
@@ -90,13 +90,13 @@
 //            }
 //        }
 
-//        private class ThreadAnonymousInnerClassHelper : ThreadClass
+//        private class ThreadAnonymousClass : ThreadJob
 //        {
 //            private readonly TestIndexWriterOnJRECrash outerInstance;
 
 //            private int CrashTime;
 
-//            public ThreadAnonymousInnerClassHelper(TestIndexWriterOnJRECrash outerInstance, int crashTime)
+//            public ThreadAnonymousClass(TestIndexWriterOnJRECrash outerInstance, int crashTime)
 //            {
 //                this.outerInstance = outerInstance;
 //                this.CrashTime = crashTime;
@@ -160,17 +160,17 @@
 //        {
 //            public static Thread Start(InputStream from, OutputStream to)
 //            {
-//                ThreadClass t = new ThreadAnonymousInnerClassHelper2(from, to);
+//                ThreadJob t = new ThreadAnonymousClass2(from, to);
 //                t.Start();
 //                return t;
 //            }
 
-//            private class ThreadAnonymousInnerClassHelper2 : ThreadClass
+//            private class ThreadAnonymousClass2 : ThreadJob
 //            {
 //                private InputStream From;
 //                private OutputStream To;
 
-//                public ThreadAnonymousInnerClassHelper2(InputStream from, OutputStream to)
+//                public ThreadAnonymousClass2(InputStream from, OutputStream to)
 //                {
 //                    this.From = from;
 //                    this.To = to;
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs
index 34faea0..3743e0f 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs
@@ -1,4 +1,4 @@
-using J2N.Threading;
+using J2N.Threading;
 using J2N.Threading.Atomic;
 using Lucene.Net.Attributes;
 using Lucene.Net.Documents;
@@ -555,7 +555,7 @@
             {
                 for (int i = 0; i < outerInstance.numThreads; i++)
                 {
-                    threads[i] = new ThreadAnonymousInnerClassHelper(this, numIter);
+                    threads[i] = new ThreadAnonymousClass(this, numIter);
                 }
                 for (int i = 0; i < outerInstance.numThreads; i++)
                 {
@@ -563,13 +563,13 @@
                 }
             }
 
-            private class ThreadAnonymousInnerClassHelper : ThreadJob
+            private class ThreadAnonymousClass : ThreadJob
             {
                 private readonly AddDirectoriesThreads outerInstance;
 
                 private readonly int numIter;
 
-                public ThreadAnonymousInnerClassHelper(AddDirectoriesThreads outerInstance, int numIter)
+                public ThreadAnonymousClass(AddDirectoriesThreads outerInstance, int numIter)
                 {
                     this.outerInstance = outerInstance;
                     this.numIter = numIter;
@@ -749,7 +749,7 @@
 
         [Test]
         [Slow]
-        public virtual void TestMergeWarmer([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestMergeWarmer()
         {
             Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
             // Enroll warmer
@@ -757,7 +757,7 @@
             var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
                             .SetMaxBufferedDocs(2)
                             .SetMergedSegmentWarmer(warmer)
-                            .SetMergeScheduler(newScheduler())
+                            .SetMergeScheduler(new ConcurrentMergeScheduler())
                             .SetMergePolicy(NewLogMergePolicy());
             IndexWriter writer = new IndexWriter(dir1, config);
 
@@ -792,10 +792,10 @@
         }
 
         [Test]
-        public virtual void TestAfterCommit([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestAfterCommit()
         {
             Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
-            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(newScheduler());
+            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new ConcurrentMergeScheduler());
             IndexWriter writer = new IndexWriter(dir1, config);
             writer.Commit();
 
@@ -899,7 +899,7 @@
             var threads = new ThreadJob[1];
             for (int i = 0; i < threads.Length; i++)
             {
-                threads[i] = new ThreadAnonymousInnerClassHelper(writer, dirs, endTime, excs);
+                threads[i] = new ThreadAnonymousClass(writer, dirs, endTime, excs);
                 threads[i].IsBackground = (true);
                 threads[i].Start();
             }
@@ -949,14 +949,14 @@
             dir1.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly IndexWriter writer;
             private readonly Directory[] dirs;
             private readonly long endTime;
             private readonly ConcurrentQueue<Exception> excs;
 
-            public ThreadAnonymousInnerClassHelper(IndexWriter writer, Directory[] dirs, long endTime, ConcurrentQueue<Exception> excs)
+            public ThreadAnonymousClass(IndexWriter writer, Directory[] dirs, long endTime, ConcurrentQueue<Exception> excs)
             {
                 this.writer = writer;
                 this.dirs = dirs;
@@ -1013,7 +1013,7 @@
             var threads = new ThreadJob[numThreads];
             for (int i = 0; i < numThreads; i++)
             {
-                threads[i] = new ThreadAnonymousInnerClassHelper2(writer, endTime, excs);
+                threads[i] = new ThreadAnonymousClass2(writer, endTime, excs);
                 threads[i].IsBackground = (true);
                 threads[i].Start();
             }
@@ -1055,13 +1055,13 @@
             dir1.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper2 : ThreadJob
+        private class ThreadAnonymousClass2 : ThreadJob
         {
             private readonly IndexWriter writer;
             private readonly long endTime;
             private readonly ConcurrentQueue<Exception> excs;
 
-            public ThreadAnonymousInnerClassHelper2(IndexWriter writer, long endTime, ConcurrentQueue<Exception> excs)
+            public ThreadAnonymousClass2(IndexWriter writer, long endTime, ConcurrentQueue<Exception> excs)
             {
                 this.writer = writer;
                 this.endTime = endTime;
@@ -1174,7 +1174,7 @@
         {
             Directory dir = NewDirectory();
             AtomicBoolean didWarm = new AtomicBoolean();
-            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetReaderPooling(true).SetMergedSegmentWarmer(new IndexReaderWarmerAnonymousInnerClassHelper(didWarm)).
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetReaderPooling(true).SetMergedSegmentWarmer(new IndexReaderWarmerAnonymousClass(didWarm)).
                     SetMergePolicy(NewLogMergePolicy(10)));
 
             Document doc = new Document();
@@ -1189,11 +1189,11 @@
             Assert.IsTrue(didWarm);
         }
 
-        private class IndexReaderWarmerAnonymousInnerClassHelper : IndexWriter.IndexReaderWarmer
+        private class IndexReaderWarmerAnonymousClass : IndexWriter.IndexReaderWarmer
         {
             private readonly AtomicBoolean didWarm;
 
-            public IndexReaderWarmerAnonymousInnerClassHelper(AtomicBoolean didWarm)
+            public IndexReaderWarmerAnonymousClass(AtomicBoolean didWarm)
             {
                 this.didWarm = didWarm;
             }
@@ -1216,7 +1216,7 @@
         {
             Directory dir = NewDirectory();
             AtomicBoolean didWarm = new AtomicBoolean();
-            InfoStream infoStream = new InfoStreamAnonymousInnerClassHelper(didWarm);
+            InfoStream infoStream = new InfoStreamAnonymousClass(didWarm);
             IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetReaderPooling(true).SetInfoStream(infoStream).SetMergedSegmentWarmer(new SimpleMergedSegmentWarmer(infoStream)).SetMergePolicy(NewLogMergePolicy(10)));
 
             Document doc = new Document();
@@ -1231,11 +1231,11 @@
             Assert.IsTrue(didWarm);
         }
 
-        private class InfoStreamAnonymousInnerClassHelper : InfoStream
+        private class InfoStreamAnonymousClass : InfoStream
         {
             private readonly AtomicBoolean didWarm;
 
-            public InfoStreamAnonymousInnerClassHelper(AtomicBoolean didWarm)
+            public InfoStreamAnonymousClass(AtomicBoolean didWarm)
             {
                 this.didWarm = didWarm;
             }
@@ -1347,7 +1347,7 @@
             // don't leak file handles.
             MockDirectoryWrapper dir = (MockDirectoryWrapper)GetAssertNoDeletesDirectory(NewMockDirectory());
             AtomicBoolean shouldFail = new AtomicBoolean();
-            dir.FailOn(new FailureAnonymousInnerClassHelper(shouldFail));
+            dir.FailOn(new FailureAnonymousClass(shouldFail));
 
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
             conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges from getting in the way
@@ -1386,11 +1386,11 @@
             dir.Dispose();
         }
 
-        private class FailureAnonymousInnerClassHelper : Failure
+        private class FailureAnonymousClass : Failure
         {
             private readonly AtomicBoolean shouldFail;
 
-            public FailureAnonymousInnerClassHelper(AtomicBoolean shouldFail)
+            public FailureAnonymousClass(AtomicBoolean shouldFail)
             {
                 this.shouldFail = shouldFail;
             }
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs
index 36c2d7f..e3e973a 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs
@@ -1,4 +1,4 @@
-using J2N.Threading;
+using J2N.Threading;
 using J2N.Threading.Atomic;
 using Lucene.Net.Attributes;
 using Lucene.Net.Documents;
@@ -164,7 +164,7 @@
         // an IndexWriter (hit during DW.ThreadState.Init()), with
         // multiple threads, is OK:
         [Test]
-        public virtual void TestImmediateDiskFullWithThreads([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestImmediateDiskFullWithThreads()
         {
             int NUM_THREADS = 3;
             int numIterations = TestNightly ? 10 : 3;
@@ -175,16 +175,14 @@
                     Console.WriteLine("\nTEST: iter=" + iter);
                 }
                 MockDirectoryWrapper dir = NewMockDirectory();
-                var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
-                                .SetMaxBufferedDocs(2)
-                                .SetMergeScheduler(newScheduler())
-                                .SetMergePolicy(NewLogMergePolicy(4));
-                IndexWriter writer = new IndexWriter(dir, config);
-                var scheduler = config.mergeScheduler as IConcurrentMergeScheduler;
-                if (scheduler != null)
-                {
-                    scheduler.SetSuppressExceptions();
-                }
+                IndexWriter writer = new IndexWriter(
+                    dir,
+                    NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+                        .SetMaxBufferedDocs(2)
+                        .SetMergeScheduler(new ConcurrentMergeScheduler())
+                        .SetMergePolicy(NewLogMergePolicy(4)));
+                ((IConcurrentMergeScheduler)writer.Config.MergeScheduler).SetSuppressExceptions();
+
                 dir.MaxSizeInBytes = 4 * 1024 + 20 * iter;
 
                 IndexerThread[] threads = new IndexerThread[NUM_THREADS];
@@ -220,7 +218,7 @@
         // speaking, this isn't valid use of Lucene's APIs, but we
         // still want to be robust to this case:
         [Test]
-        public virtual void TestCloseWithThreads([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestCloseWithThreads()
         {
             int NUM_THREADS = 3;
             int numIterations = TestNightly ? 7 : 3;
@@ -231,16 +229,13 @@
                     Console.WriteLine("\nTEST: iter=" + iter);
                 }
                 Directory dir = NewDirectory();
-                var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
-                                .SetMaxBufferedDocs(10)
-                                .SetMergeScheduler(newScheduler())
-                                .SetMergePolicy(NewLogMergePolicy(4));
-                IndexWriter writer = new IndexWriter(dir, config);
-                var scheduler = config.mergeScheduler as IConcurrentMergeScheduler;
-                if (scheduler != null)
-                {
-                    scheduler.SetSuppressExceptions();
-                }
+                IndexWriter writer = new IndexWriter(
+                    dir,
+                    NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+                        .SetMaxBufferedDocs(10)
+                        .SetMergeScheduler(new ConcurrentMergeScheduler())
+                        .SetMergePolicy(NewLogMergePolicy(4)));
+                ((IConcurrentMergeScheduler)writer.Config.MergeScheduler).SetSuppressExceptions();
 
                 IndexerThread[] threads = new IndexerThread[NUM_THREADS];
 
@@ -314,7 +309,7 @@
 
         // Runs test, with multiple threads, using the specific
         // failure to trigger an IOException
-        public virtual void TestMultipleThreadsFailure(Func<IConcurrentMergeScheduler> newScheduler, Failure failure)
+        public virtual void TestMultipleThreadsFailure(Failure failure)
         {
             int NUM_THREADS = 3;
 
@@ -325,16 +320,13 @@
                     Console.WriteLine("TEST: iter=" + iter);
                 }
                 MockDirectoryWrapper dir = NewMockDirectory();
-                var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
-                                .SetMaxBufferedDocs(2)
-                                .SetMergeScheduler(newScheduler())
-                                .SetMergePolicy(NewLogMergePolicy(4));
-                IndexWriter writer = new IndexWriter(dir, config);
-                var scheduler = config.mergeScheduler as IConcurrentMergeScheduler;
-                if (scheduler != null)
-                {
-                    scheduler.SetSuppressExceptions();
-                }
+                IndexWriter writer = new IndexWriter(
+                    dir,
+                    NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+                        .SetMaxBufferedDocs(2)
+                        .SetMergeScheduler(new ConcurrentMergeScheduler())
+                        .SetMergePolicy(NewLogMergePolicy(4)));
+                ((IConcurrentMergeScheduler)writer.Config.MergeScheduler).SetSuppressExceptions();
 
                 IndexerThread[] threads = new IndexerThread[NUM_THREADS];
 
@@ -396,11 +388,11 @@
 
         // Runs test, with one thread, using the specific failure
         // to trigger an IOException
-        public virtual void TestSingleThreadFailure(Func<IConcurrentMergeScheduler> newScheduler, Failure failure)
+        public virtual void TestSingleThreadFailure(Failure failure)
         {
             MockDirectoryWrapper dir = NewMockDirectory();
 
-            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergeScheduler(newScheduler()));
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergeScheduler(new ConcurrentMergeScheduler()));
             Document doc = new Document();
             FieldType customType = new FieldType(TextField.TYPE_STORED);
             customType.StoreTermVectors = true;
@@ -475,33 +467,33 @@
         // LUCENE-1130: make sure initial IOException, and then 2nd
         // IOException during rollback(), is OK:
         [Test]
-        public virtual void TestIOExceptionDuringAbort([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestIOExceptionDuringAbort()
         {
-            TestSingleThreadFailure(newScheduler, new FailOnlyOnAbortOrFlush(false));
+            TestSingleThreadFailure(new FailOnlyOnAbortOrFlush(false));
         }
 
         // LUCENE-1130: make sure initial IOException, and then 2nd
         // IOException during rollback(), is OK:
         [Test]
-        public virtual void TestIOExceptionDuringAbortOnlyOnce([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestIOExceptionDuringAbortOnlyOnce()
         {
-            TestSingleThreadFailure(newScheduler, new FailOnlyOnAbortOrFlush(true));
+            TestSingleThreadFailure(new FailOnlyOnAbortOrFlush(true));
         }
 
         // LUCENE-1130: make sure initial IOException, and then 2nd
         // IOException during rollback(), with multiple threads, is OK:
         [Test]
-        public virtual void TestIOExceptionDuringAbortWithThreads([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestIOExceptionDuringAbortWithThreads()
         {
-            TestMultipleThreadsFailure(newScheduler, new FailOnlyOnAbortOrFlush(false));
+            TestMultipleThreadsFailure(new FailOnlyOnAbortOrFlush(false));
         }
 
         // LUCENE-1130: make sure initial IOException, and then 2nd
         // IOException during rollback(), with multiple threads, is OK:
         [Test]
-        public virtual void TestIOExceptionDuringAbortWithThreadsOnlyOnce([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestIOExceptionDuringAbortWithThreadsOnlyOnce()
         {
-            TestMultipleThreadsFailure(newScheduler, new FailOnlyOnAbortOrFlush(true));
+            TestMultipleThreadsFailure(new FailOnlyOnAbortOrFlush(true));
         }
 
         // Throws IOException during DocumentsWriter.writeSegment
@@ -536,30 +528,30 @@
 
         // LUCENE-1130: test IOException in writeSegment
         [Test]
-        public virtual void TestIOExceptionDuringWriteSegment([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestIOExceptionDuringWriteSegment()
         {
-            TestSingleThreadFailure(newScheduler, new FailOnlyInWriteSegment(false));
+            TestSingleThreadFailure(new FailOnlyInWriteSegment(false));
         }
 
         // LUCENE-1130: test IOException in writeSegment
         [Test]
-        public virtual void TestIOExceptionDuringWriteSegmentOnlyOnce([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestIOExceptionDuringWriteSegmentOnlyOnce()
         {
-            TestSingleThreadFailure(newScheduler, new FailOnlyInWriteSegment(true));
+            TestSingleThreadFailure(new FailOnlyInWriteSegment(true));
         }
 
         // LUCENE-1130: test IOException in writeSegment, with threads
         [Test]
-        public virtual void TestIOExceptionDuringWriteSegmentWithThreads([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestIOExceptionDuringWriteSegmentWithThreads()
         {
-            TestMultipleThreadsFailure(newScheduler, new FailOnlyInWriteSegment(false));
+            TestMultipleThreadsFailure(new FailOnlyInWriteSegment(false));
         }
 
         // LUCENE-1130: test IOException in writeSegment, with threads
         [Test]
-        public virtual void TestIOExceptionDuringWriteSegmentWithThreadsOnlyOnce([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestIOExceptionDuringWriteSegmentWithThreadsOnlyOnce()
         {
-            TestMultipleThreadsFailure(newScheduler, new FailOnlyInWriteSegment(true));
+            TestMultipleThreadsFailure(new FailOnlyInWriteSegment(true));
         }
 
         //  LUCENE-3365: Test adding two documents with the same field from two different IndexWriters
@@ -691,7 +683,7 @@
             ReentrantLock commitLock = new ReentrantLock();
             for (int threadID = 0; threadID < threadCount; threadID++)
             {
-                threads[threadID] = new ThreadAnonymousInnerClassHelper(this, d, writerRef, docs, iters, failed, rollbackLock, commitLock);
+                threads[threadID] = new ThreadAnonymousClass(this, d, writerRef, docs, iters, failed, rollbackLock, commitLock);
                 threads[threadID].Start();
             }
 
@@ -703,7 +695,7 @@
                 } 
                 catch (Exception e)
                 {
-                    Console.WriteLine("EXCEPTION in ThreadAnonymousInnerClassHelper: " + Environment.NewLine + e);
+                    Console.WriteLine("EXCEPTION in ThreadAnonymousClass: " + Environment.NewLine + e);
                 }
             }
 
@@ -712,7 +704,7 @@
             d.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
             private readonly TestIndexWriterWithThreads outerInstance;
@@ -726,7 +718,7 @@
             private readonly ReentrantLock rollbackLock;
             private readonly ReentrantLock commitLock;
 
-            public ThreadAnonymousInnerClassHelper(TestIndexWriterWithThreads outerInstance, BaseDirectoryWrapper d, AtomicReference<IndexWriter> writerRef, LineFileDocs docs, int iters, AtomicBoolean failed, ReentrantLock rollbackLock, ReentrantLock commitLock)
+            public ThreadAnonymousClass(TestIndexWriterWithThreads outerInstance, BaseDirectoryWrapper d, AtomicReference<IndexWriter> writerRef, LineFileDocs docs, int iters, AtomicBoolean failed, ReentrantLock rollbackLock, ReentrantLock commitLock)
             {
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                 this.outerInstance = outerInstance;
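
Related to the scheduler change, the hunks in this file replace the old soft cast and null check on `config.mergeScheduler` with a direct cast on `writer.Config.MergeScheduler`, which is safe now that the scheduler is always a `ConcurrentMergeScheduler`. For reference, a small defensive variant is sketched below (not part of the patch), useful only if other scheduler implementations were ever in play again.

```csharp
using Lucene.Net.Index;

internal static class SuppressMergeExceptionsExample
{
    // Suppresses merge-thread exceptions only when the configured scheduler
    // actually is a concurrent one; the tests above can cast unconditionally.
    public static void TrySuppressExceptions(IndexWriter writer)
    {
        if (writer.Config.MergeScheduler is IConcurrentMergeScheduler cms)
        {
            cms.SetSuppressExceptions();
        }
    }
}
```
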
diff --git a/src/Lucene.Net.Tests/Index/TestIndexableField.cs b/src/Lucene.Net.Tests/Index/TestIndexableField.cs
index 06b39ba..588c308 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexableField.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexableField.cs
@@ -55,14 +55,14 @@
 
             public MyField()
             {
-                fieldType = new IndexableFieldTypeAnonymousInnerClassHelper(this);
+                fieldType = new IndexableFieldTypeAnonymousClass(this);
             }
 
-            private class IndexableFieldTypeAnonymousInnerClassHelper : IIndexableFieldType
+            private class IndexableFieldTypeAnonymousClass : IIndexableFieldType
             {
                 private MyField outerInstance;
 
-                public IndexableFieldTypeAnonymousInnerClassHelper(MyField outerInstance)
+                public IndexableFieldTypeAnonymousClass(MyField outerInstance)
                 {
                     this.outerInstance = outerInstance;
                 }
@@ -261,7 +261,7 @@
                 int finalBaseCount = baseCount;
                 baseCount += fieldCount - 1;
 
-                w.AddDocument(new IterableAnonymousInnerClassHelper(this, fieldCount, finalDocCount, finalBaseCount));
+                w.AddDocument(new IterableAnonymousClass(this, fieldCount, finalDocCount, finalBaseCount));
             }
 
             IndexReader r = w.GetReader();
@@ -379,7 +379,7 @@
             dir.Dispose();
         }
 
-        private class IterableAnonymousInnerClassHelper : IEnumerable<IIndexableField>
+        private class IterableAnonymousClass : IEnumerable<IIndexableField>
         {
             private readonly TestIndexableField outerInstance;
 
@@ -387,7 +387,7 @@
             private int finalDocCount;
             private int finalBaseCount;
 
-            public IterableAnonymousInnerClassHelper(TestIndexableField outerInstance, int fieldCount, int finalDocCount, int finalBaseCount)
+            public IterableAnonymousClass(TestIndexableField outerInstance, int fieldCount, int finalDocCount, int finalBaseCount)
             {
                 this.outerInstance = outerInstance;
                 this.fieldCount = fieldCount;
@@ -397,7 +397,7 @@
 
             public virtual IEnumerator<IIndexableField> GetEnumerator()
             {
-                return new IteratorAnonymousInnerClassHelper(this, outerInstance);
+                return new IteratorAnonymousClass(this, outerInstance);
             }
 
             System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
@@ -405,12 +405,12 @@
                 return GetEnumerator();
             }
 
-            private class IteratorAnonymousInnerClassHelper : IEnumerator<IIndexableField>
+            private class IteratorAnonymousClass : IEnumerator<IIndexableField>
             {
-                private readonly IterableAnonymousInnerClassHelper outerInstance;
+                private readonly IterableAnonymousClass outerInstance;
                 private readonly TestIndexableField outerTextIndexableField;
 
-                public IteratorAnonymousInnerClassHelper(IterableAnonymousInnerClassHelper outerInstance, TestIndexableField outerTextIndexableField)
+                public IteratorAnonymousClass(IterableAnonymousClass outerInstance, TestIndexableField outerTextIndexableField)
                 {
                     this.outerInstance = outerInstance;
                     this.outerTextIndexableField = outerTextIndexableField;
diff --git a/src/Lucene.Net.Tests/Index/TestMixedDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestMixedDocValuesUpdates.cs
index e6e5d59..61a7417 100644
--- a/src/Lucene.Net.Tests/Index/TestMixedDocValuesUpdates.cs
+++ b/src/Lucene.Net.Tests/Index/TestMixedDocValuesUpdates.cs
@@ -278,7 +278,7 @@
             {
                 string f = "f" + i;
                 string cf = "cf" + i;
-                threads[i] = new ThreadAnonymousInnerClassHelper(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf);
+                threads[i] = new ThreadAnonymousClass(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf);
             }
 
             foreach (ThreadJob t in threads)
@@ -323,7 +323,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestMixedDocValuesUpdates outerInstance;
 
@@ -334,7 +334,7 @@
             private readonly string f;
             private readonly string cf;
 
-            public ThreadAnonymousInnerClassHelper(TestMixedDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf)
+            public ThreadAnonymousClass(TestMixedDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf)
                 : base(str)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Tests/Index/TestNeverDelete.cs b/src/Lucene.Net.Tests/Index/TestNeverDelete.cs
index 8842b34..41e4d41 100644
--- a/src/Lucene.Net.Tests/Index/TestNeverDelete.cs
+++ b/src/Lucene.Net.Tests/Index/TestNeverDelete.cs
@@ -64,7 +64,7 @@
             long stopTime = Environment.TickCount + AtLeast(1000);
             for (int x = 0; x < indexThreads.Length; x++)
             {
-                indexThreads[x] = new ThreadAnonymousInnerClassHelper(w, stopTime, NewStringField, NewTextField);
+                indexThreads[x] = new ThreadAnonymousClass(w, stopTime, NewStringField, NewTextField);
                 indexThreads[x].Name = "Thread " + x;
                 indexThreads[x].Start();
             }
@@ -105,7 +105,7 @@
             System.IO.Directory.Delete(tmpDir.FullName, true);
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly Func<string, string, Field.Store, Field> newStringField;
             private readonly Func<string, string, Field.Store, Field> newTextField;
@@ -123,7 +123,7 @@
             /// Passed in because <see cref="LuceneTestCase.NewTextField(string, string, Field.Store)"/>
             /// is no longer static
             /// </param>
-            public ThreadAnonymousInnerClassHelper(RandomIndexWriter w, long stopTime, 
+            public ThreadAnonymousClass(RandomIndexWriter w, long stopTime, 
                 Func<string, string, Field.Store, Field> newStringField, Func<string, string, Field.Store, Field> newTextField)
             {
                 this.w = w;
diff --git a/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
index 7c51371..afd0ffc 100644
--- a/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
+++ b/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
@@ -623,7 +623,7 @@
         {
             Directory dir = NewDirectory();
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
-            conf.SetCodec(new Lucene46CodecAnonymousInnerClassHelper(this));
+            conf.SetCodec(new Lucene46CodecAnonymousClass(this));
             IndexWriter writer = new IndexWriter(dir, conf);
 
             Document doc = new Document();
@@ -654,11 +654,11 @@
             dir.Dispose();
         }
 
-        private class Lucene46CodecAnonymousInnerClassHelper : Lucene46Codec
+        private class Lucene46CodecAnonymousClass : Lucene46Codec
         {
             private readonly TestNumericDocValuesUpdates outerInstance;
 
-            public Lucene46CodecAnonymousInnerClassHelper(TestNumericDocValuesUpdates outerInstance)
+            public Lucene46CodecAnonymousClass(TestNumericDocValuesUpdates outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -1161,7 +1161,7 @@
             {
                 string f = "f" + i;
                 string cf = "cf" + i;
-                threads[i] = new ThreadAnonymousInnerClassHelper(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf);
+                threads[i] = new ThreadAnonymousClass(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf);
             }
 
             foreach (ThreadJob t in threads)
@@ -1200,7 +1200,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestNumericDocValuesUpdates outerInstance;
 
@@ -1211,7 +1211,7 @@
             private readonly string f;
             private readonly string cf;
 
-            public ThreadAnonymousInnerClassHelper(TestNumericDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf)
+            public ThreadAnonymousClass(TestNumericDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf)
                 : base(str)
             {
                 this.outerInstance = outerInstance;
@@ -1374,7 +1374,7 @@
             Directory dir = NewDirectory();
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
             conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // disable merges to simplify test assertions.
-            conf.SetCodec(new Lucene46CodecAnonymousInnerClassHelper2(this));
+            conf.SetCodec(new Lucene46CodecAnonymousClass2(this));
             IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
             Document doc = new Document();
             doc.Add(new StringField("id", "d0", Store.NO));
@@ -1384,7 +1384,7 @@
             writer.Dispose();
 
             // change format
-            conf.SetCodec(new Lucene46CodecAnonymousInnerClassHelper3(this));
+            conf.SetCodec(new Lucene46CodecAnonymousClass3(this));
             writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
             doc = new Document();
             doc.Add(new StringField("id", "d1", Store.NO));
@@ -1406,11 +1406,11 @@
             dir.Dispose();
         }
 
-        private class Lucene46CodecAnonymousInnerClassHelper2 : Lucene46Codec
+        private class Lucene46CodecAnonymousClass2 : Lucene46Codec
         {
             private readonly TestNumericDocValuesUpdates outerInstance;
 
-            public Lucene46CodecAnonymousInnerClassHelper2(TestNumericDocValuesUpdates outerInstance)
+            public Lucene46CodecAnonymousClass2(TestNumericDocValuesUpdates outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -1421,11 +1421,11 @@
             }
         }
 
-        private class Lucene46CodecAnonymousInnerClassHelper3 : Lucene46Codec
+        private class Lucene46CodecAnonymousClass3 : Lucene46Codec
         {
             private readonly TestNumericDocValuesUpdates outerInstance;
 
-            public Lucene46CodecAnonymousInnerClassHelper3(TestNumericDocValuesUpdates outerInstance)
+            public Lucene46CodecAnonymousClass3(TestNumericDocValuesUpdates outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Index/TestOmitTf.cs b/src/Lucene.Net.Tests/Index/TestOmitTf.cs
index 791284e..2fb3e0e 100644
--- a/src/Lucene.Net.Tests/Index/TestOmitTf.cs
+++ b/src/Lucene.Net.Tests/Index/TestOmitTf.cs
@@ -374,34 +374,34 @@
                 } // else OK because positions are not indexed
             }
 
-            searcher.Search(q1, new CountingHitCollectorAnonymousInnerClassHelper(this));
+            searcher.Search(q1, new CountingHitCollectorAnonymousClass(this));
             //System.out.println(CountingHitCollector.getCount());
 
-            searcher.Search(q2, new CountingHitCollectorAnonymousInnerClassHelper2(this));
+            searcher.Search(q2, new CountingHitCollectorAnonymousClass2(this));
             //System.out.println(CountingHitCollector.getCount());
 
-            searcher.Search(q3, new CountingHitCollectorAnonymousInnerClassHelper3(this));
+            searcher.Search(q3, new CountingHitCollectorAnonymousClass3(this));
             //System.out.println(CountingHitCollector.getCount());
 
-            searcher.Search(q4, new CountingHitCollectorAnonymousInnerClassHelper4(this));
+            searcher.Search(q4, new CountingHitCollectorAnonymousClass4(this));
             //System.out.println(CountingHitCollector.getCount());
 
             BooleanQuery bq = new BooleanQuery();
             bq.Add(q1, Occur.MUST);
             bq.Add(q4, Occur.MUST);
 
-            searcher.Search(bq, new CountingHitCollectorAnonymousInnerClassHelper5(this));
+            searcher.Search(bq, new CountingHitCollectorAnonymousClass5(this));
             Assert.AreEqual(15, CountingHitCollector.Count);
 
             reader.Dispose();
             dir.Dispose();
         }
 
-        private class CountingHitCollectorAnonymousInnerClassHelper : CountingHitCollector
+        private class CountingHitCollectorAnonymousClass : CountingHitCollector
         {
             private readonly TestOmitTf outerInstance;
 
-            public CountingHitCollectorAnonymousInnerClassHelper(TestOmitTf outerInstance)
+            public CountingHitCollectorAnonymousClass(TestOmitTf outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -422,11 +422,11 @@
             }
         }
 
-        private class CountingHitCollectorAnonymousInnerClassHelper2 : CountingHitCollector
+        private class CountingHitCollectorAnonymousClass2 : CountingHitCollector
         {
             private readonly TestOmitTf outerInstance;
 
-            public CountingHitCollectorAnonymousInnerClassHelper2(TestOmitTf outerInstance)
+            public CountingHitCollectorAnonymousClass2(TestOmitTf outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -447,11 +447,11 @@
             }
         }
 
-        private class CountingHitCollectorAnonymousInnerClassHelper3 : CountingHitCollector
+        private class CountingHitCollectorAnonymousClass3 : CountingHitCollector
         {
             private readonly TestOmitTf outerInstance;
 
-            public CountingHitCollectorAnonymousInnerClassHelper3(TestOmitTf outerInstance)
+            public CountingHitCollectorAnonymousClass3(TestOmitTf outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -473,11 +473,11 @@
             }
         }
 
-        private class CountingHitCollectorAnonymousInnerClassHelper4 : CountingHitCollector
+        private class CountingHitCollectorAnonymousClass4 : CountingHitCollector
         {
             private readonly TestOmitTf outerInstance;
 
-            public CountingHitCollectorAnonymousInnerClassHelper4(TestOmitTf outerInstance)
+            public CountingHitCollectorAnonymousClass4(TestOmitTf outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -499,11 +499,11 @@
             }
         }
 
-        private class CountingHitCollectorAnonymousInnerClassHelper5 : CountingHitCollector
+        private class CountingHitCollectorAnonymousClass5 : CountingHitCollector
         {
             private readonly TestOmitTf outerInstance;
 
-            public CountingHitCollectorAnonymousInnerClassHelper5(TestOmitTf outerInstance)
+            public CountingHitCollectorAnonymousClass5(TestOmitTf outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Index/TestParallelCompositeReader.cs b/src/Lucene.Net.Tests/Index/TestParallelCompositeReader.cs
index 83ae254..f11c545 100644
--- a/src/Lucene.Net.Tests/Index/TestParallelCompositeReader.cs
+++ b/src/Lucene.Net.Tests/Index/TestParallelCompositeReader.cs
@@ -160,7 +160,7 @@
 
             foreach (AtomicReaderContext cxt in pr.Leaves)
             {
-                cxt.Reader.AddReaderClosedListener(new ReaderClosedListenerAnonymousInnerClassHelper(this, listenerClosedCount));
+                cxt.Reader.AddReaderClosedListener(new ReaderClosedListenerAnonymousClass(this, listenerClosedCount));
             }
             pr.Dispose();
             ir1.Dispose();
@@ -168,13 +168,13 @@
             dir1.Dispose();
         }
 
-        private class ReaderClosedListenerAnonymousInnerClassHelper : IReaderClosedListener
+        private class ReaderClosedListenerAnonymousClass : IReaderClosedListener
         {
             private readonly TestParallelCompositeReader outerInstance;
 
             private readonly int[] listenerClosedCount;
 
-            public ReaderClosedListenerAnonymousInnerClassHelper(TestParallelCompositeReader outerInstance, int[] listenerClosedCount)
+            public ReaderClosedListenerAnonymousClass(TestParallelCompositeReader outerInstance, int[] listenerClosedCount)
             {
                 this.outerInstance = outerInstance;
                 this.listenerClosedCount = listenerClosedCount;
@@ -202,20 +202,20 @@
 
             foreach (AtomicReaderContext cxt in pr.Leaves)
             {
-                cxt.Reader.AddReaderClosedListener(new ReaderClosedListenerAnonymousInnerClassHelper2(this, listenerClosedCount));
+                cxt.Reader.AddReaderClosedListener(new ReaderClosedListenerAnonymousClass2(this, listenerClosedCount));
             }
             pr.Dispose();
             Assert.AreEqual(3, listenerClosedCount[0]);
             dir1.Dispose();
         }
 
-        private class ReaderClosedListenerAnonymousInnerClassHelper2 : IReaderClosedListener
+        private class ReaderClosedListenerAnonymousClass2 : IReaderClosedListener
         {
             private readonly TestParallelCompositeReader outerInstance;
 
             private readonly int[] listenerClosedCount;
 
-            public ReaderClosedListenerAnonymousInnerClassHelper2(TestParallelCompositeReader outerInstance, int[] listenerClosedCount)
+            public ReaderClosedListenerAnonymousClass2(TestParallelCompositeReader outerInstance, int[] listenerClosedCount)
             {
                 this.outerInstance = outerInstance;
                 this.listenerClosedCount = listenerClosedCount;
@@ -491,7 +491,7 @@
 
             string s = pr.ToString();
 
-            Assert.IsTrue(s.StartsWith("ParallelCompositeReader(ParallelCompositeReaderAnonymousInnerClassHelper(ParallelAtomicReader(", StringComparison.Ordinal), "toString incorrect: " + s);
+            Assert.IsTrue(s.StartsWith("ParallelCompositeReader(ParallelCompositeReaderAnonymousClass(ParallelAtomicReader(", StringComparison.Ordinal), "toString incorrect: " + s);
 
             pr.Dispose();
             dir1.Dispose();
diff --git a/src/Lucene.Net.Tests/Index/TestPayloads.cs b/src/Lucene.Net.Tests/Index/TestPayloads.cs
index bc21cd9..5e36a16 100644
--- a/src/Lucene.Net.Tests/Index/TestPayloads.cs
+++ b/src/Lucene.Net.Tests/Index/TestPayloads.cs
@@ -486,7 +486,7 @@
             ThreadJob[] ingesters = new ThreadJob[numThreads];
             for (int i = 0; i < numThreads; i++)
             {
-                ingesters[i] = new ThreadAnonymousInnerClassHelper(this, numDocs, pool, writer, field);
+                ingesters[i] = new ThreadAnonymousClass(this, numDocs, pool, writer, field);
                 ingesters[i].Start();
             }
 
@@ -519,7 +519,7 @@
             Assert.AreEqual(pool.Count, numThreads);
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestPayloads outerInstance;
 
@@ -528,7 +528,7 @@
             private readonly IndexWriter writer;
             private readonly string field;
 
-            public ThreadAnonymousInnerClassHelper(TestPayloads outerInstance, int numDocs, ByteArrayPool pool, IndexWriter writer, string field)
+            public ThreadAnonymousClass(TestPayloads outerInstance, int numDocs, ByteArrayPool pool, IndexWriter writer, string field)
             {
                 this.outerInstance = outerInstance;
                 this.numDocs = numDocs;
diff --git a/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs b/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs
index 1808768..21ad338 100644
--- a/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs
+++ b/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs
@@ -126,7 +126,7 @@
         public virtual void TestExceptionDuringSave()
         {
             MockDirectoryWrapper dir = NewMockDirectory();
-            dir.FailOn(new FailureAnonymousInnerClassHelper(this, dir));
+            dir.FailOn(new FailureAnonymousClass(this, dir));
             IndexWriter writer = new IndexWriter(dir, GetConfig(Random, new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE_OR_APPEND)));
             writer.AddDocument(new Document());
             writer.Commit();
@@ -153,13 +153,13 @@
             dir.Dispose();
         }
 
-        private class FailureAnonymousInnerClassHelper : Failure
+        private class FailureAnonymousClass : Failure
         {
             private readonly TestPersistentSnapshotDeletionPolicy outerInstance;
 
             private MockDirectoryWrapper dir;
 
-            public FailureAnonymousInnerClassHelper(TestPersistentSnapshotDeletionPolicy outerInstance, MockDirectoryWrapper dir)
+            public FailureAnonymousClass(TestPersistentSnapshotDeletionPolicy outerInstance, MockDirectoryWrapper dir)
             {
                 this.outerInstance = outerInstance;
                 this.dir = dir;
diff --git a/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs b/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs
index 32f2ab1..21f490a 100644
--- a/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs
+++ b/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs
@@ -145,7 +145,7 @@
             dp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
             writer.Commit();
 
-            ThreadJob t = new ThreadAnonymousInnerClassHelper(stopTime, writer, NewField);
+            ThreadJob t = new ThreadAnonymousClass(stopTime, writer, NewField);
 
             t.Start();
 
@@ -176,7 +176,7 @@
             TestIndexWriter.AssertNoUnreferencedFiles(dir, "some files were not deleted but should have been");
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly long stopTime;
             private readonly IndexWriter writer;
@@ -187,7 +187,7 @@
             /// Passed in because <see cref="LuceneTestCase.NewField(string, string, FieldType)"/>
             /// is no longer static. 
             /// </param>
-            public ThreadAnonymousInnerClassHelper(long stopTime, IndexWriter writer, Func<string, string, FieldType, Field> newFieldFunc)
+            public ThreadAnonymousClass(long stopTime, IndexWriter writer, Func<string, string, FieldType, Field> newFieldFunc)
             {
                 this.stopTime = stopTime;
                 this.writer = writer;
@@ -360,7 +360,7 @@
             for (int i = 0; i < threads.Length; i++)
             {
                 int finalI = i;
-                threads[i] = new ThreadAnonymousInnerClassHelper2(this, writer, sdp, snapshots, finalI);
+                threads[i] = new ThreadAnonymousClass2(this, writer, sdp, snapshots, finalI);
                 threads[i].Name = "t" + i;
             }
 
@@ -388,7 +388,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper2 : ThreadJob
+        private class ThreadAnonymousClass2 : ThreadJob
         {
             private readonly TestSnapshotDeletionPolicy outerInstance;
 
@@ -397,7 +397,7 @@
             private readonly IndexCommit[] snapshots;
             private readonly int finalI;
 
-            public ThreadAnonymousInnerClassHelper2(TestSnapshotDeletionPolicy outerInstance, IndexWriter writer, SnapshotDeletionPolicy sdp, IndexCommit[] snapshots, int finalI)
+            public ThreadAnonymousClass2(TestSnapshotDeletionPolicy outerInstance, IndexWriter writer, SnapshotDeletionPolicy sdp, IndexCommit[] snapshots, int finalI)
             {
                 this.outerInstance = outerInstance;
                 this.writer = writer;
diff --git a/src/Lucene.Net.Tests/Index/TestStressIndexing.cs b/src/Lucene.Net.Tests/Index/TestStressIndexing.cs
index fb35c63..fb31695 100644
--- a/src/Lucene.Net.Tests/Index/TestStressIndexing.cs
+++ b/src/Lucene.Net.Tests/Index/TestStressIndexing.cs
@@ -1,4 +1,4 @@
-using J2N.Threading;
+using J2N.Threading;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Search;
@@ -227,7 +227,7 @@
 
         [Test]
         [Slow]
-        public virtual void TestStressIndexAndSearching([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestStressIndexAndSearching()
         {
             Directory directory = NewDirectory();
             MockDirectoryWrapper wrapper = directory as MockDirectoryWrapper;
@@ -236,7 +236,7 @@
                 wrapper.AssertNoUnreferencedFilesOnClose = true;
             }
 
-            RunStressTest(directory, newScheduler());
+            RunStressTest(directory, new ConcurrentMergeScheduler());
             directory.Dispose();
         }
     }
diff --git a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs
index b8dd2c2..4624df4 100644
--- a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs
+++ b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs
@@ -1,4 +1,5 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
+using J2N.Numerics;
 using J2N.Text;
 using J2N.Threading;
 using Lucene.Net.Analysis.TokenAttributes;
@@ -657,7 +658,7 @@
                 // now compare
                 for (int i = 0; i < len1; i++)
                 {
-                    Assert.AreEqual(info1[i], info2[i], "i=" + i + " len=" + len1 + " d1=" + ((long)((ulong)info1[i] >> 32)) + " f1=" + (info1[i] & int.MaxValue) + " d2=" + ((long)((ulong)info2[i] >> 32)) + " f2=" + (info2[i] & int.MaxValue) + " field=" + field1 + " term=" + term1.Utf8ToString());
+                    Assert.AreEqual(info1[i], info2[i], "i=" + i + " len=" + len1 + " d1=" + (info1[i].TripleShift(32)) + " f1=" + (info1[i] & int.MaxValue) + " d2=" + (info2[i].TripleShift(32)) + " f2=" + (info2[i] & int.MaxValue) + " field=" + field1 + " term=" + term1.Utf8ToString());
                 }
             }
         }
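
The only non-rename change in this file swaps the hand-rolled unsigned right shift, `(long)((ulong)x >> 32)`, for `TripleShift(32)` from `J2N.Numerics`, the port's equivalent of Java's `>>>` operator. A tiny sketch of why the two spellings agree (assuming only the `J2N.Numerics` extension methods; the value is illustrative):

```csharp
using System;
using J2N.Numerics;   // TripleShift extension methods

long packed = -1L;    // a value where arithmetic and logical shifts differ

long viaCasts = (long)((ulong)packed >> 32);  // the old spelling in the test
long viaJ2N = packed.TripleShift(32);         // the new spelling

Console.WriteLine(viaCasts);  // 4294967295
Console.WriteLine(viaJ2N);    // 4294967295 as well; both are logical shifts
```
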
diff --git a/src/Lucene.Net.Tests/Index/TestStressNRT.cs b/src/Lucene.Net.Tests/Index/TestStressNRT.cs
index b552cdb..6590b7b 100644
--- a/src/Lucene.Net.Tests/Index/TestStressNRT.cs
+++ b/src/Lucene.Net.Tests/Index/TestStressNRT.cs
@@ -122,14 +122,14 @@
 
             for (int i = 0; i < nWriteThreads; i++)
             {
-                ThreadJob thread = new ThreadAnonymousInnerClassHelper(this, "WRITER" + i, commitPercent, softCommitPercent, deletePercent, deleteByQueryPercent, ndocs, maxConcurrentCommits, tombstones, operations, storedOnlyType, numCommitting, writer);
+                ThreadJob thread = new ThreadAnonymousClass(this, "WRITER" + i, commitPercent, softCommitPercent, deletePercent, deleteByQueryPercent, ndocs, maxConcurrentCommits, tombstones, operations, storedOnlyType, numCommitting, writer);
 
                 threads.Add(thread);
             }
 
             for (int i = 0; i < nReadThreads; i++)
             {
-                ThreadJob thread = new ThreadAnonymousInnerClassHelper2(this, "READER" + i, ndocs, tombstones, operations);
+                ThreadJob thread = new ThreadAnonymousClass2(this, "READER" + i, ndocs, tombstones, operations);
 
                 threads.Add(thread);
             }
@@ -153,7 +153,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestStressNRT outerInstance;
 
@@ -169,7 +169,7 @@
             private readonly AtomicInt32 numCommitting;
             private readonly RandomIndexWriter writer;
 
-            public ThreadAnonymousInnerClassHelper(TestStressNRT outerInstance, string str, int commitPercent, int softCommitPercent, int deletePercent, int deleteByQueryPercent, int ndocs, int maxConcurrentCommits, bool tombstones, AtomicInt64 operations, FieldType storedOnlyType, AtomicInt32 numCommitting, RandomIndexWriter writer)
+            public ThreadAnonymousClass(TestStressNRT outerInstance, string str, int commitPercent, int softCommitPercent, int deletePercent, int deleteByQueryPercent, int ndocs, int maxConcurrentCommits, bool tombstones, AtomicInt64 operations, FieldType storedOnlyType, AtomicInt32 numCommitting, RandomIndexWriter writer)
                 : base(str)
             {
                 this.outerInstance = outerInstance;
@@ -406,7 +406,7 @@
             }
         }
 
-        private class ThreadAnonymousInnerClassHelper2 : ThreadJob
+        private class ThreadAnonymousClass2 : ThreadJob
         {
             private readonly TestStressNRT outerInstance;
 
@@ -414,7 +414,7 @@
             private readonly bool tombstones;
             private readonly AtomicInt64 operations;
 
-            public ThreadAnonymousInnerClassHelper2(TestStressNRT outerInstance, string str, int ndocs, bool tombstones, AtomicInt64 operations)
+            public ThreadAnonymousClass2(TestStressNRT outerInstance, string str, int ndocs, bool tombstones, AtomicInt64 operations)
                 : base(str)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs b/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs
index 0701821..3401e81 100644
--- a/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs
+++ b/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs
@@ -86,7 +86,7 @@
     {
         internal virtual void AddDocs(Random random, Directory dir, int ndocs, string field, string val, int maxTF, float percentDocs)
         {
-            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(random, val, maxTF, percentDocs);
+            Analyzer analyzer = new AnalyzerAnonymousClass(random, val, maxTF, percentDocs);
 
             Document doc = new Document();
 
@@ -102,14 +102,14 @@
             writer.Dispose();
         }
 
-        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        private class AnalyzerAnonymousClass : Analyzer
         {
             private readonly Random random;
             private readonly string val;
             private readonly int maxTf;
             private readonly float percentDocs;
 
-            public AnalyzerAnonymousInnerClassHelper(Random random, string val, int maxTF, float percentDocs)
+            public AnalyzerAnonymousClass(Random random, string val, int maxTF, float percentDocs)
             {
                 this.random = random;
                 this.val = val;
diff --git a/src/Lucene.Net.Tests/Index/TestThreadedForceMerge.cs b/src/Lucene.Net.Tests/Index/TestThreadedForceMerge.cs
index ba2d8d2..78b93d3 100644
--- a/src/Lucene.Net.Tests/Index/TestThreadedForceMerge.cs
+++ b/src/Lucene.Net.Tests/Index/TestThreadedForceMerge.cs
@@ -90,7 +90,7 @@
                 {
                     int iFinal = i;
                     IndexWriter writerFinal = writer;
-                    threads[i] = new ThreadAnonymousInnerClassHelper(this, iterFinal, customType, iFinal, writerFinal);
+                    threads[i] = new ThreadAnonymousClass(this, iterFinal, customType, iFinal, writerFinal);
                 }
 
                 for (int i = 0; i < NUM_THREADS; i++)
@@ -121,7 +121,7 @@
             writer.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestThreadedForceMerge outerInstance;
 
@@ -130,7 +130,7 @@
             private readonly int iFinal;
             private readonly IndexWriter writerFinal;
 
-            public ThreadAnonymousInnerClassHelper(TestThreadedForceMerge outerInstance, int iterFinal, FieldType customType, int iFinal, IndexWriter writerFinal)
+            public ThreadAnonymousClass(TestThreadedForceMerge outerInstance, int iterFinal, FieldType customType, int iFinal, IndexWriter writerFinal)
             {
                 this.outerInstance = outerInstance;
                 this.iterFinal = iterFinal;
diff --git a/src/Lucene.Net.Tests/Index/TestTransactions.cs b/src/Lucene.Net.Tests/Index/TestTransactions.cs
index de42270..cc93685 100644
--- a/src/Lucene.Net.Tests/Index/TestTransactions.cs
+++ b/src/Lucene.Net.Tests/Index/TestTransactions.cs
@@ -1,4 +1,4 @@
-using J2N.Threading;
+using J2N.Threading;
 using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Store;
@@ -114,8 +114,6 @@
         private class IndexerThread : TimedThread
         {
             private readonly TestTransactions outerInstance;
-            private Func<IConcurrentMergeScheduler> newScheduler1;
-            private Func<IConcurrentMergeScheduler> newScheduler2;
             internal Directory dir1;
             internal Directory dir2;
             internal object @lock;
@@ -123,12 +121,9 @@
 
             public IndexerThread(TestTransactions outerInstance, object @lock, 
                 Directory dir1, Directory dir2,
-                Func<IConcurrentMergeScheduler> newScheduler1, Func<IConcurrentMergeScheduler> newScheduler2,
                 TimedThread[] threads)
                 : base(threads)
             {
-                this.newScheduler1 = newScheduler1;
-                this.newScheduler2 = newScheduler2;
                 this.outerInstance = outerInstance;
                 this.@lock = @lock;
                 this.dir1 = dir1;
@@ -142,9 +137,9 @@
                     outerInstance,
 #endif
                     TEST_VERSION_CURRENT, new MockAnalyzer(Random))
-                                .SetMaxBufferedDocs(3)
-                                .SetMergeScheduler(newScheduler1())
-                                .SetMergePolicy(NewLogMergePolicy(2));
+                        .SetMaxBufferedDocs(3)
+                        .SetMergeScheduler(new ConcurrentMergeScheduler())
+                        .SetMergePolicy(NewLogMergePolicy(2));
                 IndexWriter writer1 = new IndexWriter(dir1, config);
                 ((IConcurrentMergeScheduler)writer1.Config.MergeScheduler).SetSuppressExceptions();
 
@@ -155,9 +150,9 @@
                     outerInstance,
 #endif
                     TEST_VERSION_CURRENT, new MockAnalyzer(Random))
-                                .SetMaxBufferedDocs(2)
-                                .SetMergeScheduler(newScheduler2())
-                                .SetMergePolicy(NewLogMergePolicy(3));
+                        .SetMaxBufferedDocs(2)
+                        .SetMergeScheduler(new ConcurrentMergeScheduler())
+                        .SetMergePolicy(NewLogMergePolicy(3));
                 IndexWriter writer2 = new IndexWriter(dir2, config2);
                 ((IConcurrentMergeScheduler)writer2.Config.MergeScheduler).SetSuppressExceptions();
 
@@ -291,9 +286,7 @@
         }
 
         [Test]
-        public virtual void TestTransactions_Mem(
-            [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler1,
-            [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler2)
+        public virtual void TestTransactions_Mem()
         {
             Console.WriteLine("start test");
             // we cant use non-ramdir on windows, because this test needs to double-write.
@@ -317,7 +310,7 @@
             TimedThread[] threads = new TimedThread[3];
             int numThread = 0;
 
-            IndexerThread indexerThread = new IndexerThread(this, this, dir1, dir2, newScheduler1, newScheduler2, threads);
+            IndexerThread indexerThread = new IndexerThread(this, this, dir1, dir2, threads);
 
             threads[numThread++] = indexerThread;
             indexerThread.Start();
diff --git a/src/Lucene.Net.Tests/Search/Payloads/TestPayloadExplanations.cs b/src/Lucene.Net.Tests/Search/Payloads/TestPayloadExplanations.cs
index b8f2605..08a6f8c 100644
--- a/src/Lucene.Net.Tests/Search/Payloads/TestPayloadExplanations.cs
+++ b/src/Lucene.Net.Tests/Search/Payloads/TestPayloadExplanations.cs
@@ -41,14 +41,14 @@
         public override void SetUp()
         {
             base.SetUp();
-            searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
+            searcher.Similarity = new DefaultSimilarityAnonymousClass(this);
         }
 
-        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        private class DefaultSimilarityAnonymousClass : DefaultSimilarity
         {
             private readonly TestPayloadExplanations outerInstance;
 
-            public DefaultSimilarityAnonymousInnerClassHelper(TestPayloadExplanations outerInstance)
+            public DefaultSimilarityAnonymousClass(TestPayloadExplanations outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestFieldMaskingSpanQuery.cs b/src/Lucene.Net.Tests/Search/Spans/TestFieldMaskingSpanQuery.cs
index 16301e7..7b37049 100644
--- a/src/Lucene.Net.Tests/Search/Spans/TestFieldMaskingSpanQuery.cs
+++ b/src/Lucene.Net.Tests/Search/Spans/TestFieldMaskingSpanQuery.cs
@@ -120,7 +120,7 @@
         public virtual void TestRewrite1()
         {
             // mask an anon SpanQuery class that rewrites to something else.
-            SpanQuery q = new FieldMaskingSpanQuery(new SpanTermQueryAnonymousInnerClassHelper(this, new Term("last", "sally")), "first");
+            SpanQuery q = new FieldMaskingSpanQuery(new SpanTermQueryAnonymousClass(this, new Term("last", "sally")), "first");
 
             SpanQuery qr = (SpanQuery)searcher.Rewrite(q);
 
@@ -131,11 +131,11 @@
             Assert.AreEqual(2, terms.Count);
         }
 
-        private class SpanTermQueryAnonymousInnerClassHelper : SpanTermQuery
+        private class SpanTermQueryAnonymousClass : SpanTermQuery
         {
             private readonly TestFieldMaskingSpanQuery outerInstance;
 
-            public SpanTermQueryAnonymousInnerClassHelper(TestFieldMaskingSpanQuery outerInstance, Term term)
+            public SpanTermQueryAnonymousClass(TestFieldMaskingSpanQuery outerInstance, Term term)
                 : base(term)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestSpans.cs b/src/Lucene.Net.Tests/Search/Spans/TestSpans.cs
index c83a054..7beb2c2 100644
--- a/src/Lucene.Net.Tests/Search/Spans/TestSpans.cs
+++ b/src/Lucene.Net.Tests/Search/Spans/TestSpans.cs
@@ -417,7 +417,7 @@
             {
                 AtomicReaderContext ctx = leaves[i];
 
-                Similarity sim = new DefaultSimilarityAnonymousInnerClassHelper(this);
+                Similarity sim = new DefaultSimilarityAnonymousClass(this);
 
                 Similarity oldSim = searcher.Similarity;
                 Scorer spanScorer;
@@ -446,11 +446,11 @@
             }
         }
 
-        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        private class DefaultSimilarityAnonymousClass : DefaultSimilarity
         {
             private readonly TestSpans outerInstance;
 
-            public DefaultSimilarityAnonymousInnerClassHelper(TestSpans outerInstance)
+            public DefaultSimilarityAnonymousClass(TestSpans outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Search/TestAutomatonQuery.cs b/src/Lucene.Net.Tests/Search/TestAutomatonQuery.cs
index e3fc408..b7197cc 100644
--- a/src/Lucene.Net.Tests/Search/TestAutomatonQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestAutomatonQuery.cs
@@ -239,7 +239,7 @@
             ThreadJob[] threads = new ThreadJob[numThreads];
             for (int threadID = 0; threadID < numThreads; threadID++)
             {
-                ThreadJob thread = new ThreadAnonymousInnerClassHelper(this, queries, startingGun);
+                ThreadJob thread = new ThreadAnonymousClass(this, queries, startingGun);
                 threads[threadID] = thread;
                 thread.Start();
             }
@@ -250,14 +250,14 @@
             }
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestAutomatonQuery outerInstance;
 
             private readonly AutomatonQuery[] queries;
             private readonly CountdownEvent startingGun;
 
-            public ThreadAnonymousInnerClassHelper(TestAutomatonQuery outerInstance, AutomatonQuery[] queries, CountdownEvent startingGun)
+            public ThreadAnonymousClass(TestAutomatonQuery outerInstance, AutomatonQuery[] queries, CountdownEvent startingGun)
             {
                 this.outerInstance = outerInstance;
                 this.queries = queries;
diff --git a/src/Lucene.Net.Tests/Search/TestBoolean2.cs b/src/Lucene.Net.Tests/Search/TestBoolean2.cs
index 73dd5c6..1700852 100644
--- a/src/Lucene.Net.Tests/Search/TestBoolean2.cs
+++ b/src/Lucene.Net.Tests/Search/TestBoolean2.cs
@@ -268,7 +268,7 @@
             Similarity oldSimilarity = searcher.Similarity;
             try
             {
-                searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
+                searcher.Similarity = new DefaultSimilarityAnonymousClass(this);
                 QueriesTest(query, expDocNrs);
             }
             finally
@@ -277,11 +277,11 @@
             }
         }
 
-        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        private class DefaultSimilarityAnonymousClass : DefaultSimilarity
         {
             private readonly TestBoolean2 outerInstance;
 
-            public DefaultSimilarityAnonymousInnerClassHelper(TestBoolean2 outerInstance)
+            public DefaultSimilarityAnonymousClass(TestBoolean2 outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Search/TestBooleanMinShouldMatch.cs b/src/Lucene.Net.Tests/Search/TestBooleanMinShouldMatch.cs
index c84036f..5a0746c 100644
--- a/src/Lucene.Net.Tests/Search/TestBooleanMinShouldMatch.cs
+++ b/src/Lucene.Net.Tests/Search/TestBooleanMinShouldMatch.cs
@@ -345,7 +345,7 @@
             int maxLev = 4;
 
             // callback object to set a random setMinimumNumberShouldMatch
-            TestBoolean2.ICallback minNrCB = new CallbackAnonymousInnerClassHelper(this, field, vals);
+            TestBoolean2.ICallback minNrCB = new CallbackAnonymousClass(this, field, vals);
 
             // increase number of iterations for more complete testing
             int num = AtLeast(20);
@@ -383,14 +383,14 @@
             // System.out.println("Total hits:"+tot);
         }
 
-        private class CallbackAnonymousInnerClassHelper : TestBoolean2.ICallback
+        private class CallbackAnonymousClass : TestBoolean2.ICallback
         {
             private readonly TestBooleanMinShouldMatch outerInstance;
 
             private readonly string field;
             private readonly string[] vals;
 
-            public CallbackAnonymousInnerClassHelper(TestBooleanMinShouldMatch outerInstance, string field, string[] vals)
+            public CallbackAnonymousClass(TestBooleanMinShouldMatch outerInstance, string field, string[] vals)
             {
                 this.outerInstance = outerInstance;
                 this.field = field;
@@ -458,7 +458,7 @@
             Similarity oldSimilarity = s.Similarity;
             try
             {
-                s.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
+                s.Similarity = new DefaultSimilarityAnonymousClass(this);
                 BooleanQuery q1 = new BooleanQuery();
                 q1.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD);
                 BooleanQuery q2 = new BooleanQuery();
@@ -474,11 +474,11 @@
             }
         }
 
-        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        private class DefaultSimilarityAnonymousClass : DefaultSimilarity
         {
             private readonly TestBooleanMinShouldMatch outerInstance;
 
-            public DefaultSimilarityAnonymousInnerClassHelper(TestBooleanMinShouldMatch outerInstance)
+            public DefaultSimilarityAnonymousClass(TestBooleanMinShouldMatch outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -495,7 +495,7 @@
             Similarity oldSimilarity = s.Similarity;
             try
             {
-                s.Similarity = new DefaultSimilarityAnonymousInnerClassHelper2(this);
+                s.Similarity = new DefaultSimilarityAnonymousClass2(this);
                 BooleanQuery q1 = new BooleanQuery();
                 q1.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD);
                 BooleanQuery q2 = new BooleanQuery();
@@ -511,11 +511,11 @@
             }
         }
 
-        private class DefaultSimilarityAnonymousInnerClassHelper2 : DefaultSimilarity
+        private class DefaultSimilarityAnonymousClass2 : DefaultSimilarity
         {
             private readonly TestBooleanMinShouldMatch outerInstance;
 
-            public DefaultSimilarityAnonymousInnerClassHelper2(TestBooleanMinShouldMatch outerInstance)
+            public DefaultSimilarityAnonymousClass2(TestBooleanMinShouldMatch outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Search/TestBooleanOr.cs b/src/Lucene.Net.Tests/Search/TestBooleanOr.cs
index 278cbf1..999caef 100644
--- a/src/Lucene.Net.Tests/Search/TestBooleanOr.cs
+++ b/src/Lucene.Net.Tests/Search/TestBooleanOr.cs
@@ -208,7 +208,7 @@
 
             FixedBitSet hits = new FixedBitSet(docCount);
             AtomicInt32 end = new AtomicInt32();
-            ICollector c = new CollectorAnonymousInnerClassHelper(this, scorer, hits, end);
+            ICollector c = new CollectorAnonymousClass(this, scorer, hits, end);
 
             while (end < docCount)
             {
@@ -222,7 +222,7 @@
             dir.Dispose();
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly TestBooleanOr outerInstance;
 
@@ -230,7 +230,7 @@
             private readonly FixedBitSet hits;
             private readonly AtomicInt32 end;
 
-            public CollectorAnonymousInnerClassHelper(TestBooleanOr outerInstance, BulkScorer scorer, FixedBitSet hits, AtomicInt32 end)
+            public CollectorAnonymousClass(TestBooleanOr outerInstance, BulkScorer scorer, FixedBitSet hits, AtomicInt32 end)
             {
                 this.outerInstance = outerInstance;
                 this.scorer = scorer;
diff --git a/src/Lucene.Net.Tests/Search/TestBooleanQuery.cs b/src/Lucene.Net.Tests/Search/TestBooleanQuery.cs
index c1868f0..752fd42 100644
--- a/src/Lucene.Net.Tests/Search/TestBooleanQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestBooleanQuery.cs
@@ -403,7 +403,7 @@
             w.AddDocument(doc);
             IndexReader r = w.GetReader();
             w.Dispose();
-            IndexSearcher s = new IndexSearcherAnonymousInnerClassHelper(this, r);
+            IndexSearcher s = new IndexSearcherAnonymousClass(this, r);
             BooleanQuery bq = new BooleanQuery();
             bq.Add(new TermQuery(new Term("field", "some")), Occur.SHOULD);
             bq.Add(new TermQuery(new Term("field", "text")), Occur.SHOULD);
@@ -414,11 +414,11 @@
             dir.Dispose();
         }
 
-        private class IndexSearcherAnonymousInnerClassHelper : IndexSearcher
+        private class IndexSearcherAnonymousClass : IndexSearcher
         {
             private readonly TestBooleanQuery outerInstance;
 
-            public IndexSearcherAnonymousInnerClassHelper(TestBooleanQuery outerInstance, IndexReader r)
+            public IndexSearcherAnonymousClass(TestBooleanQuery outerInstance, IndexReader r)
                 : base(r)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs b/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs
index a257e2b..c39e321 100644
--- a/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs
+++ b/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs
@@ -99,13 +99,13 @@
             BooleanWeight weight = (BooleanWeight)(new BooleanQuery()).CreateWeight(searcher);
 
             BulkScorer[] scorers = new BulkScorer[] {
-            new BulkScorerAnonymousInnerClassHelper()
+            new BulkScorerAnonymousClass()
         };
 
             BooleanScorer bs = new BooleanScorer(weight, false, 1, scorers, Collections.EmptyList<BulkScorer>(), scorers.Length);
 
             IList<int> hits = new List<int>();
-            bs.Score(new CollectorAnonymousInnerClassHelper(this, hits));
+            bs.Score(new CollectorAnonymousClass(this, hits));
 
             Assert.AreEqual(1, hits.Count, "should have only 1 hit");
             Assert.AreEqual(3000, (int)hits[0], "hit should have been docID=3000");
@@ -113,7 +113,7 @@
             directory.Dispose();
         }
 
-        private class BulkScorerAnonymousInnerClassHelper : BulkScorer
+        private class BulkScorerAnonymousClass : BulkScorer
         {
             private int doc = -1;
 
@@ -130,13 +130,13 @@
             }
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly TestBooleanScorer outerInstance;
 
             private readonly IList<int> hits;
 
-            public CollectorAnonymousInnerClassHelper(TestBooleanScorer outerInstance, IList<int> hits)
+            public CollectorAnonymousClass(TestBooleanScorer outerInstance, IList<int> hits)
             {
                 this.outerInstance = outerInstance;
                 this.hits = hits;
@@ -189,7 +189,7 @@
             q.Add(new BooleanClause(new TermQuery(new Term("field", "33")), Occur.SHOULD));
 
             int[] count = new int[1];
-            s.Search(q, new CollectorAnonymousInnerClassHelper2(this, doc, count));
+            s.Search(q, new CollectorAnonymousClass2(this, doc, count));
 
             Assert.AreEqual(1, count[0]);
 
@@ -197,14 +197,14 @@
             d.Dispose();
         }
 
-        private class CollectorAnonymousInnerClassHelper2 : ICollector
+        private class CollectorAnonymousClass2 : ICollector
         {
             private readonly TestBooleanScorer outerInstance;
 
             private Document doc;
             private readonly int[] count;
 
-            public CollectorAnonymousInnerClassHelper2(TestBooleanScorer outerInstance, Document doc, int[] count)
+            public CollectorAnonymousClass2(TestBooleanScorer outerInstance, Document doc, int[] count)
             {
                 this.outerInstance = outerInstance;
                 this.doc = doc;
@@ -241,14 +241,14 @@
 
             public override Weight CreateWeight(IndexSearcher searcher)
             {
-                return new WeightAnonymousInnerClassHelper(this);
+                return new WeightAnonymousClass(this);
             }
 
-            private class WeightAnonymousInnerClassHelper : Weight
+            private class WeightAnonymousClass : Weight
             {
                 private readonly CrazyMustUseBulkScorerQuery outerInstance;
 
-                public WeightAnonymousInnerClassHelper(CrazyMustUseBulkScorerQuery outerInstance)
+                public WeightAnonymousClass(CrazyMustUseBulkScorerQuery outerInstance)
                 {
                     this.outerInstance = outerInstance;
                 }
@@ -276,14 +276,14 @@
 
                 public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool scoreDocsInOrder, IBits acceptDocs)
                 {
-                    return new BulkScorerAnonymousInnerClassHelper(this);
+                    return new BulkScorerAnonymousClass(this);
                 }
 
-                private class BulkScorerAnonymousInnerClassHelper : BulkScorer
+                private class BulkScorerAnonymousClass : BulkScorer
                 {
-                    private readonly WeightAnonymousInnerClassHelper outerInstance;
+                    private readonly WeightAnonymousClass outerInstance;
 
-                    public BulkScorerAnonymousInnerClassHelper(WeightAnonymousInnerClassHelper outerInstance)
+                    public BulkScorerAnonymousClass(WeightAnonymousClass outerInstance)
                     {
                         this.outerInstance = outerInstance;
                     }
diff --git a/src/Lucene.Net.Tests/Search/TestCachingCollector.cs b/src/Lucene.Net.Tests/Search/TestCachingCollector.cs
index ef26cf7..b347636 100644
--- a/src/Lucene.Net.Tests/Search/TestCachingCollector.cs
+++ b/src/Lucene.Net.Tests/Search/TestCachingCollector.cs
@@ -100,15 +100,15 @@
                 }
 
                 // now replay them
-                cc.Replay(new CollectorAnonymousInnerClassHelper(this));
+                cc.Replay(new CollectorAnonymousClass(this));
             }
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly TestCachingCollector outerInstance;
 
-            public CollectorAnonymousInnerClassHelper(TestCachingCollector outerInstance)
+            public CollectorAnonymousClass(TestCachingCollector outerInstance)
             {
                 this.outerInstance = outerInstance;
                 prevDocID = -1;
diff --git a/src/Lucene.Net.Tests/Search/TestCachingWrapperFilter.cs b/src/Lucene.Net.Tests/Search/TestCachingWrapperFilter.cs
index fe3375a..12e98eb 100644
--- a/src/Lucene.Net.Tests/Search/TestCachingWrapperFilter.cs
+++ b/src/Lucene.Net.Tests/Search/TestCachingWrapperFilter.cs
@@ -219,7 +219,7 @@
             IndexReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
             AtomicReaderContext context = (AtomicReaderContext)reader.Context;
 
-            Filter filter = new FilterAnonymousInnerClassHelper(this, context);
+            Filter filter = new FilterAnonymousClass(this, context);
             CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
 
             // the caching filter should return the empty set constant
@@ -230,13 +230,13 @@
             dir.Dispose();
         }
 
-        private class FilterAnonymousInnerClassHelper : Filter
+        private class FilterAnonymousClass : Filter
         {
             private readonly TestCachingWrapperFilter outerInstance;
 
             private AtomicReaderContext context;
 
-            public FilterAnonymousInnerClassHelper(TestCachingWrapperFilter outerInstance, AtomicReaderContext context)
+            public FilterAnonymousClass(TestCachingWrapperFilter outerInstance, AtomicReaderContext context)
             {
                 this.outerInstance = outerInstance;
                 this.context = context;
@@ -262,7 +262,7 @@
             IndexReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
             AtomicReaderContext context = (AtomicReaderContext)reader.Context;
 
-            Filter filter = new FilterAnonymousInnerClassHelper2(this, context);
+            Filter filter = new FilterAnonymousClass2(this, context);
             CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
 
             // the caching filter should return the empty set constant
@@ -272,13 +272,13 @@
             dir.Dispose();
         }
 
-        private class FilterAnonymousInnerClassHelper2 : Filter
+        private class FilterAnonymousClass2 : Filter
         {
             private readonly TestCachingWrapperFilter outerInstance;
 
             private AtomicReaderContext context;
 
-            public FilterAnonymousInnerClassHelper2(TestCachingWrapperFilter outerInstance, AtomicReaderContext context)
+            public FilterAnonymousClass2(TestCachingWrapperFilter outerInstance, AtomicReaderContext context)
             {
                 this.outerInstance = outerInstance;
                 this.context = context;
@@ -286,14 +286,14 @@
 
             public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
             {
-                return new DocIdSetAnonymousInnerClassHelper(this);
+                return new DocIdSetAnonymousClass(this);
             }
 
-            private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+            private class DocIdSetAnonymousClass : DocIdSet
             {
-                private readonly FilterAnonymousInnerClassHelper2 outerInstance;
+                private readonly FilterAnonymousClass2 outerInstance;
 
-                public DocIdSetAnonymousInnerClassHelper(FilterAnonymousInnerClassHelper2 outerInstance)
+                public DocIdSetAnonymousClass(FilterAnonymousClass2 outerInstance)
                 {
                     this.outerInstance = outerInstance;
                 }
@@ -357,17 +357,17 @@
             // is cacheable:
             AssertDocIdSetCacheable(reader, FieldCacheRangeFilter.NewInt32Range("test", Convert.ToInt32(10), Convert.ToInt32(20), true, true), true);
             // a fixedbitset filter is always cacheable
-            AssertDocIdSetCacheable(reader, new FilterAnonymousInnerClassHelper3(this), true);
+            AssertDocIdSetCacheable(reader, new FilterAnonymousClass3(this), true);
 
             reader.Dispose();
             dir.Dispose();
         }
 
-        private class FilterAnonymousInnerClassHelper3 : Filter
+        private class FilterAnonymousClass3 : Filter
         {
             private readonly TestCachingWrapperFilter outerInstance;
 
-            public FilterAnonymousInnerClassHelper3(TestCachingWrapperFilter outerInstance)
+            public FilterAnonymousClass3(TestCachingWrapperFilter outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Search/TestComplexExplanations.cs b/src/Lucene.Net.Tests/Search/TestComplexExplanations.cs
index 382aba8..a85f8f1 100644
--- a/src/Lucene.Net.Tests/Search/TestComplexExplanations.cs
+++ b/src/Lucene.Net.Tests/Search/TestComplexExplanations.cs
@@ -52,12 +52,12 @@
         // must be static for weight serialization tests
         private static DefaultSimilarity CreateQnorm1Similarity()
         {
-            return new DefaultSimilarityAnonymousInnerClassHelper();
+            return new DefaultSimilarityAnonymousClass();
         }
 
-        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        private class DefaultSimilarityAnonymousClass : DefaultSimilarity
         {
-            public DefaultSimilarityAnonymousInnerClassHelper()
+            public DefaultSimilarityAnonymousClass()
             {
             }
 
diff --git a/src/Lucene.Net.Tests/Search/TestConjunctions.cs b/src/Lucene.Net.Tests/Search/TestConjunctions.cs
index 75656be..a8ccb89 100644
--- a/src/Lucene.Net.Tests/Search/TestConjunctions.cs
+++ b/src/Lucene.Net.Tests/Search/TestConjunctions.cs
@@ -105,14 +105,14 @@
 
             public override SimWeight ComputeWeight(float queryBoost, CollectionStatistics collectionStats, params TermStatistics[] termStats)
             {
-                return new SimWeightAnonymousInnerClassHelper(this);
+                return new SimWeightAnonymousClass(this);
             }
 
-            private class SimWeightAnonymousInnerClassHelper : SimWeight
+            private class SimWeightAnonymousClass : SimWeight
             {
                 private readonly TFSimilarity outerInstance;
 
-                public SimWeightAnonymousInnerClassHelper(TFSimilarity outerInstance)
+                public SimWeightAnonymousClass(TFSimilarity outerInstance)
                 {
                     this.outerInstance = outerInstance;
                 }
@@ -130,14 +130,14 @@
 
             public override SimScorer GetSimScorer(SimWeight weight, AtomicReaderContext context)
             {
-                return new SimScorerAnonymousInnerClassHelper(this);
+                return new SimScorerAnonymousClass(this);
             }
 
-            private class SimScorerAnonymousInnerClassHelper : SimScorer
+            private class SimScorerAnonymousClass : SimScorer
             {
                 private readonly TFSimilarity outerInstance;
 
-                public SimScorerAnonymousInnerClassHelper(TFSimilarity outerInstance)
+                public SimScorerAnonymousClass(TFSimilarity outerInstance)
                 {
                     this.outerInstance = outerInstance;
                 }
diff --git a/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs b/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs
index 80747c3..4a4cde6 100644
--- a/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs
@@ -59,11 +59,11 @@
         private void CheckHits(IndexSearcher searcher, Query q, float expectedScore, string scorerClassName, string innerScorerClassName)
         {
             int[] count = new int[1];
-            searcher.Search(q, new CollectorAnonymousInnerClassHelper(this, expectedScore, scorerClassName, innerScorerClassName, count));
+            searcher.Search(q, new CollectorAnonymousClass(this, expectedScore, scorerClassName, innerScorerClassName, count));
             Assert.AreEqual(1, count[0], "invalid number of results");
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly TestConstantScoreQuery outerInstance;
 
@@ -72,7 +72,7 @@
             private readonly string innerScorerClassName;
             private readonly int[] count;
 
-            public CollectorAnonymousInnerClassHelper(TestConstantScoreQuery outerInstance, float expectedScore, string scorerClassName, string innerScorerClassName, int[] count)
+            public CollectorAnonymousClass(TestConstantScoreQuery outerInstance, float expectedScore, string scorerClassName, string innerScorerClassName, int[] count)
             {
                 this.outerInstance = outerInstance;
                 this.expectedScore = expectedScore;
@@ -132,7 +132,7 @@
                 searcher = NewSearcher(reader, true, false);
 
                 // set a similarity that does not normalize our boost away
-                searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
+                searcher.Similarity = new DefaultSimilarityAnonymousClass(this);
 
                 Query csq1 = new ConstantScoreQuery(new TermQuery(new Term("field", "term")));
                 csq1.Boost = 2.0f;
@@ -167,11 +167,11 @@
             }
         }
 
-        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        private class DefaultSimilarityAnonymousClass : DefaultSimilarity
         {
             private readonly TestConstantScoreQuery outerInstance;
 
-            public DefaultSimilarityAnonymousInnerClassHelper(TestConstantScoreQuery outerInstance)
+            public DefaultSimilarityAnonymousClass(TestConstantScoreQuery outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs b/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs
index 7f35c2b..62244cf 100644
--- a/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs
+++ b/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs
@@ -274,7 +274,7 @@
 
             genWriter = new TrackingIndexWriter(m_writer);
 
-            SearcherFactory sf = new SearcherFactoryAnonymousInnerClassHelper(this, es);
+            SearcherFactory sf = new SearcherFactoryAnonymousClass(this, es);
 
             nrtNoDeletes = new SearcherManager(m_writer, false, sf);
             nrtDeletes = new SearcherManager(m_writer, true, sf);
@@ -296,13 +296,13 @@
             nrtNoDeletesThread.Start();
         }
 
-        private class SearcherFactoryAnonymousInnerClassHelper : SearcherFactory
+        private class SearcherFactoryAnonymousClass : SearcherFactory
         {
             private readonly TestControlledRealTimeReopenThread outerInstance;
 
             private TaskScheduler es;
 
-            public SearcherFactoryAnonymousInnerClassHelper(TestControlledRealTimeReopenThread outerInstance, TaskScheduler es)
+            public SearcherFactoryAnonymousClass(TestControlledRealTimeReopenThread outerInstance, TaskScheduler es)
             {
                 this.outerInstance = outerInstance;
                 this.es = es;
@@ -399,7 +399,7 @@
             doc.Add(NewTextField("test", "test", Field.Store.YES));
             writer.AddDocument(doc);
             manager.MaybeRefresh();
-            var t = new ThreadAnonymousInnerClassHelper(this, latch, signal, writer, manager);
+            var t = new ThreadAnonymousClass(this, latch, signal, writer, manager);
             t.Start();
             _writer.waitAfterUpdate = true; // wait in addDocument to let some reopens go through
             long lastGen = writer.UpdateDocument(new Term("foo", "bar"), doc); // once this returns the doc is already reflected in the last reopen
@@ -423,7 +423,7 @@
             }
 
             AtomicBoolean finished = new AtomicBoolean(false);
-            var waiter = new ThreadAnonymousInnerClassHelper2(this, lastGen, thread, finished);
+            var waiter = new ThreadAnonymousClass2(this, lastGen, thread, finished);
             waiter.Start();
             manager.MaybeRefresh();
             waiter.Join(1000);
@@ -437,7 +437,7 @@
             IOUtils.Dispose(manager, _writer, d);
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestControlledRealTimeReopenThread outerInstance;
 
@@ -446,7 +446,7 @@
             private readonly TrackingIndexWriter writer;
             private readonly SearcherManager manager;
 
-            public ThreadAnonymousInnerClassHelper(TestControlledRealTimeReopenThread outerInstance, CountdownEvent latch, CountdownEvent signal, TrackingIndexWriter writer, SearcherManager manager)
+            public ThreadAnonymousClass(TestControlledRealTimeReopenThread outerInstance, CountdownEvent latch, CountdownEvent signal, TrackingIndexWriter writer, SearcherManager manager)
             {
                 this.outerInstance = outerInstance;
                 this.latch = latch;
@@ -476,7 +476,7 @@
             }
         }
 
-        private class ThreadAnonymousInnerClassHelper2 : ThreadJob
+        private class ThreadAnonymousClass2 : ThreadJob
         {
             private readonly TestControlledRealTimeReopenThread outerInstance;
 
@@ -484,7 +484,7 @@
             private readonly ControlledRealTimeReopenThread<IndexSearcher> thread;
             private readonly AtomicBoolean finished;
 
-            public ThreadAnonymousInnerClassHelper2(TestControlledRealTimeReopenThread outerInstance, long lastGen, ControlledRealTimeReopenThread<IndexSearcher> thread, AtomicBoolean finished)
+            public ThreadAnonymousClass2(TestControlledRealTimeReopenThread outerInstance, long lastGen, ControlledRealTimeReopenThread<IndexSearcher> thread, AtomicBoolean finished)
             {
                 this.outerInstance = outerInstance;
                 this.lastGen = lastGen;
@@ -561,7 +561,7 @@
 
             IndexReader other = DirectoryReader.Open(dir);
 
-            SearcherFactory theEvilOne = new SearcherFactoryAnonymousInnerClassHelper2(this, other);
+            SearcherFactory theEvilOne = new SearcherFactoryAnonymousClass2(this, other);
 
             try
             {
@@ -579,13 +579,13 @@
             dir.Dispose();
         }
 
-        private class SearcherFactoryAnonymousInnerClassHelper2 : SearcherFactory
+        private class SearcherFactoryAnonymousClass2 : SearcherFactory
         {
             private readonly TestControlledRealTimeReopenThread outerInstance;
 
             private readonly IndexReader other;
 
-            public SearcherFactoryAnonymousInnerClassHelper2(TestControlledRealTimeReopenThread outerInstance, IndexReader other)
+            public SearcherFactoryAnonymousClass2(TestControlledRealTimeReopenThread outerInstance, IndexReader other)
             {
                 this.outerInstance = outerInstance;
                 this.other = other;
@@ -608,7 +608,7 @@
             IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
             AtomicBoolean afterRefreshCalled = new AtomicBoolean(false);
             SearcherManager sm = new SearcherManager(iw, true, new SearcherFactory());
-            sm.AddListener(new RefreshListenerAnonymousInnerClassHelper(this, afterRefreshCalled));
+            sm.AddListener(new RefreshListenerAnonymousClass(this, afterRefreshCalled));
             iw.AddDocument(new Document());
             iw.Commit();
             assertFalse(afterRefreshCalled);
@@ -619,13 +619,13 @@
             dir.Dispose();
         }
 
-        private class RefreshListenerAnonymousInnerClassHelper : ReferenceManager.IRefreshListener
+        private class RefreshListenerAnonymousClass : ReferenceManager.IRefreshListener
         {
             private readonly TestControlledRealTimeReopenThread outerInstance;
 
             private AtomicBoolean afterRefreshCalled;
 
-            public RefreshListenerAnonymousInnerClassHelper(TestControlledRealTimeReopenThread outerInstance, AtomicBoolean afterRefreshCalled)
+            public RefreshListenerAnonymousClass(TestControlledRealTimeReopenThread outerInstance, AtomicBoolean afterRefreshCalled)
             {
                 this.outerInstance = outerInstance;
                 this.afterRefreshCalled = afterRefreshCalled;
@@ -686,7 +686,7 @@
             {
                 if (i > 0 && i % 50 == 0)
                 {
-                    ThreadJob commitThread = new RunnableAnonymousInnerClassHelper(this, sdp, dir, iw);
+                    ThreadJob commitThread = new RunnableAnonymousClass(this, sdp, dir, iw);
                     commitThread.Start();
                     commitThreads.Add(commitThread);
                 }
@@ -715,7 +715,7 @@
             dir.Dispose();
         }
 
-        private class RunnableAnonymousInnerClassHelper : ThreadJob
+        private class RunnableAnonymousClass : ThreadJob
         {
             private readonly TestControlledRealTimeReopenThread outerInstance;
 
@@ -723,7 +723,7 @@
             private Directory dir;
             private IndexWriter iw;
 
-            public RunnableAnonymousInnerClassHelper(TestControlledRealTimeReopenThread outerInstance, SnapshotDeletionPolicy sdp, Directory dir, IndexWriter iw)
+            public RunnableAnonymousClass(TestControlledRealTimeReopenThread outerInstance, SnapshotDeletionPolicy sdp, Directory dir, IndexWriter iw)
             {
                 this.outerInstance = outerInstance;
                 this.sdp = sdp;
diff --git a/src/Lucene.Net.Tests/Search/TestDocBoost.cs b/src/Lucene.Net.Tests/Search/TestDocBoost.cs
index 89de72d..7d7f719 100644
--- a/src/Lucene.Net.Tests/Search/TestDocBoost.cs
+++ b/src/Lucene.Net.Tests/Search/TestDocBoost.cs
@@ -64,7 +64,7 @@
             float[] scores = new float[4];
 
             IndexSearcher searcher = NewSearcher(reader);
-            searcher.Search(new TermQuery(new Term("field", "word")), new CollectorAnonymousInnerClassHelper(this, scores));
+            searcher.Search(new TermQuery(new Term("field", "word")), new CollectorAnonymousClass(this, scores));
 
             float lastScore = 0.0f;
 
@@ -82,13 +82,13 @@
             store.Dispose();
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly TestDocBoost outerInstance;
 
             private readonly float[] scores;
 
-            public CollectorAnonymousInnerClassHelper(TestDocBoost outerInstance, float[] scores)
+            public CollectorAnonymousClass(TestDocBoost outerInstance, float[] scores)
             {
                 this.outerInstance = outerInstance;
                 this.scores = scores;
diff --git a/src/Lucene.Net.Tests/Search/TestDocIdSet.cs b/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
index e04dbe3..ff795c3 100644
--- a/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
+++ b/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
@@ -41,9 +41,9 @@
         public virtual void TestFilteredDocIdSet()
         {
             const int maxdoc = 10;
-            DocIdSet innerSet = new DocIdSetAnonymousInnerClassHelper(this, maxdoc);
+            DocIdSet innerSet = new DocIdSetAnonymousClass(this, maxdoc);
 
-            DocIdSet filteredSet = new FilteredDocIdSetAnonymousInnerClassHelper(this, innerSet);
+            DocIdSet filteredSet = new FilteredDocIdSetAnonymousClass(this, innerSet);
 
             DocIdSetIterator iter = filteredSet.GetIterator();
             List<int?> list = new List<int?>();
@@ -74,13 +74,13 @@
             }
         }
 
-        private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+        private class DocIdSetAnonymousClass : DocIdSet
         {
             private readonly TestDocIdSet outerInstance;
 
             private readonly int maxdoc;
 
-            public DocIdSetAnonymousInnerClassHelper(TestDocIdSet outerInstance, int maxdoc)
+            public DocIdSetAnonymousClass(TestDocIdSet outerInstance, int maxdoc)
             {
                 this.outerInstance = outerInstance;
                 this.maxdoc = maxdoc;
@@ -88,14 +88,14 @@
 
             public override DocIdSetIterator GetIterator()
             {
-                return new DocIdSetIteratorAnonymousInnerClassHelper(this);
+                return new DocIdSetIteratorAnonymousClass(this);
             }
 
-            private class DocIdSetIteratorAnonymousInnerClassHelper : DocIdSetIterator
+            private class DocIdSetIteratorAnonymousClass : DocIdSetIterator
             {
-                private readonly DocIdSetAnonymousInnerClassHelper outerInstance;
+                private readonly DocIdSetAnonymousClass outerInstance;
 
-                public DocIdSetIteratorAnonymousInnerClassHelper(DocIdSetAnonymousInnerClassHelper outerInstance)
+                public DocIdSetIteratorAnonymousClass(DocIdSetAnonymousClass outerInstance)
                 {
                     this.outerInstance = outerInstance;
                     docid = -1;
@@ -123,11 +123,11 @@
             }
         }
 
-        private class FilteredDocIdSetAnonymousInnerClassHelper : FilteredDocIdSet
+        private class FilteredDocIdSetAnonymousClass : FilteredDocIdSet
         {
             private readonly TestDocIdSet outerInstance;
 
-            public FilteredDocIdSetAnonymousInnerClassHelper(TestDocIdSet outerInstance, DocIdSet innerSet)
+            public FilteredDocIdSetAnonymousClass(TestDocIdSet outerInstance, DocIdSet innerSet)
                 : base(innerSet)
             {
                 this.outerInstance = outerInstance;
@@ -161,18 +161,18 @@
             Assert.AreEqual(1, searcher.Search(new MatchAllDocsQuery(), 10).TotalHits);
 
             // Now search w/ a Filter which returns a null DocIdSet
-            Filter f = new FilterAnonymousInnerClassHelper(this);
+            Filter f = new FilterAnonymousClass(this);
 
             Assert.AreEqual(0, searcher.Search(new MatchAllDocsQuery(), f, 10).TotalHits);
             reader.Dispose();
             dir.Dispose();
         }
 
-        private class FilterAnonymousInnerClassHelper : Filter
+        private class FilterAnonymousClass : Filter
         {
             private readonly TestDocIdSet outerInstance;
 
-            public FilterAnonymousInnerClassHelper(TestDocIdSet outerInstance)
+            public FilterAnonymousClass(TestDocIdSet outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -203,33 +203,33 @@
             Assert.AreEqual(1, searcher.Search(new MatchAllDocsQuery(), 10).TotalHits);
 
             // Now search w/ a Filter which returns a null DocIdSet
-            Filter f = new FilterAnonymousInnerClassHelper2(this);
+            Filter f = new FilterAnonymousClass2(this);
 
             Assert.AreEqual(0, searcher.Search(new MatchAllDocsQuery(), f, 10).TotalHits);
             reader.Dispose();
             dir.Dispose();
         }
 
-        private class FilterAnonymousInnerClassHelper2 : Filter
+        private class FilterAnonymousClass2 : Filter
         {
             private readonly TestDocIdSet outerInstance;
 
-            public FilterAnonymousInnerClassHelper2(TestDocIdSet outerInstance)
+            public FilterAnonymousClass2(TestDocIdSet outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
 
             public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
             {
-                DocIdSet innerNullIteratorSet = new DocIdSetAnonymousInnerClassHelper2(this);
-                return new FilteredDocIdSetAnonymousInnerClassHelper2(this, innerNullIteratorSet);
+                DocIdSet innerNullIteratorSet = new DocIdSetAnonymousClass2(this);
+                return new FilteredDocIdSetAnonymousClass2(this, innerNullIteratorSet);
             }
 
-            private class DocIdSetAnonymousInnerClassHelper2 : DocIdSet
+            private class DocIdSetAnonymousClass2 : DocIdSet
             {
-                private readonly FilterAnonymousInnerClassHelper2 outerInstance;
+                private readonly FilterAnonymousClass2 outerInstance;
 
-                public DocIdSetAnonymousInnerClassHelper2(FilterAnonymousInnerClassHelper2 outerInstance)
+                public DocIdSetAnonymousClass2(FilterAnonymousClass2 outerInstance)
                 {
                     this.outerInstance = outerInstance;
                 }
@@ -240,11 +240,11 @@
                 }
             }
 
-            private class FilteredDocIdSetAnonymousInnerClassHelper2 : FilteredDocIdSet
+            private class FilteredDocIdSetAnonymousClass2 : FilteredDocIdSet
             {
-                private readonly FilterAnonymousInnerClassHelper2 outerInstance;
+                private readonly FilterAnonymousClass2 outerInstance;
 
-                public FilteredDocIdSetAnonymousInnerClassHelper2(FilterAnonymousInnerClassHelper2 outerInstance, DocIdSet innerNullIteratorSet)
+                public FilteredDocIdSetAnonymousClass2(FilterAnonymousClass2 outerInstance, DocIdSet innerNullIteratorSet)
                     : base(innerNullIteratorSet)
                 {
                     this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Tests/Search/TestDocValuesScoring.cs b/src/Lucene.Net.Tests/Search/TestDocValuesScoring.cs
index f28a299..91491c8 100644
--- a/src/Lucene.Net.Tests/Search/TestDocValuesScoring.cs
+++ b/src/Lucene.Net.Tests/Search/TestDocValuesScoring.cs
@@ -89,7 +89,7 @@
                 this,
 #endif
                 ir, false);
-            searcher2.Similarity = new PerFieldSimilarityWrapperAnonymousInnerClassHelper(this, field, @base);
+            searcher2.Similarity = new PerFieldSimilarityWrapperAnonymousClass(this, field, @base);
 
             // in this case, we searched on field "foo". first document should have 2x the score.
             TermQuery tq = new TermQuery(new Term("foo", "quick"));
@@ -157,14 +157,14 @@
             dir.Dispose();
         }
 
-        private class PerFieldSimilarityWrapperAnonymousInnerClassHelper : PerFieldSimilarityWrapper
+        private class PerFieldSimilarityWrapperAnonymousClass : PerFieldSimilarityWrapper
         {
             private readonly TestDocValuesScoring outerInstance;
 
             private Field field;
             private Similarity @base;
 
-            public PerFieldSimilarityWrapperAnonymousInnerClassHelper(TestDocValuesScoring outerInstance, Field field, Similarity @base)
+            public PerFieldSimilarityWrapperAnonymousClass(TestDocValuesScoring outerInstance, Field field, Similarity @base)
             {
                 this.outerInstance = outerInstance;
                 this.field = field;
@@ -222,17 +222,17 @@
                 SimScorer sub = sim.GetSimScorer(stats, context);
                 FieldCache.Singles values = FieldCache.DEFAULT.GetSingles(context.AtomicReader, boostField, false);
 
-                return new SimScorerAnonymousInnerClassHelper(this, sub, values);
+                return new SimScorerAnonymousClass(this, sub, values);
             }
 
-            private class SimScorerAnonymousInnerClassHelper : SimScorer
+            private class SimScorerAnonymousClass : SimScorer
             {
                 private readonly BoostingSimilarity outerInstance;
 
                 private readonly SimScorer sub;
                 private readonly FieldCache.Singles values;
 
-                public SimScorerAnonymousInnerClassHelper(BoostingSimilarity outerInstance, SimScorer sub, FieldCache.Singles values)
+                public SimScorerAnonymousClass(BoostingSimilarity outerInstance, SimScorer sub, FieldCache.Singles values)
                 {
                     this.outerInstance = outerInstance;
                     this.sub = sub;
diff --git a/src/Lucene.Net.Tests/Search/TestEarlyTermination.cs b/src/Lucene.Net.Tests/Search/TestEarlyTermination.cs
index 8b43018..431f29b 100644
--- a/src/Lucene.Net.Tests/Search/TestEarlyTermination.cs
+++ b/src/Lucene.Net.Tests/Search/TestEarlyTermination.cs
@@ -71,18 +71,18 @@
             for (int i = 0; i < iters; ++i)
             {
                 IndexSearcher searcher = NewSearcher(reader);
-                ICollector collector = new CollectorAnonymousInnerClassHelper(this);
+                ICollector collector = new CollectorAnonymousClass(this);
 
                 searcher.Search(new MatchAllDocsQuery(), collector);
             }
             reader.Dispose();
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly TestEarlyTermination outerInstance;
 
-            public CollectorAnonymousInnerClassHelper(TestEarlyTermination outerInstance)
+            public CollectorAnonymousClass(TestEarlyTermination outerInstance)
             {
                 this.outerInstance = outerInstance;
                 outOfOrder = Random.NextBoolean();
diff --git a/src/Lucene.Net.Tests/Search/TestElevationComparator.cs b/src/Lucene.Net.Tests/Search/TestElevationComparator.cs
index 7bb7e7a..848fa97 100644
--- a/src/Lucene.Net.Tests/Search/TestElevationComparator.cs
+++ b/src/Lucene.Net.Tests/Search/TestElevationComparator.cs
@@ -147,17 +147,17 @@
 
         public override FieldComparer NewComparer(string fieldname, int numHits, int sortPos, bool reversed)
         {
-            return new FieldComparerAnonymousInnerClassHelper(this, fieldname, numHits);
+            return new FieldComparerAnonymousClass(this, fieldname, numHits);
         }
 
-        private class FieldComparerAnonymousInnerClassHelper : FieldComparer
+        private class FieldComparerAnonymousClass : FieldComparer
         {
             private readonly ElevationComparerSource outerInstance;
 
             private readonly string fieldname;
             private int numHits;
 
-            public FieldComparerAnonymousInnerClassHelper(ElevationComparerSource outerInstance, string fieldname, int numHits)
+            public FieldComparerAnonymousClass(ElevationComparerSource outerInstance, string fieldname, int numHits)
             {
                 this.outerInstance = outerInstance;
                 this.fieldname = fieldname;
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCache.cs b/src/Lucene.Net.Tests/Search/TestFieldCache.cs
index f892f25..30086c0 100644
--- a/src/Lucene.Net.Tests/Search/TestFieldCache.cs
+++ b/src/Lucene.Net.Tests/Search/TestFieldCache.cs
@@ -487,10 +487,10 @@
             AtomicBoolean failed = new AtomicBoolean();
             AtomicInt32 iters = new AtomicInt32();
             int NUM_ITER = 200 * RandomMultiplier;
-            Barrier restart = new Barrier(NUM_THREADS, (barrier) => new RunnableAnonymousInnerClassHelper(this, cache, iters).Run());
+            Barrier restart = new Barrier(NUM_THREADS, (barrier) => new RunnableAnonymousClass(this, cache, iters).Run());
             for (int threadIDX = 0; threadIDX < NUM_THREADS; threadIDX++)
             {
-                threads[threadIDX] = new ThreadAnonymousInnerClassHelper(this, cache, failed, iters, NUM_ITER, restart);
+                threads[threadIDX] = new ThreadAnonymousClass(this, cache, failed, iters, NUM_ITER, restart);
                 threads[threadIDX].Start();
             }
 
@@ -501,14 +501,14 @@
             Assert.IsFalse(failed);
         }
 
-        private class RunnableAnonymousInnerClassHelper //: IThreadRunnable
+        private class RunnableAnonymousClass //: IThreadRunnable
         {
             private readonly TestFieldCache outerInstance;
 
             private readonly IFieldCache cache;
             private readonly AtomicInt32 iters;
 
-            public RunnableAnonymousInnerClassHelper(TestFieldCache outerInstance, IFieldCache cache, AtomicInt32 iters)
+            public RunnableAnonymousClass(TestFieldCache outerInstance, IFieldCache cache, AtomicInt32 iters)
             {
                 this.outerInstance = outerInstance;
                 this.cache = cache;
@@ -522,7 +522,7 @@
             }
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestFieldCache outerInstance;
 
@@ -532,7 +532,7 @@
             private readonly int NUM_ITER;
             private readonly Barrier restart;
 
-            public ThreadAnonymousInnerClassHelper(TestFieldCache outerInstance, IFieldCache cache, AtomicBoolean failed, AtomicInt32 iters, int NUM_ITER, Barrier restart)
+            public ThreadAnonymousClass(TestFieldCache outerInstance, IFieldCache cache, AtomicBoolean failed, AtomicInt32 iters, int NUM_ITER, Barrier restart)
             {
                 this.outerInstance = outerInstance;
                 this.cache = cache;
diff --git a/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs b/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs
index 575b8bc..416733f 100644
--- a/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs
@@ -103,12 +103,12 @@
         // must be static for serialization tests
         private static Filter NewStaticFilterB()
         {
-            return new FilterAnonymousInnerClassHelper();
+            return new FilterAnonymousClass();
         }
 
-        private class FilterAnonymousInnerClassHelper : Filter
+        private class FilterAnonymousClass : Filter
         {
-            public FilterAnonymousInnerClassHelper()
+            public FilterAnonymousClass()
             {
             }
 
@@ -226,12 +226,12 @@
         // must be static for serialization tests
         private static Filter NewStaticFilterA()
         {
-            return new FilterAnonymousInnerClassHelper2();
+            return new FilterAnonymousClass2();
         }
 
-        private class FilterAnonymousInnerClassHelper2 : Filter
+        private class FilterAnonymousClass2 : Filter
         {
-            public FilterAnonymousInnerClassHelper2()
+            public FilterAnonymousClass2()
             {
             }
 
@@ -500,14 +500,14 @@
         {
             if (useRandomAccess)
             {
-                return new RandomAccessFilterStrategyAnonymousInnerClassHelper();
+                return new RandomAccessFilterStrategyAnonymousClass();
             }
             return TestUtil.RandomFilterStrategy(random);
         }
 
-        private class RandomAccessFilterStrategyAnonymousInnerClassHelper : FilteredQuery.RandomAccessFilterStrategy
+        private class RandomAccessFilterStrategyAnonymousClass : FilteredQuery.RandomAccessFilterStrategy
         {
-            public RandomAccessFilterStrategyAnonymousInnerClassHelper()
+            public RandomAccessFilterStrategyAnonymousClass()
             {
             }
 
@@ -544,20 +544,20 @@
             writer.Dispose();
 
             IndexSearcher searcher = NewSearcher(reader);
-            Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new FilterAnonymousInnerClassHelper3(this, reader), FilteredQuery.QUERY_FIRST_FILTER_STRATEGY);
+            Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new FilterAnonymousClass3(this, reader), FilteredQuery.QUERY_FIRST_FILTER_STRATEGY);
 
             TopDocs search = searcher.Search(query, 10);
             Assert.AreEqual(totalDocsWithZero, search.TotalHits);
             IOUtils.Dispose(reader, writer, directory);
         }
 
-        private class FilterAnonymousInnerClassHelper3 : Filter
+        private class FilterAnonymousClass3 : Filter
         {
             private readonly TestFilteredQuery outerInstance;
 
             private IndexReader reader;
 
-            public FilterAnonymousInnerClassHelper3(TestFilteredQuery outerInstance, IndexReader reader)
+            public FilterAnonymousClass3(TestFilteredQuery outerInstance, IndexReader reader)
             {
                 this.outerInstance = outerInstance;
                 this.reader = reader;
@@ -578,18 +578,18 @@
                 {
                     bitSet.Set(d);
                 }
-                return new DocIdSetAnonymousInnerClassHelper(this, nullBitset, reader, bitSet);
+                return new DocIdSetAnonymousClass(this, nullBitset, reader, bitSet);
             }
 
-            private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+            private class DocIdSetAnonymousClass : DocIdSet
             {
-                private readonly FilterAnonymousInnerClassHelper3 outerInstance;
+                private readonly FilterAnonymousClass3 outerInstance;
 
                 private readonly bool nullBitset;
                 private readonly AtomicReader reader;
                 private readonly BitSet bitSet;
 
-                public DocIdSetAnonymousInnerClassHelper(FilterAnonymousInnerClassHelper3 outerInstance, bool nullBitset, AtomicReader reader, BitSet bitSet)
+                public DocIdSetAnonymousClass(FilterAnonymousClass3 outerInstance, bool nullBitset, AtomicReader reader, BitSet bitSet)
                 {
                     this.outerInstance = outerInstance;
                     this.nullBitset = nullBitset;
@@ -605,15 +605,15 @@
                         {
                             return null;
                         }
-                        return new BitsAnonymousInnerClassHelper(this);
+                        return new BitsAnonymousClass(this);
                     }
                 }
 
-                private class BitsAnonymousInnerClassHelper : IBits
+                private class BitsAnonymousClass : IBits
                 {
-                    private readonly DocIdSetAnonymousInnerClassHelper outerInstance;
+                    private readonly DocIdSetAnonymousClass outerInstance;
 
-                    public BitsAnonymousInnerClassHelper(DocIdSetAnonymousInnerClassHelper outerInstance)
+                    public BitsAnonymousClass(DocIdSetAnonymousClass outerInstance)
                     {
                         this.outerInstance = outerInstance;
                     }
@@ -662,7 +662,7 @@
             writer.Dispose();
             bool queryFirst = Random.NextBoolean();
             IndexSearcher searcher = NewSearcher(reader);
-            Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new FilterAnonymousInnerClassHelper4(this, queryFirst), queryFirst ? FilteredQuery.LEAP_FROG_QUERY_FIRST_STRATEGY : Random
+            Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new FilterAnonymousClass4(this, queryFirst), queryFirst ? FilteredQuery.LEAP_FROG_QUERY_FIRST_STRATEGY : Random
                   .NextBoolean() ? FilteredQuery.RANDOM_ACCESS_FILTER_STRATEGY : FilteredQuery.LEAP_FROG_FILTER_FIRST_STRATEGY); // if filterFirst, we can use random here since bits are null
 
             TopDocs search = searcher.Search(query, 10);
@@ -670,13 +670,13 @@
             IOUtils.Dispose(reader, writer, directory);
         }
 
-        private class FilterAnonymousInnerClassHelper4 : Filter
+        private class FilterAnonymousClass4 : Filter
         {
             private readonly TestFilteredQuery outerInstance;
 
             private readonly bool queryFirst;
 
-            public FilterAnonymousInnerClassHelper4(TestFilteredQuery outerInstance, bool queryFirst)
+            public FilterAnonymousClass4(TestFilteredQuery outerInstance, bool queryFirst)
             {
                 this.outerInstance = outerInstance;
                 this.queryFirst = queryFirst;
@@ -684,16 +684,16 @@
 
             public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
             {
-                return new DocIdSetAnonymousInnerClassHelper2(this, context);
+                return new DocIdSetAnonymousClass2(this, context);
             }
 
-            private class DocIdSetAnonymousInnerClassHelper2 : DocIdSet
+            private class DocIdSetAnonymousClass2 : DocIdSet
             {
-                private readonly FilterAnonymousInnerClassHelper4 outerInstance;
+                private readonly FilterAnonymousClass4 outerInstance;
 
                 private readonly AtomicReaderContext context;
 
-                public DocIdSetAnonymousInnerClassHelper2(FilterAnonymousInnerClassHelper4 outerInstance, AtomicReaderContext context)
+                public DocIdSetAnonymousClass2(FilterAnonymousClass4 outerInstance, AtomicReaderContext context)
                 {
                     this.outerInstance = outerInstance;
                     this.context = context;
@@ -708,16 +708,16 @@
                     {
                         return null;
                     }
-                    return new DocIdSetIteratorAnonymousInnerClassHelper(this, termDocsEnum);
+                    return new DocIdSetIteratorAnonymousClass(this, termDocsEnum);
                 }
 
-                private class DocIdSetIteratorAnonymousInnerClassHelper : DocIdSetIterator
+                private class DocIdSetIteratorAnonymousClass : DocIdSetIterator
                 {
-                    private readonly DocIdSetAnonymousInnerClassHelper2 outerInstance;
+                    private readonly DocIdSetAnonymousClass2 outerInstance;
 
                     private readonly DocsEnum termDocsEnum;
 
-                    public DocIdSetIteratorAnonymousInnerClassHelper(DocIdSetAnonymousInnerClassHelper2 outerInstance, DocsEnum termDocsEnum)
+                    public DocIdSetIteratorAnonymousClass(DocIdSetAnonymousClass2 outerInstance, DocsEnum termDocsEnum)
                     {
                         this.outerInstance = outerInstance;
                         this.termDocsEnum = termDocsEnum;
diff --git a/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs b/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
index 043fada..081ef86 100644
--- a/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
+++ b/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
@@ -52,11 +52,11 @@
 
             IndexWriter w = new IndexWriter(dir, iwc);
 
-            SearcherManager mgr = new SearcherManager(w, true, new SearcherFactoryAnonymousInnerClassHelper());
+            SearcherManager mgr = new SearcherManager(w, true, new SearcherFactoryAnonymousClass());
 
             const int missing = -1;
 
-            LiveFieldValues<IndexSearcher, int?> rt = new LiveFieldValuesAnonymousInnerClassHelper(mgr, missing);
+            LiveFieldValues<IndexSearcher, int?> rt = new LiveFieldValuesAnonymousClass(mgr, missing);
 
             int numThreads = TestUtil.NextInt32(Random, 2, 5);
             if (Verbose)
@@ -78,7 +78,7 @@
             {
                 int threadID = t;
                 Random threadRandom = new Random(Random.Next());
-                ThreadJob thread = new ThreadAnonymousInnerClassHelper(w, mgr, missing, rt, startingGun, iters, idCount, reopenChance, deleteChance, addChance, t, threadID, threadRandom);
+                ThreadJob thread = new ThreadAnonymousClass(w, mgr, missing, rt, startingGun, iters, idCount, reopenChance, deleteChance, addChance, t, threadID, threadRandom);
                 threads.Add(thread);
                 thread.Start();
             }
@@ -98,7 +98,7 @@
             dir.Dispose();
         }
 
-        private class SearcherFactoryAnonymousInnerClassHelper : SearcherFactory
+        private class SearcherFactoryAnonymousClass : SearcherFactory
         {
             public override IndexSearcher NewSearcher(IndexReader r)
             {
@@ -106,9 +106,9 @@
             }
         }
 
-        private class LiveFieldValuesAnonymousInnerClassHelper : LiveFieldValues<IndexSearcher, int?>
+        private class LiveFieldValuesAnonymousClass : LiveFieldValues<IndexSearcher, int?>
         {
-            public LiveFieldValuesAnonymousInnerClassHelper(SearcherManager mgr, int missing)
+            public LiveFieldValuesAnonymousClass(SearcherManager mgr, int missing)
                 : base(mgr, missing)
             {
             }
@@ -130,7 +130,7 @@
             }
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly IndexWriter w;
             private readonly SearcherManager mgr;
@@ -146,7 +146,7 @@
             private readonly int threadID;
             private readonly Random threadRandom;
 
-            public ThreadAnonymousInnerClassHelper(IndexWriter w, SearcherManager mgr, int? missing, LiveFieldValues<IndexSearcher, int?> rt, CountdownEvent startingGun, int iters, int idCount, double reopenChance, double deleteChance, double addChance, int t, int threadID, Random threadRandom)
+            public ThreadAnonymousClass(IndexWriter w, SearcherManager mgr, int? missing, LiveFieldValues<IndexSearcher, int?> rt, CountdownEvent startingGun, int iters, int idCount, double reopenChance, double deleteChance, double addChance, int t, int threadID, Random threadRandom)
             {
                 this.w = w;
                 this.mgr = mgr;
diff --git a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
index 124ad4d..24315d6 100644
--- a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
+++ b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
@@ -105,12 +105,12 @@
             r = DirectoryReader.Open(dir);
             atomicReader = GetOnlySegmentReader(r);
             searcher = new IndexSearcher(atomicReader);
-            searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper();
+            searcher.Similarity = new DefaultSimilarityAnonymousClass();
         }
 
-        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        private class DefaultSimilarityAnonymousClass : DefaultSimilarity
         {
-            public DefaultSimilarityAnonymousInnerClassHelper()
+            public DefaultSimilarityAnonymousClass()
             {
             }
 
diff --git a/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs
index 5628df8..70bc1e8 100644
--- a/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs
@@ -401,7 +401,7 @@
 
             IndexReader reader = writer.GetReader();
             IndexSearcher searcher = NewSearcher(reader);
-            searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
+            searcher.Similarity = new DefaultSimilarityAnonymousClass(this);
 
             MultiPhraseQuery query = new MultiPhraseQuery();
             query.Add(new Term[] { new Term("body", "this"), new Term("body", "that") });
@@ -414,11 +414,11 @@
             indexStore.Dispose();
         }
 
-        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        private class DefaultSimilarityAnonymousClass : DefaultSimilarity
         {
             private readonly TestMultiPhraseQuery outerInstance;
 
-            public DefaultSimilarityAnonymousInnerClassHelper(TestMultiPhraseQuery outerInstance)
+            public DefaultSimilarityAnonymousClass(TestMultiPhraseQuery outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs b/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs
index 225e5ac..329d87c 100644
--- a/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs
+++ b/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs
@@ -242,7 +242,7 @@
             search.Similarity = new DefaultSimilarity();
             Query q = Csrq("data", "1", "6", T, T);
             q.Boost = 100;
-            search.Search(q, null, new CollectorAnonymousInnerClassHelper(this));
+            search.Search(q, null, new CollectorAnonymousClass(this));
 
             //
             // Ensure that boosting works to score one clause of a query higher
@@ -285,11 +285,11 @@
             Assert.IsTrue(hits[0].Score > hits[1].Score);
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly TestMultiTermConstantScore outerInstance;
 
-            public CollectorAnonymousInnerClassHelper(TestMultiTermConstantScore outerInstance)
+            public CollectorAnonymousClass(TestMultiTermConstantScore outerInstance)
             {
                 this.outerInstance = outerInstance;
                 @base = 0;
diff --git a/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs b/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs
index a413827..f0bfe54 100644
--- a/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs
+++ b/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs
@@ -196,7 +196,7 @@
 
         private void CheckBoosts(MultiTermQuery.RewriteMethod method)
         {
-            MultiTermQuery mtq = new MultiTermQueryAnonymousInnerClassHelper(this);
+            MultiTermQuery mtq = new MultiTermQueryAnonymousClass(this);
             mtq.MultiTermRewriteMethod = (method);
             Query q1 = searcher.Rewrite(mtq);
             Query q2 = multiSearcher.Rewrite(mtq);
@@ -215,11 +215,11 @@
             CheckBooleanQueryBoosts((BooleanQuery)q3);
         }
 
-        private class MultiTermQueryAnonymousInnerClassHelper : MultiTermQuery
+        private class MultiTermQueryAnonymousClass : MultiTermQuery
         {
             private readonly TestMultiTermQueryRewrites outerInstance;
 
-            public MultiTermQueryAnonymousInnerClassHelper(TestMultiTermQueryRewrites outerInstance)
+            public MultiTermQueryAnonymousClass(TestMultiTermQueryRewrites outerInstance)
                 : base("data")
             {
                 this.outerInstance = outerInstance;
@@ -227,14 +227,14 @@
 
             protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts)
             {
-                return new TermRangeTermsEnumAnonymousInnerClassHelper(this, terms.GetEnumerator(), new BytesRef("2"), new BytesRef("7"));
+                return new TermRangeTermsEnumAnonymousClass(this, terms.GetEnumerator(), new BytesRef("2"), new BytesRef("7"));
             }
 
-            private class TermRangeTermsEnumAnonymousInnerClassHelper : TermRangeTermsEnum
+            private class TermRangeTermsEnumAnonymousClass : TermRangeTermsEnum
             {
-                private readonly MultiTermQueryAnonymousInnerClassHelper outerInstance;
+                private readonly MultiTermQueryAnonymousClass outerInstance;
 
-                public TermRangeTermsEnumAnonymousInnerClassHelper(MultiTermQueryAnonymousInnerClassHelper outerInstance, TermsEnum iterator, BytesRef bref1, BytesRef bref2)
+                public TermRangeTermsEnumAnonymousClass(MultiTermQueryAnonymousClass outerInstance, TermsEnum iterator, BytesRef bref1, BytesRef bref2)
                     : base(iterator, bref1, bref2, true, true)
                 {
                     this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
index 6915fd9..9a4b9f9 100644
--- a/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
@@ -62,7 +62,7 @@
             base.BeforeClass();
 
             directory = NewDirectory();
-            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper();
+            Analyzer analyzer = new AnalyzerAnonymousClass();
             RandomIndexWriter writer = new RandomIndexWriter(
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                 this,
@@ -91,9 +91,9 @@
             searcher = NewSearcher(reader);
         }
 
-        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        private class AnalyzerAnonymousClass : Analyzer
         {
-            public AnalyzerAnonymousInnerClassHelper()
+            public AnalyzerAnonymousClass()
             {
             }
 
diff --git a/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs b/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs
index fe6a24a..8af33f1 100644
--- a/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs
+++ b/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs
@@ -60,7 +60,7 @@
         [Test]
         public virtual void TestSetPosition()
         {
-            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(this);
+            Analyzer analyzer = new AnalyzerAnonymousClass(this);
             Directory store = NewDirectory();
             RandomIndexWriter writer = new RandomIndexWriter(
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
@@ -170,23 +170,23 @@
             store.Dispose();
         }
 
-        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        private class AnalyzerAnonymousClass : Analyzer
         {
             private readonly TestPositionIncrement outerInstance;
 
-            public AnalyzerAnonymousInnerClassHelper(TestPositionIncrement outerInstance)
+            public AnalyzerAnonymousClass(TestPositionIncrement outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
 
             protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
             {
-                return new TokenStreamComponents(new TokenizerAnonymousInnerClassHelper(reader));
+                return new TokenStreamComponents(new TokenizerAnonymousClass(reader));
             }
 
-            private class TokenizerAnonymousInnerClassHelper : Tokenizer
+            private class TokenizerAnonymousClass : Tokenizer
             {
-                public TokenizerAnonymousInnerClassHelper(TextReader reader)
+                public TokenizerAnonymousClass(TextReader reader)
                     : base(reader)
                 {
                     TOKENS = new string[] { "1", "2", "3", "4", "5" };
diff --git a/src/Lucene.Net.Tests/Search/TestQueryRescorer.cs b/src/Lucene.Net.Tests/Search/TestQueryRescorer.cs
index 1c80a6f..6d1a4d0 100644
--- a/src/Lucene.Net.Tests/Search/TestQueryRescorer.cs
+++ b/src/Lucene.Net.Tests/Search/TestQueryRescorer.cs
@@ -158,7 +158,7 @@
             pq.Add(new Term("field", "wizard"));
             pq.Add(new Term("field", "oz"));
 
-            TopDocs hits2 = new QueryRescorerAnonymousInnerClassHelper(this, pq)
+            TopDocs hits2 = new QueryRescorerAnonymousClass(this, pq)
               .Rescore(searcher, hits, 10);
 
             // Resorting didn't change the order:
@@ -170,11 +170,11 @@
             dir.Dispose();
         }
 
-        private class QueryRescorerAnonymousInnerClassHelper : QueryRescorer
+        private class QueryRescorerAnonymousClass : QueryRescorer
         {
             private readonly TestQueryRescorer outerInstance;
 
-            public QueryRescorerAnonymousInnerClassHelper(TestQueryRescorer outerInstance, PhraseQuery pq)
+            public QueryRescorerAnonymousClass(TestQueryRescorer outerInstance, PhraseQuery pq)
                 : base(pq)
             {
                 this.outerInstance = outerInstance;
@@ -229,7 +229,7 @@
             pq.Add(new Term("field", "wizard"));
             pq.Add(new Term("field", "oz"));
 
-            Rescorer rescorer = new QueryRescorerAnonymousInnerClassHelper2(this, pq);
+            Rescorer rescorer = new QueryRescorerAnonymousClass2(this, pq);
 
             TopDocs hits2 = rescorer.Rescore(searcher, hits, 10);
 
@@ -262,11 +262,11 @@
             dir.Dispose();
         }
 
-        private class QueryRescorerAnonymousInnerClassHelper2 : QueryRescorer
+        private class QueryRescorerAnonymousClass2 : QueryRescorer
         {
             private readonly TestQueryRescorer outerInstance;
 
-            public QueryRescorerAnonymousInnerClassHelper2(TestQueryRescorer outerInstance, PhraseQuery pq)
+            public QueryRescorerAnonymousClass2(TestQueryRescorer outerInstance, PhraseQuery pq)
                 : base(pq)
             {
                 this.outerInstance = outerInstance;
@@ -382,7 +382,7 @@
             //System.out.println("numHits=" + numHits + " reverse=" + reverse);
             TopDocs hits = s.Search(new TermQuery(new Term("field", "a")), numHits);
 
-            TopDocs hits2 = new QueryRescorerAnonymousInnerClassHelper3(this, new FixedScoreQuery(idToNum, reverse))
+            TopDocs hits2 = new QueryRescorerAnonymousClass3(this, new FixedScoreQuery(idToNum, reverse))
               .Rescore(s, hits, numHits);
 
             int[] expected = new int[numHits];
@@ -430,11 +430,11 @@
             dir.Dispose();
         }
 
-        private class QueryRescorerAnonymousInnerClassHelper3 : QueryRescorer
+        private class QueryRescorerAnonymousClass3 : QueryRescorer
         {
             private readonly TestQueryRescorer outerInstance;
 
-            public QueryRescorerAnonymousInnerClassHelper3(TestQueryRescorer outerInstance, FixedScoreQuery fixedScoreQuery)
+            public QueryRescorerAnonymousClass3(TestQueryRescorer outerInstance, FixedScoreQuery fixedScoreQuery)
                 : base(fixedScoreQuery)
             {
                 this.outerInstance = outerInstance;
@@ -461,14 +461,14 @@
 
             public override Weight CreateWeight(IndexSearcher searcher)
             {
-                return new WeightAnonymousInnerClassHelper(this);
+                return new WeightAnonymousClass(this);
             }
 
-            private class WeightAnonymousInnerClassHelper : Weight
+            private class WeightAnonymousClass : Weight
             {
                 private readonly FixedScoreQuery outerInstance;
 
-                public WeightAnonymousInnerClassHelper(FixedScoreQuery outerInstance)
+                public WeightAnonymousClass(FixedScoreQuery outerInstance)
                 {
                     this.outerInstance = outerInstance;
                 }
@@ -486,16 +486,16 @@
 
                 public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs)
                 {
-                    return new ScorerAnonymousInnerClassHelper(this, context);
+                    return new ScorerAnonymousClass(this, context);
                 }
 
-                private class ScorerAnonymousInnerClassHelper : Scorer
+                private class ScorerAnonymousClass : Scorer
                 {
-                    private readonly WeightAnonymousInnerClassHelper outerInstance;
+                    private readonly WeightAnonymousClass outerInstance;
 
                     private readonly AtomicReaderContext context;
 
-                    public ScorerAnonymousInnerClassHelper(WeightAnonymousInnerClassHelper outerInstance, AtomicReaderContext context)
+                    public ScorerAnonymousClass(WeightAnonymousClass outerInstance, AtomicReaderContext context)
                         : base(null)
                     {
                         this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net.Tests/Search/TestRegexpQuery.cs b/src/Lucene.Net.Tests/Search/TestRegexpQuery.cs
index 1a2c326..16954bd 100644
--- a/src/Lucene.Net.Tests/Search/TestRegexpQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestRegexpQuery.cs
@@ -119,16 +119,16 @@
         [Test]
         public virtual void TestCustomProvider()
         {
-            IAutomatonProvider myProvider = new AutomatonProviderAnonymousInnerClassHelper(this);
+            IAutomatonProvider myProvider = new AutomatonProviderAnonymousClass(this);
             RegexpQuery query = new RegexpQuery(NewTerm("<quickBrown>"), RegExpSyntax.ALL, myProvider);
             Assert.AreEqual(1, searcher.Search(query, 5).TotalHits);
         }
 
-        private class AutomatonProviderAnonymousInnerClassHelper : IAutomatonProvider
+        private class AutomatonProviderAnonymousClass : IAutomatonProvider
         {
             private readonly TestRegexpQuery outerInstance;
 
-            public AutomatonProviderAnonymousInnerClassHelper(TestRegexpQuery outerInstance)
+            public AutomatonProviderAnonymousClass(TestRegexpQuery outerInstance)
             {
                 this.outerInstance = outerInstance;
                 quickBrownAutomaton = BasicOperations.Union(new Automaton[] { BasicAutomata.MakeString("quick"), BasicAutomata.MakeString("brown"), BasicAutomata.MakeString("bob") });
diff --git a/src/Lucene.Net.Tests/Search/TestSameScoresWithThreads.cs b/src/Lucene.Net.Tests/Search/TestSameScoresWithThreads.cs
index 5313a2b..d309226 100644
--- a/src/Lucene.Net.Tests/Search/TestSameScoresWithThreads.cs
+++ b/src/Lucene.Net.Tests/Search/TestSameScoresWithThreads.cs
@@ -99,7 +99,7 @@
                 ThreadJob[] threads = new ThreadJob[numThreads];
                 for (int threadID = 0; threadID < numThreads; threadID++)
                 {
-                    ThreadJob thread = new ThreadAnonymousInnerClassHelper(this, s, answers, startingGun);
+                    ThreadJob thread = new ThreadAnonymousClass(this, s, answers, startingGun);
                     threads[threadID] = thread;
                     thread.Start();
                 }
@@ -113,7 +113,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestSameScoresWithThreads outerInstance;
 
@@ -121,7 +121,7 @@
             private readonly IDictionary<BytesRef, TopDocs> answers;
             private readonly CountdownEvent startingGun;
 
-            public ThreadAnonymousInnerClassHelper(TestSameScoresWithThreads outerInstance, IndexSearcher s, IDictionary<BytesRef, TopDocs> answers, CountdownEvent startingGun)
+            public ThreadAnonymousClass(TestSameScoresWithThreads outerInstance, IndexSearcher s, IDictionary<BytesRef, TopDocs> answers, CountdownEvent startingGun)
             {
                 this.outerInstance = outerInstance;
                 this.s = s;
diff --git a/src/Lucene.Net.Tests/Search/TestScorerPerf.cs b/src/Lucene.Net.Tests/Search/TestScorerPerf.cs
index a5efe87..bf2fba7 100644
--- a/src/Lucene.Net.Tests/Search/TestScorerPerf.cs
+++ b/src/Lucene.Net.Tests/Search/TestScorerPerf.cs
@@ -166,7 +166,7 @@
         internal virtual BitSet AddClause(BooleanQuery bq, BitSet result)
         {
             BitSet rnd = sets[Random.Next(sets.Length)];
-            Query q = new ConstantScoreQuery(new FilterAnonymousInnerClassHelper(rnd));
+            Query q = new ConstantScoreQuery(new FilterAnonymousClass(rnd));
             bq.Add(q, Occur.MUST);
             if (validate)
             {
@@ -182,11 +182,11 @@
             return result;
         }
 
-        private class FilterAnonymousInnerClassHelper : Filter
+        private class FilterAnonymousClass : Filter
         {
             private readonly BitSet rnd;
 
-            public FilterAnonymousInnerClassHelper(BitSet rnd)
+            public FilterAnonymousClass(BitSet rnd)
             {
                 this.rnd = rnd;
             }
diff --git a/src/Lucene.Net.Tests/Search/TestSearchWithThreads.cs b/src/Lucene.Net.Tests/Search/TestSearchWithThreads.cs
index f57843d..09f2eb1 100644
--- a/src/Lucene.Net.Tests/Search/TestSearchWithThreads.cs
+++ b/src/Lucene.Net.Tests/Search/TestSearchWithThreads.cs
@@ -97,7 +97,7 @@
             ThreadJob[] threads = new ThreadJob[NUM_SEARCH_THREADS];
             for (int threadID = 0; threadID < NUM_SEARCH_THREADS; threadID++)
             {
-                threads[threadID] = new ThreadAnonymousInnerClassHelper(this, s, failed, netSearch);
+                threads[threadID] = new ThreadAnonymousClass(this, s, failed, netSearch);
                 threads[threadID].IsBackground = (true);
             }
 
@@ -120,7 +120,7 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestSearchWithThreads outerInstance;
 
@@ -128,7 +128,7 @@
             private readonly AtomicBoolean failed;
             private readonly AtomicInt64 netSearch;
 
-            public ThreadAnonymousInnerClassHelper(TestSearchWithThreads outerInstance, IndexSearcher s, AtomicBoolean failed, AtomicInt64 netSearch)
+            public ThreadAnonymousClass(TestSearchWithThreads outerInstance, IndexSearcher s, AtomicBoolean failed, AtomicInt64 netSearch)
             {
                 this.outerInstance = outerInstance;
                 this.s = s;
diff --git a/src/Lucene.Net.Tests/Search/TestSearcherManager.cs b/src/Lucene.Net.Tests/Search/TestSearcherManager.cs
index e95f944..074096e 100644
--- a/src/Lucene.Net.Tests/Search/TestSearcherManager.cs
+++ b/src/Lucene.Net.Tests/Search/TestSearcherManager.cs
@@ -1,4 +1,4 @@
-using J2N.Threading;
+using J2N.Threading;
 using J2N.Threading.Atomic;
 using Lucene.Net.Index;
 using Lucene.Net.Index.Extensions;
@@ -74,7 +74,7 @@
 
         protected override void DoAfterWriter(TaskScheduler es)
         {
-            SearcherFactory factory = new SearcherFactoryAnonymousInnerClassHelper(this, es);
+            SearcherFactory factory = new SearcherFactoryAnonymousClass(this, es);
             if (Random.NextBoolean())
             {
                 // TODO: can we randomize the applyAllDeletes?  But
@@ -95,13 +95,13 @@
             lifetimeMGR = new SearcherLifetimeManager();
         }
 
-        private class SearcherFactoryAnonymousInnerClassHelper : SearcherFactory
+        private class SearcherFactoryAnonymousClass : SearcherFactory
         {
             private readonly TestSearcherManager outerInstance;
 
             private TaskScheduler es;
 
-            public SearcherFactoryAnonymousInnerClassHelper(TestSearcherManager outerInstance, TaskScheduler es)
+            public SearcherFactoryAnonymousClass(TestSearcherManager outerInstance, TaskScheduler es)
             {
                 this.outerInstance = outerInstance;
                 this.es = es;
@@ -118,7 +118,7 @@
 
         protected override void DoSearching(TaskScheduler es, long stopTime)
         {
-            ThreadJob reopenThread = new ThreadAnonymousInnerClassHelper(this, stopTime);
+            ThreadJob reopenThread = new ThreadAnonymousClass(this, stopTime);
             reopenThread.IsBackground = (true);
             reopenThread.Start();
 
@@ -127,13 +127,13 @@
             reopenThread.Join();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestSearcherManager outerInstance;
 
             private long stopTime;
 
-            public ThreadAnonymousInnerClassHelper(TestSearcherManager outerInstance, long stopTime)
+            public ThreadAnonymousClass(TestSearcherManager outerInstance, long stopTime)
             {
                 this.outerInstance = outerInstance;
                 this.stopTime = stopTime;
@@ -256,13 +256,7 @@
         {
             Directory dir = NewDirectory();
             // Test can deadlock if we use SMS:
-            IConcurrentMergeScheduler scheduler;
-#if !FEATURE_CONCURRENTMERGESCHEDULER
-            scheduler = new TaskMergeScheduler();
-#else
-            scheduler = new ConcurrentMergeScheduler();
-#endif
-            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(scheduler));
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new ConcurrentMergeScheduler()));
             writer.AddDocument(new Document());
             writer.Commit();
             CountdownEvent awaitEnterWarm = new CountdownEvent(1);
@@ -270,7 +264,7 @@
             AtomicBoolean triedReopen = new AtomicBoolean(false);
             //TaskScheduler es = Random().NextBoolean() ? null : Executors.newCachedThreadPool(new NamedThreadFactory("testIntermediateClose"));
             TaskScheduler es = Random.NextBoolean() ? null : TaskScheduler.Default;
-            SearcherFactory factory = new SearcherFactoryAnonymousInnerClassHelper2(this, awaitEnterWarm, awaitClose, triedReopen, es);
+            SearcherFactory factory = new SearcherFactoryAnonymousClass2(this, awaitEnterWarm, awaitClose, triedReopen, es);
             SearcherManager searcherManager = Random.NextBoolean() ? new SearcherManager(dir, factory) : new SearcherManager(writer, Random.NextBoolean(), factory);
             if (Verbose)
             {
@@ -289,7 +283,7 @@
             writer.Commit();
             AtomicBoolean success = new AtomicBoolean(false);
             Exception[] exc = new Exception[1];
-            ThreadJob thread = new ThreadJob(() => new RunnableAnonymousInnerClassHelper(this, triedReopen, searcherManager, success, exc).Run());
+            ThreadJob thread = new ThreadJob(() => new RunnableAnonymousClass(this, triedReopen, searcherManager, success, exc).Run());
             thread.Start();
             if (Verbose)
             {
@@ -326,7 +320,7 @@
             //}
         }
 
-        private class SearcherFactoryAnonymousInnerClassHelper2 : SearcherFactory
+        private class SearcherFactoryAnonymousClass2 : SearcherFactory
         {
             private readonly TestSearcherManager outerInstance;
 
@@ -335,7 +329,7 @@
             private AtomicBoolean triedReopen;
             private TaskScheduler es;
 
-            public SearcherFactoryAnonymousInnerClassHelper2(TestSearcherManager outerInstance, CountdownEvent awaitEnterWarm, CountdownEvent awaitClose, AtomicBoolean triedReopen, TaskScheduler es)
+            public SearcherFactoryAnonymousClass2(TestSearcherManager outerInstance, CountdownEvent awaitEnterWarm, CountdownEvent awaitClose, AtomicBoolean triedReopen, TaskScheduler es)
             {
                 this.outerInstance = outerInstance;
                 this.awaitEnterWarm = awaitEnterWarm;
@@ -368,7 +362,7 @@
             }
         }
 
-        private class RunnableAnonymousInnerClassHelper //: IThreadRunnable
+        private class RunnableAnonymousClass //: IThreadRunnable
         {
             private readonly TestSearcherManager outerInstance;
 
@@ -377,7 +371,7 @@
             private AtomicBoolean success;
             private Exception[] exc;
 
-            public RunnableAnonymousInnerClassHelper(TestSearcherManager outerInstance, AtomicBoolean triedReopen, SearcherManager searcherManager, AtomicBoolean success, Exception[] exc)
+            public RunnableAnonymousClass(TestSearcherManager outerInstance, AtomicBoolean triedReopen, SearcherManager searcherManager, AtomicBoolean success, Exception[] exc)
             {
                 this.outerInstance = outerInstance;
                 this.triedReopen = triedReopen;
@@ -429,11 +423,11 @@
         }
 
         [Test]
-        public virtual void TestReferenceDecrementIllegally([ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")]Func<IConcurrentMergeScheduler> newScheduler)
+        public virtual void TestReferenceDecrementIllegally()
         {
             Directory dir = NewDirectory();
-            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
-                            .SetMergeScheduler(newScheduler());
+            var config = NewIndexWriterConfig(
+                TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new ConcurrentMergeScheduler());
             IndexWriter writer = new IndexWriter(dir, config);
             SearcherManager sm = new SearcherManager(writer, false, new SearcherFactory());
             writer.AddDocument(new Document());
@@ -501,7 +495,7 @@
             IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
             AtomicBoolean afterRefreshCalled = new AtomicBoolean(false);
             SearcherManager sm = new SearcherManager(iw, false, new SearcherFactory());
-            sm.AddListener(new RefreshListenerAnonymousInnerClassHelper(this, afterRefreshCalled));
+            sm.AddListener(new RefreshListenerAnonymousClass(this, afterRefreshCalled));
             iw.AddDocument(new Document());
             iw.Commit();
             assertFalse(afterRefreshCalled);
@@ -512,13 +506,13 @@
             dir.Dispose();
         }
 
-        private class RefreshListenerAnonymousInnerClassHelper : ReferenceManager.IRefreshListener
+        private class RefreshListenerAnonymousClass : ReferenceManager.IRefreshListener
         {
             private readonly TestSearcherManager outerInstance;
 
             private AtomicBoolean afterRefreshCalled;
 
-            public RefreshListenerAnonymousInnerClassHelper(TestSearcherManager outerInstance, AtomicBoolean afterRefreshCalled)
+            public RefreshListenerAnonymousClass(TestSearcherManager outerInstance, AtomicBoolean afterRefreshCalled)
             {
                 this.outerInstance = outerInstance;
                 this.afterRefreshCalled = afterRefreshCalled;
@@ -551,7 +545,7 @@
 
             IndexReader other = DirectoryReader.Open(dir);
 
-            SearcherFactory theEvilOne = new SearcherFactoryAnonymousInnerClassHelper3(this, other);
+            SearcherFactory theEvilOne = new SearcherFactoryAnonymousClass3(this, other);
 
             try
             {
@@ -578,13 +572,13 @@
             dir.Dispose();
         }
 
-        private class SearcherFactoryAnonymousInnerClassHelper3 : SearcherFactory
+        private class SearcherFactoryAnonymousClass3 : SearcherFactory
         {
             private readonly TestSearcherManager outerInstance;
 
             private IndexReader other;
 
-            public SearcherFactoryAnonymousInnerClassHelper3(TestSearcherManager outerInstance, IndexReader other)
+            public SearcherFactoryAnonymousClass3(TestSearcherManager outerInstance, IndexReader other)
             {
                 this.outerInstance = outerInstance;
                 this.other = other;
@@ -615,7 +609,7 @@
 
             SearcherManager sm = new SearcherManager(dir, null);
 
-            ThreadJob t = new ThreadAnonymousInnerClassHelper2(this, sm);
+            ThreadJob t = new ThreadAnonymousClass2(this, sm);
             t.Start();
             t.Join();
 
@@ -626,13 +620,13 @@
             dir.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper2 : ThreadJob
+        private class ThreadAnonymousClass2 : ThreadJob
         {
             private readonly TestSearcherManager outerInstance;
 
             private SearcherManager sm;
 
-            public ThreadAnonymousInnerClassHelper2(TestSearcherManager outerInstance, SearcherManager sm)
+            public ThreadAnonymousClass2(TestSearcherManager outerInstance, SearcherManager sm)
             {
                 this.outerInstance = outerInstance;
                 this.sm = sm;
diff --git a/src/Lucene.Net.Tests/Search/TestSimilarity.cs b/src/Lucene.Net.Tests/Search/TestSimilarity.cs
index 916243d..a2a4d34 100644
--- a/src/Lucene.Net.Tests/Search/TestSimilarity.cs
+++ b/src/Lucene.Net.Tests/Search/TestSimilarity.cs
@@ -104,33 +104,33 @@
             Term b = new Term("field", "b");
             Term c = new Term("field", "c");
 
-            searcher.Search(new TermQuery(b), new CollectorAnonymousInnerClassHelper(this));
+            searcher.Search(new TermQuery(b), new CollectorAnonymousClass(this));
 
             BooleanQuery bq = new BooleanQuery();
             bq.Add(new TermQuery(a), Occur.SHOULD);
             bq.Add(new TermQuery(b), Occur.SHOULD);
             //System.out.println(bq.toString("field"));
-            searcher.Search(bq, new CollectorAnonymousInnerClassHelper2(this));
+            searcher.Search(bq, new CollectorAnonymousClass2(this));
 
             PhraseQuery pq = new PhraseQuery();
             pq.Add(a);
             pq.Add(c);
             //System.out.println(pq.toString("field"));
-            searcher.Search(pq, new CollectorAnonymousInnerClassHelper3(this));
+            searcher.Search(pq, new CollectorAnonymousClass3(this));
 
             pq.Slop = 2;
             //System.out.println(pq.toString("field"));
-            searcher.Search(pq, new CollectorAnonymousInnerClassHelper4(this));
+            searcher.Search(pq, new CollectorAnonymousClass4(this));
 
             reader.Dispose();
             store.Dispose();
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly TestSimilarity outerInstance;
 
-            public CollectorAnonymousInnerClassHelper(TestSimilarity outerInstance)
+            public CollectorAnonymousClass(TestSimilarity outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -154,11 +154,11 @@
             public virtual bool AcceptsDocsOutOfOrder => true;
         }
 
-        private class CollectorAnonymousInnerClassHelper2 : ICollector
+        private class CollectorAnonymousClass2 : ICollector
         {
             private readonly TestSimilarity outerInstance;
 
-            public CollectorAnonymousInnerClassHelper2(TestSimilarity outerInstance)
+            public CollectorAnonymousClass2(TestSimilarity outerInstance)
             {
                 this.outerInstance = outerInstance;
                 @base = 0;
@@ -186,11 +186,11 @@
             public virtual bool AcceptsDocsOutOfOrder => true;
         }
 
-        private class CollectorAnonymousInnerClassHelper3 : ICollector
+        private class CollectorAnonymousClass3 : ICollector
         {
             private readonly TestSimilarity outerInstance;
 
-            public CollectorAnonymousInnerClassHelper3(TestSimilarity outerInstance)
+            public CollectorAnonymousClass3(TestSimilarity outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -215,11 +215,11 @@
             public virtual bool AcceptsDocsOutOfOrder => true;
         }
 
-        private class CollectorAnonymousInnerClassHelper4 : ICollector
+        private class CollectorAnonymousClass4 : ICollector
         {
             private readonly TestSimilarity outerInstance;
 
-            public CollectorAnonymousInnerClassHelper4(TestSimilarity outerInstance)
+            public CollectorAnonymousClass4(TestSimilarity outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery.cs
index f7499db..f262e34 100644
--- a/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery.cs
@@ -243,7 +243,7 @@
         /// checks that no scores or freqs are infinite </summary>
         private void AssertSaneScoring(PhraseQuery pq, IndexSearcher searcher)
         {
-            searcher.Search(pq, new CollectorAnonymousInnerClassHelper(this));
+            searcher.Search(pq, new CollectorAnonymousClass(this));
             QueryUtils.Check(
 #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                 this,
@@ -251,11 +251,11 @@
                 Random, pq, searcher);
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly TestSloppyPhraseQuery outerInstance;
 
-            public CollectorAnonymousInnerClassHelper(TestSloppyPhraseQuery outerInstance)
+            public CollectorAnonymousClass(TestSloppyPhraseQuery outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Search/TestSort.cs b/src/Lucene.Net.Tests/Search/TestSort.cs
index 4312838..99176a0 100644
--- a/src/Lucene.Net.Tests/Search/TestSort.cs
+++ b/src/Lucene.Net.Tests/Search/TestSort.cs
@@ -1727,7 +1727,7 @@
             iw.Dispose();
 
             IndexSearcher searcher = NewSearcher(ir);
-            Sort sort = new Sort(new SortField("parser", new IntParserAnonymousInnerClassHelper(this)), SortField.FIELD_DOC);
+            Sort sort = new Sort(new SortField("parser", new IntParserAnonymousClass(this)), SortField.FIELD_DOC);
 
             TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
 
@@ -1743,11 +1743,11 @@
             dir.Dispose();
         }
 
-        private class IntParserAnonymousInnerClassHelper : FieldCache.IInt32Parser
+        private class IntParserAnonymousClass : FieldCache.IInt32Parser
         {
             private readonly TestSort outerInstance;
 
-            public IntParserAnonymousInnerClassHelper(TestSort outerInstance)
+            public IntParserAnonymousClass(TestSort outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -1792,7 +1792,7 @@
             iw.Dispose();
 
             IndexSearcher searcher = NewSearcher(ir);
-            Sort sort = new Sort(new SortField("parser", new ByteParserAnonymousInnerClassHelper(this)), SortField.FIELD_DOC);
+            Sort sort = new Sort(new SortField("parser", new ByteParserAnonymousClass(this)), SortField.FIELD_DOC);
 
             TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
 
@@ -1809,12 +1809,12 @@
         }
 
 #pragma warning disable 612, 618
-        private class ByteParserAnonymousInnerClassHelper : FieldCache.IByteParser
+        private class ByteParserAnonymousClass : FieldCache.IByteParser
 #pragma warning restore 612, 618
         {
             private readonly TestSort outerInstance;
 
-            public ByteParserAnonymousInnerClassHelper(TestSort outerInstance)
+            public ByteParserAnonymousClass(TestSort outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -1856,7 +1856,7 @@
             iw.Dispose();
 
             IndexSearcher searcher = NewSearcher(ir);
-            Sort sort = new Sort(new SortField("parser", new ShortParserAnonymousInnerClassHelper(this)), SortField.FIELD_DOC);
+            Sort sort = new Sort(new SortField("parser", new ShortParserAnonymousClass(this)), SortField.FIELD_DOC);
 
             TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
 
@@ -1873,12 +1873,12 @@
         }
 
 #pragma warning disable 612, 618
-        private class ShortParserAnonymousInnerClassHelper : FieldCache.IInt16Parser
+        private class ShortParserAnonymousClass : FieldCache.IInt16Parser
 #pragma warning restore 612, 618
         {
             private readonly TestSort outerInstance;
 
-            public ShortParserAnonymousInnerClassHelper(TestSort outerInstance)
+            public ShortParserAnonymousClass(TestSort outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -1923,7 +1923,7 @@
             iw.Dispose();
 
             IndexSearcher searcher = NewSearcher(ir);
-            Sort sort = new Sort(new SortField("parser", new LongParserAnonymousInnerClassHelper(this)), SortField.FIELD_DOC);
+            Sort sort = new Sort(new SortField("parser", new LongParserAnonymousClass(this)), SortField.FIELD_DOC);
 
             TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
 
@@ -1939,11 +1939,11 @@
             dir.Dispose();
         }
 
-        private class LongParserAnonymousInnerClassHelper : FieldCache.IInt64Parser
+        private class LongParserAnonymousClass : FieldCache.IInt64Parser
         {
             private readonly TestSort outerInstance;
 
-            public LongParserAnonymousInnerClassHelper(TestSort outerInstance)
+            public LongParserAnonymousClass(TestSort outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -1988,7 +1988,7 @@
             iw.Dispose();
 
             IndexSearcher searcher = NewSearcher(ir);
-            Sort sort = new Sort(new SortField("parser", new FloatParserAnonymousInnerClassHelper(this)), SortField.FIELD_DOC);
+            Sort sort = new Sort(new SortField("parser", new FloatParserAnonymousClass(this)), SortField.FIELD_DOC);
 
             TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
 
@@ -2004,11 +2004,11 @@
             dir.Dispose();
         }
 
-        private class FloatParserAnonymousInnerClassHelper : FieldCache.ISingleParser
+        private class FloatParserAnonymousClass : FieldCache.ISingleParser
         {
             private readonly TestSort outerInstance;
 
-            public FloatParserAnonymousInnerClassHelper(TestSort outerInstance)
+            public FloatParserAnonymousClass(TestSort outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -2053,7 +2053,7 @@
             iw.Dispose();
 
             IndexSearcher searcher = NewSearcher(ir);
-            Sort sort = new Sort(new SortField("parser", new DoubleParserAnonymousInnerClassHelper(this)), SortField.FIELD_DOC);
+            Sort sort = new Sort(new SortField("parser", new DoubleParserAnonymousClass(this)), SortField.FIELD_DOC);
 
             TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
 
@@ -2069,11 +2069,11 @@
             dir.Dispose();
         }
 
-        private class DoubleParserAnonymousInnerClassHelper : FieldCache.IDoubleParser
+        private class DoubleParserAnonymousClass : FieldCache.IDoubleParser
         {
             private readonly TestSort outerInstance;
 
-            public DoubleParserAnonymousInnerClassHelper(TestSort outerInstance)
+            public DoubleParserAnonymousClass(TestSort outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
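
The renames in this file, and throughout the hunks below, follow a single mechanical pattern: nested classes that stand in for Java anonymous inner classes drop the old AnonymousInnerClassHelper suffix in favor of AnonymousClass. A minimal sketch of the shape these classes share (ExampleTest and IExampleParser are hypothetical names, not types from the codebase):

    public class ExampleTest
    {
        private interface IExampleParser
        {
            int Parse(string value);
        }

        // The port's substitute for a Java anonymous inner class: a named nested class
        // that receives the enclosing instance explicitly through its constructor.
        private class ParserAnonymousClass : IExampleParser
        {
            private readonly ExampleTest outerInstance;

            public ParserAnonymousClass(ExampleTest outerInstance)
            {
                this.outerInstance = outerInstance; // stands in for Java's implicit "outer this"
            }

            public int Parse(string value) => int.Parse(value);
        }

        public IExampleParser NewParser() => new ParserAnonymousClass(this);
    }
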
diff --git a/src/Lucene.Net.Tests/Search/TestTermScorer.cs b/src/Lucene.Net.Tests/Search/TestTermScorer.cs
index 6a1d8ba..3241196 100644
--- a/src/Lucene.Net.Tests/Search/TestTermScorer.cs
+++ b/src/Lucene.Net.Tests/Search/TestTermScorer.cs
@@ -87,7 +87,7 @@
             IList<TestHit> docs = new List<TestHit>();
             // must call next first
 
-            ts.Score(new CollectorAnonymousInnerClassHelper(this, context, docs));
+            ts.Score(new CollectorAnonymousClass(this, context, docs));
             Assert.IsTrue(docs.Count == 2, "docs Size: " + docs.Count + " is not: " + 2);
             TestHit doc0 = docs[0];
             TestHit doc5 = docs[1];
@@ -105,14 +105,14 @@
             Assert.IsTrue(doc0.Score == 1.6931472f, doc0.Score + " does not equal: " + 1.6931472f);
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly TestTermScorer outerInstance;
 
             private AtomicReaderContext context;
             private readonly IList<TestHit> docs;
 
-            public CollectorAnonymousInnerClassHelper(TestTermScorer outerInstance, AtomicReaderContext context, IList<TestHit> docs)
+            public CollectorAnonymousClass(TestTermScorer outerInstance, AtomicReaderContext context, IList<TestHit> docs)
             {
                 this.outerInstance = outerInstance;
                 this.context = context;
diff --git a/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs b/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs
index e77740a..ba019d7 100644
--- a/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs
+++ b/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs
@@ -325,7 +325,7 @@
             for (int i = 0; i < threadArray.Length; ++i)
             {
                 int num = i;
-                threadArray[num] = new ThreadClassAnonymousHelper(this, success, withTimeout, num);
+                threadArray[num] = new ThreadAnonymousClass(this, success, withTimeout, num);
             }
             for (int i = 0; i < threadArray.Length; ++i)
             {
@@ -338,13 +338,13 @@
             assertEquals("some threads failed!", N_THREADS, success.Cardinality());
         }
 
-        internal class ThreadClassAnonymousHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestTimeLimitingCollector outerInstance;
             private readonly OpenBitSet success;
             private readonly bool withTimeout;
             private readonly int num;
-            public ThreadClassAnonymousHelper(TestTimeLimitingCollector outerInstance, OpenBitSet success, bool withTimeout, int num)
+            public ThreadAnonymousClass(TestTimeLimitingCollector outerInstance, OpenBitSet success, bool withTimeout, int num)
             {
                 this.outerInstance = outerInstance;
                 this.success = success;
diff --git a/src/Lucene.Net.Tests/Store/TestDirectory.cs b/src/Lucene.Net.Tests/Store/TestDirectory.cs
index 4fb2d95..4d1913e 100644
--- a/src/Lucene.Net.Tests/Store/TestDirectory.cs
+++ b/src/Lucene.Net.Tests/Store/TestDirectory.cs
@@ -1,4 +1,4 @@
-using J2N;
+using J2N;
 using J2N.Threading;
 using Lucene.Net.Attributes;
 using Lucene.Net.Support;
@@ -194,7 +194,7 @@
                 string fname = "foo." + i;
                 string lockname = "foo" + i + ".lck";
                 IndexOutput @out = dir.CreateOutput(fname, NewIOContext(Random));
-                @out.WriteByte((byte)(sbyte)i);
+                @out.WriteByte((byte)i);
                 @out.WriteBytes(largeBuffer, largeBuffer.Length);
                 @out.Dispose();
 
diff --git a/src/Lucene.Net.Tests/Store/TestHugeRamFile.cs b/src/Lucene.Net.Tests/Store/TestHugeRamFile.cs
index 22f9558..40f6b12 100644
--- a/src/Lucene.Net.Tests/Store/TestHugeRamFile.cs
+++ b/src/Lucene.Net.Tests/Store/TestHugeRamFile.cs
@@ -1,4 +1,4 @@
-using NUnit.Framework;
+using NUnit.Framework;
 using System;
 using System.Collections.Generic;
 using Assert = Lucene.Net.TestFramework.Assert;
@@ -73,11 +73,11 @@
             var b2 = new byte[RAMOutputStream.BUFFER_SIZE / 3];
             for (int i = 0; i < b1.Length; i++)
             {
-                b1[i] = (byte)(sbyte)(i & 0x0007F);
+                b1[i] = (byte)(i & 0x0007F);
             }
             for (int i = 0; i < b2.Length; i++)
             {
-                b2[i] = (byte)(sbyte)(i & 0x0003F);
+                b2[i] = (byte)(i & 0x0003F);
             }
             long n = 0;
             Assert.AreEqual(n, @out.Length, "output length must match");
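
The cast change above, like the one in TestDirectory.cs earlier and the ones in TestPagedBytes.cs and CompressingTermVectorsWriter.cs further down, removes a redundant intermediate cast: in an unchecked C# conversion, casting an int straight to byte already keeps only the low 8 bits, so routing through sbyte first yields the same value. A quick sketch of that equivalence (a standalone helper, not part of the test):

    static void CheckByteCastEquivalence()
    {
        for (int i = -300; i < 300; i++)
        {
            byte direct = unchecked((byte)i);
            byte viaSByte = unchecked((byte)(sbyte)i);
            if (direct != viaSByte) throw new System.InvalidOperationException("casts differ");
        }
    }
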
diff --git a/src/Lucene.Net.Tests/Store/TestRAMDirectory.cs b/src/Lucene.Net.Tests/Store/TestRAMDirectory.cs
index 1b80cda..76970da 100644
--- a/src/Lucene.Net.Tests/Store/TestRAMDirectory.cs
+++ b/src/Lucene.Net.Tests/Store/TestRAMDirectory.cs
@@ -124,7 +124,7 @@
             for (int i = 0; i < numThreads; i++)
             {
                 int num = i;
-                threads[i] = new ThreadAnonymousInnerClassHelper(this, writer, num);
+                threads[i] = new ThreadAnonymousClass(this, writer, num);
             }
             for (int i = 0; i < numThreads; i++)
             {
@@ -141,14 +141,14 @@
             writer.Dispose();
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestRAMDirectory outerInstance;
 
             private readonly IndexWriter writer;
             private readonly int num;
 
-            public ThreadAnonymousInnerClassHelper(TestRAMDirectory outerInstance, IndexWriter writer, int num)
+            public ThreadAnonymousClass(TestRAMDirectory outerInstance, IndexWriter writer, int num)
             {
                 this.outerInstance = outerInstance;
                 this.writer = writer;
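
ThreadAnonymousClass here is the threading flavor of the same rename: a subclass of J2N.Threading.ThreadJob standing in for a Java anonymous Thread, with the captured locals passed through the constructor. A rough sketch of the shape, assuming ThreadJob exposes an overridable Run() as J2N's port of java.lang.Thread (WorkerThread and its field are hypothetical names):

    using J2N.Threading;

    internal class WorkerThread : ThreadJob
    {
        private readonly int num; // state a Java anonymous Runnable would have closed over

        public WorkerThread(int num)
        {
            this.num = num;
        }

        public override void Run()
        {
            // per-thread work goes here, e.g. writing documents tagged with num
        }
    }
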
diff --git a/src/Lucene.Net.Tests/Support/Index/TestTaskMergeScheduler.cs b/src/Lucene.Net.Tests/Support/Index/TestTaskMergeScheduler.cs
deleted file mode 100644
index 27d58d2..0000000
--- a/src/Lucene.Net.Tests/Support/Index/TestTaskMergeScheduler.cs
+++ /dev/null
@@ -1,179 +0,0 @@
-using Lucene.Net.Attributes;
-using Lucene.Net.Documents;
-using Lucene.Net.Index.Extensions;
-using Lucene.Net.Store;
-using Lucene.Net.Util;
-using NUnit.Framework;
-using System;
-using System.IO;
-using Console = Lucene.Net.Util.SystemConsole;
-
-namespace Lucene.Net.Index
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using Directory = Lucene.Net.Store.Directory;
-    using Document = Documents.Document;
-    using Field = Field;
-    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
-    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
-    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-
-    /// <summary>
-    /// Holds test cases to verify external APIs are accessible
-    /// while not being in Lucene.Net.Index package.
-    /// </summary>
-    public class TestTaskMergeScheduler : LuceneTestCase
-    {
-        internal volatile bool mergeCalled;
-        internal volatile bool excCalled;
-
-        private class MyMergeScheduler : TaskMergeScheduler
-        {
-            private readonly TestTaskMergeScheduler outerInstance;
-
-            public MyMergeScheduler(TestTaskMergeScheduler outerInstance)
-            {
-                this.outerInstance = outerInstance;
-            }
-
-            protected override void HandleMergeException(Exception t)
-            {
-                outerInstance.excCalled = true;
-            }
-
-            public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMergesFound)
-            {
-                outerInstance.mergeCalled = true;
-                base.Merge(writer, trigger, newMergesFound);
-            }
-        }
-
-        private class FailOnlyOnMerge : Failure
-        {
-            public override void Eval(MockDirectoryWrapper dir)
-            {
-                // LUCENENET specific: for these to work in release mode, we have added [MethodImpl(MethodImplOptions.NoInlining)]
-                // to each possible target of the StackTraceHelper. If these change, so must the attribute on the target methods.
-                if (StackTraceHelper.DoesStackTraceContainMethod("DoMerge"))
-                {
-                    throw new IOException("now failing during merge");
-                }
-            }
-        }
-
-        [Test]
-        [AwaitsFix(BugUrl = "https://github.com/apache/lucenenet/issues/269")] // LUCENENET TODO: this test occasionally fails
-        public void TestSubclassTaskMergeScheduler()
-        {
-            MockDirectoryWrapper dir = NewMockDirectory();
-            dir.FailOn(new FailOnlyOnMerge());
-
-            Document doc = new Document();
-            Field idField = NewStringField("id", "", Field.Store.YES);
-            doc.Add(idField);
-
-            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new MyMergeScheduler(this)).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy()));
-            LogMergePolicy logMP = (LogMergePolicy)writer.Config.MergePolicy;
-            logMP.MergeFactor = 10;
-            for (int i = 0; i < 20; i++)
-            {
-                writer.AddDocument(doc);
-            }
-
-            ((MyMergeScheduler)writer.Config.MergeScheduler).Sync();
-            writer.Dispose();
-
-            assertTrue(mergeCalled);
-            dir.Dispose();
-        }
-
-        private class ReportingMergeScheduler : MergeScheduler
-        {
-            public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMergesFound)
-            {
-                MergePolicy.OneMerge merge = null;
-                while ((merge = writer.NextMerge()) != null)
-                {
-                    if (Verbose)
-                    {
-                        Console.WriteLine("executing merge " + merge.SegString(writer.Directory));
-                    }
-                    writer.Merge(merge);
-                }
-            }
-
-            protected override void Dispose(bool disposing)
-            {
-            }
-        }
-
-        [Test]
-        public void TestCustomMergeScheduler()
-        {
-            // we don't really need to execute anything, just to make sure the custom MS
-            // compiles. But ensure that it can be used as well, e.g., no other hidden
-            // dependencies or something. Therefore, don't use any random API !
-            Directory dir = new RAMDirectory();
-            IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
-            conf.SetMergeScheduler(new ReportingMergeScheduler());
-            IndexWriter writer = new IndexWriter(dir, conf);
-            writer.AddDocument(new Document());
-            writer.Commit(); // trigger flush
-            writer.AddDocument(new Document());
-            writer.Commit(); // trigger flush
-            writer.ForceMerge(1);
-            writer.Dispose();
-            dir.Dispose();
-        }
-
-        // LUCENENET-603
-        [Test, LuceneNetSpecific]
-        public void TestExceptionOnBackgroundThreadIsPropagatedToCallingThread()
-        {
-            using MockDirectoryWrapper dir = NewMockDirectory();
-            dir.FailOn(new FailOnlyOnMerge());
-
-            Document doc = new Document();
-            Field idField = NewStringField("id", "", Field.Store.YES);
-            doc.Add(idField);
-
-            var mergeScheduler = new TaskMergeScheduler();
-            using IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(mergeScheduler).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy()));
-            LogMergePolicy logMP = (LogMergePolicy)writer.Config.MergePolicy;
-            logMP.MergeFactor = 10;
-            for (int i = 0; i < 20; i++)
-            {
-                writer.AddDocument(doc);
-            }
-
-            bool exceptionHit = false;
-            try
-            {
-                mergeScheduler.Sync();
-            }
-            catch (MergePolicy.MergeException)
-            {
-                exceptionHit = true;
-            }
-
-            assertTrue(exceptionHit);
-        }
-    }
-}
\ No newline at end of file
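
The file deleted above covered TaskMergeScheduler; together with the FEATURE_CONCURRENTMERGESCHEDULER guards dropped in the next file, this suggests ConcurrentMergeScheduler is now compiled for every target, so the task-based fallback tests are no longer needed. A minimal, hedged sketch of selecting a merge scheduler through IndexWriterConfig, using only APIs the deleted test itself exercised (the null analyzer mirrors the deleted TestCustomMergeScheduler; LuceneVersion.LUCENE_48 is assumed to be the relevant version constant):

    using Lucene.Net.Index;
    using Lucene.Net.Store;
    using Lucene.Net.Util;

    static void OpenWriterWithConcurrentMerges()
    {
        using Directory dir = new RAMDirectory();
        var conf = new IndexWriterConfig(LuceneVersion.LUCENE_48, null); // null analyzer, as in the deleted test
        conf.SetMergeScheduler(new ConcurrentMergeScheduler());          // no #if guard required anymore
        using IndexWriter writer = new IndexWriter(dir, conf);
        writer.Commit();
    }
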
diff --git a/src/Lucene.Net.Tests/TestMergeSchedulerExternal.cs b/src/Lucene.Net.Tests/TestMergeSchedulerExternal.cs
index e33adcc..a440a06 100644
--- a/src/Lucene.Net.Tests/TestMergeSchedulerExternal.cs
+++ b/src/Lucene.Net.Tests/TestMergeSchedulerExternal.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Documents;
+using Lucene.Net.Documents;
 using Lucene.Net.Index.Extensions;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
@@ -31,9 +31,7 @@
     using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
     using Document = Documents.Document;
     using Field = Field;
-#if FEATURE_CONCURRENTMERGESCHEDULER
     using ConcurrentMergeScheduler = Lucene.Net.Index.ConcurrentMergeScheduler;
-#endif
     using IndexWriter = Lucene.Net.Index.IndexWriter;
     using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
     using LogMergePolicy = Lucene.Net.Index.LogMergePolicy;
@@ -52,7 +50,6 @@
     /// </summary>
     public class TestMergeSchedulerExternal : LuceneTestCase
     {
-#if FEATURE_CONCURRENTMERGESCHEDULER
         internal volatile bool mergeCalled;
         internal volatile bool mergeThreadCreated;
         internal volatile bool excCalled;
@@ -139,7 +136,6 @@
             Assert.IsTrue(excCalled);
             dir.Dispose();
         }
-#endif
 
         private class ReportingMergeScheduler : MergeScheduler
         {
diff --git a/src/Lucene.Net.Tests/TestWorstCaseTestBehavior.cs b/src/Lucene.Net.Tests/TestWorstCaseTestBehavior.cs
index 6d219f7..6648516 100644
--- a/src/Lucene.Net.Tests/TestWorstCaseTestBehavior.cs
+++ b/src/Lucene.Net.Tests/TestWorstCaseTestBehavior.cs
@@ -31,7 +31,7 @@
         [Test]
         public virtual void TestThreadLeak()
         {
-            ThreadJob t = new ThreadAnonymousInnerClassHelper(this);
+            ThreadJob t = new ThreadAnonymousClass(this);
             t.Start();
 
             while (!t.IsAlive)
@@ -43,11 +43,11 @@
         }
 #endif
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly TestWorstCaseTestBehavior outerInstance;
 
-            public ThreadAnonymousInnerClassHelper(TestWorstCaseTestBehavior outerInstance)
+            public ThreadAnonymousClass(TestWorstCaseTestBehavior outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -108,16 +108,16 @@
         [Test]
         public virtual void TestUncaughtException()
         {
-            ThreadJob t = new ThreadAnonymousInnerClassHelper2(this);
+            ThreadJob t = new ThreadAnonymousClass2(this);
             t.Start();
             t.Join();
         }
 
-        private class ThreadAnonymousInnerClassHelper2 : ThreadJob
+        private class ThreadAnonymousClass2 : ThreadJob
         {
             private readonly TestWorstCaseTestBehavior outerInstance;
 
-            public ThreadAnonymousInnerClassHelper2(TestWorstCaseTestBehavior outerInstance)
+            public ThreadAnonymousClass2(TestWorstCaseTestBehavior outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
diff --git a/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs b/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs
index c914cb9..ee62eb1 100644
--- a/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs
+++ b/src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs
@@ -792,34 +792,34 @@
                 PositiveIntOutputs o1 = PositiveIntOutputs.Singleton;
                 PositiveIntOutputs o2 = PositiveIntOutputs.Singleton;
                 PairOutputs<long, long> outputs = new PairOutputs<long, long>(o1, o2);
-                new VisitTermsAnonymousInnerClassHelper(dirOut, wordsFileIn, inputMode, prune, outputs, doPack, noArcArrays).Run(limit, verify, false);
+                new VisitTermsAnonymousClass(dirOut, wordsFileIn, inputMode, prune, outputs, doPack, noArcArrays).Run(limit, verify, false);
             }
             else if (storeOrds)
             {
                 // Store only ords
                 PositiveIntOutputs outputs = PositiveIntOutputs.Singleton;
-                new VisitTermsAnonymousInnerClassHelper2(dirOut, wordsFileIn, inputMode, prune, outputs, doPack, noArcArrays).Run(limit, verify, true);
+                new VisitTermsAnonymousClass2(dirOut, wordsFileIn, inputMode, prune, outputs, doPack, noArcArrays).Run(limit, verify, true);
             }
             else if (storeDocFreqs)
             {
                 // Store only docFreq
                 PositiveIntOutputs outputs = PositiveIntOutputs.Singleton;
-                new VisitTermsAnonymousInnerClassHelper3(dirOut, wordsFileIn, inputMode, prune, outputs, doPack, noArcArrays).Run(limit, verify, false);
+                new VisitTermsAnonymousClass3(dirOut, wordsFileIn, inputMode, prune, outputs, doPack, noArcArrays).Run(limit, verify, false);
             }
             else
             {
                 // Store nothing
                 NoOutputs outputs = NoOutputs.Singleton;
                 object NO_OUTPUT = outputs.NoOutput;
-                new VisitTermsAnonymousInnerClassHelper4(dirOut, wordsFileIn, inputMode, prune, outputs, doPack, noArcArrays, NO_OUTPUT).Run(limit, verify, false);
+                new VisitTermsAnonymousClass4(dirOut, wordsFileIn, inputMode, prune, outputs, doPack, noArcArrays, NO_OUTPUT).Run(limit, verify, false);
             }
         }*/
 
-        private class VisitTermsAnonymousInnerClassHelper : VisitTerms<Pair>
+        private class VisitTermsAnonymousClass : VisitTerms<Pair>
         {
             private readonly PairOutputs<long?, long?> outputs;
 
-            public VisitTermsAnonymousInnerClassHelper(string dirOut, string wordsFileIn, int inputMode, int prune, PairOutputs<long?, long?> outputs, bool doPack, bool noArcArrays)
+            public VisitTermsAnonymousClass(string dirOut, string wordsFileIn, int inputMode, int prune, PairOutputs<long?, long?> outputs, bool doPack, bool noArcArrays)
                 : base(dirOut, wordsFileIn, inputMode, prune, outputs, doPack, noArcArrays)
             {
                 this.outputs = outputs;
@@ -836,9 +836,9 @@
             }
         }
 
-        private class VisitTermsAnonymousInnerClassHelper2 : VisitTerms<long?>
+        private class VisitTermsAnonymousClass2 : VisitTerms<long?>
         {
-            public VisitTermsAnonymousInnerClassHelper2(string dirOut, string wordsFileIn, int inputMode, int prune, PositiveInt32Outputs outputs, bool doPack, bool noArcArrays)
+            public VisitTermsAnonymousClass2(string dirOut, string wordsFileIn, int inputMode, int prune, PositiveInt32Outputs outputs, bool doPack, bool noArcArrays)
                 : base(dirOut, wordsFileIn, inputMode, prune, outputs, doPack, noArcArrays)
             {
             }
@@ -849,9 +849,9 @@
             }
         }
 
-        private class VisitTermsAnonymousInnerClassHelper3 : VisitTerms<long?>
+        private class VisitTermsAnonymousClass3 : VisitTerms<long?>
         {
-            public VisitTermsAnonymousInnerClassHelper3(string dirOut, string wordsFileIn, int inputMode, int prune, PositiveInt32Outputs outputs, bool doPack, bool noArcArrays)
+            public VisitTermsAnonymousClass3(string dirOut, string wordsFileIn, int inputMode, int prune, PositiveInt32Outputs outputs, bool doPack, bool noArcArrays)
                 : base(dirOut, wordsFileIn, inputMode, prune, outputs, doPack, noArcArrays)
             {
             }
@@ -867,11 +867,11 @@
             }
         }
 
-        private class VisitTermsAnonymousInnerClassHelper4 : VisitTerms<object>
+        private class VisitTermsAnonymousClass4 : VisitTerms<object>
         {
             private readonly object NO_OUTPUT;
 
-            public VisitTermsAnonymousInnerClassHelper4(string dirOut, string wordsFileIn, int inputMode, int prune, NoOutputs outputs, bool doPack, bool noArcArrays, object NO_OUTPUT)
+            public VisitTermsAnonymousClass4(string dirOut, string wordsFileIn, int inputMode, int prune, NoOutputs outputs, bool doPack, bool noArcArrays, object NO_OUTPUT)
                 : base(dirOut, wordsFileIn, inputMode, prune, outputs, doPack, noArcArrays)
             {
                 this.NO_OUTPUT = NO_OUTPUT;
@@ -1495,7 +1495,7 @@
             builder.Add(Util.ToInt32sRef(new BytesRef("ax"), scratch), 17L);
             FST<long?> fst = builder.Finish();
             AtomicInt32 rejectCount = new AtomicInt32();
-            Util.TopNSearcher<long?> searcher = new TopNSearcherAnonymousInnerClassHelper(fst, minLongComparer, rejectCount);
+            Util.TopNSearcher<long?> searcher = new TopNSearcherAnonymousClass(fst, minLongComparer, rejectCount);
 
             searcher.AddStartPaths(fst.GetFirstArc(new FST.Arc<long?>()), outputs.NoOutput, true, new Int32sRef());
             Util.TopResults<long?> res = searcher.Search();
@@ -1506,7 +1506,7 @@
             Assert.AreEqual(Util.ToInt32sRef(new BytesRef("aac"), scratch), res.TopN[0].Input);
             Assert.AreEqual(7L, res.TopN[0].Output);
             rejectCount.Value = (0);
-            searcher = new TopNSearcherAnonymousInnerClassHelper2(fst, minLongComparer, rejectCount);
+            searcher = new TopNSearcherAnonymousClass2(fst, minLongComparer, rejectCount);
 
             searcher.AddStartPaths(fst.GetFirstArc(new FST.Arc<long?>()), outputs.NoOutput, true, new Int32sRef());
             res = searcher.Search();
@@ -1514,11 +1514,11 @@
             Assert.IsFalse(res.IsComplete); // rejected(4) + topN(2) > maxQueueSize(5)
         }
 
-        private class TopNSearcherAnonymousInnerClassHelper : Util.TopNSearcher<long?>
+        private class TopNSearcherAnonymousClass : Util.TopNSearcher<long?>
         {
             private readonly AtomicInt32 rejectCount;
 
-            public TopNSearcherAnonymousInnerClassHelper(FST<long?> fst, IComparer<long?> minLongComparer, AtomicInt32 rejectCount)
+            public TopNSearcherAnonymousClass(FST<long?> fst, IComparer<long?> minLongComparer, AtomicInt32 rejectCount)
                 : base(fst, 2, 6, minLongComparer)
             {
                 this.rejectCount = rejectCount;
@@ -1535,11 +1535,11 @@
             }
         }
 
-        private class TopNSearcherAnonymousInnerClassHelper2 : Util.TopNSearcher<long?>
+        private class TopNSearcherAnonymousClass2 : Util.TopNSearcher<long?>
         {
             private readonly AtomicInt32 rejectCount;
 
-            public TopNSearcherAnonymousInnerClassHelper2(FST<long?> fst, IComparer<long?> minLongComparer, AtomicInt32 rejectCount)
+            public TopNSearcherAnonymousClass2(FST<long?> fst, IComparer<long?> minLongComparer, AtomicInt32 rejectCount)
                 : base(fst, 2, 5, minLongComparer)
             {
                 this.rejectCount = rejectCount;
diff --git a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs
index 5d3d040..a0b115d 100644
--- a/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs
+++ b/src/Lucene.Net.Tests/Util/Packed/TestEliasFanoDocIdSet.cs
@@ -27,16 +27,16 @@
         public override EliasFanoDocIdSet CopyOf(BitSet bs, int numBits)
         {
             EliasFanoDocIdSet set = new EliasFanoDocIdSet((int)bs.Cardinality, numBits - 1);
-            set.EncodeFromDisi(new DocIdSetIteratorAnonymousInnerClassHelper(bs, numBits));
+            set.EncodeFromDisi(new DocIdSetIteratorAnonymousClass(bs, numBits));
             return set;
         }
 
-        private class DocIdSetIteratorAnonymousInnerClassHelper : DocIdSetIterator
+        private class DocIdSetIteratorAnonymousClass : DocIdSetIterator
         {
             private readonly BitSet bs;
             private readonly int numBits;
 
-            public DocIdSetIteratorAnonymousInnerClassHelper(BitSet bs, int numBits)
+            public DocIdSetIteratorAnonymousClass(BitSet bs, int numBits)
             {
                 this.bs = bs;
                 this.numBits = numBits;
diff --git a/src/Lucene.Net.Tests/Util/Packed/TestPackedInts.cs b/src/Lucene.Net.Tests/Util/Packed/TestPackedInts.cs
index f1bfabc..99c393f 100644
--- a/src/Lucene.Net.Tests/Util/Packed/TestPackedInts.cs
+++ b/src/Lucene.Net.Tests/Util/Packed/TestPackedInts.cs
@@ -1,4 +1,5 @@
-using J2N.IO;
+using J2N.IO;
+using J2N.Numerics;
 using Lucene.Net.Randomized.Generators;
 using Lucene.Net.Support;
 using NUnit.Framework;
@@ -1032,7 +1033,7 @@
                         {
                             // clear highest bits for packed
                             int toClear = 64 % bpv;
-                            blocks[i] = (int)((uint)(blocks[i] << toClear) >> toClear);
+                            blocks[i] = (blocks[i] << toClear).TripleShift(toClear);
                         }
                     }
 
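
The using J2N.Numerics added above, and the many TripleShift calls in the files that follow, replace the cast-based idiom for Java's unsigned right shift (>>>). A small sketch of the equivalence the substitution relies on, using plain constants rather than anything from the test:

    using J2N.Numerics;

    static void CheckTripleShift()
    {
        int code = -123;
        System.Diagnostics.Debug.Assert(code.TripleShift(1) == (int)((uint)code >> 1));

        long big = long.MinValue;
        System.Diagnostics.Debug.Assert(big.TripleShift(3) == (long)((ulong)big >> 3));
    }
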
diff --git a/src/Lucene.Net.Tests/Util/TestFilterIterator.cs b/src/Lucene.Net.Tests/Util/TestFilterIterator.cs
index 3652c03..fafe3a6 100644
--- a/src/Lucene.Net.Tests/Util/TestFilterIterator.cs
+++ b/src/Lucene.Net.Tests/Util/TestFilterIterator.cs
@@ -131,14 +131,14 @@
         [Obsolete("This method will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
         public virtual void TestEmptyIterator()
         {
-            IEnumerator<string> it = new FilterIteratorAnonymousInnerClassHelper(set.GetEnumerator());
+            IEnumerator<string> it = new FilterIteratorAnonymousClass(set.GetEnumerator());
             AssertNoMore(it);
         }
 
         [Obsolete("This class will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
-        private class FilterIteratorAnonymousInnerClassHelper : FilterIterator<string>
+        private class FilterIteratorAnonymousClass : FilterIterator<string>
         {
-            public FilterIteratorAnonymousInnerClassHelper(IEnumerator<string> iterator)
+            public FilterIteratorAnonymousClass(IEnumerator<string> iterator)
                 : base(iterator)
             {
             }
@@ -153,16 +153,16 @@
         [Obsolete("This method will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
         public virtual void TestA1Iterator()
         {
-            IEnumerator<string> it = new FilterIteratorAnonymousInnerClassHelper2(set.GetEnumerator());
+            IEnumerator<string> it = new FilterIteratorAnonymousClass2(set.GetEnumerator());
             Assert.IsTrue(it.MoveNext());
             Assert.AreEqual("a", it.Current);
             AssertNoMore(it);
         }
 
         [Obsolete("This class will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
-        private class FilterIteratorAnonymousInnerClassHelper2 : FilterIterator<string>
+        private class FilterIteratorAnonymousClass2 : FilterIterator<string>
         {
-            public FilterIteratorAnonymousInnerClassHelper2(IEnumerator<string> iterator)
+            public FilterIteratorAnonymousClass2(IEnumerator<string> iterator)
                 : base(iterator)
             {
             }
@@ -177,7 +177,7 @@
         [Obsolete("This method will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
         public virtual void TestA2Iterator()
         {
-            IEnumerator<string> it = new FilterIteratorAnonymousInnerClassHelper3(set.GetEnumerator());
+            IEnumerator<string> it = new FilterIteratorAnonymousClass3(set.GetEnumerator());
             // this time without check: Assert.IsTrue(it.hasNext());
             it.MoveNext();
             Assert.AreEqual("a", it.Current);
@@ -185,9 +185,9 @@
         }
 
         [Obsolete("This class will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
-        private class FilterIteratorAnonymousInnerClassHelper3 : FilterIterator<string>
+        private class FilterIteratorAnonymousClass3 : FilterIterator<string>
         {
-            public FilterIteratorAnonymousInnerClassHelper3(IEnumerator<string> iterator)
+            public FilterIteratorAnonymousClass3(IEnumerator<string> iterator)
                 : base(iterator)
             {
             }
@@ -202,16 +202,16 @@
         [Obsolete("This method will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
         public virtual void TestB1Iterator()
         {
-            IEnumerator<string> it = new FilterIteratorAnonymousInnerClassHelper4(set.GetEnumerator());
+            IEnumerator<string> it = new FilterIteratorAnonymousClass4(set.GetEnumerator());
             Assert.IsTrue(it.MoveNext());
             Assert.AreEqual("b", it.Current);
             AssertNoMore(it);
         }
 
         [Obsolete("This class will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
-        private class FilterIteratorAnonymousInnerClassHelper4 : FilterIterator<string>
+        private class FilterIteratorAnonymousClass4 : FilterIterator<string>
         {
-            public FilterIteratorAnonymousInnerClassHelper4(IEnumerator<string> iterator)
+            public FilterIteratorAnonymousClass4(IEnumerator<string> iterator)
                 : base(iterator)
             {
             }
@@ -226,7 +226,7 @@
         [Obsolete("This method will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
         public virtual void TestB2Iterator()
         {
-            IEnumerator<string> it = new FilterIteratorAnonymousInnerClassHelper5(set.GetEnumerator());
+            IEnumerator<string> it = new FilterIteratorAnonymousClass5(set.GetEnumerator());
             // this time without check: Assert.IsTrue(it.hasNext());
             it.MoveNext();
             Assert.AreEqual("b", it.Current);
@@ -234,9 +234,9 @@
         }
 
         [Obsolete("This class will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
-        private class FilterIteratorAnonymousInnerClassHelper5 : FilterIterator<string>
+        private class FilterIteratorAnonymousClass5 : FilterIterator<string>
         {
-            public FilterIteratorAnonymousInnerClassHelper5(IEnumerator<string> iterator)
+            public FilterIteratorAnonymousClass5(IEnumerator<string> iterator)
                 : base(iterator)
             {
             }
@@ -251,7 +251,7 @@
         [Obsolete("This method will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
         public virtual void TestAll1Iterator()
         {
-            IEnumerator<string> it = new FilterIteratorAnonymousInnerClassHelper6(set.GetEnumerator());
+            IEnumerator<string> it = new FilterIteratorAnonymousClass6(set.GetEnumerator());
             Assert.IsTrue(it.MoveNext());
             Assert.AreEqual("a", it.Current);
             Assert.IsTrue(it.MoveNext());
@@ -262,9 +262,9 @@
         }
 
         [Obsolete("This class will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
-        private class FilterIteratorAnonymousInnerClassHelper6 : FilterIterator<string>
+        private class FilterIteratorAnonymousClass6 : FilterIterator<string>
         {
-            public FilterIteratorAnonymousInnerClassHelper6(IEnumerator<string> iterator)
+            public FilterIteratorAnonymousClass6(IEnumerator<string> iterator)
                 : base(iterator)
             {
             }
@@ -279,7 +279,7 @@
         [Obsolete("This method will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
         public virtual void TestAll2Iterator()
         {
-            IEnumerator<string> it = new FilterIteratorAnonymousInnerClassHelper7(set.GetEnumerator());
+            IEnumerator<string> it = new FilterIteratorAnonymousClass7(set.GetEnumerator());
             it.MoveNext();
             Assert.AreEqual("a", it.Current);
             it.MoveNext();
@@ -290,9 +290,9 @@
         }
 
         [Obsolete("This class will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
-        private class FilterIteratorAnonymousInnerClassHelper7 : FilterIterator<string>
+        private class FilterIteratorAnonymousClass7 : FilterIterator<string>
         {
-            public FilterIteratorAnonymousInnerClassHelper7(IEnumerator<string> iterator)
+            public FilterIteratorAnonymousClass7(IEnumerator<string> iterator)
                 : base(iterator)
             {
             }
diff --git a/src/Lucene.Net.Tests/Util/TestNumericUtils.cs b/src/Lucene.Net.Tests/Util/TestNumericUtils.cs
index 27caf2b..a654c97 100644
--- a/src/Lucene.Net.Tests/Util/TestNumericUtils.cs
+++ b/src/Lucene.Net.Tests/Util/TestNumericUtils.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using J2N.Text;
 using Lucene.Net.Support;
 using NUnit.Framework;
@@ -268,7 +269,7 @@
             using IEnumerator<long> neededBounds = expectedBounds?.GetEnumerator();
             using IEnumerator<int> neededShifts = expectedShifts?.GetEnumerator();
 
-            NumericUtils.SplitInt64Range(new LongRangeBuilderAnonymousInnerClassHelper(lower, upper, useBitSet, bits, neededBounds, neededShifts), precisionStep, lower, upper);
+            NumericUtils.SplitInt64Range(new LongRangeBuilderAnonymousClass(lower, upper, useBitSet, bits, neededBounds, neededShifts), precisionStep, lower, upper);
 
             if (useBitSet)
             {
@@ -278,7 +279,7 @@
             }
         }
 
-        private class LongRangeBuilderAnonymousInnerClassHelper : NumericUtils.Int64RangeBuilder
+        private class LongRangeBuilderAnonymousClass : NumericUtils.Int64RangeBuilder
         {
             private readonly long lower;
             private readonly long upper;
@@ -287,7 +288,7 @@
             private readonly IEnumerator<long> neededBounds;
             private readonly IEnumerator<int> neededShifts;
 
-            public LongRangeBuilderAnonymousInnerClassHelper(long lower, long upper, bool useBitSet, Int64BitSet bits, IEnumerator<long> neededBounds, IEnumerator<int> neededShifts)
+            public LongRangeBuilderAnonymousClass(long lower, long upper, bool useBitSet, Int64BitSet bits, IEnumerator<long> neededBounds, IEnumerator<int> neededShifts)
             {
                 this.lower = lower;
                 this.upper = upper;
@@ -323,9 +324,9 @@
                 neededShifts.MoveNext();
                 Assert.AreEqual(neededShifts.Current, shift, "shift");
                 neededBounds.MoveNext();
-                Assert.AreEqual(neededBounds.Current, (long)((ulong)min >> shift), "inner min bound");
+                Assert.AreEqual(neededBounds.Current, min.TripleShift(shift), "inner min bound");
                 neededBounds.MoveNext();
-                Assert.AreEqual(neededBounds.Current, (long)((ulong)max >> shift), "inner max bound");
+                Assert.AreEqual(neededBounds.Current, max.TripleShift(shift), "inner max bound");
             }
         }
 
@@ -414,7 +415,7 @@
                 }
                 if (random.NextBoolean())
                 {
-                    val = (long)((ulong)val >> 1);
+                    val = val.TripleShift(1);
                 }
             }
 
@@ -461,7 +462,7 @@
             IEnumerator<int> neededBounds = (expectedBounds == null) ? null : expectedBounds.GetEnumerator();
             IEnumerator<int> neededShifts = (expectedShifts == null) ? null : expectedShifts.GetEnumerator();
 
-            NumericUtils.SplitInt32Range(new IntRangeBuilderAnonymousInnerClassHelper(lower, upper, useBitSet, bits, neededBounds, neededShifts), precisionStep, lower, upper);
+            NumericUtils.SplitInt32Range(new IntRangeBuilderAnonymousClass(lower, upper, useBitSet, bits, neededBounds, neededShifts), precisionStep, lower, upper);
 
             if (useBitSet)
             {
@@ -471,7 +472,7 @@
             }
         }
 
-        private class IntRangeBuilderAnonymousInnerClassHelper : NumericUtils.Int32RangeBuilder
+        private class IntRangeBuilderAnonymousClass : NumericUtils.Int32RangeBuilder
         {
             private readonly int lower;
             private readonly int upper;
@@ -480,7 +481,7 @@
             private readonly IEnumerator<int> neededBounds;
             private readonly IEnumerator<int> neededShifts;
 
-            public IntRangeBuilderAnonymousInnerClassHelper(int lower, int upper, bool useBitSet, FixedBitSet bits, IEnumerator<int> neededBounds, IEnumerator<int> neededShifts)
+            public IntRangeBuilderAnonymousClass(int lower, int upper, bool useBitSet, FixedBitSet bits, IEnumerator<int> neededBounds, IEnumerator<int> neededShifts)
             {
                 this.lower = lower;
                 this.upper = upper;
@@ -516,9 +517,9 @@
                 neededShifts.MoveNext();
                 Assert.AreEqual(neededShifts.Current, shift, "shift");
                 neededBounds.MoveNext();
-                Assert.AreEqual(neededBounds.Current, (int)((uint)min >> shift), "inner min bound");
+                Assert.AreEqual(neededBounds.Current, min.TripleShift(shift), "inner min bound");
                 neededBounds.MoveNext();
-                Assert.AreEqual(neededBounds.Current, (int)((uint)max >> shift), "inner max bound");
+                Assert.AreEqual(neededBounds.Current, max.TripleShift(shift), "inner max bound");
             }
         }
 
diff --git a/src/Lucene.Net.Tests/Util/TestPagedBytes.cs b/src/Lucene.Net.Tests/Util/TestPagedBytes.cs
index 3a2c679..2b67251 100644
--- a/src/Lucene.Net.Tests/Util/TestPagedBytes.cs
+++ b/src/Lucene.Net.Tests/Util/TestPagedBytes.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Store;
+using Lucene.Net.Store;
 using Lucene.Net.Support;
 using NUnit.Framework;
 using System;
@@ -194,7 +194,7 @@
             var arr = new byte[TestUtil.NextInt32(Random, blockSize / 2, blockSize * 2)];
             for (int i = 0; i < arr.Length; ++i)
             {
-                arr[i] = (byte)(sbyte)i;
+                arr[i] = (byte)i;
             }
             long numBytes = (1L << 31) + TestUtil.NextInt32(Random, 1, blockSize * 3);
             var p = new PagedBytes(blockBits);
diff --git a/src/Lucene.Net.Tests/Util/TestRollingBuffer.cs b/src/Lucene.Net.Tests/Util/TestRollingBuffer.cs
index 059f6e1..5c5b0d0 100644
--- a/src/Lucene.Net.Tests/Util/TestRollingBuffer.cs
+++ b/src/Lucene.Net.Tests/Util/TestRollingBuffer.cs
@@ -38,7 +38,7 @@
         [Test]
         public virtual void Test()
         {
-            RollingBuffer<Position> buffer = new RollingBufferAnonymousInnerClassHelper();
+            RollingBuffer<Position> buffer = new RollingBufferAnonymousClass();
 
             for (int iter = 0; iter < 100 * RandomMultiplier; iter++)
             {
@@ -88,9 +88,9 @@
             }
         }
 
-        private class RollingBufferAnonymousInnerClassHelper : RollingBuffer<Position>
+        private class RollingBufferAnonymousClass : RollingBuffer<Position>
         {
-            public RollingBufferAnonymousInnerClassHelper()
+            public RollingBufferAnonymousClass()
                 : base(NewInstanceFunc)
             {
             }
diff --git a/src/Lucene.Net.Tests/Util/TestWeakIdentityMap.cs b/src/Lucene.Net.Tests/Util/TestWeakIdentityMap.cs
index 5aa1224..fde368d 100644
--- a/src/Lucene.Net.Tests/Util/TestWeakIdentityMap.cs
+++ b/src/Lucene.Net.Tests/Util/TestWeakIdentityMap.cs
@@ -207,7 +207,7 @@
 //            // don't make threadCount and keyCount random, otherwise easily OOMs or fails otherwise:
 //            const int threadCount = 8, keyCount = 1024;
 
-//            RunnableAnonymousInnerClassHelper[] workers = new RunnableAnonymousInnerClassHelper[threadCount];
+//            RunnableAnonymousClass[] workers = new RunnableAnonymousClass[threadCount];
 //            WeakIdentityMap<object, int?> map = WeakIdentityMap<object, int?>.NewConcurrentHashMap(Random.NextBoolean());
 //            // we keep strong references to the keys,
 //            // so WeakIdentityMap will not forget about them:
@@ -222,7 +222,7 @@
 //                for (int t = 0; t < threadCount; t++)
 //                {
 //                    Random rnd = new Random(Random.Next());
-//                    var worker = new RunnableAnonymousInnerClassHelper(this, keyCount, map, keys, rnd);
+//                    var worker = new RunnableAnonymousClass(this, keyCount, map, keys, rnd);
 //                    workers[t] = worker;
 //                    worker.Start();
 //                }
@@ -285,7 +285,7 @@
 //            }
 //        }
 
-//        private class RunnableAnonymousInnerClassHelper : ThreadJob
+//        private class RunnableAnonymousClass : ThreadJob
 //        {
 //            private readonly TestWeakIdentityMap outerInstance;
 
@@ -295,7 +295,7 @@
 //            private readonly Random rnd;
 //            private volatile Exception error;
 
-//            public RunnableAnonymousInnerClassHelper(TestWeakIdentityMap outerInstance, int keyCount, WeakIdentityMap<object, int?> map, AtomicReferenceArray<object> keys, Random rnd)
+//            public RunnableAnonymousClass(TestWeakIdentityMap outerInstance, int keyCount, WeakIdentityMap<object, int?> map, AtomicReferenceArray<object> keys, Random rnd)
 //            {
 //                this.outerInstance = outerInstance;
 //                this.keyCount = keyCount;
diff --git a/src/Lucene.Net/Analysis/Tokenizer.cs b/src/Lucene.Net/Analysis/Tokenizer.cs
index 195b8b9..3b4c37e 100644
--- a/src/Lucene.Net/Analysis/Tokenizer.cs
+++ b/src/Lucene.Net/Analysis/Tokenizer.cs
@@ -124,9 +124,9 @@
             return true;
         }
 
-        private static readonly TextReader ILLEGAL_STATE_READER = new ReaderAnonymousInnerClassHelper();
+        private static readonly TextReader ILLEGAL_STATE_READER = new ReaderAnonymousClass();
 
-        private class ReaderAnonymousInnerClassHelper : TextReader
+        private class ReaderAnonymousClass : TextReader
         {
             public override int Read(char[] cbuf, int off, int len)
             {
diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs
index 3d712ca..036ecc3 100644
--- a/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs
+++ b/src/Lucene.Net/Codecs/BlockTreeTermsReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Support;
@@ -574,7 +575,7 @@
                 //   System.out.println("BTTR: seg=" + segment + " field=" + fieldInfo.name + " rootBlockCode=" + rootCode + " divisor=" + indexDivisor);
                 // }
 
-                rootBlockFP = (int)((uint)(new ByteArrayDataInput(rootCode.Bytes, rootCode.Offset, rootCode.Length)).ReadVInt64() >> BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS);
+                rootBlockFP = new ByteArrayDataInput(rootCode.Bytes, rootCode.Offset, rootCode.Length).ReadVInt64().TripleShift(BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS);
 
                 if (indexIn != null)
                 {
@@ -767,7 +768,7 @@
 
                         do
                         {
-                            fp = fpOrig + ((int)((uint)floorDataReader.ReadVInt64() >> 1));
+                            fp = fpOrig + (floorDataReader.ReadVInt64().TripleShift(1));
                             numFollowFloorBlocks--;
                             // if (DEBUG) System.out.println("    skip floor block2!  nextFloorLabel=" + (char) nextFloorLabel + " vs target=" + (char) transitions[transitionIndex].getMin() + " newFP=" + fp + " numFollowFloorBlocks=" + numFollowFloorBlocks);
                             if (numFollowFloorBlocks != 0)
@@ -828,7 +829,7 @@
                                     // Maybe skip floor blocks:
                                     while (numFollowFloorBlocks != 0 && nextFloorLabel <= transitions[0].Min)
                                     {
-                                        fp = fpOrig + ((int)((uint)floorDataReader.ReadVInt64() >> 1));
+                                        fp = fpOrig + (floorDataReader.ReadVInt64().TripleShift(1));
                                         numFollowFloorBlocks--;
                                         // if (DEBUG) System.out.println("    skip floor block!  nextFloorLabel=" + (char) nextFloorLabel + " vs target=" + (char) transitions[0].getMin() + " newFP=" + fp + " numFollowFloorBlocks=" + numFollowFloorBlocks);
                                         if (numFollowFloorBlocks != 0)
@@ -846,14 +847,14 @@
 
                         outerInstance.@in.Seek(fp);
                         int code_ = outerInstance.@in.ReadVInt32();
-                        entCount = (int)((uint)code_ >> 1);
+                        entCount = code_.TripleShift(1);
                         if (Debugging.AssertsEnabled) Debugging.Assert(entCount > 0);
                         isLastInFloor = (code_ & 1) != 0;
 
                         // term suffixes:
                         code_ = outerInstance.@in.ReadVInt32();
                         isLeafBlock = (code_ & 1) != 0;
-                        int numBytes = (int)((uint)code_ >> 1);
+                        int numBytes = code_.TripleShift(1);
                         // if (DEBUG) System.out.println("      entCount=" + entCount + " lastInFloor?=" + isLastInFloor + " leafBlock?=" + isLeafBlock + " numSuffixBytes=" + numBytes);
                         if (suffixBytes.Length < numBytes)
                         {
@@ -922,7 +923,7 @@
                         if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt={0} entCount={1} fp={2}", nextEnt, entCount, fp);
                         nextEnt++;
                         int code = suffixesReader.ReadVInt32();
-                        suffix = (int)((uint)code >> 1);
+                        suffix = code.TripleShift(1);
                         startBytePos = suffixesReader.Position;
                         suffixesReader.SkipBytes(suffix);
                         if ((code & 1) == 0)
@@ -1725,7 +1726,7 @@
                 {
                     scratchReader.Reset(frameData.Bytes, frameData.Offset, frameData.Length);
                     long code = scratchReader.ReadVInt64();
-                    long fpSeek = (long)((ulong)code >> BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS);
+                    long fpSeek = code.TripleShift(BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS);
                     Frame f = GetFrame(1 + currentFrame.ord);
                     f.hasTerms = (code & BlockTreeTermsWriter.OUTPUT_FLAG_HAS_TERMS) != 0;
                     f.hasTermsOrig = f.hasTerms;
@@ -2737,7 +2738,7 @@
 
                         outerInstance.@in.Seek(fp);
                         int code = outerInstance.@in.ReadVInt32();
-                        entCount = (int)((uint)code >> 1);
+                        entCount = code.TripleShift(1);
                         if (Debugging.AssertsEnabled) Debugging.Assert(entCount > 0);
                         isLastInFloor = (code & 1) != 0;
                         if (Debugging.AssertsEnabled) Debugging.Assert(arc == null || (isLastInFloor || isFloor));
@@ -2750,7 +2751,7 @@
                         // term suffixes:
                         code = outerInstance.@in.ReadVInt32();
                         isLeafBlock = (code & 1) != 0;
-                        int numBytes = (int)((uint)code >> 1);
+                        int numBytes = code.TripleShift(1);
                         if (suffixBytes.Length < numBytes)
                         {
                             suffixBytes = new byte[ArrayUtil.Oversize(numBytes, 1)];
@@ -2883,7 +2884,7 @@
                         if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt != -1 && nextEnt < entCount, "nextEnt={0} entCount={1} fp={2}", nextEnt, entCount, fp);
                         nextEnt++;
                         int code = suffixesReader.ReadVInt32();
-                        suffix = (int)((uint)code >> 1);
+                        suffix = code.TripleShift(1);
                         startBytePos = suffixesReader.Position;
                         outerInstance.term.Length = prefix + suffix;
                         if (outerInstance.term.Bytes.Length < outerInstance.term.Length)
@@ -2945,7 +2946,7 @@
                         while (true)
                         {
                             long code = floorDataReader.ReadVInt64();
-                            newFP = fpOrig + ((long)((ulong)code >> 1));
+                            newFP = fpOrig + (code.TripleShift(1));
                             hasTerms = (code & 1) != 0;
                             // if (DEBUG) {
                             //   System.out.println("      label=" + toHex(nextFloorLabel) + " fp=" + newFP + " hasTerms?=" + hasTerms + " numFollowFloor=" + numFollowFloorBlocks);
@@ -3071,7 +3072,7 @@
                             if (Debugging.AssertsEnabled) Debugging.Assert(nextEnt < entCount);
                             nextEnt++;
                             int code = suffixesReader.ReadVInt32();
-                            suffixesReader.SkipBytes(isLeafBlock ? code : (int)((uint)code >> 1));
+                            suffixesReader.SkipBytes(isLeafBlock ? code : code.TripleShift(1));
                             //if (DEBUG) System.out.println("    " + nextEnt + " (of " + entCount + ") ent isSubBlock=" + ((code&1)==1));
                             if ((code & 1) != 0)
                             {
@@ -3274,7 +3275,7 @@
                             nextEnt++;
 
                             int code = suffixesReader.ReadVInt32();
-                            suffix = (int)((uint)code >> 1);
+                            suffix = code.TripleShift(1);
                             // if (DEBUG) {
                             //   BytesRef suffixBytesRef = new BytesRef();
                             //   suffixBytesRef.bytes = suffixBytes;
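
Many of the reader hunks above decode the same layout: a single vInt whose low bit is a flag (leaf block, last-in-floor, has-terms) and whose remaining bits are a count or delta, recovered with code.TripleShift(1) and code & 1. A tiny sketch of that pack/unpack convention on plain ints (Pack and Unpack are illustrative helpers, not codec methods):

    using J2N.Numerics;

    static int Pack(int value, bool flag) => (value << 1) | (flag ? 1 : 0);

    static (int Value, bool Flag) Unpack(int code) => (code.TripleShift(1), (code & 1) != 0);

    // e.g. Unpack(Pack(42, true)) yields (42, true); the flag costs one bit of range.
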
diff --git a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs
index 2b2e54d..342bbdd 100644
--- a/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs
+++ b/src/Lucene.Net/Codecs/BlockTreeTermsWriter.cs
@@ -1,4 +1,4 @@
-using J2N.Text;
+using J2N.Text;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using Lucene.Net.Util.Fst;
@@ -517,7 +517,7 @@
                         //if (DEBUG) {
                         //  System.out.println("    write floorLeadByte=" + Integer.toHexString(sub.floorLeadByte&0xff));
                         //}
-                        scratchBytes.WriteByte((byte)(sbyte)sub.FloorLeadByte);
+                        scratchBytes.WriteByte((byte)sub.FloorLeadByte);
                         if (Debugging.AssertsEnabled) Debugging.Assert(sub.Fp > Fp);
                         scratchBytes.WriteVInt64((sub.Fp - Fp) << 1 | (uint)(sub.HasTerms ? 1 : 0));
                     }
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
index 576e195..b12a6d9 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsIndexReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Support;
 using System;
 using System.Runtime.CompilerServices;
@@ -40,7 +41,7 @@
     {
         internal static long MoveLowOrderBitToSign(long n)
         {
-            return (((long)((ulong)n >> 1)) ^ -(n & 1));
+            return ((n.TripleShift(1)) ^ -(n & 1));
         }
 
         internal readonly int maxDoc;
@@ -121,7 +122,7 @@
             int lo = 0, hi = docBases.Length - 1;
             while (lo <= hi)
             {
-                int mid = (int)((uint)(lo + hi) >> 1);
+                int mid = (lo + hi).TripleShift(1);
                 int midValue = docBases[mid];
                 if (midValue == docID)
                 {
@@ -160,7 +161,7 @@
             int lo = 0, hi = docBasesDeltas[block].Count - 1;
             while (lo <= hi)
             {
-                int mid = (int)((uint)(lo + hi) >> 1);
+                int mid = (lo + hi).TripleShift(1);
                 int midValue = RelativeDocBase(block, mid);
                 if (midValue == relativeDoc)
                 {
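
Two details in the hunks above are worth naming. MoveLowOrderBitToSign is standard zigzag decoding (the low bit carries the sign, the remaining bits the magnitude), and (lo + hi).TripleShift(1) is the overflow-safe binary-search midpoint, because the unsigned shift stays correct even if lo + hi wraps past int.MaxValue. A brief sketch of the zigzag round trip (ZigZagEncode is an illustrative helper, not part of this reader):

    using J2N.Numerics;

    static long ZigZagEncode(long value) => (value << 1) ^ (value >> 63);

    // Same operation as MoveLowOrderBitToSign above.
    static long ZigZagDecode(long n) => n.TripleShift(1) ^ -(n & 1);

    // e.g. ZigZagDecode(ZigZagEncode(-5)) == -5, and small magnitudes encode to small values.
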
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
index 742169b..bfc095d 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingStoredFieldsReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Codecs.Lucene40;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
@@ -343,7 +344,7 @@
                 }
 
                 decompressor.Decompress(fieldsStream, chunkSize, offset, Math.Min(length, chunkSize - offset), bytes);
-                documentInput = new DataInputAnonymousInnerClassHelper(this, length);
+                documentInput = new DataInputAnonymousClass(this, length);
             }
             else
             {
@@ -356,7 +357,7 @@
             for (int fieldIDX = 0; fieldIDX < numStoredFields; fieldIDX++)
             {
                 long infoAndBits = documentInput.ReadVInt64();
-                int fieldNumber = (int)((long)((ulong)infoAndBits >> CompressingStoredFieldsWriter.TYPE_BITS));
+                int fieldNumber = (int)infoAndBits.TripleShift(CompressingStoredFieldsWriter.TYPE_BITS);
                 FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);
 
                 int bits = (int)(infoAndBits & CompressingStoredFieldsWriter.TYPE_MASK);
@@ -378,13 +379,13 @@
             }
         }
 
-        private class DataInputAnonymousInnerClassHelper : DataInput
+        private class DataInputAnonymousClass : DataInput
         {
             private readonly CompressingStoredFieldsReader outerInstance;
 
             private readonly int length;
 
-            public DataInputAnonymousInnerClassHelper(CompressingStoredFieldsReader outerInstance, int length)
+            public DataInputAnonymousClass(CompressingStoredFieldsReader outerInstance, int length)
             {
                 this.outerInstance = outerInstance;
                 this.length = length;
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs
index 0e4f03f..c3ccdd0 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
@@ -224,7 +225,7 @@
                 int token = vectorsStream.ReadByte() & 0xFF;
                 if (Debugging.AssertsEnabled) Debugging.Assert(token != 0); // means no term vectors, cannot happen since we checked for numFields == 0
                 int bitsPerFieldNum = token & 0x1F;
-                int totalDistinctFields = (int)((uint)token >> 5);
+                int totalDistinctFields = token.TripleShift(5);
                 if (totalDistinctFields == 0x07)
                 {
                     totalDistinctFields += vectorsStream.ReadVInt32();
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs
index 1faa1c1..f4b612f 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressingTermVectorsWriter.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Diagnostics;
+using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
@@ -6,6 +6,7 @@
 using System.Runtime.CompilerServices;
 using JCG = J2N.Collections.Generic;
 using ArrayUtil = Lucene.Net.Util.ArrayUtil;
+using J2N.Numerics;
 
 namespace Lucene.Net.Codecs.Compressing
 {
@@ -478,7 +479,7 @@
             if (Debugging.AssertsEnabled) Debugging.Assert(numDistinctFields > 0);
             int bitsRequired = PackedInt32s.BitsRequired(fieldNums.Max);
             int token = (Math.Min(numDistinctFields - 1, 0x07) << 5) | bitsRequired;
-            vectorsStream.WriteByte((byte)(sbyte)token);
+            vectorsStream.WriteByte((byte)token);
             if (numDistinctFields - 1 >= 0x07)
             {
                 vectorsStream.WriteVInt32(numDistinctFields - 1 - 0x07);
@@ -843,7 +844,7 @@
                         {
                             payloadLengthsBuf[payStart + i] = 0;
                         }
-                        position += (int)((uint)code >> 1);
+                        position += code.TripleShift(1);
                         positionsBuf[posStart + i] = position;
                     }
                 }
@@ -851,7 +852,7 @@
                 {
                     for (int i = 0; i < numProx; ++i)
                     {
-                        position += ((int)((uint)positions.ReadVInt32() >> 1));
+                        position += positions.ReadVInt32().TripleShift(1);
                         positionsBuf[posStart + i] = position;
                     }
                 }
diff --git a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
index 3f489cb..f40c383 100644
--- a/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
+++ b/src/Lucene.Net/Codecs/Compressing/CompressionMode.cs
@@ -42,9 +42,9 @@
         /// very fast. Use this mode with indices that have a high update rate but
         /// should be able to load documents from disk quickly.
         /// </summary>
-        public static readonly CompressionMode FAST = new CompressionModeAnonymousInnerClassHelper();
+        public static readonly CompressionMode FAST = new CompressionModeAnonymousClass();
 
-        private class CompressionModeAnonymousInnerClassHelper : CompressionMode
+        private class CompressionModeAnonymousClass : CompressionMode
         {
             [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public override Compressor NewCompressor()
@@ -71,9 +71,9 @@
         /// provide a good compression ratio. this mode might be interesting if/when
         /// your index size is much bigger than your OS cache.
         /// </summary>
-        public static readonly CompressionMode HIGH_COMPRESSION = new CompressionModeAnonymousInnerClassHelper2();
+        public static readonly CompressionMode HIGH_COMPRESSION = new CompressionModeAnonymousClass2();
 
-        private class CompressionModeAnonymousInnerClassHelper2 : CompressionMode
+        private class CompressionModeAnonymousClass2 : CompressionMode
         {
             [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public override Compressor NewCompressor()
@@ -100,9 +100,9 @@
         /// mode is best used with indices that have a low update rate but should be
         /// able to load documents from disk quickly.
         /// </summary>
-        public static readonly CompressionMode FAST_DECOMPRESSION = new CompressionModeAnonymousInnerClassHelper3();
+        public static readonly CompressionMode FAST_DECOMPRESSION = new CompressionModeAnonymousClass3();
 
-        private class CompressionModeAnonymousInnerClassHelper3 : CompressionMode
+        private class CompressionModeAnonymousClass3 : CompressionMode
         {
             [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public override Compressor NewCompressor()
@@ -139,9 +139,9 @@
         /// </summary>
         public abstract Decompressor NewDecompressor();
 
-        private static readonly Decompressor LZ4_DECOMPRESSOR = new DecompressorAnonymousInnerClassHelper();
+        private static readonly Decompressor LZ4_DECOMPRESSOR = new DecompressorAnonymousClass();
 
-        private class DecompressorAnonymousInnerClassHelper : Decompressor
+        private class DecompressorAnonymousClass : Decompressor
         {
             public override void Decompress(DataInput @in, int originalLength, int offset, int length, BytesRef bytes)
             {
diff --git a/src/Lucene.Net/Codecs/Compressing/LZ4.cs b/src/Lucene.Net/Codecs/Compressing/LZ4.cs
index 7df0575..7f213db 100644
--- a/src/Lucene.Net/Codecs/Compressing/LZ4.cs
+++ b/src/Lucene.Net/Codecs/Compressing/LZ4.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -111,7 +111,7 @@
             {
                 // literals
                 int token = compressed.ReadByte() & 0xFF;
-                int literalLen = (int)(((uint)token) >> 4);
+                int literalLen = token.TripleShift(4);
 
                 if (literalLen != 0)
                 {
@@ -177,16 +177,16 @@
         {
             while (l >= 0xFF)
             {
-                @out.WriteByte(unchecked((byte)(sbyte)0xFF));
+                @out.WriteByte(/*(byte)*/0xFF); // LUCENENET: Removed unnecessary cast
                 l -= 0xFF;
             }
-            @out.WriteByte((byte)(sbyte)l);
+            @out.WriteByte((byte)l);
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void EncodeLiterals(byte[] bytes, int token, int anchor, int literalLen, DataOutput @out)
         {
-            @out.WriteByte((byte)(sbyte)token);
+            @out.WriteByte((byte)token);
 
             // encode literal length
             if (literalLen >= 0x0F)
@@ -216,8 +216,8 @@
             // encode match dec
             int matchDec = matchOff - matchRef;
             if (Debugging.AssertsEnabled) Debugging.Assert(matchDec > 0 && matchDec < 1 << 16);
-            @out.WriteByte((byte)(sbyte)matchDec);
-            @out.WriteByte((byte)(sbyte)((int)((uint)matchDec >> 8)));
+            @out.WriteByte((byte)matchDec);
+            @out.WriteByte((byte)matchDec.TripleShift(8));
 
             // encode match len
             if (matchLen >= MIN_MATCH + 0x0F)
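
The `(byte)(sbyte)` double casts dropped above behave the same as a single `(byte)` cast: in an unchecked context both keep only the low 8 bits of the value, so the intermediate `(sbyte)` step was redundant. A small illustration (variable names are illustrative only):

    int matchDec = 0x1F3;
    byte viaSByte = unchecked((byte)(sbyte)matchDec); // 0xF3
    byte direct = unchecked((byte)matchDec);          // 0xF3 as well
    // Both truncate to the low 8 bits, which is why the extra cast was removed.
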
diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xCodec.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xCodec.cs
index 2f7e661..c93d02c 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xCodec.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xCodec.cs
@@ -58,11 +58,11 @@
         private readonly LiveDocsFormat liveDocsFormat = new Lucene40LiveDocsFormat();
 
         // 3.x doesn't support docvalues
-        private readonly DocValuesFormat docValuesFormat = new DocValuesFormatAnonymousInnerClassHelper();
+        private readonly DocValuesFormat docValuesFormat = new DocValuesFormatAnonymousClass();
 
-        private class DocValuesFormatAnonymousInnerClassHelper : DocValuesFormat
+        private class DocValuesFormatAnonymousClass : DocValuesFormat
         {
-            public DocValuesFormatAnonymousInnerClassHelper()
+            public DocValuesFormatAnonymousClass()
                 : base()
             {
             }
diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs
index 607e00d..2f55ce0 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xNormsProducer.cs
@@ -235,18 +235,18 @@
                                 file.Dispose();
                             }
                             outerInstance.ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(bytes));
-                            instance = new NumericDocValuesAnonymousInnerClassHelper(bytes);
+                            instance = new NumericDocValuesAnonymousClass(bytes);
                         }
                         return instance;
                     }
                 }
             }
 
-            private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues
+            private class NumericDocValuesAnonymousClass : NumericDocValues
             {
                 private readonly byte[] bytes;
 
-                public NumericDocValuesAnonymousInnerClassHelper(byte[] bytes)
+                public NumericDocValuesAnonymousClass(byte[] bytes)
                 {
                     this.bytes = bytes;
                 }
diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSkipListReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSkipListReader.cs
index 7d37cc4..86e4d3f 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSkipListReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xSkipListReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Support;
 using System;
 using System.Runtime.CompilerServices;
@@ -107,7 +108,7 @@
                 {
                     payloadLength[level] = skipStream.ReadVInt32();
                 }
-                delta = (int)((uint)delta >> 1);
+                delta = delta.TripleShift(1);
             }
             else
             {
diff --git a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
index 9d4be03..2541022 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/Lucene3xTermVectorsReader.cs
@@ -277,17 +277,17 @@
             [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public override IEnumerator<string> GetEnumerator()
             {
-                return new IteratorAnonymousInnerClassHelper(this);
+                return new IteratorAnonymousClass(this);
             }
 
-            private class IteratorAnonymousInnerClassHelper : IEnumerator<string>
+            private class IteratorAnonymousClass : IEnumerator<string>
             {
                 private readonly TVFields outerInstance;
                 private string current;
                 private int i;
                 private readonly int upTo;
 
-                public IteratorAnonymousInnerClassHelper(TVFields outerInstance)
+                public IteratorAnonymousClass(TVFields outerInstance)
                 {
                     this.outerInstance = outerInstance;
                     upTo = this.outerInstance.fieldNumbers.Length;
diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs
index a24fa90..20f4f97 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermDocs.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Diagnostics;
@@ -168,7 +169,7 @@
                 }
                 else
                 {
-                    doc += (int)((uint)docCode >> 1); // shift off low bit
+                    doc += docCode.TripleShift(1); // shift off low bit
                     if ((docCode & 1) != 0) // if low bit is set
                     {
                         freq = 1; // freq is one
@@ -207,7 +208,7 @@
                 {
                     // manually inlined call to next() for speed
                     int docCode = m_freqStream.ReadVInt32();
-                    doc += (int)((uint)docCode >> 1); // shift off low bit
+                    doc += docCode.TripleShift(1); // shift off low bit
                     if ((docCode & 1) != 0) // if low bit is set
                     {
                         freq = 1; // freq is one
diff --git a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs
index de24367..9515d7d 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/SegmentTermPositions.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using System;
@@ -115,7 +116,7 @@
                 {
                     payloadLength = proxStream.ReadVInt32();
                 }
-                delta = (int)((uint)delta >> 1);
+                delta = delta.TripleShift(1);
                 needToLoadPayload = true;
             }
             else if (delta == -1)
diff --git a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReaderIndex.cs b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReaderIndex.cs
index 69c15d9..e70b092 100644
--- a/src/Lucene.Net/Codecs/Lucene3x/TermInfosReaderIndex.cs
+++ b/src/Lucene.Net/Codecs/Lucene3x/TermInfosReaderIndex.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using J2N.Text;
 using System;
 using System.Collections.Generic;
@@ -174,7 +174,7 @@
             BytesRef scratch = new BytesRef();
             while (hi >= lo)
             {
-                int mid = (int)((uint)(lo + hi) >> 1);
+                int mid = (lo + hi).TripleShift(1);
                 int delta = CompareTo(term, mid, input, scratch);
                 if (delta < 0)
                 {
diff --git a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs
index e7ca8c1..1973cd2 100644
--- a/src/Lucene.Net/Codecs/Lucene40/BitVector.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/BitVector.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -76,7 +77,7 @@
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static int GetNumBytes(int size) // LUCENENET: CA1822: Mark members as static
         {
-            int bytesLength = (int)((uint)size >> 3);
+            int bytesLength = size.TripleShift(3);
             if ((size & 7) != 0)
             {
                 bytesLength++;
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40Codec.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40Codec.cs
index fc86de0..036c7ac 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40Codec.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40Codec.cs
@@ -45,11 +45,11 @@
 
         private readonly PostingsFormat postingsFormat;
 
-        private class PerFieldPostingsFormatAnonymousInnerClassHelper : PerFieldPostingsFormat
+        private class PerFieldPostingsFormatAnonymousClass : PerFieldPostingsFormat
         {
             private readonly Lucene40Codec outerInstance;
 
-            public PerFieldPostingsFormatAnonymousInnerClassHelper(Lucene40Codec outerInstance)
+            public PerFieldPostingsFormatAnonymousClass(Lucene40Codec outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -66,7 +66,7 @@
         public Lucene40Codec()
             : base()
         {
-            postingsFormat = new PerFieldPostingsFormatAnonymousInnerClassHelper(this);
+            postingsFormat = new PerFieldPostingsFormatAnonymousClass(this);
         }
 
         public override sealed StoredFieldsFormat StoredFieldsFormat => fieldsFormat;
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40DocValuesReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40DocValuesReader.cs
index fa95ec4..038a569 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40DocValuesReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40DocValuesReader.cs
@@ -155,7 +155,7 @@
                     values[i] = input.ReadInt64();
                 }
                 ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
-                return new NumericDocValuesAnonymousInnerClassHelper(values);
+                return new NumericDocValuesAnonymousClass(values);
             }
             else if (header == Lucene40DocValuesFormat.VAR_INTS_PACKED)
             {
@@ -163,7 +163,7 @@
                 long defaultValue = input.ReadInt64();
                 PackedInt32s.Reader reader = PackedInt32s.GetReader(input);
                 ramBytesUsed.AddAndGet(reader.RamBytesUsed());
-                return new NumericDocValuesAnonymousInnerClassHelper2(minValue, defaultValue, reader);
+                return new NumericDocValuesAnonymousClass2(minValue, defaultValue, reader);
             }
             else
             {
@@ -171,11 +171,11 @@
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues
+        private class NumericDocValuesAnonymousClass : NumericDocValues
         {
             private readonly long[] values;
 
-            public NumericDocValuesAnonymousInnerClassHelper(long[] values)
+            public NumericDocValuesAnonymousClass(long[] values)
             {
                 this.values = values;
             }
@@ -187,13 +187,13 @@
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper2 : NumericDocValues
+        private class NumericDocValuesAnonymousClass2 : NumericDocValues
         {
             private readonly long minValue;
             private readonly long defaultValue;
             private readonly PackedInt32s.Reader reader;
 
-            public NumericDocValuesAnonymousInnerClassHelper2(long minValue, long defaultValue, PackedInt32s.Reader reader)
+            public NumericDocValuesAnonymousClass2(long minValue, long defaultValue, PackedInt32s.Reader reader)
             {
                 this.minValue = minValue;
                 this.defaultValue = defaultValue;
@@ -227,14 +227,14 @@
             var values = new byte[maxDoc];
             input.ReadBytes(values, 0, values.Length);
             ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
-            return new NumericDocValuesAnonymousInnerClassHelper3(values);
+            return new NumericDocValuesAnonymousClass3(values);
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper3 : NumericDocValues
+        private class NumericDocValuesAnonymousClass3 : NumericDocValues
         {
             private readonly byte[] values;
 
-            public NumericDocValuesAnonymousInnerClassHelper3(byte[] values)
+            public NumericDocValuesAnonymousClass3(byte[] values)
             {
                 this.values = values;
             }
@@ -264,14 +264,14 @@
                 values[i] = input.ReadInt16();
             }
             ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
-            return new NumericDocValuesAnonymousInnerClassHelper4(values);
+            return new NumericDocValuesAnonymousClass4(values);
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper4 : NumericDocValues
+        private class NumericDocValuesAnonymousClass4 : NumericDocValues
         {
             private readonly short[] values;
 
-            public NumericDocValuesAnonymousInnerClassHelper4(short[] values)
+            public NumericDocValuesAnonymousClass4(short[] values)
             {
                 this.values = values;
             }
@@ -301,14 +301,14 @@
                 values[i] = input.ReadInt32();
             }
             ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
-            return new NumericDocValuesAnonymousInnerClassHelper5(values);
+            return new NumericDocValuesAnonymousClass5(values);
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper5 : NumericDocValues
+        private class NumericDocValuesAnonymousClass5 : NumericDocValues
         {
             private readonly int[] values;
 
-            public NumericDocValuesAnonymousInnerClassHelper5(int[] values)
+            public NumericDocValuesAnonymousClass5(int[] values)
             {
                 this.values = values;
             }
@@ -338,14 +338,14 @@
                 values[i] = input.ReadInt64();
             }
             ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
-            return new NumericDocValuesAnonymousInnerClassHelper6(values);
+            return new NumericDocValuesAnonymousClass6(values);
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper6 : NumericDocValues
+        private class NumericDocValuesAnonymousClass6 : NumericDocValues
         {
             private readonly long[] values;
 
-            public NumericDocValuesAnonymousInnerClassHelper6(long[] values)
+            public NumericDocValuesAnonymousClass6(long[] values)
             {
                 this.values = values;
             }
@@ -375,14 +375,14 @@
                 values[i] = input.ReadInt32();
             }
             ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
-            return new NumericDocValuesAnonymousInnerClassHelper7(values);
+            return new NumericDocValuesAnonymousClass7(values);
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper7 : NumericDocValues
+        private class NumericDocValuesAnonymousClass7 : NumericDocValues
         {
             private readonly int[] values;
 
-            public NumericDocValuesAnonymousInnerClassHelper7(int[] values)
+            public NumericDocValuesAnonymousClass7(int[] values)
             {
                 this.values = values;
             }
@@ -409,14 +409,14 @@
                 values[i] = input.ReadInt64();
             }
             ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
-            return new NumericDocValuesAnonymousInnerClassHelper8(values);
+            return new NumericDocValuesAnonymousClass8(values);
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper8 : NumericDocValues
+        private class NumericDocValuesAnonymousClass8 : NumericDocValues
         {
             private readonly long[] values;
 
-            public NumericDocValuesAnonymousInnerClassHelper8(long[] values)
+            public NumericDocValuesAnonymousClass8(long[] values)
             {
                 this.values = values;
             }
@@ -477,7 +477,7 @@
                 CodecUtil.CheckEOF(input);
                 success = true;
                 ramBytesUsed.AddAndGet(bytes.RamBytesUsed());
-                return new BinaryDocValuesAnonymousInnerClassHelper(fixedLength, bytesReader);
+                return new BinaryDocValuesAnonymousClass(fixedLength, bytesReader);
             }
             finally
             {
@@ -492,12 +492,12 @@
             }
         }
 
-        private class BinaryDocValuesAnonymousInnerClassHelper : BinaryDocValues
+        private class BinaryDocValuesAnonymousClass : BinaryDocValues
         {
             private readonly int fixedLength;
             private readonly PagedBytes.Reader bytesReader;
 
-            public BinaryDocValuesAnonymousInnerClassHelper(int fixedLength, PagedBytes.Reader bytesReader)
+            public BinaryDocValuesAnonymousClass(int fixedLength, PagedBytes.Reader bytesReader)
             {
                 this.fixedLength = fixedLength;
                 this.bytesReader = bytesReader;
@@ -532,7 +532,7 @@
                 CodecUtil.CheckEOF(index);
                 success = true;
                 ramBytesUsed.AddAndGet(bytes.RamBytesUsed() + reader.RamBytesUsed());
-                return new BinaryDocValuesAnonymousInnerClassHelper2(bytesReader, reader);
+                return new BinaryDocValuesAnonymousClass2(bytesReader, reader);
             }
             finally
             {
@@ -547,12 +547,12 @@
             }
         }
 
-        private class BinaryDocValuesAnonymousInnerClassHelper2 : BinaryDocValues
+        private class BinaryDocValuesAnonymousClass2 : BinaryDocValues
         {
             private readonly PagedBytes.Reader bytesReader;
             private readonly PackedInt32s.Reader reader;
 
-            public BinaryDocValuesAnonymousInnerClassHelper2(PagedBytes.Reader bytesReader, PackedInt32s.Reader reader)
+            public BinaryDocValuesAnonymousClass2(PagedBytes.Reader bytesReader, PackedInt32s.Reader reader)
             {
                 this.bytesReader = bytesReader;
                 this.reader = reader;
@@ -591,7 +591,7 @@
                 CodecUtil.CheckEOF(index);
                 ramBytesUsed.AddAndGet(bytes.RamBytesUsed() + reader.RamBytesUsed());
                 success = true;
-                return new BinaryDocValuesAnonymousInnerClassHelper3(fixedLength, bytesReader, reader);
+                return new BinaryDocValuesAnonymousClass3(fixedLength, bytesReader, reader);
             }
             finally
             {
@@ -606,13 +606,13 @@
             }
         }
 
-        private class BinaryDocValuesAnonymousInnerClassHelper3 : BinaryDocValues
+        private class BinaryDocValuesAnonymousClass3 : BinaryDocValues
         {
             private readonly int fixedLength;
             private readonly PagedBytes.Reader bytesReader;
             private readonly PackedInt32s.Reader reader;
 
-            public BinaryDocValuesAnonymousInnerClassHelper3(int fixedLength, PagedBytes.Reader bytesReader, PackedInt32s.Reader reader)
+            public BinaryDocValuesAnonymousClass3(int fixedLength, PagedBytes.Reader bytesReader, PackedInt32s.Reader reader)
             {
                 this.fixedLength = fixedLength;
                 this.bytesReader = bytesReader;
@@ -650,7 +650,7 @@
                 CodecUtil.CheckEOF(index);
                 ramBytesUsed.AddAndGet(bytes.RamBytesUsed() + reader.RamBytesUsed());
                 success = true;
-                return new BinaryDocValuesAnonymousInnerClassHelper4(bytesReader, reader);
+                return new BinaryDocValuesAnonymousClass4(bytesReader, reader);
             }
             finally
             {
@@ -665,12 +665,12 @@
             }
         }
 
-        private class BinaryDocValuesAnonymousInnerClassHelper4 : BinaryDocValues
+        private class BinaryDocValuesAnonymousClass4 : BinaryDocValues
         {
             private readonly PagedBytes.Reader bytesReader;
             private readonly PackedInt32s.Reader reader;
 
-            public BinaryDocValuesAnonymousInnerClassHelper4(PagedBytes.Reader bytesReader, PackedInt32s.Reader reader)
+            public BinaryDocValuesAnonymousClass4(PagedBytes.Reader bytesReader, PackedInt32s.Reader reader)
             {
                 this.bytesReader = bytesReader;
                 this.reader = reader;
@@ -762,17 +762,17 @@
             PackedInt32s.Reader reader = PackedInt32s.GetReader(index);
             ramBytesUsed.AddAndGet(bytes.RamBytesUsed() + reader.RamBytesUsed());
 
-            return CorrectBuggyOrds(new SortedDocValuesAnonymousInnerClassHelper(fixedLength, valueCount, bytesReader, reader));
+            return CorrectBuggyOrds(new SortedDocValuesAnonymousClass(fixedLength, valueCount, bytesReader, reader));
         }
 
-        private class SortedDocValuesAnonymousInnerClassHelper : SortedDocValues
+        private class SortedDocValuesAnonymousClass : SortedDocValues
         {
             private readonly int fixedLength;
             private readonly int valueCount;
             private readonly PagedBytes.Reader bytesReader;
             private readonly PackedInt32s.Reader reader;
 
-            public SortedDocValuesAnonymousInnerClassHelper(int fixedLength, int valueCount, PagedBytes.Reader bytesReader, PackedInt32s.Reader reader)
+            public SortedDocValuesAnonymousClass(int fixedLength, int valueCount, PagedBytes.Reader bytesReader, PackedInt32s.Reader reader)
             {
                 this.fixedLength = fixedLength;
                 this.valueCount = valueCount;
@@ -810,17 +810,17 @@
             int valueCount = addressReader.Count - 1;
             ramBytesUsed.AddAndGet(bytes.RamBytesUsed() + addressReader.RamBytesUsed() + ordsReader.RamBytesUsed());
 
-            return CorrectBuggyOrds(new SortedDocValuesAnonymousInnerClassHelper2(bytesReader, addressReader, ordsReader, valueCount));
+            return CorrectBuggyOrds(new SortedDocValuesAnonymousClass2(bytesReader, addressReader, ordsReader, valueCount));
         }
 
-        private class SortedDocValuesAnonymousInnerClassHelper2 : SortedDocValues
+        private class SortedDocValuesAnonymousClass2 : SortedDocValues
         {
             private readonly PagedBytes.Reader bytesReader;
             private readonly PackedInt32s.Reader addressReader;
             private readonly PackedInt32s.Reader ordsReader;
             private readonly int valueCount;
 
-            public SortedDocValuesAnonymousInnerClassHelper2(PagedBytes.Reader bytesReader, PackedInt32s.Reader addressReader, PackedInt32s.Reader ordsReader, int valueCount)
+            public SortedDocValuesAnonymousClass2(PagedBytes.Reader bytesReader, PackedInt32s.Reader addressReader, PackedInt32s.Reader ordsReader, int valueCount)
             {
                 this.bytesReader = bytesReader;
                 this.addressReader = addressReader;
@@ -858,14 +858,14 @@
             }
 
             // we had ord holes, return an ord-shifting-impl that corrects the bug
-            return new SortedDocValuesAnonymousInnerClassHelper3(@in);
+            return new SortedDocValuesAnonymousClass3(@in);
         }
 
-        private class SortedDocValuesAnonymousInnerClassHelper3 : SortedDocValues
+        private class SortedDocValuesAnonymousClass3 : SortedDocValues
         {
             private readonly SortedDocValues @in;
 
-            public SortedDocValuesAnonymousInnerClassHelper3(SortedDocValues @in)
+            public SortedDocValuesAnonymousClass3(SortedDocValues @in)
             {
                 this.@in = @in;
             }
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40FieldInfosReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40FieldInfosReader.cs
index 488c0de..f6256f6 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40FieldInfosReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40FieldInfosReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Index;
 using System;
 using System.Collections.Generic;
@@ -103,7 +104,7 @@
                     // DV Types are packed in one byte
                     byte val = input.ReadByte();
                     LegacyDocValuesType oldValuesType = GetDocValuesType((sbyte)(val & 0x0F));
-                    LegacyDocValuesType oldNormsType = GetDocValuesType((sbyte)(((int)((uint)val >> 4)) & 0x0F));
+                    LegacyDocValuesType oldNormsType = GetDocValuesType((sbyte)(val.TripleShift(4) & 0x0F));
                     IDictionary<string, string> attributes = input.ReadStringStringMap();
                     if (oldValuesType.GetMapping() != DocValuesType.NONE)
                     {
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs
index 734ecb5..61bb3f0 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40PostingsReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Support;
@@ -388,7 +389,7 @@
             {
                 while (low <= hi)
                 {
-                    int mid = (int)((uint)(hi + low) >> 1);
+                    int mid = (hi + low).TripleShift(1);
                     int doc = docs[mid];
                     if (doc < target)
                     {
@@ -474,7 +475,7 @@
                 for (int i = 0; i < size; i++)
                 {
                     int code = freqIn.ReadVInt32();
-                    docAc += (int)((uint)code >> 1); // shift off low bit
+                    docAc += code.TripleShift(1); // shift off low bit
                     freqs[i] = ReadFreq(freqIn, code);
                     docs[i] = docAc;
                 }
@@ -578,7 +579,7 @@
                     }
                     else
                     {
-                        docAcc += (int)((uint)code >> 1); // shift off low bit
+                        docAcc += code.TripleShift(1); // shift off low bit
                         frq = ReadFreq(freqIn, code);
                     }
                     if (docAcc >= target)
@@ -605,7 +606,7 @@
                     }
                     else
                     {
-                        m_accum += (int)((uint)code >> 1); // shift off low bit
+                        m_accum += code.TripleShift(1); // shift off low bit
                         m_freq = ReadFreq(freqIn, code);
                     }
                     return m_accum;
@@ -677,7 +678,7 @@
                     }
                     else
                     {
-                        docAcc += (int)((uint)code >> 1); // shift off low bit
+                        docAcc += code.TripleShift(1); // shift off low bit
                         frq = ReadFreq(freqIn, code);
                     }
                     if (docAcc >= target && liveDocs.Get(docAcc))
@@ -710,7 +711,7 @@
                     }
                     else
                     {
-                        docAcc += (int)((uint)code >> 1); // shift off low bit
+                        docAcc += code.TripleShift(1); // shift off low bit
                         frq = ReadFreq(freqIn, code);
                     }
                     if (liveDocs.Get(docAcc))
@@ -815,7 +816,7 @@
                     // Decode next doc/freq pair
                     int code = freqIn.ReadVInt32();
 
-                    accum += (int)((uint)code >> 1); // shift off low bit
+                    accum += code.TripleShift(1); // shift off low bit
                     if ((code & 1) != 0) // if low bit is set
                     {
                         freq = 1; // freq is one
@@ -1046,7 +1047,7 @@
                     // Decode next doc/freq pair
                     int code = freqIn.ReadVInt32();
 
-                    accum += (int)((uint)code >> 1); // shift off low bit
+                    accum += code.TripleShift(1); // shift off low bit
                     if ((code & 1) != 0) // if low bit is set
                     {
                         freq = 1; // freq is one
@@ -1198,7 +1199,7 @@
                     if (Debugging.AssertsEnabled) Debugging.Assert(payloadLength != -1);
 
                     payloadPending = true;
-                    code_ = (int)((uint)code_ >> 1);
+                    code_ = code_.TripleShift(1);
                 }
                 position += code_;
 
@@ -1210,7 +1211,7 @@
                         // new offset length
                         offsetLength = proxIn.ReadVInt32();
                     }
-                    startOffset += (int)((uint)offsetCode >> 1);
+                    startOffset += offsetCode.TripleShift(1);
                 }
 
                 posPendingCount--;
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40SegmentInfoWriter.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40SegmentInfoWriter.cs
index 8b4b00f..15e3fc3 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40SegmentInfoWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40SegmentInfoWriter.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Support;
+using Lucene.Net.Support;
 using System;
 
 namespace Lucene.Net.Codecs.Lucene40
@@ -60,7 +60,7 @@
                 output.WriteString(si.Version);
                 output.WriteInt32(si.DocCount);
 
-                output.WriteByte((byte)(sbyte)(si.UseCompoundFile ? SegmentInfo.YES : SegmentInfo.NO));
+                output.WriteByte((byte)(si.UseCompoundFile ? SegmentInfo.YES : SegmentInfo.NO));
                 output.WriteStringStringMap(si.Diagnostics);
                 output.WriteStringStringMap(Collections.EmptyMap<string, string>());
                 output.WriteStringSet(si.GetFiles());
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40SkipListReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40SkipListReader.cs
index f074e7d..e353831 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40SkipListReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40SkipListReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Support;
 using System;
 using System.Runtime.CompilerServices;
@@ -138,7 +139,7 @@
                         offsetLength[level] = skipStream.ReadVInt32();
                     }
                 }
-                delta = (int)((uint)delta >> 1);
+                delta = delta.TripleShift(1);
             }
             else
             {
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs
index 47bf7c8..67e292f 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs
@@ -1,4 +1,4 @@
-using J2N;
+using J2N;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Documents;
 using System;
@@ -217,7 +217,7 @@
                 }
             }
 
-            fieldsStream.WriteByte((byte)(sbyte)bits);
+            fieldsStream.WriteByte((byte)bits);
 
             if (bytes != null)
             {
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs
index 4ebb197..bdccfbb 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Support;
@@ -498,7 +499,7 @@
                     for (int posUpto = 0; posUpto < freq; posUpto++)
                     {
                         int code = tvf.ReadVInt32();
-                        pos += (int)((uint)code >> 1);
+                        pos += code.TripleShift(1);
                         positions[posUpto] = pos;
                         if ((code & 1) != 0)
                         {
diff --git a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
index 6055bf1..5c8875e 100644
--- a/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using J2N.Text;
 using Lucene.Net.Diagnostics;
 using System;
@@ -208,11 +209,11 @@
                         scratch.Grow(length);
                         scratch.Length = length;
                         positions.ReadBytes(scratch.Bytes, scratch.Offset, scratch.Length);
-                        WritePosition((int)((uint)code >> 1), scratch);
+                        WritePosition(code.TripleShift(1), scratch);
                     }
                     else
                     {
-                        WritePosition((int)((uint)code >> 1), null);
+                        WritePosition(code.TripleShift(1), null);
                     }
                 }
                 tvf.WriteBytes(payloadData.Bytes, payloadData.Offset, payloadData.Length);
@@ -222,7 +223,7 @@
                 // pure positions, no payloads
                 for (int i = 0; i < numProx; i++)
                 {
-                    tvf.WriteVInt32((int)((uint)positions.ReadVInt32() >> 1));
+                    tvf.WriteVInt32(positions.ReadVInt32().TripleShift(1));
                 }
             }
 
diff --git a/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs b/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs
index 4406d54..ee1b8af 100644
--- a/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/ForUtil.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
 using Lucene.Net.Support;
@@ -142,7 +143,7 @@
             for (int bpv = 1; bpv <= 32; ++bpv)
             {
                 var code = @in.ReadVInt32();
-                var formatId = (int)((uint)code >> 5);
+                var formatId = code.TripleShift(5);
                 var bitsPerValue = (code & 31) + 1;
 
                 PackedInt32s.Format format = PackedInt32s.Format.ById(formatId);
@@ -165,7 +166,7 @@
         {
             if (IsAllEqual(data))
             {
-                @out.WriteByte((byte)(sbyte)ALL_VALUES_EQUAL);
+                @out.WriteByte((byte)ALL_VALUES_EQUAL);
                 @out.WriteVInt32(data[0]);
                 return;
             }
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41Codec.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41Codec.cs
index e206944..bafd126 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41Codec.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41Codec.cs
@@ -48,11 +48,11 @@
     public class Lucene41Codec : Codec
     {
         // TODO: slightly evil
-        private readonly StoredFieldsFormat fieldsFormat = new CompressingStoredFieldsFormatAnonymousInnerClassHelper("Lucene41StoredFields", CompressionMode.FAST, 1 << 14);
+        private readonly StoredFieldsFormat fieldsFormat = new CompressingStoredFieldsFormatAnonymousClass("Lucene41StoredFields", CompressionMode.FAST, 1 << 14);
 
-        private class CompressingStoredFieldsFormatAnonymousInnerClassHelper : CompressingStoredFieldsFormat
+        private class CompressingStoredFieldsFormatAnonymousClass : CompressingStoredFieldsFormat
         {
-            public CompressingStoredFieldsFormatAnonymousInnerClassHelper(string formatName, CompressionMode compressionMode, int chunkSize)
+            public CompressingStoredFieldsFormatAnonymousClass(string formatName, CompressionMode compressionMode, int chunkSize)
                 : base(formatName, compressionMode, chunkSize)
             {
             }
@@ -70,11 +70,11 @@
 
         private readonly PostingsFormat postingsFormat;
 
-        private class PerFieldPostingsFormatAnonymousInnerClassHelper : PerFieldPostingsFormat
+        private class PerFieldPostingsFormatAnonymousClass : PerFieldPostingsFormat
         {
             private readonly Lucene41Codec outerInstance;
 
-            public PerFieldPostingsFormatAnonymousInnerClassHelper(Lucene41Codec outerInstance)
+            public PerFieldPostingsFormatAnonymousClass(Lucene41Codec outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -91,7 +91,7 @@
         public Lucene41Codec()
             : base()
         {
-            postingsFormat = new PerFieldPostingsFormatAnonymousInnerClassHelper(this);
+            postingsFormat = new PerFieldPostingsFormatAnonymousClass(this);
         }
 
         // TODO: slightly evil
diff --git a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs
index 7683ecf..414bd3a 100644
--- a/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene41/Lucene41PostingsReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
@@ -109,7 +110,7 @@
                 for (int i = 0; i < num; i++)
                 {
                     int code = docIn.ReadVInt32();
-                    docBuffer[i] = (int)((uint)code >> 1);
+                    docBuffer[i] = code.TripleShift(1);
                     if ((code & 1) != 0)
                     {
                         freqBuffer[i] = 1;
@@ -766,7 +767,7 @@
                             {
                                 payloadLength = posIn.ReadVInt32();
                             }
-                            posDeltaBuffer[i] = (int)((uint)code >> 1);
+                            posDeltaBuffer[i] = code.TripleShift(1);
                             if (payloadLength != 0)
                             {
                                 posIn.Seek(posIn.GetFilePointer() + payloadLength);
@@ -1281,7 +1282,7 @@
                             //   System.out.println("        i=" + i + " payloadLen=" + payloadLength);
                             // }
                             payloadLengthBuffer[i] = payloadLength;
-                            posDeltaBuffer[i] = (int)((uint)code >> 1);
+                            posDeltaBuffer[i] = code.TripleShift(1);
                             if (payloadLength != 0)
                             {
                                 if (payloadByteUpto + payloadLength > payloadBytes.Length)
@@ -1308,7 +1309,7 @@
                             {
                                 offsetLength = posIn.ReadVInt32();
                             }
-                            offsetStartDeltaBuffer[i] = (int)((uint)deltaCode >> 1);
+                            offsetStartDeltaBuffer[i] = deltaCode.TripleShift(1);
                             offsetLengthBuffer[i] = offsetLength;
                             // if (DEBUG) {
                             //   System.out.println("          startOffDelta=" + offsetStartDeltaBuffer[i] + " offsetLen=" + offsetLengthBuffer[i]);
diff --git a/src/Lucene.Net/Codecs/Lucene42/Lucene42Codec.cs b/src/Lucene.Net/Codecs/Lucene42/Lucene42Codec.cs
index 3e9126b..747b305 100644
--- a/src/Lucene.Net/Codecs/Lucene42/Lucene42Codec.cs
+++ b/src/Lucene.Net/Codecs/Lucene42/Lucene42Codec.cs
@@ -53,11 +53,11 @@
 
         private readonly PostingsFormat postingsFormat;
 
-        private class PerFieldPostingsFormatAnonymousInnerClassHelper : PerFieldPostingsFormat
+        private class PerFieldPostingsFormatAnonymousClass : PerFieldPostingsFormat
         {
             private readonly Lucene42Codec outerInstance;
 
-            public PerFieldPostingsFormatAnonymousInnerClassHelper(Lucene42Codec outerInstance)
+            public PerFieldPostingsFormatAnonymousClass(Lucene42Codec outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -71,11 +71,11 @@
 
         private readonly DocValuesFormat docValuesFormat;
 
-        private class PerFieldDocValuesFormatAnonymousInnerClassHelper : PerFieldDocValuesFormat
+        private class PerFieldDocValuesFormatAnonymousClass : PerFieldDocValuesFormat
         {
             private readonly Lucene42Codec outerInstance;
 
-            public PerFieldDocValuesFormatAnonymousInnerClassHelper(Lucene42Codec outerInstance)
+            public PerFieldDocValuesFormatAnonymousClass(Lucene42Codec outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -92,8 +92,8 @@
         public Lucene42Codec()
             : base()
         {
-            postingsFormat = new PerFieldPostingsFormatAnonymousInnerClassHelper(this);
-            docValuesFormat = new PerFieldDocValuesFormatAnonymousInnerClassHelper(this);
+            postingsFormat = new PerFieldPostingsFormatAnonymousClass(this);
+            docValuesFormat = new PerFieldDocValuesFormatAnonymousClass(this);
         }
 
         public override sealed StoredFieldsFormat StoredFieldsFormat => fieldsFormat;
@@ -148,9 +148,9 @@
         private PostingsFormat defaultFormat;
         private DocValuesFormat defaultDVFormat;
 
-        private readonly NormsFormat normsFormat = new Lucene42NormsFormatAnonymousInnerClassHelper();
+        private readonly NormsFormat normsFormat = new Lucene42NormsFormatAnonymousClass();
 
-        private class Lucene42NormsFormatAnonymousInnerClassHelper : Lucene42NormsFormat
+        private class Lucene42NormsFormatAnonymousClass : Lucene42NormsFormat
         {
             public override DocValuesConsumer NormsConsumer(SegmentWriteState state)
             {
diff --git a/src/Lucene.Net/Codecs/Lucene42/Lucene42DocValuesProducer.cs b/src/Lucene.Net/Codecs/Lucene42/Lucene42DocValuesProducer.cs
index 91ed273..5b4e24b 100644
--- a/src/Lucene.Net/Codecs/Lucene42/Lucene42DocValuesProducer.cs
+++ b/src/Lucene.Net/Codecs/Lucene42/Lucene42DocValuesProducer.cs
@@ -265,7 +265,7 @@
                     int bitsPerValue = data.ReadVInt32();
                     PackedInt32s.Reader ordsReader = PackedInt32s.GetReaderNoHeader(data, PackedInt32s.Format.ById(formatID), entry.PackedInt32sVersion, maxDoc, bitsPerValue);
                     ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(decode) + ordsReader.RamBytesUsed());
-                    return new NumericDocValuesAnonymousInnerClassHelper(decode, ordsReader);
+                    return new NumericDocValuesAnonymousClass(decode, ordsReader);
 
                 case DELTA_COMPRESSED:
                     int blockSize = data.ReadVInt32();
@@ -277,7 +277,7 @@
                     byte[] bytes = new byte[maxDoc];
                     data.ReadBytes(bytes, 0, bytes.Length);
                     ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(bytes));
-                    return new NumericDocValuesAnonymousInnerClassHelper2(bytes);
+                    return new NumericDocValuesAnonymousClass2(bytes);
 
                 case GCD_COMPRESSED:
                     long min = data.ReadInt64();
@@ -285,19 +285,19 @@
                     int quotientBlockSize = data.ReadVInt32();
                     BlockPackedReader quotientReader = new BlockPackedReader(data, entry.PackedInt32sVersion, quotientBlockSize, maxDoc, false);
                     ramBytesUsed.AddAndGet(quotientReader.RamBytesUsed());
-                    return new NumericDocValuesAnonymousInnerClassHelper3(min, mult, quotientReader);
+                    return new NumericDocValuesAnonymousClass3(min, mult, quotientReader);
 
                 default:
                     throw new InvalidOperationException();
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues
+        private class NumericDocValuesAnonymousClass : NumericDocValues
         {
             private readonly long[] decode;
             private readonly PackedInt32s.Reader ordsReader;
 
-            public NumericDocValuesAnonymousInnerClassHelper(long[] decode, PackedInt32s.Reader ordsReader)
+            public NumericDocValuesAnonymousClass(long[] decode, PackedInt32s.Reader ordsReader)
             {
                 this.decode = decode;
                 this.ordsReader = ordsReader;
@@ -310,11 +310,11 @@
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper2 : NumericDocValues
+        private class NumericDocValuesAnonymousClass2 : NumericDocValues
         {
             private readonly byte[] bytes;
 
-            public NumericDocValuesAnonymousInnerClassHelper2(byte[] bytes)
+            public NumericDocValuesAnonymousClass2(byte[] bytes)
             {
                 this.bytes = bytes;
             }
@@ -326,13 +326,13 @@
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper3 : NumericDocValues
+        private class NumericDocValuesAnonymousClass3 : NumericDocValues
         {
             private readonly long min;
             private readonly long mult;
             private readonly BlockPackedReader quotientReader;
 
-            public NumericDocValuesAnonymousInnerClassHelper3(long min, long mult, BlockPackedReader quotientReader)
+            public NumericDocValuesAnonymousClass3(long min, long mult, BlockPackedReader quotientReader)
             {
                 this.min = min;
                 this.mult = mult;
@@ -370,22 +370,22 @@
             {
                 int fixedLength = entry.MinLength;
                 ramBytesUsed.AddAndGet(bytes.RamBytesUsed());
-                return new BinaryDocValuesAnonymousInnerClassHelper(bytesReader, fixedLength);
+                return new BinaryDocValuesAnonymousClass(bytesReader, fixedLength);
             }
             else
             {
                 MonotonicBlockPackedReader addresses = new MonotonicBlockPackedReader(data, entry.PackedInt32sVersion, entry.BlockSize, maxDoc, false);
                 ramBytesUsed.AddAndGet(bytes.RamBytesUsed() + addresses.RamBytesUsed());
-                return new BinaryDocValuesAnonymousInnerClassHelper2(bytesReader, addresses);
+                return new BinaryDocValuesAnonymousClass2(bytesReader, addresses);
             }
         }
 
-        private class BinaryDocValuesAnonymousInnerClassHelper : BinaryDocValues
+        private class BinaryDocValuesAnonymousClass : BinaryDocValues
         {
             private readonly PagedBytes.Reader bytesReader;
             private readonly int fixedLength;
 
-            public BinaryDocValuesAnonymousInnerClassHelper(PagedBytes.Reader bytesReader, int fixedLength)
+            public BinaryDocValuesAnonymousClass(PagedBytes.Reader bytesReader, int fixedLength)
             {
                 this.bytesReader = bytesReader;
                 this.fixedLength = fixedLength;
@@ -398,12 +398,12 @@
             }
         }
 
-        private class BinaryDocValuesAnonymousInnerClassHelper2 : BinaryDocValues
+        private class BinaryDocValuesAnonymousClass2 : BinaryDocValues
         {
             private readonly PagedBytes.Reader bytesReader;
             private readonly MonotonicBlockPackedReader addresses;
 
-            public BinaryDocValuesAnonymousInnerClassHelper2(PagedBytes.Reader bytesReader, MonotonicBlockPackedReader addresses)
+            public BinaryDocValuesAnonymousClass2(PagedBytes.Reader bytesReader, MonotonicBlockPackedReader addresses)
             {
                 this.bytesReader = bytesReader;
                 this.addresses = addresses;
@@ -442,10 +442,10 @@
             var scratchInts = new Int32sRef();
             var fstEnum = new BytesRefFSTEnum<long?>(fst);
 
-            return new SortedDocValuesAnonymousInnerClassHelper(entry, docToOrd, fst, @in, firstArc, scratchArc, scratchInts, fstEnum);
+            return new SortedDocValuesAnonymousClass(entry, docToOrd, fst, @in, firstArc, scratchArc, scratchInts, fstEnum);
         }
 
-        private class SortedDocValuesAnonymousInnerClassHelper : SortedDocValues
+        private class SortedDocValuesAnonymousClass : SortedDocValues
         {
             private readonly FSTEntry entry;
             private readonly NumericDocValues docToOrd;
@@ -456,7 +456,7 @@
             private readonly Int32sRef scratchInts;
             private readonly BytesRefFSTEnum<long?> fstEnum;
 
-            public SortedDocValuesAnonymousInnerClassHelper(FSTEntry entry, NumericDocValues docToOrd, FST<long?> fst, FST.BytesReader @in, FST.Arc<long?> firstArc, FST.Arc<long?> scratchArc, Int32sRef scratchInts, BytesRefFSTEnum<long?> fstEnum)
+            public SortedDocValuesAnonymousClass(FSTEntry entry, NumericDocValues docToOrd, FST<long?> fst, FST.BytesReader @in, FST.Arc<long?> firstArc, FST.Arc<long?> scratchArc, Int32sRef scratchInts, BytesRefFSTEnum<long?> fstEnum)
             {
                 this.entry = entry;
                 this.docToOrd = docToOrd;
@@ -554,10 +554,10 @@
             var fstEnum = new BytesRefFSTEnum<long?>(fst);
             var @ref = new BytesRef();
             var input = new ByteArrayDataInput();
-            return new SortedSetDocValuesAnonymousInnerClassHelper(entry, docToOrds, fst, @in, firstArc, scratchArc, scratchInts, fstEnum, @ref, input);
+            return new SortedSetDocValuesAnonymousClass(entry, docToOrds, fst, @in, firstArc, scratchArc, scratchInts, fstEnum, @ref, input);
         }
 
-        private class SortedSetDocValuesAnonymousInnerClassHelper : SortedSetDocValues
+        private class SortedSetDocValuesAnonymousClass : SortedSetDocValues
         {
             private readonly FSTEntry entry;
             private readonly BinaryDocValues docToOrds;
@@ -570,7 +570,7 @@
             private readonly BytesRef @ref;
             private readonly ByteArrayDataInput input;
 
-            public SortedSetDocValuesAnonymousInnerClassHelper(FSTEntry entry, BinaryDocValues docToOrds, FST<long?> fst, FST.BytesReader @in, FST.Arc<long?> firstArc, FST.Arc<long?> scratchArc, Int32sRef scratchInts, BytesRefFSTEnum<long?> fstEnum, BytesRef @ref, ByteArrayDataInput input)
+            public SortedSetDocValuesAnonymousClass(FSTEntry entry, BinaryDocValues docToOrds, FST<long?> fst, FST.BytesReader @in, FST.Arc<long?> firstArc, FST.Arc<long?> scratchArc, Int32sRef scratchInts, BytesRefFSTEnum<long?> fstEnum, BytesRef @ref, ByteArrayDataInput input)
             {
                 this.entry = entry;
                 this.docToOrds = docToOrds;
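
Aside: every `*AnonymousInnerClassHelper` -> `*AnonymousClass` rename in this commit follows the same porting pattern, where a Java anonymous inner class becomes a named private nested class that receives its captured locals through its constructor. A minimal self-contained sketch of that shape, using hypothetical types rather than the real Lucene.NET classes:

using System;

public abstract class LongValuesBase
{
    public abstract long Get(long index);
}

public class Example
{
    // Stands in for a factory method: build the "anonymous" implementation by
    // handing the captured locals to the nested class's constructor.
    public LongValuesBase GetValues(long min, long mult, Func<long, long> quotientReader)
    {
        return new LongValuesAnonymousClass(min, mult, quotientReader);
    }

    private sealed class LongValuesAnonymousClass : LongValuesBase
    {
        private readonly long min;
        private readonly long mult;
        private readonly Func<long, long> quotientReader;

        public LongValuesAnonymousClass(long min, long mult, Func<long, long> quotientReader)
        {
            this.min = min;
            this.mult = mult;
            this.quotientReader = quotientReader;
        }

        public override long Get(long index) => min + mult * quotientReader(index);
    }
}
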
diff --git a/src/Lucene.Net/Codecs/Lucene42/Lucene42FieldInfosReader.cs b/src/Lucene.Net/Codecs/Lucene42/Lucene42FieldInfosReader.cs
index de1a50b..5c53f00 100644
--- a/src/Lucene.Net/Codecs/Lucene42/Lucene42FieldInfosReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene42/Lucene42FieldInfosReader.cs
@@ -1,4 +1,5 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
+using J2N.Numerics;
 using System;
 using System.Collections.Generic;
 
@@ -95,8 +96,8 @@
 
                     // DV Types are packed in one byte
                     sbyte val = (sbyte)input.ReadByte();
-                    DocValuesType docValuesType = GetDocValuesType(input, (sbyte)(val & 0x0F));
-                    DocValuesType normsType = GetDocValuesType(input, (sbyte)(((int)((uint)val >> 4)) & 0x0F));
+                    DocValuesType docValuesType = GetDocValuesType(input, (byte)(val & 0x0F));
+                    DocValuesType normsType = GetDocValuesType(input, (byte)((val.TripleShift(4)) & 0x0F));
                     IDictionary<string, string> attributes = input.ReadStringStringMap();
                     infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, 
                         omitNorms, storePayloads, indexOptions, docValuesType, normsType, attributes.AsReadOnly());
@@ -120,7 +121,7 @@
             }
         }
 
-        private static DocValuesType GetDocValuesType(IndexInput input, sbyte b)
+        private static DocValuesType GetDocValuesType(IndexInput input, byte b)
         {
             if (b == 0)
             {
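
Aside: the `(sbyte)` -> `(byte)` and `TripleShift(4)` changes above unpack the two doc-values type nibbles that are packed into a single byte. A standalone sketch of that arithmetic (the sample value 0xA3 is made up; TripleShift comes from J2N.Numerics, as in the code above):

using System;
using J2N.Numerics;

class NibbleDemo
{
    static void Main()
    {
        sbyte val = unchecked((sbyte)0xA3);                     // high nibble 0xA, low nibble 0x3
        byte docValuesType = (byte)(val & 0x0F);                // 0x3
        byte normsType = (byte)(val.TripleShift(4) & 0x0F);     // 0xA; TripleShift mirrors Java's >>> in the ported code
        Console.WriteLine($"{docValuesType:X} {normsType:X}");  // prints "3 A"
    }
}
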
diff --git a/src/Lucene.Net/Codecs/Lucene45/Lucene45Codec.cs b/src/Lucene.Net/Codecs/Lucene45/Lucene45Codec.cs
index c14dee1..9037713 100644
--- a/src/Lucene.Net/Codecs/Lucene45/Lucene45Codec.cs
+++ b/src/Lucene.Net/Codecs/Lucene45/Lucene45Codec.cs
@@ -55,11 +55,11 @@
 
         private readonly PostingsFormat postingsFormat;
 
-        private class PerFieldPostingsFormatAnonymousInnerClassHelper : PerFieldPostingsFormat
+        private class PerFieldPostingsFormatAnonymousClass : PerFieldPostingsFormat
         {
             private readonly Lucene45Codec outerInstance;
 
-            public PerFieldPostingsFormatAnonymousInnerClassHelper(Lucene45Codec outerInstance)
+            public PerFieldPostingsFormatAnonymousClass(Lucene45Codec outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -73,11 +73,11 @@
 
         private readonly DocValuesFormat docValuesFormat;
 
-        private class PerFieldDocValuesFormatAnonymousInnerClassHelper : PerFieldDocValuesFormat
+        private class PerFieldDocValuesFormatAnonymousClass : PerFieldDocValuesFormat
         {
             private readonly Lucene45Codec outerInstance;
 
-            public PerFieldDocValuesFormatAnonymousInnerClassHelper(Lucene45Codec outerInstance)
+            public PerFieldDocValuesFormatAnonymousClass(Lucene45Codec outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -94,8 +94,8 @@
         public Lucene45Codec()
             : base()
         {
-            postingsFormat = new PerFieldPostingsFormatAnonymousInnerClassHelper(this);
-            docValuesFormat = new PerFieldDocValuesFormatAnonymousInnerClassHelper(this);
+            postingsFormat = new PerFieldPostingsFormatAnonymousClass(this);
+            docValuesFormat = new PerFieldDocValuesFormatAnonymousClass(this);
         }
 
         public override sealed StoredFieldsFormat StoredFieldsFormat => fieldsFormat;
diff --git a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesProducer.cs b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesProducer.cs
index fd29a7a..2f4fb0a 100644
--- a/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesProducer.cs
+++ b/src/Lucene.Net/Codecs/Lucene45/Lucene45DocValuesProducer.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using J2N.Threading.Atomic;
 using Lucene.Net.Index;
 using Lucene.Net.Util;
@@ -387,26 +388,26 @@
                     long min = entry.minValue;
                     long mult = entry.gcd;
                     BlockPackedReader quotientReader = new BlockPackedReader(data, entry.PackedInt32sVersion, entry.BlockSize, entry.Count, true);
-                    return new Int64ValuesAnonymousInnerClassHelper(min, mult, quotientReader);
+                    return new Int64ValuesAnonymousClass(min, mult, quotientReader);
 
                 case Lucene45DocValuesConsumer.TABLE_COMPRESSED:
                     long[] table = entry.table;
                     int bitsRequired = PackedInt32s.BitsRequired(table.Length - 1);
                     PackedInt32s.Reader ords = PackedInt32s.GetDirectReaderNoHeader(data, PackedInt32s.Format.PACKED, entry.PackedInt32sVersion, (int)entry.Count, bitsRequired);
-                    return new Int64ValuesAnonymousInnerClassHelper2(table, ords);
+                    return new Int64ValuesAnonymousClass2(table, ords);
 
                 default:
                     throw new Exception();
             }
         }
 
-        private class Int64ValuesAnonymousInnerClassHelper : Int64Values
+        private class Int64ValuesAnonymousClass : Int64Values
         {
             private readonly long min;
             private readonly long mult;
             private readonly BlockPackedReader quotientReader;
 
-            public Int64ValuesAnonymousInnerClassHelper(long min, long mult, BlockPackedReader quotientReader)
+            public Int64ValuesAnonymousClass(long min, long mult, BlockPackedReader quotientReader)
             {
                 this.min = min;
                 this.mult = mult;
@@ -420,12 +421,12 @@
             }
         }
 
-        private class Int64ValuesAnonymousInnerClassHelper2 : Int64Values
+        private class Int64ValuesAnonymousClass2 : Int64Values
         {
             private readonly long[] table;
             private readonly PackedInt32s.Reader ords;
 
-            public Int64ValuesAnonymousInnerClassHelper2(long[] table, PackedInt32s.Reader ords)
+            public Int64ValuesAnonymousClass2(long[] table, PackedInt32s.Reader ords)
             {
                 this.table = table;
                 this.ords = ords;
@@ -462,15 +463,15 @@
         {
             IndexInput data = (IndexInput)this.data.Clone();
 
-            return new Int64BinaryDocValuesAnonymousInnerClassHelper(bytes, data);
+            return new Int64BinaryDocValuesAnonymousClass(bytes, data);
         }
 
-        private class Int64BinaryDocValuesAnonymousInnerClassHelper : Int64BinaryDocValues
+        private class Int64BinaryDocValuesAnonymousClass : Int64BinaryDocValues
         {
             private readonly Lucene45DocValuesProducer.BinaryEntry bytes;
             private readonly IndexInput data;
 
-            public Int64BinaryDocValuesAnonymousInnerClassHelper(Lucene45DocValuesProducer.BinaryEntry bytes, IndexInput data)
+            public Int64BinaryDocValuesAnonymousClass(Lucene45DocValuesProducer.BinaryEntry bytes, IndexInput data)
             {
                 this.bytes = bytes;
                 this.data = data;
@@ -526,16 +527,16 @@
 
             MonotonicBlockPackedReader addresses = GetAddressInstance(data, field, bytes);
 
-            return new Int64BinaryDocValuesAnonymousInnerClassHelper2(bytes, data, addresses);
+            return new Int64BinaryDocValuesAnonymousClass2(bytes, data, addresses);
         }
 
-        private class Int64BinaryDocValuesAnonymousInnerClassHelper2 : Int64BinaryDocValues
+        private class Int64BinaryDocValuesAnonymousClass2 : Int64BinaryDocValues
         {
             private readonly Lucene45DocValuesProducer.BinaryEntry bytes;
             private readonly IndexInput data;
             private readonly MonotonicBlockPackedReader addresses;
 
-            public Int64BinaryDocValuesAnonymousInnerClassHelper2(Lucene45DocValuesProducer.BinaryEntry bytes, IndexInput data, MonotonicBlockPackedReader addresses)
+            public Int64BinaryDocValuesAnonymousClass2(Lucene45DocValuesProducer.BinaryEntry bytes, IndexInput data, MonotonicBlockPackedReader addresses)
             {
                 this.bytes = bytes;
                 this.data = data;
@@ -616,16 +617,16 @@
             data.Seek(entry.Offset);
             BlockPackedReader ordinals = new BlockPackedReader(data, entry.PackedInt32sVersion, entry.BlockSize, entry.Count, true);
 
-            return new SortedDocValuesAnonymousInnerClassHelper(valueCount, binary, ordinals);
+            return new SortedDocValuesAnonymousClass(valueCount, binary, ordinals);
         }
 
-        private class SortedDocValuesAnonymousInnerClassHelper : SortedDocValues
+        private class SortedDocValuesAnonymousClass : SortedDocValues
         {
             private readonly int valueCount;
             private readonly BinaryDocValues binary;
             private readonly BlockPackedReader ordinals;
 
-            public SortedDocValuesAnonymousInnerClassHelper(int valueCount, BinaryDocValues binary, BlockPackedReader ordinals)
+            public SortedDocValuesAnonymousClass(int valueCount, BinaryDocValues binary, BlockPackedReader ordinals)
             {
                 this.valueCount = valueCount;
                 this.binary = binary;
@@ -716,17 +717,17 @@
             // but the addresses to the ord stream are in RAM
             MonotonicBlockPackedReader ordIndex = GetOrdIndexInstance(data, field, ordIndexes[field.Number]);
 
-            return new RandomAccessOrdsAnonymousInnerClassHelper(valueCount, binary, ordinals, ordIndex);
+            return new RandomAccessOrdsAnonymousClass(valueCount, binary, ordinals, ordIndex);
         }
 
-        private class RandomAccessOrdsAnonymousInnerClassHelper : RandomAccessOrds
+        private class RandomAccessOrdsAnonymousClass : RandomAccessOrds
         {
             private readonly long valueCount;
             private readonly Lucene45DocValuesProducer.Int64BinaryDocValues binary;
             private readonly Int64Values ordinals;
             private readonly MonotonicBlockPackedReader ordIndex;
 
-            public RandomAccessOrdsAnonymousInnerClassHelper(long valueCount, Lucene45DocValuesProducer.Int64BinaryDocValues binary, Int64Values ordinals, MonotonicBlockPackedReader ordIndex)
+            public RandomAccessOrdsAnonymousClass(long valueCount, Lucene45DocValuesProducer.Int64BinaryDocValues binary, Int64Values ordinals, MonotonicBlockPackedReader ordIndex)
             {
                 this.valueCount = valueCount;
                 this.binary = binary;
@@ -817,18 +818,18 @@
             else
             {
                 IndexInput @in = (IndexInput)data.Clone();
-                return new BitsAnonymousInnerClassHelper(this, offset, @in);
+                return new BitsAnonymousClass(this, offset, @in);
             }
         }
 
-        private class BitsAnonymousInnerClassHelper : IBits
+        private class BitsAnonymousClass : IBits
         {
             private readonly Lucene45DocValuesProducer outerInstance;
 
             private readonly long offset;
             private readonly IndexInput @in;
 
-            public BitsAnonymousInnerClassHelper(Lucene45DocValuesProducer outerInstance, long offset, IndexInput @in)
+            public BitsAnonymousClass(Lucene45DocValuesProducer outerInstance, long offset, IndexInput @in)
             {
                 this.outerInstance = outerInstance;
                 this.offset = offset;
@@ -1070,16 +1071,16 @@
             {
                 input.Seek(bytes.offset);
 
-                return new TermsEnumAnonymousInnerClassHelper(this, input);
+                return new TermsEnumAnonymousClass(this, input);
             }
 
-            private class TermsEnumAnonymousInnerClassHelper : TermsEnum
+            private class TermsEnumAnonymousClass : TermsEnum
             {
                 private readonly CompressedBinaryDocValues outerInstance;
 
                 private readonly IndexInput input;
 
-                public TermsEnumAnonymousInnerClassHelper(CompressedBinaryDocValues outerInstance, IndexInput input)
+                public TermsEnumAnonymousClass(CompressedBinaryDocValues outerInstance, IndexInput input)
                 {
                     this.outerInstance = outerInstance;
                     this.input = input;
@@ -1130,7 +1131,7 @@
 
                     while (low <= high)
                     {
-                        long mid = (int)((uint)(low + high) >> 1);
+                        long mid = (low + high).TripleShift(1);
                         DoSeek(mid * outerInstance.interval);
                         int cmp = termBuffer.CompareTo(text);
 
diff --git a/src/Lucene.Net/Codecs/Lucene46/Lucene46Codec.cs b/src/Lucene.Net/Codecs/Lucene46/Lucene46Codec.cs
index ce48a90..3fa177d 100644
--- a/src/Lucene.Net/Codecs/Lucene46/Lucene46Codec.cs
+++ b/src/Lucene.Net/Codecs/Lucene46/Lucene46Codec.cs
@@ -51,11 +51,11 @@
 
         private readonly PostingsFormat postingsFormat;
 
-        private class PerFieldPostingsFormatAnonymousInnerClassHelper : PerFieldPostingsFormat
+        private class PerFieldPostingsFormatAnonymousClass : PerFieldPostingsFormat
         {
             private readonly Lucene46Codec outerInstance;
 
-            public PerFieldPostingsFormatAnonymousInnerClassHelper(Lucene46Codec outerInstance)
+            public PerFieldPostingsFormatAnonymousClass(Lucene46Codec outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -69,11 +69,11 @@
 
         private readonly DocValuesFormat docValuesFormat;
 
-        private class PerFieldDocValuesFormatAnonymousInnerClassHelper : PerFieldDocValuesFormat
+        private class PerFieldDocValuesFormatAnonymousClass : PerFieldDocValuesFormat
         {
             private readonly Lucene46Codec outerInstance;
 
-            public PerFieldDocValuesFormatAnonymousInnerClassHelper(Lucene46Codec outerInstance)
+            public PerFieldDocValuesFormatAnonymousClass(Lucene46Codec outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -90,8 +90,8 @@
         public Lucene46Codec()
             : base()
         {
-            postingsFormat = new PerFieldPostingsFormatAnonymousInnerClassHelper(this);
-            docValuesFormat = new PerFieldDocValuesFormatAnonymousInnerClassHelper(this);
+            postingsFormat = new PerFieldPostingsFormatAnonymousClass(this);
+            docValuesFormat = new PerFieldDocValuesFormatAnonymousClass(this);
         }
 
         public override sealed StoredFieldsFormat StoredFieldsFormat => fieldsFormat;
diff --git a/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosReader.cs b/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosReader.cs
index 4997850..0c41a05 100644
--- a/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosReader.cs
+++ b/src/Lucene.Net/Codecs/Lucene46/Lucene46FieldInfosReader.cs
@@ -1,4 +1,5 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
+using J2N.Numerics;
 using System.Collections.Generic;
 
 namespace Lucene.Net.Codecs.Lucene46
@@ -92,8 +93,8 @@
 
                     // DV Types are packed in one byte
                     byte val = input.ReadByte();
-                    DocValuesType docValuesType = GetDocValuesType(input, (sbyte)(val & 0x0F));
-                    DocValuesType normsType = GetDocValuesType(input, (sbyte)(((int)((uint)val >> 4)) & 0x0F));
+                    DocValuesType docValuesType = GetDocValuesType(input, (byte)(val & 0x0F));
+                    DocValuesType normsType = GetDocValuesType(input, (byte)((val.TripleShift(4)) & 0x0F));
                     long dvGen = input.ReadInt64();
                     IDictionary<string, string> attributes = input.ReadStringStringMap();
                     infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValuesType, normsType, attributes.AsReadOnly());
@@ -127,7 +128,7 @@
             }
         }
 
-        private static DocValuesType GetDocValuesType(IndexInput input, sbyte b)
+        private static DocValuesType GetDocValuesType(IndexInput input, byte b)
         {
             if (b == 0)
             {
diff --git a/src/Lucene.Net/Codecs/Lucene46/Lucene46SegmentInfoWriter.cs b/src/Lucene.Net/Codecs/Lucene46/Lucene46SegmentInfoWriter.cs
index aaa3869..35af117 100644
--- a/src/Lucene.Net/Codecs/Lucene46/Lucene46SegmentInfoWriter.cs
+++ b/src/Lucene.Net/Codecs/Lucene46/Lucene46SegmentInfoWriter.cs
@@ -1,4 +1,4 @@
-namespace Lucene.Net.Codecs.Lucene46
+namespace Lucene.Net.Codecs.Lucene46
 {
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -56,7 +56,7 @@
                 output.WriteString(si.Version);
                 output.WriteInt32(si.DocCount);
 
-                output.WriteByte((byte)(sbyte)(si.UseCompoundFile ? SegmentInfo.YES : SegmentInfo.NO));
+                output.WriteByte((byte)(si.UseCompoundFile ? SegmentInfo.YES : SegmentInfo.NO));
                 output.WriteStringStringMap(si.Diagnostics);
                 output.WriteStringSet(si.GetFiles());
                 CodecUtil.WriteFooter(output);
diff --git a/src/Lucene.Net/Codecs/TermVectorsWriter.cs b/src/Lucene.Net/Codecs/TermVectorsWriter.cs
index a817481..7a2126f 100644
--- a/src/Lucene.Net/Codecs/TermVectorsWriter.cs
+++ b/src/Lucene.Net/Codecs/TermVectorsWriter.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using J2N.Text;
 using Lucene.Net.Diagnostics;
 using System;
@@ -169,7 +170,7 @@
                 else
                 {
                     int code = positions.ReadVInt32();
-                    position += (int)((uint)code >> 1);
+                    position += code.TripleShift(1);
                     if ((code & 1) != 0)
                     {
                         // this position has a payload
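
Aside: the decode above reads a packed position code whose low bit flags a payload and whose remaining bits hold the position delta. A tiny round-trip sketch (the encoder side is inferred from this decoder, so treat it as illustrative only):

using System;
using J2N.Numerics;

class PositionCodeDemo
{
    static void Main()
    {
        int delta = 7;
        bool hasPayload = true;

        int code = (delta << 1) | (hasPayload ? 1 : 0);  // pack: low bit = payload flag, upper bits = delta

        int decodedDelta = code.TripleShift(1);          // matches `position += code.TripleShift(1);` above
        bool decodedPayload = (code & 1) != 0;           // matches `if ((code & 1) != 0)` above

        Console.WriteLine($"{decodedDelta} {decodedPayload}"); // prints "7 True"
    }
}
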
diff --git a/src/Lucene.Net/Index/BinaryDocValuesFieldUpdates.cs b/src/Lucene.Net/Index/BinaryDocValuesFieldUpdates.cs
index 28a8d81..0d9f036 100644
--- a/src/Lucene.Net/Index/BinaryDocValuesFieldUpdates.cs
+++ b/src/Lucene.Net/Index/BinaryDocValuesFieldUpdates.cs
@@ -178,19 +178,19 @@
             PagedGrowableWriter lengths = this.lengths;
             BytesRef values = this.values;
             FixedBitSet docsWithField = this.docsWithField;
-            new InPlaceMergeSorterAnonymousInnerClassHelper(docs, offsets, lengths, docsWithField).Sort(0, size);
+            new InPlaceMergeSorterAnonymousClass(docs, offsets, lengths, docsWithField).Sort(0, size);
 
             return new Iterator(size, offsets, lengths, docs, values, docsWithField);
         }
 
-        private class InPlaceMergeSorterAnonymousInnerClassHelper : InPlaceMergeSorter
+        private class InPlaceMergeSorterAnonymousClass : InPlaceMergeSorter
         {
             private readonly PagedMutable docs;
             private readonly PagedGrowableWriter offsets;
             private readonly PagedGrowableWriter lengths;
             private readonly FixedBitSet docsWithField;
 
-            public InPlaceMergeSorterAnonymousInnerClassHelper(PagedMutable docs, PagedGrowableWriter offsets, PagedGrowableWriter lengths, FixedBitSet docsWithField)
+            public InPlaceMergeSorterAnonymousClass(PagedMutable docs, PagedGrowableWriter offsets, PagedGrowableWriter lengths, FixedBitSet docsWithField)
             {
                 this.docs = docs;
                 this.offsets = offsets;
diff --git a/src/Lucene.Net/Index/CoalescedUpdates.cs b/src/Lucene.Net/Index/CoalescedUpdates.cs
index 8e536db..6394052 100644
--- a/src/Lucene.Net/Index/CoalescedUpdates.cs
+++ b/src/Lucene.Net/Index/CoalescedUpdates.cs
@@ -68,14 +68,14 @@
 
         public virtual IEnumerable<Term> TermsIterable()
         {
-            return new IterableAnonymousInnerClassHelper(this);
+            return new IterableAnonymousClass(this);
         }
 
-        private class IterableAnonymousInnerClassHelper : IEnumerable<Term>
+        private class IterableAnonymousClass : IEnumerable<Term>
         {
             private readonly CoalescedUpdates outerInstance;
 
-            public IterableAnonymousInnerClassHelper(CoalescedUpdates outerInstance)
+            public IterableAnonymousClass(CoalescedUpdates outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -98,21 +98,21 @@
 
         public virtual IEnumerable<QueryAndLimit> QueriesIterable()
         {
-            return new IterableAnonymousInnerClassHelper2(this);
+            return new IterableAnonymousClass2(this);
         }
 
-        private class IterableAnonymousInnerClassHelper2 : IEnumerable<QueryAndLimit>
+        private class IterableAnonymousClass2 : IEnumerable<QueryAndLimit>
         {
             private readonly CoalescedUpdates outerInstance;
 
-            public IterableAnonymousInnerClassHelper2(CoalescedUpdates outerInstance)
+            public IterableAnonymousClass2(CoalescedUpdates outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
 
             public virtual IEnumerator<QueryAndLimit> GetEnumerator()
             {
-                return new IteratorAnonymousInnerClassHelper(this);
+                return new IteratorAnonymousClass(this);
             }
 
             IEnumerator IEnumerable.GetEnumerator()
@@ -120,13 +120,13 @@
                 return GetEnumerator();
             }
 
-            private class IteratorAnonymousInnerClassHelper : IEnumerator<QueryAndLimit>
+            private class IteratorAnonymousClass : IEnumerator<QueryAndLimit>
             {
-                private readonly IterableAnonymousInnerClassHelper2 outerInstance;
+                private readonly IterableAnonymousClass2 outerInstance;
                 private readonly IEnumerator<KeyValuePair<Query, int>> iter;
                 private QueryAndLimit current;
 
-                public IteratorAnonymousInnerClassHelper(IterableAnonymousInnerClassHelper2 outerInstance)
+                public IteratorAnonymousClass(IterableAnonymousClass2 outerInstance)
                 {
                     this.outerInstance = outerInstance;
                     iter = this.outerInstance.outerInstance.queries.GetEnumerator();
diff --git a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs
index 6a48765..d6ce849 100644
--- a/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs
+++ b/src/Lucene.Net/Index/ConcurrentMergeScheduler.cs
@@ -1,5 +1,4 @@
-#if FEATURE_CONCURRENTMERGESCHEDULER
-using J2N.Threading;
+using J2N.Threading;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
@@ -747,5 +746,4 @@
             return clone;
         }
     }
-}
-#endif
\ No newline at end of file
+}
\ No newline at end of file
diff --git a/src/Lucene.Net/Index/DocTermOrds.cs b/src/Lucene.Net/Index/DocTermOrds.cs
index b6fea05..891f592 100644
--- a/src/Lucene.Net/Index/DocTermOrds.cs
+++ b/src/Lucene.Net/Index/DocTermOrds.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
@@ -255,7 +256,7 @@
             this.m_field = field;
             this.m_maxTermDocFreq = maxTermDocFreq;
             this.indexIntervalBits = indexIntervalBits;
-            indexIntervalMask = (int)((uint)0xffffffff >> (32 - indexIntervalBits));
+            indexIntervalMask = (int)(0xffffffff >> (32 - indexIntervalBits)); // LUCENENET: no cast to uint is needed for the unsigned shift here because the 0xffffffff literal is already a uint
             indexInterval = 1 << indexIntervalBits;
         }
 
@@ -466,7 +467,7 @@
                         {
                             // index into byte array (actually the end of
                             // the doc-specific byte[] when building)
-                            int pos = (int)((uint)val >> 8);
+                            int pos = val.TripleShift(8);
                             int ilen = VInt32Size(delta);
                             var arr = bytes[doc];
                             int newend = pos + ilen;
@@ -533,7 +534,7 @@
                                 for (int j = 0; j < ipos; j++)
                                 {
                                     tempArr[j] = (sbyte)val;
-                                    val = (int)((uint)val >> 8);
+                                    val = val.TripleShift(8);
                                 }
                                 // point at the end index in the byte[]
                                 index[doc] = (endPos << 8) | 1;
@@ -597,7 +598,7 @@
                             int val = index[doc];
                             if ((val & 0xff) == 1)
                             {
-                                int len = (int)((uint)val >> 8);
+                                int len = val.TripleShift(8);
                                 //System.out.println("    ptr pos=" + pos);
                                 index[doc] = (pos << 8) | 1; // change index to point to start of array
                                 if ((pos & 0xff000000) != 0)
@@ -703,22 +704,22 @@
         /// </summary>
         private static int WriteInt32(int x, sbyte[] arr, int pos)
         {
-            var a = ((int)((uint)x >> (7 * 4)));
+            var a = x.TripleShift(7 * 4);
             if (a != 0)
             {
                 arr[pos++] = (sbyte)(a | 0x80);
             }
-            a = ((int)((uint)x >> (7 * 3)));
+            a = x.TripleShift(7 * 3);
             if (a != 0)
             {
                 arr[pos++] = (sbyte)(a | 0x80);
             }
-            a = ((int)((uint)x >> (7 * 2)));
+            a = x.TripleShift(7 * 2);
             if (a != 0)
             {
                 arr[pos++] = (sbyte)(a | 0x80);
             }
-            a = ((int)((uint)x >> (7 * 1)));
+            a = x.TripleShift(7 * 1);
             if (a != 0)
             {
                 arr[pos++] = (sbyte)(a | 0x80);
@@ -774,8 +775,7 @@
                     term = null;
                     return false;
                 }
-                SetTerm(); // this is extra work if we know we are in bounds...
-                return true;
+                return SetTerm() != null;   // this is extra work if we know we are in bounds...
             }
 
             [Obsolete("Use MoveNext() and Term instead. This method will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
@@ -870,7 +870,7 @@
                 //System.out.println("  seek(ord) targetOrd=" + targetOrd + " delta=" + delta + " ord=" + ord + " ii=" + indexInterval);
                 if (delta < 0 || delta > outerInstance.indexInterval)
                 {
-                    int idx = (int)((long)((ulong)targetOrd >> outerInstance.indexIntervalBits));
+                    int idx = (int)targetOrd.TripleShift(outerInstance.indexIntervalBits);
                     BytesRef @base = outerInstance.m_indexedTermsArray[idx];
                     //System.out.println("  do seek term=" + base.utf8ToString());
                     ord = idx << outerInstance.indexIntervalBits;
@@ -1002,7 +1002,7 @@
                             //System.out.println("  tnum=" + tnum);
                             delta = 0;
                         }
-                        code = (int)((uint)code >> 8);
+                        code = code.TripleShift(8);
                     }
                 }
                 else
@@ -1046,9 +1046,9 @@
                 if ((code & 0xff) == 1)
                 {
                     // a pointer
-                    upto = (int)((uint)code >> 8);
+                    upto = code.TripleShift(8);
                     //System.out.println("    pointer!  upto=" + upto);
-                    int whichArray = ((int)((uint)docID >> 16)) & 0xff;
+                    int whichArray = (docID.TripleShift(16)) & 0xff;
                     arr = outerInstance.m_tnums[whichArray];
                 }
                 else
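
Aside: the DocTermOrds changes above (and the LUCENENET comment on indexIntervalMask) replace the ported `(int)((uint)x >> n)` idiom with J2N's TripleShift. The two spellings are equivalent, including for negative inputs, which a standalone check makes easy to confirm:

using System;
using J2N.Numerics;

class TripleShiftDemo
{
    static void Main()
    {
        foreach (int x in new[] { 300, -300, int.MinValue })
        {
            int oldStyle = (int)((uint)x >> 8);   // the pattern being removed in this commit
            int newStyle = x.TripleShift(8);      // J2N's equivalent of Java's x >>> 8
            Console.WriteLine($"{x}: {oldStyle} == {newStyle} -> {oldStyle == newStyle}");
        }
    }
}
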
diff --git a/src/Lucene.Net/Index/DocValues.cs b/src/Lucene.Net/Index/DocValues.cs
index 1cf287e..fa41d10 100644
--- a/src/Lucene.Net/Index/DocValues.cs
+++ b/src/Lucene.Net/Index/DocValues.cs
@@ -36,11 +36,11 @@
         /// <summary>
         /// An empty <see cref="BinaryDocValues"/> which returns <see cref="BytesRef.EMPTY_BYTES"/> for every document
         /// </summary>
-        public static readonly BinaryDocValues EMPTY_BINARY = new BinaryDocValuesAnonymousInnerClassHelper();
+        public static readonly BinaryDocValues EMPTY_BINARY = new BinaryDocValuesAnonymousClass();
 
-        private class BinaryDocValuesAnonymousInnerClassHelper : BinaryDocValues
+        private class BinaryDocValuesAnonymousClass : BinaryDocValues
         {
-            public BinaryDocValuesAnonymousInnerClassHelper()
+            public BinaryDocValuesAnonymousClass()
             {
             }
 
@@ -55,11 +55,11 @@
         /// <summary>
         /// An empty <see cref="NumericDocValues"/> which returns zero for every document
         /// </summary>
-        public static readonly NumericDocValues EMPTY_NUMERIC = new NumericDocValuesAnonymousInnerClassHelper();
+        public static readonly NumericDocValues EMPTY_NUMERIC = new NumericDocValuesAnonymousClass();
 
-        private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues
+        private class NumericDocValuesAnonymousClass : NumericDocValues
         {
-            public NumericDocValuesAnonymousInnerClassHelper()
+            public NumericDocValuesAnonymousClass()
             {
             }
 
@@ -72,11 +72,11 @@
         /// <summary>
         /// An empty <see cref="SortedDocValues"/> which returns <see cref="BytesRef.EMPTY_BYTES"/> for every document
         /// </summary>
-        public static readonly SortedDocValues EMPTY_SORTED = new SortedDocValuesAnonymousInnerClassHelper();
+        public static readonly SortedDocValues EMPTY_SORTED = new SortedDocValuesAnonymousClass();
 
-        private class SortedDocValuesAnonymousInnerClassHelper : SortedDocValues
+        private class SortedDocValuesAnonymousClass : SortedDocValues
         {
-            public SortedDocValuesAnonymousInnerClassHelper()
+            public SortedDocValuesAnonymousClass()
             {
             }
 
@@ -98,11 +98,11 @@
         /// <summary>
         /// An empty <see cref="SortedDocValues"/> which returns <see cref="SortedSetDocValues.NO_MORE_ORDS"/> for every document
         /// </summary>
-        public static readonly SortedSetDocValues EMPTY_SORTED_SET = new RandomAccessOrdsAnonymousInnerClassHelper();
+        public static readonly SortedSetDocValues EMPTY_SORTED_SET = new RandomAccessOrdsAnonymousClass();
 
-        private class RandomAccessOrdsAnonymousInnerClassHelper : RandomAccessOrds
+        private class RandomAccessOrdsAnonymousClass : RandomAccessOrds
         {
-            public RandomAccessOrdsAnonymousInnerClassHelper()
+            public RandomAccessOrdsAnonymousClass()
             {
             }
 
@@ -162,15 +162,15 @@
         /// </summary>
         public static IBits DocsWithValue(SortedDocValues dv, int maxDoc)
         {
-            return new BitsAnonymousInnerClassHelper(dv, maxDoc);
+            return new BitsAnonymousClass(dv, maxDoc);
         }
 
-        private class BitsAnonymousInnerClassHelper : IBits
+        private class BitsAnonymousClass : IBits
         {
             private readonly SortedDocValues dv;
             private readonly int maxDoc;
 
-            public BitsAnonymousInnerClassHelper(SortedDocValues dv, int maxDoc)
+            public BitsAnonymousClass(SortedDocValues dv, int maxDoc)
             {
                 this.dv = dv;
                 this.maxDoc = maxDoc;
@@ -189,15 +189,15 @@
         /// </summary>
         public static IBits DocsWithValue(SortedSetDocValues dv, int maxDoc)
         {
-            return new BitsAnonymousInnerClassHelper2(dv, maxDoc);
+            return new BitsAnonymousClass2(dv, maxDoc);
         }
 
-        private class BitsAnonymousInnerClassHelper2 : IBits
+        private class BitsAnonymousClass2 : IBits
         {
             private readonly SortedSetDocValues dv;
             private readonly int maxDoc;
 
-            public BitsAnonymousInnerClassHelper2(SortedSetDocValues dv, int maxDoc)
+            public BitsAnonymousClass2(SortedSetDocValues dv, int maxDoc)
             {
                 this.dv = dv;
                 this.maxDoc = maxDoc;
diff --git a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
index cc16315..34cb27b 100644
--- a/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterFlushControl.cs
@@ -511,17 +511,17 @@
 
         private IEnumerator<ThreadState> GetPerThreadsIterator(int upto)
         {
-            return new IteratorAnonymousInnerClassHelper(this, upto);
+            return new IteratorAnonymousClass(this, upto);
         }
 
-        private class IteratorAnonymousInnerClassHelper : IEnumerator<ThreadState>
+        private class IteratorAnonymousClass : IEnumerator<ThreadState>
         {
             private readonly DocumentsWriterFlushControl outerInstance;
             private ThreadState current;
             private readonly int upto;
             private int i;
 
-            public IteratorAnonymousInnerClassHelper(DocumentsWriterFlushControl outerInstance, int upto)
+            public IteratorAnonymousClass(DocumentsWriterFlushControl outerInstance, int upto)
             {
                 this.outerInstance = outerInstance;
                 this.upto = upto;
diff --git a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs
index bea4cb5..e54ca15 100644
--- a/src/Lucene.Net/Index/DocumentsWriterPerThread.cs
+++ b/src/Lucene.Net/Index/DocumentsWriterPerThread.cs
@@ -56,13 +56,13 @@
             internal abstract DocConsumer GetChain(DocumentsWriterPerThread documentsWriterPerThread);
         }
 
-        private static readonly IndexingChain defaultIndexingChain = new IndexingChainAnonymousInnerClassHelper();
+        private static readonly IndexingChain defaultIndexingChain = new IndexingChainAnonymousClass();
 
         public static IndexingChain DefaultIndexingChain => defaultIndexingChain;
 
-        private class IndexingChainAnonymousInnerClassHelper : IndexingChain
+        private class IndexingChainAnonymousClass : IndexingChain
         {
-            public IndexingChainAnonymousInnerClassHelper()
+            public IndexingChainAnonymousClass()
             {
             }
 
diff --git a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
index eff6529..95cb5c6 100644
--- a/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
+++ b/src/Lucene.Net/Index/FreqProxTermsWriterPerField.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using J2N.Text;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Diagnostics;
@@ -544,7 +545,7 @@
                         }
                         else
                         {
-                            docID += (int)((uint)code >> 1);
+                            docID += code.TripleShift(1);
                             if ((code & 1) != 0)
                             {
                                 termFreq = 1;
@@ -610,7 +611,7 @@
                             if (readPositions)
                             {
                                 int code = prox.ReadVInt32();
-                                position += (int)((uint)code >> 1);
+                                position += code.TripleShift(1);
 
                                 if ((code & 1) != 0)
                                 {
diff --git a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs
index b4ee244..3a242f3 100644
--- a/src/Lucene.Net/Index/FrozenBufferedUpdates.cs
+++ b/src/Lucene.Net/Index/FrozenBufferedUpdates.cs
@@ -153,14 +153,14 @@
         // LUCENENET NOTE: This was termsIterable() in Lucene
         public virtual IEnumerable<Term> GetTermsEnumerable()
         {
-            return new IterableAnonymousInnerClassHelper(this);
+            return new IterableAnonymousClass(this);
         }
 
-        private class IterableAnonymousInnerClassHelper : IEnumerable<Term>
+        private class IterableAnonymousClass : IEnumerable<Term>
         {
             private readonly FrozenBufferedUpdates outerInstance;
 
-            public IterableAnonymousInnerClassHelper(FrozenBufferedUpdates outerInstance)
+            public IterableAnonymousClass(FrozenBufferedUpdates outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -179,21 +179,21 @@
         // LUCENENET NOTE: This was queriesIterable() in Lucene
         public virtual IEnumerable<QueryAndLimit> GetQueriesEnumerable()
         {
-            return new IterableAnonymousInnerClassHelper2(this);
+            return new IterableAnonymousClass2(this);
         }
 
-        private class IterableAnonymousInnerClassHelper2 : IEnumerable<QueryAndLimit>
+        private class IterableAnonymousClass2 : IEnumerable<QueryAndLimit>
         {
             private readonly FrozenBufferedUpdates outerInstance;
 
-            public IterableAnonymousInnerClassHelper2(FrozenBufferedUpdates outerInstance)
+            public IterableAnonymousClass2(FrozenBufferedUpdates outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
 
             public virtual IEnumerator<QueryAndLimit> GetEnumerator()
             {
-                return new IteratorAnonymousInnerClassHelper(this);
+                return new IteratorAnonymousClass(this);
             }
 
             IEnumerator IEnumerable.GetEnumerator()
@@ -201,14 +201,14 @@
                 return GetEnumerator();
             }
 
-            private class IteratorAnonymousInnerClassHelper : IEnumerator<QueryAndLimit>
+            private class IteratorAnonymousClass : IEnumerator<QueryAndLimit>
             {
-                private readonly IterableAnonymousInnerClassHelper2 outerInstance;
+                private readonly IterableAnonymousClass2 outerInstance;
                 private readonly int upto;
                 private int i;
                 private QueryAndLimit current;
 
-                public IteratorAnonymousInnerClassHelper(IterableAnonymousInnerClassHelper2 outerInstance)
+                public IteratorAnonymousClass(IterableAnonymousClass2 outerInstance)
                 {
                     this.outerInstance = outerInstance;
                     upto = this.outerInstance.outerInstance.queries.Length;
diff --git a/src/Lucene.Net/Index/IndexWriterConfig.cs b/src/Lucene.Net/Index/IndexWriterConfig.cs
index a525a9b..ef33649 100644
--- a/src/Lucene.Net/Index/IndexWriterConfig.cs
+++ b/src/Lucene.Net/Index/IndexWriterConfig.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Util;
+using Lucene.Net.Util;
 using System;
 using System.IO;
 using System.Text;
@@ -272,17 +272,6 @@
             set => similarity = value ?? throw new ArgumentNullException(nameof(value), "Similarity must not be null");
         }
 
-
-#if !FEATURE_CONCURRENTMERGESCHEDULER
-        /// <summary>
-        /// Expert: Gets or sets the merge scheduler used by this writer. The default is
-        /// <see cref="TaskMergeScheduler"/>.
-        /// <para/>
-        /// <b>NOTE:</b> the merge scheduler cannot be <c>null</c>.
-        ///
-        /// <para/>Only takes effect when <see cref="IndexWriter"/> is first created.
-        /// </summary>
-#else
         /// <summary>
         /// Expert: Gets or sets the merge scheduler used by this writer. The default is
         /// <see cref="ConcurrentMergeScheduler"/>.
@@ -291,7 +280,6 @@
         ///
         /// <para/>Only takes effect when <see cref="IndexWriter"/> is first created.
         /// </summary>
-#endif
         // LUCENENET NOTE: We cannot override a getter and add a setter, 
         // so must declare it new. See: http://stackoverflow.com/q/82437
         new public IMergeScheduler MergeScheduler
diff --git a/src/Lucene.Net/Index/LiveIndexWriterConfig.cs b/src/Lucene.Net/Index/LiveIndexWriterConfig.cs
index b564d45..a4f8c63 100644
--- a/src/Lucene.Net/Index/LiveIndexWriterConfig.cs
+++ b/src/Lucene.Net/Index/LiveIndexWriterConfig.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Util;
+using Lucene.Net.Util;
 using System;
 using System.Text;
 
@@ -153,11 +153,7 @@
             useCompoundFile = IndexWriterConfig.DEFAULT_USE_COMPOUND_FILE_SYSTEM;
             openMode = Index.OpenMode.CREATE_OR_APPEND;
             similarity = IndexSearcher.DefaultSimilarity;
-#if !FEATURE_CONCURRENTMERGESCHEDULER
-            mergeScheduler = new TaskMergeScheduler();
-#else
             mergeScheduler = new ConcurrentMergeScheduler();
-#endif
             writeLockTimeout = IndexWriterConfig.WRITE_LOCK_TIMEOUT;
             indexingChain = DocumentsWriterPerThread.DefaultIndexingChain;
             codec = Codec.Default;
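
Aside: with the FEATURE_CONCURRENTMERGESCHEDULER split removed, ConcurrentMergeScheduler is again the default merge scheduler on all targets. A hedged usage sketch of overriding it explicitly (the IndexWriterConfig/StandardAnalyzer/RAMDirectory usage is assumed standard 4.8 API and is shown only for illustration):

using Lucene.Net.Analysis.Standard;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

class MergeSchedulerDemo
{
    static void Main()
    {
        var analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48);
        var config = new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer)
        {
            // ConcurrentMergeScheduler is now the default; setting it explicitly is only for illustration.
            MergeScheduler = new ConcurrentMergeScheduler()
        };

        using var dir = new RAMDirectory();
        using var writer = new IndexWriter(dir, config);
        writer.Commit();
    }
}
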
diff --git a/src/Lucene.Net/Index/MergePolicy.cs b/src/Lucene.Net/Index/MergePolicy.cs
index ef63e99..5625dd6 100644
--- a/src/Lucene.Net/Index/MergePolicy.cs
+++ b/src/Lucene.Net/Index/MergePolicy.cs
@@ -213,10 +213,10 @@
             /// </summary>
             public virtual DocMap GetDocMap(MergeState mergeState)
             {
-                return new DocMapAnonymousInnerClassHelper();
+                return new DocMapAnonymousClass();
             }
 
-            private class DocMapAnonymousInnerClassHelper : DocMap
+            private class DocMapAnonymousClass : DocMap
             {
                 public override int Map(int docID)
                 {
diff --git a/src/Lucene.Net/Index/MergeState.cs b/src/Lucene.Net/Index/MergeState.cs
index 8434200..2d78809 100644
--- a/src/Lucene.Net/Index/MergeState.cs
+++ b/src/Lucene.Net/Index/MergeState.cs
@@ -96,17 +96,17 @@
                 docMap.Freeze();
                 int numDeletedDocs = del;
                 if (Debugging.AssertsEnabled) Debugging.Assert(docMap.Count == maxDoc);
-                return new DocMapAnonymousInnerClassHelper(maxDoc, liveDocs, docMap, numDeletedDocs);
+                return new DocMapAnonymousClass(maxDoc, liveDocs, docMap, numDeletedDocs);
             }
 
-            private class DocMapAnonymousInnerClassHelper : DocMap
+            private class DocMapAnonymousClass : DocMap
             {
                 private readonly int maxDoc;
                 private readonly IBits liveDocs;
                 private readonly MonotonicAppendingInt64Buffer docMap;
                 private readonly int numDeletedDocs;
 
-                public DocMapAnonymousInnerClassHelper(int maxDoc, IBits liveDocs, MonotonicAppendingInt64Buffer docMap, int numDeletedDocs)
+                public DocMapAnonymousClass(int maxDoc, IBits liveDocs, MonotonicAppendingInt64Buffer docMap, int numDeletedDocs)
                 {
                     this.maxDoc = maxDoc;
                     this.liveDocs = liveDocs;
@@ -249,11 +249,11 @@
         /// <para/>
         /// @lucene.internal
         /// </summary>
-        public static readonly CheckAbort NONE = new CheckAbortAnonymousInnerClassHelper();
+        public static readonly CheckAbort NONE = new CheckAbortAnonymousClass();
 
-        private class CheckAbortAnonymousInnerClassHelper : CheckAbort
+        private class CheckAbortAnonymousClass : CheckAbort
         {
-            public CheckAbortAnonymousInnerClassHelper()
+            public CheckAbortAnonymousClass()
                 : base(null, null)
             {
             }
diff --git a/src/Lucene.Net/Index/MultiDocValues.cs b/src/Lucene.Net/Index/MultiDocValues.cs
index a628b38..04045dc 100644
--- a/src/Lucene.Net/Index/MultiDocValues.cs
+++ b/src/Lucene.Net/Index/MultiDocValues.cs
@@ -95,15 +95,15 @@
 
             if (Debugging.AssertsEnabled) Debugging.Assert(anyReal);
 
-            return new NumericDocValuesAnonymousInnerClassHelper(values, starts);
+            return new NumericDocValuesAnonymousClass(values, starts);
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues
+        private class NumericDocValuesAnonymousClass : NumericDocValues
         {
             private readonly NumericDocValues[] values;
             private readonly int[] starts;
 
-            public NumericDocValuesAnonymousInnerClassHelper(NumericDocValues[] values, int[] starts)
+            public NumericDocValuesAnonymousClass(NumericDocValues[] values, int[] starts)
             {
                 this.values = values;
                 this.starts = starts;
@@ -162,16 +162,16 @@
             }
             else
             {
-                return new NumericDocValuesAnonymousInnerClassHelper2(values, starts);
+                return new NumericDocValuesAnonymousClass2(values, starts);
             }
         }
 
-        private class NumericDocValuesAnonymousInnerClassHelper2 : NumericDocValues
+        private class NumericDocValuesAnonymousClass2 : NumericDocValues
         {
             private readonly NumericDocValues[] values;
             private readonly int[] starts;
 
-            public NumericDocValuesAnonymousInnerClassHelper2(NumericDocValues[] values, int[] starts)
+            public NumericDocValuesAnonymousClass2(NumericDocValues[] values, int[] starts)
             {
                 this.values = values;
                 this.starts = starts;
@@ -291,16 +291,16 @@
             }
             else
             {
-                return new BinaryDocValuesAnonymousInnerClassHelper(values, starts);
+                return new BinaryDocValuesAnonymousClass(values, starts);
             }
         }
 
-        private class BinaryDocValuesAnonymousInnerClassHelper : BinaryDocValues
+        private class BinaryDocValuesAnonymousClass : BinaryDocValues
         {
             private readonly BinaryDocValues[] values;
             private readonly int[] starts;
 
-            public BinaryDocValuesAnonymousInnerClassHelper(BinaryDocValues[] values, int[] starts)
+            public BinaryDocValuesAnonymousClass(BinaryDocValues[] values, int[] starts)
             {
                 this.values = values;
                 this.starts = starts;
diff --git a/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs b/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs
index 6885b46..d4f4716 100644
--- a/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs
+++ b/src/Lucene.Net/Index/NumericDocValuesFieldUpdates.cs
@@ -145,18 +145,18 @@
             PagedMutable docs = this.docs;
             PagedGrowableWriter values = this.values;
             FixedBitSet docsWithField = this.docsWithField;
-            new InPlaceMergeSorterAnonymousInnerClassHelper(docs, values, docsWithField).Sort(0, size);
+            new InPlaceMergeSorterAnonymousClass(docs, values, docsWithField).Sort(0, size);
 
             return new Iterator(size, values, docsWithField, docs);
         }
 
-        private class InPlaceMergeSorterAnonymousInnerClassHelper : InPlaceMergeSorter
+        private class InPlaceMergeSorterAnonymousClass : InPlaceMergeSorter
         {
             private readonly PagedMutable docs;
             private readonly PagedGrowableWriter values;
             private readonly FixedBitSet docsWithField;
 
-            public InPlaceMergeSorterAnonymousInnerClassHelper(PagedMutable docs, PagedGrowableWriter values, FixedBitSet docsWithField)
+            public InPlaceMergeSorterAnonymousClass(PagedMutable docs, PagedGrowableWriter values, FixedBitSet docsWithField)
             {
                 this.docs = docs;
                 this.values = values;
diff --git a/src/Lucene.Net/Index/ParallelCompositeReader.cs b/src/Lucene.Net/Index/ParallelCompositeReader.cs
index 5679f9f..1bc75e2 100644
--- a/src/Lucene.Net/Index/ParallelCompositeReader.cs
+++ b/src/Lucene.Net/Index/ParallelCompositeReader.cs
@@ -141,7 +141,7 @@
                         }
                         // We pass true for closeSubs and we prevent closing of subreaders in doClose():
                         // By this the synthetic throw-away readers used here are completely invisible to ref-counting
-                        subReaders[i] = new ParallelAtomicReaderAnonymousInnerClassHelper(atomicSubs, storedSubs);
+                        subReaders[i] = new ParallelAtomicReaderAnonymousClass(atomicSubs, storedSubs);
                     }
                     else
                     {
@@ -158,16 +158,16 @@
                         }
                         // We pass true for closeSubs and we prevent closing of subreaders in doClose():
                         // By this the synthetic throw-away readers used here are completely invisible to ref-counting
-                        subReaders[i] = new ParallelCompositeReaderAnonymousInnerClassHelper(compositeSubs, storedSubs);
+                        subReaders[i] = new ParallelCompositeReaderAnonymousClass(compositeSubs, storedSubs);
                     }
                 }
                 return subReaders;
             }
         }
 
-        private class ParallelAtomicReaderAnonymousInnerClassHelper : ParallelAtomicReader
+        private class ParallelAtomicReaderAnonymousClass : ParallelAtomicReader
         {
-            public ParallelAtomicReaderAnonymousInnerClassHelper(Lucene.Net.Index.AtomicReader[] atomicSubs, Lucene.Net.Index.AtomicReader[] storedSubs)
+            public ParallelAtomicReaderAnonymousClass(Lucene.Net.Index.AtomicReader[] atomicSubs, Lucene.Net.Index.AtomicReader[] storedSubs)
                 : base(true, atomicSubs, storedSubs)
             {
             }
@@ -177,9 +177,9 @@
             }
         }
 
-        private class ParallelCompositeReaderAnonymousInnerClassHelper : ParallelCompositeReader
+        private class ParallelCompositeReaderAnonymousClass : ParallelCompositeReader
         {
-            public ParallelCompositeReaderAnonymousInnerClassHelper(Lucene.Net.Index.CompositeReader[] compositeSubs, Lucene.Net.Index.CompositeReader[] storedSubs)
+            public ParallelCompositeReaderAnonymousClass(Lucene.Net.Index.CompositeReader[] compositeSubs, Lucene.Net.Index.CompositeReader[] storedSubs)
                 : base(true, compositeSubs, storedSubs)
             {
             }
diff --git a/src/Lucene.Net/Index/ReaderUtil.cs b/src/Lucene.Net/Index/ReaderUtil.cs
index 914f3ba..6426498 100644
--- a/src/Lucene.Net/Index/ReaderUtil.cs
+++ b/src/Lucene.Net/Index/ReaderUtil.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using System.Collections.Generic;
 
 namespace Lucene.Net.Index
@@ -55,7 +56,7 @@
             int hi = size - 1; // for first element less than n, return its index
             while (hi >= lo)
             {
-                int mid = (int)((uint)(lo + hi) >> 1);
+                int mid = (lo + hi).TripleShift(1);
                 int midValue = docStarts[mid];
                 if (n < midValue)
                 {
@@ -89,7 +90,7 @@
             int hi = size - 1; // for first element less than n, return its index
             while (hi >= lo)
             {
-                int mid = (int)((uint)(lo + hi) >> 1);
+                int mid = (lo + hi).TripleShift(1);
                 int midValue = leaves[mid].DocBase;
                 if (n < midValue)
                 {
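
Aside: the midpoint changes above use TripleShift so the binary search still halves correctly when `lo + hi` overflows into a negative int, matching Java's `(lo + hi) >>> 1`. A standalone sketch with made-up values showing why the logical shift matters:

using System;
using J2N.Numerics;

class MidpointDemo
{
    static void Main()
    {
        int lo = 1_500_000_000, hi = 1_600_000_000;  // lo + hi overflows Int32

        int sum = unchecked(lo + hi);                // wraps to a negative value
        int badMid = sum >> 1;                       // arithmetic shift keeps the sign bit: wrong midpoint
        int goodMid = sum.TripleShift(1);            // logical shift, same as Java's (lo + hi) >>> 1

        Console.WriteLine($"{badMid} vs {goodMid}"); // prints "-597483648 vs 1550000000"
    }
}
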
diff --git a/src/Lucene.Net/Index/SegmentInfos.cs b/src/Lucene.Net/Index/SegmentInfos.cs
index bdfc679..2cea568 100644
--- a/src/Lucene.Net/Index/SegmentInfos.cs
+++ b/src/Lucene.Net/Index/SegmentInfos.cs
@@ -1,4 +1,4 @@
-using J2N;
+using J2N;
 using J2N.Collections.Generic.Extensions;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
@@ -470,14 +470,14 @@
         {
             generation = lastGeneration = -1;
 
-            new FindSegmentsFileAnonymousInnerClassHelper(this, directory).Run();
+            new FindSegmentsFileAnonymousClass(this, directory).Run();
         }
 
-        private class FindSegmentsFileAnonymousInnerClassHelper : FindSegmentsFile
+        private class FindSegmentsFileAnonymousClass : FindSegmentsFile
         {
             private readonly SegmentInfos outerInstance;
 
-            public FindSegmentsFileAnonymousInnerClassHelper(SegmentInfos outerInstance, Directory directory)
+            public FindSegmentsFileAnonymousClass(SegmentInfos outerInstance, Directory directory)
                 : base(directory)
             {
                 this.outerInstance = outerInstance;
@@ -672,7 +672,7 @@
 
                 output.WriteStringStringMap(si.Attributes);
 
-                output.WriteByte((byte)(sbyte)(si.UseCompoundFile ? SegmentInfo.YES : SegmentInfo.NO));
+                output.WriteByte((byte)(si.UseCompoundFile ? SegmentInfo.YES : SegmentInfo.NO));
                 output.WriteStringStringMap(si.Diagnostics);
                 output.WriteStringSet(si.GetFiles());
 
diff --git a/src/Lucene.Net/Index/SortedDocValues.cs b/src/Lucene.Net/Index/SortedDocValues.cs
index 5235ef7..ed21a45 100644
--- a/src/Lucene.Net/Index/SortedDocValues.cs
+++ b/src/Lucene.Net/Index/SortedDocValues.cs
@@ -1,3 +1,5 @@
+using J2N.Numerics;
+
 namespace Lucene.Net.Index
 {
     /*
@@ -87,7 +89,7 @@
 
             while (low <= high)
             {
-                int mid = (int)((uint)(low + high) >> 1);
+                int mid = (low + high).TripleShift(1);
                 LookupOrd(mid, spare);
                 int cmp = spare.CompareTo(key);
 
diff --git a/src/Lucene.Net/Index/SortedSetDocValues.cs b/src/Lucene.Net/Index/SortedSetDocValues.cs
index 9418ea8..b1ce6d0 100644
--- a/src/Lucene.Net/Index/SortedSetDocValues.cs
+++ b/src/Lucene.Net/Index/SortedSetDocValues.cs
@@ -1,3 +1,5 @@
+using J2N.Numerics;
+
 namespace Lucene.Net.Index
 {
     /*
@@ -83,7 +85,7 @@
 
             while (low <= high)
             {
-                long mid = (int)((uint)(low + high) >> 1);
+                long mid = (low + high).TripleShift(1);
                 LookupOrd(mid, spare);
                 int cmp = spare.CompareTo(key);
 
diff --git a/src/Lucene.Net/Index/StandardDirectoryReader.cs b/src/Lucene.Net/Index/StandardDirectoryReader.cs
index 9c022dc..33aac6b 100644
--- a/src/Lucene.Net/Index/StandardDirectoryReader.cs
+++ b/src/Lucene.Net/Index/StandardDirectoryReader.cs
@@ -50,14 +50,14 @@
         /// called from <c>DirectoryReader.Open(...)</c> methods </summary>
         internal static DirectoryReader Open(Directory directory, IndexCommit commit, int termInfosIndexDivisor)
         {
-            return (DirectoryReader)new FindSegmentsFileAnonymousInnerClassHelper(directory, termInfosIndexDivisor).Run(commit);
+            return (DirectoryReader)new FindSegmentsFileAnonymousClass(directory, termInfosIndexDivisor).Run(commit);
         }
 
-        private class FindSegmentsFileAnonymousInnerClassHelper : SegmentInfos.FindSegmentsFile
+        private class FindSegmentsFileAnonymousClass : SegmentInfos.FindSegmentsFile
         {
             private readonly int termInfosIndexDivisor;
 
-            public FindSegmentsFileAnonymousInnerClassHelper(Directory directory, int termInfosIndexDivisor)
+            public FindSegmentsFileAnonymousClass(Directory directory, int termInfosIndexDivisor)
                 : base(directory)
             {
                 this.termInfosIndexDivisor = termInfosIndexDivisor;
@@ -401,14 +401,14 @@
 
         private DirectoryReader DoOpenFromCommit(IndexCommit commit)
         {
-            return (DirectoryReader)new FindSegmentsFileAnonymousInnerClassHelper2(this, m_directory).Run(commit);
+            return (DirectoryReader)new FindSegmentsFileAnonymousClass2(this, m_directory).Run(commit);
         }
 
-        private class FindSegmentsFileAnonymousInnerClassHelper2 : SegmentInfos.FindSegmentsFile
+        private class FindSegmentsFileAnonymousClass2 : SegmentInfos.FindSegmentsFile
         {
             private readonly StandardDirectoryReader outerInstance;
 
-            public FindSegmentsFileAnonymousInnerClassHelper2(StandardDirectoryReader outerInstance, Directory directory)
+            public FindSegmentsFileAnonymousClass2(StandardDirectoryReader outerInstance, Directory directory)
                 : base(directory)
             {
                 this.outerInstance = outerInstance;
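The rename from *AnonymousInnerClassHelper to *AnonymousClass that runs through the rest of this patch follows one mechanical pattern: Java anonymous inner classes become named private nested classes that capture the enclosing instance and any locals through their constructor. A minimal sketch of the convention (Outer and Base are invented names; the real classes follow the shape shown in the hunks above):

    public abstract class Base
    {
        protected Base(int arg) { }
        public abstract int DoWork();
    }

    public class Outer
    {
        // Java: return new Base(arg) { ... };  C# has no anonymous classes,
        // so the port names the nested type after its base class.
        public Base Create(int arg) => new BaseAnonymousClass(this, arg);

        private class BaseAnonymousClass : Base
        {
            private readonly Outer outerInstance; // replaces Java's implicit outer reference

            public BaseAnonymousClass(Outer outerInstance, int arg)
                : base(arg)
            {
                this.outerInstance = outerInstance;
            }

            public override int DoWork() => outerInstance.GetHashCode();
        }
    }
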
diff --git a/src/Lucene.Net/Index/Terms.cs b/src/Lucene.Net/Index/Terms.cs
index b016ff2..b391e48 100644
--- a/src/Lucene.Net/Index/Terms.cs
+++ b/src/Lucene.Net/Index/Terms.cs
@@ -93,15 +93,15 @@
             }
             else
             {
-                return new AutomatonTermsEnumAnonymousInnerClassHelper(GetEnumerator(), compiled, startTerm);
+                return new AutomatonTermsEnumAnonymousClass(GetEnumerator(), compiled, startTerm);
             }
         }
 
-        private class AutomatonTermsEnumAnonymousInnerClassHelper : AutomatonTermsEnum
+        private class AutomatonTermsEnumAnonymousClass : AutomatonTermsEnum
         {
             private readonly BytesRef startTerm;
 
-            public AutomatonTermsEnumAnonymousInnerClassHelper(TermsEnum iterator, CompiledAutomaton compiled, BytesRef startTerm)
+            public AutomatonTermsEnumAnonymousClass(TermsEnum iterator, CompiledAutomaton compiled, BytesRef startTerm)
                 : base(iterator, compiled)
             {
                 this.startTerm = startTerm;
diff --git a/src/Lucene.Net/Index/TermsEnum.cs b/src/Lucene.Net/Index/TermsEnum.cs
index 8937433..25a9a28 100644
--- a/src/Lucene.Net/Index/TermsEnum.cs
+++ b/src/Lucene.Net/Index/TermsEnum.cs
@@ -268,10 +268,10 @@
         /// <seealso cref="SeekExact(BytesRef, TermState)"/>
         public virtual TermState GetTermState() // LUCENENET NOTE: Renamed from TermState()
         {
-            return new TermStateAnonymousInnerClassHelper();
+            return new TermStateAnonymousClass();
         }
 
-        private class TermStateAnonymousInnerClassHelper : TermState
+        private class TermStateAnonymousClass : TermState
         {
             public override void CopyFrom(TermState other)
             {
@@ -287,9 +287,9 @@
         /// This should not be a problem, as the enum is always empty and
         /// the existence of unused Attributes does not matter.
         /// </summary>
-        public static readonly TermsEnum EMPTY = new TermsEnumAnonymousInnerClassHelper();
+        public static readonly TermsEnum EMPTY = new TermsEnumAnonymousClass();
 
-        private class TermsEnumAnonymousInnerClassHelper : TermsEnum
+        private class TermsEnumAnonymousClass : TermsEnum
         {
             public override SeekStatus SeekCeil(BytesRef term)
             {
diff --git a/src/Lucene.Net/Index/TermsHashPerField.cs b/src/Lucene.Net/Index/TermsHashPerField.cs
index 5d7429c..59b0ba2 100644
--- a/src/Lucene.Net/Index/TermsHashPerField.cs
+++ b/src/Lucene.Net/Index/TermsHashPerField.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Diagnostics;
 using System;
@@ -282,11 +283,6 @@
         internal int[] intUptos;
         internal int intUptoStart;
 
-        internal void WriteByte(int stream, sbyte b)
-        {
-            WriteByte(stream, (byte)b);
-        }
-
         internal void WriteByte(int stream, byte b)
         {
             int upto = intUptos[intUptoStart + stream];
@@ -322,10 +318,10 @@
             if (Debugging.AssertsEnabled) Debugging.Assert(stream < streamCount);
             while ((i & ~0x7F) != 0)
             {
-                WriteByte(stream, (sbyte)((i & 0x7f) | 0x80));
-                i = (int)((uint)i >> 7);
+                WriteByte(stream, (byte)((i & 0x7f) | 0x80));
+                i = i.TripleShift(7);
             }
-            WriteByte(stream, (sbyte)i);
+            WriteByte(stream, (byte)i);
         }
 
         internal override void Finish()
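The hunk above keeps Lucene's variable-length (VInt) encoding loop but drops the sbyte overload and the uint-cast shift: the low 7 bits are written with the high bit set as a continuation flag until fewer than 8 significant bits remain. A standalone sketch of the same scheme (the VIntSketch wrapper and its output list are invented; the loop body mirrors the hunk):

    using System;
    using System.Collections.Generic;
    using J2N.Numerics;

    public static class VIntSketch
    {
        // 7 bits per byte, high bit = "another byte follows".
        public static List<byte> WriteVInt32(int i)
        {
            var bytes = new List<byte>();
            while ((i & ~0x7F) != 0)
            {
                bytes.Add((byte)((i & 0x7F) | 0x80));
                i = i.TripleShift(7);
            }
            bytes.Add((byte)i);
            return bytes;
        }

        public static void Main()
        {
            Console.WriteLine(string.Join(" ", WriteVInt32(300))); // 172 2
        }
    }
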
diff --git a/src/Lucene.Net/Index/TieredMergePolicy.cs b/src/Lucene.Net/Index/TieredMergePolicy.cs
index 5df6a74..1524d9f 100644
--- a/src/Lucene.Net/Index/TieredMergePolicy.cs
+++ b/src/Lucene.Net/Index/TieredMergePolicy.cs
@@ -545,16 +545,16 @@
 
             double finalMergeScore = mergeScore;
 
-            return new MergeScoreAnonymousInnerClassHelper(skew, nonDelRatio, finalMergeScore);
+            return new MergeScoreAnonymousClass(skew, nonDelRatio, finalMergeScore);
         }
 
-        private class MergeScoreAnonymousInnerClassHelper : MergeScore
+        private class MergeScoreAnonymousClass : MergeScore
         {
             private readonly double skew;
             private readonly double nonDelRatio;
             private readonly double finalMergeScore;
 
-            public MergeScoreAnonymousInnerClassHelper(double skew, double nonDelRatio, double finalMergeScore)
+            public MergeScoreAnonymousClass(double skew, double nonDelRatio, double finalMergeScore)
             {
                 this.skew = skew;
                 this.nonDelRatio = nonDelRatio;
diff --git a/src/Lucene.Net/Search/BooleanScorer2.cs b/src/Lucene.Net/Search/BooleanScorer2.cs
index b549911..0414f54 100644
--- a/src/Lucene.Net/Search/BooleanScorer2.cs
+++ b/src/Lucene.Net/Search/BooleanScorer2.cs
@@ -167,20 +167,20 @@
             // each scorer from the list counted as a single matcher
             if (minNrShouldMatch > 1)
             {
-                return new MinShouldMatchSumScorerAnonymousInnerClassHelper(this, m_weight, scorers, minNrShouldMatch);
+                return new MinShouldMatchSumScorerAnonymousClass(this, m_weight, scorers, minNrShouldMatch);
             }
             else
             {
                 // we pass null for coord[] since we coordinate ourselves and override score()
-                return new DisjunctionSumScorerAnonymousInnerClassHelper(this, m_weight, scorers.ToArray(), null);
+                return new DisjunctionSumScorerAnonymousClass(this, m_weight, scorers.ToArray(), null);
             }
         }
 
-        private class MinShouldMatchSumScorerAnonymousInnerClassHelper : MinShouldMatchSumScorer
+        private class MinShouldMatchSumScorerAnonymousClass : MinShouldMatchSumScorer
         {
             private readonly BooleanScorer2 outerInstance;
 
-            public MinShouldMatchSumScorerAnonymousInnerClassHelper(BooleanScorer2 outerInstance, Weight weight, IList<Scorer> scorers, int minNrShouldMatch)
+            public MinShouldMatchSumScorerAnonymousClass(BooleanScorer2 outerInstance, Weight weight, IList<Scorer> scorers, int minNrShouldMatch)
                 : base(weight, scorers, minNrShouldMatch)
             {
                 this.outerInstance = outerInstance;
@@ -193,11 +193,11 @@
             }
         }
 
-        private class DisjunctionSumScorerAnonymousInnerClassHelper : DisjunctionSumScorer
+        private class DisjunctionSumScorerAnonymousClass : DisjunctionSumScorer
         {
             private readonly BooleanScorer2 outerInstance;
 
-            public DisjunctionSumScorerAnonymousInnerClassHelper(BooleanScorer2 outerInstance, Weight weight, Scorer[] subScorers, float[] coord)
+            public DisjunctionSumScorerAnonymousClass(BooleanScorer2 outerInstance, Weight weight, Scorer[] subScorers, float[] coord)
                 : base(weight, subScorers, coord)
             {
                 this.outerInstance = outerInstance;
@@ -214,16 +214,16 @@
         {
             // each scorer from the list counted as a single matcher
             int requiredNrMatchers = requiredScorers.Count;
-            return new ConjunctionScorerAnonymousInnerClassHelper(this, m_weight, requiredScorers.ToArray(), requiredNrMatchers);
+            return new ConjunctionScorerAnonymousClass(this, m_weight, requiredScorers.ToArray(), requiredNrMatchers);
         }
 
-        private class ConjunctionScorerAnonymousInnerClassHelper : ConjunctionScorer
+        private class ConjunctionScorerAnonymousClass : ConjunctionScorer
         {
             private readonly BooleanScorer2 outerInstance;
 
             private readonly int requiredNrMatchers;
 
-            public ConjunctionScorerAnonymousInnerClassHelper(BooleanScorer2 outerInstance, Weight weight, Scorer[] scorers, int requiredNrMatchers)
+            public ConjunctionScorerAnonymousClass(BooleanScorer2 outerInstance, Weight weight, Scorer[] scorers, int requiredNrMatchers)
                 : base(weight, scorers)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net/Search/CachingCollector.cs b/src/Lucene.Net/Search/CachingCollector.cs
index 0c74d23..cefff44 100644
--- a/src/Lucene.Net/Search/CachingCollector.cs
+++ b/src/Lucene.Net/Search/CachingCollector.cs
@@ -374,15 +374,15 @@
         ///          whether documents are allowed to be collected out-of-order </param>
         public static CachingCollector Create(bool acceptDocsOutOfOrder, bool cacheScores, double maxRAMMB)
         {
-            ICollector other = new CollectorAnonymousInnerClassHelper(acceptDocsOutOfOrder);
+            ICollector other = new CollectorAnonymousClass(acceptDocsOutOfOrder);
             return Create(other, cacheScores, maxRAMMB);
         }
 
-        private class CollectorAnonymousInnerClassHelper : ICollector
+        private class CollectorAnonymousClass : ICollector
         {
             private readonly bool acceptDocsOutOfOrder;
 
-            public CollectorAnonymousInnerClassHelper(bool acceptDocsOutOfOrder)
+            public CollectorAnonymousClass(bool acceptDocsOutOfOrder)
             {
                 this.acceptDocsOutOfOrder = acceptDocsOutOfOrder;
             }
diff --git a/src/Lucene.Net/Search/CachingWrapperFilter.cs b/src/Lucene.Net/Search/CachingWrapperFilter.cs
index 9e990a7..22600eb 100644
--- a/src/Lucene.Net/Search/CachingWrapperFilter.cs
+++ b/src/Lucene.Net/Search/CachingWrapperFilter.cs
@@ -150,9 +150,9 @@
 
         /// <summary>
         /// An empty <see cref="DocIdSet"/> instance </summary>
-        protected static readonly DocIdSet EMPTY_DOCIDSET = new DocIdSetAnonymousInnerClassHelper();
+        protected static readonly DocIdSet EMPTY_DOCIDSET = new DocIdSetAnonymousClass();
 
-        private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+        private class DocIdSetAnonymousClass : DocIdSet
         {
             public override DocIdSetIterator GetIterator()
             {
diff --git a/src/Lucene.Net/Search/ConstantScoreQuery.cs b/src/Lucene.Net/Search/ConstantScoreQuery.cs
index f97a722..12b8266 100644
--- a/src/Lucene.Net/Search/ConstantScoreQuery.cs
+++ b/src/Lucene.Net/Search/ConstantScoreQuery.cs
@@ -249,16 +249,16 @@
 
             private ICollector WrapCollector(ICollector collector)
             {
-                return new CollectorAnonymousInnerClassHelper(this, collector);
+                return new CollectorAnonymousClass(this, collector);
             }
 
-            private class CollectorAnonymousInnerClassHelper : ICollector
+            private class CollectorAnonymousClass : ICollector
             {
                 private readonly ConstantBulkScorer outerInstance;
 
                 private readonly ICollector collector;
 
-                public CollectorAnonymousInnerClassHelper(ConstantBulkScorer outerInstance, ICollector collector)
+                public CollectorAnonymousClass(ConstantBulkScorer outerInstance, ICollector collector)
                 {
                     this.outerInstance = outerInstance;
                     this.collector = collector;
diff --git a/src/Lucene.Net/Search/DocIdSetIterator.cs b/src/Lucene.Net/Search/DocIdSetIterator.cs
index 5f3986c..e51c372 100644
--- a/src/Lucene.Net/Search/DocIdSetIterator.cs
+++ b/src/Lucene.Net/Search/DocIdSetIterator.cs
@@ -32,12 +32,12 @@
         /// An empty <see cref="DocIdSetIterator"/> instance </summary>
         public static DocIdSetIterator GetEmpty()
         {
-            return new DocIdSetIteratorAnonymousInnerClassHelper();
+            return new DocIdSetIteratorAnonymousClass();
         }
 
-        private class DocIdSetIteratorAnonymousInnerClassHelper : DocIdSetIterator
+        private class DocIdSetIteratorAnonymousClass : DocIdSetIterator
         {
-            public DocIdSetIteratorAnonymousInnerClassHelper()
+            public DocIdSetIteratorAnonymousClass()
             {
             }
 
diff --git a/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs b/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs
index e2d0d20..1c494bc 100644
--- a/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs
+++ b/src/Lucene.Net/Search/DocTermOrdsRangeFilter.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Text;
@@ -62,12 +63,12 @@
         /// </summary>
         public static DocTermOrdsRangeFilter NewBytesRefRange(string field, BytesRef lowerVal, BytesRef upperVal, bool includeLower, bool includeUpper)
         {
-            return new DocTermOrdsRangeFilterAnonymousInnerClassHelper(field, lowerVal, upperVal, includeLower, includeUpper);
+            return new DocTermOrdsRangeFilterAnonymousClass(field, lowerVal, upperVal, includeLower, includeUpper);
         }
 
-        private class DocTermOrdsRangeFilterAnonymousInnerClassHelper : DocTermOrdsRangeFilter
+        private class DocTermOrdsRangeFilterAnonymousClass : DocTermOrdsRangeFilter
         {
-            public DocTermOrdsRangeFilterAnonymousInnerClassHelper(string field, BytesRef lowerVal, BytesRef upperVal, bool includeLower, bool includeUpper)
+            public DocTermOrdsRangeFilterAnonymousClass(string field, BytesRef lowerVal, BytesRef upperVal, bool includeLower, bool includeUpper)
                 : base(field, lowerVal, upperVal, includeLower, includeUpper)
             {
             }
@@ -187,7 +188,7 @@
         {
             int h = field.GetHashCode();
             h ^= (lowerVal != null) ? lowerVal.GetHashCode() : 550356204;
-            h = (h << 1) | ((int)((uint)h >> 31)); // rotate to distinguish lower from upper
+            h = (h << 1) | (h.TripleShift(31)); // rotate to distinguish lower from upper
             h ^= (upperVal != null) ? upperVal.GetHashCode() : -1674416163;
             h ^= (includeLower ? 1549299360 : -365038026) ^ (includeUpper ? 1721088258 : 1948649653);
             return h;
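The GetHashCode hunks here and below (FieldCacheRangeFilter, SpanFirstQuery, SpanNearPayloadCheckQuery, SpanNearQuery, SpanOrQuery) use the same rewrite: the shifted pair (h << n) | h.TripleShift(32 - n) is an n-bit left rotation of h, used either directly (rotate by 1 so the lower and upper bounds contribute differently) or XORed back into h as a reversible mix in the span queries. A minimal sketch of the 1-bit case (RotateSketch is an invented name):

    using System;
    using J2N.Numerics;

    public static class RotateSketch
    {
        // 1-bit left rotation: the sign bit wraps around to bit 0.
        public static int RotateLeft1(int h) => (h << 1) | h.TripleShift(31);

        public static void Main()
        {
            int h = unchecked((int)0x80000001);
            Console.WriteLine(RotateLeft1(h).ToString("X8")); // 00000003
        }
    }
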
diff --git a/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs b/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs
index 88f941f..69c9baf 100644
--- a/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs
+++ b/src/Lucene.Net/Search/DocTermOrdsRewriteMethod.cs
@@ -99,7 +99,7 @@
                 SortedSetDocValues docTermOrds = FieldCache.DEFAULT.GetDocTermOrds((context.AtomicReader), m_query.m_field);
                 // Cannot use FixedBitSet because we require long index (ord):
                 Int64BitSet termSet = new Int64BitSet(docTermOrds.ValueCount);
-                TermsEnum termsEnum = m_query.GetTermsEnum(new TermsAnonymousInnerClassHelper(docTermOrds));
+                TermsEnum termsEnum = m_query.GetTermsEnum(new TermsAnonymousClass(docTermOrds));
 
                 if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum != null);
                 if (termsEnum.MoveNext())
@@ -130,11 +130,11 @@
                 });
             }
 
-            private class TermsAnonymousInnerClassHelper : Terms
+            private class TermsAnonymousClass : Terms
             {
                 private readonly SortedSetDocValues docTermOrds;
 
-                public TermsAnonymousInnerClassHelper(SortedSetDocValues docTermOrds)
+                public TermsAnonymousClass(SortedSetDocValues docTermOrds)
                 {
                     this.docTermOrds = docTermOrds;
                 }
diff --git a/src/Lucene.Net/Search/FieldCacheDocIdSet.cs b/src/Lucene.Net/Search/FieldCacheDocIdSet.cs
index 68ad720..9f033a9 100644
--- a/src/Lucene.Net/Search/FieldCacheDocIdSet.cs
+++ b/src/Lucene.Net/Search/FieldCacheDocIdSet.cs
@@ -67,13 +67,13 @@
         /// </summary>
         public override sealed bool IsCacheable => true;
 
-        public override sealed IBits Bits => (m_acceptDocs == null) ? (IBits)new BitsAnonymousInnerClassHelper(this) : new BitsAnonymousInnerClassHelper2(this);
+        public override sealed IBits Bits => (m_acceptDocs == null) ? (IBits)new BitsAnonymousClass(this) : new BitsAnonymousClass2(this);
 
-        private class BitsAnonymousInnerClassHelper : IBits
+        private class BitsAnonymousClass : IBits
         {
             private readonly FieldCacheDocIdSet outerInstance;
 
-            public BitsAnonymousInnerClassHelper(FieldCacheDocIdSet outerInstance)
+            public BitsAnonymousClass(FieldCacheDocIdSet outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -86,11 +86,11 @@
             public virtual int Length => outerInstance.m_maxDoc;
         }
 
-        private class BitsAnonymousInnerClassHelper2 : IBits
+        private class BitsAnonymousClass2 : IBits
         {
             private readonly FieldCacheDocIdSet outerInstance;
 
-            public BitsAnonymousInnerClassHelper2(FieldCacheDocIdSet outerInstance)
+            public BitsAnonymousClass2(FieldCacheDocIdSet outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -108,26 +108,26 @@
             if (m_acceptDocs == null)
             {
                 // Specialization optimization disregard acceptDocs
-                return new DocIdSetIteratorAnonymousInnerClassHelper(this);
+                return new DocIdSetIteratorAnonymousClass(this);
             }
             else if (m_acceptDocs is FixedBitSet || m_acceptDocs is OpenBitSet)
             {
                 // special case for FixedBitSet / OpenBitSet: use the iterator and filter it
                 // (used e.g. when Filters are chained by FilteredQuery)
-                return new FilteredDocIdSetIteratorAnonymousInnerClassHelper(this, ((DocIdSet)m_acceptDocs).GetIterator());
+                return new FilteredDocIdSetIteratorAnonymousClass(this, ((DocIdSet)m_acceptDocs).GetIterator());
             }
             else
             {
                 // Stupid consultation of acceptDocs and matchDoc()
-                return new DocIdSetIteratorAnonymousInnerClassHelper2(this);
+                return new DocIdSetIteratorAnonymousClass2(this);
             }
         }
 
-        private class DocIdSetIteratorAnonymousInnerClassHelper : DocIdSetIterator
+        private class DocIdSetIteratorAnonymousClass : DocIdSetIterator
         {
             private readonly FieldCacheDocIdSet outerInstance;
 
-            public DocIdSetIteratorAnonymousInnerClassHelper(FieldCacheDocIdSet outerInstance)
+            public DocIdSetIteratorAnonymousClass(FieldCacheDocIdSet outerInstance)
             {
                 this.outerInstance = outerInstance;
                 doc = -1;
@@ -168,11 +168,11 @@
             }
         }
 
-        private class FilteredDocIdSetIteratorAnonymousInnerClassHelper : FilteredDocIdSetIterator
+        private class FilteredDocIdSetIteratorAnonymousClass : FilteredDocIdSetIterator
         {
             private readonly FieldCacheDocIdSet outerInstance;
 
-            public FilteredDocIdSetIteratorAnonymousInnerClassHelper(FieldCacheDocIdSet outerInstance, Lucene.Net.Search.DocIdSetIterator iterator)
+            public FilteredDocIdSetIteratorAnonymousClass(FieldCacheDocIdSet outerInstance, Lucene.Net.Search.DocIdSetIterator iterator)
                 : base(iterator)
             {
                 this.outerInstance = outerInstance;
@@ -184,11 +184,11 @@
             }
         }
 
-        private class DocIdSetIteratorAnonymousInnerClassHelper2 : DocIdSetIterator
+        private class DocIdSetIteratorAnonymousClass2 : DocIdSetIterator
         {
             private readonly FieldCacheDocIdSet outerInstance;
 
-            public DocIdSetIteratorAnonymousInnerClassHelper2(FieldCacheDocIdSet outerInstance)
+            public DocIdSetIteratorAnonymousClass2(FieldCacheDocIdSet outerInstance)
             {
                 this.outerInstance = outerInstance;
                 doc = -1;
diff --git a/src/Lucene.Net/Search/FieldCacheImpl.cs b/src/Lucene.Net/Search/FieldCacheImpl.cs
index f00a57c..64f2d97 100644
--- a/src/Lucene.Net/Search/FieldCacheImpl.cs
+++ b/src/Lucene.Net/Search/FieldCacheImpl.cs
@@ -74,8 +74,8 @@
             Init();
 
             //Have to do this here because no 'this' in class definition
-            purgeCore = new CoreClosedListenerAnonymousInnerClassHelper(this);
-            purgeReader = new ReaderClosedListenerAnonymousInnerClassHelper(this);
+            purgeCore = new CoreClosedListenerAnonymousClass(this);
+            purgeReader = new ReaderClosedListenerAnonymousClass(this);
         }
 
         private void Init()
@@ -168,11 +168,11 @@
         // per-segment fieldcaches don't purge until the shared core closes.
         internal readonly SegmentReader.ICoreDisposedListener purgeCore;
 
-        private class CoreClosedListenerAnonymousInnerClassHelper : SegmentReader.ICoreDisposedListener
+        private class CoreClosedListenerAnonymousClass : SegmentReader.ICoreDisposedListener
         {
             private readonly FieldCacheImpl outerInstance;
 
-            public CoreClosedListenerAnonymousInnerClassHelper(FieldCacheImpl outerInstance)
+            public CoreClosedListenerAnonymousClass(FieldCacheImpl outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -186,11 +186,11 @@
         // composite/SlowMultiReaderWrapper fieldcaches don't purge until composite reader is closed.
         internal readonly IndexReader.IReaderClosedListener purgeReader;
 
-        private class ReaderClosedListenerAnonymousInnerClassHelper : IndexReader.IReaderClosedListener
+        private class ReaderClosedListenerAnonymousClass : IndexReader.IReaderClosedListener
         {
             private readonly FieldCacheImpl outerInstance;
 
-            public ReaderClosedListenerAnonymousInnerClassHelper(FieldCacheImpl outerInstance)
+            public ReaderClosedListenerAnonymousClass(FieldCacheImpl outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -580,7 +580,7 @@
             {
                 // Not cached here by FieldCacheImpl (cached instead
                 // per-thread by SegmentReader):
-                return new FieldCache_BytesAnonymousInnerClassHelper(valuesIn);
+                return new FieldCache_BytesAnonymousClass(valuesIn);
             }
             else
             {
@@ -604,11 +604,11 @@
             }
         }
 
-        private class FieldCache_BytesAnonymousInnerClassHelper : FieldCache.Bytes
+        private class FieldCache_BytesAnonymousClass : FieldCache.Bytes
         {
             private readonly NumericDocValues valuesIn;
 
-            public FieldCache_BytesAnonymousInnerClassHelper(NumericDocValues valuesIn)
+            public FieldCache_BytesAnonymousClass(NumericDocValues valuesIn)
             {
                 this.valuesIn = valuesIn;
             }
@@ -664,7 +664,7 @@
 
                 values = new sbyte[maxDoc];
 
-                Uninvert u = new UninvertAnonymousInnerClassHelper(values, parser);
+                Uninvert u = new UninvertAnonymousClass(values, parser);
 
                 u.DoUninvert(reader, key.field, setDocsWithField);
 
@@ -676,13 +676,13 @@
                 return new BytesFromArray(values);
             }
 
-            private class UninvertAnonymousInnerClassHelper : Uninvert
+            private class UninvertAnonymousClass : Uninvert
             {
                 private readonly sbyte[] values;
 #pragma warning disable 612, 618
                 private readonly FieldCache.IByteParser parser;
 
-                public UninvertAnonymousInnerClassHelper(sbyte[] values, FieldCache.IByteParser parser)
+                public UninvertAnonymousClass(sbyte[] values, FieldCache.IByteParser parser)
 #pragma warning restore 612, 618
                 {
                     this.values = values;
@@ -753,7 +753,7 @@
             {
                 // Not cached here by FieldCacheImpl (cached instead
                 // per-thread by SegmentReader):
-                return new FieldCache_Int16sAnonymousInnerClassHelper(valuesIn);
+                return new FieldCache_Int16sAnonymousClass(valuesIn);
             }
             else
             {
@@ -775,11 +775,11 @@
             }
         }
 
-        private class FieldCache_Int16sAnonymousInnerClassHelper : FieldCache.Int16s
+        private class FieldCache_Int16sAnonymousClass : FieldCache.Int16s
         {
             private readonly NumericDocValues valuesIn;
 
-            public FieldCache_Int16sAnonymousInnerClassHelper(NumericDocValues valuesIn)
+            public FieldCache_Int16sAnonymousClass(NumericDocValues valuesIn)
             {
                 this.valuesIn = valuesIn;
             }
@@ -838,7 +838,7 @@
 #pragma warning restore 612, 618
 
                 values = new short[maxDoc];
-                Uninvert u = new UninvertAnonymousInnerClassHelper(values, parser);
+                Uninvert u = new UninvertAnonymousClass(values, parser);
 
                 u.DoUninvert(reader, key.field, setDocsWithField);
 
@@ -849,13 +849,13 @@
                 return new Int16sFromArray(values);
             }
 
-            private class UninvertAnonymousInnerClassHelper : Uninvert
+            private class UninvertAnonymousClass : Uninvert
             {
                 private readonly short[] values;
 #pragma warning disable 612, 618
                 private readonly FieldCache.IInt16Parser parser;
 
-                public UninvertAnonymousInnerClassHelper(short[] values, FieldCache.IInt16Parser parser)
+                public UninvertAnonymousClass(short[] values, FieldCache.IInt16Parser parser)
 #pragma warning restore 612, 618
                 {
                     this.values = values;
@@ -925,7 +925,7 @@
             {
                 // Not cached here by FieldCacheImpl (cached instead
                 // per-thread by SegmentReader):
-                return new FieldCache_Int32sAnonymousInnerClassHelper(valuesIn);
+                return new FieldCache_Int32sAnonymousClass(valuesIn);
             }
             else
             {
@@ -947,11 +947,11 @@
             }
         }
 
-        private class FieldCache_Int32sAnonymousInnerClassHelper : FieldCache.Int32s
+        private class FieldCache_Int32sAnonymousClass : FieldCache.Int32s
         {
             private readonly NumericDocValues valuesIn;
 
-            public FieldCache_Int32sAnonymousInnerClassHelper(NumericDocValues valuesIn)
+            public FieldCache_Int32sAnonymousClass(NumericDocValues valuesIn)
             {
                 this.valuesIn = valuesIn;
             }
@@ -1045,7 +1045,7 @@
 
                 HoldsOneThing<GrowableWriterAndMinValue> valuesRef = new HoldsOneThing<GrowableWriterAndMinValue>();
 
-                Uninvert u = new UninvertAnonymousInnerClassHelper(reader, parser, valuesRef);
+                Uninvert u = new UninvertAnonymousClass(reader, parser, valuesRef);
 
                 u.DoUninvert(reader, key.field, setDocsWithField);
 
@@ -1061,13 +1061,13 @@
                 return new Int32sFromArray(values.Writer.Mutable, (int)values.MinValue);
             }
 
-            private class UninvertAnonymousInnerClassHelper : Uninvert
+            private class UninvertAnonymousClass : Uninvert
             {
                 private readonly AtomicReader reader;
                 private readonly FieldCache.IInt32Parser parser;
                 private readonly FieldCacheImpl.HoldsOneThing<GrowableWriterAndMinValue> valuesRef;
 
-                public UninvertAnonymousInnerClassHelper(AtomicReader reader, FieldCache.IInt32Parser parser, FieldCacheImpl.HoldsOneThing<GrowableWriterAndMinValue> valuesRef)
+                public UninvertAnonymousClass(AtomicReader reader, FieldCache.IInt32Parser parser, FieldCacheImpl.HoldsOneThing<GrowableWriterAndMinValue> valuesRef)
                 {
                     this.reader = reader;
                     this.parser = parser;
@@ -1220,7 +1220,7 @@
             {
                 // Not cached here by FieldCacheImpl (cached instead
                 // per-thread by SegmentReader):
-                return new FieldCache_SinglesAnonymousInnerClassHelper(valuesIn);
+                return new FieldCache_SinglesAnonymousClass(valuesIn);
             }
             else
             {
@@ -1242,11 +1242,11 @@
             }
         }
 
-        private class FieldCache_SinglesAnonymousInnerClassHelper : FieldCache.Singles
+        private class FieldCache_SinglesAnonymousClass : FieldCache.Singles
         {
             private readonly NumericDocValues valuesIn;
 
-            public FieldCache_SinglesAnonymousInnerClassHelper(NumericDocValues valuesIn)
+            public FieldCache_SinglesAnonymousClass(NumericDocValues valuesIn)
             {
                 this.valuesIn = valuesIn;
             }
@@ -1309,7 +1309,7 @@
 
                 HoldsOneThing<float[]> valuesRef = new HoldsOneThing<float[]>();
 
-                Uninvert u = new UninvertAnonymousInnerClassHelper(reader, parser, valuesRef);
+                Uninvert u = new UninvertAnonymousClass(reader, parser, valuesRef);
 
                 u.DoUninvert(reader, key.field, setDocsWithField);
 
@@ -1326,13 +1326,13 @@
                 return new SinglesFromArray(values);
             }
 
-            private class UninvertAnonymousInnerClassHelper : Uninvert
+            private class UninvertAnonymousClass : Uninvert
             {
                 private readonly AtomicReader reader;
                 private readonly FieldCache.ISingleParser parser;
                 private readonly FieldCacheImpl.HoldsOneThing<float[]> valuesRef;
 
-                public UninvertAnonymousInnerClassHelper(AtomicReader reader, FieldCache.ISingleParser parser, FieldCacheImpl.HoldsOneThing<float[]> valuesRef)
+                public UninvertAnonymousClass(AtomicReader reader, FieldCache.ISingleParser parser, FieldCacheImpl.HoldsOneThing<float[]> valuesRef)
                 {
                     this.reader = reader;
                     this.parser = parser;
@@ -1386,7 +1386,7 @@
             {
                 // Not cached here by FieldCacheImpl (cached instead
                 // per-thread by SegmentReader):
-                return new FieldCache_Int64sAnonymousInnerClassHelper(valuesIn);
+                return new FieldCache_Int64sAnonymousClass(valuesIn);
             }
             else
             {
@@ -1408,11 +1408,11 @@
             }
         }
 
-        private class FieldCache_Int64sAnonymousInnerClassHelper : FieldCache.Int64s
+        private class FieldCache_Int64sAnonymousClass : FieldCache.Int64s
         {
             private readonly NumericDocValues valuesIn;
 
-            public FieldCache_Int64sAnonymousInnerClassHelper(NumericDocValues valuesIn)
+            public FieldCache_Int64sAnonymousClass(NumericDocValues valuesIn)
             {
                 this.valuesIn = valuesIn;
             }
@@ -1477,7 +1477,7 @@
 
                 HoldsOneThing<GrowableWriterAndMinValue> valuesRef = new HoldsOneThing<GrowableWriterAndMinValue>();
 
-                Uninvert u = new UninvertAnonymousInnerClassHelper(reader, parser, valuesRef);
+                Uninvert u = new UninvertAnonymousClass(reader, parser, valuesRef);
 
                 u.DoUninvert(reader, key.field, setDocsWithField);
 
@@ -1493,13 +1493,13 @@
                 return new Int64sFromArray(values.Writer.Mutable, values.MinValue);
             }
 
-            private class UninvertAnonymousInnerClassHelper : Uninvert
+            private class UninvertAnonymousClass : Uninvert
             {
                 private readonly AtomicReader reader;
                 private readonly FieldCache.IInt64Parser parser;
                 private readonly FieldCacheImpl.HoldsOneThing<GrowableWriterAndMinValue> valuesRef;
 
-                public UninvertAnonymousInnerClassHelper(AtomicReader reader, FieldCache.IInt64Parser parser, FieldCacheImpl.HoldsOneThing<GrowableWriterAndMinValue> valuesRef)
+                public UninvertAnonymousClass(AtomicReader reader, FieldCache.IInt64Parser parser, FieldCacheImpl.HoldsOneThing<GrowableWriterAndMinValue> valuesRef)
                 {
                     this.reader = reader;
                     this.parser = parser;
@@ -1564,7 +1564,7 @@
             {
                 // Not cached here by FieldCacheImpl (cached instead
                 // per-thread by SegmentReader):
-                return new FieldCache_DoublesAnonymousInnerClassHelper(valuesIn);
+                return new FieldCache_DoublesAnonymousClass(valuesIn);
             }
             else
             {
@@ -1586,11 +1586,11 @@
             }
         }
 
-        private class FieldCache_DoublesAnonymousInnerClassHelper : FieldCache.Doubles
+        private class FieldCache_DoublesAnonymousClass : FieldCache.Doubles
         {
             private readonly NumericDocValues valuesIn;
 
-            public FieldCache_DoublesAnonymousInnerClassHelper(NumericDocValues valuesIn)
+            public FieldCache_DoublesAnonymousClass(NumericDocValues valuesIn)
             {
                 this.valuesIn = valuesIn;
             }
@@ -1647,7 +1647,7 @@
 
                 HoldsOneThing<double[]> valuesRef = new HoldsOneThing<double[]>();
 
-                Uninvert u = new UninvertAnonymousInnerClassHelper(reader, parser, valuesRef);
+                Uninvert u = new UninvertAnonymousClass(reader, parser, valuesRef);
 
                 u.DoUninvert(reader, key.field, setDocsWithField);
 
@@ -1663,13 +1663,13 @@
                 return new DoublesFromArray(values);
             }
 
-            private class UninvertAnonymousInnerClassHelper : Uninvert
+            private class UninvertAnonymousClass : Uninvert
             {
                 private readonly AtomicReader reader;
                 private readonly FieldCache.IDoubleParser parser;
                 private readonly FieldCacheImpl.HoldsOneThing<double[]> valuesRef;
 
-                public UninvertAnonymousInnerClassHelper(AtomicReader reader, FieldCache.IDoubleParser parser, FieldCacheImpl.HoldsOneThing<double[]> valuesRef)
+                public UninvertAnonymousClass(AtomicReader reader, FieldCache.IDoubleParser parser, FieldCacheImpl.HoldsOneThing<double[]> valuesRef)
                 {
                     this.reader = reader;
                     this.parser = parser;
@@ -2032,18 +2032,18 @@
                 PackedInt32s.Reader offsetReader = docToOffset.Mutable;
                 if (setDocsWithField)
                 {
-                    wrapper.SetDocsWithField(reader, key.field, new BitsAnonymousInnerClassHelper(maxDoc, offsetReader));
+                    wrapper.SetDocsWithField(reader, key.field, new BitsAnonymousClass(maxDoc, offsetReader));
                 }
                 // maybe an int-only impl?
                 return new BinaryDocValuesImpl(bytes.Freeze(true), offsetReader);
             }
 
-            private class BitsAnonymousInnerClassHelper : IBits
+            private class BitsAnonymousClass : IBits
             {
                 private readonly int maxDoc;
                 private readonly PackedInt32s.Reader offsetReader;
 
-                public BitsAnonymousInnerClassHelper(int maxDoc, PackedInt32s.Reader offsetReader)
+                public BitsAnonymousClass(int maxDoc, PackedInt32s.Reader offsetReader)
                 {
                     this.maxDoc = maxDoc;
                     this.offsetReader = offsetReader;
diff --git a/src/Lucene.Net/Search/FieldCacheRangeFilter.cs b/src/Lucene.Net/Search/FieldCacheRangeFilter.cs
index 76dcdb9..5d38226 100644
--- a/src/Lucene.Net/Search/FieldCacheRangeFilter.cs
+++ b/src/Lucene.Net/Search/FieldCacheRangeFilter.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Text;
@@ -746,7 +747,7 @@
         {
             int h = field.GetHashCode();
             h ^= (lowerVal != null) ? lowerVal.GetHashCode() : 550356204;
-            h = (h << 1) | ((int)((uint)h >> 31)); // rotate to distinguish lower from upper
+            h = (h << 1) | (h.TripleShift(31)); // rotate to distinguish lower from upper
             h ^= (upperVal != null) ? upperVal.GetHashCode() : -1674416163;
             h ^= (parser != null) ? parser.GetHashCode() : -1572457324;
             h ^= (includeLower ? 1549299360 : -365038026) ^ (includeUpper ? 1721088258 : 1948649653);
diff --git a/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs b/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs
index 65653d5..dcb1f8b 100644
--- a/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs
+++ b/src/Lucene.Net/Search/FieldCacheRewriteMethod.cs
@@ -99,7 +99,7 @@
                 SortedDocValues fcsi = FieldCache.DEFAULT.GetTermsIndex((context.AtomicReader), m_query.m_field);
                 // Cannot use FixedBitSet because we require long index (ord):
                 Int64BitSet termSet = new Int64BitSet(fcsi.ValueCount);
-                TermsEnum termsEnum = m_query.GetTermsEnum(new TermsAnonymousInnerClassHelper(fcsi));
+                TermsEnum termsEnum = m_query.GetTermsEnum(new TermsAnonymousClass(fcsi));
 
                 if (Debugging.AssertsEnabled) Debugging.Assert(termsEnum != null);
                 if (termsEnum.MoveNext())
@@ -130,11 +130,11 @@
                 });
             }
 
-            private class TermsAnonymousInnerClassHelper : Terms
+            private class TermsAnonymousClass : Terms
             {
                 private readonly SortedDocValues fcsi;
 
-                public TermsAnonymousInnerClassHelper(SortedDocValues fcsi)
+                public TermsAnonymousClass(SortedDocValues fcsi)
                 {
                     this.fcsi = fcsi;
                 }
diff --git a/src/Lucene.Net/Search/FilteredDocIdSet.cs b/src/Lucene.Net/Search/FilteredDocIdSet.cs
index aa5c46b..3f73be2 100644
--- a/src/Lucene.Net/Search/FilteredDocIdSet.cs
+++ b/src/Lucene.Net/Search/FilteredDocIdSet.cs
@@ -57,17 +57,17 @@
             get
             {
                 IBits bits = innerSet.Bits;
-                return (bits == null) ? null : new BitsAnonymousInnerClassHelper(this, bits);
+                return (bits == null) ? null : new BitsAnonymousClass(this, bits);
             }
         }
 
-        private class BitsAnonymousInnerClassHelper : IBits
+        private class BitsAnonymousClass : IBits
         {
             private readonly FilteredDocIdSet outerInstance;
 
             private readonly IBits bits;
 
-            public BitsAnonymousInnerClassHelper(FilteredDocIdSet outerInstance, IBits bits)
+            public BitsAnonymousClass(FilteredDocIdSet outerInstance, IBits bits)
             {
                 this.outerInstance = outerInstance;
                 this.bits = bits;
@@ -98,14 +98,14 @@
             {
                 return null;
             }
-            return new FilteredDocIdSetIteratorAnonymousInnerClassHelper(this, iterator);
+            return new FilteredDocIdSetIteratorAnonymousClass(this, iterator);
         }
 
-        private class FilteredDocIdSetIteratorAnonymousInnerClassHelper : FilteredDocIdSetIterator
+        private class FilteredDocIdSetIteratorAnonymousClass : FilteredDocIdSetIterator
         {
             private readonly FilteredDocIdSet outerInstance;
 
-            public FilteredDocIdSetIteratorAnonymousInnerClassHelper(FilteredDocIdSet outerInstance, Lucene.Net.Search.DocIdSetIterator iterator)
+            public FilteredDocIdSetIteratorAnonymousClass(FilteredDocIdSet outerInstance, Lucene.Net.Search.DocIdSetIterator iterator)
                 : base(iterator)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net/Search/FilteredQuery.cs b/src/Lucene.Net/Search/FilteredQuery.cs
index 1df1e47..3c9c8fe 100644
--- a/src/Lucene.Net/Search/FilteredQuery.cs
+++ b/src/Lucene.Net/Search/FilteredQuery.cs
@@ -77,16 +77,16 @@
         public override Weight CreateWeight(IndexSearcher searcher)
         {
             Weight weight = query.CreateWeight(searcher);
-            return new WeightAnonymousInnerClassHelper(this, weight);
+            return new WeightAnonymousClass(this, weight);
         }
 
-        private class WeightAnonymousInnerClassHelper : Weight
+        private class WeightAnonymousClass : Weight
         {
             private readonly FilteredQuery outerInstance;
 
             private readonly Weight weight;
 
-            public WeightAnonymousInnerClassHelper(FilteredQuery outerInstance, Weight weight)
+            public WeightAnonymousClass(FilteredQuery outerInstance, Weight weight)
             {
                 this.outerInstance = outerInstance;
                 this.weight = weight;
diff --git a/src/Lucene.Net/Search/MultiTermQuery.cs b/src/Lucene.Net/Search/MultiTermQuery.cs
index 2b99091..fce0c69 100644
--- a/src/Lucene.Net/Search/MultiTermQuery.cs
+++ b/src/Lucene.Net/Search/MultiTermQuery.cs
@@ -92,11 +92,11 @@
         /// exception.
         /// </summary>
         /// <seealso cref="MultiTermRewriteMethod"/>
-        public static readonly RewriteMethod CONSTANT_SCORE_FILTER_REWRITE = new RewriteMethodAnonymousInnerClassHelper();
+        public static readonly RewriteMethod CONSTANT_SCORE_FILTER_REWRITE = new RewriteMethodAnonymousClass();
 
-        private class RewriteMethodAnonymousInnerClassHelper : RewriteMethod
+        private class RewriteMethodAnonymousClass : RewriteMethod
         {
-            public RewriteMethodAnonymousInnerClassHelper()
+            public RewriteMethodAnonymousClass()
             {
             }
 
@@ -242,11 +242,11 @@
         /// instance; you'll need to create a private instance
         /// instead.
         /// </summary>
-        public static readonly RewriteMethod CONSTANT_SCORE_AUTO_REWRITE_DEFAULT = new ConstantScoreAutoRewriteAnonymousInnerClassHelper();
+        public static readonly RewriteMethod CONSTANT_SCORE_AUTO_REWRITE_DEFAULT = new ConstantScoreAutoRewriteAnonymousClass();
 
-        private class ConstantScoreAutoRewriteAnonymousInnerClassHelper : ConstantScoreAutoRewrite
+        private class ConstantScoreAutoRewriteAnonymousClass : ConstantScoreAutoRewrite
         {
-            public ConstantScoreAutoRewriteAnonymousInnerClassHelper()
+            public ConstantScoreAutoRewriteAnonymousClass()
             {
             }
 
diff --git a/src/Lucene.Net/Search/NumericRangeQuery.cs b/src/Lucene.Net/Search/NumericRangeQuery.cs
index 70f7cde..ebcf8cf 100644
--- a/src/Lucene.Net/Search/NumericRangeQuery.cs
+++ b/src/Lucene.Net/Search/NumericRangeQuery.cs
@@ -351,7 +351,7 @@
                                 maxBound--;
                             }
 
-                            NumericUtils.SplitInt64Range(new Int64RangeBuilderAnonymousInnerClassHelper(this), this.outerInstance.precisionStep, minBound, maxBound);
+                            NumericUtils.SplitInt64Range(new Int64RangeBuilderAnonymousClass(this), this.outerInstance.precisionStep, minBound, maxBound);
                             break;
                         }
 
@@ -398,7 +398,7 @@
                                 maxBound--;
                             }
 
-                            NumericUtils.SplitInt32Range(new Int32RangeBuilderAnonymousInnerClassHelper(this), this.outerInstance.precisionStep, minBound, maxBound);
+                            NumericUtils.SplitInt32Range(new Int32RangeBuilderAnonymousClass(this), this.outerInstance.precisionStep, minBound, maxBound);
                             break;
                         }
 
@@ -410,11 +410,11 @@
                 termComp = Comparer;
             }
 
-            private class Int64RangeBuilderAnonymousInnerClassHelper : NumericUtils.Int64RangeBuilder
+            private class Int64RangeBuilderAnonymousClass : NumericUtils.Int64RangeBuilder
             {
                 private readonly NumericRangeTermsEnum outerInstance;
 
-                public Int64RangeBuilderAnonymousInnerClassHelper(NumericRangeTermsEnum outerInstance)
+                public Int64RangeBuilderAnonymousClass(NumericRangeTermsEnum outerInstance)
                 {
                     this.outerInstance = outerInstance;
                 }
@@ -426,11 +426,11 @@
                 }
             }
 
-            private class Int32RangeBuilderAnonymousInnerClassHelper : NumericUtils.Int32RangeBuilder
+            private class Int32RangeBuilderAnonymousClass : NumericUtils.Int32RangeBuilder
             {
                 private readonly NumericRangeTermsEnum outerInstance;
 
-                public Int32RangeBuilderAnonymousInnerClassHelper(NumericRangeTermsEnum outerInstance)
+                public Int32RangeBuilderAnonymousClass(NumericRangeTermsEnum outerInstance)
                 {
                     this.outerInstance = outerInstance;
                 }
diff --git a/src/Lucene.Net/Search/QueryRescorer.cs b/src/Lucene.Net/Search/QueryRescorer.cs
index 7a83e9f..9dd70d0 100644
--- a/src/Lucene.Net/Search/QueryRescorer.cs
+++ b/src/Lucene.Net/Search/QueryRescorer.cs
@@ -183,14 +183,14 @@
         /// </summary>
         public static TopDocs Rescore(IndexSearcher searcher, TopDocs topDocs, Query query, double weight, int topN)
         {
-            return new QueryRescorerAnonymousInnerClassHelper(query, weight).Rescore(searcher, topDocs, topN);
+            return new QueryRescorerAnonymousClass(query, weight).Rescore(searcher, topDocs, topN);
         }
 
-        private class QueryRescorerAnonymousInnerClassHelper : QueryRescorer
+        private class QueryRescorerAnonymousClass : QueryRescorer
         {
             private readonly double weight;
 
-            public QueryRescorerAnonymousInnerClassHelper(Lucene.Net.Search.Query query, double weight)
+            public QueryRescorerAnonymousClass(Lucene.Net.Search.Query query, double weight)
                 : base(query)
             {
                 this.weight = weight;
diff --git a/src/Lucene.Net/Search/QueryWrapperFilter.cs b/src/Lucene.Net/Search/QueryWrapperFilter.cs
index e23781c..4802df8 100644
--- a/src/Lucene.Net/Search/QueryWrapperFilter.cs
+++ b/src/Lucene.Net/Search/QueryWrapperFilter.cs
@@ -53,16 +53,16 @@
             // get a private context that is used to rewrite, createWeight and score eventually
             AtomicReaderContext privateContext = context.AtomicReader.AtomicContext;
             Weight weight = (new IndexSearcher(privateContext)).CreateNormalizedWeight(query);
-            return new DocIdSetAnonymousInnerClassHelper(acceptDocs, privateContext, weight);
+            return new DocIdSetAnonymousClass(acceptDocs, privateContext, weight);
         }
 
-        private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+        private class DocIdSetAnonymousClass : DocIdSet
         {
             private readonly IBits acceptDocs;
             private readonly AtomicReaderContext privateContext;
             private readonly Weight weight;
 
-            public DocIdSetAnonymousInnerClassHelper(IBits acceptDocs, AtomicReaderContext privateContext, Weight weight)
+            public DocIdSetAnonymousClass(IBits acceptDocs, AtomicReaderContext privateContext, Weight weight)
             {
                 this.acceptDocs = acceptDocs;
                 this.privateContext = privateContext;
diff --git a/src/Lucene.Net/Search/RegexpQuery.cs b/src/Lucene.Net/Search/RegexpQuery.cs
index 237301d..53c57c3 100644
--- a/src/Lucene.Net/Search/RegexpQuery.cs
+++ b/src/Lucene.Net/Search/RegexpQuery.cs
@@ -54,9 +54,9 @@
         /// <summary>
         /// A provider that provides no named automata
         /// </summary>
-        private static readonly IAutomatonProvider defaultProvider = new AutomatonProviderAnonymousInnerClassHelper();
+        private static readonly IAutomatonProvider defaultProvider = new AutomatonProviderAnonymousClass();
 
-        private class AutomatonProviderAnonymousInnerClassHelper : IAutomatonProvider
+        private class AutomatonProviderAnonymousClass : IAutomatonProvider
         {
             public Automaton GetAutomaton(string name)
             {
diff --git a/src/Lucene.Net/Search/ScoringRewrite.cs b/src/Lucene.Net/Search/ScoringRewrite.cs
index fc2fde3..67de649 100644
--- a/src/Lucene.Net/Search/ScoringRewrite.cs
+++ b/src/Lucene.Net/Search/ScoringRewrite.cs
@@ -54,11 +54,11 @@
         /// exceeds <see cref="BooleanQuery.MaxClauseCount"/>.
         /// </summary>
         ///  <seealso cref="MultiTermQuery.MultiTermRewriteMethod"/>
-        public static readonly ScoringRewrite<BooleanQuery> SCORING_BOOLEAN_QUERY_REWRITE = new ScoringRewriteAnonymousInnerClassHelper();
+        public static readonly ScoringRewrite<BooleanQuery> SCORING_BOOLEAN_QUERY_REWRITE = new ScoringRewriteAnonymousClass();
 
-        private class ScoringRewriteAnonymousInnerClassHelper : ScoringRewrite<BooleanQuery>
+        private class ScoringRewriteAnonymousClass : ScoringRewrite<BooleanQuery>
         {
-            public ScoringRewriteAnonymousInnerClassHelper()
+            public ScoringRewriteAnonymousClass()
             {
             }
 
@@ -94,11 +94,11 @@
         /// exceeds <see cref="BooleanQuery.MaxClauseCount"/>.
         /// </summary>
         /// <seealso cref="MultiTermQuery.MultiTermRewriteMethod"/>
-        public static readonly RewriteMethod CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE = new RewriteMethodAnonymousInnerClassHelper();
+        public static readonly RewriteMethod CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE = new RewriteMethodAnonymousClass();
 
-        private class RewriteMethodAnonymousInnerClassHelper : RewriteMethod
+        private class RewriteMethodAnonymousClass : RewriteMethod
         {
-            public RewriteMethodAnonymousInnerClassHelper()
+            public RewriteMethodAnonymousClass()
             {
             }
 
diff --git a/src/Lucene.Net/Search/SortField.cs b/src/Lucene.Net/Search/SortField.cs
index f2f201f..fa180bd 100644
--- a/src/Lucene.Net/Search/SortField.cs
+++ b/src/Lucene.Net/Search/SortField.cs
@@ -170,11 +170,11 @@
         /// Pass this to <see cref="MissingValue"/> to have missing
         /// string values sort first.
         /// </summary>
-        public static readonly object STRING_FIRST = new ObjectAnonymousInnerClassHelper();
+        public static readonly object STRING_FIRST = new ObjectAnonymousClass();
 
-        private class ObjectAnonymousInnerClassHelper : object
+        private class ObjectAnonymousClass : object
         {
-            public ObjectAnonymousInnerClassHelper()
+            public ObjectAnonymousClass()
             {
             }
 
@@ -188,11 +188,11 @@
         /// Pass this to <see cref="MissingValue"/> to have missing
         /// string values sort last.
         /// </summary>
-        public static readonly object STRING_LAST = new ObjectAnonymousInnerClassHelper2();
+        public static readonly object STRING_LAST = new ObjectAnonymousClass2();
 
-        private class ObjectAnonymousInnerClassHelper2 : object
+        private class ObjectAnonymousClass2 : object
         {
-            public ObjectAnonymousInnerClassHelper2()
+            public ObjectAnonymousClass2()
             {
             }
 
diff --git a/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs b/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs
index 815a3a1..da58a09 100644
--- a/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs
+++ b/src/Lucene.Net/Search/Spans/NearSpansOrdered.cs
@@ -81,11 +81,11 @@
         // perform better since it has a lower overhead than TimSorter for small arrays
         private readonly InPlaceMergeSorter sorter;
 
-        private class InPlaceMergeSorterAnonymousInnerClassHelper : InPlaceMergeSorter
+        private class InPlaceMergeSorterAnonymousClass : InPlaceMergeSorter
         {
             private readonly NearSpansOrdered outerInstance;
 
-            public InPlaceMergeSorterAnonymousInnerClassHelper(NearSpansOrdered outerInstance)
+            public InPlaceMergeSorterAnonymousClass(NearSpansOrdered outerInstance)
             {
                 this.outerInstance = outerInstance;
             }
@@ -111,7 +111,7 @@
 
         public NearSpansOrdered(SpanNearQuery spanNearQuery, AtomicReaderContext context, IBits acceptDocs, IDictionary<Term, TermContext> termContexts, bool collectPayloads)
         {
-            sorter = new InPlaceMergeSorterAnonymousInnerClassHelper(this);
+            sorter = new InPlaceMergeSorterAnonymousClass(this);
             if (spanNearQuery.GetClauses().Length < 2)
             {
                 throw new ArgumentException("Less than 2 clauses: " + spanNearQuery);
diff --git a/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs b/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs
index 4a8d9dc..57d6964 100644
--- a/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs
+++ b/src/Lucene.Net/Search/Spans/SpanFirstQuery.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System.Text;
 
@@ -93,7 +94,7 @@
         public override int GetHashCode()
         {
             int h = m_match.GetHashCode();
-            h ^= (h << 8) | ((int)((uint)h >> 25)); // reversible
+            h ^= (h << 8) | (h.TripleShift(25)); // reversible
             h ^= J2N.BitConversion.SingleToRawInt32Bits(Boost) ^ m_end;
             return h;
         }
diff --git a/src/Lucene.Net/Search/Spans/SpanMultiTermQueryWrapper.cs b/src/Lucene.Net/Search/Spans/SpanMultiTermQueryWrapper.cs
index b31ed9b..453fe22 100644
--- a/src/Lucene.Net/Search/Spans/SpanMultiTermQueryWrapper.cs
+++ b/src/Lucene.Net/Search/Spans/SpanMultiTermQueryWrapper.cs
@@ -161,19 +161,19 @@
         /// scores as computed by the query.
         /// </summary>
         /// <seealso cref="MultiTermRewriteMethod"/>
-        public static readonly SpanRewriteMethod SCORING_SPAN_QUERY_REWRITE = new SpanRewriteMethodAnonymousInnerClassHelper();
+        public static readonly SpanRewriteMethod SCORING_SPAN_QUERY_REWRITE = new SpanRewriteMethodAnonymousClass();
 
-        private class SpanRewriteMethodAnonymousInnerClassHelper : SpanRewriteMethod
+        private class SpanRewriteMethodAnonymousClass : SpanRewriteMethod
         {
-            public SpanRewriteMethodAnonymousInnerClassHelper()
+            public SpanRewriteMethodAnonymousClass()
             {
             }
 
-            private readonly ScoringRewrite<SpanOrQuery> @delegate = new ScoringRewriteAnonymousInnerClassHelper();
+            private readonly ScoringRewrite<SpanOrQuery> @delegate = new ScoringRewriteAnonymousClass();
 
-            private class ScoringRewriteAnonymousInnerClassHelper : ScoringRewrite<SpanOrQuery>
+            private class ScoringRewriteAnonymousClass : ScoringRewrite<SpanOrQuery>
             {
-                public ScoringRewriteAnonymousInnerClassHelper()
+                public ScoringRewriteAnonymousClass()
                 {
                 }
 
@@ -224,12 +224,12 @@
             /// </summary>
             public TopTermsSpanBooleanQueryRewrite(int size)
             {
-                @delegate = new TopTermsRewriteAnonymousInnerClassHelper(size);
+                @delegate = new TopTermsRewriteAnonymousClass(size);
             }
 
-            private class TopTermsRewriteAnonymousInnerClassHelper : TopTermsRewrite<SpanOrQuery>
+            private class TopTermsRewriteAnonymousClass : TopTermsRewrite<SpanOrQuery>
             {
-                public TopTermsRewriteAnonymousInnerClassHelper(int size)
+                public TopTermsRewriteAnonymousClass(int size)
                     : base(size)
                 {
                 }
diff --git a/src/Lucene.Net/Search/Spans/SpanNearPayloadCheckQuery.cs b/src/Lucene.Net/Search/Spans/SpanNearPayloadCheckQuery.cs
index 13087b5..73a8f49 100644
--- a/src/Lucene.Net/Search/Spans/SpanNearPayloadCheckQuery.cs
+++ b/src/Lucene.Net/Search/Spans/SpanNearPayloadCheckQuery.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Support;
 using System.Collections;
 using System.Collections.Generic;
@@ -137,7 +138,7 @@
         public override int GetHashCode()
         {
             int h = m_match.GetHashCode();
-            h ^= (h << 8) | ((int)((uint)h >> 25)); // reversible
+            h ^= (h << 8) | (h.TripleShift(25)); // reversible
             //TODO: is this right?
             h ^= payloadEqualityComparer.GetHashCode(m_payloadToMatch); // LUCENENET NOTE: Need to use the structural equality comparer to compare equality of all contained values
             h ^= J2N.BitConversion.SingleToRawInt32Bits(Boost);
diff --git a/src/Lucene.Net/Search/Spans/SpanNearQuery.cs b/src/Lucene.Net/Search/Spans/SpanNearQuery.cs
index e846b6e..f244154 100644
--- a/src/Lucene.Net/Search/Spans/SpanNearQuery.cs
+++ b/src/Lucene.Net/Search/Spans/SpanNearQuery.cs
@@ -1,4 +1,5 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
+using J2N.Numerics;
 using System;
 using System.Collections.Generic;
 using System.Text;
@@ -221,7 +222,7 @@
             // Mix bits before folding in things like boost, since it could cancel the
             // last element of clauses.  this particular mix also serves to
             // differentiate SpanNearQuery hashcodes from others.
-            result ^= (result << 14) | ((int)((uint)result >> 19)); // reversible
+            result ^= (result << 14) | (result.TripleShift(19)); // reversible
             result += J2N.BitConversion.SingleToRawInt32Bits(Boost);
             result += m_slop;
             result ^= (m_inOrder ? unchecked((int)0x99AFD3BD) : 0);
diff --git a/src/Lucene.Net/Search/Spans/SpanNotQuery.cs b/src/Lucene.Net/Search/Spans/SpanNotQuery.cs
index ab15d25..06b5bd6 100644
--- a/src/Lucene.Net/Search/Spans/SpanNotQuery.cs
+++ b/src/Lucene.Net/Search/Spans/SpanNotQuery.cs
@@ -120,14 +120,14 @@
 
         public override Spans GetSpans(AtomicReaderContext context, IBits acceptDocs, IDictionary<Term, TermContext> termContexts)
         {
-            return new SpansAnonymousInnerClassHelper(this, context, acceptDocs, termContexts);
+            return new SpansAnonymousClass(this, context, acceptDocs, termContexts);
         }
 
-        private class SpansAnonymousInnerClassHelper : Spans
+        private class SpansAnonymousClass : Spans
         {
             private readonly SpanNotQuery outerInstance;
 
-            public SpansAnonymousInnerClassHelper(SpanNotQuery outerInstance, AtomicReaderContext context, IBits acceptDocs, IDictionary<Term, TermContext> termContexts)
+            public SpansAnonymousClass(SpanNotQuery outerInstance, AtomicReaderContext context, IBits acceptDocs, IDictionary<Term, TermContext> termContexts)
             {
                 this.outerInstance = outerInstance;
                 includeSpans = outerInstance.include.GetSpans(context, acceptDocs, termContexts);
diff --git a/src/Lucene.Net/Search/Spans/SpanOrQuery.cs b/src/Lucene.Net/Search/Spans/SpanOrQuery.cs
index 9f298dd..aa6cad5 100644
--- a/src/Lucene.Net/Search/Spans/SpanOrQuery.cs
+++ b/src/Lucene.Net/Search/Spans/SpanOrQuery.cs
@@ -1,4 +1,5 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
+using J2N.Numerics;
 using System;
 using System.Collections.Generic;
 using System.Text;
@@ -172,7 +173,7 @@
         {
             //If this doesn't work, hash all elements together instead. This version was used to reduce time complexity
             int h = clauses.GetHashCode();
-            h ^= (h << 10) | ((int)(((uint)h) >> 23));
+            h ^= (h << 10) | (h.TripleShift(23));
             h ^= J2N.BitConversion.SingleToRawInt32Bits(Boost);
             return h;
         }
@@ -211,10 +212,10 @@
                 return (clauses[0]).GetSpans(context, acceptDocs, termContexts);
             }
 
-            return new SpansAnonymousInnerClassHelper(this, context, acceptDocs, termContexts);
+            return new SpansAnonymousClass(this, context, acceptDocs, termContexts);
         }
 
-        private class SpansAnonymousInnerClassHelper : Spans
+        private class SpansAnonymousClass : Spans
         {
             private readonly SpanOrQuery outerInstance;
 
@@ -222,7 +223,7 @@
             private readonly IBits acceptDocs;
             private readonly IDictionary<Term, TermContext> termContexts;
 
-            public SpansAnonymousInnerClassHelper(SpanOrQuery outerInstance, AtomicReaderContext context, IBits acceptDocs, IDictionary<Term, TermContext> termContexts)
+            public SpansAnonymousClass(SpanOrQuery outerInstance, AtomicReaderContext context, IBits acceptDocs, IDictionary<Term, TermContext> termContexts)
             {
                 this.outerInstance = outerInstance;
                 this.context = context;
diff --git a/src/Lucene.Net/Search/Spans/SpanPayloadCheckQuery.cs b/src/Lucene.Net/Search/Spans/SpanPayloadCheckQuery.cs
index 3685f7c..9fb0dc2 100644
--- a/src/Lucene.Net/Search/Spans/SpanPayloadCheckQuery.cs
+++ b/src/Lucene.Net/Search/Spans/SpanPayloadCheckQuery.cs
@@ -1,8 +1,9 @@
-using Lucene.Net.Support;
+using Lucene.Net.Support;
 using System.Collections.Generic;
 using System;
 using System.Text;
 using System.Collections;
+using J2N.Numerics;
 
 namespace Lucene.Net.Search.Spans
 {
@@ -141,7 +142,7 @@
         public override int GetHashCode()
         {
             int h = m_match.GetHashCode();
-            h ^= (h << 8) | ((int)((uint)h >> 25)); // reversible
+            h ^= (h << 8) | (h.TripleShift(25)); // reversible
             //TODO: is this right?
             h ^= payloadEqualityComparer.GetHashCode(m_payloadToMatch); // LUCENENET NOTE: Need to use the structural equality comparer to compare equality of all contained values
             h ^= J2N.BitConversion.SingleToRawInt32Bits(Boost);
diff --git a/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs b/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs
index c0a7782..b37b948 100644
--- a/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs
+++ b/src/Lucene.Net/Search/Spans/SpanPositionRangeQuery.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System.Text;
 
@@ -98,7 +99,7 @@
         public override int GetHashCode()
         {
             int h = m_match.GetHashCode();
-            h ^= (h << 8) | ((int)((uint)h >> 25)); // reversible
+            h ^= (h << 8) | (h.TripleShift(25)); // reversible
             h ^= J2N.BitConversion.SingleToRawInt32Bits(Boost) ^ m_end ^ m_start;
             return h;
         }
diff --git a/src/Lucene.Net/Search/TopTermsRewrite.cs b/src/Lucene.Net/Search/TopTermsRewrite.cs
index 79d4e09..08e4a50 100644
--- a/src/Lucene.Net/Search/TopTermsRewrite.cs
+++ b/src/Lucene.Net/Search/TopTermsRewrite.cs
@@ -74,7 +74,7 @@
         {
             int maxSize = Math.Min(size, MaxSize);
             JCG.PriorityQueue<ScoreTerm> stQueue = new JCG.PriorityQueue<ScoreTerm>();
-            CollectTerms(reader, query, new TermCollectorAnonymousInnerClassHelper(maxSize, stQueue));
+            CollectTerms(reader, query, new TermCollectorAnonymousClass(maxSize, stQueue));
 
             var q = GetTopLevelQuery();
             ScoreTerm[] scoreTerms = stQueue.ToArray(/*new ScoreTerm[stQueue.Count]*/);
@@ -90,12 +90,12 @@
             return q;
         }
 
-        private class TermCollectorAnonymousInnerClassHelper : TermCollector
+        private class TermCollectorAnonymousClass : TermCollector
         {
             private readonly int maxSize;
             private readonly JCG.PriorityQueue<ScoreTerm> stQueue;
 
-            public TermCollectorAnonymousInnerClassHelper(int maxSize, JCG.PriorityQueue<ScoreTerm> stQueue)
+            public TermCollectorAnonymousClass(int maxSize, JCG.PriorityQueue<ScoreTerm> stQueue)
             {
                 this.maxSize = maxSize;
                 this.stQueue = stQueue;
diff --git a/src/Lucene.Net/Store/BufferedIndexInput.cs b/src/Lucene.Net/Store/BufferedIndexInput.cs
index 716f1c0..6da7586 100644
--- a/src/Lucene.Net/Store/BufferedIndexInput.cs
+++ b/src/Lucene.Net/Store/BufferedIndexInput.cs
@@ -260,26 +260,26 @@
             if (5 <= (bufferLength - bufferPosition))
             {
                 byte b = m_buffer[bufferPosition++];
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return b;
                 }
                 int i = b & 0x7F;
                 b = m_buffer[bufferPosition++];
                 i |= (b & 0x7F) << 7;
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return i;
                 }
                 b = m_buffer[bufferPosition++];
                 i |= (b & 0x7F) << 14;
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return i;
                 }
                 b = m_buffer[bufferPosition++];
                 i |= (b & 0x7F) << 21;
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return i;
                 }
@@ -306,56 +306,56 @@
             if (9 <= bufferLength - bufferPosition)
             {
                 byte b = m_buffer[bufferPosition++];
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return b;
                 }
                 long i = b & 0x7FL;
                 b = m_buffer[bufferPosition++];
                 i |= (b & 0x7FL) << 7;
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return i;
                 }
                 b = m_buffer[bufferPosition++];
                 i |= (b & 0x7FL) << 14;
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return i;
                 }
                 b = m_buffer[bufferPosition++];
                 i |= (b & 0x7FL) << 21;
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return i;
                 }
                 b = m_buffer[bufferPosition++];
                 i |= (b & 0x7FL) << 28;
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return i;
                 }
                 b = m_buffer[bufferPosition++];
                 i |= (b & 0x7FL) << 35;
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return i;
                 }
                 b = m_buffer[bufferPosition++];
                 i |= (b & 0x7FL) << 42;
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return i;
                 }
                 b = m_buffer[bufferPosition++];
                 i |= (b & 0x7FL) << 49;
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return i;
                 }
                 b = m_buffer[bufferPosition++];
                 i |= (b & 0x7FL) << 56;
-                if ((sbyte)b >= 0)
+                if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
                 {
                     return i;
                 }
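
In the variable-length integer readers, `(sbyte)b >= 0` tests whether the VInt continuation bit (the high bit of each byte) is clear; the patch rewrites it as the cast-free `b <= sbyte.MaxValue`, which is the same predicate. An exhaustive check over all byte values, using a hypothetical helper that is not part of the patch:

```csharp
using System;

static class VIntGuardSketch
{
    static void Main()
    {
        // The cast-based test and the comparison agree for every byte: the high
        // (continuation) bit is clear exactly when the value is <= 127.
        for (int v = 0; v <= 255; v++)
        {
            byte b = (byte)v;
            bool oldForm = (sbyte)b >= 0;
            bool newForm = b <= sbyte.MaxValue;
            if (oldForm != newForm) throw new Exception($"mismatch at {v}");
        }
        Console.WriteLine("equivalent for all 256 byte values");
    }
}
```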
diff --git a/src/Lucene.Net/Store/ByteArrayDataInput.cs b/src/Lucene.Net/Store/ByteArrayDataInput.cs
index 06d29cc..562355e 100644
--- a/src/Lucene.Net/Store/ByteArrayDataInput.cs
+++ b/src/Lucene.Net/Store/ByteArrayDataInput.cs
@@ -1,4 +1,4 @@
-using System;
+using System;
 
 namespace Lucene.Net.Store
 {
@@ -93,7 +93,7 @@
         /// </summary>
         public override short ReadInt16()
         {
-            return (short)(ushort)(((bytes[pos++] & 0xFF) << 8) | (bytes[pos++] & 0xFF));
+            return (short)(((bytes[pos++] & 0xFF) << 8) | (bytes[pos++] & 0xFF));
         }
 
         /// <summary>
@@ -123,26 +123,26 @@
         public override int ReadVInt32()
         {
             byte b = bytes[pos++];
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return b;
             }
             int i = b & 0x7F;
             b = bytes[pos++];
             i |= (b & 0x7F) << 7;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = bytes[pos++];
             i |= (b & 0x7F) << 14;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = bytes[pos++];
             i |= (b & 0x7F) << 21;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
@@ -162,56 +162,56 @@
         public override long ReadVInt64()
         {
             byte b = bytes[pos++];
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return b;
             }
             long i = b & 0x7FL;
             b = bytes[pos++];
             i |= (b & 0x7FL) << 7;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = bytes[pos++];
             i |= (b & 0x7FL) << 14;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = bytes[pos++];
             i |= (b & 0x7FL) << 21;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = bytes[pos++];
             i |= (b & 0x7FL) << 28;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = bytes[pos++];
             i |= (b & 0x7FL) << 35;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = bytes[pos++];
             i |= (b & 0x7FL) << 42;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = bytes[pos++];
             i |= (b & 0x7FL) << 49;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = bytes[pos++];
             i |= (b & 0x7FL) << 56;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
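
`ReadInt16` assembles two bytes into an `int` whose upper bits are already zero, so the intermediate `(ushort)` cast added nothing; casting straight to `short` truncates to the same bit pattern. A small illustration (hypothetical names, not part of the patch):

```csharp
using System;

static class ReadInt16CastSketch
{
    static void Main()
    {
        byte hi = 0xFE, lo = 0x01;                       // assembles to the negative short -511
        int assembled = ((hi & 0xFF) << 8) | (lo & 0xFF);
        short withUInt16Cast = (short)(ushort)assembled; // old form
        short direct = (short)assembled;                 // new form; identical truncation
        Console.WriteLine(withUInt16Cast == direct);     // True
    }
}
```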
diff --git a/src/Lucene.Net/Store/ByteBufferIndexInput.cs b/src/Lucene.Net/Store/ByteBufferIndexInput.cs
index 73d6a7b..4b7951e 100644
--- a/src/Lucene.Net/Store/ByteBufferIndexInput.cs
+++ b/src/Lucene.Net/Store/ByteBufferIndexInput.cs
@@ -1,4 +1,5 @@
-using J2N.IO;
+using J2N.IO;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Util.Fst;
 using System;
@@ -89,7 +90,7 @@
             if (Debugging.AssertsEnabled)
             {
                 Debugging.Assert(chunkSizePower >= 0 && chunkSizePower <= 30);
-                Debugging.Assert(((long)((ulong)length >> chunkSizePower)) < int.MaxValue);
+                Debugging.Assert(length.TripleShift(chunkSizePower) < int.MaxValue);
             }
 
             // LUCENENET specific: MMapIndexInput calls SetBuffers() to populate
@@ -327,8 +328,8 @@
         {
             long sliceEnd = offset + length;
 
-            int startIndex = (int)((long)((ulong)offset >> chunkSizePower));
-            int endIndex = (int)((long)((ulong)sliceEnd >> chunkSizePower));
+            int startIndex = (int)(offset.TripleShift(chunkSizePower));
+            int endIndex = (int)(sliceEnd.TripleShift(chunkSizePower));
 
             // we always allocate one more slice, the last one may be a 0 byte one
             ByteBuffer[] slices = new ByteBuffer[endIndex - startIndex + 1];
diff --git a/src/Lucene.Net/Store/CompoundFileDirectory.cs b/src/Lucene.Net/Store/CompoundFileDirectory.cs
index 57c2702..c887250 100644
--- a/src/Lucene.Net/Store/CompoundFileDirectory.cs
+++ b/src/Lucene.Net/Store/CompoundFileDirectory.cs
@@ -427,16 +427,16 @@
                     " found (fileName=" + name + " files: " + 
                     string.Format(J2N.Text.StringFormatter.InvariantCulture, "{0}", entries.Keys) + ")");
             }
-            return new IndexInputSlicerAnonymousInnerClassHelper(this, entry);
+            return new IndexInputSlicerAnonymousClass(this, entry);
         }
 
-        private class IndexInputSlicerAnonymousInnerClassHelper : IndexInputSlicer
+        private class IndexInputSlicerAnonymousClass : IndexInputSlicer
         {
             private readonly CompoundFileDirectory outerInstance;
 
             private readonly FileEntry entry;
 
-            public IndexInputSlicerAnonymousInnerClassHelper(CompoundFileDirectory outerInstance, FileEntry entry)
+            public IndexInputSlicerAnonymousClass(CompoundFileDirectory outerInstance, FileEntry entry)
             {
                 this.outerInstance = outerInstance;
                 this.entry = entry;
diff --git a/src/Lucene.Net/Store/DataInput.cs b/src/Lucene.Net/Store/DataInput.cs
index bec94e8..4609c9a 100644
--- a/src/Lucene.Net/Store/DataInput.cs
+++ b/src/Lucene.Net/Store/DataInput.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Diagnostics;
+using Lucene.Net.Diagnostics;
 using System;
 using System.Collections.Generic;
 using System.IO;
@@ -95,7 +95,7 @@
         /// <seealso cref="DataOutput.WriteInt16(short)"/>
         public virtual short ReadInt16()
         {
-            return (short)(ushort)(((ReadByte() & 0xFF) << 8) | (ReadByte() & 0xFF));
+            return (short)(((ReadByte() & 0xFF) << 8) | (ReadByte() & 0xFF));
         }
 
         /// <summary>
@@ -123,26 +123,26 @@
         public virtual int ReadVInt32()
         {
             byte b = ReadByte();
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return b;
             }
             int i = b & 0x7F;
             b = ReadByte();
             i |= (b & 0x7F) << 7;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = ReadByte();
             i |= (b & 0x7F) << 14;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = ReadByte();
             i |= (b & 0x7F) << 21;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
@@ -191,56 +191,56 @@
             return i;
             */
             byte b = ReadByte();
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return b;
             }
             long i = b & 0x7FL;
             b = ReadByte();
             i |= (b & 0x7FL) << 7;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = ReadByte();
             i |= (b & 0x7FL) << 14;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = ReadByte();
             i |= (b & 0x7FL) << 21;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = ReadByte();
             i |= (b & 0x7FL) << 28;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = ReadByte();
             i |= (b & 0x7FL) << 35;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = ReadByte();
             i |= (b & 0x7FL) << 42;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = ReadByte();
             i |= (b & 0x7FL) << 49;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = ReadByte();
             i |= (b & 0x7FL) << 56;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
diff --git a/src/Lucene.Net/Store/DataOutput.cs b/src/Lucene.Net/Store/DataOutput.cs
index 93493e4..ac1d591 100644
--- a/src/Lucene.Net/Store/DataOutput.cs
+++ b/src/Lucene.Net/Store/DataOutput.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System.Collections.Generic;
 using System.IO;
@@ -72,10 +73,10 @@
         /// <seealso cref="DataInput.ReadInt32()"/>
         public virtual void WriteInt32(int i)
         {
-            WriteByte((byte)(sbyte)(i >> 24));
-            WriteByte((byte)(sbyte)(i >> 16));
-            WriteByte((byte)(sbyte)(i >> 8));
-            WriteByte((byte)(sbyte)i);
+            WriteByte((byte)(i >> 24));
+            WriteByte((byte)(i >> 16));
+            WriteByte((byte)(i >> 8));
+            WriteByte((byte)i);
         }
 
         /// <summary>
@@ -86,8 +87,8 @@
         /// <seealso cref="DataInput.ReadInt16()"/>
         public virtual void WriteInt16(short i)
         {
-            WriteByte((byte)(sbyte)((ushort)i >> 8));
-            WriteByte((byte)(sbyte)(ushort)i);
+            WriteByte((byte)((ushort)i >> 8));
+            WriteByte((byte)(ushort)i);
         }
 
         /// <summary>
@@ -199,10 +200,10 @@
         {
             while ((i & ~0x7F) != 0)
             {
-                WriteByte((byte)unchecked((sbyte)((i & 0x7F) | 0x80)));
-                i = (int)((uint)i >> 7);
+                WriteByte((byte)((i & 0x7F) | 0x80));
+                i = i.TripleShift(7);
             }
-            WriteByte((byte)(sbyte)i);
+            WriteByte((byte)i);
         }
 
         /// <summary>
@@ -234,10 +235,10 @@
             if (Debugging.AssertsEnabled) Debugging.Assert(i >= 0L);
             while ((i & ~0x7FL) != 0L)
             {
-                WriteByte((byte)unchecked((sbyte)((i & 0x7FL) | 0x80L)));
-                i = (long)((ulong)i >> 7);
+                WriteByte((byte)((i & 0x7FL) | 0x80L));
+                i = i.TripleShift(7);
             }
-            WriteByte((byte)(sbyte)i);
+            WriteByte((byte)i);
         }
 
         /// <summary>
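
`WriteVInt32` emits seven payload bits per byte, low-order group first, setting the high bit of every byte except the last; the reader's `b <= sbyte.MaxValue` guard shown earlier is the matching termination test. A round-trip sketch mirroring the patched logic with hypothetical standalone helpers (not the library API):

```csharp
using System;
using System.Collections.Generic;
using J2N.Numerics;

static class VInt32Sketch
{
    // Seven bits per byte, least-significant group first; high bit means "more bytes follow".
    static List<byte> WriteVInt32(int i)
    {
        var output = new List<byte>();
        while ((i & ~0x7F) != 0)
        {
            output.Add((byte)((i & 0x7F) | 0x80));
            i = i.TripleShift(7); // unsigned shift so negative inputs also terminate
        }
        output.Add((byte)i);
        return output;
    }

    static int ReadVInt32(IReadOnlyList<byte> input)
    {
        int value = 0, shift = 0;
        foreach (byte b in input)
        {
            value |= (b & 0x7F) << shift;
            if (b <= sbyte.MaxValue) break; // continuation bit clear: last byte
            shift += 7;
        }
        return value;
    }

    static void Main()
    {
        int original = 300; // encodes as 0xAC 0x02
        Console.WriteLine(ReadVInt32(WriteVInt32(original)) == original); // True
    }
}
```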
diff --git a/src/Lucene.Net/Store/Directory.cs b/src/Lucene.Net/Store/Directory.cs
index e93249b..c460ce3 100644
--- a/src/Lucene.Net/Store/Directory.cs
+++ b/src/Lucene.Net/Store/Directory.cs
@@ -247,14 +247,14 @@
         public virtual IndexInputSlicer CreateSlicer(string name, IOContext context)
         {
             EnsureOpen();
-            return new IndexInputSlicerAnonymousInnerClassHelper(OpenInput(name, context));
+            return new IndexInputSlicerAnonymousClass(OpenInput(name, context));
         }
 
-        private class IndexInputSlicerAnonymousInnerClassHelper : IndexInputSlicer
+        private class IndexInputSlicerAnonymousClass : IndexInputSlicer
         {
             private readonly IndexInput @base;
 
-            public IndexInputSlicerAnonymousInnerClassHelper(IndexInput @base)
+            public IndexInputSlicerAnonymousClass(IndexInput @base)
             {
                 this.@base = @base;
             }
diff --git a/src/Lucene.Net/Store/FlushInfo.cs b/src/Lucene.Net/Store/FlushInfo.cs
index d4a7f66..ca404f4 100644
--- a/src/Lucene.Net/Store/FlushInfo.cs
+++ b/src/Lucene.Net/Store/FlushInfo.cs
@@ -1,3 +1,5 @@
+using J2N.Numerics;
+
 namespace Lucene.Net.Store
 {
     /*
@@ -43,7 +45,7 @@
         {
             const int prime = 31;
             int result = 1;
-            result = prime * result + (int)(EstimatedSegmentSize ^ ((long)((ulong)EstimatedSegmentSize >> 32)));
+            result = prime * result + (int)(EstimatedSegmentSize ^ (EstimatedSegmentSize.TripleShift(32)));
             result = prime * result + NumDocs;
             return result;
         }
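
The hash-code changes for `FlushInfo` (and `MergeInfo` below) keep the usual Java fold of a 64-bit value into 32 bits: XOR the high word into the low word before truncating, so both halves influence the hash. A minimal sketch (illustrative value and names only):

```csharp
using System;
using J2N.Numerics;

static class LongHashFoldSketch
{
    static void Main()
    {
        long size = 0x1234_5678_9ABC_DEF0L;
        int folded = (int)(size ^ size.TripleShift(32)); // high word folded into low word
        int lowWordOnly = (int)size;                     // a bare cast would drop the high word
        Console.WriteLine($"{folded:X8} vs {lowWordOnly:X8}");
    }
}
```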
diff --git a/src/Lucene.Net/Store/LockVerifyServer.cs b/src/Lucene.Net/Store/LockVerifyServer.cs
index 6746d10..7d17bc0 100644
--- a/src/Lucene.Net/Store/LockVerifyServer.cs
+++ b/src/Lucene.Net/Store/LockVerifyServer.cs
@@ -76,7 +76,7 @@
             for (int count = 0; count < maxClients; count++)
             {
                 Socket cs = s.Accept();
-                threads[count] = new ThreadAnonymousInnerClassHelper(localLock, lockedID, startingGun, cs);
+                threads[count] = new ThreadAnonymousClass(localLock, lockedID, startingGun, cs);
                 threads[count].Start();
             }
 
@@ -96,14 +96,14 @@
             Console.WriteLine("Server terminated.");
         }
 
-        private class ThreadAnonymousInnerClassHelper : ThreadJob
+        private class ThreadAnonymousClass : ThreadJob
         {
             private readonly object localLock;
             private readonly int[] lockedID;
             private readonly CountdownEvent startingGun;
             private readonly Socket cs;
 
-            public ThreadAnonymousInnerClassHelper(object localLock, int[] lockedID, CountdownEvent startingGun, Socket cs)
+            public ThreadAnonymousClass(object localLock, int[] lockedID, CountdownEvent startingGun, Socket cs)
             {
                 this.localLock = localLock;
                 this.lockedID = lockedID;
diff --git a/src/Lucene.Net/Store/MMapDirectory.cs b/src/Lucene.Net/Store/MMapDirectory.cs
index cd01d15..61f8404 100644
--- a/src/Lucene.Net/Store/MMapDirectory.cs
+++ b/src/Lucene.Net/Store/MMapDirectory.cs
@@ -1,4 +1,4 @@
-using J2N.IO;
+using J2N.IO;
 using J2N.IO.MemoryMappedFiles;
 using J2N.Numerics;
 using Lucene.Net.Diagnostics;
@@ -189,16 +189,16 @@
         public override IndexInputSlicer CreateSlicer(string name, IOContext context)
         {
             var full = (MMapIndexInput)OpenInput(name, context);
-            return new IndexInputSlicerAnonymousInnerClassHelper(this, full);
+            return new IndexInputSlicerAnonymousClass(this, full);
         }
 
-        private class IndexInputSlicerAnonymousInnerClassHelper : IndexInputSlicer
+        private class IndexInputSlicerAnonymousClass : IndexInputSlicer
         {
             private readonly MMapDirectory outerInstance;
 
             private readonly MMapIndexInput full;
 
-            public IndexInputSlicerAnonymousInnerClassHelper(MMapDirectory outerInstance, MMapIndexInput full)
+            public IndexInputSlicerAnonymousClass(MMapDirectory outerInstance, MMapIndexInput full)
             {
                 this.outerInstance = outerInstance;
                 this.full = full;
@@ -298,7 +298,7 @@
             long chunkSize = 1L << chunkSizePower;
 
             // we always allocate one more buffer, the last one may be a 0 byte one
-            int nrBuffers = (int)((long)((ulong)length >> chunkSizePower)) + 1;
+            int nrBuffers = (int)length.TripleShift(chunkSizePower) + 1;
 
             ByteBuffer[] buffers = new ByteBuffer[nrBuffers];
 
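The buffer-count computation in `MMapDirectory` is the usual chunking arithmetic: `length.TripleShift(chunkSizePower)` is `length / 2^chunkSizePower` rounded down, and one extra (possibly zero-length) buffer is always allocated, per the comment above. A worked example under assumed values (a 1 GiB chunk size and a file just over 3 GiB):

```csharp
using System;
using J2N.Numerics;

static class ChunkCountSketch
{
    static void Main()
    {
        int chunkSizePower = 30;        // assumed: 1 GiB chunks
        long length = (3L << 30) + 123; // assumed: a file slightly over 3 GiB
        int nrBuffers = (int)length.TripleShift(chunkSizePower) + 1; // floor(length / chunkSize) + 1
        Console.WriteLine(nrBuffers);   // 4
    }
}
```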
diff --git a/src/Lucene.Net/Store/MergeInfo.cs b/src/Lucene.Net/Store/MergeInfo.cs
index 696ba64..e7704c3 100644
--- a/src/Lucene.Net/Store/MergeInfo.cs
+++ b/src/Lucene.Net/Store/MergeInfo.cs
@@ -1,3 +1,5 @@
+using J2N.Numerics;
+
 namespace Lucene.Net.Store
 {
     /*
@@ -50,7 +52,7 @@
             const int prime = 31;
             int result = 1;
             result = prime * result 
-                + (int)(EstimatedMergeBytes ^ ((long)((ulong)EstimatedMergeBytes >> 32)));
+                + (int)(EstimatedMergeBytes ^ (EstimatedMergeBytes.TripleShift(32)));
             result = prime * result + (IsExternal ? 1231 : 1237);
             result = prime * result + MergeMaxNumSegments;
             result = prime * result + TotalDocCount;
diff --git a/src/Lucene.Net/Store/NIOFSDirectory.cs b/src/Lucene.Net/Store/NIOFSDirectory.cs
index 2c9e589..12487b2 100644
--- a/src/Lucene.Net/Store/NIOFSDirectory.cs
+++ b/src/Lucene.Net/Store/NIOFSDirectory.cs
@@ -113,16 +113,16 @@
             EnsureOpen();
             var path = new FileInfo(Path.Combine(Directory.FullName, name));
             var fc = new FileStream(path.FullName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite | FileShare.Delete);
-            return new IndexInputSlicerAnonymousInnerClassHelper(context, path, fc);
+            return new IndexInputSlicerAnonymousClass(context, path, fc);
         }
 
-        private class IndexInputSlicerAnonymousInnerClassHelper : IndexInputSlicer
+        private class IndexInputSlicerAnonymousClass : IndexInputSlicer
         {
             private readonly IOContext context;
             private readonly FileInfo path;
             private readonly FileStream descriptor;
 
-            public IndexInputSlicerAnonymousInnerClassHelper(IOContext context, FileInfo path, FileStream descriptor)
+            public IndexInputSlicerAnonymousClass(IOContext context, FileInfo path, FileStream descriptor)
             {
                 this.context = context;
                 this.path = path;
diff --git a/src/Lucene.Net/Store/SimpleFSDirectory.cs b/src/Lucene.Net/Store/SimpleFSDirectory.cs
index e0b3329..6f94a3a 100644
--- a/src/Lucene.Net/Store/SimpleFSDirectory.cs
+++ b/src/Lucene.Net/Store/SimpleFSDirectory.cs
@@ -94,16 +94,16 @@
             EnsureOpen();
             var file = new FileInfo(Path.Combine(Directory.FullName, name));
             var descriptor = new FileStream(file.FullName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
-            return new IndexInputSlicerAnonymousInnerClassHelper(context, file, descriptor);
+            return new IndexInputSlicerAnonymousClass(context, file, descriptor);
         }
 
-        private class IndexInputSlicerAnonymousInnerClassHelper : IndexInputSlicer
+        private class IndexInputSlicerAnonymousClass : IndexInputSlicer
         {
             private readonly IOContext context;
             private readonly FileInfo file;
             private readonly FileStream descriptor;
 
-            public IndexInputSlicerAnonymousInnerClassHelper(IOContext context, FileInfo file, FileStream descriptor)
+            public IndexInputSlicerAnonymousClass(IOContext context, FileInfo file, FileStream descriptor)
             {
                 this.context = context;
                 this.file = file;
diff --git a/src/Lucene.Net/Support/Index/TaskMergeScheduler.cs b/src/Lucene.Net/Support/Index/TaskMergeScheduler.cs
index dc74342..5e19137 100644
--- a/src/Lucene.Net/Support/Index/TaskMergeScheduler.cs
+++ b/src/Lucene.Net/Support/Index/TaskMergeScheduler.cs
@@ -40,6 +40,7 @@
     ///  
     /// LUCENENET specific
     /// </summary>
+    [Obsolete("Use ConcurrentMergeScheduler instead. This class will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
     public class TaskMergeScheduler : MergeScheduler, IConcurrentMergeScheduler
     {
         public const string COMPONENT_NAME = "CMS";
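
`TaskMergeScheduler` is now obsolete in favor of `ConcurrentMergeScheduler`. A hedged migration sketch; it assumes `IndexWriterConfig` exposes a settable `MergeScheduler` property (mirroring Java's `setMergeScheduler`) and uses a hypothetical index path, so verify against the actual API before relying on it:

```csharp
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

static class MergeSchedulerMigrationSketch
{
    static void Main()
    {
        // Assumption: MergeScheduler is a settable property on IndexWriterConfig.
        var analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48);
        var config = new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer)
        {
            MergeScheduler = new ConcurrentMergeScheduler() // instead of the obsolete TaskMergeScheduler
        };
        using var dir = FSDirectory.Open("example-index"); // hypothetical path
        using var writer = new IndexWriter(dir, config);
    }
}
```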
diff --git a/src/Lucene.Net/Util/AttributeSource.cs b/src/Lucene.Net/Util/AttributeSource.cs
index dedba6c..1abe421 100644
--- a/src/Lucene.Net/Util/AttributeSource.cs
+++ b/src/Lucene.Net/Util/AttributeSource.cs
@@ -257,7 +257,7 @@
             State initState = GetCurrentState();
             if (initState != null)
             {
-                return new IteratorAnonymousInnerClassHelper(initState);
+                return new IteratorAnonymousClass(initState);
             }
             else
             {
@@ -265,9 +265,9 @@
             }
         }
 
-        private class IteratorAnonymousInnerClassHelper : IEnumerator<Attribute>
+        private class IteratorAnonymousClass : IEnumerator<Attribute>
         {
-            public IteratorAnonymousInnerClassHelper(AttributeSource.State initState)
+            public IteratorAnonymousClass(AttributeSource.State initState)
             {
                 state = initState;
             }
@@ -582,16 +582,16 @@
         public string ReflectAsString(bool prependAttClass)
         {
             StringBuilder buffer = new StringBuilder();
-            ReflectWith(new AttributeReflectorAnonymousInnerClassHelper(prependAttClass, buffer));
+            ReflectWith(new AttributeReflectorAnonymousClass(prependAttClass, buffer));
             return buffer.ToString();
         }
 
-        private class AttributeReflectorAnonymousInnerClassHelper : IAttributeReflector
+        private class AttributeReflectorAnonymousClass : IAttributeReflector
         {
             private readonly bool prependAttClass;
             private readonly StringBuilder buffer;
 
-            public AttributeReflectorAnonymousInnerClassHelper(bool prependAttClass, StringBuilder buffer)
+            public AttributeReflectorAnonymousClass(bool prependAttClass, StringBuilder buffer)
             {
                 this.prependAttClass = prependAttClass;
                 this.buffer = buffer;
diff --git a/src/Lucene.Net/Util/Automaton/SpecialOperations.cs b/src/Lucene.Net/Util/Automaton/SpecialOperations.cs
index b2e9dcd..1d8b8cb 100644
--- a/src/Lucene.Net/Util/Automaton/SpecialOperations.cs
+++ b/src/Lucene.Net/Util/Automaton/SpecialOperations.cs
@@ -1,4 +1,5 @@
-using J2N.Collections.Generic.Extensions;
+using J2N.Collections.Generic.Extensions;
+using J2N.Numerics;
 using J2N.Text;
 using System.Collections.Generic;
 using System.Linq;
@@ -57,7 +58,7 @@
             int b = points.Length;
             while (b - a > 1)
             {
-                int d = (int)((uint)(a + b) >> 1);
+                int d = (a + b).TripleShift(1);
                 if (points[d] > c)
                 {
                     b = d;
diff --git a/src/Lucene.Net/Util/BroadWord.cs b/src/Lucene.Net/Util/BroadWord.cs
index 3269054..3ae1823 100644
--- a/src/Lucene.Net/Util/BroadWord.cs
+++ b/src/Lucene.Net/Util/BroadWord.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System.Runtime.CompilerServices;
 
@@ -46,13 +46,13 @@
         internal static int BitCount(long x)
         {
             // Step 0 leaves in each pair of bits the number of ones originally contained in that pair:
-            x = x - ((long)((ulong)(x & unchecked((long)0xAAAAAAAAAAAAAAAAL)) >> 1));
+            x = x - ((x & unchecked((long)0xAAAAAAAAAAAAAAAAL)).TripleShift(1));
             // Step 1, idem for each nibble:
-            x = (x & 0x3333333333333333L) + (((long)((ulong)x >> 2)) & 0x3333333333333333L);
+            x = (x & 0x3333333333333333L) + ((x.TripleShift(2)) & 0x3333333333333333L);
             // Step 2, idem for each byte:
-            x = (x + ((long)((ulong)x >> 4))) & 0x0F0F0F0F0F0F0F0FL;
+            x = (x + (x.TripleShift(4))) & 0x0F0F0F0F0F0F0F0FL;
             // Multiply to sum them all into the high byte, and return the high byte:
-            return (int)((long)((ulong)(x * L8_L) >> 56));
+            return (int)((x * L8_L).TripleShift(56));
         }
 
         /// <summary>
@@ -60,29 +60,29 @@
         /// <returns> The index of the r-th 1 bit in x, or if no such bit exists, 72. </returns>
         public static int Select(long x, int r)
         {
-            long s = x - ((long)((ulong)(x & unchecked((long)0xAAAAAAAAAAAAAAAAL)) >> 1)); // Step 0, pairwise bitsums
+            long s = x - ((x & unchecked((long)0xAAAAAAAAAAAAAAAAL)).TripleShift(1)); // Step 0, pairwise bitsums
 
             // Correct a small mistake in algorithm 2:
             // Use s instead of x the second time in right shift 2, compare to Algorithm 1 in rank9 above.
-            s = (s & 0x3333333333333333L) + (((long)((ulong)s >> 2)) & 0x3333333333333333L); // Step 1, nibblewise bitsums
+            s = (s & 0x3333333333333333L) + ((s.TripleShift(2)) & 0x3333333333333333L); // Step 1, nibblewise bitsums
 
-            s = ((s + ((long)((ulong)s >> 4))) & 0x0F0F0F0F0F0F0F0FL) * L8_L; // Step 2, bytewise bitsums
+            s = ((s + (s.TripleShift(4))) & 0x0F0F0F0F0F0F0F0FL) * L8_L; // Step 2, bytewise bitsums
 
-            long b = (long)((ulong)(((long)((ulong)SmallerUpTo7_8(s, (r * L8_L)) >> 7)) * L8_L) >> 53); // & (~7L); // Step 3, side ways addition for byte number times 8
+            long b = ((SmallerUpTo7_8(s, (r * L8_L)).TripleShift(7)) * L8_L).TripleShift(53); // & (~7L); // Step 3, side ways addition for byte number times 8
 
-            long l = r - (((long)((ulong)(s << 8) >> (int)b)) & 0xFFL); // Step 4, byte wise rank, subtract the rank with byte at b-8, or zero for b=0;
+            long l = r - (((s << 8).TripleShift((int)b)) & 0xFFL); // Step 4, byte wise rank, subtract the rank with byte at b-8, or zero for b=0;
             if (Debugging.AssertsEnabled) Debugging.Assert(0L <= 1, "{0}", l);
             //assert l < 8 : l; //fails when bit r is not available.
 
             // Select bit l from byte (x >>> b):
-            long spr = ((((long)((ulong)x >> (int)b)) & 0xFFL) * L8_L) & L9_L; // spread the 8 bits of the byte at b over the long at L9 positions
+            long spr = (((x.TripleShift((int)b)) & 0xFFL) * L8_L) & L9_L; // spread the 8 bits of the byte at b over the long at L9 positions
 
             // long spr_bigger8_zero = smaller8(0L, spr); // inlined smaller8 with 0L argument:
             // FIXME: replace by biggerequal8_one formula from article page 6, line 9. four operators instead of five here.
             long spr_bigger8_zero = ((H8_L - (spr & (~H8_L))) ^ (~spr)) & H8_L;
-            s = ((long)((ulong)spr_bigger8_zero >> 7)) * L8_L; // Step 5, sideways byte add the 8 bits towards the high byte
+            s = (spr_bigger8_zero.TripleShift(7)) * L8_L; // Step 5, sideways byte add the 8 bits towards the high byte
 
-            int res = (int)(b + ((long)((ulong)(((long)((ulong)SmallerUpTo7_8(s, (l * L8_L)) >> 7)) * L8_L) >> 56))); // Step 6
+            int res = (int)(b + (((SmallerUpTo7_8(s, (l * L8_L)).TripleShift(7)) * L8_L).TripleShift(56))); // Step 6
             return res;
         }
 
@@ -159,7 +159,7 @@
             while ((x != 0L) && (r > 0))
             {
                 int ntz = x.TrailingZeroCount();
-                x = (long)((ulong)x >> (ntz + 1));
+                x = x.TripleShift(ntz + 1);
                 s += (ntz + 1);
                 r -= 1;
             }
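
`BroadWord.BitCount` is the standard SWAR (SIMD within a register) population count: pairwise bit sums, then nibble sums, then byte sums, and finally a multiply by `L8_L` to accumulate every byte count into the top byte. A self-contained sketch of the same steps, checked against a naive loop (names and the test value are illustrative):

```csharp
using System;
using J2N.Numerics;

static class SwarPopCountSketch
{
    private const long L8 = 0x0101010101010101L; // one set bit per byte, like BroadWord.L8_L

    static int BitCount(long x)
    {
        unchecked
        {
            x -= (x & (long)0xAAAAAAAAAAAAAAAAL).TripleShift(1);                      // pairwise sums
            x = (x & 0x3333333333333333L) + (x.TripleShift(2) & 0x3333333333333333L); // nibble sums
            x = (x + x.TripleShift(4)) & 0x0F0F0F0F0F0F0F0FL;                         // byte sums
            return (int)((x * L8).TripleShift(56));                                   // fold every byte into the high byte
        }
    }

    static int NaiveCount(long x)
    {
        int n = 0;
        for (int i = 0; i < 64; i++) if (((x >> i) & 1L) != 0) n++;
        return n;
    }

    static void Main()
    {
        long v = unchecked((long)0xF0F0_0000_FFFF_0001L);
        Console.WriteLine($"{BitCount(v)} == {NaiveCount(v)}"); // 25 == 25
    }
}
```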
diff --git a/src/Lucene.Net/Util/BytesRefArray.cs b/src/Lucene.Net/Util/BytesRefArray.cs
index 71e2134..7cd84d3 100644
--- a/src/Lucene.Net/Util/BytesRefArray.cs
+++ b/src/Lucene.Net/Util/BytesRefArray.cs
@@ -114,18 +114,18 @@
             {
                 orderedEntries[i] = i;
             }
-            new IntroSorterAnonymousInnerClassHelper(this, comp, orderedEntries).Sort(0, Length);
+            new IntroSorterAnonymousClass(this, comp, orderedEntries).Sort(0, Length);
             return orderedEntries;
         }
 
-        private class IntroSorterAnonymousInnerClassHelper : IntroSorter
+        private class IntroSorterAnonymousClass : IntroSorter
         {
             private readonly BytesRefArray outerInstance;
 
             private readonly IComparer<BytesRef> comp;
             private readonly int[] orderedEntries;
 
-            public IntroSorterAnonymousInnerClassHelper(BytesRefArray outerInstance, IComparer<BytesRef> comp, int[] orderedEntries)
+            public IntroSorterAnonymousClass(BytesRefArray outerInstance, IComparer<BytesRef> comp, int[] orderedEntries)
             {
                 this.outerInstance = outerInstance;
                 this.comp = comp;
@@ -198,11 +198,11 @@
             BytesRef spare = new BytesRef();
             int size = Length;
             int[] indices = comp == null ? null : Sort(comp);
-            return new BytesRefIteratorAnonymousInnerClassHelper(this, comp, spare, size, indices);
+            return new BytesRefIteratorAnonymousClass(this, comp, spare, size, indices);
         }
 
         [Obsolete("This class will be removed in 4.8.0 release candidate"), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
-        private class BytesRefIteratorAnonymousInnerClassHelper : IBytesRefIterator
+        private class BytesRefIteratorAnonymousClass : IBytesRefIterator
         {
             private readonly BytesRefArray outerInstance;
 
@@ -211,7 +211,7 @@
             private readonly int size;
             private readonly int[] indices;
 
-            public BytesRefIteratorAnonymousInnerClassHelper(BytesRefArray outerInstance, IComparer<BytesRef> comp, BytesRef spare, int size, int[] indices)
+            public BytesRefIteratorAnonymousClass(BytesRefArray outerInstance, IComparer<BytesRef> comp, BytesRef spare, int size, int[] indices)
             {
                 this.outerInstance = outerInstance;
                 this.comp = comp;
diff --git a/src/Lucene.Net/Util/BytesRefHash.cs b/src/Lucene.Net/Util/BytesRefHash.cs
index 433cdd3..8cc1d84 100644
--- a/src/Lucene.Net/Util/BytesRefHash.cs
+++ b/src/Lucene.Net/Util/BytesRefHash.cs
@@ -174,11 +174,11 @@
         public int[] Sort(IComparer<BytesRef> comp)
         {
             int[] compact = Compact();
-            new IntroSorterAnonymousInnerClassHelper(this, comp, compact).Sort(0, count);
+            new IntroSorterAnonymousClass(this, comp, compact).Sort(0, count);
             return compact;
         }
 
-        private class IntroSorterAnonymousInnerClassHelper : IntroSorter
+        private class IntroSorterAnonymousClass : IntroSorter
         {
             private readonly BytesRefHash outerInstance;
 
@@ -186,7 +186,7 @@
             private readonly int[] compact;
             private readonly BytesRef pivot = new BytesRef(), /*scratch1 = new BytesRef(), // LUCENENET: Never read */ scratch2 = new BytesRef();
 
-            public IntroSorterAnonymousInnerClassHelper(BytesRefHash outerInstance, IComparer<BytesRef> comp, int[] compact)
+            public IntroSorterAnonymousClass(BytesRefHash outerInstance, IComparer<BytesRef> comp, int[] compact)
             {
                 this.outerInstance = outerInstance;
                 this.comp = comp;
diff --git a/src/Lucene.Net/Util/FixedBitSet.cs b/src/Lucene.Net/Util/FixedBitSet.cs
index 58d1bd3..e6013e1 100644
--- a/src/Lucene.Net/Util/FixedBitSet.cs
+++ b/src/Lucene.Net/Util/FixedBitSet.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -157,7 +157,7 @@
         /// Returns the number of 64 bit words it would take to hold <paramref name="numBits"/> </summary>
         public static int Bits2words(int numBits)
         {
-            int numLong = (int)((uint)numBits >> 6);
+            int numLong = numBits.TripleShift(6);
             if ((numBits & 63) != 0)
             {
                 numLong++;
@@ -602,8 +602,7 @@
             */
 
             long startmask = -1L << startIndex;
-            long endmask = (long)(unchecked((ulong)-1) >> -endIndex);
-            //long endmask = -(int)((uint)1L >> -endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
+            long endmask = (-1L).TripleShift(-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
 
             if (startWord == endWord)
             {
@@ -642,8 +641,7 @@
             int endWord = (endIndex - 1) >> 6;
 
             long startmask = -1L << startIndex;
-            long endmask = (long)(unchecked((ulong)-1) >> -endIndex);
-            //long endmask = -(int)((uint)1UL >> -endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
+            long endmask = (-1L).TripleShift(-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
 
             if (startWord == endWord)
             {
@@ -730,7 +728,7 @@
             for (int i = numWords; --i >= 0; )
             {
                 h ^= bits[i];
-                h = (h << 1) | ((long)((ulong)h >> 63)); // rotate left
+                h = (h << 1) | (h.TripleShift(63)); // rotate left
             }
             // fold leftmost bits into right and add a constant to prevent
             // empty sets from returning 0, which is too common.
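
`FixedBitSet.Bits2words` computes the number of 64-bit words needed to hold `numBits`: an unsigned divide by 64 plus one extra word when a partial trailing word remains. The new endmask expression relies on the same shift-count wrap the inline comment describes (only the low 6 bits of the count are used, so shifting by `-endIndex` equals shifting by `64 - (endIndex & 63)`). A sketch of the word count (hypothetical standalone copy, not the library class):

```csharp
using System;
using J2N.Numerics;

static class Bits2WordsSketch
{
    // Mirrors the patched Bits2words: how many 64-bit words are needed for numBits bits.
    static int Bits2words(int numBits)
    {
        int numLong = numBits.TripleShift(6); // numBits / 64 without sign-extension surprises
        if ((numBits & 63) != 0) numLong++;   // partial trailing word
        return numLong;
    }

    static void Main()
    {
        Console.WriteLine(Bits2words(64)); // 1
        Console.WriteLine(Bits2words(65)); // 2
        Console.WriteLine(Bits2words(0));  // 0
    }
}
```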
diff --git a/src/Lucene.Net/Util/Fst/FST.cs b/src/Lucene.Net/Util/Fst/FST.cs
index fe0379c..d405d43 100644
--- a/src/Lucene.Net/Util/Fst/FST.cs
+++ b/src/Lucene.Net/Util/Fst/FST.cs
@@ -1,4 +1,5 @@
-using J2N.Collections;
+using J2N.Collections;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -594,7 +595,7 @@
             if (inputType == FST.INPUT_TYPE.BYTE1)
             {
                 if (Debugging.AssertsEnabled) Debugging.Assert(v <= 255,"v={0}", v);
-                @out.WriteByte((byte)(sbyte)v);
+                @out.WriteByte((byte)v);
             }
             else if (inputType == FST.INPUT_TYPE.BYTE2)
             {
@@ -723,7 +724,7 @@
                     flags += FST.BIT_ARC_HAS_OUTPUT;
                 }
 
-                bytes.WriteByte((byte)(sbyte)flags);
+                bytes.WriteByte((byte)flags);
                 WriteLabel(bytes, arc.Label);
 
                 // System.out.println("  write arc: label=" + (char) arc.Label + " flags=" + flags + " target=" + target.Node + " pos=" + bytes.getPosition() + " output=" + outputs.outputToString(arc.Output));
@@ -1354,7 +1355,7 @@
                 while (low <= high)
                 {
                     //System.out.println("    cycle");
-                    int mid = (int)((uint)(low + high) >> 1);
+                    int mid = (low + high).TripleShift(1);
                     @in.Position = arc.PosArcsStart;
                     @in.SkipBytes(arc.BytesPerArc * mid + 1);
                     int midLabel = ReadLabel(@in);
@@ -1886,7 +1887,7 @@
                             }
 
                             if (Debugging.AssertsEnabled) Debugging.Assert(flags != FST.ARCS_AS_FIXED_ARRAY);
-                            writer.WriteByte((byte)(sbyte)flags);
+                            writer.WriteByte((byte)flags);
 
                             fst.WriteLabel(writer, arc.Label);
 
diff --git a/src/Lucene.Net/Util/Fst/FSTEnum.cs b/src/Lucene.Net/Util/Fst/FSTEnum.cs
index 5293480..888ba9f 100644
--- a/src/Lucene.Net/Util/Fst/FSTEnum.cs
+++ b/src/Lucene.Net/Util/Fst/FSTEnum.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Runtime.CompilerServices;
@@ -172,7 +173,7 @@
                     bool found = false;
                     while (low <= high)
                     {
-                        mid = (int)((uint)(low + high) >> 1);
+                        mid = (low + high).TripleShift(1);
                         @in.Position = arc.PosArcsStart;
                         @in.SkipBytes(arc.BytesPerArc * mid + 1);
                         int midLabel = m_fst.ReadLabel(@in);
@@ -345,7 +346,7 @@
                     bool found = false;
                     while (low <= high)
                     {
-                        mid = (int)((uint)(low + high) >> 1);
+                        mid = (low + high).TripleShift(1);
                         @in.Position = arc.PosArcsStart;
                         @in.SkipBytes(arc.BytesPerArc * mid + 1);
                         int midLabel = m_fst.ReadLabel(@in);
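
The binary searches over packed FST arcs now compute the midpoint as `(low + high).TripleShift(1)`. Treating the sum as unsigned keeps the midpoint correct even if `low + high` wraps past `int.MaxValue`, which `(low + high) / 2` does not. A small illustration with assumed out-of-range bounds (not values that occur in the FST code):

```csharp
using System;
using J2N.Numerics;

static class MidpointSketch
{
    static void Main()
    {
        int low = 1_500_000_000, high = 1_600_000_000;
        int sum = unchecked(low + high);       // wraps to a negative int
        Console.WriteLine(sum / 2);            // -597483648: signed halving is wrong here
        Console.WriteLine(sum.TripleShift(1)); // 1550000000: unsigned halving stays correct
    }
}
```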
diff --git a/src/Lucene.Net/Util/Fst/NoOutputs.cs b/src/Lucene.Net/Util/Fst/NoOutputs.cs
index 02b0f33..bc5ca0d 100644
--- a/src/Lucene.Net/Util/Fst/NoOutputs.cs
+++ b/src/Lucene.Net/Util/Fst/NoOutputs.cs
@@ -31,11 +31,11 @@
     /// </summary>
     public sealed class NoOutputs : Outputs<object>
     {
-        internal static readonly object NO_OUTPUT = new ObjectAnonymousInnerClassHelper();
+        internal static readonly object NO_OUTPUT = new ObjectAnonymousClass();
 
-        private class ObjectAnonymousInnerClassHelper : object
+        private class ObjectAnonymousClass : object
         {
-            public ObjectAnonymousInnerClassHelper()
+            public ObjectAnonymousClass()
             {
             }
 
diff --git a/src/Lucene.Net/Util/Fst/Util.cs b/src/Lucene.Net/Util/Fst/Util.cs
index 40835da..8e6c7b3 100644
--- a/src/Lucene.Net/Util/Fst/Util.cs
+++ b/src/Lucene.Net/Util/Fst/Util.cs
@@ -1,4 +1,5 @@
-using J2N;
+using J2N;
+using J2N.Numerics;
 using J2N.Text;
 using Lucene.Net.Diagnostics;
 using System;
@@ -181,7 +182,7 @@
                         bool exact = false;
                         while (low <= high)
                         {
-                            mid = (int)((uint)(low + high) >> 1);
+                            mid = (low + high).TripleShift(1);
                             @in.Position = arc.PosArcsStart;
                             @in.SkipBytes(arc.BytesPerArc * mid);
                             var flags = (sbyte)@in.ReadByte();
@@ -1143,7 +1144,7 @@
                 // " targetLabel=" + targetLabel);
                 while (low <= high)
                 {
-                    mid = (int)((uint)(low + high) >> 1);
+                    mid = (low + high).TripleShift(1);
                     @in.Position = arc.PosArcsStart;
                     @in.SkipBytes(arc.BytesPerArc * mid + 1);
                     int midLabel = fst.ReadLabel(@in);
diff --git a/src/Lucene.Net/Util/InPlaceMergeSorter.cs b/src/Lucene.Net/Util/InPlaceMergeSorter.cs
index 6423f59..5f414df 100644
--- a/src/Lucene.Net/Util/InPlaceMergeSorter.cs
+++ b/src/Lucene.Net/Util/InPlaceMergeSorter.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using System.Runtime.CompilerServices;
 
 namespace Lucene.Net.Util
@@ -53,7 +54,7 @@
             }
             else
             {
-                int mid = (int)((uint)(from + to) >> 1);
+                int mid = (from + to).TripleShift(1);
                 MergeSort(from, mid);
                 MergeSort(mid, to);
                 MergeInPlace(from, mid, to);
diff --git a/src/Lucene.Net/Util/IndexableBinaryStringTools.cs b/src/Lucene.Net/Util/IndexableBinaryStringTools.cs
index a7849d6..d986709 100644
--- a/src/Lucene.Net/Util/IndexableBinaryStringTools.cs
+++ b/src/Lucene.Net/Util/IndexableBinaryStringTools.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Runtime.CompilerServices;
@@ -158,11 +159,14 @@
                     codingCase = CODING_CASES[caseNum];
                     if (2 == codingCase.numBytes)
                     {
-                        outputArray[outputCharNum] = (char)(((inputArray[inputByteNum] & 0xFF) << codingCase.initialShift) + (((int)((uint)(inputArray[inputByteNum + 1] & 0xFF) >> codingCase.finalShift)) & codingCase.finalMask) & (short)0x7FFF);
+                        outputArray[outputCharNum] = (char)(((inputArray[inputByteNum] & 0xFF) << codingCase.initialShift)
+                            + (((inputArray[inputByteNum + 1] & 0xFF).TripleShift(codingCase.finalShift)) & codingCase.finalMask) & /*(short)*/0x7FFF); // LUCENENET: Removed unnecessary cast
                     } // numBytes is 3
                     else
                     {
-                        outputArray[outputCharNum] = (char)(((inputArray[inputByteNum] & 0xFF) << codingCase.initialShift) + ((inputArray[inputByteNum + 1] & 0xFF) << codingCase.middleShift) + (((int)((uint)(inputArray[inputByteNum + 2] & 0xFF) >> codingCase.finalShift)) & codingCase.finalMask) & (short)0x7FFF);
+                        outputArray[outputCharNum] = (char)(((inputArray[inputByteNum] & 0xFF) << codingCase.initialShift)
+                            + ((inputArray[inputByteNum + 1] & 0xFF) << codingCase.middleShift)
+                            + (((inputArray[inputByteNum + 2] & 0xFF).TripleShift(codingCase.finalShift)) & codingCase.finalMask) & /*(short)*/0x7FFF); // LUCENENET: Removed unnecessary cast
                     }
                     inputByteNum += codingCase.advanceBytes;
                     if (++caseNum == CODING_CASES.Length)
@@ -175,13 +179,14 @@
 
                 if (inputByteNum + 1 < inputLength) // codingCase.numBytes must be 3
                 {
-                    outputArray[outputCharNum++] = (char)((((inputArray[inputByteNum] & 0xFF) << codingCase.initialShift) + ((inputArray[inputByteNum + 1] & 0xFF) << codingCase.middleShift)) & (short)0x7FFF);
+                    outputArray[outputCharNum++] = (char)((((inputArray[inputByteNum] & 0xFF) << codingCase.initialShift) + ((inputArray[inputByteNum + 1] & 0xFF) <<
+                        codingCase.middleShift)) & /*(short)*/0x7FFF); // LUCENENET: Removed unnecessary cast
                     // Add trailing char containing the number of full bytes in final char
                     outputArray[outputCharNum++] = (char)1;
                 }
                 else if (inputByteNum < inputLength)
                 {
-                    outputArray[outputCharNum++] = (char)(((inputArray[inputByteNum] & 0xFF) << codingCase.initialShift) & (short)0x7FFF);
+                    outputArray[outputCharNum++] = (char)(((inputArray[inputByteNum] & 0xFF) << codingCase.initialShift) & /*(short)*/0x7FFF); // LUCENENET: Removed unnecessary cast
                     // Add trailing char containing the number of full bytes in final char
                     outputArray[outputCharNum++] = caseNum == 0 ? (char)1 : (char)0;
                 } // No left over bits - last char is completely filled.
@@ -246,18 +251,18 @@
                     {
                         if (0 == caseNum)
                         {
-                            outputArray[outputByteNum] = (sbyte)((short)((ushort)inputChar >> codingCase.initialShift));
+                            outputArray[outputByteNum] = (sbyte)(inputChar.TripleShift(codingCase.initialShift));
                         }
                         else
                         {
-                            outputArray[outputByteNum] += (sbyte)((short)((ushort)inputChar >> codingCase.initialShift));
+                            outputArray[outputByteNum] += (sbyte)(inputChar.TripleShift(codingCase.initialShift));
                         }
                         outputArray[outputByteNum + 1] = (sbyte)((inputChar & codingCase.finalMask) << codingCase.finalShift);
                     } // numBytes is 3
                     else
                     {
-                        outputArray[outputByteNum] += (sbyte)((short)((ushort)inputChar >> codingCase.initialShift));
-                        outputArray[outputByteNum + 1] = (sbyte)((int)((uint)(inputChar & codingCase.middleMask) >> codingCase.middleShift));
+                        outputArray[outputByteNum] += (sbyte)(inputChar.TripleShift(codingCase.initialShift));
+                        outputArray[outputByteNum + 1] = (sbyte)((inputChar & codingCase.middleMask).TripleShift(codingCase.middleShift));
                         outputArray[outputByteNum + 2] = (sbyte)((inputChar & codingCase.finalMask) << codingCase.finalShift);
                     }
                     outputByteNum += codingCase.advanceBytes;
@@ -273,17 +278,17 @@
                 {
                     outputArray[outputByteNum] = 0;
                 }
-                outputArray[outputByteNum] += (sbyte)((short)((ushort)inputChar >> codingCase.initialShift));
+                outputArray[outputByteNum] += (sbyte)(inputChar.TripleShift(codingCase.initialShift));
                 int bytesLeft = numOutputBytes - outputByteNum;
                 if (bytesLeft > 1)
                 {
                     if (2 == codingCase.numBytes)
                     {
-                        outputArray[outputByteNum + 1] = (sbyte)((int)((uint)(inputChar & codingCase.finalMask) >> codingCase.finalShift));
+                        outputArray[outputByteNum + 1] = (sbyte)((inputChar & codingCase.finalMask).TripleShift(codingCase.finalShift));
                     } // numBytes is 3
                     else
                     {
-                        outputArray[outputByteNum + 1] = (sbyte)((int)((uint)(inputChar & codingCase.middleMask) >> codingCase.middleShift));
+                        outputArray[outputByteNum + 1] = (sbyte)((inputChar & codingCase.middleMask).TripleShift(codingCase.middleShift));
                         if (bytesLeft > 2)
                         {
                             outputArray[outputByteNum + 2] = (sbyte)((inputChar & codingCase.finalMask) << codingCase.finalShift);
@@ -304,8 +309,8 @@
                 this.initialShift = initialShift;
                 this.middleShift = middleShift;
                 this.finalShift = finalShift;
-                this.finalMask = (short)((int)((uint)(short)0xFF >> finalShift));
-                this.middleMask = (short)((short)0xFF << middleShift);
+                this.finalMask = /*(short)*/((short)0xFF.TripleShift(finalShift)); // LUCENENET: Removed unnecessary cast
+                this.middleMask = (short)(/*(short)*/0xFF << middleShift); // LUCENENET: Removed unnecessary cast
             }
 
             internal CodingCase(int initialShift, int finalShift)
@@ -313,7 +318,7 @@
                 this.numBytes = 2;
                 this.initialShift = initialShift;
                 this.finalShift = finalShift;
-                this.finalMask = (short)((int)((uint)(short)0xFF >> finalShift));
+                this.finalMask = /*(short)*/((short)0xFF.TripleShift(finalShift)); // LUCENENET: Removed unnecessary cast
                 if (finalShift != 0)
                 {
                     advanceBytes = 1;
diff --git a/src/Lucene.Net/Util/IntroSorter.cs b/src/Lucene.Net/Util/IntroSorter.cs
index e06bf4b..a5ebd05 100644
--- a/src/Lucene.Net/Util/IntroSorter.cs
+++ b/src/Lucene.Net/Util/IntroSorter.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using System.Runtime.CompilerServices;
 
 namespace Lucene.Net.Util
@@ -69,7 +69,7 @@
                 return;
             }
 
-            int mid = (int)((uint)(from + to) >> 1);
+            int mid = (from + to).TripleShift(1);
 
             if (Compare(from, mid) > 0)
             {
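
The TripleShift calls introduced throughout these hunks come from J2N.Numerics and emulate Java's >>> (logical right shift on signed integers), which C# lacks as an operator. The midpoint change above relies on it: even if from + to overflows into the sign bit, a logical shift by 1 still yields the correct midpoint. A minimal sketch of the equivalence, using a local stand-in rather than the J2N extension method:

    using System;

    static class UnsignedShiftSketch
    {
        // Logical right shift on a signed int: shift the raw bit pattern, no sign extension.
        static int TripleShift(int value, int shift) => (int)((uint)value >> shift);

        static void Main()
        {
            int from = int.MaxValue - 10, to = int.MaxValue;
            int sum = unchecked(from + to);          // wraps to a negative int (-12)
            Console.WriteLine(sum >> 1);             // -6: arithmetic shift sign-extends
            Console.WriteLine(TripleShift(sum, 1));  // 2147483642: the true midpoint
        }
    }
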
diff --git a/src/Lucene.Net/Util/LongBitSet.cs b/src/Lucene.Net/Util/LongBitSet.cs
index 7295513..ca87789 100644
--- a/src/Lucene.Net/Util/LongBitSet.cs
+++ b/src/Lucene.Net/Util/LongBitSet.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -73,7 +73,7 @@
         /// Returns the number of 64 bit words it would take to hold <paramref name="numBits"/>. </summary>
         public static int Bits2words(long numBits)
         {
-            int numLong = (int)((long)((ulong)numBits >> 6));
+            int numLong = (int)numBits.TripleShift(6);
             if ((numBits & 63) != 0)
             {
                 numLong++;
@@ -329,7 +329,7 @@
             */
 
             long startmask = -1L << (int)startIndex;
-            long endmask = (long)(unchecked(((ulong)-1L)) >> (int)-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
+            long endmask = (-1L).TripleShift((int)-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
 
             if (startWord == endWord)
             {
@@ -368,7 +368,7 @@
             int endWord = (int)((endIndex - 1) >> 6);
 
             long startmask = -1L << (int)startIndex;
-            long endmask = (long)(0xffffffffffffffffUL >> (int)-endIndex);//-(int)((uint)1L >> (int)-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
+            long endmask = (-1L).TripleShift((int)-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
 
             if (startWord == endWord)
             {
@@ -456,7 +456,7 @@
             for (int i = numWords; --i >= 0; )
             {
                 h ^= bits[i];
-                h = (h << 1) | ((long)((ulong)h >> 63)); // rotate left
+                h = (h << 1) | (h.TripleShift(63)); // rotate left
             }
             // fold leftmost bits into right and add a constant to prevent
             // empty sets from returning 0, which is too common.
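
The endmask line above leans on the "due to wrap" comment: C#, like Java, masks a 64-bit shift count to its low six bits, so shifting by -endIndex behaves like shifting by 64 - (endIndex & 0x3f). A small worked check, with the logical shift written out locally:

    using System;

    static class EndMaskSketch
    {
        static long TripleShift(long value, int shift) => (long)((ulong)value >> shift);

        static void Main()
        {
            long endIndex = 70;
            long endmask = TripleShift(-1L, (int)-endIndex);               // shift count wraps to 58
            long expected = TripleShift(-1L, 64 - (int)(endIndex & 0x3f)); // also a shift by 58
            Console.WriteLine(endmask == expected);                        // True: low 6 bits set
        }
    }
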
diff --git a/src/Lucene.Net/Util/LongsRef.cs b/src/Lucene.Net/Util/LongsRef.cs
index 90f07e0..03443bc 100644
--- a/src/Lucene.Net/Util/LongsRef.cs
+++ b/src/Lucene.Net/Util/LongsRef.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -117,7 +118,7 @@
             long end = Offset + Length;
             for (int i = Offset; i < end; i++)
             {
-                result = prime * result + (int)(longs[i] ^ ((long)((ulong)longs[i] >> 32)));
+                result = prime * result + (int)(longs[i] ^ (longs[i].TripleShift(32)));
             }
             return result;
         }
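
The hash loop above folds each 64-bit element into 32 bits by XORing its high half onto its low half before mixing, the same idiom Java's Long.hashCode uses. A minimal sketch of the fold with the unsigned shift written out:

    using System;

    static class HashFoldSketch
    {
        // XOR the high 32 bits of a long onto its low 32 bits.
        static int FoldToInt32(long value) => (int)(value ^ (long)((ulong)value >> 32));

        static void Main()
        {
            long v = 0x1234_5678_9ABC_DEF0L;
            Console.WriteLine(FoldToInt32(v).ToString("X8")); // 88888888
        }
    }
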
diff --git a/src/Lucene.Net/Util/MathUtil.cs b/src/Lucene.Net/Util/MathUtil.cs
index 02de931..86b3fc2 100644
--- a/src/Lucene.Net/Util/MathUtil.cs
+++ b/src/Lucene.Net/Util/MathUtil.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using System;
 using System.Runtime.CompilerServices;
 
@@ -78,10 +78,10 @@
                 return a;
             }
             int commonTrailingZeros = (a | b).TrailingZeroCount();
-            a = (long)((ulong)a >> a.TrailingZeroCount());
+            a = a.TripleShift(a.TrailingZeroCount());
             while (true)
             {
-                b = (long)((ulong)b >> b.TrailingZeroCount());
+                b = b.TripleShift(b.TrailingZeroCount());
                 if (a == b)
                 {
                     break;
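
The loop above is part of a binary GCD: both operands are stripped of their trailing zero bits (factors of two) with a logical shift, and the shared power of two is restored at the end. A compact sketch of the full algorithm for non-negative inputs, using System.Numerics.BitOperations in place of the J2N TrailingZeroCount extension:

    using System;
    using System.Numerics;

    static class BinaryGcdSketch
    {
        static long Gcd(long a, long b)
        {
            if (a == 0) return b;
            if (b == 0) return a;
            int commonTwos = BitOperations.TrailingZeroCount(a | b); // shared factors of two
            a >>= BitOperations.TrailingZeroCount(a);
            while (true)
            {
                b >>= BitOperations.TrailingZeroCount(b);
                if (a == b) break;
                if (a > b) (a, b) = (b, a);
                b -= a;                                              // difference stays positive
            }
            return a << commonTwos;
        }

        static void Main() => Console.WriteLine(Gcd(48, 180)); // 12
    }
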
diff --git a/src/Lucene.Net/Util/Mutable/MutableValueDouble.cs b/src/Lucene.Net/Util/Mutable/MutableValueDouble.cs
index f61494a..2490cc8 100644
--- a/src/Lucene.Net/Util/Mutable/MutableValueDouble.cs
+++ b/src/Lucene.Net/Util/Mutable/MutableValueDouble.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using System.Runtime.CompilerServices;
 
 namespace Lucene.Net.Util.Mutable
@@ -81,7 +82,7 @@
         public override int GetHashCode()
         {
             long x = J2N.BitConversion.DoubleToInt64Bits(Value);
-            return (int)x + (int)((long)((ulong)x >> 32));
+            return (int)x + (int)x.TripleShift(32);
         }
     }
 }
\ No newline at end of file
diff --git a/src/Lucene.Net/Util/NumericUtils.cs b/src/Lucene.Net/Util/NumericUtils.cs
index 10e92f7..54b969b 100644
--- a/src/Lucene.Net/Util/NumericUtils.cs
+++ b/src/Lucene.Net/Util/NumericUtils.cs
@@ -552,12 +552,12 @@
         ///         terms with a shift value of <c>0</c>. </returns>
         public static TermsEnum FilterPrefixCodedInt64s(TermsEnum termsEnum)
         {
-            return new FilteredTermsEnumAnonymousInnerClassHelper(termsEnum);
+            return new FilteredTermsEnumAnonymousClass(termsEnum);
         }
 
-        private class FilteredTermsEnumAnonymousInnerClassHelper : FilteredTermsEnum
+        private class FilteredTermsEnumAnonymousClass : FilteredTermsEnum
         {
-            public FilteredTermsEnumAnonymousInnerClassHelper(TermsEnum termsEnum)
+            public FilteredTermsEnumAnonymousClass(TermsEnum termsEnum)
                 : base(termsEnum, false)
             {
             }
@@ -582,12 +582,12 @@
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         public static TermsEnum FilterPrefixCodedInt32s(TermsEnum termsEnum)
         {
-            return new FilteredTermsEnumAnonymousInnerClassHelper2(termsEnum);
+            return new FilteredTermsEnumAnonymousClass2(termsEnum);
         }
 
-        private class FilteredTermsEnumAnonymousInnerClassHelper2 : FilteredTermsEnum
+        private class FilteredTermsEnumAnonymousClass2 : FilteredTermsEnum
         {
-            public FilteredTermsEnumAnonymousInnerClassHelper2(TermsEnum termsEnum)
+            public FilteredTermsEnumAnonymousClass2(TermsEnum termsEnum)
                 : base(termsEnum, false)
             {
             }
diff --git a/src/Lucene.Net/Util/OfflineSorter.cs b/src/Lucene.Net/Util/OfflineSorter.cs
index f9cffae..7f299b9 100644
--- a/src/Lucene.Net/Util/OfflineSorter.cs
+++ b/src/Lucene.Net/Util/OfflineSorter.cs
@@ -388,7 +388,7 @@
 
             var @out = new ByteSequencesWriter(outputFile);
 
-            PriorityQueue<FileAndTop> queue = new PriorityQueueAnonymousInnerClassHelper(this, merges.Count);
+            PriorityQueue<FileAndTop> queue = new PriorityQueueAnonymousClass(this, merges.Count);
 
             var streams = new ByteSequencesReader[merges.Count];
             try
@@ -440,11 +440,11 @@
             }
         }
 
-        private class PriorityQueueAnonymousInnerClassHelper : PriorityQueue<FileAndTop>
+        private class PriorityQueueAnonymousClass : PriorityQueue<FileAndTop>
         {
             private readonly OfflineSorter outerInstance;
 
-            public PriorityQueueAnonymousInnerClassHelper(OfflineSorter outerInstance, int size)
+            public PriorityQueueAnonymousClass(OfflineSorter outerInstance, int size)
                 : base(size)
             {
                 this.outerInstance = outerInstance;
diff --git a/src/Lucene.Net/Util/OpenBitSet.cs b/src/Lucene.Net/Util/OpenBitSet.cs
index 48e5ec6..de94d41 100644
--- a/src/Lucene.Net/Util/OpenBitSet.cs
+++ b/src/Lucene.Net/Util/OpenBitSet.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -260,7 +260,7 @@
             if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < numBits);
             int i = index >> 6; // div 64
             int bit = index & 0x3f; // mod 64
-            return ((int)((long)((ulong)m_bits[i] >> bit))) & 0x01;
+            return ((int)m_bits[i].TripleShift(bit)) & 0x01;
         }
 
         /*
@@ -467,7 +467,7 @@
             int endWord = (int)((endIndex - 1) >> 6);
 
             long startmask = -1L << (int)startIndex;
-            long endmask = -(int)((uint)1L >> (int)-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
+            long endmask = (-1L).TripleShift((int)-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
 
             // invert masks since we are clearing
             startmask = ~startmask;
@@ -757,13 +757,13 @@
         /// </summary>
         public virtual long NextSetBit(long index)
         {
-            int i = (int)((long)((ulong)index >> 6));
+            int i = (int)index.TripleShift(6);
             if (i >= m_wlen)
             {
                 return -1;
             }
             int subIndex = (int)index & 0x3f; // index within the word
-            long word = (long)((ulong)m_bits[i] >> subIndex); // skip all the bits to the right of index
+            long word = m_bits[i].TripleShift(subIndex); // skip all the bits to the right of index
 
             if (word != 0)
             {
@@ -1110,7 +1110,7 @@
             for (int i = m_bits.Length; --i >= 0; )
             {
                 h ^= m_bits[i];
-                h = (h << 1) | ((long)((ulong)h >> 63)); // rotate left
+                h = (h << 1) | (h.TripleShift(63)); // rotate left
             }
             // fold leftmost bits into right and add a constant to prevent
             // empty sets from returning 0, which is too common.
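
The hash loops here and in LongBitSet rotate the accumulator left by one bit per word; (h << 1) | h.TripleShift(63) is a 64-bit rotate because the bit pushed out at the top re-enters at the bottom. A tiny check of that identity against BitOperations.RotateLeft:

    using System;
    using System.Numerics;

    static class RotateSketch
    {
        static long RotateLeft1(long h) => (h << 1) | (long)((ulong)h >> 63);

        static void Main()
        {
            long h = unchecked((long)0x8000_0000_0000_0001UL);      // top and bottom bits set
            long viaShifts = RotateLeft1(h);                        // 3
            long viaIntrinsic = (long)BitOperations.RotateLeft((ulong)h, 1);
            Console.WriteLine(viaShifts == viaIntrinsic);           // True
        }
    }
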
diff --git a/src/Lucene.Net/Util/OpenBitSetIterator.cs b/src/Lucene.Net/Util/OpenBitSetIterator.cs
index a09cfe5..0bf27e8 100644
--- a/src/Lucene.Net/Util/OpenBitSetIterator.cs
+++ b/src/Lucene.Net/Util/OpenBitSetIterator.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using System.Runtime.CompilerServices;
 
 namespace Lucene.Net.Util
@@ -58,17 +59,17 @@
             if ((int)word == 0)
             {
                 wordShift += 32;
-                word = (long)((ulong)word >> 32);
+                word = word.TripleShift(32);
             }
             if ((word & 0x0000FFFF) == 0)
             {
                 wordShift += 16;
-                word = (long)((ulong)word >> 16);
+                word = word.TripleShift(16);
             }
             if ((word & 0x000000FF) == 0)
             {
                 wordShift += 8;
-                word = (long)((ulong)word >> 8);
+                word = word.TripleShift(8);
             }
             indexArray = BitUtil.BitList((byte)word);
         }
@@ -101,7 +102,7 @@
             {
                 if (word != 0)
                 {
-                    word = (long)((ulong)word >> 8);
+                    word = word.TripleShift(8);
                     wordShift += 8;
                 }
 
@@ -121,7 +122,7 @@
             }
 
             int bitIndex = (indexArray & 0x0f) + wordShift;
-            indexArray = (int)((uint)indexArray >> 4);
+            indexArray = indexArray.TripleShift(4);
             // should i<<6 be cached as a separate variable?
             // it would only save one cycle in the best circumstances.
             return curDocId = (i << 6) + bitIndex;
@@ -137,7 +138,7 @@
                 return curDocId = NO_MORE_DOCS;
             }
             wordShift = target & 0x3f;
-            word = (long)((ulong)arr[i] >> wordShift);
+            word = arr[i].TripleShift(wordShift);
             if (word != 0)
             {
                 wordShift--; // compensate for 1 based arrIndex
@@ -158,7 +159,7 @@
             Shift();
 
             int bitIndex = (indexArray & 0x0f) + wordShift;
-            indexArray = (int)((uint)indexArray >> 4);
+            indexArray = indexArray.TripleShift(4);
             // should i<<6 be cached as a separate variable?
             // it would only save one cycle in the best circumstances.
             return curDocId = (i << 6) + bitIndex;
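
Shift() above narrows the current 64-bit word in halves (32, then 16, then 8 bits) with logical shifts until the lowest non-zero byte sits in the low 8 bits, recording in wordShift how far it moved; BitUtil.BitList then yields the set bits of that byte as packed 4-bit entries, which is why the iterator consumes indexArray a nibble at a time with TripleShift(4). A minimal sketch of just the narrowing step, with the shifts written out locally:

    using System;

    static class LowestByteSketch
    {
        static long TripleShift(long value, int shift) => (long)((ulong)value >> shift);

        // Shift a non-zero word right until its lowest non-zero byte is in the low 8 bits,
        // returning the shifted word and the number of bit positions skipped.
        static (long word, int shift) NarrowToLowestByte(long word)
        {
            int shift = 0;
            if ((int)word == 0)       { shift += 32; word = TripleShift(word, 32); }
            if ((word & 0xFFFF) == 0) { shift += 16; word = TripleShift(word, 16); }
            if ((word & 0xFF) == 0)   { shift += 8;  word = TripleShift(word, 8); }
            return (word, shift);
        }

        static void Main()
        {
            var (word, shift) = NarrowToLowestByte(0x00A5_0000_0000_0000L);
            Console.WriteLine($"{word & 0xFF:X2} at bit {shift}"); // A5 at bit 48
        }
    }
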
diff --git a/src/Lucene.Net/Util/PForDeltaDocIdSet.cs b/src/Lucene.Net/Util/PForDeltaDocIdSet.cs
index e98da7c..84c2a64 100644
--- a/src/Lucene.Net/Util/PForDeltaDocIdSet.cs
+++ b/src/Lucene.Net/Util/PForDeltaDocIdSet.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -183,7 +183,7 @@
                 {
                     deltaSum += 1 + buffer[i];
                 }
-                int blockSize = (int)((uint)(deltaSum + 0x07) >> 3); // round to the next byte
+                int blockSize = (deltaSum + 0x07).TripleShift(3); // round to the next byte
                 ++blockSize; // header
                 if (bufferSize < BLOCK_SIZE)
                 {
@@ -233,7 +233,7 @@
                         if (buffer[i] > mask)
                         {
                             exceptionIndices[ex] = i;
-                            exceptions[ex++] = (int)((uint)buffer[i] >> bitsPerValue);
+                            exceptions[ex++] = buffer[i].TripleShift(bitsPerValue);
                             buffer[i] &= mask;
                         }
                     }
@@ -252,15 +252,15 @@
                 if (numExceptions > 0)
                 {
                     if (Debugging.AssertsEnabled) Debugging.Assert(bitsPerException > 0);
-                    data.WriteByte((byte)(sbyte)numExceptions);
-                    data.WriteByte((byte)(sbyte)bitsPerException);
+                    data.WriteByte((byte)numExceptions);
+                    data.WriteByte((byte)bitsPerException);
                     PackedInt32s.IEncoder encoder = PackedInt32s.GetEncoder(PackedInt32s.Format.PACKED, PackedInt32s.VERSION_CURRENT, bitsPerException);
                     int numIterations = (numExceptions + encoder.ByteValueCount - 1) / encoder.ByteValueCount;
                     encoder.Encode(exceptions, 0, data.Bytes, data.Length, numIterations);
                     data.Length += (int)PackedInt32s.Format.PACKED.ByteCount(PackedInt32s.VERSION_CURRENT, numExceptions, bitsPerException);
                     for (int i = 0; i < numExceptions; ++i)
                     {
-                        data.WriteByte((byte)(sbyte)exceptionIndices[i]);
+                        data.WriteByte((byte)exceptionIndices[i]);
                     }
                 }
             }
@@ -274,7 +274,7 @@
                     doc += 1 + buffer[i];
                     while (doc >= 8)
                     {
-                        data.WriteByte((byte)(sbyte)current);
+                        data.WriteByte((byte)current);
                         current = 0;
                         doc -= 8;
                     }
@@ -282,7 +282,7 @@
                 }
                 if (current != 0)
                 {
-                    data.WriteByte((byte)(sbyte)current);
+                    data.WriteByte((byte)current);
                 }
             }
 
@@ -305,7 +305,7 @@
                     {
                         token |= HAS_EXCEPTIONS;
                     }
-                    data.WriteByte((byte)(sbyte)token);
+                    data.WriteByte((byte)token);
                     PforEncode();
                 }
                 else
@@ -313,13 +313,13 @@
                     // use unary
                     blockSize = unaryBlockSize;
                     int token = UNARY | (bufferSize < BLOCK_SIZE ? LAST_BLOCK : 0);
-                    data.WriteByte((byte)(sbyte)token);
+                    data.WriteByte((byte)token);
                     UnaryEncode();
                 }
 
                 if (bufferSize < BLOCK_SIZE)
                 {
-                    data.WriteByte((byte)(sbyte)bufferSize);
+                    data.WriteByte((byte)bufferSize);
                 }
 
                 ++numBlocks;
@@ -483,7 +483,7 @@
                 for (int i = 0; i < BLOCK_SIZE; )
                 {
                     var b = data[offset++];
-                    for (int bitList = BitUtil.BitList(b); bitList != 0; ++i, bitList = (int)((uint)bitList >> 4))
+                    for (int bitList = BitUtil.BitList(b); bitList != 0; ++i, bitList = bitList.TripleShift(4))
                     {
                         nextDocs[i] = docID + (bitList & 0x0F);
                     }
@@ -561,7 +561,7 @@
                 // we found a window containing our target, let's binary search now
                 while (lo <= hi)
                 {
-                    int mid = (int)((uint)(lo + hi) >> 1);
+                    int mid = (lo + hi).TripleShift(1);
                     int midDocID = (int)docIDs.Get(mid);
                     if (midDocID <= target)
                     {
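
The encoder above is patched frame-of-reference: each delta is stored in bitsPerValue bits, and any delta that does not fit keeps only its low bits in the packed block while the overflowing high bits (obtained with the logical shift by bitsPerValue) go into a separate exception list together with the position they belong to. A minimal sketch of that split and its reconstruction, assuming bitsPerValue is between 1 and 31:

    using System;

    static class PforSplitSketch
    {
        static void Main()
        {
            int bitsPerValue = 5;
            int mask = (1 << bitsPerValue) - 1;                     // 0x1F
            int value = 1000;                                       // does not fit in 5 bits

            int lowBits = value & mask;                             // goes into the packed block
            int exception = (int)((uint)value >> bitsPerValue);     // goes into the exception list

            int restored = lowBits | (exception << bitsPerValue);
            Console.WriteLine(restored == value);                   // True
        }
    }
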
diff --git a/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs
index 294e514..9d3d85f 100644
--- a/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs
+++ b/src/Lucene.Net/Util/Packed/AbstractBlockPackedWriter.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -47,10 +48,10 @@
             int k = 0;
             while ((i & ~0x7FL) != 0L && k++ < 8)
             {
-                @out.WriteByte(unchecked((byte)(sbyte)((i & 0x7FL) | 0x80L)));
-                i = (long)((ulong)i >> 7);
+                @out.WriteByte((byte)((i & 0x7FL) | 0x80L));
+                i = i.TripleShift(7);
             }
-            @out.WriteByte((byte)(sbyte)i);
+            @out.WriteByte((byte)i);
         }
 
         protected DataOutput m_out;
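
WriteVInt64 above emits a value as 7-bit groups, least-significant group first, setting the high bit of every byte except the last as a continuation flag; the logical shift by 7 advances to the next group without dragging the sign bit along, and the k < 8 cap bounds the output at 9 bytes even for negative values. A small sketch of the same encoding into a byte list rather than a DataOutput:

    using System;
    using System.Collections.Generic;

    static class VLongSketch
    {
        static List<byte> WriteVInt64(long i)
        {
            var bytes = new List<byte>();
            int k = 0;
            while ((i & ~0x7FL) != 0L && k++ < 8)
            {
                bytes.Add((byte)((i & 0x7FL) | 0x80L));   // low 7 bits plus continuation flag
                i = (long)((ulong)i >> 7);                // logical shift to the next group
            }
            bytes.Add((byte)i);
            return bytes;
        }

        static void Main() =>
            Console.WriteLine(string.Join(" ", WriteVInt64(300))); // 172 2 (0xAC 0x02)
    }
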
diff --git a/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs b/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs
index 0a9a670..a90b0a9 100644
--- a/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs
+++ b/src/Lucene.Net/Util/Packed/AbstractPagedMutable.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Runtime.CompilerServices;
@@ -79,7 +80,7 @@
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         internal int PageIndex(long index)
         {
-            return (int)((long)((ulong)index >> pageShift));
+            return (int)index.TripleShift(pageShift);
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -160,7 +161,7 @@
                 T result = (T)this;
                 return result;
             }
-            long extra = (long)((ulong)minSize >> 3);
+            long extra = minSize.TripleShift(3);
             if (extra < 3)
             {
                 extra = 3;
diff --git a/src/Lucene.Net/Util/Packed/BlockPackedReader.cs b/src/Lucene.Net/Util/Packed/BlockPackedReader.cs
index f32c4ca..ac80ba4 100644
--- a/src/Lucene.Net/Util/Packed/BlockPackedReader.cs
+++ b/src/Lucene.Net/Util/Packed/BlockPackedReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
 using System;
@@ -46,7 +47,7 @@
             for (int i = 0; i < numBlocks; ++i)
             {
                 int token = @in.ReadByte() & 0xFF;
-                int bitsPerValue = (int)((uint)token >> AbstractBlockPackedWriter.BPV_SHIFT);
+                int bitsPerValue = token.TripleShift(AbstractBlockPackedWriter.BPV_SHIFT);
                 if (bitsPerValue > 64)
                 {
                     throw new Exception("Corrupted");
@@ -84,7 +85,7 @@
         public override long Get(long index)
         {
             if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < valueCount);
-            int block = (int)((long)((ulong)index >> blockShift));
+            int block = (int)(index.TripleShift(blockShift));
             int idx = (int)(index & blockMask);
             return (minValues == null ? 0 : minValues[block]) + subReaders[block].Get(idx);
         }
diff --git a/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs b/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs
index 1b43ac8..b35825d 100644
--- a/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs
+++ b/src/Lucene.Net/Util/Packed/BlockPackedReaderIterator.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -37,7 +38,7 @@
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         internal static long ZigZagDecode(long n)
         {
-            return (((long)((ulong)n >> 1)) ^ -(n & 1));
+            return ((n.TripleShift(1)) ^ -(n & 1));
         }
 
         // same as DataInput.ReadVInt64 but supports negative values
@@ -48,50 +49,50 @@
         internal static long ReadVInt64(DataInput @in)
         {
             byte b = @in.ReadByte();
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return b;
             }
             long i = b & 0x7FL;
             b = @in.ReadByte();
             i |= (b & 0x7FL) << 7;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = @in.ReadByte();
             i |= (b & 0x7FL) << 14;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = @in.ReadByte();
             i |= (b & 0x7FL) << 21;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = @in.ReadByte();
             i |= (b & 0x7FL) << 28;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = @in.ReadByte();
             i |= (b & 0x7FL) << 35;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = @in.ReadByte();
             i |= (b & 0x7FL) << 42;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
             b = @in.ReadByte();
             i |= (b & 0x7FL) << 49;
-            if ((sbyte)b >= 0)
+            if (b <= sbyte.MaxValue) // LUCENENET: Optimized equivalent of "if ((sbyte)b >= 0)"
             {
                 return i;
             }
@@ -163,7 +164,7 @@
             while (count >= blockSize)
             {
                 int token = @in.ReadByte() & 0xFF;
-                int bitsPerValue = (int)((uint)token >> AbstractBlockPackedWriter.BPV_SHIFT);
+                int bitsPerValue = token.TripleShift(AbstractBlockPackedWriter.BPV_SHIFT);
                 if (bitsPerValue > 64)
                 {
                     throw new IOException("Corrupted");
@@ -257,7 +258,7 @@
         {
             int token = @in.ReadByte() & 0xFF;
             bool minEquals0 = (token & AbstractBlockPackedWriter.MIN_VALUE_EQUALS_0) != 0;
-            int bitsPerValue = (int)((uint)token >> AbstractBlockPackedWriter.BPV_SHIFT);
+            int bitsPerValue = token.TripleShift(AbstractBlockPackedWriter.BPV_SHIFT);
             if (bitsPerValue > 64)
             {
                 throw new IOException("Corrupted");
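
ZigZagDecode above undoes zigzag encoding, which interleaves the sign into the low bit so that values of small magnitude, negative or positive, encode to small numbers and therefore to short variable-length byte sequences; the b <= sbyte.MaxValue checks in ReadVInt64 simply test "continuation bit not set" without a signed cast. A small round-trip sketch pairing the decode with its standard encode counterpart:

    using System;

    static class ZigZagSketch
    {
        static long ZigZagEncode(long n) => (n << 1) ^ (n >> 63);             // 0,-1,1,-2,2 -> 0,1,2,3,4
        static long ZigZagDecode(long n) => (long)((ulong)n >> 1) ^ -(n & 1);

        static void Main()
        {
            foreach (long v in new[] { 0L, -1L, 1L, -2L, 2L, long.MinValue, long.MaxValue })
                Console.WriteLine($"{v} -> {ZigZagEncode(v)} -> {ZigZagDecode(ZigZagEncode(v))}");
        }
    }
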
diff --git a/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs b/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs
index c6f579a..4ec3645 100644
--- a/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs
+++ b/src/Lucene.Net/Util/Packed/BlockPackedWriter.cs
@@ -1,4 +1,4 @@
-using Lucene.Net.Diagnostics;
+using Lucene.Net.Diagnostics;
 using System;
 using System.Runtime.CompilerServices;
 
@@ -93,7 +93,7 @@
             }
 
             int token = (bitsRequired << BPV_SHIFT) | (min == 0 ? MIN_VALUE_EQUALS_0 : 0);
-            m_out.WriteByte((byte)(sbyte)token);
+            m_out.WriteByte((byte)token);
 
             if (min != 0)
             {
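
The header token written above packs two fields into one byte: the block's bits-per-value in the upper bits (above BPV_SHIFT) and a low-bit flag recording whether the block's minimum value is zero; the readers earlier in this diff recover the width with token.TripleShift(BPV_SHIFT). A tiny sketch of composing and decomposing such a token, with BPV_SHIFT = 1 and a single-bit flag assumed purely for illustration (not guaranteed to match the library's constants):

    using System;

    static class BlockTokenSketch
    {
        const int BPV_SHIFT = 1;           // assumed layout, for illustration only
        const int MIN_VALUE_EQUALS_0 = 1;

        static void Main()
        {
            int bitsRequired = 13;
            int token = (bitsRequired << BPV_SHIFT) | MIN_VALUE_EQUALS_0;

            int bitsPerValue = (int)((uint)token >> BPV_SHIFT);   // logical shift back out
            bool minEquals0 = (token & MIN_VALUE_EQUALS_0) != 0;
            Console.WriteLine($"{bitsPerValue} {minEquals0}");    // 13 True
        }
    }
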
diff --git a/src/Lucene.Net/Util/Packed/BulkOperation.cs b/src/Lucene.Net/Util/Packed/BulkOperation.cs
index 63281d4..0987bc8 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperation.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperation.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 
@@ -185,7 +186,7 @@
         {
             for (int j = 1; j <= 8; ++j)
             {
-                blocks[blocksOffset++] = (byte)((long)((ulong)block >> (64 - (j << 3))));
+                blocks[blocksOffset++] = (byte)block.TripleShift(64 - (j << 3));
             }
             return blocksOffset;
         }
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs
index a887472..af04385 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 
@@ -40,7 +41,7 @@
             int blocks = bitsPerValue;
             while ((blocks & 1) == 0)
             {
-                blocks = (int)((uint)blocks >> 1);
+                blocks = blocks.TripleShift(1);
             }
             this.longBlockCount = blocks;
             this.longValueCount = 64 * longBlockCount / bitsPerValue;
@@ -48,8 +49,8 @@
             int byteValueCount = longValueCount;
             while ((byteBlockCount & 1) == 0 && (byteValueCount & 1) == 0)
             {
-                byteBlockCount = (int)((uint)byteBlockCount >> 1);
-                byteValueCount = (int)((uint)byteValueCount >> 1);
+                byteBlockCount = byteBlockCount.TripleShift(1);
+                byteValueCount = byteValueCount.TripleShift(1);
             }
             this.byteBlockCount = byteBlockCount;
             this.byteValueCount = byteValueCount;
@@ -87,12 +88,12 @@
                 bitsLeft -= bitsPerValue;
                 if (bitsLeft < 0)
                 {
-                    values[valuesOffset++] = ((blocks[blocksOffset++] & ((1L << (bitsPerValue + bitsLeft)) - 1)) << -bitsLeft) | ((long)((ulong)blocks[blocksOffset] >> (64 + bitsLeft)));
+                    values[valuesOffset++] = ((blocks[blocksOffset++] & ((1L << (bitsPerValue + bitsLeft)) - 1)) << -bitsLeft) | (blocks[blocksOffset].TripleShift(64 + bitsLeft));
                     bitsLeft += 64;
                 }
                 else
                 {
-                    values[valuesOffset++] = ((long)((ulong)blocks[blocksOffset] >> bitsLeft)) & mask;
+                    values[valuesOffset++] = (blocks[blocksOffset].TripleShift(bitsLeft)) & mask;
                 }
             }
         }
@@ -114,11 +115,11 @@
                 {
                     // flush
                     int bits = 8 - bitsLeft;
-                    values[valuesOffset++] = nextValue | ((long)((ulong)bytes >> bits));
+                    values[valuesOffset++] = nextValue | (bytes.TripleShift(bits));
                     while (bits >= bitsPerValue)
                     {
                         bits -= bitsPerValue;
-                        values[valuesOffset++] = ((long)((ulong)bytes >> bits)) & mask;
+                        values[valuesOffset++] = (bytes.TripleShift(bits)) & mask;
                     }
                     // then buffer
                     bitsLeft = bitsPerValue - bits;
@@ -140,12 +141,12 @@
                 bitsLeft -= bitsPerValue;
                 if (bitsLeft < 0)
                 {
-                    values[valuesOffset++] = (int)(((blocks[blocksOffset++] & ((1L << (bitsPerValue + bitsLeft)) - 1)) << -bitsLeft) | ((long)((ulong)blocks[blocksOffset] >> (64 + bitsLeft))));
+                    values[valuesOffset++] = (int)(((blocks[blocksOffset++] & ((1L << (bitsPerValue + bitsLeft)) - 1)) << -bitsLeft) | (blocks[blocksOffset].TripleShift(64 + bitsLeft)));
                     bitsLeft += 64;
                 }
                 else
                 {
-                    values[valuesOffset++] = (int)(((long)((ulong)blocks[blocksOffset] >> bitsLeft)) & mask);
+                    values[valuesOffset++] = (int)((blocks[blocksOffset].TripleShift(bitsLeft)) & mask);
                 }
             }
         }
@@ -167,11 +168,11 @@
                 {
                     // flush
                     int bits = 8 - bitsLeft;
-                    values[valuesOffset++] = nextValue | ((int)((uint)bytes >> bits));
+                    values[valuesOffset++] = nextValue | (bytes.TripleShift(bits));
                     while (bits >= bitsPerValue)
                     {
                         bits -= bitsPerValue;
-                        values[valuesOffset++] = ((int)((uint)bytes >> bits)) & intMask;
+                        values[valuesOffset++] = (bytes.TripleShift(bits)) & intMask;
                     }
                     // then buffer
                     bitsLeft = bitsPerValue - bits;
@@ -201,7 +202,7 @@
                 } // bitsLeft < 0
                 else
                 {
-                    nextBlock |= (long)((ulong)values[valuesOffset] >> -bitsLeft);
+                    nextBlock |= values[valuesOffset].TripleShift(-bitsLeft);
                     blocks[blocksOffset++] = nextBlock;
                     nextBlock = (values[valuesOffset++] & ((1L << -bitsLeft) - 1)) << (64 + bitsLeft);
                     bitsLeft += 64;
@@ -229,7 +230,7 @@
                 } // bitsLeft < 0
                 else
                 {
-                    nextBlock |= ((uint)(values[valuesOffset] & 0xFFFFFFFFL) >> -bitsLeft);
+                    nextBlock |= (values[valuesOffset] & 0xFFFFFFFFL).TripleShift(-bitsLeft);
                     blocks[blocksOffset++] = nextBlock;
                     nextBlock = (values[valuesOffset++] & ((1L << -bitsLeft) - 1)) << (64 + bitsLeft);
                     bitsLeft += 64;
@@ -255,11 +256,11 @@
                 {
                     // flush as many blocks as possible
                     int bits = bitsPerValue - bitsLeft;
-                    blocks[blocksOffset++] = (byte)((uint)nextBlock | ((long)((ulong)v >> bits)));
+                    blocks[blocksOffset++] = (byte)((uint)nextBlock | (v.TripleShift(bits)));
                     while (bits >= 8)
                     {
                         bits -= 8;
-                        blocks[blocksOffset++] = (byte)((long)((ulong)v >> bits));
+                        blocks[blocksOffset++] = (byte)(v.TripleShift(bits));
                     }
                     // then buffer
                     bitsLeft = 8 - bits;
@@ -287,11 +288,11 @@
                 {
                     // flush as many blocks as possible
                     int bits = bitsPerValue - bitsLeft;
-                    blocks[blocksOffset++] = (byte)(nextBlock | ((int)((uint)v >> bits)));
+                    blocks[blocksOffset++] = (byte)(nextBlock | (v.TripleShift(bits)));
                     while (bits >= 8)
                     {
                         bits -= 8;
-                        blocks[blocksOffset++] = (byte)((int)((uint)v >> bits));
+                        blocks[blocksOffset++] = (byte)(v.TripleShift(bits));
                     }
                     // then buffer
                     bitsLeft = 8 - bits;
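
The generic decoder above walks packed 64-bit blocks with a bitsLeft cursor: a value that fits entirely in the current block is extracted with one logical shift and a mask, while a value straddling a block boundary takes its high part from the end of one block and its low part from the top of the next. A minimal random-access sketch of the same layout (values packed most-significant-first within each block, as in these routines):

    using System;

    static class BitUnpackSketch
    {
        static long TripleShift(long value, int shift) => (long)((ulong)value >> shift);

        // Read the idx-th bitsPerValue-wide value from blocks packed MSB-first.
        static long Get(long[] blocks, int bitsPerValue, int idx)
        {
            long mask = bitsPerValue == 64 ? -1L : (1L << bitsPerValue) - 1;
            int bitPos = idx * bitsPerValue;
            int block = bitPos >> 6, offset = bitPos & 63;
            int endBit = offset + bitsPerValue;
            if (endBit <= 64)
                return TripleShift(blocks[block], 64 - endBit) & mask;         // fits in one block
            int lowBits = endBit - 64;                                         // spills into the next block
            return ((blocks[block] << lowBits) | TripleShift(blocks[block + 1], 64 - lowBits)) & mask;
        }

        static void Main()
        {
            // Three 10-bit values (5, 1023, 77) packed into the top 30 bits of the first block.
            long[] blocks = { (5L << 54) | (1023L << 44) | (77L << 34), 0L };
            Console.WriteLine($"{Get(blocks, 10, 0)} {Get(blocks, 10, 1)} {Get(blocks, 10, 2)}"); // 5 1023 77
        }
    }
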
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked1.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked1.cs
index 76c53e1..38dbe3e 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked1.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked1.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -36,7 +38,7 @@
                 long block = blocks[blocksOffset++];
                 for (int shift = 63; shift >= 0; shift -= 1)
                 {
-                    values[valuesOffset++] = (int)(((long)((ulong)block >> shift)) & 1);
+                    values[valuesOffset++] = (int)((block.TripleShift(shift)) & 1);
                 }
             }
         }
@@ -46,13 +48,13 @@
             for (int j = 0; j < iterations; ++j)
             {
                 var block = blocks[blocksOffset++];
-                values[valuesOffset++] = ((int)((uint)block >> 7)) & 1;
-                values[valuesOffset++] = ((int)((uint)block >> 6)) & 1;
-                values[valuesOffset++] = ((int)((uint)block >> 5)) & 1;
-                values[valuesOffset++] = ((int)((uint)block >> 4)) & 1;
-                values[valuesOffset++] = ((int)((uint)block >> 3)) & 1;
-                values[valuesOffset++] = ((int)((uint)block >> 2)) & 1;
-                values[valuesOffset++] = ((int)((uint)block >> 1)) & 1;
+                values[valuesOffset++] = block.TripleShift(7) & 1;
+                values[valuesOffset++] = block.TripleShift(6) & 1;
+                values[valuesOffset++] = block.TripleShift(5) & 1;
+                values[valuesOffset++] = block.TripleShift(4) & 1;
+                values[valuesOffset++] = block.TripleShift(3) & 1;
+                values[valuesOffset++] = block.TripleShift(2) & 1;
+                values[valuesOffset++] = block.TripleShift(1) & 1;
                 values[valuesOffset++] = block & 1;
             }
         }
@@ -64,7 +66,7 @@
                 long block = blocks[blocksOffset++];
                 for (int shift = 63; shift >= 0; shift -= 1)
                 {
-                    values[valuesOffset++] = ((long)((ulong)block >> shift)) & 1;
+                    values[valuesOffset++] = (block.TripleShift(shift)) & 1;
                 }
             }
         }
@@ -74,13 +76,13 @@
             for (int j = 0; j < iterations; ++j)
             {
                 var block = blocks[blocksOffset++];
-                values[valuesOffset++] = ((int)((uint)block >> 7)) & 1;
-                values[valuesOffset++] = ((int)((uint)block >> 6)) & 1;
-                values[valuesOffset++] = ((int)((uint)block >> 5)) & 1;
-                values[valuesOffset++] = ((int)((uint)block >> 4)) & 1;
-                values[valuesOffset++] = ((int)((uint)block >> 3)) & 1;
-                values[valuesOffset++] = ((int)((uint)block >> 2)) & 1;
-                values[valuesOffset++] = ((int)((uint)block >> 1)) & 1;
+                values[valuesOffset++] = block.TripleShift(7) & 1;
+                values[valuesOffset++] = block.TripleShift(6) & 1;
+                values[valuesOffset++] = block.TripleShift(5) & 1;
+                values[valuesOffset++] = block.TripleShift(4) & 1;
+                values[valuesOffset++] = block.TripleShift(3) & 1;
+                values[valuesOffset++] = block.TripleShift(2) & 1;
+                values[valuesOffset++] = block.TripleShift(1) & 1;
                 values[valuesOffset++] = block & 1;
             }
         }
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked10.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked10.cs
index 521d7b6..92adcca 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked10.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked10.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,41 +36,41 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 54));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 44)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 34)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 24)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 14)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 4)) & 1023L);
+                values[valuesOffset++] = (int)(block0.TripleShift(54));
+                values[valuesOffset++] = (int)((block0.TripleShift(44)) & 1023L);
+                values[valuesOffset++] = (int)((block0.TripleShift(34)) & 1023L);
+                values[valuesOffset++] = (int)((block0.TripleShift(24)) & 1023L);
+                values[valuesOffset++] = (int)((block0.TripleShift(14)) & 1023L);
+                values[valuesOffset++] = (int)((block0.TripleShift(4)) & 1023L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 15L) << 6) | ((long)((ulong)block1 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 48)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 38)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 28)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 18)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 8)) & 1023L);
+                values[valuesOffset++] = (int)(((block0 & 15L) << 6) | (block1.TripleShift(58)));
+                values[valuesOffset++] = (int)((block1.TripleShift(48)) & 1023L);
+                values[valuesOffset++] = (int)((block1.TripleShift(38)) & 1023L);
+                values[valuesOffset++] = (int)((block1.TripleShift(28)) & 1023L);
+                values[valuesOffset++] = (int)((block1.TripleShift(18)) & 1023L);
+                values[valuesOffset++] = (int)((block1.TripleShift(8)) & 1023L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 255L) << 2) | ((long)((ulong)block2 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 52)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 42)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 32)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 22)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 12)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 2)) & 1023L);
+                values[valuesOffset++] = (int)(((block1 & 255L) << 2) | (block2.TripleShift(62)));
+                values[valuesOffset++] = (int)((block2.TripleShift(52)) & 1023L);
+                values[valuesOffset++] = (int)((block2.TripleShift(42)) & 1023L);
+                values[valuesOffset++] = (int)((block2.TripleShift(32)) & 1023L);
+                values[valuesOffset++] = (int)((block2.TripleShift(22)) & 1023L);
+                values[valuesOffset++] = (int)((block2.TripleShift(12)) & 1023L);
+                values[valuesOffset++] = (int)((block2.TripleShift(2)) & 1023L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 3L) << 8) | ((long)((ulong)block3 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 46)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 36)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 26)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 16)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 6)) & 1023L);
+                values[valuesOffset++] = (int)(((block2 & 3L) << 8) | (block3.TripleShift(56)));
+                values[valuesOffset++] = (int)((block3.TripleShift(46)) & 1023L);
+                values[valuesOffset++] = (int)((block3.TripleShift(36)) & 1023L);
+                values[valuesOffset++] = (int)((block3.TripleShift(26)) & 1023L);
+                values[valuesOffset++] = (int)((block3.TripleShift(16)) & 1023L);
+                values[valuesOffset++] = (int)((block3.TripleShift(6)) & 1023L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 63L) << 4) | ((long)((ulong)block4 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 50)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 40)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 30)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 20)) & 1023L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 10)) & 1023L);
+                values[valuesOffset++] = (int)(((block3 & 63L) << 4) | (block4.TripleShift(60)));
+                values[valuesOffset++] = (int)((block4.TripleShift(50)) & 1023L);
+                values[valuesOffset++] = (int)((block4.TripleShift(40)) & 1023L);
+                values[valuesOffset++] = (int)((block4.TripleShift(30)) & 1023L);
+                values[valuesOffset++] = (int)((block4.TripleShift(20)) & 1023L);
+                values[valuesOffset++] = (int)((block4.TripleShift(10)) & 1023L);
                 values[valuesOffset++] = (int)(block4 & 1023L);
             }
         }
@@ -79,11 +81,11 @@
             {
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 2) | ((int)((uint)byte1 >> 6));
+                values[valuesOffset++] = (byte0 << 2) | (byte1.TripleShift(6));
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 63) << 4) | ((int)((uint)byte2 >> 4));
+                values[valuesOffset++] = ((byte1 & 63) << 4) | (byte2.TripleShift(4));
                 int byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 15) << 6) | ((int)((uint)byte3 >> 2));
+                values[valuesOffset++] = ((byte2 & 15) << 6) | (byte3.TripleShift(2));
                 int byte4 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte3 & 3) << 8) | byte4;
             }
@@ -94,41 +96,41 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 54);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 44)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 34)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 24)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 14)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 4)) & 1023L;
+                values[valuesOffset++] = (block0.TripleShift(54));
+                values[valuesOffset++] = (block0.TripleShift(44)) & 1023L;
+                values[valuesOffset++] = (block0.TripleShift(34)) & 1023L;
+                values[valuesOffset++] = (block0.TripleShift(24)) & 1023L;
+                values[valuesOffset++] = (block0.TripleShift(14)) & 1023L;
+                values[valuesOffset++] = (block0.TripleShift(4)) & 1023L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 15L) << 6) | ((long)((ulong)block1 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 48)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 38)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 28)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 18)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 8)) & 1023L;
+                values[valuesOffset++] = ((block0 & 15L) << 6) | (block1.TripleShift(58));
+                values[valuesOffset++] = (block1.TripleShift(48)) & 1023L;
+                values[valuesOffset++] = (block1.TripleShift(38)) & 1023L;
+                values[valuesOffset++] = (block1.TripleShift(28)) & 1023L;
+                values[valuesOffset++] = (block1.TripleShift(18)) & 1023L;
+                values[valuesOffset++] = (block1.TripleShift(8)) & 1023L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 255L) << 2) | ((long)((ulong)block2 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 52)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 42)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 32)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 22)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 12)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 2)) & 1023L;
+                values[valuesOffset++] = ((block1 & 255L) << 2) | (block2.TripleShift(62));
+                values[valuesOffset++] = (block2.TripleShift(52)) & 1023L;
+                values[valuesOffset++] = (block2.TripleShift(42)) & 1023L;
+                values[valuesOffset++] = (block2.TripleShift(32)) & 1023L;
+                values[valuesOffset++] = (block2.TripleShift(22)) & 1023L;
+                values[valuesOffset++] = (block2.TripleShift(12)) & 1023L;
+                values[valuesOffset++] = (block2.TripleShift(2)) & 1023L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 3L) << 8) | ((long)((ulong)block3 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 46)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 36)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 26)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 16)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 6)) & 1023L;
+                values[valuesOffset++] = ((block2 & 3L) << 8) | (block3.TripleShift(56));
+                values[valuesOffset++] = (block3.TripleShift(46)) & 1023L;
+                values[valuesOffset++] = (block3.TripleShift(36)) & 1023L;
+                values[valuesOffset++] = (block3.TripleShift(26)) & 1023L;
+                values[valuesOffset++] = (block3.TripleShift(16)) & 1023L;
+                values[valuesOffset++] = (block3.TripleShift(6)) & 1023L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 63L) << 4) | ((long)((ulong)block4 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 50)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 40)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 30)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 20)) & 1023L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 10)) & 1023L;
+                values[valuesOffset++] = ((block3 & 63L) << 4) | (block4.TripleShift(60));
+                values[valuesOffset++] = (block4.TripleShift(50)) & 1023L;
+                values[valuesOffset++] = (block4.TripleShift(40)) & 1023L;
+                values[valuesOffset++] = (block4.TripleShift(30)) & 1023L;
+                values[valuesOffset++] = (block4.TripleShift(20)) & 1023L;
+                values[valuesOffset++] = (block4.TripleShift(10)) & 1023L;
                 values[valuesOffset++] = block4 & 1023L;
             }
         }
@@ -139,11 +141,11 @@
             {
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 2) | ((long)((ulong)byte1 >> 6));
+                values[valuesOffset++] = (byte0 << 2) | (byte1.TripleShift(6));
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 63) << 4) | ((long)((ulong)byte2 >> 4));
+                values[valuesOffset++] = ((byte1 & 63) << 4) | (byte2.TripleShift(4));
                 long byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 15) << 6) | ((long)((ulong)byte3 >> 2));
+                values[valuesOffset++] = ((byte2 & 15) << 6) | (byte3.TripleShift(2));
                 long byte4 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte3 & 3) << 8) | byte4;
             }
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked11.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked11.cs
index f19f299..c945cc1 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked11.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked11.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,79 +36,79 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 53));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 42)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 31)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 20)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 9)) & 2047L);
+                values[valuesOffset++] = (int)(block0.TripleShift(53));
+                values[valuesOffset++] = (int)((block0.TripleShift(42)) & 2047L);
+                values[valuesOffset++] = (int)((block0.TripleShift(31)) & 2047L);
+                values[valuesOffset++] = (int)((block0.TripleShift(20)) & 2047L);
+                values[valuesOffset++] = (int)((block0.TripleShift(9)) & 2047L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 511L) << 2) | ((long)((ulong)block1 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 51)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 40)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 29)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 18)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 7)) & 2047L);
+                values[valuesOffset++] = (int)(((block0 & 511L) << 2) | (block1.TripleShift(62)));
+                values[valuesOffset++] = (int)((block1.TripleShift(51)) & 2047L);
+                values[valuesOffset++] = (int)((block1.TripleShift(40)) & 2047L);
+                values[valuesOffset++] = (int)((block1.TripleShift(29)) & 2047L);
+                values[valuesOffset++] = (int)((block1.TripleShift(18)) & 2047L);
+                values[valuesOffset++] = (int)((block1.TripleShift(7)) & 2047L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 127L) << 4) | ((long)((ulong)block2 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 49)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 38)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 27)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 16)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 5)) & 2047L);
+                values[valuesOffset++] = (int)(((block1 & 127L) << 4) | (block2.TripleShift(60)));
+                values[valuesOffset++] = (int)((block2.TripleShift(49)) & 2047L);
+                values[valuesOffset++] = (int)((block2.TripleShift(38)) & 2047L);
+                values[valuesOffset++] = (int)((block2.TripleShift(27)) & 2047L);
+                values[valuesOffset++] = (int)((block2.TripleShift(16)) & 2047L);
+                values[valuesOffset++] = (int)((block2.TripleShift(5)) & 2047L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 31L) << 6) | ((long)((ulong)block3 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 47)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 36)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 25)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 14)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 3)) & 2047L);
+                values[valuesOffset++] = (int)(((block2 & 31L) << 6) | (block3.TripleShift(58)));
+                values[valuesOffset++] = (int)((block3.TripleShift(47)) & 2047L);
+                values[valuesOffset++] = (int)((block3.TripleShift(36)) & 2047L);
+                values[valuesOffset++] = (int)((block3.TripleShift(25)) & 2047L);
+                values[valuesOffset++] = (int)((block3.TripleShift(14)) & 2047L);
+                values[valuesOffset++] = (int)((block3.TripleShift(3)) & 2047L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 7L) << 8) | ((long)((ulong)block4 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 45)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 34)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 23)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 12)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 1)) & 2047L);
+                values[valuesOffset++] = (int)(((block3 & 7L) << 8) | (block4.TripleShift(56)));
+                values[valuesOffset++] = (int)((block4.TripleShift(45)) & 2047L);
+                values[valuesOffset++] = (int)((block4.TripleShift(34)) & 2047L);
+                values[valuesOffset++] = (int)((block4.TripleShift(23)) & 2047L);
+                values[valuesOffset++] = (int)((block4.TripleShift(12)) & 2047L);
+                values[valuesOffset++] = (int)((block4.TripleShift(1)) & 2047L);
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block4 & 1L) << 10) | ((long)((ulong)block5 >> 54)));
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 43)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 32)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 21)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 10)) & 2047L);
+                values[valuesOffset++] = (int)(((block4 & 1L) << 10) | (block5.TripleShift(54)));
+                values[valuesOffset++] = (int)((block5.TripleShift(43)) & 2047L);
+                values[valuesOffset++] = (int)((block5.TripleShift(32)) & 2047L);
+                values[valuesOffset++] = (int)((block5.TripleShift(21)) & 2047L);
+                values[valuesOffset++] = (int)((block5.TripleShift(10)) & 2047L);
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block5 & 1023L) << 1) | ((long)((ulong)block6 >> 63)));
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 52)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 41)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 30)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 19)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 8)) & 2047L);
+                values[valuesOffset++] = (int)(((block5 & 1023L) << 1) | (block6.TripleShift(63)));
+                values[valuesOffset++] = (int)((block6.TripleShift(52)) & 2047L);
+                values[valuesOffset++] = (int)((block6.TripleShift(41)) & 2047L);
+                values[valuesOffset++] = (int)((block6.TripleShift(30)) & 2047L);
+                values[valuesOffset++] = (int)((block6.TripleShift(19)) & 2047L);
+                values[valuesOffset++] = (int)((block6.TripleShift(8)) & 2047L);
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block6 & 255L) << 3) | ((long)((ulong)block7 >> 61)));
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 50)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 39)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 28)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 17)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 6)) & 2047L);
+                values[valuesOffset++] = (int)(((block6 & 255L) << 3) | (block7.TripleShift(61)));
+                values[valuesOffset++] = (int)((block7.TripleShift(50)) & 2047L);
+                values[valuesOffset++] = (int)((block7.TripleShift(39)) & 2047L);
+                values[valuesOffset++] = (int)((block7.TripleShift(28)) & 2047L);
+                values[valuesOffset++] = (int)((block7.TripleShift(17)) & 2047L);
+                values[valuesOffset++] = (int)((block7.TripleShift(6)) & 2047L);
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block7 & 63L) << 5) | ((long)((ulong)block8 >> 59)));
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 48)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 37)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 26)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 15)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 4)) & 2047L);
+                values[valuesOffset++] = (int)(((block7 & 63L) << 5) | (block8.TripleShift(59)));
+                values[valuesOffset++] = (int)((block8.TripleShift(48)) & 2047L);
+                values[valuesOffset++] = (int)((block8.TripleShift(37)) & 2047L);
+                values[valuesOffset++] = (int)((block8.TripleShift(26)) & 2047L);
+                values[valuesOffset++] = (int)((block8.TripleShift(15)) & 2047L);
+                values[valuesOffset++] = (int)((block8.TripleShift(4)) & 2047L);
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block8 & 15L) << 7) | ((long)((ulong)block9 >> 57)));
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 46)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 35)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 24)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 13)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 2)) & 2047L);
+                values[valuesOffset++] = (int)(((block8 & 15L) << 7) | (block9.TripleShift(57)));
+                values[valuesOffset++] = (int)((block9.TripleShift(46)) & 2047L);
+                values[valuesOffset++] = (int)((block9.TripleShift(35)) & 2047L);
+                values[valuesOffset++] = (int)((block9.TripleShift(24)) & 2047L);
+                values[valuesOffset++] = (int)((block9.TripleShift(13)) & 2047L);
+                values[valuesOffset++] = (int)((block9.TripleShift(2)) & 2047L);
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block9 & 3L) << 9) | ((long)((ulong)block10 >> 55)));
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 44)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 33)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 22)) & 2047L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 11)) & 2047L);
+                values[valuesOffset++] = (int)(((block9 & 3L) << 9) | (block10.TripleShift(55)));
+                values[valuesOffset++] = (int)((block10.TripleShift(44)) & 2047L);
+                values[valuesOffset++] = (int)((block10.TripleShift(33)) & 2047L);
+                values[valuesOffset++] = (int)((block10.TripleShift(22)) & 2047L);
+                values[valuesOffset++] = (int)((block10.TripleShift(11)) & 2047L);
                 values[valuesOffset++] = (int)(block10 & 2047L);
             }
         }
@@ -117,21 +119,21 @@
             {
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 3) | ((int)((uint)byte1 >> 5));
+                values[valuesOffset++] = (byte0 << 3) | (byte1.TripleShift(5));
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 31) << 6) | ((int)((uint)byte2 >> 2));
+                values[valuesOffset++] = ((byte1 & 31) << 6) | (byte2.TripleShift(2));
                 int byte3 = blocks[blocksOffset++] & 0xFF;
                 int byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 3) << 9) | (byte3 << 1) | ((int)((uint)byte4 >> 7));
+                values[valuesOffset++] = ((byte2 & 3) << 9) | (byte3 << 1) | (byte4.TripleShift(7));
                 int byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 127) << 4) | ((int)((uint)byte5 >> 4));
+                values[valuesOffset++] = ((byte4 & 127) << 4) | (byte5.TripleShift(4));
                 int byte6 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 15) << 7) | ((int)((uint)byte6 >> 1));
+                values[valuesOffset++] = ((byte5 & 15) << 7) | (byte6.TripleShift(1));
                 int byte7 = blocks[blocksOffset++] & 0xFF;
                 int byte8 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte6 & 1) << 10) | (byte7 << 2) | ((int)((uint)byte8 >> 6));
+                values[valuesOffset++] = ((byte6 & 1) << 10) | (byte7 << 2) | (byte8.TripleShift(6));
                 int byte9 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte8 & 63) << 5) | ((int)((uint)byte9 >> 3));
+                values[valuesOffset++] = ((byte8 & 63) << 5) | (byte9.TripleShift(3));
                 int byte10 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte9 & 7) << 8) | byte10;
             }
@@ -142,79 +144,79 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 53);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 42)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 31)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 20)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 9)) & 2047L;
+                values[valuesOffset++] = block0.TripleShift(53);
+                values[valuesOffset++] = (block0.TripleShift(42)) & 2047L;
+                values[valuesOffset++] = (block0.TripleShift(31)) & 2047L;
+                values[valuesOffset++] = (block0.TripleShift(20)) & 2047L;
+                values[valuesOffset++] = (block0.TripleShift(9)) & 2047L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 511L) << 2) | ((long)((ulong)block1 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 51)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 40)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 29)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 18)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 7)) & 2047L;
+                values[valuesOffset++] = ((block0 & 511L) << 2) | (block1.TripleShift(62));
+                values[valuesOffset++] = (block1.TripleShift(51)) & 2047L;
+                values[valuesOffset++] = (block1.TripleShift(40)) & 2047L;
+                values[valuesOffset++] = (block1.TripleShift(29)) & 2047L;
+                values[valuesOffset++] = (block1.TripleShift(18)) & 2047L;
+                values[valuesOffset++] = (block1.TripleShift(7)) & 2047L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 127L) << 4) | ((long)((ulong)block2 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 49)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 38)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 27)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 16)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 5)) & 2047L;
+                values[valuesOffset++] = ((block1 & 127L) << 4) | (block2.TripleShift(60));
+                values[valuesOffset++] = (block2.TripleShift(49)) & 2047L;
+                values[valuesOffset++] = (block2.TripleShift(38)) & 2047L;
+                values[valuesOffset++] = (block2.TripleShift(27)) & 2047L;
+                values[valuesOffset++] = (block2.TripleShift(16)) & 2047L;
+                values[valuesOffset++] = (block2.TripleShift(5)) & 2047L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 31L) << 6) | ((long)((ulong)block3 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 47)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 36)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 25)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 14)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 3)) & 2047L;
+                values[valuesOffset++] = ((block2 & 31L) << 6) | (block3.TripleShift(58));
+                values[valuesOffset++] = (block3.TripleShift(47)) & 2047L;
+                values[valuesOffset++] = (block3.TripleShift(36)) & 2047L;
+                values[valuesOffset++] = (block3.TripleShift(25)) & 2047L;
+                values[valuesOffset++] = (block3.TripleShift(14)) & 2047L;
+                values[valuesOffset++] = (block3.TripleShift(3)) & 2047L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 7L) << 8) | ((long)((ulong)block4 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 45)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 34)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 23)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 12)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 1)) & 2047L;
+                values[valuesOffset++] = ((block3 & 7L) << 8) | (block4.TripleShift(56));
+                values[valuesOffset++] = (block4.TripleShift(45)) & 2047L;
+                values[valuesOffset++] = (block4.TripleShift(34)) & 2047L;
+                values[valuesOffset++] = (block4.TripleShift(23)) & 2047L;
+                values[valuesOffset++] = (block4.TripleShift(12)) & 2047L;
+                values[valuesOffset++] = (block4.TripleShift(1)) & 2047L;
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block4 & 1L) << 10) | ((long)((ulong)block5 >> 54));
-                values[valuesOffset++] = ((long)((ulong)block5 >> 43)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 32)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 21)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 10)) & 2047L;
+                values[valuesOffset++] = ((block4 & 1L) << 10) | (block5.TripleShift(54));
+                values[valuesOffset++] = (block5.TripleShift(43)) & 2047L;
+                values[valuesOffset++] = (block5.TripleShift(32)) & 2047L;
+                values[valuesOffset++] = (block5.TripleShift(21)) & 2047L;
+                values[valuesOffset++] = (block5.TripleShift(10)) & 2047L;
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block5 & 1023L) << 1) | ((long)((ulong)block6 >> 63));
-                values[valuesOffset++] = ((long)((ulong)block6 >> 52)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 41)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 30)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 19)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 8)) & 2047L;
+                values[valuesOffset++] = ((block5 & 1023L) << 1) | (block6.TripleShift(63));
+                values[valuesOffset++] = (block6.TripleShift(52)) & 2047L;
+                values[valuesOffset++] = (block6.TripleShift(41)) & 2047L;
+                values[valuesOffset++] = (block6.TripleShift(30)) & 2047L;
+                values[valuesOffset++] = (block6.TripleShift(19)) & 2047L;
+                values[valuesOffset++] = (block6.TripleShift(8)) & 2047L;
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block6 & 255L) << 3) | ((long)((ulong)block7 >> 61));
-                values[valuesOffset++] = ((long)((ulong)block7 >> 50)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 39)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 28)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 17)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 6)) & 2047L;
+                values[valuesOffset++] = ((block6 & 255L) << 3) | (block7.TripleShift(61));
+                values[valuesOffset++] = (block7.TripleShift(50)) & 2047L;
+                values[valuesOffset++] = (block7.TripleShift(39)) & 2047L;
+                values[valuesOffset++] = (block7.TripleShift(28)) & 2047L;
+                values[valuesOffset++] = (block7.TripleShift(17)) & 2047L;
+                values[valuesOffset++] = (block7.TripleShift(6)) & 2047L;
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block7 & 63L) << 5) | ((long)((ulong)block8 >> 59));
-                values[valuesOffset++] = ((long)((ulong)block8 >> 48)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 37)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 26)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 15)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 4)) & 2047L;
+                values[valuesOffset++] = ((block7 & 63L) << 5) | (block8.TripleShift(59));
+                values[valuesOffset++] = (block8.TripleShift(48)) & 2047L;
+                values[valuesOffset++] = (block8.TripleShift(37)) & 2047L;
+                values[valuesOffset++] = (block8.TripleShift(26)) & 2047L;
+                values[valuesOffset++] = (block8.TripleShift(15)) & 2047L;
+                values[valuesOffset++] = (block8.TripleShift(4)) & 2047L;
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block8 & 15L) << 7) | ((long)((ulong)block9 >> 57));
-                values[valuesOffset++] = ((long)((ulong)block9 >> 46)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 35)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 24)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 13)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 2)) & 2047L;
+                values[valuesOffset++] = ((block8 & 15L) << 7) | (block9.TripleShift(57));
+                values[valuesOffset++] = (block9.TripleShift(46)) & 2047L;
+                values[valuesOffset++] = (block9.TripleShift(35)) & 2047L;
+                values[valuesOffset++] = (block9.TripleShift(24)) & 2047L;
+                values[valuesOffset++] = (block9.TripleShift(13)) & 2047L;
+                values[valuesOffset++] = (block9.TripleShift(2)) & 2047L;
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block9 & 3L) << 9) | ((long)((ulong)block10 >> 55));
-                values[valuesOffset++] = ((long)((ulong)block10 >> 44)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 33)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 22)) & 2047L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 11)) & 2047L;
+                values[valuesOffset++] = ((block9 & 3L) << 9) | (block10.TripleShift(55));
+                values[valuesOffset++] = (block10.TripleShift(44)) & 2047L;
+                values[valuesOffset++] = (block10.TripleShift(33)) & 2047L;
+                values[valuesOffset++] = (block10.TripleShift(22)) & 2047L;
+                values[valuesOffset++] = (block10.TripleShift(11)) & 2047L;
                 values[valuesOffset++] = block10 & 2047L;
             }
         }
@@ -225,21 +227,21 @@
             {
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 3) | ((long)((ulong)byte1 >> 5));
+                values[valuesOffset++] = (byte0 << 3) | (byte1.TripleShift(5));
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 31) << 6) | ((long)((ulong)byte2 >> 2));
+                values[valuesOffset++] = ((byte1 & 31) << 6) | (byte2.TripleShift(2));
                 long byte3 = blocks[blocksOffset++] & 0xFF;
                 long byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 3) << 9) | (byte3 << 1) | ((long)((ulong)byte4 >> 7));
+                values[valuesOffset++] = ((byte2 & 3) << 9) | (byte3 << 1) | (byte4.TripleShift(7));
                 long byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 127) << 4) | ((long)((ulong)byte5 >> 4));
+                values[valuesOffset++] = ((byte4 & 127) << 4) | (byte5.TripleShift(4));
                 long byte6 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 15) << 7) | ((long)((ulong)byte6 >> 1));
+                values[valuesOffset++] = ((byte5 & 15) << 7) | (byte6.TripleShift(1));
                 long byte7 = blocks[blocksOffset++] & 0xFF;
                 long byte8 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte6 & 1) << 10) | (byte7 << 2) | ((long)((ulong)byte8 >> 6));
+                values[valuesOffset++] = ((byte6 & 1) << 10) | (byte7 << 2) | (byte8.TripleShift(6));
                 long byte9 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte8 & 63) << 5) | ((long)((ulong)byte9 >> 3));
+                values[valuesOffset++] = ((byte8 & 63) << 5) | (byte9.TripleShift(3));
                 long byte10 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte9 & 7) << 8) | byte10;
             }
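
The hunks above, and those that follow for the other packed decoders, all apply the same mechanical substitution: the verbose unsigned-shift idiom `(long)((ulong)x >> n)` (or `(int)((uint)x >> n)`) is replaced by J2N's `TripleShift` extension method, the .NET counterpart of Java's `>>>` operator, with `using J2N.Numerics;` added to each file. A minimal sketch of the equivalence is below; the class and variable names are illustrative only and not part of the codebase.

```csharp
using System;
using J2N.Numerics; // TripleShift extension methods for int/long (Java's >>>)

public static class TripleShiftSketch
{
    public static void Main()
    {
        long block = unchecked((long)0xF0F0_F0F0_F0F0_F0F0);

        // Original idiom used throughout the generated decoders:
        long viaCast = (long)((ulong)block >> 52);

        // Replacement applied in this change set:
        long viaTripleShift = block.TripleShift(52);

        Console.WriteLine(viaCast == viaTripleShift); // True
    }
}
```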
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked12.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked12.cs
index c72492d..41b4ab2 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked12.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked12.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,23 +36,23 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 52));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 40)) & 4095L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 28)) & 4095L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 16)) & 4095L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 4)) & 4095L);
+                values[valuesOffset++] = (int)(block0.TripleShift(52));
+                values[valuesOffset++] = (int)((block0.TripleShift(40)) & 4095L);
+                values[valuesOffset++] = (int)((block0.TripleShift(28)) & 4095L);
+                values[valuesOffset++] = (int)((block0.TripleShift(16)) & 4095L);
+                values[valuesOffset++] = (int)((block0.TripleShift(4)) & 4095L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 15L) << 8) | ((long)((ulong)block1 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 44)) & 4095L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 32)) & 4095L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 20)) & 4095L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 8)) & 4095L);
+                values[valuesOffset++] = (int)(((block0 & 15L) << 8) | (block1.TripleShift(56)));
+                values[valuesOffset++] = (int)((block1.TripleShift(44)) & 4095L);
+                values[valuesOffset++] = (int)((block1.TripleShift(32)) & 4095L);
+                values[valuesOffset++] = (int)((block1.TripleShift(20)) & 4095L);
+                values[valuesOffset++] = (int)((block1.TripleShift(8)) & 4095L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 255L) << 4) | ((long)((ulong)block2 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 48)) & 4095L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 36)) & 4095L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 24)) & 4095L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 12)) & 4095L);
+                values[valuesOffset++] = (int)(((block1 & 255L) << 4) | (block2.TripleShift(60)));
+                values[valuesOffset++] = (int)((block2.TripleShift(48)) & 4095L);
+                values[valuesOffset++] = (int)((block2.TripleShift(36)) & 4095L);
+                values[valuesOffset++] = (int)((block2.TripleShift(24)) & 4095L);
+                values[valuesOffset++] = (int)((block2.TripleShift(12)) & 4095L);
                 values[valuesOffset++] = (int)(block2 & 4095L);
             }
         }
@@ -61,7 +63,7 @@
             {
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 4) | ((int)((uint)byte1 >> 4));
+                values[valuesOffset++] = (byte0 << 4) | (byte1.TripleShift(4));
                 int byte2 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte1 & 15) << 8) | byte2;
             }
@@ -72,23 +74,23 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 52);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 40)) & 4095L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 28)) & 4095L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 16)) & 4095L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 4)) & 4095L;
+                values[valuesOffset++] = block0.TripleShift(52);
+                values[valuesOffset++] = (block0.TripleShift(40)) & 4095L;
+                values[valuesOffset++] = (block0.TripleShift(28)) & 4095L;
+                values[valuesOffset++] = (block0.TripleShift(16)) & 4095L;
+                values[valuesOffset++] = (block0.TripleShift(4)) & 4095L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 15L) << 8) | ((long)((ulong)block1 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 44)) & 4095L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 32)) & 4095L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 20)) & 4095L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 8)) & 4095L;
+                values[valuesOffset++] = ((block0 & 15L) << 8) | (block1.TripleShift(56));
+                values[valuesOffset++] = (block1.TripleShift(44)) & 4095L;
+                values[valuesOffset++] = (block1.TripleShift(32)) & 4095L;
+                values[valuesOffset++] = (block1.TripleShift(20)) & 4095L;
+                values[valuesOffset++] = (block1.TripleShift(8)) & 4095L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 255L) << 4) | ((long)((ulong)block2 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 48)) & 4095L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 36)) & 4095L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 24)) & 4095L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 12)) & 4095L;
+                values[valuesOffset++] = ((block1 & 255L) << 4) | (block2.TripleShift(60));
+                values[valuesOffset++] = (block2.TripleShift(48)) & 4095L;
+                values[valuesOffset++] = (block2.TripleShift(36)) & 4095L;
+                values[valuesOffset++] = (block2.TripleShift(24)) & 4095L;
+                values[valuesOffset++] = (block2.TripleShift(12)) & 4095L;
                 values[valuesOffset++] = block2 & 4095L;
             }
         }
@@ -99,7 +101,7 @@
             {
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 4) | ((long)((ulong)byte1 >> 4));
+                values[valuesOffset++] = (byte0 << 4) | (byte1.TripleShift(4));
                 long byte2 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte1 & 15) << 8) | byte2;
             }
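
The byte-oriented path of `BulkOperationPacked12` shown above packs every pair of 12-bit values into three bytes: the first value spans `byte0` plus the high nibble of `byte1`, the second spans the low nibble of `byte1` plus `byte2`. A small round-trip sketch, with illustrative names only, mirrors the decode expressions used by the generated code.

```csharp
using System;

// Illustrative sketch: pack two 12-bit values into three bytes, then decode
// them with the same expressions as BulkOperationPacked12.Decode.
public static class Packed12Sketch
{
    public static void Main()
    {
        int a = 0xABC, b = 0x123; // two arbitrary 12-bit values

        // Pack: a -> byte0 + high nibble of byte1; b -> low nibble of byte1 + byte2.
        int byte0 = a >> 4;
        int byte1 = ((a & 0xF) << 4) | (b >> 8);
        int byte2 = b & 0xFF;

        // Decode (byte1 is a non-negative masked int, so >> matches TripleShift here):
        int decodedA = (byte0 << 4) | (byte1 >> 4);
        int decodedB = ((byte1 & 15) << 8) | byte2;

        Console.WriteLine(decodedA == a && decodedB == b); // True
    }
}
```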
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked13.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked13.cs
index c1f3f75..cafecee 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked13.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked13.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,81 +36,81 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 51));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 38)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 25)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 12)) & 8191L);
+                values[valuesOffset++] = (int)(block0.TripleShift(51));
+                values[valuesOffset++] = (int)((block0.TripleShift(38)) & 8191L);
+                values[valuesOffset++] = (int)((block0.TripleShift(25)) & 8191L);
+                values[valuesOffset++] = (int)((block0.TripleShift(12)) & 8191L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 4095L) << 1) | ((long)((ulong)block1 >> 63)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 50)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 37)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 24)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 11)) & 8191L);
+                values[valuesOffset++] = (int)(((block0 & 4095L) << 1) | (block1.TripleShift(63)));
+                values[valuesOffset++] = (int)((block1.TripleShift(50)) & 8191L);
+                values[valuesOffset++] = (int)((block1.TripleShift(37)) & 8191L);
+                values[valuesOffset++] = (int)((block1.TripleShift(24)) & 8191L);
+                values[valuesOffset++] = (int)((block1.TripleShift(11)) & 8191L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 2047L) << 2) | ((long)((ulong)block2 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 49)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 36)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 23)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 10)) & 8191L);
+                values[valuesOffset++] = (int)(((block1 & 2047L) << 2) | (block2.TripleShift(62)));
+                values[valuesOffset++] = (int)((block2.TripleShift(49)) & 8191L);
+                values[valuesOffset++] = (int)((block2.TripleShift(36)) & 8191L);
+                values[valuesOffset++] = (int)((block2.TripleShift(23)) & 8191L);
+                values[valuesOffset++] = (int)((block2.TripleShift(10)) & 8191L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 1023L) << 3) | ((long)((ulong)block3 >> 61)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 48)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 35)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 22)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 9)) & 8191L);
+                values[valuesOffset++] = (int)(((block2 & 1023L) << 3) | (block3.TripleShift(61)));
+                values[valuesOffset++] = (int)((block3.TripleShift(48)) & 8191L);
+                values[valuesOffset++] = (int)((block3.TripleShift(35)) & 8191L);
+                values[valuesOffset++] = (int)((block3.TripleShift(22)) & 8191L);
+                values[valuesOffset++] = (int)((block3.TripleShift(9)) & 8191L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 511L) << 4) | ((long)((ulong)block4 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 47)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 34)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 21)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 8)) & 8191L);
+                values[valuesOffset++] = (int)(((block3 & 511L) << 4) | (block4.TripleShift(60)));
+                values[valuesOffset++] = (int)((block4.TripleShift(47)) & 8191L);
+                values[valuesOffset++] = (int)((block4.TripleShift(34)) & 8191L);
+                values[valuesOffset++] = (int)((block4.TripleShift(21)) & 8191L);
+                values[valuesOffset++] = (int)((block4.TripleShift(8)) & 8191L);
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block4 & 255L) << 5) | ((long)((ulong)block5 >> 59)));
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 46)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 33)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 20)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 7)) & 8191L);
+                values[valuesOffset++] = (int)(((block4 & 255L) << 5) | (block5.TripleShift(59)));
+                values[valuesOffset++] = (int)((block5.TripleShift(46)) & 8191L);
+                values[valuesOffset++] = (int)((block5.TripleShift(33)) & 8191L);
+                values[valuesOffset++] = (int)((block5.TripleShift(20)) & 8191L);
+                values[valuesOffset++] = (int)((block5.TripleShift(7)) & 8191L);
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block5 & 127L) << 6) | ((long)((ulong)block6 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 45)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 32)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 19)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 6)) & 8191L);
+                values[valuesOffset++] = (int)(((block5 & 127L) << 6) | (block6.TripleShift(58)));
+                values[valuesOffset++] = (int)((block6.TripleShift(45)) & 8191L);
+                values[valuesOffset++] = (int)((block6.TripleShift(32)) & 8191L);
+                values[valuesOffset++] = (int)((block6.TripleShift(19)) & 8191L);
+                values[valuesOffset++] = (int)((block6.TripleShift(6)) & 8191L);
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block6 & 63L) << 7) | ((long)((ulong)block7 >> 57)));
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 44)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 31)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 18)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 5)) & 8191L);
+                values[valuesOffset++] = (int)(((block6 & 63L) << 7) | (block7.TripleShift(57)));
+                values[valuesOffset++] = (int)((block7.TripleShift(44)) & 8191L);
+                values[valuesOffset++] = (int)((block7.TripleShift(31)) & 8191L);
+                values[valuesOffset++] = (int)((block7.TripleShift(18)) & 8191L);
+                values[valuesOffset++] = (int)((block7.TripleShift(5)) & 8191L);
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block7 & 31L) << 8) | ((long)((ulong)block8 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 43)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 30)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 17)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 4)) & 8191L);
+                values[valuesOffset++] = (int)(((block7 & 31L) << 8) | (block8.TripleShift(56)));
+                values[valuesOffset++] = (int)((block8.TripleShift(43)) & 8191L);
+                values[valuesOffset++] = (int)((block8.TripleShift(30)) & 8191L);
+                values[valuesOffset++] = (int)((block8.TripleShift(17)) & 8191L);
+                values[valuesOffset++] = (int)((block8.TripleShift(4)) & 8191L);
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block8 & 15L) << 9) | ((long)((ulong)block9 >> 55)));
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 42)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 29)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 16)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 3)) & 8191L);
+                values[valuesOffset++] = (int)(((block8 & 15L) << 9) | (block9.TripleShift(55)));
+                values[valuesOffset++] = (int)((block9.TripleShift(42)) & 8191L);
+                values[valuesOffset++] = (int)((block9.TripleShift(29)) & 8191L);
+                values[valuesOffset++] = (int)((block9.TripleShift(16)) & 8191L);
+                values[valuesOffset++] = (int)((block9.TripleShift(3)) & 8191L);
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block9 & 7L) << 10) | ((long)((ulong)block10 >> 54)));
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 41)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 28)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 15)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 2)) & 8191L);
+                values[valuesOffset++] = (int)(((block9 & 7L) << 10) | (block10.TripleShift(54)));
+                values[valuesOffset++] = (int)((block10.TripleShift(41)) & 8191L);
+                values[valuesOffset++] = (int)((block10.TripleShift(28)) & 8191L);
+                values[valuesOffset++] = (int)((block10.TripleShift(15)) & 8191L);
+                values[valuesOffset++] = (int)((block10.TripleShift(2)) & 8191L);
                 long block11 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block10 & 3L) << 11) | ((long)((ulong)block11 >> 53)));
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 40)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 27)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 14)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 1)) & 8191L);
+                values[valuesOffset++] = (int)(((block10 & 3L) << 11) | (block11.TripleShift(53)));
+                values[valuesOffset++] = (int)((block11.TripleShift(40)) & 8191L);
+                values[valuesOffset++] = (int)((block11.TripleShift(27)) & 8191L);
+                values[valuesOffset++] = (int)((block11.TripleShift(14)) & 8191L);
+                values[valuesOffset++] = (int)((block11.TripleShift(1)) & 8191L);
                 long block12 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block11 & 1L) << 12) | ((long)((ulong)block12 >> 52)));
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 39)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 26)) & 8191L);
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 13)) & 8191L);
+                values[valuesOffset++] = (int)(((block11 & 1L) << 12) | (block12.TripleShift(52)));
+                values[valuesOffset++] = (int)((block12.TripleShift(39)) & 8191L);
+                values[valuesOffset++] = (int)((block12.TripleShift(26)) & 8191L);
+                values[valuesOffset++] = (int)((block12.TripleShift(13)) & 8191L);
                 values[valuesOffset++] = (int)(block12 & 8191L);
             }
         }
@@ -119,23 +121,23 @@
             {
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 5) | ((int)((uint)byte1 >> 3));
+                values[valuesOffset++] = (byte0 << 5) | (byte1.TripleShift(3));
                 int byte2 = blocks[blocksOffset++] & 0xFF;
                 int byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 7) << 10) | (byte2 << 2) | ((int)((uint)byte3 >> 6));
+                values[valuesOffset++] = ((byte1 & 7) << 10) | (byte2 << 2) | (byte3.TripleShift(6));
                 int byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte3 & 63) << 7) | ((int)((uint)byte4 >> 1));
+                values[valuesOffset++] = ((byte3 & 63) << 7) | (byte4.TripleShift(1));
                 int byte5 = blocks[blocksOffset++] & 0xFF;
                 int byte6 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 1) << 12) | (byte5 << 4) | ((int)((uint)byte6 >> 4));
+                values[valuesOffset++] = ((byte4 & 1) << 12) | (byte5 << 4) | (byte6.TripleShift(4));
                 int byte7 = blocks[blocksOffset++] & 0xFF;
                 int byte8 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte6 & 15) << 9) | (byte7 << 1) | ((int)((uint)byte8 >> 7));
+                values[valuesOffset++] = ((byte6 & 15) << 9) | (byte7 << 1) | (byte8.TripleShift(7));
                 int byte9 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte8 & 127) << 6) | ((int)((uint)byte9 >> 2));
+                values[valuesOffset++] = ((byte8 & 127) << 6) | (byte9.TripleShift(2));
                 int byte10 = blocks[blocksOffset++] & 0xFF;
                 int byte11 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte9 & 3) << 11) | (byte10 << 3) | ((int)((uint)byte11 >> 5));
+                values[valuesOffset++] = ((byte9 & 3) << 11) | (byte10 << 3) | (byte11.TripleShift(5));
                 int byte12 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte11 & 31) << 8) | byte12;
             }
@@ -146,81 +148,81 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 51);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 38)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 25)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 12)) & 8191L;
+                values[valuesOffset++] = block0.TripleShift(51);
+                values[valuesOffset++] = (block0.TripleShift(38)) & 8191L;
+                values[valuesOffset++] = (block0.TripleShift(25)) & 8191L;
+                values[valuesOffset++] = (block0.TripleShift(12)) & 8191L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 4095L) << 1) | ((long)((ulong)block1 >> 63));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 50)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 37)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 24)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 11)) & 8191L;
+                values[valuesOffset++] = ((block0 & 4095L) << 1) | (block1.TripleShift(63));
+                values[valuesOffset++] = (block1.TripleShift(50)) & 8191L;
+                values[valuesOffset++] = (block1.TripleShift(37)) & 8191L;
+                values[valuesOffset++] = (block1.TripleShift(24)) & 8191L;
+                values[valuesOffset++] = (block1.TripleShift(11)) & 8191L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 2047L) << 2) | ((long)((ulong)block2 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 49)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 36)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 23)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 10)) & 8191L;
+                values[valuesOffset++] = ((block1 & 2047L) << 2) | (block2.TripleShift(62));
+                values[valuesOffset++] = (block2.TripleShift(49)) & 8191L;
+                values[valuesOffset++] = (block2.TripleShift(36)) & 8191L;
+                values[valuesOffset++] = (block2.TripleShift(23)) & 8191L;
+                values[valuesOffset++] = (block2.TripleShift(10)) & 8191L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 1023L) << 3) | ((long)((ulong)block3 >> 61));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 48)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 35)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 22)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 9)) & 8191L;
+                values[valuesOffset++] = ((block2 & 1023L) << 3) | (block3.TripleShift(61));
+                values[valuesOffset++] = (block3.TripleShift(48)) & 8191L;
+                values[valuesOffset++] = (block3.TripleShift(35)) & 8191L;
+                values[valuesOffset++] = (block3.TripleShift(22)) & 8191L;
+                values[valuesOffset++] = (block3.TripleShift(9)) & 8191L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 511L) << 4) | ((long)((ulong)block4 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 47)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 34)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 21)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 8)) & 8191L;
+                values[valuesOffset++] = ((block3 & 511L) << 4) | (block4.TripleShift(60));
+                values[valuesOffset++] = (block4.TripleShift(47)) & 8191L;
+                values[valuesOffset++] = (block4.TripleShift(34)) & 8191L;
+                values[valuesOffset++] = (block4.TripleShift(21)) & 8191L;
+                values[valuesOffset++] = (block4.TripleShift(8)) & 8191L;
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block4 & 255L) << 5) | ((long)((ulong)block5 >> 59));
-                values[valuesOffset++] = ((long)((ulong)block5 >> 46)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 33)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 20)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 7)) & 8191L;
+                values[valuesOffset++] = ((block4 & 255L) << 5) | (block5.TripleShift(59));
+                values[valuesOffset++] = (block5.TripleShift(46)) & 8191L;
+                values[valuesOffset++] = (block5.TripleShift(33)) & 8191L;
+                values[valuesOffset++] = (block5.TripleShift(20)) & 8191L;
+                values[valuesOffset++] = (block5.TripleShift(7)) & 8191L;
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block5 & 127L) << 6) | ((long)((ulong)block6 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block6 >> 45)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 32)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 19)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 6)) & 8191L;
+                values[valuesOffset++] = ((block5 & 127L) << 6) | (block6.TripleShift(58));
+                values[valuesOffset++] = (block6.TripleShift(45)) & 8191L;
+                values[valuesOffset++] = (block6.TripleShift(32)) & 8191L;
+                values[valuesOffset++] = (block6.TripleShift(19)) & 8191L;
+                values[valuesOffset++] = (block6.TripleShift(6)) & 8191L;
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block6 & 63L) << 7) | ((long)((ulong)block7 >> 57));
-                values[valuesOffset++] = ((long)((ulong)block7 >> 44)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 31)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 18)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 5)) & 8191L;
+                values[valuesOffset++] = ((block6 & 63L) << 7) | (block7.TripleShift(57));
+                values[valuesOffset++] = (block7.TripleShift(44)) & 8191L;
+                values[valuesOffset++] = (block7.TripleShift(31)) & 8191L;
+                values[valuesOffset++] = (block7.TripleShift(18)) & 8191L;
+                values[valuesOffset++] = (block7.TripleShift(5)) & 8191L;
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block7 & 31L) << 8) | ((long)((ulong)block8 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block8 >> 43)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 30)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 17)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 4)) & 8191L;
+                values[valuesOffset++] = ((block7 & 31L) << 8) | (block8.TripleShift(56));
+                values[valuesOffset++] = (block8.TripleShift(43)) & 8191L;
+                values[valuesOffset++] = (block8.TripleShift(30)) & 8191L;
+                values[valuesOffset++] = (block8.TripleShift(17)) & 8191L;
+                values[valuesOffset++] = (block8.TripleShift(4)) & 8191L;
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block8 & 15L) << 9) | ((long)((ulong)block9 >> 55));
-                values[valuesOffset++] = ((long)((ulong)block9 >> 42)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 29)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 16)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 3)) & 8191L;
+                values[valuesOffset++] = ((block8 & 15L) << 9) | (block9.TripleShift(55));
+                values[valuesOffset++] = (block9.TripleShift(42)) & 8191L;
+                values[valuesOffset++] = (block9.TripleShift(29)) & 8191L;
+                values[valuesOffset++] = (block9.TripleShift(16)) & 8191L;
+                values[valuesOffset++] = (block9.TripleShift(3)) & 8191L;
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block9 & 7L) << 10) | ((long)((ulong)block10 >> 54));
-                values[valuesOffset++] = ((long)((ulong)block10 >> 41)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 28)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 15)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 2)) & 8191L;
+                values[valuesOffset++] = ((block9 & 7L) << 10) | (block10.TripleShift(54));
+                values[valuesOffset++] = (block10.TripleShift(41)) & 8191L;
+                values[valuesOffset++] = (block10.TripleShift(28)) & 8191L;
+                values[valuesOffset++] = (block10.TripleShift(15)) & 8191L;
+                values[valuesOffset++] = (block10.TripleShift(2)) & 8191L;
                 long block11 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block10 & 3L) << 11) | ((long)((ulong)block11 >> 53));
-                values[valuesOffset++] = ((long)((ulong)block11 >> 40)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block11 >> 27)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block11 >> 14)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block11 >> 1)) & 8191L;
+                values[valuesOffset++] = ((block10 & 3L) << 11) | (block11.TripleShift(53));
+                values[valuesOffset++] = (block11.TripleShift(40)) & 8191L;
+                values[valuesOffset++] = (block11.TripleShift(27)) & 8191L;
+                values[valuesOffset++] = (block11.TripleShift(14)) & 8191L;
+                values[valuesOffset++] = (block11.TripleShift(1)) & 8191L;
                 long block12 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block11 & 1L) << 12) | ((long)((ulong)block12 >> 52));
-                values[valuesOffset++] = ((long)((ulong)block12 >> 39)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block12 >> 26)) & 8191L;
-                values[valuesOffset++] = ((long)((ulong)block12 >> 13)) & 8191L;
+                values[valuesOffset++] = ((block11 & 1L) << 12) | (block12.TripleShift(52));
+                values[valuesOffset++] = (block12.TripleShift(39)) & 8191L;
+                values[valuesOffset++] = (block12.TripleShift(26)) & 8191L;
+                values[valuesOffset++] = (block12.TripleShift(13)) & 8191L;
                 values[valuesOffset++] = block12 & 8191L;
             }
         }
@@ -231,23 +233,23 @@
             {
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 5) | ((long)((ulong)byte1 >> 3));
+                values[valuesOffset++] = (byte0 << 5) | (byte1.TripleShift(3));
                 long byte2 = blocks[blocksOffset++] & 0xFF;
                 long byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 7) << 10) | (byte2 << 2) | ((long)((ulong)byte3 >> 6));
+                values[valuesOffset++] = ((byte1 & 7) << 10) | (byte2 << 2) | (byte3.TripleShift(6));
                 long byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte3 & 63) << 7) | ((long)((ulong)byte4 >> 1));
+                values[valuesOffset++] = ((byte3 & 63) << 7) | (byte4.TripleShift(1));
                 long byte5 = blocks[blocksOffset++] & 0xFF;
                 long byte6 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 1) << 12) | (byte5 << 4) | ((long)((ulong)byte6 >> 4));
+                values[valuesOffset++] = ((byte4 & 1) << 12) | (byte5 << 4) | (byte6.TripleShift(4));
                 long byte7 = blocks[blocksOffset++] & 0xFF;
                 long byte8 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte6 & 15) << 9) | (byte7 << 1) | ((long)((ulong)byte8 >> 7));
+                values[valuesOffset++] = ((byte6 & 15) << 9) | (byte7 << 1) | (byte8.TripleShift(7));
                 long byte9 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte8 & 127) << 6) | ((long)((ulong)byte9 >> 2));
+                values[valuesOffset++] = ((byte8 & 127) << 6) | (byte9.TripleShift(2));
                 long byte10 = blocks[blocksOffset++] & 0xFF;
                 long byte11 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte9 & 3) << 11) | (byte10 << 3) | ((long)((ulong)byte11 >> 5));
+                values[valuesOffset++] = ((byte9 & 3) << 11) | (byte10 << 3) | (byte11.TripleShift(5));
                 long byte12 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte11 & 31) << 8) | byte12;
             }
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked14.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked14.cs
index a8847ce..277f7f2 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked14.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked14.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,43 +36,43 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 50));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 36)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 22)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 8)) & 16383L);
+                values[valuesOffset++] = (int)(block0.TripleShift(50));
+                values[valuesOffset++] = (int)((block0.TripleShift(36)) & 16383L);
+                values[valuesOffset++] = (int)((block0.TripleShift(22)) & 16383L);
+                values[valuesOffset++] = (int)((block0.TripleShift(8)) & 16383L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 255L) << 6) | ((long)((ulong)block1 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 44)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 30)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 16)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 2)) & 16383L);
+                values[valuesOffset++] = (int)(((block0 & 255L) << 6) | (block1.TripleShift(58)));
+                values[valuesOffset++] = (int)((block1.TripleShift(44)) & 16383L);
+                values[valuesOffset++] = (int)((block1.TripleShift(30)) & 16383L);
+                values[valuesOffset++] = (int)((block1.TripleShift(16)) & 16383L);
+                values[valuesOffset++] = (int)((block1.TripleShift(2)) & 16383L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 3L) << 12) | ((long)((ulong)block2 >> 52)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 38)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 24)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 10)) & 16383L);
+                values[valuesOffset++] = (int)(((block1 & 3L) << 12) | (block2.TripleShift(52)));
+                values[valuesOffset++] = (int)((block2.TripleShift(38)) & 16383L);
+                values[valuesOffset++] = (int)((block2.TripleShift(24)) & 16383L);
+                values[valuesOffset++] = (int)((block2.TripleShift(10)) & 16383L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 1023L) << 4) | ((long)((ulong)block3 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 46)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 32)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 18)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 4)) & 16383L);
+                values[valuesOffset++] = (int)(((block2 & 1023L) << 4) | (block3.TripleShift(60)));
+                values[valuesOffset++] = (int)((block3.TripleShift(46)) & 16383L);
+                values[valuesOffset++] = (int)((block3.TripleShift(32)) & 16383L);
+                values[valuesOffset++] = (int)((block3.TripleShift(18)) & 16383L);
+                values[valuesOffset++] = (int)((block3.TripleShift(4)) & 16383L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 15L) << 10) | ((long)((ulong)block4 >> 54)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 40)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 26)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 12)) & 16383L);
+                values[valuesOffset++] = (int)(((block3 & 15L) << 10) | (block4.TripleShift(54)));
+                values[valuesOffset++] = (int)((block4.TripleShift(40)) & 16383L);
+                values[valuesOffset++] = (int)((block4.TripleShift(26)) & 16383L);
+                values[valuesOffset++] = (int)((block4.TripleShift(12)) & 16383L);
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block4 & 4095L) << 2) | ((long)((ulong)block5 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 48)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 34)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 20)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 6)) & 16383L);
+                values[valuesOffset++] = (int)(((block4 & 4095L) << 2) | (block5.TripleShift(62)));
+                values[valuesOffset++] = (int)((block5.TripleShift(48)) & 16383L);
+                values[valuesOffset++] = (int)((block5.TripleShift(34)) & 16383L);
+                values[valuesOffset++] = (int)((block5.TripleShift(20)) & 16383L);
+                values[valuesOffset++] = (int)((block5.TripleShift(6)) & 16383L);
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block5 & 63L) << 8) | ((long)((ulong)block6 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 42)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 28)) & 16383L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 14)) & 16383L);
+                values[valuesOffset++] = (int)(((block5 & 63L) << 8) | (block6.TripleShift(56)));
+                values[valuesOffset++] = (int)((block6.TripleShift(42)) & 16383L);
+                values[valuesOffset++] = (int)((block6.TripleShift(28)) & 16383L);
+                values[valuesOffset++] = (int)((block6.TripleShift(14)) & 16383L);
                 values[valuesOffset++] = (int)(block6 & 16383L);
             }
         }
@@ -81,13 +83,13 @@
             {
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 6) | ((int)((uint)byte1 >> 2));
+                values[valuesOffset++] = (byte0 << 6) | (byte1.TripleShift(2));
                 int byte2 = blocks[blocksOffset++] & 0xFF;
                 int byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 3) << 12) | (byte2 << 4) | ((int)((uint)byte3 >> 4));
+                values[valuesOffset++] = ((byte1 & 3) << 12) | (byte2 << 4) | (byte3.TripleShift(4));
                 int byte4 = blocks[blocksOffset++] & 0xFF;
                 int byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte3 & 15) << 10) | (byte4 << 2) | ((int)((uint)byte5 >> 6));
+                values[valuesOffset++] = ((byte3 & 15) << 10) | (byte4 << 2) | (byte5.TripleShift(6));
                 int byte6 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte5 & 63) << 8) | byte6;
             }
@@ -98,43 +100,43 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 50);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 36)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 22)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 8)) & 16383L;
+                values[valuesOffset++] = block0.TripleShift(50);
+                values[valuesOffset++] = (block0.TripleShift(36)) & 16383L;
+                values[valuesOffset++] = (block0.TripleShift(22)) & 16383L;
+                values[valuesOffset++] = (block0.TripleShift(8)) & 16383L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 255L) << 6) | ((long)((ulong)block1 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 44)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 30)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 16)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 2)) & 16383L;
+                values[valuesOffset++] = ((block0 & 255L) << 6) | (block1.TripleShift(58));
+                values[valuesOffset++] = (block1.TripleShift(44)) & 16383L;
+                values[valuesOffset++] = (block1.TripleShift(30)) & 16383L;
+                values[valuesOffset++] = (block1.TripleShift(16)) & 16383L;
+                values[valuesOffset++] = (block1.TripleShift(2)) & 16383L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 3L) << 12) | ((long)((ulong)block2 >> 52));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 38)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 24)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 10)) & 16383L;
+                values[valuesOffset++] = ((block1 & 3L) << 12) | (block2.TripleShift(52));
+                values[valuesOffset++] = (block2.TripleShift(38)) & 16383L;
+                values[valuesOffset++] = (block2.TripleShift(24)) & 16383L;
+                values[valuesOffset++] = (block2.TripleShift(10)) & 16383L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 1023L) << 4) | ((long)((ulong)block3 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 46)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 32)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 18)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 4)) & 16383L;
+                values[valuesOffset++] = ((block2 & 1023L) << 4) | (block3.TripleShift(60));
+                values[valuesOffset++] = (block3.TripleShift(46)) & 16383L;
+                values[valuesOffset++] = (block3.TripleShift(32)) & 16383L;
+                values[valuesOffset++] = (block3.TripleShift(18)) & 16383L;
+                values[valuesOffset++] = (block3.TripleShift(4)) & 16383L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 15L) << 10) | ((long)((ulong)block4 >> 54));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 40)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 26)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 12)) & 16383L;
+                values[valuesOffset++] = ((block3 & 15L) << 10) | (block4.TripleShift(54));
+                values[valuesOffset++] = (block4.TripleShift(40)) & 16383L;
+                values[valuesOffset++] = (block4.TripleShift(26)) & 16383L;
+                values[valuesOffset++] = (block4.TripleShift(12)) & 16383L;
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block4 & 4095L) << 2) | ((long)((ulong)block5 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block5 >> 48)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 34)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 20)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 6)) & 16383L;
+                values[valuesOffset++] = ((block4 & 4095L) << 2) | (block5.TripleShift(62));
+                values[valuesOffset++] = (block5.TripleShift(48)) & 16383L;
+                values[valuesOffset++] = (block5.TripleShift(34)) & 16383L;
+                values[valuesOffset++] = (block5.TripleShift(20)) & 16383L;
+                values[valuesOffset++] = (block5.TripleShift(6)) & 16383L;
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block5 & 63L) << 8) | ((long)((ulong)block6 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block6 >> 42)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 28)) & 16383L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 14)) & 16383L;
+                values[valuesOffset++] = ((block5 & 63L) << 8) | (block6.TripleShift(56));
+                values[valuesOffset++] = (block6.TripleShift(42)) & 16383L;
+                values[valuesOffset++] = (block6.TripleShift(28)) & 16383L;
+                values[valuesOffset++] = (block6.TripleShift(14)) & 16383L;
                 values[valuesOffset++] = block6 & 16383L;
             }
         }
@@ -145,13 +147,13 @@
             {
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 6) | ((long)((ulong)byte1 >> 2));
+                values[valuesOffset++] = (byte0 << 6) | (byte1.TripleShift(2));
                 long byte2 = blocks[blocksOffset++] & 0xFF;
                 long byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 3) << 12) | (byte2 << 4) | ((long)((ulong)byte3 >> 4));
+                values[valuesOffset++] = ((byte1 & 3) << 12) | (byte2 << 4) | (byte3.TripleShift(4));
                 long byte4 = blocks[blocksOffset++] & 0xFF;
                 long byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte3 & 15) << 10) | (byte4 << 2) | ((long)((ulong)byte5 >> 6));
+                values[valuesOffset++] = ((byte3 & 15) << 10) | (byte4 << 2) | (byte5.TripleShift(6));
                 long byte6 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte5 & 63) << 8) | byte6;
             }
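The hunks above and below all make the same mechanical substitution: the cast chain (long)((ulong)x >> n) / (int)((uint)x >> n) becomes x.TripleShift(n), backed by the newly added "using J2N.Numerics;" directives. As a minimal sketch of the semantics being preserved — using a local helper as an illustrative stand-in rather than the actual J2N extension method — the following compares the unsigned (logical) right shift with C#'s default arithmetic shift on a negative operand:

    // Illustrative stand-in only: mirrors what the replaced cast chain did,
    // which is also what block.TripleShift(n) evaluates to (Java's >>>).
    using System;

    internal static class TripleShiftSketch
    {
        // Logical (unsigned) right shift on a signed long.
        private static long TripleShift(long value, int count) =>
            (long)((ulong)value >> count);

        private static void Main()
        {
            // Negative when viewed as Int64, so the two shift kinds differ.
            long block = unchecked((long)0xF000_0000_0000_0001UL);

            long viaCasts = (long)((ulong)block >> 50); // old pattern in the generated files
            long viaHelper = TripleShift(block, 50);    // new pattern's behavior

            Console.WriteLine(viaCasts == viaHelper); // True
            Console.WriteLine(viaCasts);              // 15360: zeros shifted in from the left
            Console.WriteLine(block >> 50);           // arithmetic shift sign-extends: negative
        }
    }

The change is purely cosmetic for these generated decoders; the extension-method form simply avoids repeating the double cast on every extracted value.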
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked15.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked15.cs
index dd8ef4e..358ef54 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked15.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked15.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,83 +36,83 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 49));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 34)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 19)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 4)) & 32767L);
+                values[valuesOffset++] = (int)(block0.TripleShift(49));
+                values[valuesOffset++] = (int)((block0.TripleShift(34)) & 32767L);
+                values[valuesOffset++] = (int)((block0.TripleShift(19)) & 32767L);
+                values[valuesOffset++] = (int)((block0.TripleShift(4)) & 32767L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 15L) << 11) | ((long)((ulong)block1 >> 53)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 38)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 23)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 8)) & 32767L);
+                values[valuesOffset++] = (int)(((block0 & 15L) << 11) | (block1.TripleShift(53)));
+                values[valuesOffset++] = (int)((block1.TripleShift(38)) & 32767L);
+                values[valuesOffset++] = (int)((block1.TripleShift(23)) & 32767L);
+                values[valuesOffset++] = (int)((block1.TripleShift(8)) & 32767L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 255L) << 7) | ((long)((ulong)block2 >> 57)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 42)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 27)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 12)) & 32767L);
+                values[valuesOffset++] = (int)(((block1 & 255L) << 7) | (block2.TripleShift(57)));
+                values[valuesOffset++] = (int)((block2.TripleShift(42)) & 32767L);
+                values[valuesOffset++] = (int)((block2.TripleShift(27)) & 32767L);
+                values[valuesOffset++] = (int)((block2.TripleShift(12)) & 32767L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 4095L) << 3) | ((long)((ulong)block3 >> 61)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 46)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 31)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 16)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 1)) & 32767L);
+                values[valuesOffset++] = (int)(((block2 & 4095L) << 3) | (block3.TripleShift(61)));
+                values[valuesOffset++] = (int)((block3.TripleShift(46)) & 32767L);
+                values[valuesOffset++] = (int)((block3.TripleShift(31)) & 32767L);
+                values[valuesOffset++] = (int)((block3.TripleShift(16)) & 32767L);
+                values[valuesOffset++] = (int)((block3.TripleShift(1)) & 32767L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 1L) << 14) | ((long)((ulong)block4 >> 50)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 35)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 20)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 5)) & 32767L);
+                values[valuesOffset++] = (int)(((block3 & 1L) << 14) | (block4.TripleShift(50)));
+                values[valuesOffset++] = (int)((block4.TripleShift(35)) & 32767L);
+                values[valuesOffset++] = (int)((block4.TripleShift(20)) & 32767L);
+                values[valuesOffset++] = (int)((block4.TripleShift(5)) & 32767L);
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block4 & 31L) << 10) | ((long)((ulong)block5 >> 54)));
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 39)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 24)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 9)) & 32767L);
+                values[valuesOffset++] = (int)(((block4 & 31L) << 10) | (block5.TripleShift(54)));
+                values[valuesOffset++] = (int)((block5.TripleShift(39)) & 32767L);
+                values[valuesOffset++] = (int)((block5.TripleShift(24)) & 32767L);
+                values[valuesOffset++] = (int)((block5.TripleShift(9)) & 32767L);
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block5 & 511L) << 6) | ((long)((ulong)block6 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 43)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 28)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 13)) & 32767L);
+                values[valuesOffset++] = (int)(((block5 & 511L) << 6) | (block6.TripleShift(58)));
+                values[valuesOffset++] = (int)((block6.TripleShift(43)) & 32767L);
+                values[valuesOffset++] = (int)((block6.TripleShift(28)) & 32767L);
+                values[valuesOffset++] = (int)((block6.TripleShift(13)) & 32767L);
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block6 & 8191L) << 2) | ((long)((ulong)block7 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 47)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 32)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 17)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 2)) & 32767L);
+                values[valuesOffset++] = (int)(((block6 & 8191L) << 2) | (block7.TripleShift(62)));
+                values[valuesOffset++] = (int)((block7.TripleShift(47)) & 32767L);
+                values[valuesOffset++] = (int)((block7.TripleShift(32)) & 32767L);
+                values[valuesOffset++] = (int)((block7.TripleShift(17)) & 32767L);
+                values[valuesOffset++] = (int)((block7.TripleShift(2)) & 32767L);
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block7 & 3L) << 13) | ((long)((ulong)block8 >> 51)));
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 36)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 21)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 6)) & 32767L);
+                values[valuesOffset++] = (int)(((block7 & 3L) << 13) | (block8.TripleShift(51)));
+                values[valuesOffset++] = (int)((block8.TripleShift(36)) & 32767L);
+                values[valuesOffset++] = (int)((block8.TripleShift(21)) & 32767L);
+                values[valuesOffset++] = (int)((block8.TripleShift(6)) & 32767L);
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block8 & 63L) << 9) | ((long)((ulong)block9 >> 55)));
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 40)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 25)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 10)) & 32767L);
+                values[valuesOffset++] = (int)(((block8 & 63L) << 9) | (block9.TripleShift(55)));
+                values[valuesOffset++] = (int)((block9.TripleShift(40)) & 32767L);
+                values[valuesOffset++] = (int)((block9.TripleShift(25)) & 32767L);
+                values[valuesOffset++] = (int)((block9.TripleShift(10)) & 32767L);
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block9 & 1023L) << 5) | ((long)((ulong)block10 >> 59)));
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 44)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 29)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 14)) & 32767L);
+                values[valuesOffset++] = (int)(((block9 & 1023L) << 5) | (block10.TripleShift(59)));
+                values[valuesOffset++] = (int)((block10.TripleShift(44)) & 32767L);
+                values[valuesOffset++] = (int)((block10.TripleShift(29)) & 32767L);
+                values[valuesOffset++] = (int)((block10.TripleShift(14)) & 32767L);
                 long block11 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block10 & 16383L) << 1) | ((long)((ulong)block11 >> 63)));
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 48)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 33)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 18)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 3)) & 32767L);
+                values[valuesOffset++] = (int)(((block10 & 16383L) << 1) | (block11.TripleShift(63)));
+                values[valuesOffset++] = (int)((block11.TripleShift(48)) & 32767L);
+                values[valuesOffset++] = (int)((block11.TripleShift(33)) & 32767L);
+                values[valuesOffset++] = (int)((block11.TripleShift(18)) & 32767L);
+                values[valuesOffset++] = (int)((block11.TripleShift(3)) & 32767L);
                 long block12 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block11 & 7L) << 12) | ((long)((ulong)block12 >> 52)));
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 37)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 22)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 7)) & 32767L);
+                values[valuesOffset++] = (int)(((block11 & 7L) << 12) | (block12.TripleShift(52)));
+                values[valuesOffset++] = (int)((block12.TripleShift(37)) & 32767L);
+                values[valuesOffset++] = (int)((block12.TripleShift(22)) & 32767L);
+                values[valuesOffset++] = (int)((block12.TripleShift(7)) & 32767L);
                 long block13 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block12 & 127L) << 8) | ((long)((ulong)block13 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block13 >> 41)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block13 >> 26)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block13 >> 11)) & 32767L);
+                values[valuesOffset++] = (int)(((block12 & 127L) << 8) | (block13.TripleShift(56)));
+                values[valuesOffset++] = (int)((block13.TripleShift(41)) & 32767L);
+                values[valuesOffset++] = (int)((block13.TripleShift(26)) & 32767L);
+                values[valuesOffset++] = (int)((block13.TripleShift(11)) & 32767L);
                 long block14 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block13 & 2047L) << 4) | ((long)((ulong)block14 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block14 >> 45)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block14 >> 30)) & 32767L);
-                values[valuesOffset++] = (int)(((long)((ulong)block14 >> 15)) & 32767L);
+                values[valuesOffset++] = (int)(((block13 & 2047L) << 4) | (block14.TripleShift(60)));
+                values[valuesOffset++] = (int)((block14.TripleShift(45)) & 32767L);
+                values[valuesOffset++] = (int)((block14.TripleShift(30)) & 32767L);
+                values[valuesOffset++] = (int)((block14.TripleShift(15)) & 32767L);
                 values[valuesOffset++] = (int)(block14 & 32767L);
             }
         }
@@ -121,25 +123,25 @@
             {
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 7) | ((int)((uint)byte1 >> 1));
+                values[valuesOffset++] = (byte0 << 7) | (byte1.TripleShift(1));
                 int byte2 = blocks[blocksOffset++] & 0xFF;
                 int byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 1) << 14) | (byte2 << 6) | ((int)((uint)byte3 >> 2));
+                values[valuesOffset++] = ((byte1 & 1) << 14) | (byte2 << 6) | (byte3.TripleShift(2));
                 int byte4 = blocks[blocksOffset++] & 0xFF;
                 int byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte3 & 3) << 13) | (byte4 << 5) | ((int)((uint)byte5 >> 3));
+                values[valuesOffset++] = ((byte3 & 3) << 13) | (byte4 << 5) | (byte5.TripleShift(3));
                 int byte6 = blocks[blocksOffset++] & 0xFF;
                 int byte7 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 7) << 12) | (byte6 << 4) | ((int)((uint)byte7 >> 4));
+                values[valuesOffset++] = ((byte5 & 7) << 12) | (byte6 << 4) | (byte7.TripleShift(4));
                 int byte8 = blocks[blocksOffset++] & 0xFF;
                 int byte9 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte7 & 15) << 11) | (byte8 << 3) | ((int)((uint)byte9 >> 5));
+                values[valuesOffset++] = ((byte7 & 15) << 11) | (byte8 << 3) | (byte9.TripleShift(5));
                 int byte10 = blocks[blocksOffset++] & 0xFF;
                 int byte11 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte9 & 31) << 10) | (byte10 << 2) | ((int)((uint)byte11 >> 6));
+                values[valuesOffset++] = ((byte9 & 31) << 10) | (byte10 << 2) | (byte11.TripleShift(6));
                 int byte12 = blocks[blocksOffset++] & 0xFF;
                 int byte13 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte11 & 63) << 9) | (byte12 << 1) | ((int)((uint)byte13 >> 7));
+                values[valuesOffset++] = ((byte11 & 63) << 9) | (byte12 << 1) | (byte13.TripleShift(7));
                 int byte14 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte13 & 127) << 8) | byte14;
             }
@@ -150,83 +152,83 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 49);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 34)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 19)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 4)) & 32767L;
+                values[valuesOffset++] = block0.TripleShift(49);
+                values[valuesOffset++] = (block0.TripleShift(34)) & 32767L;
+                values[valuesOffset++] = (block0.TripleShift(19)) & 32767L;
+                values[valuesOffset++] = (block0.TripleShift(4)) & 32767L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 15L) << 11) | ((long)((ulong)block1 >> 53));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 38)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 23)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 8)) & 32767L;
+                values[valuesOffset++] = ((block0 & 15L) << 11) | (block1.TripleShift(53));
+                values[valuesOffset++] = (block1.TripleShift(38)) & 32767L;
+                values[valuesOffset++] = (block1.TripleShift(23)) & 32767L;
+                values[valuesOffset++] = (block1.TripleShift(8)) & 32767L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 255L) << 7) | ((long)((ulong)block2 >> 57));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 42)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 27)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 12)) & 32767L;
+                values[valuesOffset++] = ((block1 & 255L) << 7) | (block2.TripleShift(57));
+                values[valuesOffset++] = (block2.TripleShift(42)) & 32767L;
+                values[valuesOffset++] = (block2.TripleShift(27)) & 32767L;
+                values[valuesOffset++] = (block2.TripleShift(12)) & 32767L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 4095L) << 3) | ((long)((ulong)block3 >> 61));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 46)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 31)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 16)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 1)) & 32767L;
+                values[valuesOffset++] = ((block2 & 4095L) << 3) | (block3.TripleShift(61));
+                values[valuesOffset++] = (block3.TripleShift(46)) & 32767L;
+                values[valuesOffset++] = (block3.TripleShift(31)) & 32767L;
+                values[valuesOffset++] = (block3.TripleShift(16)) & 32767L;
+                values[valuesOffset++] = (block3.TripleShift(1)) & 32767L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 1L) << 14) | ((long)((ulong)block4 >> 50));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 35)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 20)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 5)) & 32767L;
+                values[valuesOffset++] = ((block3 & 1L) << 14) | (block4.TripleShift(50));
+                values[valuesOffset++] = (block4.TripleShift(35)) & 32767L;
+                values[valuesOffset++] = (block4.TripleShift(20)) & 32767L;
+                values[valuesOffset++] = (block4.TripleShift(5)) & 32767L;
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block4 & 31L) << 10) | ((long)((ulong)block5 >> 54));
-                values[valuesOffset++] = ((long)((ulong)block5 >> 39)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 24)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 9)) & 32767L;
+                values[valuesOffset++] = ((block4 & 31L) << 10) | (block5.TripleShift(54));
+                values[valuesOffset++] = (block5.TripleShift(39)) & 32767L;
+                values[valuesOffset++] = (block5.TripleShift(24)) & 32767L;
+                values[valuesOffset++] = (block5.TripleShift(9)) & 32767L;
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block5 & 511L) << 6) | ((long)((ulong)block6 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block6 >> 43)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 28)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 13)) & 32767L;
+                values[valuesOffset++] = ((block5 & 511L) << 6) | (block6.TripleShift(58));
+                values[valuesOffset++] = (block6.TripleShift(43)) & 32767L;
+                values[valuesOffset++] = (block6.TripleShift(28)) & 32767L;
+                values[valuesOffset++] = (block6.TripleShift(13)) & 32767L;
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block6 & 8191L) << 2) | ((long)((ulong)block7 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block7 >> 47)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 32)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 17)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 2)) & 32767L;
+                values[valuesOffset++] = ((block6 & 8191L) << 2) | (block7.TripleShift(62));
+                values[valuesOffset++] = (block7.TripleShift(47)) & 32767L;
+                values[valuesOffset++] = (block7.TripleShift(32)) & 32767L;
+                values[valuesOffset++] = (block7.TripleShift(17)) & 32767L;
+                values[valuesOffset++] = (block7.TripleShift(2)) & 32767L;
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block7 & 3L) << 13) | ((long)((ulong)block8 >> 51));
-                values[valuesOffset++] = ((long)((ulong)block8 >> 36)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 21)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 6)) & 32767L;
+                values[valuesOffset++] = ((block7 & 3L) << 13) | (block8.TripleShift(51));
+                values[valuesOffset++] = (block8.TripleShift(36)) & 32767L;
+                values[valuesOffset++] = (block8.TripleShift(21)) & 32767L;
+                values[valuesOffset++] = (block8.TripleShift(6)) & 32767L;
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block8 & 63L) << 9) | ((long)((ulong)block9 >> 55));
-                values[valuesOffset++] = ((long)((ulong)block9 >> 40)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 25)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 10)) & 32767L;
+                values[valuesOffset++] = ((block8 & 63L) << 9) | (block9.TripleShift(55));
+                values[valuesOffset++] = (block9.TripleShift(40)) & 32767L;
+                values[valuesOffset++] = (block9.TripleShift(25)) & 32767L;
+                values[valuesOffset++] = (block9.TripleShift(10)) & 32767L;
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block9 & 1023L) << 5) | ((long)((ulong)block10 >> 59));
-                values[valuesOffset++] = ((long)((ulong)block10 >> 44)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 29)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 14)) & 32767L;
+                values[valuesOffset++] = ((block9 & 1023L) << 5) | (block10.TripleShift(59));
+                values[valuesOffset++] = (block10.TripleShift(44)) & 32767L;
+                values[valuesOffset++] = (block10.TripleShift(29)) & 32767L;
+                values[valuesOffset++] = (block10.TripleShift(14)) & 32767L;
                 long block11 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block10 & 16383L) << 1) | ((long)((ulong)block11 >> 63));
-                values[valuesOffset++] = ((long)((ulong)block11 >> 48)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block11 >> 33)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block11 >> 18)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block11 >> 3)) & 32767L;
+                values[valuesOffset++] = ((block10 & 16383L) << 1) | (block11.TripleShift(63));
+                values[valuesOffset++] = (block11.TripleShift(48)) & 32767L;
+                values[valuesOffset++] = (block11.TripleShift(33)) & 32767L;
+                values[valuesOffset++] = (block11.TripleShift(18)) & 32767L;
+                values[valuesOffset++] = (block11.TripleShift(3)) & 32767L;
                 long block12 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block11 & 7L) << 12) | ((long)((ulong)block12 >> 52));
-                values[valuesOffset++] = ((long)((ulong)block12 >> 37)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block12 >> 22)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block12 >> 7)) & 32767L;
+                values[valuesOffset++] = ((block11 & 7L) << 12) | (block12.TripleShift(52));
+                values[valuesOffset++] = (block12.TripleShift(37)) & 32767L;
+                values[valuesOffset++] = (block12.TripleShift(22)) & 32767L;
+                values[valuesOffset++] = (block12.TripleShift(7)) & 32767L;
                 long block13 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block12 & 127L) << 8) | ((long)((ulong)block13 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block13 >> 41)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block13 >> 26)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block13 >> 11)) & 32767L;
+                values[valuesOffset++] = ((block12 & 127L) << 8) | (block13.TripleShift(56));
+                values[valuesOffset++] = (block13.TripleShift(41)) & 32767L;
+                values[valuesOffset++] = (block13.TripleShift(26)) & 32767L;
+                values[valuesOffset++] = (block13.TripleShift(11)) & 32767L;
                 long block14 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block13 & 2047L) << 4) | ((long)((ulong)block14 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block14 >> 45)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block14 >> 30)) & 32767L;
-                values[valuesOffset++] = ((long)((ulong)block14 >> 15)) & 32767L;
+                values[valuesOffset++] = ((block13 & 2047L) << 4) | (block14.TripleShift(60));
+                values[valuesOffset++] = (block14.TripleShift(45)) & 32767L;
+                values[valuesOffset++] = (block14.TripleShift(30)) & 32767L;
+                values[valuesOffset++] = (block14.TripleShift(15)) & 32767L;
                 values[valuesOffset++] = block14 & 32767L;
             }
         }
@@ -237,25 +239,25 @@
             {
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 7) | ((long)((ulong)byte1 >> 1));
+                values[valuesOffset++] = (byte0 << 7) | (byte1.TripleShift(1));
                 long byte2 = blocks[blocksOffset++] & 0xFF;
                 long byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 1) << 14) | (byte2 << 6) | ((long)((ulong)byte3 >> 2));
+                values[valuesOffset++] = ((byte1 & 1) << 14) | (byte2 << 6) | (byte3.TripleShift(2));
                 long byte4 = blocks[blocksOffset++] & 0xFF;
                 long byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte3 & 3) << 13) | (byte4 << 5) | ((long)((ulong)byte5 >> 3));
+                values[valuesOffset++] = ((byte3 & 3) << 13) | (byte4 << 5) | (byte5.TripleShift(3));
                 long byte6 = blocks[blocksOffset++] & 0xFF;
                 long byte7 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 7) << 12) | (byte6 << 4) | ((long)((ulong)byte7 >> 4));
+                values[valuesOffset++] = ((byte5 & 7) << 12) | (byte6 << 4) | (byte7.TripleShift(4));
                 long byte8 = blocks[blocksOffset++] & 0xFF;
                 long byte9 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte7 & 15) << 11) | (byte8 << 3) | ((long)((ulong)byte9 >> 5));
+                values[valuesOffset++] = ((byte7 & 15) << 11) | (byte8 << 3) | (byte9.TripleShift(5));
                 long byte10 = blocks[blocksOffset++] & 0xFF;
                 long byte11 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte9 & 31) << 10) | (byte10 << 2) | ((long)((ulong)byte11 >> 6));
+                values[valuesOffset++] = ((byte9 & 31) << 10) | (byte10 << 2) | (byte11.TripleShift(6));
                 long byte12 = blocks[blocksOffset++] & 0xFF;
                 long byte13 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte11 & 63) << 9) | (byte12 << 1) | ((long)((ulong)byte13 >> 7));
+                values[valuesOffset++] = ((byte11 & 63) << 9) | (byte12 << 1) | (byte13.TripleShift(7));
                 long byte14 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte13 & 127) << 8) | byte14;
             }
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked16.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked16.cs
index 01c4bcf..0d67d52 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked16.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked16.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -36,7 +38,7 @@
                 long block = blocks[blocksOffset++];
                 for (int shift = 48; shift >= 0; shift -= 16)
                 {
-                    values[valuesOffset++] = (int)(((long)((ulong)block >> shift)) & 65535);
+                    values[valuesOffset++] = (int)((block.TripleShift(shift)) & 65535);
                 }
             }
         }
@@ -56,7 +58,7 @@
                 long block = blocks[blocksOffset++];
                 for (int shift = 48; shift >= 0; shift -= 16)
                 {
-                    values[valuesOffset++] = ((long)((ulong)block >> shift)) & 65535;
+                    values[valuesOffset++] = (block.TripleShift(shift)) & 65535;
                 }
             }
         }
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked17.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked17.cs
index 2fc2ff1..75a04ca 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked17.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked17.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,85 +36,85 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 47));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 30)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 13)) & 131071L);
+                values[valuesOffset++] = (int)(block0.TripleShift(47));
+                values[valuesOffset++] = (int)((block0.TripleShift(30)) & 131071L);
+                values[valuesOffset++] = (int)((block0.TripleShift(13)) & 131071L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 8191L) << 4) | ((long)((ulong)block1 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 43)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 26)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 9)) & 131071L);
+                values[valuesOffset++] = (int)(((block0 & 8191L) << 4) | (block1.TripleShift(60)));
+                values[valuesOffset++] = (int)((block1.TripleShift(43)) & 131071L);
+                values[valuesOffset++] = (int)((block1.TripleShift(26)) & 131071L);
+                values[valuesOffset++] = (int)((block1.TripleShift(9)) & 131071L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 511L) << 8) | ((long)((ulong)block2 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 39)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 22)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 5)) & 131071L);
+                values[valuesOffset++] = (int)(((block1 & 511L) << 8) | (block2.TripleShift(56)));
+                values[valuesOffset++] = (int)((block2.TripleShift(39)) & 131071L);
+                values[valuesOffset++] = (int)((block2.TripleShift(22)) & 131071L);
+                values[valuesOffset++] = (int)((block2.TripleShift(5)) & 131071L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 31L) << 12) | ((long)((ulong)block3 >> 52)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 35)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 18)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 1)) & 131071L);
+                values[valuesOffset++] = (int)(((block2 & 31L) << 12) | (block3.TripleShift(52)));
+                values[valuesOffset++] = (int)((block3.TripleShift(35)) & 131071L);
+                values[valuesOffset++] = (int)((block3.TripleShift(18)) & 131071L);
+                values[valuesOffset++] = (int)((block3.TripleShift(1)) & 131071L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 1L) << 16) | ((long)((ulong)block4 >> 48)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 31)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 14)) & 131071L);
+                values[valuesOffset++] = (int)(((block3 & 1L) << 16) | (block4.TripleShift(48)));
+                values[valuesOffset++] = (int)((block4.TripleShift(31)) & 131071L);
+                values[valuesOffset++] = (int)((block4.TripleShift(14)) & 131071L);
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block4 & 16383L) << 3) | ((long)((ulong)block5 >> 61)));
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 44)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 27)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 10)) & 131071L);
+                values[valuesOffset++] = (int)(((block4 & 16383L) << 3) | (block5.TripleShift(61)));
+                values[valuesOffset++] = (int)((block5.TripleShift(44)) & 131071L);
+                values[valuesOffset++] = (int)((block5.TripleShift(27)) & 131071L);
+                values[valuesOffset++] = (int)((block5.TripleShift(10)) & 131071L);
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block5 & 1023L) << 7) | ((long)((ulong)block6 >> 57)));
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 40)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 23)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 6)) & 131071L);
+                values[valuesOffset++] = (int)(((block5 & 1023L) << 7) | (block6.TripleShift(57)));
+                values[valuesOffset++] = (int)((block6.TripleShift(40)) & 131071L);
+                values[valuesOffset++] = (int)((block6.TripleShift(23)) & 131071L);
+                values[valuesOffset++] = (int)((block6.TripleShift(6)) & 131071L);
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block6 & 63L) << 11) | ((long)((ulong)block7 >> 53)));
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 36)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 19)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 2)) & 131071L);
+                values[valuesOffset++] = (int)(((block6 & 63L) << 11) | (block7.TripleShift(53)));
+                values[valuesOffset++] = (int)((block7.TripleShift(36)) & 131071L);
+                values[valuesOffset++] = (int)((block7.TripleShift(19)) & 131071L);
+                values[valuesOffset++] = (int)((block7.TripleShift(2)) & 131071L);
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block7 & 3L) << 15) | ((long)((ulong)block8 >> 49)));
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 32)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 15)) & 131071L);
+                values[valuesOffset++] = (int)(((block7 & 3L) << 15) | (block8.TripleShift(49)));
+                values[valuesOffset++] = (int)((block8.TripleShift(32)) & 131071L);
+                values[valuesOffset++] = (int)((block8.TripleShift(15)) & 131071L);
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block8 & 32767L) << 2) | ((long)((ulong)block9 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 45)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 28)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 11)) & 131071L);
+                values[valuesOffset++] = (int)(((block8 & 32767L) << 2) | (block9.TripleShift(62)));
+                values[valuesOffset++] = (int)((block9.TripleShift(45)) & 131071L);
+                values[valuesOffset++] = (int)((block9.TripleShift(28)) & 131071L);
+                values[valuesOffset++] = (int)((block9.TripleShift(11)) & 131071L);
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block9 & 2047L) << 6) | ((long)((ulong)block10 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 41)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 24)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 7)) & 131071L);
+                values[valuesOffset++] = (int)(((block9 & 2047L) << 6) | (block10.TripleShift(58)));
+                values[valuesOffset++] = (int)((block10.TripleShift(41)) & 131071L);
+                values[valuesOffset++] = (int)((block10.TripleShift(24)) & 131071L);
+                values[valuesOffset++] = (int)((block10.TripleShift(7)) & 131071L);
                 long block11 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block10 & 127L) << 10) | ((long)((ulong)block11 >> 54)));
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 37)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 20)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 3)) & 131071L);
+                values[valuesOffset++] = (int)(((block10 & 127L) << 10) | (block11.TripleShift(54)));
+                values[valuesOffset++] = (int)((block11.TripleShift(37)) & 131071L);
+                values[valuesOffset++] = (int)((block11.TripleShift(20)) & 131071L);
+                values[valuesOffset++] = (int)((block11.TripleShift(3)) & 131071L);
                 long block12 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block11 & 7L) << 14) | ((long)((ulong)block12 >> 50)));
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 33)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 16)) & 131071L);
+                values[valuesOffset++] = (int)(((block11 & 7L) << 14) | (block12.TripleShift(50)));
+                values[valuesOffset++] = (int)((block12.TripleShift(33)) & 131071L);
+                values[valuesOffset++] = (int)((block12.TripleShift(16)) & 131071L);
                 long block13 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block12 & 65535L) << 1) | ((long)((ulong)block13 >> 63)));
-                values[valuesOffset++] = (int)(((long)((ulong)block13 >> 46)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block13 >> 29)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block13 >> 12)) & 131071L);
+                values[valuesOffset++] = (int)(((block12 & 65535L) << 1) | (block13.TripleShift(63)));
+                values[valuesOffset++] = (int)((block13.TripleShift(46)) & 131071L);
+                values[valuesOffset++] = (int)((block13.TripleShift(29)) & 131071L);
+                values[valuesOffset++] = (int)((block13.TripleShift(12)) & 131071L);
                 long block14 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block13 & 4095L) << 5) | ((long)((ulong)block14 >> 59)));
-                values[valuesOffset++] = (int)(((long)((ulong)block14 >> 42)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block14 >> 25)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block14 >> 8)) & 131071L);
+                values[valuesOffset++] = (int)(((block13 & 4095L) << 5) | (block14.TripleShift(59)));
+                values[valuesOffset++] = (int)((block14.TripleShift(42)) & 131071L);
+                values[valuesOffset++] = (int)((block14.TripleShift(25)) & 131071L);
+                values[valuesOffset++] = (int)((block14.TripleShift(8)) & 131071L);
                 long block15 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block14 & 255L) << 9) | ((long)((ulong)block15 >> 55)));
-                values[valuesOffset++] = (int)(((long)((ulong)block15 >> 38)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block15 >> 21)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block15 >> 4)) & 131071L);
+                values[valuesOffset++] = (int)(((block14 & 255L) << 9) | (block15.TripleShift(55)));
+                values[valuesOffset++] = (int)((block15.TripleShift(38)) & 131071L);
+                values[valuesOffset++] = (int)((block15.TripleShift(21)) & 131071L);
+                values[valuesOffset++] = (int)((block15.TripleShift(4)) & 131071L);
                 long block16 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block15 & 15L) << 13) | ((long)((ulong)block16 >> 51)));
-                values[valuesOffset++] = (int)(((long)((ulong)block16 >> 34)) & 131071L);
-                values[valuesOffset++] = (int)(((long)((ulong)block16 >> 17)) & 131071L);
+                values[valuesOffset++] = (int)(((block15 & 15L) << 13) | (block16.TripleShift(51)));
+                values[valuesOffset++] = (int)((block16.TripleShift(34)) & 131071L);
+                values[valuesOffset++] = (int)((block16.TripleShift(17)) & 131071L);
                 values[valuesOffset++] = (int)(block16 & 131071L);
             }
         }
@@ -124,25 +126,25 @@
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 9) | (byte1 << 1) | ((int)((uint)byte2 >> 7));
+                values[valuesOffset++] = (byte0 << 9) | (byte1 << 1) | (byte2.TripleShift(7));
                 int byte3 = blocks[blocksOffset++] & 0xFF;
                 int byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 127) << 10) | (byte3 << 2) | ((int)((uint)byte4 >> 6));
+                values[valuesOffset++] = ((byte2 & 127) << 10) | (byte3 << 2) | (byte4.TripleShift(6));
                 int byte5 = blocks[blocksOffset++] & 0xFF;
                 int byte6 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 63) << 11) | (byte5 << 3) | ((int)((uint)byte6 >> 5));
+                values[valuesOffset++] = ((byte4 & 63) << 11) | (byte5 << 3) | (byte6.TripleShift(5));
                 int byte7 = blocks[blocksOffset++] & 0xFF;
                 int byte8 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte6 & 31) << 12) | (byte7 << 4) | ((int)((uint)byte8 >> 4));
+                values[valuesOffset++] = ((byte6 & 31) << 12) | (byte7 << 4) | (byte8.TripleShift(4));
                 int byte9 = blocks[blocksOffset++] & 0xFF;
                 int byte10 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte8 & 15) << 13) | (byte9 << 5) | ((int)((uint)byte10 >> 3));
+                values[valuesOffset++] = ((byte8 & 15) << 13) | (byte9 << 5) | (byte10.TripleShift(3));
                 int byte11 = blocks[blocksOffset++] & 0xFF;
                 int byte12 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte10 & 7) << 14) | (byte11 << 6) | ((int)((uint)byte12 >> 2));
+                values[valuesOffset++] = ((byte10 & 7) << 14) | (byte11 << 6) | (byte12.TripleShift(2));
                 int byte13 = blocks[blocksOffset++] & 0xFF;
                 int byte14 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte12 & 3) << 15) | (byte13 << 7) | ((int)((uint)byte14 >> 1));
+                values[valuesOffset++] = ((byte12 & 3) << 15) | (byte13 << 7) | (byte14.TripleShift(1));
                 int byte15 = blocks[blocksOffset++] & 0xFF;
                 int byte16 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte14 & 1) << 16) | (byte15 << 8) | byte16;
@@ -154,85 +156,85 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 47);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 30)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 13)) & 131071L;
+                values[valuesOffset++] = block0.TripleShift(47);
+                values[valuesOffset++] = (block0.TripleShift(30)) & 131071L;
+                values[valuesOffset++] = (block0.TripleShift(13)) & 131071L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 8191L) << 4) | ((long)((ulong)block1 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 43)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 26)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 9)) & 131071L;
+                values[valuesOffset++] = ((block0 & 8191L) << 4) | (block1.TripleShift(60));
+                values[valuesOffset++] = (block1.TripleShift(43)) & 131071L;
+                values[valuesOffset++] = (block1.TripleShift(26)) & 131071L;
+                values[valuesOffset++] = (block1.TripleShift(9)) & 131071L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 511L) << 8) | ((long)((ulong)block2 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 39)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 22)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 5)) & 131071L;
+                values[valuesOffset++] = ((block1 & 511L) << 8) | (block2.TripleShift(56));
+                values[valuesOffset++] = (block2.TripleShift(39)) & 131071L;
+                values[valuesOffset++] = (block2.TripleShift(22)) & 131071L;
+                values[valuesOffset++] = (block2.TripleShift(5)) & 131071L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 31L) << 12) | ((long)((ulong)block3 >> 52));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 35)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 18)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 1)) & 131071L;
+                values[valuesOffset++] = ((block2 & 31L) << 12) | (block3.TripleShift(52));
+                values[valuesOffset++] = (block3.TripleShift(35)) & 131071L;
+                values[valuesOffset++] = (block3.TripleShift(18)) & 131071L;
+                values[valuesOffset++] = (block3.TripleShift(1)) & 131071L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 1L) << 16) | ((long)((ulong)block4 >> 48));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 31)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 14)) & 131071L;
+                values[valuesOffset++] = ((block3 & 1L) << 16) | (block4.TripleShift(48));
+                values[valuesOffset++] = (block4.TripleShift(31)) & 131071L;
+                values[valuesOffset++] = (block4.TripleShift(14)) & 131071L;
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block4 & 16383L) << 3) | ((long)((ulong)block5 >> 61));
-                values[valuesOffset++] = ((long)((ulong)block5 >> 44)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 27)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 10)) & 131071L;
+                values[valuesOffset++] = ((block4 & 16383L) << 3) | (block5.TripleShift(61));
+                values[valuesOffset++] = (block5.TripleShift(44)) & 131071L;
+                values[valuesOffset++] = (block5.TripleShift(27)) & 131071L;
+                values[valuesOffset++] = (block5.TripleShift(10)) & 131071L;
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block5 & 1023L) << 7) | ((long)((ulong)block6 >> 57));
-                values[valuesOffset++] = ((long)((ulong)block6 >> 40)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 23)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 6)) & 131071L;
+                values[valuesOffset++] = ((block5 & 1023L) << 7) | (block6.TripleShift(57));
+                values[valuesOffset++] = (block6.TripleShift(40)) & 131071L;
+                values[valuesOffset++] = (block6.TripleShift(23)) & 131071L;
+                values[valuesOffset++] = (block6.TripleShift(6)) & 131071L;
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block6 & 63L) << 11) | ((long)((ulong)block7 >> 53));
-                values[valuesOffset++] = ((long)((ulong)block7 >> 36)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 19)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 2)) & 131071L;
+                values[valuesOffset++] = ((block6 & 63L) << 11) | (block7.TripleShift(53));
+                values[valuesOffset++] = (block7.TripleShift(36)) & 131071L;
+                values[valuesOffset++] = (block7.TripleShift(19)) & 131071L;
+                values[valuesOffset++] = (block7.TripleShift(2)) & 131071L;
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block7 & 3L) << 15) | ((long)((ulong)block8 >> 49));
-                values[valuesOffset++] = ((long)((ulong)block8 >> 32)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 15)) & 131071L;
+                values[valuesOffset++] = ((block7 & 3L) << 15) | (block8.TripleShift(49));
+                values[valuesOffset++] = (block8.TripleShift(32)) & 131071L;
+                values[valuesOffset++] = (block8.TripleShift(15)) & 131071L;
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block8 & 32767L) << 2) | ((long)((ulong)block9 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block9 >> 45)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 28)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 11)) & 131071L;
+                values[valuesOffset++] = ((block8 & 32767L) << 2) | (block9.TripleShift(62));
+                values[valuesOffset++] = (block9.TripleShift(45)) & 131071L;
+                values[valuesOffset++] = (block9.TripleShift(28)) & 131071L;
+                values[valuesOffset++] = (block9.TripleShift(11)) & 131071L;
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block9 & 2047L) << 6) | ((long)((ulong)block10 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block10 >> 41)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 24)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 7)) & 131071L;
+                values[valuesOffset++] = ((block9 & 2047L) << 6) | (block10.TripleShift(58));
+                values[valuesOffset++] = (block10.TripleShift(41)) & 131071L;
+                values[valuesOffset++] = (block10.TripleShift(24)) & 131071L;
+                values[valuesOffset++] = (block10.TripleShift(7)) & 131071L;
                 long block11 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block10 & 127L) << 10) | ((long)((ulong)block11 >> 54));
-                values[valuesOffset++] = ((long)((ulong)block11 >> 37)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block11 >> 20)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block11 >> 3)) & 131071L;
+                values[valuesOffset++] = ((block10 & 127L) << 10) | (block11.TripleShift(54));
+                values[valuesOffset++] = (block11.TripleShift(37)) & 131071L;
+                values[valuesOffset++] = (block11.TripleShift(20)) & 131071L;
+                values[valuesOffset++] = (block11.TripleShift(3)) & 131071L;
                 long block12 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block11 & 7L) << 14) | ((long)((ulong)block12 >> 50));
-                values[valuesOffset++] = ((long)((ulong)block12 >> 33)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block12 >> 16)) & 131071L;
+                values[valuesOffset++] = ((block11 & 7L) << 14) | (block12.TripleShift(50));
+                values[valuesOffset++] = (block12.TripleShift(33)) & 131071L;
+                values[valuesOffset++] = (block12.TripleShift(16)) & 131071L;
                 long block13 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block12 & 65535L) << 1) | ((long)((ulong)block13 >> 63));
-                values[valuesOffset++] = ((long)((ulong)block13 >> 46)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block13 >> 29)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block13 >> 12)) & 131071L;
+                values[valuesOffset++] = ((block12 & 65535L) << 1) | (block13.TripleShift(63));
+                values[valuesOffset++] = (block13.TripleShift(46)) & 131071L;
+                values[valuesOffset++] = (block13.TripleShift(29)) & 131071L;
+                values[valuesOffset++] = (block13.TripleShift(12)) & 131071L;
                 long block14 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block13 & 4095L) << 5) | ((long)((ulong)block14 >> 59));
-                values[valuesOffset++] = ((long)((ulong)block14 >> 42)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block14 >> 25)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block14 >> 8)) & 131071L;
+                values[valuesOffset++] = ((block13 & 4095L) << 5) | (block14.TripleShift(59));
+                values[valuesOffset++] = (block14.TripleShift(42)) & 131071L;
+                values[valuesOffset++] = (block14.TripleShift(25)) & 131071L;
+                values[valuesOffset++] = (block14.TripleShift(8)) & 131071L;
                 long block15 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block14 & 255L) << 9) | ((long)((ulong)block15 >> 55));
-                values[valuesOffset++] = ((long)((ulong)block15 >> 38)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block15 >> 21)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block15 >> 4)) & 131071L;
+                values[valuesOffset++] = ((block14 & 255L) << 9) | (block15.TripleShift(55));
+                values[valuesOffset++] = (block15.TripleShift(38)) & 131071L;
+                values[valuesOffset++] = (block15.TripleShift(21)) & 131071L;
+                values[valuesOffset++] = (block15.TripleShift(4)) & 131071L;
                 long block16 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block15 & 15L) << 13) | ((long)((ulong)block16 >> 51));
-                values[valuesOffset++] = ((long)((ulong)block16 >> 34)) & 131071L;
-                values[valuesOffset++] = ((long)((ulong)block16 >> 17)) & 131071L;
+                values[valuesOffset++] = ((block15 & 15L) << 13) | (block16.TripleShift(51));
+                values[valuesOffset++] = (block16.TripleShift(34)) & 131071L;
+                values[valuesOffset++] = (block16.TripleShift(17)) & 131071L;
                 values[valuesOffset++] = block16 & 131071L;
             }
         }
@@ -244,25 +246,25 @@
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 9) | (byte1 << 1) | ((long)((ulong)byte2 >> 7));
+                values[valuesOffset++] = (byte0 << 9) | (byte1 << 1) | (byte2.TripleShift(7));
                 long byte3 = blocks[blocksOffset++] & 0xFF;
                 long byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 127) << 10) | (byte3 << 2) | ((long)((ulong)byte4 >> 6));
+                values[valuesOffset++] = ((byte2 & 127) << 10) | (byte3 << 2) | (byte4.TripleShift(6));
                 long byte5 = blocks[blocksOffset++] & 0xFF;
                 long byte6 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 63) << 11) | (byte5 << 3) | ((long)((ulong)byte6 >> 5));
+                values[valuesOffset++] = ((byte4 & 63) << 11) | (byte5 << 3) | (byte6.TripleShift(5));
                 long byte7 = blocks[blocksOffset++] & 0xFF;
                 long byte8 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte6 & 31) << 12) | (byte7 << 4) | ((long)((ulong)byte8 >> 4));
+                values[valuesOffset++] = ((byte6 & 31) << 12) | (byte7 << 4) | (byte8.TripleShift(4));
                 long byte9 = blocks[blocksOffset++] & 0xFF;
                 long byte10 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte8 & 15) << 13) | (byte9 << 5) | ((long)((ulong)byte10 >> 3));
+                values[valuesOffset++] = ((byte8 & 15) << 13) | (byte9 << 5) | (byte10.TripleShift(3));
                 long byte11 = blocks[blocksOffset++] & 0xFF;
                 long byte12 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte10 & 7) << 14) | (byte11 << 6) | ((long)((ulong)byte12 >> 2));
+                values[valuesOffset++] = ((byte10 & 7) << 14) | (byte11 << 6) | (byte12.TripleShift(2));
                 long byte13 = blocks[blocksOffset++] & 0xFF;
                 long byte14 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte12 & 3) << 15) | (byte13 << 7) | ((long)((ulong)byte14 >> 1));
+                values[valuesOffset++] = ((byte12 & 3) << 15) | (byte13 << 7) | (byte14.TripleShift(1));
                 long byte15 = blocks[blocksOffset++] & 0xFF;
                 long byte16 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte14 & 1) << 16) | (byte15 << 8) | byte16;
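
Note on the conversion above: throughout the BulkOperationPacked* decoders, the hand-written unsigned-shift emulation ((long)((ulong)x >> n) for long values and (int)((uint)x >> n) for int values) is replaced with J2N's TripleShift extension method (hence the added using J2N.Numerics), which performs the logical right shift that Java expresses with >>>. A minimal sketch of the equivalence this relies on; the TripleShiftDemo name is illustrative and not part of the repository:

    using System;
    using J2N.Numerics; // provides the TripleShift extension methods used in the diff above

    internal static class TripleShiftDemo
    {
        internal static void Main()
        {
            long block = unchecked((long)0xFFFF_0000_0000_0000); // negative when viewed as a signed long

            // Old pattern: round-trip through ulong to get a zero-filling (logical) right shift.
            long viaCasts = (long)((ulong)block >> 47);

            // New pattern: J2N's TripleShift performs the same logical right shift directly.
            long viaTripleShift = block.TripleShift(47);

            Console.WriteLine(viaCasts == viaTripleShift); // True
        }
    }
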
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked18.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked18.cs
index 6ed22d4..209661f 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked18.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked18.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,45 +36,45 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 46));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 28)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 10)) & 262143L);
+                values[valuesOffset++] = (int)(block0.TripleShift(46));
+                values[valuesOffset++] = (int)((block0.TripleShift(28)) & 262143L);
+                values[valuesOffset++] = (int)((block0.TripleShift(10)) & 262143L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 1023L) << 8) | ((long)((ulong)block1 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 38)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 20)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 2)) & 262143L);
+                values[valuesOffset++] = (int)(((block0 & 1023L) << 8) | (block1.TripleShift(56)));
+                values[valuesOffset++] = (int)((block1.TripleShift(38)) & 262143L);
+                values[valuesOffset++] = (int)((block1.TripleShift(20)) & 262143L);
+                values[valuesOffset++] = (int)((block1.TripleShift(2)) & 262143L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 3L) << 16) | ((long)((ulong)block2 >> 48)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 30)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 12)) & 262143L);
+                values[valuesOffset++] = (int)(((block1 & 3L) << 16) | (block2.TripleShift(48)));
+                values[valuesOffset++] = (int)((block2.TripleShift(30)) & 262143L);
+                values[valuesOffset++] = (int)((block2.TripleShift(12)) & 262143L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 4095L) << 6) | ((long)((ulong)block3 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 40)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 22)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 4)) & 262143L);
+                values[valuesOffset++] = (int)(((block2 & 4095L) << 6) | (block3.TripleShift(58)));
+                values[valuesOffset++] = (int)((block3.TripleShift(40)) & 262143L);
+                values[valuesOffset++] = (int)((block3.TripleShift(22)) & 262143L);
+                values[valuesOffset++] = (int)((block3.TripleShift(4)) & 262143L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 15L) << 14) | ((long)((ulong)block4 >> 50)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 32)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 14)) & 262143L);
+                values[valuesOffset++] = (int)(((block3 & 15L) << 14) | (block4.TripleShift(50)));
+                values[valuesOffset++] = (int)((block4.TripleShift(32)) & 262143L);
+                values[valuesOffset++] = (int)((block4.TripleShift(14)) & 262143L);
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block4 & 16383L) << 4) | ((long)((ulong)block5 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 42)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 24)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 6)) & 262143L);
+                values[valuesOffset++] = (int)(((block4 & 16383L) << 4) | (block5.TripleShift(60)));
+                values[valuesOffset++] = (int)((block5.TripleShift(42)) & 262143L);
+                values[valuesOffset++] = (int)((block5.TripleShift(24)) & 262143L);
+                values[valuesOffset++] = (int)((block5.TripleShift(6)) & 262143L);
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block5 & 63L) << 12) | ((long)((ulong)block6 >> 52)));
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 34)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 16)) & 262143L);
+                values[valuesOffset++] = (int)(((block5 & 63L) << 12) | (block6.TripleShift(52)));
+                values[valuesOffset++] = (int)((block6.TripleShift(34)) & 262143L);
+                values[valuesOffset++] = (int)((block6.TripleShift(16)) & 262143L);
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block6 & 65535L) << 2) | ((long)((ulong)block7 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 44)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 26)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 8)) & 262143L);
+                values[valuesOffset++] = (int)(((block6 & 65535L) << 2) | (block7.TripleShift(62)));
+                values[valuesOffset++] = (int)((block7.TripleShift(44)) & 262143L);
+                values[valuesOffset++] = (int)((block7.TripleShift(26)) & 262143L);
+                values[valuesOffset++] = (int)((block7.TripleShift(8)) & 262143L);
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block7 & 255L) << 10) | ((long)((ulong)block8 >> 54)));
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 36)) & 262143L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 18)) & 262143L);
+                values[valuesOffset++] = (int)(((block7 & 255L) << 10) | (block8.TripleShift(54)));
+                values[valuesOffset++] = (int)((block8.TripleShift(36)) & 262143L);
+                values[valuesOffset++] = (int)((block8.TripleShift(18)) & 262143L);
                 values[valuesOffset++] = (int)(block8 & 262143L);
             }
         }
@@ -84,13 +86,13 @@
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 10) | (byte1 << 2) | ((int)((uint)byte2 >> 6));
+                values[valuesOffset++] = (byte0 << 10) | (byte1 << 2) | (byte2.TripleShift(6));
                 int byte3 = blocks[blocksOffset++] & 0xFF;
                 int byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 63) << 12) | (byte3 << 4) | ((int)((uint)byte4 >> 4));
+                values[valuesOffset++] = ((byte2 & 63) << 12) | (byte3 << 4) | (byte4.TripleShift(4));
                 int byte5 = blocks[blocksOffset++] & 0xFF;
                 int byte6 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 15) << 14) | (byte5 << 6) | ((int)((uint)byte6 >> 2));
+                values[valuesOffset++] = ((byte4 & 15) << 14) | (byte5 << 6) | (byte6.TripleShift(2));
                 int byte7 = blocks[blocksOffset++] & 0xFF;
                 int byte8 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte6 & 3) << 16) | (byte7 << 8) | byte8;
@@ -102,45 +104,45 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 46);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 28)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 10)) & 262143L;
+                values[valuesOffset++] = block0.TripleShift(46);
+                values[valuesOffset++] = (block0.TripleShift(28)) & 262143L;
+                values[valuesOffset++] = (block0.TripleShift(10)) & 262143L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 1023L) << 8) | ((long)((ulong)block1 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 38)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 20)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 2)) & 262143L;
+                values[valuesOffset++] = ((block0 & 1023L) << 8) | (block1.TripleShift(56));
+                values[valuesOffset++] = (block1.TripleShift(38)) & 262143L;
+                values[valuesOffset++] = (block1.TripleShift(20)) & 262143L;
+                values[valuesOffset++] = (block1.TripleShift(2)) & 262143L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 3L) << 16) | ((long)((ulong)block2 >> 48));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 30)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 12)) & 262143L;
+                values[valuesOffset++] = ((block1 & 3L) << 16) | (block2.TripleShift(48));
+                values[valuesOffset++] = (block2.TripleShift(30)) & 262143L;
+                values[valuesOffset++] = (block2.TripleShift(12)) & 262143L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 4095L) << 6) | ((long)((ulong)block3 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 40)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 22)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 4)) & 262143L;
+                values[valuesOffset++] = ((block2 & 4095L) << 6) | (block3.TripleShift(58));
+                values[valuesOffset++] = (block3.TripleShift(40)) & 262143L;
+                values[valuesOffset++] = (block3.TripleShift(22)) & 262143L;
+                values[valuesOffset++] = (block3.TripleShift(4)) & 262143L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 15L) << 14) | ((long)((ulong)block4 >> 50));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 32)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 14)) & 262143L;
+                values[valuesOffset++] = ((block3 & 15L) << 14) | (block4.TripleShift(50));
+                values[valuesOffset++] = (block4.TripleShift(32)) & 262143L;
+                values[valuesOffset++] = (block4.TripleShift(14)) & 262143L;
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block4 & 16383L) << 4) | ((long)((ulong)block5 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block5 >> 42)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 24)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 6)) & 262143L;
+                values[valuesOffset++] = ((block4 & 16383L) << 4) | (block5.TripleShift(60));
+                values[valuesOffset++] = (block5.TripleShift(42)) & 262143L;
+                values[valuesOffset++] = (block5.TripleShift(24)) & 262143L;
+                values[valuesOffset++] = (block5.TripleShift(6)) & 262143L;
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block5 & 63L) << 12) | ((long)((ulong)block6 >> 52));
-                values[valuesOffset++] = ((long)((ulong)block6 >> 34)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 16)) & 262143L;
+                values[valuesOffset++] = ((block5 & 63L) << 12) | (block6.TripleShift(52));
+                values[valuesOffset++] = (block6.TripleShift(34)) & 262143L;
+                values[valuesOffset++] = (block6.TripleShift(16)) & 262143L;
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block6 & 65535L) << 2) | ((long)((ulong)block7 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block7 >> 44)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 26)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 8)) & 262143L;
+                values[valuesOffset++] = ((block6 & 65535L) << 2) | (block7.TripleShift(62));
+                values[valuesOffset++] = (block7.TripleShift(44)) & 262143L;
+                values[valuesOffset++] = (block7.TripleShift(26)) & 262143L;
+                values[valuesOffset++] = (block7.TripleShift(8)) & 262143L;
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block7 & 255L) << 10) | ((long)((ulong)block8 >> 54));
-                values[valuesOffset++] = ((long)((ulong)block8 >> 36)) & 262143L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 18)) & 262143L;
+                values[valuesOffset++] = ((block7 & 255L) << 10) | (block8.TripleShift(54));
+                values[valuesOffset++] = (block8.TripleShift(36)) & 262143L;
+                values[valuesOffset++] = (block8.TripleShift(18)) & 262143L;
                 values[valuesOffset++] = block8 & 262143L;
             }
         }
@@ -152,13 +154,13 @@
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 10) | (byte1 << 2) | ((long)((ulong)byte2 >> 6));
+                values[valuesOffset++] = (byte0 << 10) | (byte1 << 2) | (byte2.TripleShift(6));
                 long byte3 = blocks[blocksOffset++] & 0xFF;
                 long byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 63) << 12) | (byte3 << 4) | ((long)((ulong)byte4 >> 4));
+                values[valuesOffset++] = ((byte2 & 63) << 12) | (byte3 << 4) | (byte4.TripleShift(4));
                 long byte5 = blocks[blocksOffset++] & 0xFF;
                 long byte6 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 15) << 14) | (byte5 << 6) | ((long)((ulong)byte6 >> 2));
+                values[valuesOffset++] = ((byte4 & 15) << 14) | (byte5 << 6) | (byte6.TripleShift(2));
                 long byte7 = blocks[blocksOffset++] & 0xFF;
                 long byte8 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte6 & 3) << 16) | (byte7 << 8) | byte8;
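
For reference, the literal masks in these generated decoders are simply (1 << bitsPerValue) - 1 for each class's bit width: 131071 = 2^17 - 1 (BulkOperationPacked17), 262143 = 2^18 - 1 (BulkOperationPacked18), and 524287 = 2^19 - 1 (BulkOperationPacked19, below). A small illustrative check; PackedMaskDemo is a hypothetical helper, not code from the repository:

    using System;

    internal static class PackedMaskDemo
    {
        // The generated decoders hard-code (1L << bitsPerValue) - 1 as the extraction mask.
        internal static long MaskFor(int bitsPerValue) => (1L << bitsPerValue) - 1;

        internal static void Main()
        {
            Console.WriteLine(MaskFor(17) == 131071L); // BulkOperationPacked17
            Console.WriteLine(MaskFor(18) == 262143L); // BulkOperationPacked18
            Console.WriteLine(MaskFor(19) == 524287L); // BulkOperationPacked19
        }
    }
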
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked19.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked19.cs
index ed52c91..8a83fc3 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked19.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked19.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,87 +36,87 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 45));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 26)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 7)) & 524287L);
+                values[valuesOffset++] = (int)(block0.TripleShift(45));
+                values[valuesOffset++] = (int)((block0.TripleShift(26)) & 524287L);
+                values[valuesOffset++] = (int)((block0.TripleShift(7)) & 524287L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 127L) << 12) | ((long)((ulong)block1 >> 52)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 33)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 14)) & 524287L);
+                values[valuesOffset++] = (int)(((block0 & 127L) << 12) | (block1.TripleShift(52)));
+                values[valuesOffset++] = (int)((block1.TripleShift(33)) & 524287L);
+                values[valuesOffset++] = (int)((block1.TripleShift(14)) & 524287L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 16383L) << 5) | ((long)((ulong)block2 >> 59)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 40)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 21)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 2)) & 524287L);
+                values[valuesOffset++] = (int)(((block1 & 16383L) << 5) | (block2.TripleShift(59)));
+                values[valuesOffset++] = (int)((block2.TripleShift(40)) & 524287L);
+                values[valuesOffset++] = (int)((block2.TripleShift(21)) & 524287L);
+                values[valuesOffset++] = (int)((block2.TripleShift(2)) & 524287L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 3L) << 17) | ((long)((ulong)block3 >> 47)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 28)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 9)) & 524287L);
+                values[valuesOffset++] = (int)(((block2 & 3L) << 17) | (block3.TripleShift(47)));
+                values[valuesOffset++] = (int)((block3.TripleShift(28)) & 524287L);
+                values[valuesOffset++] = (int)((block3.TripleShift(9)) & 524287L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 511L) << 10) | ((long)((ulong)block4 >> 54)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 35)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 16)) & 524287L);
+                values[valuesOffset++] = (int)(((block3 & 511L) << 10) | (block4.TripleShift(54)));
+                values[valuesOffset++] = (int)((block4.TripleShift(35)) & 524287L);
+                values[valuesOffset++] = (int)((block4.TripleShift(16)) & 524287L);
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block4 & 65535L) << 3) | ((long)((ulong)block5 >> 61)));
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 42)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 23)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 4)) & 524287L);
+                values[valuesOffset++] = (int)(((block4 & 65535L) << 3) | (block5.TripleShift(61)));
+                values[valuesOffset++] = (int)((block5.TripleShift(42)) & 524287L);
+                values[valuesOffset++] = (int)((block5.TripleShift(23)) & 524287L);
+                values[valuesOffset++] = (int)((block5.TripleShift(4)) & 524287L);
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block5 & 15L) << 15) | ((long)((ulong)block6 >> 49)));
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 30)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 11)) & 524287L);
+                values[valuesOffset++] = (int)(((block5 & 15L) << 15) | (block6.TripleShift(49)));
+                values[valuesOffset++] = (int)((block6.TripleShift(30)) & 524287L);
+                values[valuesOffset++] = (int)((block6.TripleShift(11)) & 524287L);
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block6 & 2047L) << 8) | ((long)((ulong)block7 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 37)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 18)) & 524287L);
+                values[valuesOffset++] = (int)(((block6 & 2047L) << 8) | (block7.TripleShift(56)));
+                values[valuesOffset++] = (int)((block7.TripleShift(37)) & 524287L);
+                values[valuesOffset++] = (int)((block7.TripleShift(18)) & 524287L);
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block7 & 262143L) << 1) | ((long)((ulong)block8 >> 63)));
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 44)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 25)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 6)) & 524287L);
+                values[valuesOffset++] = (int)(((block7 & 262143L) << 1) | (block8.TripleShift(63)));
+                values[valuesOffset++] = (int)((block8.TripleShift(44)) & 524287L);
+                values[valuesOffset++] = (int)((block8.TripleShift(25)) & 524287L);
+                values[valuesOffset++] = (int)((block8.TripleShift(6)) & 524287L);
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block8 & 63L) << 13) | ((long)((ulong)block9 >> 51)));
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 32)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 13)) & 524287L);
+                values[valuesOffset++] = (int)(((block8 & 63L) << 13) | (block9.TripleShift(51)));
+                values[valuesOffset++] = (int)((block9.TripleShift(32)) & 524287L);
+                values[valuesOffset++] = (int)((block9.TripleShift(13)) & 524287L);
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block9 & 8191L) << 6) | ((long)((ulong)block10 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 39)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 20)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 1)) & 524287L);
+                values[valuesOffset++] = (int)(((block9 & 8191L) << 6) | (block10.TripleShift(58)));
+                values[valuesOffset++] = (int)((block10.TripleShift(39)) & 524287L);
+                values[valuesOffset++] = (int)((block10.TripleShift(20)) & 524287L);
+                values[valuesOffset++] = (int)((block10.TripleShift(1)) & 524287L);
                 long block11 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block10 & 1L) << 18) | ((long)((ulong)block11 >> 46)));
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 27)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 8)) & 524287L);
+                values[valuesOffset++] = (int)(((block10 & 1L) << 18) | (block11.TripleShift(46)));
+                values[valuesOffset++] = (int)((block11.TripleShift(27)) & 524287L);
+                values[valuesOffset++] = (int)((block11.TripleShift(8)) & 524287L);
                 long block12 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block11 & 255L) << 11) | ((long)((ulong)block12 >> 53)));
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 34)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 15)) & 524287L);
+                values[valuesOffset++] = (int)(((block11 & 255L) << 11) | (block12.TripleShift(53)));
+                values[valuesOffset++] = (int)((block12.TripleShift(34)) & 524287L);
+                values[valuesOffset++] = (int)((block12.TripleShift(15)) & 524287L);
                 long block13 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block12 & 32767L) << 4) | ((long)((ulong)block13 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block13 >> 41)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block13 >> 22)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block13 >> 3)) & 524287L);
+                values[valuesOffset++] = (int)(((block12 & 32767L) << 4) | (block13.TripleShift(60)));
+                values[valuesOffset++] = (int)((block13.TripleShift(41)) & 524287L);
+                values[valuesOffset++] = (int)((block13.TripleShift(22)) & 524287L);
+                values[valuesOffset++] = (int)((block13.TripleShift(3)) & 524287L);
                 long block14 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block13 & 7L) << 16) | ((long)((ulong)block14 >> 48)));
-                values[valuesOffset++] = (int)(((long)((ulong)block14 >> 29)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block14 >> 10)) & 524287L);
+                values[valuesOffset++] = (int)(((block13 & 7L) << 16) | (block14.TripleShift(48)));
+                values[valuesOffset++] = (int)((block14.TripleShift(29)) & 524287L);
+                values[valuesOffset++] = (int)((block14.TripleShift(10)) & 524287L);
                 long block15 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block14 & 1023L) << 9) | ((long)((ulong)block15 >> 55)));
-                values[valuesOffset++] = (int)(((long)((ulong)block15 >> 36)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block15 >> 17)) & 524287L);
+                values[valuesOffset++] = (int)(((block14 & 1023L) << 9) | (block15.TripleShift(55)));
+                values[valuesOffset++] = (int)((block15.TripleShift(36)) & 524287L);
+                values[valuesOffset++] = (int)((block15.TripleShift(17)) & 524287L);
                 long block16 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block15 & 131071L) << 2) | ((long)((ulong)block16 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block16 >> 43)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block16 >> 24)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block16 >> 5)) & 524287L);
+                values[valuesOffset++] = (int)(((block15 & 131071L) << 2) | (block16.TripleShift(62)));
+                values[valuesOffset++] = (int)((block16.TripleShift(43)) & 524287L);
+                values[valuesOffset++] = (int)((block16.TripleShift(24)) & 524287L);
+                values[valuesOffset++] = (int)((block16.TripleShift(5)) & 524287L);
                 long block17 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block16 & 31L) << 14) | ((long)((ulong)block17 >> 50)));
-                values[valuesOffset++] = (int)(((long)((ulong)block17 >> 31)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block17 >> 12)) & 524287L);
+                values[valuesOffset++] = (int)(((block16 & 31L) << 14) | (block17.TripleShift(50)));
+                values[valuesOffset++] = (int)((block17.TripleShift(31)) & 524287L);
+                values[valuesOffset++] = (int)((block17.TripleShift(12)) & 524287L);
                 long block18 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block17 & 4095L) << 7) | ((long)((ulong)block18 >> 57)));
-                values[valuesOffset++] = (int)(((long)((ulong)block18 >> 38)) & 524287L);
-                values[valuesOffset++] = (int)(((long)((ulong)block18 >> 19)) & 524287L);
+                values[valuesOffset++] = (int)(((block17 & 4095L) << 7) | (block18.TripleShift(57)));
+                values[valuesOffset++] = (int)((block18.TripleShift(38)) & 524287L);
+                values[valuesOffset++] = (int)((block18.TripleShift(19)) & 524287L);
                 values[valuesOffset++] = (int)(block18 & 524287L);
             }
         }
@@ -126,27 +128,27 @@
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 11) | (byte1 << 3) | ((int)((uint)byte2 >> 5));
+                values[valuesOffset++] = (byte0 << 11) | (byte1 << 3) | (byte2.TripleShift(5));
                 int byte3 = blocks[blocksOffset++] & 0xFF;
                 int byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 31) << 14) | (byte3 << 6) | ((int)((uint)byte4 >> 2));
+                values[valuesOffset++] = ((byte2 & 31) << 14) | (byte3 << 6) | (byte4.TripleShift(2));
                 int byte5 = blocks[blocksOffset++] & 0xFF;
                 int byte6 = blocks[blocksOffset++] & 0xFF;
                 int byte7 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 3) << 17) | (byte5 << 9) | (byte6 << 1) | ((int)((uint)byte7 >> 7));
+                values[valuesOffset++] = ((byte4 & 3) << 17) | (byte5 << 9) | (byte6 << 1) | (byte7.TripleShift(7));
                 int byte8 = blocks[blocksOffset++] & 0xFF;
                 int byte9 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte7 & 127) << 12) | (byte8 << 4) | ((int)((uint)byte9 >> 4));
+                values[valuesOffset++] = ((byte7 & 127) << 12) | (byte8 << 4) | (byte9.TripleShift(4));
                 int byte10 = blocks[blocksOffset++] & 0xFF;
                 int byte11 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte9 & 15) << 15) | (byte10 << 7) | ((int)((uint)byte11 >> 1));
+                values[valuesOffset++] = ((byte9 & 15) << 15) | (byte10 << 7) | (byte11.TripleShift(1));
                 int byte12 = blocks[blocksOffset++] & 0xFF;
                 int byte13 = blocks[blocksOffset++] & 0xFF;
                 int byte14 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte11 & 1) << 18) | (byte12 << 10) | (byte13 << 2) | ((int)((uint)byte14 >> 6));
+                values[valuesOffset++] = ((byte11 & 1) << 18) | (byte12 << 10) | (byte13 << 2) | (byte14.TripleShift(6));
                 int byte15 = blocks[blocksOffset++] & 0xFF;
                 int byte16 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte14 & 63) << 13) | (byte15 << 5) | ((int)((uint)byte16 >> 3));
+                values[valuesOffset++] = ((byte14 & 63) << 13) | (byte15 << 5) | (byte16.TripleShift(3));
                 int byte17 = blocks[blocksOffset++] & 0xFF;
                 int byte18 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte16 & 7) << 16) | (byte17 << 8) | byte18;
@@ -158,87 +160,87 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 45);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 26)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 7)) & 524287L;
+                values[valuesOffset++] = block0.TripleShift(45);
+                values[valuesOffset++] = (block0.TripleShift(26)) & 524287L;
+                values[valuesOffset++] = (block0.TripleShift(7)) & 524287L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 127L) << 12) | ((long)((ulong)block1 >> 52));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 33)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 14)) & 524287L;
+                values[valuesOffset++] = ((block0 & 127L) << 12) | (block1.TripleShift(52));
+                values[valuesOffset++] = (block1.TripleShift(33)) & 524287L;
+                values[valuesOffset++] = (block1.TripleShift(14)) & 524287L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 16383L) << 5) | ((long)((ulong)block2 >> 59));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 40)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 21)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 2)) & 524287L;
+                values[valuesOffset++] = ((block1 & 16383L) << 5) | (block2.TripleShift(59));
+                values[valuesOffset++] = (block2.TripleShift(40)) & 524287L;
+                values[valuesOffset++] = (block2.TripleShift(21)) & 524287L;
+                values[valuesOffset++] = (block2.TripleShift(2)) & 524287L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 3L) << 17) | ((long)((ulong)block3 >> 47));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 28)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 9)) & 524287L;
+                values[valuesOffset++] = ((block2 & 3L) << 17) | (block3.TripleShift(47));
+                values[valuesOffset++] = (block3.TripleShift(28)) & 524287L;
+                values[valuesOffset++] = (block3.TripleShift(9)) & 524287L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 511L) << 10) | ((long)((ulong)block4 >> 54));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 35)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 16)) & 524287L;
+                values[valuesOffset++] = ((block3 & 511L) << 10) | (block4.TripleShift(54));
+                values[valuesOffset++] = (block4.TripleShift(35)) & 524287L;
+                values[valuesOffset++] = (block4.TripleShift(16)) & 524287L;
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block4 & 65535L) << 3) | ((long)((ulong)block5 >> 61));
-                values[valuesOffset++] = ((long)((ulong)block5 >> 42)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 23)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 4)) & 524287L;
+                values[valuesOffset++] = ((block4 & 65535L) << 3) | (block5.TripleShift(61));
+                values[valuesOffset++] = (block5.TripleShift(42)) & 524287L;
+                values[valuesOffset++] = (block5.TripleShift(23)) & 524287L;
+                values[valuesOffset++] = (block5.TripleShift(4)) & 524287L;
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block5 & 15L) << 15) | ((long)((ulong)block6 >> 49));
-                values[valuesOffset++] = ((long)((ulong)block6 >> 30)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 11)) & 524287L;
+                values[valuesOffset++] = ((block5 & 15L) << 15) | (block6.TripleShift(49));
+                values[valuesOffset++] = (block6.TripleShift(30)) & 524287L;
+                values[valuesOffset++] = (block6.TripleShift(11)) & 524287L;
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block6 & 2047L) << 8) | ((long)((ulong)block7 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block7 >> 37)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 18)) & 524287L;
+                values[valuesOffset++] = ((block6 & 2047L) << 8) | (block7.TripleShift(56));
+                values[valuesOffset++] = (block7.TripleShift(37)) & 524287L;
+                values[valuesOffset++] = (block7.TripleShift(18)) & 524287L;
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block7 & 262143L) << 1) | ((long)((ulong)block8 >> 63));
-                values[valuesOffset++] = ((long)((ulong)block8 >> 44)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 25)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 6)) & 524287L;
+                values[valuesOffset++] = ((block7 & 262143L) << 1) | (block8.TripleShift(63));
+                values[valuesOffset++] = (block8.TripleShift(44)) & 524287L;
+                values[valuesOffset++] = (block8.TripleShift(25)) & 524287L;
+                values[valuesOffset++] = (block8.TripleShift(6)) & 524287L;
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block8 & 63L) << 13) | ((long)((ulong)block9 >> 51));
-                values[valuesOffset++] = ((long)((ulong)block9 >> 32)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 13)) & 524287L;
+                values[valuesOffset++] = ((block8 & 63L) << 13) | (block9.TripleShift(51));
+                values[valuesOffset++] = (block9.TripleShift(32)) & 524287L;
+                values[valuesOffset++] = (block9.TripleShift(13)) & 524287L;
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block9 & 8191L) << 6) | ((long)((ulong)block10 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block10 >> 39)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 20)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 1)) & 524287L;
+                values[valuesOffset++] = ((block9 & 8191L) << 6) | (block10.TripleShift(58));
+                values[valuesOffset++] = (block10.TripleShift(39)) & 524287L;
+                values[valuesOffset++] = (block10.TripleShift(20)) & 524287L;
+                values[valuesOffset++] = (block10.TripleShift(1)) & 524287L;
                 long block11 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block10 & 1L) << 18) | ((long)((ulong)block11 >> 46));
-                values[valuesOffset++] = ((long)((ulong)block11 >> 27)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block11 >> 8)) & 524287L;
+                values[valuesOffset++] = ((block10 & 1L) << 18) | (block11.TripleShift(46));
+                values[valuesOffset++] = (block11.TripleShift(27)) & 524287L;
+                values[valuesOffset++] = (block11.TripleShift(8)) & 524287L;
                 long block12 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block11 & 255L) << 11) | ((long)((ulong)block12 >> 53));
-                values[valuesOffset++] = ((long)((ulong)block12 >> 34)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block12 >> 15)) & 524287L;
+                values[valuesOffset++] = ((block11 & 255L) << 11) | (block12.TripleShift(53));
+                values[valuesOffset++] = (block12.TripleShift(34)) & 524287L;
+                values[valuesOffset++] = (block12.TripleShift(15)) & 524287L;
                 long block13 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block12 & 32767L) << 4) | ((long)((ulong)block13 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block13 >> 41)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block13 >> 22)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block13 >> 3)) & 524287L;
+                values[valuesOffset++] = ((block12 & 32767L) << 4) | (block13.TripleShift(60));
+                values[valuesOffset++] = (block13.TripleShift(41)) & 524287L;
+                values[valuesOffset++] = (block13.TripleShift(22)) & 524287L;
+                values[valuesOffset++] = (block13.TripleShift(3)) & 524287L;
                 long block14 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block13 & 7L) << 16) | ((long)((ulong)block14 >> 48));
-                values[valuesOffset++] = ((long)((ulong)block14 >> 29)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block14 >> 10)) & 524287L;
+                values[valuesOffset++] = ((block13 & 7L) << 16) | (block14.TripleShift(48));
+                values[valuesOffset++] = (block14.TripleShift(29)) & 524287L;
+                values[valuesOffset++] = (block14.TripleShift(10)) & 524287L;
                 long block15 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block14 & 1023L) << 9) | ((long)((ulong)block15 >> 55));
-                values[valuesOffset++] = ((long)((ulong)block15 >> 36)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block15 >> 17)) & 524287L;
+                values[valuesOffset++] = ((block14 & 1023L) << 9) | (block15.TripleShift(55));
+                values[valuesOffset++] = (block15.TripleShift(36)) & 524287L;
+                values[valuesOffset++] = (block15.TripleShift(17)) & 524287L;
                 long block16 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block15 & 131071L) << 2) | ((long)((ulong)block16 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block16 >> 43)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block16 >> 24)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block16 >> 5)) & 524287L;
+                values[valuesOffset++] = ((block15 & 131071L) << 2) | (block16.TripleShift(62));
+                values[valuesOffset++] = (block16.TripleShift(43)) & 524287L;
+                values[valuesOffset++] = (block16.TripleShift(24)) & 524287L;
+                values[valuesOffset++] = (block16.TripleShift(5)) & 524287L;
                 long block17 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block16 & 31L) << 14) | ((long)((ulong)block17 >> 50));
-                values[valuesOffset++] = ((long)((ulong)block17 >> 31)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block17 >> 12)) & 524287L;
+                values[valuesOffset++] = ((block16 & 31L) << 14) | (block17.TripleShift(50));
+                values[valuesOffset++] = (block17.TripleShift(31)) & 524287L;
+                values[valuesOffset++] = (block17.TripleShift(12)) & 524287L;
                 long block18 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block17 & 4095L) << 7) | ((long)((ulong)block18 >> 57));
-                values[valuesOffset++] = ((long)((ulong)block18 >> 38)) & 524287L;
-                values[valuesOffset++] = ((long)((ulong)block18 >> 19)) & 524287L;
+                values[valuesOffset++] = ((block17 & 4095L) << 7) | (block18.TripleShift(57));
+                values[valuesOffset++] = (block18.TripleShift(38)) & 524287L;
+                values[valuesOffset++] = (block18.TripleShift(19)) & 524287L;
                 values[valuesOffset++] = block18 & 524287L;
             }
         }
@@ -250,27 +252,27 @@
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 11) | (byte1 << 3) | ((long)((ulong)byte2 >> 5));
+                values[valuesOffset++] = (byte0 << 11) | (byte1 << 3) | (byte2.TripleShift(5));
                 long byte3 = blocks[blocksOffset++] & 0xFF;
                 long byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 31) << 14) | (byte3 << 6) | ((long)((ulong)byte4 >> 2));
+                values[valuesOffset++] = ((byte2 & 31) << 14) | (byte3 << 6) | (byte4.TripleShift(2));
                 long byte5 = blocks[blocksOffset++] & 0xFF;
                 long byte6 = blocks[blocksOffset++] & 0xFF;
                 long byte7 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 3) << 17) | (byte5 << 9) | (byte6 << 1) | ((long)((ulong)byte7 >> 7));
+                values[valuesOffset++] = ((byte4 & 3) << 17) | (byte5 << 9) | (byte6 << 1) | (byte7.TripleShift(7));
                 long byte8 = blocks[blocksOffset++] & 0xFF;
                 long byte9 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte7 & 127) << 12) | (byte8 << 4) | ((long)((ulong)byte9 >> 4));
+                values[valuesOffset++] = ((byte7 & 127) << 12) | (byte8 << 4) | (byte9.TripleShift(4));
                 long byte10 = blocks[blocksOffset++] & 0xFF;
                 long byte11 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte9 & 15) << 15) | (byte10 << 7) | ((long)((ulong)byte11 >> 1));
+                values[valuesOffset++] = ((byte9 & 15) << 15) | (byte10 << 7) | (byte11.TripleShift(1));
                 long byte12 = blocks[blocksOffset++] & 0xFF;
                 long byte13 = blocks[blocksOffset++] & 0xFF;
                 long byte14 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte11 & 1) << 18) | (byte12 << 10) | (byte13 << 2) | ((long)((ulong)byte14 >> 6));
+                values[valuesOffset++] = ((byte11 & 1) << 18) | (byte12 << 10) | (byte13 << 2) | (byte14.TripleShift(6));
                 long byte15 = blocks[blocksOffset++] & 0xFF;
                 long byte16 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte14 & 63) << 13) | (byte15 << 5) | ((long)((ulong)byte16 >> 3));
+                values[valuesOffset++] = ((byte14 & 63) << 13) | (byte15 << 5) | (byte16.TripleShift(3));
                 long byte17 = blocks[blocksOffset++] & 0xFF;
                 long byte18 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte16 & 7) << 16) | (byte17 << 8) | byte18;
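
(Editorial note, not part of the patch.) Every hunk in these generated `BulkOperationPacked*` files makes the same mechanical substitution: the C# unsigned-shift idiom `(long)((ulong)x >> n)` (or `(int)((uint)x >> n)`) is replaced by J2N's `TripleShift(n)` extension, imported via `using J2N.Numerics;`. A minimal sketch of the equivalence the patch relies on, assuming only what the hunks themselves show (the `TripleShiftDemo` class and `OldStyle` helper are illustrative names, not part of the repository):

```csharp
// Sketch: TripleShift performs a logical (unsigned) right shift on a signed value,
// matching the removed (long)((ulong)x >> n) casts, unlike the arithmetic '>>'
// operator on long, which shifts in copies of the sign bit.
using System;
using J2N.Numerics;

internal static class TripleShiftDemo
{
    // The idiom the patch removes.
    private static long OldStyle(long block, int shift) => (long)((ulong)block >> shift);

    public static void Main()
    {
        long block = -1L; // all 64 bits set, including the sign bit
        foreach (int shift in new[] { 1, 32, 63 })
        {
            // Both expressions shift zero bits in from the left.
            Console.WriteLine(block.TripleShift(shift) == OldStyle(block, shift)); // True
        }
    }
}
```
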
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked2.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked2.cs
index 713b0fe..8d91a99 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked2.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked2.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -36,7 +38,7 @@
                 long block = blocks[blocksOffset++];
                 for (int shift = 62; shift >= 0; shift -= 2)
                 {
-                    values[valuesOffset++] = (int)(((long)((ulong)block >> shift)) & 3);
+                    values[valuesOffset++] = (int)((block.TripleShift(shift)) & 3);
                 }
             }
         }
@@ -46,9 +48,9 @@
             for (int j = 0; j < iterations; ++j)
             {
                 var block = blocks[blocksOffset++];
-                values[valuesOffset++] = ((int)((uint)block >> 6)) & 3;
-                values[valuesOffset++] = ((int)((uint)block >> 4)) & 3;
-                values[valuesOffset++] = ((int)((uint)block >> 2)) & 3;
+                values[valuesOffset++] = (block.TripleShift(6)) & 3;
+                values[valuesOffset++] = (block.TripleShift(4)) & 3;
+                values[valuesOffset++] = (block.TripleShift(2)) & 3;
                 values[valuesOffset++] = block & 3;
             }
         }
@@ -60,7 +62,7 @@
                 long block = blocks[blocksOffset++];
                 for (int shift = 62; shift >= 0; shift -= 2)
                 {
-                    values[valuesOffset++] = ((long)((ulong)block >> shift)) & 3;
+                    values[valuesOffset++] = (block.TripleShift(shift)) & 3;
                 }
             }
         }
@@ -70,9 +72,9 @@
             for (int j = 0; j < iterations; ++j)
             {
                 var block = blocks[blocksOffset++];
-                values[valuesOffset++] = ((int)((uint)block >> 6)) & 3;
-                values[valuesOffset++] = ((int)((uint)block >> 4)) & 3;
-                values[valuesOffset++] = ((int)((uint)block >> 2)) & 3;
+                values[valuesOffset++] = (block.TripleShift(6)) & 3;
+                values[valuesOffset++] = (block.TripleShift(4)) & 3;
+                values[valuesOffset++] = (block.TripleShift(2)) & 3;
                 values[valuesOffset++] = block & 3;
             }
         }
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked20.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked20.cs
index 95f7873..e826aed 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked20.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked20.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,25 +36,25 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 44));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 24)) & 1048575L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 4)) & 1048575L);
+                values[valuesOffset++] = (int)(block0.TripleShift(44));
+                values[valuesOffset++] = (int)((block0.TripleShift(24)) & 1048575L);
+                values[valuesOffset++] = (int)((block0.TripleShift(4)) & 1048575L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 15L) << 16) | ((long)((ulong)block1 >> 48)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 28)) & 1048575L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 8)) & 1048575L);
+                values[valuesOffset++] = (int)(((block0 & 15L) << 16) | (block1.TripleShift(48)));
+                values[valuesOffset++] = (int)((block1.TripleShift(28)) & 1048575L);
+                values[valuesOffset++] = (int)((block1.TripleShift(8)) & 1048575L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 255L) << 12) | ((long)((ulong)block2 >> 52)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 32)) & 1048575L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 12)) & 1048575L);
+                values[valuesOffset++] = (int)(((block1 & 255L) << 12) | (block2.TripleShift(52)));
+                values[valuesOffset++] = (int)((block2.TripleShift(32)) & 1048575L);
+                values[valuesOffset++] = (int)((block2.TripleShift(12)) & 1048575L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 4095L) << 8) | ((long)((ulong)block3 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 36)) & 1048575L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 16)) & 1048575L);
+                values[valuesOffset++] = (int)(((block2 & 4095L) << 8) | (block3.TripleShift(56)));
+                values[valuesOffset++] = (int)((block3.TripleShift(36)) & 1048575L);
+                values[valuesOffset++] = (int)((block3.TripleShift(16)) & 1048575L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 65535L) << 4) | ((long)((ulong)block4 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 40)) & 1048575L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 20)) & 1048575L);
+                values[valuesOffset++] = (int)(((block3 & 65535L) << 4) | (block4.TripleShift(60)));
+                values[valuesOffset++] = (int)((block4.TripleShift(40)) & 1048575L);
+                values[valuesOffset++] = (int)((block4.TripleShift(20)) & 1048575L);
                 values[valuesOffset++] = (int)(block4 & 1048575L);
             }
         }
@@ -64,7 +66,7 @@
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 12) | (byte1 << 4) | ((int)((uint)byte2 >> 4));
+                values[valuesOffset++] = (byte0 << 12) | (byte1 << 4) | (byte2.TripleShift(4));
                 int byte3 = blocks[blocksOffset++] & 0xFF;
                 int byte4 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte2 & 15) << 16) | (byte3 << 8) | byte4;
@@ -76,25 +78,25 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 44);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 24)) & 1048575L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 4)) & 1048575L;
+                values[valuesOffset++] = block0.TripleShift(44);
+                values[valuesOffset++] = (block0.TripleShift(24)) & 1048575L;
+                values[valuesOffset++] = (block0.TripleShift(4)) & 1048575L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 15L) << 16) | ((long)((ulong)block1 >> 48));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 28)) & 1048575L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 8)) & 1048575L;
+                values[valuesOffset++] = ((block0 & 15L) << 16) | (block1.TripleShift(48));
+                values[valuesOffset++] = (block1.TripleShift(28)) & 1048575L;
+                values[valuesOffset++] = (block1.TripleShift(8)) & 1048575L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 255L) << 12) | ((long)((ulong)block2 >> 52));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 32)) & 1048575L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 12)) & 1048575L;
+                values[valuesOffset++] = ((block1 & 255L) << 12) | (block2.TripleShift(52));
+                values[valuesOffset++] = (block2.TripleShift(32)) & 1048575L;
+                values[valuesOffset++] = (block2.TripleShift(12)) & 1048575L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 4095L) << 8) | ((long)((ulong)block3 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 36)) & 1048575L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 16)) & 1048575L;
+                values[valuesOffset++] = ((block2 & 4095L) << 8) | (block3.TripleShift(56));
+                values[valuesOffset++] = (block3.TripleShift(36)) & 1048575L;
+                values[valuesOffset++] = (block3.TripleShift(16)) & 1048575L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 65535L) << 4) | ((long)((ulong)block4 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 40)) & 1048575L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 20)) & 1048575L;
+                values[valuesOffset++] = ((block3 & 65535L) << 4) | (block4.TripleShift(60));
+                values[valuesOffset++] = (block4.TripleShift(40)) & 1048575L;
+                values[valuesOffset++] = (block4.TripleShift(20)) & 1048575L;
                 values[valuesOffset++] = block4 & 1048575L;
             }
         }
@@ -106,7 +108,7 @@
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 12) | (byte1 << 4) | ((long)((ulong)byte2 >> 4));
+                values[valuesOffset++] = (byte0 << 12) | (byte1 << 4) | (byte2.TripleShift(4));
                 long byte3 = blocks[blocksOffset++] & 0xFF;
                 long byte4 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte2 & 15) << 16) | (byte3 << 8) | byte4;
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked21.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked21.cs
index d5daa92..c737d88 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked21.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked21.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,89 +36,89 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 43));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 22)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 1)) & 2097151L);
+                values[valuesOffset++] = (int)(block0.TripleShift(43));
+                values[valuesOffset++] = (int)((block0.TripleShift(22)) & 2097151L);
+                values[valuesOffset++] = (int)((block0.TripleShift(1)) & 2097151L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 1L) << 20) | ((long)((ulong)block1 >> 44)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 23)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 2)) & 2097151L);
+                values[valuesOffset++] = (int)(((block0 & 1L) << 20) | (block1.TripleShift(44)));
+                values[valuesOffset++] = (int)((block1.TripleShift(23)) & 2097151L);
+                values[valuesOffset++] = (int)((block1.TripleShift(2)) & 2097151L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 3L) << 19) | ((long)((ulong)block2 >> 45)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 24)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 3)) & 2097151L);
+                values[valuesOffset++] = (int)(((block1 & 3L) << 19) | (block2.TripleShift(45)));
+                values[valuesOffset++] = (int)((block2.TripleShift(24)) & 2097151L);
+                values[valuesOffset++] = (int)((block2.TripleShift(3)) & 2097151L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 7L) << 18) | ((long)((ulong)block3 >> 46)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 25)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 4)) & 2097151L);
+                values[valuesOffset++] = (int)(((block2 & 7L) << 18) | (block3.TripleShift(46)));
+                values[valuesOffset++] = (int)((block3.TripleShift(25)) & 2097151L);
+                values[valuesOffset++] = (int)((block3.TripleShift(4)) & 2097151L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 15L) << 17) | ((long)((ulong)block4 >> 47)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 26)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 5)) & 2097151L);
+                values[valuesOffset++] = (int)(((block3 & 15L) << 17) | (block4.TripleShift(47)));
+                values[valuesOffset++] = (int)((block4.TripleShift(26)) & 2097151L);
+                values[valuesOffset++] = (int)((block4.TripleShift(5)) & 2097151L);
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block4 & 31L) << 16) | ((long)((ulong)block5 >> 48)));
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 27)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 6)) & 2097151L);
+                values[valuesOffset++] = (int)(((block4 & 31L) << 16) | (block5.TripleShift(48)));
+                values[valuesOffset++] = (int)((block5.TripleShift(27)) & 2097151L);
+                values[valuesOffset++] = (int)((block5.TripleShift(6)) & 2097151L);
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block5 & 63L) << 15) | ((long)((ulong)block6 >> 49)));
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 28)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 7)) & 2097151L);
+                values[valuesOffset++] = (int)(((block5 & 63L) << 15) | (block6.TripleShift(49)));
+                values[valuesOffset++] = (int)((block6.TripleShift(28)) & 2097151L);
+                values[valuesOffset++] = (int)((block6.TripleShift(7)) & 2097151L);
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block6 & 127L) << 14) | ((long)((ulong)block7 >> 50)));
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 29)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 8)) & 2097151L);
+                values[valuesOffset++] = (int)(((block6 & 127L) << 14) | (block7.TripleShift(50)));
+                values[valuesOffset++] = (int)((block7.TripleShift(29)) & 2097151L);
+                values[valuesOffset++] = (int)((block7.TripleShift(8)) & 2097151L);
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block7 & 255L) << 13) | ((long)((ulong)block8 >> 51)));
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 30)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 9)) & 2097151L);
+                values[valuesOffset++] = (int)(((block7 & 255L) << 13) | (block8.TripleShift(51)));
+                values[valuesOffset++] = (int)((block8.TripleShift(30)) & 2097151L);
+                values[valuesOffset++] = (int)((block8.TripleShift(9)) & 2097151L);
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block8 & 511L) << 12) | ((long)((ulong)block9 >> 52)));
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 31)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 10)) & 2097151L);
+                values[valuesOffset++] = (int)(((block8 & 511L) << 12) | (block9.TripleShift(52)));
+                values[valuesOffset++] = (int)((block9.TripleShift(31)) & 2097151L);
+                values[valuesOffset++] = (int)((block9.TripleShift(10)) & 2097151L);
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block9 & 1023L) << 11) | ((long)((ulong)block10 >> 53)));
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 32)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 11)) & 2097151L);
+                values[valuesOffset++] = (int)(((block9 & 1023L) << 11) | (block10.TripleShift(53)));
+                values[valuesOffset++] = (int)((block10.TripleShift(32)) & 2097151L);
+                values[valuesOffset++] = (int)((block10.TripleShift(11)) & 2097151L);
                 long block11 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block10 & 2047L) << 10) | ((long)((ulong)block11 >> 54)));
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 33)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 12)) & 2097151L);
+                values[valuesOffset++] = (int)(((block10 & 2047L) << 10) | (block11.TripleShift(54)));
+                values[valuesOffset++] = (int)((block11.TripleShift(33)) & 2097151L);
+                values[valuesOffset++] = (int)((block11.TripleShift(12)) & 2097151L);
                 long block12 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block11 & 4095L) << 9) | ((long)((ulong)block12 >> 55)));
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 34)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 13)) & 2097151L);
+                values[valuesOffset++] = (int)(((block11 & 4095L) << 9) | (block12.TripleShift(55)));
+                values[valuesOffset++] = (int)((block12.TripleShift(34)) & 2097151L);
+                values[valuesOffset++] = (int)((block12.TripleShift(13)) & 2097151L);
                 long block13 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block12 & 8191L) << 8) | ((long)((ulong)block13 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block13 >> 35)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block13 >> 14)) & 2097151L);
+                values[valuesOffset++] = (int)(((block12 & 8191L) << 8) | (block13.TripleShift(56)));
+                values[valuesOffset++] = (int)((block13.TripleShift(35)) & 2097151L);
+                values[valuesOffset++] = (int)((block13.TripleShift(14)) & 2097151L);
                 long block14 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block13 & 16383L) << 7) | ((long)((ulong)block14 >> 57)));
-                values[valuesOffset++] = (int)(((long)((ulong)block14 >> 36)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block14 >> 15)) & 2097151L);
+                values[valuesOffset++] = (int)(((block13 & 16383L) << 7) | (block14.TripleShift(57)));
+                values[valuesOffset++] = (int)((block14.TripleShift(36)) & 2097151L);
+                values[valuesOffset++] = (int)((block14.TripleShift(15)) & 2097151L);
                 long block15 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block14 & 32767L) << 6) | ((long)((ulong)block15 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block15 >> 37)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block15 >> 16)) & 2097151L);
+                values[valuesOffset++] = (int)(((block14 & 32767L) << 6) | (block15.TripleShift(58)));
+                values[valuesOffset++] = (int)((block15.TripleShift(37)) & 2097151L);
+                values[valuesOffset++] = (int)((block15.TripleShift(16)) & 2097151L);
                 long block16 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block15 & 65535L) << 5) | ((long)((ulong)block16 >> 59)));
-                values[valuesOffset++] = (int)(((long)((ulong)block16 >> 38)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block16 >> 17)) & 2097151L);
+                values[valuesOffset++] = (int)(((block15 & 65535L) << 5) | (block16.TripleShift(59)));
+                values[valuesOffset++] = (int)((block16.TripleShift(38)) & 2097151L);
+                values[valuesOffset++] = (int)((block16.TripleShift(17)) & 2097151L);
                 long block17 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block16 & 131071L) << 4) | ((long)((ulong)block17 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block17 >> 39)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block17 >> 18)) & 2097151L);
+                values[valuesOffset++] = (int)(((block16 & 131071L) << 4) | (block17.TripleShift(60)));
+                values[valuesOffset++] = (int)((block17.TripleShift(39)) & 2097151L);
+                values[valuesOffset++] = (int)((block17.TripleShift(18)) & 2097151L);
                 long block18 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block17 & 262143L) << 3) | ((long)((ulong)block18 >> 61)));
-                values[valuesOffset++] = (int)(((long)((ulong)block18 >> 40)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block18 >> 19)) & 2097151L);
+                values[valuesOffset++] = (int)(((block17 & 262143L) << 3) | (block18.TripleShift(61)));
+                values[valuesOffset++] = (int)((block18.TripleShift(40)) & 2097151L);
+                values[valuesOffset++] = (int)((block18.TripleShift(19)) & 2097151L);
                 long block19 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block18 & 524287L) << 2) | ((long)((ulong)block19 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block19 >> 41)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block19 >> 20)) & 2097151L);
+                values[valuesOffset++] = (int)(((block18 & 524287L) << 2) | (block19.TripleShift(62)));
+                values[valuesOffset++] = (int)((block19.TripleShift(41)) & 2097151L);
+                values[valuesOffset++] = (int)((block19.TripleShift(20)) & 2097151L);
                 long block20 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block19 & 1048575L) << 1) | ((long)((ulong)block20 >> 63)));
-                values[valuesOffset++] = (int)(((long)((ulong)block20 >> 42)) & 2097151L);
-                values[valuesOffset++] = (int)(((long)((ulong)block20 >> 21)) & 2097151L);
+                values[valuesOffset++] = (int)(((block19 & 1048575L) << 1) | (block20.TripleShift(63)));
+                values[valuesOffset++] = (int)((block20.TripleShift(42)) & 2097151L);
+                values[valuesOffset++] = (int)((block20.TripleShift(21)) & 2097151L);
                 values[valuesOffset++] = (int)(block20 & 2097151L);
             }
         }
@@ -128,29 +130,29 @@
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 13) | (byte1 << 5) | ((int)((uint)byte2 >> 3));
+                values[valuesOffset++] = (byte0 << 13) | (byte1 << 5) | (byte2.TripleShift(3));
                 int byte3 = blocks[blocksOffset++] & 0xFF;
                 int byte4 = blocks[blocksOffset++] & 0xFF;
                 int byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 7) << 18) | (byte3 << 10) | (byte4 << 2) | ((int)((uint)byte5 >> 6));
+                values[valuesOffset++] = ((byte2 & 7) << 18) | (byte3 << 10) | (byte4 << 2) | (byte5.TripleShift(6));
                 int byte6 = blocks[blocksOffset++] & 0xFF;
                 int byte7 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 63) << 15) | (byte6 << 7) | ((int)((uint)byte7 >> 1));
+                values[valuesOffset++] = ((byte5 & 63) << 15) | (byte6 << 7) | (byte7.TripleShift(1));
                 int byte8 = blocks[blocksOffset++] & 0xFF;
                 int byte9 = blocks[blocksOffset++] & 0xFF;
                 int byte10 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte7 & 1) << 20) | (byte8 << 12) | (byte9 << 4) | ((int)((uint)byte10 >> 4));
+                values[valuesOffset++] = ((byte7 & 1) << 20) | (byte8 << 12) | (byte9 << 4) | (byte10.TripleShift(4));
                 int byte11 = blocks[blocksOffset++] & 0xFF;
                 int byte12 = blocks[blocksOffset++] & 0xFF;
                 int byte13 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte10 & 15) << 17) | (byte11 << 9) | (byte12 << 1) | ((int)((uint)byte13 >> 7));
+                values[valuesOffset++] = ((byte10 & 15) << 17) | (byte11 << 9) | (byte12 << 1) | (byte13.TripleShift(7));
                 int byte14 = blocks[blocksOffset++] & 0xFF;
                 int byte15 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte13 & 127) << 14) | (byte14 << 6) | ((int)((uint)byte15 >> 2));
+                values[valuesOffset++] = ((byte13 & 127) << 14) | (byte14 << 6) | (byte15.TripleShift(2));
                 int byte16 = blocks[blocksOffset++] & 0xFF;
                 int byte17 = blocks[blocksOffset++] & 0xFF;
                 int byte18 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte15 & 3) << 19) | (byte16 << 11) | (byte17 << 3) | ((int)((uint)byte18 >> 5));
+                values[valuesOffset++] = ((byte15 & 3) << 19) | (byte16 << 11) | (byte17 << 3) | (byte18.TripleShift(5));
                 int byte19 = blocks[blocksOffset++] & 0xFF;
                 int byte20 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte18 & 31) << 16) | (byte19 << 8) | byte20;
@@ -162,89 +164,89 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 43);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 22)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 1)) & 2097151L;
+                values[valuesOffset++] = block0.TripleShift(43);
+                values[valuesOffset++] = (block0.TripleShift(22)) & 2097151L;
+                values[valuesOffset++] = (block0.TripleShift(1)) & 2097151L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 1L) << 20) | ((long)((ulong)block1 >> 44));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 23)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 2)) & 2097151L;
+                values[valuesOffset++] = ((block0 & 1L) << 20) | (block1.TripleShift(44));
+                values[valuesOffset++] = (block1.TripleShift(23)) & 2097151L;
+                values[valuesOffset++] = (block1.TripleShift(2)) & 2097151L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 3L) << 19) | ((long)((ulong)block2 >> 45));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 24)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 3)) & 2097151L;
+                values[valuesOffset++] = ((block1 & 3L) << 19) | (block2.TripleShift(45));
+                values[valuesOffset++] = (block2.TripleShift(24)) & 2097151L;
+                values[valuesOffset++] = (block2.TripleShift(3)) & 2097151L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 7L) << 18) | ((long)((ulong)block3 >> 46));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 25)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 4)) & 2097151L;
+                values[valuesOffset++] = ((block2 & 7L) << 18) | (block3.TripleShift(46));
+                values[valuesOffset++] = (block3.TripleShift(25)) & 2097151L;
+                values[valuesOffset++] = (block3.TripleShift(4)) & 2097151L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 15L) << 17) | ((long)((ulong)block4 >> 47));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 26)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 5)) & 2097151L;
+                values[valuesOffset++] = ((block3 & 15L) << 17) | (block4.TripleShift(47));
+                values[valuesOffset++] = (block4.TripleShift(26)) & 2097151L;
+                values[valuesOffset++] = (block4.TripleShift(5)) & 2097151L;
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block4 & 31L) << 16) | ((long)((ulong)block5 >> 48));
-                values[valuesOffset++] = ((long)((ulong)block5 >> 27)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 6)) & 2097151L;
+                values[valuesOffset++] = ((block4 & 31L) << 16) | (block5.TripleShift(48));
+                values[valuesOffset++] = (block5.TripleShift(27)) & 2097151L;
+                values[valuesOffset++] = (block5.TripleShift(6)) & 2097151L;
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block5 & 63L) << 15) | ((long)((ulong)block6 >> 49));
-                values[valuesOffset++] = ((long)((ulong)block6 >> 28)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 7)) & 2097151L;
+                values[valuesOffset++] = ((block5 & 63L) << 15) | (block6.TripleShift(49));
+                values[valuesOffset++] = (block6.TripleShift(28)) & 2097151L;
+                values[valuesOffset++] = (block6.TripleShift(7)) & 2097151L;
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block6 & 127L) << 14) | ((long)((ulong)block7 >> 50));
-                values[valuesOffset++] = ((long)((ulong)block7 >> 29)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 8)) & 2097151L;
+                values[valuesOffset++] = ((block6 & 127L) << 14) | (block7.TripleShift(50));
+                values[valuesOffset++] = (block7.TripleShift(29)) & 2097151L;
+                values[valuesOffset++] = (block7.TripleShift(8)) & 2097151L;
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block7 & 255L) << 13) | ((long)((ulong)block8 >> 51));
-                values[valuesOffset++] = ((long)((ulong)block8 >> 30)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 9)) & 2097151L;
+                values[valuesOffset++] = ((block7 & 255L) << 13) | (block8.TripleShift(51));
+                values[valuesOffset++] = (block8.TripleShift(30)) & 2097151L;
+                values[valuesOffset++] = (block8.TripleShift(9)) & 2097151L;
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block8 & 511L) << 12) | ((long)((ulong)block9 >> 52));
-                values[valuesOffset++] = ((long)((ulong)block9 >> 31)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 10)) & 2097151L;
+                values[valuesOffset++] = ((block8 & 511L) << 12) | (block9.TripleShift(52));
+                values[valuesOffset++] = (block9.TripleShift(31)) & 2097151L;
+                values[valuesOffset++] = (block9.TripleShift(10)) & 2097151L;
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block9 & 1023L) << 11) | ((long)((ulong)block10 >> 53));
-                values[valuesOffset++] = ((long)((ulong)block10 >> 32)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 11)) & 2097151L;
+                values[valuesOffset++] = ((block9 & 1023L) << 11) | (block10.TripleShift(53));
+                values[valuesOffset++] = (block10.TripleShift(32)) & 2097151L;
+                values[valuesOffset++] = (block10.TripleShift(11)) & 2097151L;
                 long block11 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block10 & 2047L) << 10) | ((long)((ulong)block11 >> 54));
-                values[valuesOffset++] = ((long)((ulong)block11 >> 33)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block11 >> 12)) & 2097151L;
+                values[valuesOffset++] = ((block10 & 2047L) << 10) | (block11.TripleShift(54));
+                values[valuesOffset++] = (block11.TripleShift(33)) & 2097151L;
+                values[valuesOffset++] = (block11.TripleShift(12)) & 2097151L;
                 long block12 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block11 & 4095L) << 9) | ((long)((ulong)block12 >> 55));
-                values[valuesOffset++] = ((long)((ulong)block12 >> 34)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block12 >> 13)) & 2097151L;
+                values[valuesOffset++] = ((block11 & 4095L) << 9) | (block12.TripleShift(55));
+                values[valuesOffset++] = (block12.TripleShift(34)) & 2097151L;
+                values[valuesOffset++] = (block12.TripleShift(13)) & 2097151L;
                 long block13 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block12 & 8191L) << 8) | ((long)((ulong)block13 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block13 >> 35)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block13 >> 14)) & 2097151L;
+                values[valuesOffset++] = ((block12 & 8191L) << 8) | (block13.TripleShift(56));
+                values[valuesOffset++] = (block13.TripleShift(35)) & 2097151L;
+                values[valuesOffset++] = (block13.TripleShift(14)) & 2097151L;
                 long block14 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block13 & 16383L) << 7) | ((long)((ulong)block14 >> 57));
-                values[valuesOffset++] = ((long)((ulong)block14 >> 36)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block14 >> 15)) & 2097151L;
+                values[valuesOffset++] = ((block13 & 16383L) << 7) | (block14.TripleShift(57));
+                values[valuesOffset++] = (block14.TripleShift(36)) & 2097151L;
+                values[valuesOffset++] = (block14.TripleShift(15)) & 2097151L;
                 long block15 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block14 & 32767L) << 6) | ((long)((ulong)block15 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block15 >> 37)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block15 >> 16)) & 2097151L;
+                values[valuesOffset++] = ((block14 & 32767L) << 6) | (block15.TripleShift(58));
+                values[valuesOffset++] = (block15.TripleShift(37)) & 2097151L;
+                values[valuesOffset++] = (block15.TripleShift(16)) & 2097151L;
                 long block16 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block15 & 65535L) << 5) | ((long)((ulong)block16 >> 59));
-                values[valuesOffset++] = ((long)((ulong)block16 >> 38)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block16 >> 17)) & 2097151L;
+                values[valuesOffset++] = ((block15 & 65535L) << 5) | (block16.TripleShift(59));
+                values[valuesOffset++] = (block16.TripleShift(38)) & 2097151L;
+                values[valuesOffset++] = (block16.TripleShift(17)) & 2097151L;
                 long block17 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block16 & 131071L) << 4) | ((long)((ulong)block17 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block17 >> 39)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block17 >> 18)) & 2097151L;
+                values[valuesOffset++] = ((block16 & 131071L) << 4) | (block17.TripleShift(60));
+                values[valuesOffset++] = (block17.TripleShift(39)) & 2097151L;
+                values[valuesOffset++] = (block17.TripleShift(18)) & 2097151L;
                 long block18 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block17 & 262143L) << 3) | ((long)((ulong)block18 >> 61));
-                values[valuesOffset++] = ((long)((ulong)block18 >> 40)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block18 >> 19)) & 2097151L;
+                values[valuesOffset++] = ((block17 & 262143L) << 3) | (block18.TripleShift(61));
+                values[valuesOffset++] = (block18.TripleShift(40)) & 2097151L;
+                values[valuesOffset++] = (block18.TripleShift(19)) & 2097151L;
                 long block19 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block18 & 524287L) << 2) | ((long)((ulong)block19 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block19 >> 41)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block19 >> 20)) & 2097151L;
+                values[valuesOffset++] = ((block18 & 524287L) << 2) | (block19.TripleShift(62));
+                values[valuesOffset++] = (block19.TripleShift(41)) & 2097151L;
+                values[valuesOffset++] = (block19.TripleShift(20)) & 2097151L;
                 long block20 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block19 & 1048575L) << 1) | ((long)((ulong)block20 >> 63));
-                values[valuesOffset++] = ((long)((ulong)block20 >> 42)) & 2097151L;
-                values[valuesOffset++] = ((long)((ulong)block20 >> 21)) & 2097151L;
+                values[valuesOffset++] = ((block19 & 1048575L) << 1) | (block20.TripleShift(63));
+                values[valuesOffset++] = (block20.TripleShift(42)) & 2097151L;
+                values[valuesOffset++] = (block20.TripleShift(21)) & 2097151L;
                 values[valuesOffset++] = block20 & 2097151L;
             }
         }
@@ -256,29 +258,29 @@
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 13) | (byte1 << 5) | ((long)((ulong)byte2 >> 3));
+                values[valuesOffset++] = (byte0 << 13) | (byte1 << 5) | (byte2.TripleShift(3));
                 long byte3 = blocks[blocksOffset++] & 0xFF;
                 long byte4 = blocks[blocksOffset++] & 0xFF;
                 long byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 7) << 18) | (byte3 << 10) | (byte4 << 2) | ((long)((ulong)byte5 >> 6));
+                values[valuesOffset++] = ((byte2 & 7) << 18) | (byte3 << 10) | (byte4 << 2) | (byte5.TripleShift(6));
                 long byte6 = blocks[blocksOffset++] & 0xFF;
                 long byte7 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 63) << 15) | (byte6 << 7) | ((long)((ulong)byte7 >> 1));
+                values[valuesOffset++] = ((byte5 & 63) << 15) | (byte6 << 7) | (byte7.TripleShift(1));
                 long byte8 = blocks[blocksOffset++] & 0xFF;
                 long byte9 = blocks[blocksOffset++] & 0xFF;
                 long byte10 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte7 & 1) << 20) | (byte8 << 12) | (byte9 << 4) | ((long)((ulong)byte10 >> 4));
+                values[valuesOffset++] = ((byte7 & 1) << 20) | (byte8 << 12) | (byte9 << 4) | (byte10.TripleShift(4));
                 long byte11 = blocks[blocksOffset++] & 0xFF;
                 long byte12 = blocks[blocksOffset++] & 0xFF;
                 long byte13 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte10 & 15) << 17) | (byte11 << 9) | (byte12 << 1) | ((long)((ulong)byte13 >> 7));
+                values[valuesOffset++] = ((byte10 & 15) << 17) | (byte11 << 9) | (byte12 << 1) | (byte13.TripleShift(7));
                 long byte14 = blocks[blocksOffset++] & 0xFF;
                 long byte15 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte13 & 127) << 14) | (byte14 << 6) | ((long)((ulong)byte15 >> 2));
+                values[valuesOffset++] = ((byte13 & 127) << 14) | (byte14 << 6) | (byte15.TripleShift(2));
                 long byte16 = blocks[blocksOffset++] & 0xFF;
                 long byte17 = blocks[blocksOffset++] & 0xFF;
                 long byte18 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte15 & 3) << 19) | (byte16 << 11) | (byte17 << 3) | ((long)((ulong)byte18 >> 5));
+                values[valuesOffset++] = ((byte15 & 3) << 19) | (byte16 << 11) | (byte17 << 3) | (byte18.TripleShift(5));
                 long byte19 = blocks[blocksOffset++] & 0xFF;
                 long byte20 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte18 & 31) << 16) | (byte19 << 8) | byte20;
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked22.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked22.cs
index 6f2ead3..d29c76e 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked22.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked22.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,47 +36,47 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 42));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 20)) & 4194303L);
+                values[valuesOffset++] = (int)(block0.TripleShift(42));
+                values[valuesOffset++] = (int)((block0.TripleShift(20)) & 4194303L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 1048575L) << 2) | ((long)((ulong)block1 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 40)) & 4194303L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 18)) & 4194303L);
+                values[valuesOffset++] = (int)(((block0 & 1048575L) << 2) | (block1.TripleShift(62)));
+                values[valuesOffset++] = (int)((block1.TripleShift(40)) & 4194303L);
+                values[valuesOffset++] = (int)((block1.TripleShift(18)) & 4194303L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 262143L) << 4) | ((long)((ulong)block2 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 38)) & 4194303L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 16)) & 4194303L);
+                values[valuesOffset++] = (int)(((block1 & 262143L) << 4) | (block2.TripleShift(60)));
+                values[valuesOffset++] = (int)((block2.TripleShift(38)) & 4194303L);
+                values[valuesOffset++] = (int)((block2.TripleShift(16)) & 4194303L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 65535L) << 6) | ((long)((ulong)block3 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 36)) & 4194303L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 14)) & 4194303L);
+                values[valuesOffset++] = (int)(((block2 & 65535L) << 6) | (block3.TripleShift(58)));
+                values[valuesOffset++] = (int)((block3.TripleShift(36)) & 4194303L);
+                values[valuesOffset++] = (int)((block3.TripleShift(14)) & 4194303L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 16383L) << 8) | ((long)((ulong)block4 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 34)) & 4194303L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 12)) & 4194303L);
+                values[valuesOffset++] = (int)(((block3 & 16383L) << 8) | (block4.TripleShift(56)));
+                values[valuesOffset++] = (int)((block4.TripleShift(34)) & 4194303L);
+                values[valuesOffset++] = (int)((block4.TripleShift(12)) & 4194303L);
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block4 & 4095L) << 10) | ((long)((ulong)block5 >> 54)));
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 32)) & 4194303L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 10)) & 4194303L);
+                values[valuesOffset++] = (int)(((block4 & 4095L) << 10) | (block5.TripleShift(54)));
+                values[valuesOffset++] = (int)((block5.TripleShift(32)) & 4194303L);
+                values[valuesOffset++] = (int)((block5.TripleShift(10)) & 4194303L);
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block5 & 1023L) << 12) | ((long)((ulong)block6 >> 52)));
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 30)) & 4194303L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 8)) & 4194303L);
+                values[valuesOffset++] = (int)(((block5 & 1023L) << 12) | (block6.TripleShift(52)));
+                values[valuesOffset++] = (int)((block6.TripleShift(30)) & 4194303L);
+                values[valuesOffset++] = (int)((block6.TripleShift(8)) & 4194303L);
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block6 & 255L) << 14) | ((long)((ulong)block7 >> 50)));
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 28)) & 4194303L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 6)) & 4194303L);
+                values[valuesOffset++] = (int)(((block6 & 255L) << 14) | (block7.TripleShift(50)));
+                values[valuesOffset++] = (int)((block7.TripleShift(28)) & 4194303L);
+                values[valuesOffset++] = (int)((block7.TripleShift(6)) & 4194303L);
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block7 & 63L) << 16) | ((long)((ulong)block8 >> 48)));
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 26)) & 4194303L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 4)) & 4194303L);
+                values[valuesOffset++] = (int)(((block7 & 63L) << 16) | (block8.TripleShift(48)));
+                values[valuesOffset++] = (int)((block8.TripleShift(26)) & 4194303L);
+                values[valuesOffset++] = (int)((block8.TripleShift(4)) & 4194303L);
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block8 & 15L) << 18) | ((long)((ulong)block9 >> 46)));
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 24)) & 4194303L);
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 2)) & 4194303L);
+                values[valuesOffset++] = (int)(((block8 & 15L) << 18) | (block9.TripleShift(46)));
+                values[valuesOffset++] = (int)((block9.TripleShift(24)) & 4194303L);
+                values[valuesOffset++] = (int)((block9.TripleShift(2)) & 4194303L);
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block9 & 3L) << 20) | ((long)((ulong)block10 >> 44)));
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 22)) & 4194303L);
+                values[valuesOffset++] = (int)(((block9 & 3L) << 20) | (block10.TripleShift(44)));
+                values[valuesOffset++] = (int)((block10.TripleShift(22)) & 4194303L);
                 values[valuesOffset++] = (int)(block10 & 4194303L);
             }
         }
@@ -86,15 +88,15 @@
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 14) | (byte1 << 6) | ((int)((uint)byte2 >> 2));
+                values[valuesOffset++] = (byte0 << 14) | (byte1 << 6) | (byte2.TripleShift(2));
                 int byte3 = blocks[blocksOffset++] & 0xFF;
                 int byte4 = blocks[blocksOffset++] & 0xFF;
                 int byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 3) << 20) | (byte3 << 12) | (byte4 << 4) | ((int)((uint)byte5 >> 4));
+                values[valuesOffset++] = ((byte2 & 3) << 20) | (byte3 << 12) | (byte4 << 4) | (byte5.TripleShift(4));
                 int byte6 = blocks[blocksOffset++] & 0xFF;
                 int byte7 = blocks[blocksOffset++] & 0xFF;
                 int byte8 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 15) << 18) | (byte6 << 10) | (byte7 << 2) | ((int)((uint)byte8 >> 6));
+                values[valuesOffset++] = ((byte5 & 15) << 18) | (byte6 << 10) | (byte7 << 2) | (byte8.TripleShift(6));
                 int byte9 = blocks[blocksOffset++] & 0xFF;
                 int byte10 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte8 & 63) << 16) | (byte9 << 8) | byte10;
@@ -106,47 +108,47 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 42);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 20)) & 4194303L;
+                values[valuesOffset++] = block0.TripleShift(42);
+                values[valuesOffset++] = (block0.TripleShift(20)) & 4194303L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 1048575L) << 2) | ((long)((ulong)block1 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 40)) & 4194303L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 18)) & 4194303L;
+                values[valuesOffset++] = ((block0 & 1048575L) << 2) | (block1.TripleShift(62));
+                values[valuesOffset++] = (block1.TripleShift(40)) & 4194303L;
+                values[valuesOffset++] = (block1.TripleShift(18)) & 4194303L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 262143L) << 4) | ((long)((ulong)block2 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 38)) & 4194303L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 16)) & 4194303L;
+                values[valuesOffset++] = ((block1 & 262143L) << 4) | (block2.TripleShift(60));
+                values[valuesOffset++] = (block2.TripleShift(38)) & 4194303L;
+                values[valuesOffset++] = (block2.TripleShift(16)) & 4194303L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 65535L) << 6) | ((long)((ulong)block3 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 36)) & 4194303L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 14)) & 4194303L;
+                values[valuesOffset++] = ((block2 & 65535L) << 6) | (block3.TripleShift(58));
+                values[valuesOffset++] = (block3.TripleShift(36)) & 4194303L;
+                values[valuesOffset++] = (block3.TripleShift(14)) & 4194303L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 16383L) << 8) | ((long)((ulong)block4 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 34)) & 4194303L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 12)) & 4194303L;
+                values[valuesOffset++] = ((block3 & 16383L) << 8) | (block4.TripleShift(56));
+                values[valuesOffset++] = (block4.TripleShift(34)) & 4194303L;
+                values[valuesOffset++] = (block4.TripleShift(12)) & 4194303L;
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block4 & 4095L) << 10) | ((long)((ulong)block5 >> 54));
-                values[valuesOffset++] = ((long)((ulong)block5 >> 32)) & 4194303L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 10)) & 4194303L;
+                values[valuesOffset++] = ((block4 & 4095L) << 10) | (block5.TripleShift(54));
+                values[valuesOffset++] = (block5.TripleShift(32)) & 4194303L;
+                values[valuesOffset++] = (block5.TripleShift(10)) & 4194303L;
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block5 & 1023L) << 12) | ((long)((ulong)block6 >> 52));
-                values[valuesOffset++] = ((long)((ulong)block6 >> 30)) & 4194303L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 8)) & 4194303L;
+                values[valuesOffset++] = ((block5 & 1023L) << 12) | (block6.TripleShift(52));
+                values[valuesOffset++] = (block6.TripleShift(30)) & 4194303L;
+                values[valuesOffset++] = (block6.TripleShift(8)) & 4194303L;
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block6 & 255L) << 14) | ((long)((ulong)block7 >> 50));
-                values[valuesOffset++] = ((long)((ulong)block7 >> 28)) & 4194303L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 6)) & 4194303L;
+                values[valuesOffset++] = ((block6 & 255L) << 14) | (block7.TripleShift(50));
+                values[valuesOffset++] = (block7.TripleShift(28)) & 4194303L;
+                values[valuesOffset++] = (block7.TripleShift(6)) & 4194303L;
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block7 & 63L) << 16) | ((long)((ulong)block8 >> 48));
-                values[valuesOffset++] = ((long)((ulong)block8 >> 26)) & 4194303L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 4)) & 4194303L;
+                values[valuesOffset++] = ((block7 & 63L) << 16) | (block8.TripleShift(48));
+                values[valuesOffset++] = (block8.TripleShift(26)) & 4194303L;
+                values[valuesOffset++] = (block8.TripleShift(4)) & 4194303L;
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block8 & 15L) << 18) | ((long)((ulong)block9 >> 46));
-                values[valuesOffset++] = ((long)((ulong)block9 >> 24)) & 4194303L;
-                values[valuesOffset++] = ((long)((ulong)block9 >> 2)) & 4194303L;
+                values[valuesOffset++] = ((block8 & 15L) << 18) | (block9.TripleShift(46));
+                values[valuesOffset++] = (block9.TripleShift(24)) & 4194303L;
+                values[valuesOffset++] = (block9.TripleShift(2)) & 4194303L;
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block9 & 3L) << 20) | ((long)((ulong)block10 >> 44));
-                values[valuesOffset++] = ((long)((ulong)block10 >> 22)) & 4194303L;
+                values[valuesOffset++] = ((block9 & 3L) << 20) | (block10.TripleShift(44));
+                values[valuesOffset++] = (block10.TripleShift(22)) & 4194303L;
                 values[valuesOffset++] = block10 & 4194303L;
             }
         }
@@ -158,15 +160,15 @@
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 14) | (byte1 << 6) | ((long)((ulong)byte2 >> 2));
+                values[valuesOffset++] = (byte0 << 14) | (byte1 << 6) | (byte2.TripleShift(2));
                 long byte3 = blocks[blocksOffset++] & 0xFF;
                 long byte4 = blocks[blocksOffset++] & 0xFF;
                 long byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 3) << 20) | (byte3 << 12) | (byte4 << 4) | ((long)((ulong)byte5 >> 4));
+                values[valuesOffset++] = ((byte2 & 3) << 20) | (byte3 << 12) | (byte4 << 4) | (byte5.TripleShift(4));
                 long byte6 = blocks[blocksOffset++] & 0xFF;
                 long byte7 = blocks[blocksOffset++] & 0xFF;
                 long byte8 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 15) << 18) | (byte6 << 10) | (byte7 << 2) | ((long)((ulong)byte8 >> 6));
+                values[valuesOffset++] = ((byte5 & 15) << 18) | (byte6 << 10) | (byte7 << 2) | (byte8.TripleShift(6));
                 long byte9 = blocks[blocksOffset++] & 0xFF;
                 long byte10 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte8 & 63) << 16) | (byte9 << 8) | byte10;
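
> NOTE: The following is a minimal sketch, not part of the patch, illustrating the mechanical rewrite applied throughout these generated decoders. It assumes the `J2N.Numerics.TripleShift` extension behaves like Java's `>>>` (logical right shift), which is what the old `(long)((ulong)x >> n)` cast chain emulated; the class and variable names below are illustrative only.

```
// Demonstrates that the old cast-based pattern and the new TripleShift call
// produce identical bits for a signed 64-bit value.
using System;
using J2N.Numerics;

internal static class TripleShiftDemo
{
    private static void Main()
    {
        long block = -1L; // all 64 bits set

        long viaCast = (long)((ulong)block >> 42); // pattern removed by this patch
        long viaJ2N  = block.TripleShift(42);      // pattern introduced by this patch

        Console.WriteLine(viaCast == viaJ2N); // True
        Console.WriteLine(viaJ2N);            // 4194303 (the low 22 bits set)
    }
}
```
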
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked23.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked23.cs
index edca46c..59d6936 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked23.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked23.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,91 +36,91 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 41));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 18)) & 8388607L);
+                values[valuesOffset++] = (int)(block0.TripleShift(41));
+                values[valuesOffset++] = (int)((block0.TripleShift(18)) & 8388607L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 262143L) << 5) | ((long)((ulong)block1 >> 59)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 36)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 13)) & 8388607L);
+                values[valuesOffset++] = (int)(((block0 & 262143L) << 5) | (block1.TripleShift(59)));
+                values[valuesOffset++] = (int)((block1.TripleShift(36)) & 8388607L);
+                values[valuesOffset++] = (int)((block1.TripleShift(13)) & 8388607L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 8191L) << 10) | ((long)((ulong)block2 >> 54)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 31)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 8)) & 8388607L);
+                values[valuesOffset++] = (int)(((block1 & 8191L) << 10) | (block2.TripleShift(54)));
+                values[valuesOffset++] = (int)((block2.TripleShift(31)) & 8388607L);
+                values[valuesOffset++] = (int)((block2.TripleShift(8)) & 8388607L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 255L) << 15) | ((long)((ulong)block3 >> 49)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 26)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 3)) & 8388607L);
+                values[valuesOffset++] = (int)(((block2 & 255L) << 15) | (block3.TripleShift(49)));
+                values[valuesOffset++] = (int)((block3.TripleShift(26)) & 8388607L);
+                values[valuesOffset++] = (int)((block3.TripleShift(3)) & 8388607L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 7L) << 20) | ((long)((ulong)block4 >> 44)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 21)) & 8388607L);
+                values[valuesOffset++] = (int)(((block3 & 7L) << 20) | (block4.TripleShift(44)));
+                values[valuesOffset++] = (int)((block4.TripleShift(21)) & 8388607L);
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block4 & 2097151L) << 2) | ((long)((ulong)block5 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 39)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 16)) & 8388607L);
+                values[valuesOffset++] = (int)(((block4 & 2097151L) << 2) | (block5.TripleShift(62)));
+                values[valuesOffset++] = (int)((block5.TripleShift(39)) & 8388607L);
+                values[valuesOffset++] = (int)((block5.TripleShift(16)) & 8388607L);
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block5 & 65535L) << 7) | ((long)((ulong)block6 >> 57)));
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 34)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 11)) & 8388607L);
+                values[valuesOffset++] = (int)(((block5 & 65535L) << 7) | (block6.TripleShift(57)));
+                values[valuesOffset++] = (int)((block6.TripleShift(34)) & 8388607L);
+                values[valuesOffset++] = (int)((block6.TripleShift(11)) & 8388607L);
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block6 & 2047L) << 12) | ((long)((ulong)block7 >> 52)));
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 29)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 6)) & 8388607L);
+                values[valuesOffset++] = (int)(((block6 & 2047L) << 12) | (block7.TripleShift(52)));
+                values[valuesOffset++] = (int)((block7.TripleShift(29)) & 8388607L);
+                values[valuesOffset++] = (int)((block7.TripleShift(6)) & 8388607L);
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block7 & 63L) << 17) | ((long)((ulong)block8 >> 47)));
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 24)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 1)) & 8388607L);
+                values[valuesOffset++] = (int)(((block7 & 63L) << 17) | (block8.TripleShift(47)));
+                values[valuesOffset++] = (int)((block8.TripleShift(24)) & 8388607L);
+                values[valuesOffset++] = (int)((block8.TripleShift(1)) & 8388607L);
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block8 & 1L) << 22) | ((long)((ulong)block9 >> 42)));
-                values[valuesOffset++] = (int)(((long)((ulong)block9 >> 19)) & 8388607L);
+                values[valuesOffset++] = (int)(((block8 & 1L) << 22) | (block9.TripleShift(42)));
+                values[valuesOffset++] = (int)((block9.TripleShift(19)) & 8388607L);
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block9 & 524287L) << 4) | ((long)((ulong)block10 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 37)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block10 >> 14)) & 8388607L);
+                values[valuesOffset++] = (int)(((block9 & 524287L) << 4) | (block10.TripleShift(60)));
+                values[valuesOffset++] = (int)((block10.TripleShift(37)) & 8388607L);
+                values[valuesOffset++] = (int)((block10.TripleShift(14)) & 8388607L);
                 long block11 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block10 & 16383L) << 9) | ((long)((ulong)block11 >> 55)));
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 32)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block11 >> 9)) & 8388607L);
+                values[valuesOffset++] = (int)(((block10 & 16383L) << 9) | (block11.TripleShift(55)));
+                values[valuesOffset++] = (int)((block11.TripleShift(32)) & 8388607L);
+                values[valuesOffset++] = (int)((block11.TripleShift(9)) & 8388607L);
                 long block12 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block11 & 511L) << 14) | ((long)((ulong)block12 >> 50)));
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 27)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block12 >> 4)) & 8388607L);
+                values[valuesOffset++] = (int)(((block11 & 511L) << 14) | (block12.TripleShift(50)));
+                values[valuesOffset++] = (int)((block12.TripleShift(27)) & 8388607L);
+                values[valuesOffset++] = (int)((block12.TripleShift(4)) & 8388607L);
                 long block13 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block12 & 15L) << 19) | ((long)((ulong)block13 >> 45)));
-                values[valuesOffset++] = (int)(((long)((ulong)block13 >> 22)) & 8388607L);
+                values[valuesOffset++] = (int)(((block12 & 15L) << 19) | (block13.TripleShift(45)));
+                values[valuesOffset++] = (int)((block13.TripleShift(22)) & 8388607L);
                 long block14 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block13 & 4194303L) << 1) | ((long)((ulong)block14 >> 63)));
-                values[valuesOffset++] = (int)(((long)((ulong)block14 >> 40)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block14 >> 17)) & 8388607L);
+                values[valuesOffset++] = (int)(((block13 & 4194303L) << 1) | (block14.TripleShift(63)));
+                values[valuesOffset++] = (int)((block14.TripleShift(40)) & 8388607L);
+                values[valuesOffset++] = (int)((block14.TripleShift(17)) & 8388607L);
                 long block15 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block14 & 131071L) << 6) | ((long)((ulong)block15 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block15 >> 35)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block15 >> 12)) & 8388607L);
+                values[valuesOffset++] = (int)(((block14 & 131071L) << 6) | (block15.TripleShift(58)));
+                values[valuesOffset++] = (int)((block15.TripleShift(35)) & 8388607L);
+                values[valuesOffset++] = (int)((block15.TripleShift(12)) & 8388607L);
                 long block16 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block15 & 4095L) << 11) | ((long)((ulong)block16 >> 53)));
-                values[valuesOffset++] = (int)(((long)((ulong)block16 >> 30)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block16 >> 7)) & 8388607L);
+                values[valuesOffset++] = (int)(((block15 & 4095L) << 11) | (block16.TripleShift(53)));
+                values[valuesOffset++] = (int)((block16.TripleShift(30)) & 8388607L);
+                values[valuesOffset++] = (int)((block16.TripleShift(7)) & 8388607L);
                 long block17 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block16 & 127L) << 16) | ((long)((ulong)block17 >> 48)));
-                values[valuesOffset++] = (int)(((long)((ulong)block17 >> 25)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block17 >> 2)) & 8388607L);
+                values[valuesOffset++] = (int)(((block16 & 127L) << 16) | (block17.TripleShift(48)));
+                values[valuesOffset++] = (int)((block17.TripleShift(25)) & 8388607L);
+                values[valuesOffset++] = (int)((block17.TripleShift(2)) & 8388607L);
                 long block18 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block17 & 3L) << 21) | ((long)((ulong)block18 >> 43)));
-                values[valuesOffset++] = (int)(((long)((ulong)block18 >> 20)) & 8388607L);
+                values[valuesOffset++] = (int)(((block17 & 3L) << 21) | (block18.TripleShift(43)));
+                values[valuesOffset++] = (int)((block18.TripleShift(20)) & 8388607L);
                 long block19 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block18 & 1048575L) << 3) | ((long)((ulong)block19 >> 61)));
-                values[valuesOffset++] = (int)(((long)((ulong)block19 >> 38)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block19 >> 15)) & 8388607L);
+                values[valuesOffset++] = (int)(((block18 & 1048575L) << 3) | (block19.TripleShift(61)));
+                values[valuesOffset++] = (int)((block19.TripleShift(38)) & 8388607L);
+                values[valuesOffset++] = (int)((block19.TripleShift(15)) & 8388607L);
                 long block20 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block19 & 32767L) << 8) | ((long)((ulong)block20 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block20 >> 33)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block20 >> 10)) & 8388607L);
+                values[valuesOffset++] = (int)(((block19 & 32767L) << 8) | (block20.TripleShift(56)));
+                values[valuesOffset++] = (int)((block20.TripleShift(33)) & 8388607L);
+                values[valuesOffset++] = (int)((block20.TripleShift(10)) & 8388607L);
                 long block21 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block20 & 1023L) << 13) | ((long)((ulong)block21 >> 51)));
-                values[valuesOffset++] = (int)(((long)((ulong)block21 >> 28)) & 8388607L);
-                values[valuesOffset++] = (int)(((long)((ulong)block21 >> 5)) & 8388607L);
+                values[valuesOffset++] = (int)(((block20 & 1023L) << 13) | (block21.TripleShift(51)));
+                values[valuesOffset++] = (int)((block21.TripleShift(28)) & 8388607L);
+                values[valuesOffset++] = (int)((block21.TripleShift(5)) & 8388607L);
                 long block22 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block21 & 31L) << 18) | ((long)((ulong)block22 >> 46)));
-                values[valuesOffset++] = (int)(((long)((ulong)block22 >> 23)) & 8388607L);
+                values[valuesOffset++] = (int)(((block21 & 31L) << 18) | (block22.TripleShift(46)));
+                values[valuesOffset++] = (int)((block22.TripleShift(23)) & 8388607L);
                 values[valuesOffset++] = (int)(block22 & 8388607L);
             }
         }
@@ -130,31 +132,31 @@
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 15) | (byte1 << 7) | ((int)((uint)byte2 >> 1));
+                values[valuesOffset++] = (byte0 << 15) | (byte1 << 7) | (byte2.TripleShift(1));
                 int byte3 = blocks[blocksOffset++] & 0xFF;
                 int byte4 = blocks[blocksOffset++] & 0xFF;
                 int byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 1) << 22) | (byte3 << 14) | (byte4 << 6) | ((int)((uint)byte5 >> 2));
+                values[valuesOffset++] = ((byte2 & 1) << 22) | (byte3 << 14) | (byte4 << 6) | (byte5.TripleShift(2));
                 int byte6 = blocks[blocksOffset++] & 0xFF;
                 int byte7 = blocks[blocksOffset++] & 0xFF;
                 int byte8 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 3) << 21) | (byte6 << 13) | (byte7 << 5) | ((int)((uint)byte8 >> 3));
+                values[valuesOffset++] = ((byte5 & 3) << 21) | (byte6 << 13) | (byte7 << 5) | (byte8.TripleShift(3));
                 int byte9 = blocks[blocksOffset++] & 0xFF;
                 int byte10 = blocks[blocksOffset++] & 0xFF;
                 int byte11 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte8 & 7) << 20) | (byte9 << 12) | (byte10 << 4) | ((int)((uint)byte11 >> 4));
+                values[valuesOffset++] = ((byte8 & 7) << 20) | (byte9 << 12) | (byte10 << 4) | (byte11.TripleShift(4));
                 int byte12 = blocks[blocksOffset++] & 0xFF;
                 int byte13 = blocks[blocksOffset++] & 0xFF;
                 int byte14 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte11 & 15) << 19) | (byte12 << 11) | (byte13 << 3) | ((int)((uint)byte14 >> 5));
+                values[valuesOffset++] = ((byte11 & 15) << 19) | (byte12 << 11) | (byte13 << 3) | (byte14.TripleShift(5));
                 int byte15 = blocks[blocksOffset++] & 0xFF;
                 int byte16 = blocks[blocksOffset++] & 0xFF;
                 int byte17 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte14 & 31) << 18) | (byte15 << 10) | (byte16 << 2) | ((int)((uint)byte17 >> 6));
+                values[valuesOffset++] = ((byte14 & 31) << 18) | (byte15 << 10) | (byte16 << 2) | (byte17.TripleShift(6));
                 int byte18 = blocks[blocksOffset++] & 0xFF;
                 int byte19 = blocks[blocksOffset++] & 0xFF;
                 int byte20 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte17 & 63) << 17) | (byte18 << 9) | (byte19 << 1) | ((int)((uint)byte20 >> 7));
+                values[valuesOffset++] = ((byte17 & 63) << 17) | (byte18 << 9) | (byte19 << 1) | (byte20.TripleShift(7));
                 int byte21 = blocks[blocksOffset++] & 0xFF;
                 int byte22 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte20 & 127) << 16) | (byte21 << 8) | byte22;
@@ -166,91 +168,91 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 41);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 18)) & 8388607L;
+                values[valuesOffset++] = block0.TripleShift(41);
+                values[valuesOffset++] = (block0.TripleShift(18)) & 8388607L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 262143L) << 5) | ((long)((ulong)block1 >> 59));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 36)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 13)) & 8388607L;
+                values[valuesOffset++] = ((block0 & 262143L) << 5) | (block1.TripleShift(59));
+                values[valuesOffset++] = (block1.TripleShift(36)) & 8388607L;
+                values[valuesOffset++] = (block1.TripleShift(13)) & 8388607L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 8191L) << 10) | ((long)((ulong)block2 >> 54));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 31)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 8)) & 8388607L;
+                values[valuesOffset++] = ((block1 & 8191L) << 10) | (block2.TripleShift(54));
+                values[valuesOffset++] = (block2.TripleShift(31)) & 8388607L;
+                values[valuesOffset++] = (block2.TripleShift(8)) & 8388607L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 255L) << 15) | ((long)((ulong)block3 >> 49));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 26)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 3)) & 8388607L;
+                values[valuesOffset++] = ((block2 & 255L) << 15) | (block3.TripleShift(49));
+                values[valuesOffset++] = (block3.TripleShift(26)) & 8388607L;
+                values[valuesOffset++] = (block3.TripleShift(3)) & 8388607L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 7L) << 20) | ((long)((ulong)block4 >> 44));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 21)) & 8388607L;
+                values[valuesOffset++] = ((block3 & 7L) << 20) | (block4.TripleShift(44));
+                values[valuesOffset++] = (block4.TripleShift(21)) & 8388607L;
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block4 & 2097151L) << 2) | ((long)((ulong)block5 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block5 >> 39)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 16)) & 8388607L;
+                values[valuesOffset++] = ((block4 & 2097151L) << 2) | (block5.TripleShift(62));
+                values[valuesOffset++] = (block5.TripleShift(39)) & 8388607L;
+                values[valuesOffset++] = (block5.TripleShift(16)) & 8388607L;
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block5 & 65535L) << 7) | ((long)((ulong)block6 >> 57));
-                values[valuesOffset++] = ((long)((ulong)block6 >> 34)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 11)) & 8388607L;
+                values[valuesOffset++] = ((block5 & 65535L) << 7) | (block6.TripleShift(57));
+                values[valuesOffset++] = (block6.TripleShift(34)) & 8388607L;
+                values[valuesOffset++] = (block6.TripleShift(11)) & 8388607L;
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block6 & 2047L) << 12) | ((long)((ulong)block7 >> 52));
-                values[valuesOffset++] = ((long)((ulong)block7 >> 29)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 6)) & 8388607L;
+                values[valuesOffset++] = ((block6 & 2047L) << 12) | (block7.TripleShift(52));
+                values[valuesOffset++] = (block7.TripleShift(29)) & 8388607L;
+                values[valuesOffset++] = (block7.TripleShift(6)) & 8388607L;
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block7 & 63L) << 17) | ((long)((ulong)block8 >> 47));
-                values[valuesOffset++] = ((long)((ulong)block8 >> 24)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 1)) & 8388607L;
+                values[valuesOffset++] = ((block7 & 63L) << 17) | (block8.TripleShift(47));
+                values[valuesOffset++] = (block8.TripleShift(24)) & 8388607L;
+                values[valuesOffset++] = (block8.TripleShift(1)) & 8388607L;
                 long block9 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block8 & 1L) << 22) | ((long)((ulong)block9 >> 42));
-                values[valuesOffset++] = ((long)((ulong)block9 >> 19)) & 8388607L;
+                values[valuesOffset++] = ((block8 & 1L) << 22) | (block9.TripleShift(42));
+                values[valuesOffset++] = (block9.TripleShift(19)) & 8388607L;
                 long block10 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block9 & 524287L) << 4) | ((long)((ulong)block10 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block10 >> 37)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block10 >> 14)) & 8388607L;
+                values[valuesOffset++] = ((block9 & 524287L) << 4) | (block10.TripleShift(60));
+                values[valuesOffset++] = (block10.TripleShift(37)) & 8388607L;
+                values[valuesOffset++] = (block10.TripleShift(14)) & 8388607L;
                 long block11 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block10 & 16383L) << 9) | ((long)((ulong)block11 >> 55));
-                values[valuesOffset++] = ((long)((ulong)block11 >> 32)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block11 >> 9)) & 8388607L;
+                values[valuesOffset++] = ((block10 & 16383L) << 9) | (block11.TripleShift(55));
+                values[valuesOffset++] = (block11.TripleShift(32)) & 8388607L;
+                values[valuesOffset++] = (block11.TripleShift(9)) & 8388607L;
                 long block12 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block11 & 511L) << 14) | ((long)((ulong)block12 >> 50));
-                values[valuesOffset++] = ((long)((ulong)block12 >> 27)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block12 >> 4)) & 8388607L;
+                values[valuesOffset++] = ((block11 & 511L) << 14) | (block12.TripleShift(50));
+                values[valuesOffset++] = (block12.TripleShift(27)) & 8388607L;
+                values[valuesOffset++] = (block12.TripleShift(4)) & 8388607L;
                 long block13 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block12 & 15L) << 19) | ((long)((ulong)block13 >> 45));
-                values[valuesOffset++] = ((long)((ulong)block13 >> 22)) & 8388607L;
+                values[valuesOffset++] = ((block12 & 15L) << 19) | (block13.TripleShift(45));
+                values[valuesOffset++] = (block13.TripleShift(22)) & 8388607L;
                 long block14 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block13 & 4194303L) << 1) | ((long)((ulong)block14 >> 63));
-                values[valuesOffset++] = ((long)((ulong)block14 >> 40)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block14 >> 17)) & 8388607L;
+                values[valuesOffset++] = ((block13 & 4194303L) << 1) | (block14.TripleShift(63));
+                values[valuesOffset++] = (block14.TripleShift(40)) & 8388607L;
+                values[valuesOffset++] = (block14.TripleShift(17)) & 8388607L;
                 long block15 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block14 & 131071L) << 6) | ((long)((ulong)block15 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block15 >> 35)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block15 >> 12)) & 8388607L;
+                values[valuesOffset++] = ((block14 & 131071L) << 6) | (block15.TripleShift(58));
+                values[valuesOffset++] = (block15.TripleShift(35)) & 8388607L;
+                values[valuesOffset++] = (block15.TripleShift(12)) & 8388607L;
                 long block16 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block15 & 4095L) << 11) | ((long)((ulong)block16 >> 53));
-                values[valuesOffset++] = ((long)((ulong)block16 >> 30)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block16 >> 7)) & 8388607L;
+                values[valuesOffset++] = ((block15 & 4095L) << 11) | (block16.TripleShift(53));
+                values[valuesOffset++] = (block16.TripleShift(30)) & 8388607L;
+                values[valuesOffset++] = (block16.TripleShift(7)) & 8388607L;
                 long block17 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block16 & 127L) << 16) | ((long)((ulong)block17 >> 48));
-                values[valuesOffset++] = ((long)((ulong)block17 >> 25)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block17 >> 2)) & 8388607L;
+                values[valuesOffset++] = ((block16 & 127L) << 16) | (block17.TripleShift(48));
+                values[valuesOffset++] = (block17.TripleShift(25)) & 8388607L;
+                values[valuesOffset++] = (block17.TripleShift(2)) & 8388607L;
                 long block18 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block17 & 3L) << 21) | ((long)((ulong)block18 >> 43));
-                values[valuesOffset++] = ((long)((ulong)block18 >> 20)) & 8388607L;
+                values[valuesOffset++] = ((block17 & 3L) << 21) | (block18.TripleShift(43));
+                values[valuesOffset++] = (block18.TripleShift(20)) & 8388607L;
                 long block19 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block18 & 1048575L) << 3) | ((long)((ulong)block19 >> 61));
-                values[valuesOffset++] = ((long)((ulong)block19 >> 38)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block19 >> 15)) & 8388607L;
+                values[valuesOffset++] = ((block18 & 1048575L) << 3) | (block19.TripleShift(61));
+                values[valuesOffset++] = (block19.TripleShift(38)) & 8388607L;
+                values[valuesOffset++] = (block19.TripleShift(15)) & 8388607L;
                 long block20 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block19 & 32767L) << 8) | ((long)((ulong)block20 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block20 >> 33)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block20 >> 10)) & 8388607L;
+                values[valuesOffset++] = ((block19 & 32767L) << 8) | (block20.TripleShift(56));
+                values[valuesOffset++] = (block20.TripleShift(33)) & 8388607L;
+                values[valuesOffset++] = (block20.TripleShift(10)) & 8388607L;
                 long block21 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block20 & 1023L) << 13) | ((long)((ulong)block21 >> 51));
-                values[valuesOffset++] = ((long)((ulong)block21 >> 28)) & 8388607L;
-                values[valuesOffset++] = ((long)((ulong)block21 >> 5)) & 8388607L;
+                values[valuesOffset++] = ((block20 & 1023L) << 13) | (block21.TripleShift(51));
+                values[valuesOffset++] = (block21.TripleShift(28)) & 8388607L;
+                values[valuesOffset++] = (block21.TripleShift(5)) & 8388607L;
                 long block22 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block21 & 31L) << 18) | ((long)((ulong)block22 >> 46));
-                values[valuesOffset++] = ((long)((ulong)block22 >> 23)) & 8388607L;
+                values[valuesOffset++] = ((block21 & 31L) << 18) | (block22.TripleShift(46));
+                values[valuesOffset++] = (block22.TripleShift(23)) & 8388607L;
                 values[valuesOffset++] = block22 & 8388607L;
             }
         }
@@ -262,31 +264,31 @@
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 15) | (byte1 << 7) | ((long)((ulong)byte2 >> 1));
+                values[valuesOffset++] = (byte0 << 15) | (byte1 << 7) | (byte2.TripleShift(1));
                 long byte3 = blocks[blocksOffset++] & 0xFF;
                 long byte4 = blocks[blocksOffset++] & 0xFF;
                 long byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 1) << 22) | (byte3 << 14) | (byte4 << 6) | ((long)((ulong)byte5 >> 2));
+                values[valuesOffset++] = ((byte2 & 1) << 22) | (byte3 << 14) | (byte4 << 6) | (byte5.TripleShift(2));
                 long byte6 = blocks[blocksOffset++] & 0xFF;
                 long byte7 = blocks[blocksOffset++] & 0xFF;
                 long byte8 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 3) << 21) | (byte6 << 13) | (byte7 << 5) | ((long)((ulong)byte8 >> 3));
+                values[valuesOffset++] = ((byte5 & 3) << 21) | (byte6 << 13) | (byte7 << 5) | (byte8.TripleShift(3));
                 long byte9 = blocks[blocksOffset++] & 0xFF;
                 long byte10 = blocks[blocksOffset++] & 0xFF;
                 long byte11 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte8 & 7) << 20) | (byte9 << 12) | (byte10 << 4) | ((long)((ulong)byte11 >> 4));
+                values[valuesOffset++] = ((byte8 & 7) << 20) | (byte9 << 12) | (byte10 << 4) | (byte11.TripleShift(4));
                 long byte12 = blocks[blocksOffset++] & 0xFF;
                 long byte13 = blocks[blocksOffset++] & 0xFF;
                 long byte14 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte11 & 15) << 19) | (byte12 << 11) | (byte13 << 3) | ((long)((ulong)byte14 >> 5));
+                values[valuesOffset++] = ((byte11 & 15) << 19) | (byte12 << 11) | (byte13 << 3) | (byte14.TripleShift(5));
                 long byte15 = blocks[blocksOffset++] & 0xFF;
                 long byte16 = blocks[blocksOffset++] & 0xFF;
                 long byte17 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte14 & 31) << 18) | (byte15 << 10) | (byte16 << 2) | ((long)((ulong)byte17 >> 6));
+                values[valuesOffset++] = ((byte14 & 31) << 18) | (byte15 << 10) | (byte16 << 2) | (byte17.TripleShift(6));
                 long byte18 = blocks[blocksOffset++] & 0xFF;
                 long byte19 = blocks[blocksOffset++] & 0xFF;
                 long byte20 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte17 & 63) << 17) | (byte18 << 9) | (byte19 << 1) | ((long)((ulong)byte20 >> 7));
+                values[valuesOffset++] = ((byte17 & 63) << 17) | (byte18 << 9) | (byte19 << 1) | (byte20.TripleShift(7));
                 long byte21 = blocks[blocksOffset++] & 0xFF;
                 long byte22 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte20 & 127) << 16) | (byte21 << 8) | byte22;
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked24.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked24.cs
index c8178c3..a6be910 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked24.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked24.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,15 +36,15 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 40));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 16)) & 16777215L);
+                values[valuesOffset++] = (int)(block0.TripleShift(40));
+                values[valuesOffset++] = (int)((block0.TripleShift(16)) & 16777215L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 65535L) << 8) | ((long)((ulong)block1 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 32)) & 16777215L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 8)) & 16777215L);
+                values[valuesOffset++] = (int)(((block0 & 65535L) << 8) | (block1.TripleShift(56)));
+                values[valuesOffset++] = (int)((block1.TripleShift(32)) & 16777215L);
+                values[valuesOffset++] = (int)((block1.TripleShift(8)) & 16777215L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 255L) << 16) | ((long)((ulong)block2 >> 48)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 24)) & 16777215L);
+                values[valuesOffset++] = (int)(((block1 & 255L) << 16) | (block2.TripleShift(48)));
+                values[valuesOffset++] = (int)((block2.TripleShift(24)) & 16777215L);
                 values[valuesOffset++] = (int)(block2 & 16777215L);
             }
         }
@@ -63,15 +65,15 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 40);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 16)) & 16777215L;
+                values[valuesOffset++] = block0.TripleShift(40);
+                values[valuesOffset++] = (block0.TripleShift(16)) & 16777215L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 65535L) << 8) | ((long)((ulong)block1 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 32)) & 16777215L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 8)) & 16777215L;
+                values[valuesOffset++] = ((block0 & 65535L) << 8) | (block1.TripleShift(56));
+                values[valuesOffset++] = (block1.TripleShift(32)) & 16777215L;
+                values[valuesOffset++] = (block1.TripleShift(8)) & 16777215L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 255L) << 16) | ((long)((ulong)block2 >> 48));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 24)) & 16777215L;
+                values[valuesOffset++] = ((block1 & 255L) << 16) | (block2.TripleShift(48));
+                values[valuesOffset++] = (block2.TripleShift(24)) & 16777215L;
                 values[valuesOffset++] = block2 & 16777215L;
             }
         }
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked3.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked3.cs
index ab99609..da6c1d7 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked3.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked3.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,71 +36,71 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 61));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 58)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 55)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 52)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 49)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 46)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 43)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 40)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 37)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 34)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 31)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 28)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 25)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 22)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 19)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 16)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 13)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 10)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 7)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 4)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 1)) & 7L);
+                values[valuesOffset++] = (int)(block0.TripleShift(61));
+                values[valuesOffset++] = (int)((block0.TripleShift(58)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(55)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(52)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(49)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(46)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(43)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(40)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(37)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(34)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(31)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(28)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(25)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(22)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(19)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(16)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(13)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(10)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(7)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(4)) & 7L);
+                values[valuesOffset++] = (int)((block0.TripleShift(1)) & 7L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 1L) << 2) | ((long)((ulong)block1 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 59)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 56)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 53)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 50)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 47)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 44)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 41)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 38)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 35)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 32)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 29)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 26)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 23)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 20)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 17)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 14)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 11)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 8)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 5)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 2)) & 7L);
+                values[valuesOffset++] = (int)(((block0 & 1L) << 2) | (block1.TripleShift(62)));
+                values[valuesOffset++] = (int)((block1.TripleShift(59)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(56)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(53)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(50)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(47)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(44)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(41)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(38)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(35)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(32)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(29)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(26)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(23)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(20)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(17)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(14)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(11)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(8)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(5)) & 7L);
+                values[valuesOffset++] = (int)((block1.TripleShift(2)) & 7L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 3L) << 1) | ((long)((ulong)block2 >> 63)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 60)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 57)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 54)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 51)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 48)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 45)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 42)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 39)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 36)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 33)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 30)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 27)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 24)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 21)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 18)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 15)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 12)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 9)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 6)) & 7L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 3)) & 7L);
+                values[valuesOffset++] = (int)(((block1 & 3L) << 1) | (block2.TripleShift(63)));
+                values[valuesOffset++] = (int)((block2.TripleShift(60)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(57)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(54)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(51)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(48)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(45)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(42)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(39)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(36)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(33)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(30)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(27)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(24)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(21)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(18)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(15)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(12)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(9)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(6)) & 7L);
+                values[valuesOffset++] = (int)((block2.TripleShift(3)) & 7L);
                 values[valuesOffset++] = (int)(block2 & 7L);
             }
         }
@@ -108,15 +110,15 @@
             for (int i = 0; i < iterations; ++i)
             {
                 int byte0 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (int)((uint)byte0 >> 5);
-                values[valuesOffset++] = ((int)((uint)byte0 >> 2)) & 7;
+                values[valuesOffset++] = byte0.TripleShift(5);
+                values[valuesOffset++] = (byte0.TripleShift(2)) & 7;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte0 & 3) << 1) | ((int)((uint)byte1 >> 7));
-                values[valuesOffset++] = ((int)((uint)byte1 >> 4)) & 7;
-                values[valuesOffset++] = ((int)((uint)byte1 >> 1)) & 7;
+                values[valuesOffset++] = ((byte0 & 3) << 1) | (byte1.TripleShift(7));
+                values[valuesOffset++] = (byte1.TripleShift(4)) & 7;
+                values[valuesOffset++] = (byte1.TripleShift(1)) & 7;
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 1) << 2) | ((int)((uint)byte2 >> 6));
-                values[valuesOffset++] = ((int)((uint)byte2 >> 3)) & 7;
+                values[valuesOffset++] = ((byte1 & 1) << 2) | (byte2.TripleShift(6));
+                values[valuesOffset++] = (byte2.TripleShift(3)) & 7;
                 values[valuesOffset++] = byte2 & 7;
             }
         }
@@ -126,71 +128,71 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 61);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 58)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 55)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 52)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 49)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 46)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 43)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 40)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 37)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 34)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 31)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 28)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 25)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 22)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 19)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 16)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 13)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 10)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 7)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 4)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 1)) & 7L;
+                values[valuesOffset++] = block0.TripleShift(61);
+                values[valuesOffset++] = (block0.TripleShift(58)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(55)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(52)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(49)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(46)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(43)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(40)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(37)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(34)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(31)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(28)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(25)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(22)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(19)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(16)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(13)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(10)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(7)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(4)) & 7L;
+                values[valuesOffset++] = (block0.TripleShift(1)) & 7L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 1L) << 2) | ((long)((ulong)block1 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 59)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 56)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 53)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 50)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 47)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 44)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 41)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 38)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 35)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 32)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 29)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 26)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 23)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 20)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 17)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 14)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 11)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 8)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 5)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 2)) & 7L;
+                values[valuesOffset++] = ((block0 & 1L) << 2) | (block1.TripleShift(62));
+                values[valuesOffset++] = (block1.TripleShift(59)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(56)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(53)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(50)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(47)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(44)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(41)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(38)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(35)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(32)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(29)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(26)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(23)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(20)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(17)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(14)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(11)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(8)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(5)) & 7L;
+                values[valuesOffset++] = (block1.TripleShift(2)) & 7L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 3L) << 1) | ((long)((ulong)block2 >> 63));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 60)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 57)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 54)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 51)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 48)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 45)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 42)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 39)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 36)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 33)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 30)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 27)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 24)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 21)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 18)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 15)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 12)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 9)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 6)) & 7L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 3)) & 7L;
+                values[valuesOffset++] = ((block1 & 3L) << 1) | (block2.TripleShift(63));
+                values[valuesOffset++] = (block2.TripleShift(60)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(57)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(54)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(51)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(48)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(45)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(42)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(39)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(36)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(33)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(30)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(27)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(24)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(21)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(18)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(15)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(12)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(9)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(6)) & 7L;
+                values[valuesOffset++] = (block2.TripleShift(3)) & 7L;
                 values[valuesOffset++] = block2 & 7L;
             }
         }
@@ -200,15 +202,15 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long byte0 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (long)((ulong)byte0 >> 5);
-                values[valuesOffset++] = ((long)((ulong)byte0 >> 2)) & 7;
+                values[valuesOffset++] = byte0.TripleShift(5);
+                values[valuesOffset++] = (byte0.TripleShift(2)) & 7;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte0 & 3) << 1) | ((long)((ulong)byte1 >> 7));
-                values[valuesOffset++] = ((long)((ulong)byte1 >> 4)) & 7;
-                values[valuesOffset++] = ((long)((ulong)byte1 >> 1)) & 7;
+                values[valuesOffset++] = ((byte0 & 3) << 1) | (byte1.TripleShift(7));
+                values[valuesOffset++] = (byte1.TripleShift(4)) & 7;
+                values[valuesOffset++] = (byte1.TripleShift(1)) & 7;
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 1) << 2) | ((long)((ulong)byte2 >> 6));
-                values[valuesOffset++] = ((long)((ulong)byte2 >> 3)) & 7;
+                values[valuesOffset++] = ((byte1 & 1) << 2) | (byte2.TripleShift(6));
+                values[valuesOffset++] = (byte2.TripleShift(3)) & 7;
                 values[valuesOffset++] = byte2 & 7;
             }
         }
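
The hunks in these BulkOperationPackedN files all apply the same mechanical rewrite: the `(long)((ulong)x >> n)` / `(int)((uint)x >> n)` cast pattern is replaced by the `TripleShift` extension methods from `J2N.Numerics`, which implement Java's unsigned right shift (`>>>`). Below is a minimal sketch of the equivalence, assuming only that the J2N package is referenced; the `TripleShiftSketch` class and `Demo` method names are illustrative and not part of the codebase.

using System;
using J2N.Numerics; // TripleShift extension methods for int/long

internal static class TripleShiftSketch
{
    internal static void Demo()
    {
        long block = unchecked((long)0x8000_0000_0000_0001); // negative, so sign extension would matter

        // Old pattern generated into the decoders:
        long viaCasts = (long)((ulong)block >> 57);

        // New pattern after this change (Java's >>> semantics):
        long viaTripleShift = block.TripleShift(57);

        Console.WriteLine(viaCasts == viaTripleShift); // True

        // The int overload backs the per-byte decode paths in the same way:
        int byte0 = 0xFF;
        Console.WriteLine(byte0.TripleShift(5) == (int)((uint)byte0 >> 5)); // True
    }
}

Dropping the double cast keeps the generated C# decoders visually closer to the upstream Java sources, which use `>>>` directly.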
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked4.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked4.cs
index 0c5279e..28f1c34 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked4.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked4.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -36,7 +38,7 @@
                 long block = blocks[blocksOffset++];
                 for (int shift = 60; shift >= 0; shift -= 4)
                 {
-                    values[valuesOffset++] = (int)(((long)((ulong)block >> shift)) & 15);
+                    values[valuesOffset++] = (int)((block.TripleShift(shift)) & 15);
                 }
             }
         }
@@ -46,7 +48,7 @@
             for (int j = 0; j < iterations; ++j)
             {
                 var block = blocks[blocksOffset++];
-                values[valuesOffset++] = ((int)((uint)block >> 4)) & 15;
+                values[valuesOffset++] = (block.TripleShift(4)) & 15;
                 values[valuesOffset++] = block & 15;
             }
         }
@@ -58,7 +60,7 @@
                 long block = blocks[blocksOffset++];
                 for (int shift = 60; shift >= 0; shift -= 4)
                 {
-                    values[valuesOffset++] = ((long)((ulong)block >> shift)) & 15;
+                    values[valuesOffset++] = (block.TripleShift(shift)) & 15;
                 }
             }
         }
@@ -68,7 +70,7 @@
             for (int j = 0; j < iterations; ++j)
             {
                 var block = blocks[blocksOffset++];
-                values[valuesOffset++] = ((int)((uint)block >> 4)) & 15;
+                values[valuesOffset++] = (block.TripleShift(4)) & 15;
                 values[valuesOffset++] = block & 15;
             }
         }
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked5.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked5.cs
index ff32c4f..5478aca 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked5.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked5.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,73 +36,73 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 59));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 54)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 49)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 44)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 39)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 34)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 29)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 24)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 19)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 14)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 9)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 4)) & 31L);
+                values[valuesOffset++] = (int)(block0.TripleShift(59));
+                values[valuesOffset++] = (int)((block0.TripleShift(54)) & 31L);
+                values[valuesOffset++] = (int)((block0.TripleShift(49)) & 31L);
+                values[valuesOffset++] = (int)((block0.TripleShift(44)) & 31L);
+                values[valuesOffset++] = (int)((block0.TripleShift(39)) & 31L);
+                values[valuesOffset++] = (int)((block0.TripleShift(34)) & 31L);
+                values[valuesOffset++] = (int)((block0.TripleShift(29)) & 31L);
+                values[valuesOffset++] = (int)((block0.TripleShift(24)) & 31L);
+                values[valuesOffset++] = (int)((block0.TripleShift(19)) & 31L);
+                values[valuesOffset++] = (int)((block0.TripleShift(14)) & 31L);
+                values[valuesOffset++] = (int)((block0.TripleShift(9)) & 31L);
+                values[valuesOffset++] = (int)((block0.TripleShift(4)) & 31L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 15L) << 1) | ((long)((ulong)block1 >> 63)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 58)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 53)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 48)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 43)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 38)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 33)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 28)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 23)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 18)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 13)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 8)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 3)) & 31L);
+                values[valuesOffset++] = (int)(((block0 & 15L) << 1) | (block1.TripleShift(63)));
+                values[valuesOffset++] = (int)((block1.TripleShift(58)) & 31L);
+                values[valuesOffset++] = (int)((block1.TripleShift(53)) & 31L);
+                values[valuesOffset++] = (int)((block1.TripleShift(48)) & 31L);
+                values[valuesOffset++] = (int)((block1.TripleShift(43)) & 31L);
+                values[valuesOffset++] = (int)((block1.TripleShift(38)) & 31L);
+                values[valuesOffset++] = (int)((block1.TripleShift(33)) & 31L);
+                values[valuesOffset++] = (int)((block1.TripleShift(28)) & 31L);
+                values[valuesOffset++] = (int)((block1.TripleShift(23)) & 31L);
+                values[valuesOffset++] = (int)((block1.TripleShift(18)) & 31L);
+                values[valuesOffset++] = (int)((block1.TripleShift(13)) & 31L);
+                values[valuesOffset++] = (int)((block1.TripleShift(8)) & 31L);
+                values[valuesOffset++] = (int)((block1.TripleShift(3)) & 31L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 7L) << 2) | ((long)((ulong)block2 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 57)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 52)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 47)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 42)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 37)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 32)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 27)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 22)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 17)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 12)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 7)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 2)) & 31L);
+                values[valuesOffset++] = (int)(((block1 & 7L) << 2) | (block2.TripleShift(62)));
+                values[valuesOffset++] = (int)((block2.TripleShift(57)) & 31L);
+                values[valuesOffset++] = (int)((block2.TripleShift(52)) & 31L);
+                values[valuesOffset++] = (int)((block2.TripleShift(47)) & 31L);
+                values[valuesOffset++] = (int)((block2.TripleShift(42)) & 31L);
+                values[valuesOffset++] = (int)((block2.TripleShift(37)) & 31L);
+                values[valuesOffset++] = (int)((block2.TripleShift(32)) & 31L);
+                values[valuesOffset++] = (int)((block2.TripleShift(27)) & 31L);
+                values[valuesOffset++] = (int)((block2.TripleShift(22)) & 31L);
+                values[valuesOffset++] = (int)((block2.TripleShift(17)) & 31L);
+                values[valuesOffset++] = (int)((block2.TripleShift(12)) & 31L);
+                values[valuesOffset++] = (int)((block2.TripleShift(7)) & 31L);
+                values[valuesOffset++] = (int)((block2.TripleShift(2)) & 31L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 3L) << 3) | ((long)((ulong)block3 >> 61)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 56)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 51)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 46)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 41)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 36)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 31)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 26)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 21)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 16)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 11)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 6)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 1)) & 31L);
+                values[valuesOffset++] = (int)(((block2 & 3L) << 3) | (block3.TripleShift(61)));
+                values[valuesOffset++] = (int)((block3.TripleShift(56)) & 31L);
+                values[valuesOffset++] = (int)((block3.TripleShift(51)) & 31L);
+                values[valuesOffset++] = (int)((block3.TripleShift(46)) & 31L);
+                values[valuesOffset++] = (int)((block3.TripleShift(41)) & 31L);
+                values[valuesOffset++] = (int)((block3.TripleShift(36)) & 31L);
+                values[valuesOffset++] = (int)((block3.TripleShift(31)) & 31L);
+                values[valuesOffset++] = (int)((block3.TripleShift(26)) & 31L);
+                values[valuesOffset++] = (int)((block3.TripleShift(21)) & 31L);
+                values[valuesOffset++] = (int)((block3.TripleShift(16)) & 31L);
+                values[valuesOffset++] = (int)((block3.TripleShift(11)) & 31L);
+                values[valuesOffset++] = (int)((block3.TripleShift(6)) & 31L);
+                values[valuesOffset++] = (int)((block3.TripleShift(1)) & 31L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 1L) << 4) | ((long)((ulong)block4 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 55)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 50)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 45)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 40)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 35)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 30)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 25)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 20)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 15)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 10)) & 31L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 5)) & 31L);
+                values[valuesOffset++] = (int)(((block3 & 1L) << 4) | (block4.TripleShift(60)));
+                values[valuesOffset++] = (int)((block4.TripleShift(55)) & 31L);
+                values[valuesOffset++] = (int)((block4.TripleShift(50)) & 31L);
+                values[valuesOffset++] = (int)((block4.TripleShift(45)) & 31L);
+                values[valuesOffset++] = (int)((block4.TripleShift(40)) & 31L);
+                values[valuesOffset++] = (int)((block4.TripleShift(35)) & 31L);
+                values[valuesOffset++] = (int)((block4.TripleShift(30)) & 31L);
+                values[valuesOffset++] = (int)((block4.TripleShift(25)) & 31L);
+                values[valuesOffset++] = (int)((block4.TripleShift(20)) & 31L);
+                values[valuesOffset++] = (int)((block4.TripleShift(15)) & 31L);
+                values[valuesOffset++] = (int)((block4.TripleShift(10)) & 31L);
+                values[valuesOffset++] = (int)((block4.TripleShift(5)) & 31L);
                 values[valuesOffset++] = (int)(block4 & 31L);
             }
         }
@@ -110,17 +112,17 @@
             for (int i = 0; i < iterations; ++i)
             {
                 int byte0 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (int)((uint)byte0 >> 3);
+                values[valuesOffset++] = byte0.TripleShift(3);
                 int byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte0 & 7) << 2) | ((int)((uint)byte1 >> 6));
-                values[valuesOffset++] = ((int)((uint)byte1 >> 1)) & 31;
+                values[valuesOffset++] = ((byte0 & 7) << 2) | (byte1.TripleShift(6));
+                values[valuesOffset++] = (byte1.TripleShift(1)) & 31;
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 1) << 4) | ((int)((uint)byte2 >> 4));
+                values[valuesOffset++] = ((byte1 & 1) << 4) | (byte2.TripleShift(4));
                 int byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 15) << 1) | ((int)((uint)byte3 >> 7));
-                values[valuesOffset++] = ((int)((uint)byte3 >> 2)) & 31;
+                values[valuesOffset++] = ((byte2 & 15) << 1) | (byte3.TripleShift(7));
+                values[valuesOffset++] = (byte3.TripleShift(2)) & 31;
                 int byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte3 & 3) << 3) | ((int)((uint)byte4 >> 5));
+                values[valuesOffset++] = ((byte3 & 3) << 3) | (byte4.TripleShift(5));
                 values[valuesOffset++] = byte4 & 31;
             }
         }
@@ -130,73 +132,73 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 59);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 54)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 49)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 44)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 39)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 34)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 29)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 24)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 19)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 14)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 9)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 4)) & 31L;
+                values[valuesOffset++] = block0.TripleShift(59);
+                values[valuesOffset++] = (block0.TripleShift(54)) & 31L;
+                values[valuesOffset++] = (block0.TripleShift(49)) & 31L;
+                values[valuesOffset++] = (block0.TripleShift(44)) & 31L;
+                values[valuesOffset++] = (block0.TripleShift(39)) & 31L;
+                values[valuesOffset++] = (block0.TripleShift(34)) & 31L;
+                values[valuesOffset++] = (block0.TripleShift(29)) & 31L;
+                values[valuesOffset++] = (block0.TripleShift(24)) & 31L;
+                values[valuesOffset++] = (block0.TripleShift(19)) & 31L;
+                values[valuesOffset++] = (block0.TripleShift(14)) & 31L;
+                values[valuesOffset++] = (block0.TripleShift(9)) & 31L;
+                values[valuesOffset++] = (block0.TripleShift(4)) & 31L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 15L) << 1) | ((long)((ulong)block1 >> 63));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 58)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 53)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 48)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 43)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 38)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 33)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 28)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 23)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 18)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 13)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 8)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 3)) & 31L;
+                values[valuesOffset++] = ((block0 & 15L) << 1) | (block1.TripleShift(63));
+                values[valuesOffset++] = (block1.TripleShift(58)) & 31L;
+                values[valuesOffset++] = (block1.TripleShift(53)) & 31L;
+                values[valuesOffset++] = (block1.TripleShift(48)) & 31L;
+                values[valuesOffset++] = (block1.TripleShift(43)) & 31L;
+                values[valuesOffset++] = (block1.TripleShift(38)) & 31L;
+                values[valuesOffset++] = (block1.TripleShift(33)) & 31L;
+                values[valuesOffset++] = (block1.TripleShift(28)) & 31L;
+                values[valuesOffset++] = (block1.TripleShift(23)) & 31L;
+                values[valuesOffset++] = (block1.TripleShift(18)) & 31L;
+                values[valuesOffset++] = (block1.TripleShift(13)) & 31L;
+                values[valuesOffset++] = (block1.TripleShift(8)) & 31L;
+                values[valuesOffset++] = (block1.TripleShift(3)) & 31L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 7L) << 2) | ((long)((ulong)block2 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 57)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 52)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 47)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 42)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 37)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 32)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 27)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 22)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 17)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 12)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 7)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 2)) & 31L;
+                values[valuesOffset++] = ((block1 & 7L) << 2) | (block2.TripleShift(62));
+                values[valuesOffset++] = (block2.TripleShift(57)) & 31L;
+                values[valuesOffset++] = (block2.TripleShift(52)) & 31L;
+                values[valuesOffset++] = (block2.TripleShift(47)) & 31L;
+                values[valuesOffset++] = (block2.TripleShift(42)) & 31L;
+                values[valuesOffset++] = (block2.TripleShift(37)) & 31L;
+                values[valuesOffset++] = (block2.TripleShift(32)) & 31L;
+                values[valuesOffset++] = (block2.TripleShift(27)) & 31L;
+                values[valuesOffset++] = (block2.TripleShift(22)) & 31L;
+                values[valuesOffset++] = (block2.TripleShift(17)) & 31L;
+                values[valuesOffset++] = (block2.TripleShift(12)) & 31L;
+                values[valuesOffset++] = (block2.TripleShift(7)) & 31L;
+                values[valuesOffset++] = (block2.TripleShift(2)) & 31L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 3L) << 3) | ((long)((ulong)block3 >> 61));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 56)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 51)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 46)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 41)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 36)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 31)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 26)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 21)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 16)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 11)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 6)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 1)) & 31L;
+                values[valuesOffset++] = ((block2 & 3L) << 3) | (block3.TripleShift(61));
+                values[valuesOffset++] = (block3.TripleShift(56)) & 31L;
+                values[valuesOffset++] = (block3.TripleShift(51)) & 31L;
+                values[valuesOffset++] = (block3.TripleShift(46)) & 31L;
+                values[valuesOffset++] = (block3.TripleShift(41)) & 31L;
+                values[valuesOffset++] = (block3.TripleShift(36)) & 31L;
+                values[valuesOffset++] = (block3.TripleShift(31)) & 31L;
+                values[valuesOffset++] = (block3.TripleShift(26)) & 31L;
+                values[valuesOffset++] = (block3.TripleShift(21)) & 31L;
+                values[valuesOffset++] = (block3.TripleShift(16)) & 31L;
+                values[valuesOffset++] = (block3.TripleShift(11)) & 31L;
+                values[valuesOffset++] = (block3.TripleShift(6)) & 31L;
+                values[valuesOffset++] = (block3.TripleShift(1)) & 31L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 1L) << 4) | ((long)((ulong)block4 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 55)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 50)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 45)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 40)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 35)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 30)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 25)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 20)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 15)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 10)) & 31L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 5)) & 31L;
+                values[valuesOffset++] = ((block3 & 1L) << 4) | (block4.TripleShift(60));
+                values[valuesOffset++] = (block4.TripleShift(55)) & 31L;
+                values[valuesOffset++] = (block4.TripleShift(50)) & 31L;
+                values[valuesOffset++] = (block4.TripleShift(45)) & 31L;
+                values[valuesOffset++] = (block4.TripleShift(40)) & 31L;
+                values[valuesOffset++] = (block4.TripleShift(35)) & 31L;
+                values[valuesOffset++] = (block4.TripleShift(30)) & 31L;
+                values[valuesOffset++] = (block4.TripleShift(25)) & 31L;
+                values[valuesOffset++] = (block4.TripleShift(20)) & 31L;
+                values[valuesOffset++] = (block4.TripleShift(15)) & 31L;
+                values[valuesOffset++] = (block4.TripleShift(10)) & 31L;
+                values[valuesOffset++] = (block4.TripleShift(5)) & 31L;
                 values[valuesOffset++] = block4 & 31L;
             }
         }
@@ -206,17 +208,17 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long byte0 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (long)((ulong)byte0 >> 3);
+                values[valuesOffset++] = byte0.TripleShift(3);
                 long byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte0 & 7) << 2) | ((long)((ulong)byte1 >> 6));
-                values[valuesOffset++] = ((long)((ulong)byte1 >> 1)) & 31;
+                values[valuesOffset++] = ((byte0 & 7) << 2) | (byte1.TripleShift(6));
+                values[valuesOffset++] = (byte1.TripleShift(1)) & 31;
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 1) << 4) | ((long)((ulong)byte2 >> 4));
+                values[valuesOffset++] = ((byte1 & 1) << 4) | (byte2.TripleShift(4));
                 long byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 15) << 1) | ((long)((ulong)byte3 >> 7));
-                values[valuesOffset++] = ((long)((ulong)byte3 >> 2)) & 31;
+                values[valuesOffset++] = ((byte2 & 15) << 1) | (byte3.TripleShift(7));
+                values[valuesOffset++] = (byte3.TripleShift(2)) & 31;
                 long byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte3 & 3) << 3) | ((long)((ulong)byte4 >> 5));
+                values[valuesOffset++] = ((byte3 & 3) << 3) | (byte4.TripleShift(5));
                 values[valuesOffset++] = byte4 & 31;
             }
         }
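
The only non-uniform lines in each 64-bit decode loop are the ones that stitch a value back together across a block boundary, for example `((block0 & 15L) << 1) | (block1.TripleShift(63))` in the 5-bit decoder above: 64 bits hold twelve whole 5-bit values, so the thirteenth value spans the last 4 bits of block0 and the top bit of block1. A small sketch of that reconstruction follows, under the same J2N assumption; the `FiveBitBoundarySketch` name is illustrative only.

using System;
using J2N.Numerics;

internal static class FiveBitBoundarySketch
{
    internal static void Demo()
    {
        int value13 = 0b10111; // 23, an arbitrary 5-bit value that straddles the boundary

        // High 4 bits land in the low 4 bits of block0; the low bit lands in the top bit of block1.
        long block0 = value13 >> 1;
        long block1 = (long)(value13 & 1) << 63; // sets the sign bit, so an unsigned shift is required

        int decoded = (int)(((block0 & 15L) << 1) | block1.TripleShift(63));
        Console.WriteLine(decoded == value13); // True
    }
}

With an arithmetic `>>` the sign bit of block1 would smear across the high bits and corrupt the OR; that is the case the old `(ulong)` casts, and now `TripleShift`, guard against.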
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked6.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked6.cs
index 6450d7a..b244f89 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked6.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked6.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,39 +36,39 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 58));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 52)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 46)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 40)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 34)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 28)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 22)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 16)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 10)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 4)) & 63L);
+                values[valuesOffset++] = (int)(block0.TripleShift(58));
+                values[valuesOffset++] = (int)((block0.TripleShift(52)) & 63L);
+                values[valuesOffset++] = (int)((block0.TripleShift(46)) & 63L);
+                values[valuesOffset++] = (int)((block0.TripleShift(40)) & 63L);
+                values[valuesOffset++] = (int)((block0.TripleShift(34)) & 63L);
+                values[valuesOffset++] = (int)((block0.TripleShift(28)) & 63L);
+                values[valuesOffset++] = (int)((block0.TripleShift(22)) & 63L);
+                values[valuesOffset++] = (int)((block0.TripleShift(16)) & 63L);
+                values[valuesOffset++] = (int)((block0.TripleShift(10)) & 63L);
+                values[valuesOffset++] = (int)((block0.TripleShift(4)) & 63L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 15L) << 2) | ((long)((ulong)block1 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 56)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 50)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 44)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 38)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 32)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 26)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 20)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 14)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 8)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 2)) & 63L);
+                values[valuesOffset++] = (int)(((block0 & 15L) << 2) | (block1.TripleShift(62)));
+                values[valuesOffset++] = (int)((block1.TripleShift(56)) & 63L);
+                values[valuesOffset++] = (int)((block1.TripleShift(50)) & 63L);
+                values[valuesOffset++] = (int)((block1.TripleShift(44)) & 63L);
+                values[valuesOffset++] = (int)((block1.TripleShift(38)) & 63L);
+                values[valuesOffset++] = (int)((block1.TripleShift(32)) & 63L);
+                values[valuesOffset++] = (int)((block1.TripleShift(26)) & 63L);
+                values[valuesOffset++] = (int)((block1.TripleShift(20)) & 63L);
+                values[valuesOffset++] = (int)((block1.TripleShift(14)) & 63L);
+                values[valuesOffset++] = (int)((block1.TripleShift(8)) & 63L);
+                values[valuesOffset++] = (int)((block1.TripleShift(2)) & 63L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 3L) << 4) | ((long)((ulong)block2 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 54)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 48)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 42)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 36)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 30)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 24)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 18)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 12)) & 63L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 6)) & 63L);
+                values[valuesOffset++] = (int)(((block1 & 3L) << 4) | (block2.TripleShift(60)));
+                values[valuesOffset++] = (int)((block2.TripleShift(54)) & 63L);
+                values[valuesOffset++] = (int)((block2.TripleShift(48)) & 63L);
+                values[valuesOffset++] = (int)((block2.TripleShift(42)) & 63L);
+                values[valuesOffset++] = (int)((block2.TripleShift(36)) & 63L);
+                values[valuesOffset++] = (int)((block2.TripleShift(30)) & 63L);
+                values[valuesOffset++] = (int)((block2.TripleShift(24)) & 63L);
+                values[valuesOffset++] = (int)((block2.TripleShift(18)) & 63L);
+                values[valuesOffset++] = (int)((block2.TripleShift(12)) & 63L);
+                values[valuesOffset++] = (int)((block2.TripleShift(6)) & 63L);
                 values[valuesOffset++] = (int)(block2 & 63L);
             }
         }
@@ -76,11 +78,11 @@
             for (int i = 0; i < iterations; ++i)
             {
                 int byte0 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (int)((uint)byte0 >> 2);
+                values[valuesOffset++] = byte0.TripleShift(2);
                 int byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte0 & 3) << 4) | ((int)((uint)byte1 >> 4));
+                values[valuesOffset++] = ((byte0 & 3) << 4) | (byte1.TripleShift(4));
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 15) << 2) | ((int)((uint)byte2 >> 6));
+                values[valuesOffset++] = ((byte1 & 15) << 2) | (byte2.TripleShift(6));
                 values[valuesOffset++] = byte2 & 63;
             }
         }
@@ -90,39 +92,39 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 58);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 52)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 46)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 40)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 34)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 28)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 22)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 16)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 10)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 4)) & 63L;
+                values[valuesOffset++] = block0.TripleShift(58);
+                values[valuesOffset++] = (block0.TripleShift(52)) & 63L;
+                values[valuesOffset++] = (block0.TripleShift(46)) & 63L;
+                values[valuesOffset++] = (block0.TripleShift(40)) & 63L;
+                values[valuesOffset++] = (block0.TripleShift(34)) & 63L;
+                values[valuesOffset++] = (block0.TripleShift(28)) & 63L;
+                values[valuesOffset++] = (block0.TripleShift(22)) & 63L;
+                values[valuesOffset++] = (block0.TripleShift(16)) & 63L;
+                values[valuesOffset++] = (block0.TripleShift(10)) & 63L;
+                values[valuesOffset++] = (block0.TripleShift(4)) & 63L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 15L) << 2) | ((long)((ulong)block1 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 56)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 50)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 44)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 38)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 32)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 26)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 20)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 14)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 8)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 2)) & 63L;
+                values[valuesOffset++] = ((block0 & 15L) << 2) | (block1.TripleShift(62));
+                values[valuesOffset++] = (block1.TripleShift(56)) & 63L;
+                values[valuesOffset++] = (block1.TripleShift(50)) & 63L;
+                values[valuesOffset++] = (block1.TripleShift(44)) & 63L;
+                values[valuesOffset++] = (block1.TripleShift(38)) & 63L;
+                values[valuesOffset++] = (block1.TripleShift(32)) & 63L;
+                values[valuesOffset++] = (block1.TripleShift(26)) & 63L;
+                values[valuesOffset++] = (block1.TripleShift(20)) & 63L;
+                values[valuesOffset++] = (block1.TripleShift(14)) & 63L;
+                values[valuesOffset++] = (block1.TripleShift(8)) & 63L;
+                values[valuesOffset++] = (block1.TripleShift(2)) & 63L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 3L) << 4) | ((long)((ulong)block2 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 54)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 48)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 42)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 36)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 30)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 24)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 18)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 12)) & 63L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 6)) & 63L;
+                values[valuesOffset++] = ((block1 & 3L) << 4) | (block2.TripleShift(60));
+                values[valuesOffset++] = (block2.TripleShift(54)) & 63L;
+                values[valuesOffset++] = (block2.TripleShift(48)) & 63L;
+                values[valuesOffset++] = (block2.TripleShift(42)) & 63L;
+                values[valuesOffset++] = (block2.TripleShift(36)) & 63L;
+                values[valuesOffset++] = (block2.TripleShift(30)) & 63L;
+                values[valuesOffset++] = (block2.TripleShift(24)) & 63L;
+                values[valuesOffset++] = (block2.TripleShift(18)) & 63L;
+                values[valuesOffset++] = (block2.TripleShift(12)) & 63L;
+                values[valuesOffset++] = (block2.TripleShift(6)) & 63L;
                 values[valuesOffset++] = block2 & 63L;
             }
         }
@@ -132,11 +134,11 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long byte0 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (long)((ulong)byte0 >> 2);
+                values[valuesOffset++] = byte0.TripleShift(2);
                 long byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte0 & 3) << 4) | ((long)((ulong)byte1 >> 4));
+                values[valuesOffset++] = ((byte0 & 3) << 4) | (byte1.TripleShift(4));
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 15) << 2) | ((long)((ulong)byte2 >> 6));
+                values[valuesOffset++] = ((byte1 & 15) << 2) | (byte2.TripleShift(6));
                 values[valuesOffset++] = byte2 & 63;
             }
         }
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked7.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked7.cs
index 9ad7361..749e4a2 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked7.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked7.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,75 +36,75 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 57));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 50)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 43)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 36)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 29)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 22)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 15)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 8)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 1)) & 127L);
+                values[valuesOffset++] = (int)(block0.TripleShift(57));
+                values[valuesOffset++] = (int)((block0.TripleShift(50)) & 127L);
+                values[valuesOffset++] = (int)((block0.TripleShift(43)) & 127L);
+                values[valuesOffset++] = (int)((block0.TripleShift(36)) & 127L);
+                values[valuesOffset++] = (int)((block0.TripleShift(29)) & 127L);
+                values[valuesOffset++] = (int)((block0.TripleShift(22)) & 127L);
+                values[valuesOffset++] = (int)((block0.TripleShift(15)) & 127L);
+                values[valuesOffset++] = (int)((block0.TripleShift(8)) & 127L);
+                values[valuesOffset++] = (int)((block0.TripleShift(1)) & 127L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 1L) << 6) | ((long)((ulong)block1 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 51)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 44)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 37)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 30)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 23)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 16)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 9)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 2)) & 127L);
+                values[valuesOffset++] = (int)(((block0 & 1L) << 6) | (block1.TripleShift(58)));
+                values[valuesOffset++] = (int)((block1.TripleShift(51)) & 127L);
+                values[valuesOffset++] = (int)((block1.TripleShift(44)) & 127L);
+                values[valuesOffset++] = (int)((block1.TripleShift(37)) & 127L);
+                values[valuesOffset++] = (int)((block1.TripleShift(30)) & 127L);
+                values[valuesOffset++] = (int)((block1.TripleShift(23)) & 127L);
+                values[valuesOffset++] = (int)((block1.TripleShift(16)) & 127L);
+                values[valuesOffset++] = (int)((block1.TripleShift(9)) & 127L);
+                values[valuesOffset++] = (int)((block1.TripleShift(2)) & 127L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 3L) << 5) | ((long)((ulong)block2 >> 59)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 52)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 45)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 38)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 31)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 24)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 17)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 10)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 3)) & 127L);
+                values[valuesOffset++] = (int)(((block1 & 3L) << 5) | (block2.TripleShift(59)));
+                values[valuesOffset++] = (int)((block2.TripleShift(52)) & 127L);
+                values[valuesOffset++] = (int)((block2.TripleShift(45)) & 127L);
+                values[valuesOffset++] = (int)((block2.TripleShift(38)) & 127L);
+                values[valuesOffset++] = (int)((block2.TripleShift(31)) & 127L);
+                values[valuesOffset++] = (int)((block2.TripleShift(24)) & 127L);
+                values[valuesOffset++] = (int)((block2.TripleShift(17)) & 127L);
+                values[valuesOffset++] = (int)((block2.TripleShift(10)) & 127L);
+                values[valuesOffset++] = (int)((block2.TripleShift(3)) & 127L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 7L) << 4) | ((long)((ulong)block3 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 53)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 46)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 39)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 32)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 25)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 18)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 11)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 4)) & 127L);
+                values[valuesOffset++] = (int)(((block2 & 7L) << 4) | (block3.TripleShift(60)));
+                values[valuesOffset++] = (int)((block3.TripleShift(53)) & 127L);
+                values[valuesOffset++] = (int)((block3.TripleShift(46)) & 127L);
+                values[valuesOffset++] = (int)((block3.TripleShift(39)) & 127L);
+                values[valuesOffset++] = (int)((block3.TripleShift(32)) & 127L);
+                values[valuesOffset++] = (int)((block3.TripleShift(25)) & 127L);
+                values[valuesOffset++] = (int)((block3.TripleShift(18)) & 127L);
+                values[valuesOffset++] = (int)((block3.TripleShift(11)) & 127L);
+                values[valuesOffset++] = (int)((block3.TripleShift(4)) & 127L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 15L) << 3) | ((long)((ulong)block4 >> 61)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 54)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 47)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 40)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 33)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 26)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 19)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 12)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 5)) & 127L);
+                values[valuesOffset++] = (int)(((block3 & 15L) << 3) | (block4.TripleShift(61)));
+                values[valuesOffset++] = (int)((block4.TripleShift(54)) & 127L);
+                values[valuesOffset++] = (int)((block4.TripleShift(47)) & 127L);
+                values[valuesOffset++] = (int)((block4.TripleShift(40)) & 127L);
+                values[valuesOffset++] = (int)((block4.TripleShift(33)) & 127L);
+                values[valuesOffset++] = (int)((block4.TripleShift(26)) & 127L);
+                values[valuesOffset++] = (int)((block4.TripleShift(19)) & 127L);
+                values[valuesOffset++] = (int)((block4.TripleShift(12)) & 127L);
+                values[valuesOffset++] = (int)((block4.TripleShift(5)) & 127L);
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block4 & 31L) << 2) | ((long)((ulong)block5 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 55)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 48)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 41)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 34)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 27)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 20)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 13)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 6)) & 127L);
+                values[valuesOffset++] = (int)(((block4 & 31L) << 2) | (block5.TripleShift(62)));
+                values[valuesOffset++] = (int)((block5.TripleShift(55)) & 127L);
+                values[valuesOffset++] = (int)((block5.TripleShift(48)) & 127L);
+                values[valuesOffset++] = (int)((block5.TripleShift(41)) & 127L);
+                values[valuesOffset++] = (int)((block5.TripleShift(34)) & 127L);
+                values[valuesOffset++] = (int)((block5.TripleShift(27)) & 127L);
+                values[valuesOffset++] = (int)((block5.TripleShift(20)) & 127L);
+                values[valuesOffset++] = (int)((block5.TripleShift(13)) & 127L);
+                values[valuesOffset++] = (int)((block5.TripleShift(6)) & 127L);
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block5 & 63L) << 1) | ((long)((ulong)block6 >> 63)));
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 56)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 49)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 42)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 35)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 28)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 21)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 14)) & 127L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 7)) & 127L);
+                values[valuesOffset++] = (int)(((block5 & 63L) << 1) | (block6.TripleShift(63)));
+                values[valuesOffset++] = (int)((block6.TripleShift(56)) & 127L);
+                values[valuesOffset++] = (int)((block6.TripleShift(49)) & 127L);
+                values[valuesOffset++] = (int)((block6.TripleShift(42)) & 127L);
+                values[valuesOffset++] = (int)((block6.TripleShift(35)) & 127L);
+                values[valuesOffset++] = (int)((block6.TripleShift(28)) & 127L);
+                values[valuesOffset++] = (int)((block6.TripleShift(21)) & 127L);
+                values[valuesOffset++] = (int)((block6.TripleShift(14)) & 127L);
+                values[valuesOffset++] = (int)((block6.TripleShift(7)) & 127L);
                 values[valuesOffset++] = (int)(block6 & 127L);
             }
         }
@@ -112,19 +114,19 @@
             for (int i = 0; i < iterations; ++i)
             {
                 int byte0 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (int)((uint)byte0 >> 1);
+                values[valuesOffset++] = byte0.TripleShift(1);
                 int byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte0 & 1) << 6) | ((int)((uint)byte1 >> 2));
+                values[valuesOffset++] = ((byte0 & 1) << 6) | (byte1.TripleShift(2));
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 3) << 5) | ((int)((uint)byte2 >> 3));
+                values[valuesOffset++] = ((byte1 & 3) << 5) | (byte2.TripleShift(3));
                 int byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 7) << 4) | ((int)((uint)byte3 >> 4));
+                values[valuesOffset++] = ((byte2 & 7) << 4) | (byte3.TripleShift(4));
                 int byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte3 & 15) << 3) | ((int)((uint)byte4 >> 5));
+                values[valuesOffset++] = ((byte3 & 15) << 3) | (byte4.TripleShift(5));
                 int byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 31) << 2) | ((int)((uint)byte5 >> 6));
+                values[valuesOffset++] = ((byte4 & 31) << 2) | (byte5.TripleShift(6));
                 int byte6 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 63) << 1) | ((int)((uint)byte6 >> 7));
+                values[valuesOffset++] = ((byte5 & 63) << 1) | (byte6.TripleShift(7));
                 values[valuesOffset++] = byte6 & 127;
             }
         }
@@ -134,75 +136,75 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (long)((ulong)block0 >> 57);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 50)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 43)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 36)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 29)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 22)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 15)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 8)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 1)) & 127L;
+                values[valuesOffset++] = block0.TripleShift(57);
+                values[valuesOffset++] = (block0.TripleShift(50)) & 127L;
+                values[valuesOffset++] = (block0.TripleShift(43)) & 127L;
+                values[valuesOffset++] = (block0.TripleShift(36)) & 127L;
+                values[valuesOffset++] = (block0.TripleShift(29)) & 127L;
+                values[valuesOffset++] = (block0.TripleShift(22)) & 127L;
+                values[valuesOffset++] = (block0.TripleShift(15)) & 127L;
+                values[valuesOffset++] = (block0.TripleShift(8)) & 127L;
+                values[valuesOffset++] = (block0.TripleShift(1)) & 127L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 1L) << 6) | ((long)((ulong)block1 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 51)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 44)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 37)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 30)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 23)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 16)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 9)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 2)) & 127L;
+                values[valuesOffset++] = ((block0 & 1L) << 6) | (block1.TripleShift(58));
+                values[valuesOffset++] = (block1.TripleShift(51)) & 127L;
+                values[valuesOffset++] = (block1.TripleShift(44)) & 127L;
+                values[valuesOffset++] = (block1.TripleShift(37)) & 127L;
+                values[valuesOffset++] = (block1.TripleShift(30)) & 127L;
+                values[valuesOffset++] = (block1.TripleShift(23)) & 127L;
+                values[valuesOffset++] = (block1.TripleShift(16)) & 127L;
+                values[valuesOffset++] = (block1.TripleShift(9)) & 127L;
+                values[valuesOffset++] = (block1.TripleShift(2)) & 127L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 3L) << 5) | ((long)((ulong)block2 >> 59));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 52)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 45)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 38)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 31)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 24)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 17)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 10)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 3)) & 127L;
+                values[valuesOffset++] = ((block1 & 3L) << 5) | (block2.TripleShift(59));
+                values[valuesOffset++] = (block2.TripleShift(52)) & 127L;
+                values[valuesOffset++] = (block2.TripleShift(45)) & 127L;
+                values[valuesOffset++] = (block2.TripleShift(38)) & 127L;
+                values[valuesOffset++] = (block2.TripleShift(31)) & 127L;
+                values[valuesOffset++] = (block2.TripleShift(24)) & 127L;
+                values[valuesOffset++] = (block2.TripleShift(17)) & 127L;
+                values[valuesOffset++] = (block2.TripleShift(10)) & 127L;
+                values[valuesOffset++] = (block2.TripleShift(3)) & 127L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 7L) << 4) | ((long)((ulong)block3 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 53)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 46)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 39)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 32)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 25)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 18)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 11)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 4)) & 127L;
+                values[valuesOffset++] = ((block2 & 7L) << 4) | (block3.TripleShift(60));
+                values[valuesOffset++] = (block3.TripleShift(53)) & 127L;
+                values[valuesOffset++] = (block3.TripleShift(46)) & 127L;
+                values[valuesOffset++] = (block3.TripleShift(39)) & 127L;
+                values[valuesOffset++] = (block3.TripleShift(32)) & 127L;
+                values[valuesOffset++] = (block3.TripleShift(25)) & 127L;
+                values[valuesOffset++] = (block3.TripleShift(18)) & 127L;
+                values[valuesOffset++] = (block3.TripleShift(11)) & 127L;
+                values[valuesOffset++] = (block3.TripleShift(4)) & 127L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 15L) << 3) | ((long)((ulong)block4 >> 61));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 54)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 47)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 40)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 33)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 26)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 19)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 12)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 5)) & 127L;
+                values[valuesOffset++] = ((block3 & 15L) << 3) | (block4.TripleShift(61));
+                values[valuesOffset++] = (block4.TripleShift(54)) & 127L;
+                values[valuesOffset++] = (block4.TripleShift(47)) & 127L;
+                values[valuesOffset++] = (block4.TripleShift(40)) & 127L;
+                values[valuesOffset++] = (block4.TripleShift(33)) & 127L;
+                values[valuesOffset++] = (block4.TripleShift(26)) & 127L;
+                values[valuesOffset++] = (block4.TripleShift(19)) & 127L;
+                values[valuesOffset++] = (block4.TripleShift(12)) & 127L;
+                values[valuesOffset++] = (block4.TripleShift(5)) & 127L;
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block4 & 31L) << 2) | ((long)((ulong)block5 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block5 >> 55)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 48)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 41)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 34)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 27)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 20)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 13)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 6)) & 127L;
+                values[valuesOffset++] = ((block4 & 31L) << 2) | (block5.TripleShift(62));
+                values[valuesOffset++] = (block5.TripleShift(55)) & 127L;
+                values[valuesOffset++] = (block5.TripleShift(48)) & 127L;
+                values[valuesOffset++] = (block5.TripleShift(41)) & 127L;
+                values[valuesOffset++] = (block5.TripleShift(34)) & 127L;
+                values[valuesOffset++] = (block5.TripleShift(27)) & 127L;
+                values[valuesOffset++] = (block5.TripleShift(20)) & 127L;
+                values[valuesOffset++] = (block5.TripleShift(13)) & 127L;
+                values[valuesOffset++] = (block5.TripleShift(6)) & 127L;
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block5 & 63L) << 1) | ((long)((ulong)block6 >> 63));
-                values[valuesOffset++] = ((long)((ulong)block6 >> 56)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 49)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 42)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 35)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 28)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 21)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 14)) & 127L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 7)) & 127L;
+                values[valuesOffset++] = ((block5 & 63L) << 1) | (block6.TripleShift(63));
+                values[valuesOffset++] = (block6.TripleShift(56)) & 127L;
+                values[valuesOffset++] = (block6.TripleShift(49)) & 127L;
+                values[valuesOffset++] = (block6.TripleShift(42)) & 127L;
+                values[valuesOffset++] = (block6.TripleShift(35)) & 127L;
+                values[valuesOffset++] = (block6.TripleShift(28)) & 127L;
+                values[valuesOffset++] = (block6.TripleShift(21)) & 127L;
+                values[valuesOffset++] = (block6.TripleShift(14)) & 127L;
+                values[valuesOffset++] = (block6.TripleShift(7)) & 127L;
                 values[valuesOffset++] = block6 & 127L;
             }
         }
@@ -212,19 +214,19 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long byte0 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (long)((ulong)byte0 >> 1);
+                values[valuesOffset++] = byte0.TripleShift(1);
                 long byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte0 & 1) << 6) | ((long)((ulong)byte1 >> 2));
+                values[valuesOffset++] = ((byte0 & 1) << 6) | (byte1.TripleShift(2));
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 3) << 5) | ((long)((ulong)byte2 >> 3));
+                values[valuesOffset++] = ((byte1 & 3) << 5) | (byte2.TripleShift(3));
                 long byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 7) << 4) | ((long)((ulong)byte3 >> 4));
+                values[valuesOffset++] = ((byte2 & 7) << 4) | (byte3.TripleShift(4));
                 long byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte3 & 15) << 3) | ((long)((ulong)byte4 >> 5));
+                values[valuesOffset++] = ((byte3 & 15) << 3) | (byte4.TripleShift(5));
                 long byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 31) << 2) | ((long)((ulong)byte5 >> 6));
+                values[valuesOffset++] = ((byte4 & 31) << 2) | (byte5.TripleShift(6));
                 long byte6 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 63) << 1) | ((long)((ulong)byte6 >> 7));
+                values[valuesOffset++] = ((byte5 & 63) << 1) | (byte6.TripleShift(7));
                 values[valuesOffset++] = byte6 & 127;
             }
         }
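
The change repeated throughout these hunks swaps the inline unsigned-right-shift idiom `(long)((ulong)x >> n)` (and its `int`/`uint` counterpart) for the `TripleShift` extension method pulled in via `using J2N.Numerics;`, mirroring Java's `>>>` operator. A minimal sketch of the equivalence, using a locally defined helper rather than the J2N extension (whose exact signature is assumed rather than shown here):

```csharp
using System;

internal static class TripleShiftSketch
{
    // Stand-in for the logical (unsigned) right shift that TripleShift provides:
    // reinterpret the signed value as unsigned, shift, reinterpret back.
    private static long UnsignedRightShift(long value, int shift)
        => (long)((ulong)value >> shift);

    private static void Main()
    {
        long block = long.MinValue; // only the sign bit set

        Console.WriteLine(block >> 63);                   // -1: arithmetic shift copies the sign bit
        Console.WriteLine(UnsignedRightShift(block, 63)); //  1: logical shift fills with zeros
    }
}
```

The two forms are equivalent; the extension method simply removes the double cast from the hot decode loops below.
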
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked8.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked8.cs
index fe96aba..80f5ea0 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked8.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked8.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -36,7 +38,7 @@
                 long block = blocks[blocksOffset++];
                 for (int shift = 56; shift >= 0; shift -= 8)
                 {
-                    values[valuesOffset++] = (int)(((long)((ulong)block >> shift)) & 255);
+                    values[valuesOffset++] = (int)((block.TripleShift(shift)) & 255);
                 }
             }
         }
@@ -56,7 +58,7 @@
                 long block = blocks[blocksOffset++];
                 for (int shift = 56; shift >= 0; shift -= 8)
                 {
-                    values[valuesOffset++] = ((long)((ulong)block >> shift)) & 255;
+                    values[valuesOffset++] = (block.TripleShift(shift)) & 255;
                 }
             }
         }
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPacked9.cs b/src/Lucene.Net/Util/Packed/BulkOperationPacked9.cs
index 5d73724..34058ee 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPacked9.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPacked9.cs
@@ -1,4 +1,6 @@
-// this file has been automatically generated, DO NOT EDIT
+// this file has been automatically generated, DO NOT EDIT
+
+using J2N.Numerics;
 
 namespace Lucene.Net.Util.Packed
 {
@@ -34,77 +36,77 @@
             for (int i = 0; i < iterations; ++i)
             {
                 long block0 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)((long)((ulong)block0 >> 55));
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 46)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 37)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 28)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 19)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 10)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block0 >> 1)) & 511L);
+                values[valuesOffset++] = (int)(block0.TripleShift(55));
+                values[valuesOffset++] = (int)((block0.TripleShift(46)) & 511L);
+                values[valuesOffset++] = (int)((block0.TripleShift(37)) & 511L);
+                values[valuesOffset++] = (int)((block0.TripleShift(28)) & 511L);
+                values[valuesOffset++] = (int)((block0.TripleShift(19)) & 511L);
+                values[valuesOffset++] = (int)((block0.TripleShift(10)) & 511L);
+                values[valuesOffset++] = (int)((block0.TripleShift(1)) & 511L);
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block0 & 1L) << 8) | ((long)((ulong)block1 >> 56)));
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 47)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 38)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 29)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 20)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 11)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block1 >> 2)) & 511L);
+                values[valuesOffset++] = (int)(((block0 & 1L) << 8) | (block1.TripleShift(56)));
+                values[valuesOffset++] = (int)((block1.TripleShift(47)) & 511L);
+                values[valuesOffset++] = (int)((block1.TripleShift(38)) & 511L);
+                values[valuesOffset++] = (int)((block1.TripleShift(29)) & 511L);
+                values[valuesOffset++] = (int)((block1.TripleShift(20)) & 511L);
+                values[valuesOffset++] = (int)((block1.TripleShift(11)) & 511L);
+                values[valuesOffset++] = (int)((block1.TripleShift(2)) & 511L);
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block1 & 3L) << 7) | ((long)((ulong)block2 >> 57)));
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 48)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 39)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 30)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 21)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 12)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block2 >> 3)) & 511L);
+                values[valuesOffset++] = (int)(((block1 & 3L) << 7) | (block2.TripleShift(57)));
+                values[valuesOffset++] = (int)((block2.TripleShift(48)) & 511L);
+                values[valuesOffset++] = (int)((block2.TripleShift(39)) & 511L);
+                values[valuesOffset++] = (int)((block2.TripleShift(30)) & 511L);
+                values[valuesOffset++] = (int)((block2.TripleShift(21)) & 511L);
+                values[valuesOffset++] = (int)((block2.TripleShift(12)) & 511L);
+                values[valuesOffset++] = (int)((block2.TripleShift(3)) & 511L);
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block2 & 7L) << 6) | ((long)((ulong)block3 >> 58)));
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 49)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 40)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 31)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 22)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 13)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block3 >> 4)) & 511L);
+                values[valuesOffset++] = (int)(((block2 & 7L) << 6) | (block3.TripleShift(58)));
+                values[valuesOffset++] = (int)((block3.TripleShift(49)) & 511L);
+                values[valuesOffset++] = (int)((block3.TripleShift(40)) & 511L);
+                values[valuesOffset++] = (int)((block3.TripleShift(31)) & 511L);
+                values[valuesOffset++] = (int)((block3.TripleShift(22)) & 511L);
+                values[valuesOffset++] = (int)((block3.TripleShift(13)) & 511L);
+                values[valuesOffset++] = (int)((block3.TripleShift(4)) & 511L);
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block3 & 15L) << 5) | ((long)((ulong)block4 >> 59)));
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 50)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 41)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 32)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 23)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 14)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block4 >> 5)) & 511L);
+                values[valuesOffset++] = (int)(((block3 & 15L) << 5) | (block4.TripleShift(59)));
+                values[valuesOffset++] = (int)((block4.TripleShift(50)) & 511L);
+                values[valuesOffset++] = (int)((block4.TripleShift(41)) & 511L);
+                values[valuesOffset++] = (int)((block4.TripleShift(32)) & 511L);
+                values[valuesOffset++] = (int)((block4.TripleShift(23)) & 511L);
+                values[valuesOffset++] = (int)((block4.TripleShift(14)) & 511L);
+                values[valuesOffset++] = (int)((block4.TripleShift(5)) & 511L);
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block4 & 31L) << 4) | ((long)((ulong)block5 >> 60)));
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 51)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 42)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 33)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 24)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 15)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block5 >> 6)) & 511L);
+                values[valuesOffset++] = (int)(((block4 & 31L) << 4) | (block5.TripleShift(60)));
+                values[valuesOffset++] = (int)((block5.TripleShift(51)) & 511L);
+                values[valuesOffset++] = (int)((block5.TripleShift(42)) & 511L);
+                values[valuesOffset++] = (int)((block5.TripleShift(33)) & 511L);
+                values[valuesOffset++] = (int)((block5.TripleShift(24)) & 511L);
+                values[valuesOffset++] = (int)((block5.TripleShift(15)) & 511L);
+                values[valuesOffset++] = (int)((block5.TripleShift(6)) & 511L);
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block5 & 63L) << 3) | ((long)((ulong)block6 >> 61)));
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 52)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 43)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 34)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 25)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 16)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block6 >> 7)) & 511L);
+                values[valuesOffset++] = (int)(((block5 & 63L) << 3) | (block6.TripleShift(61)));
+                values[valuesOffset++] = (int)((block6.TripleShift(52)) & 511L);
+                values[valuesOffset++] = (int)((block6.TripleShift(43)) & 511L);
+                values[valuesOffset++] = (int)((block6.TripleShift(34)) & 511L);
+                values[valuesOffset++] = (int)((block6.TripleShift(25)) & 511L);
+                values[valuesOffset++] = (int)((block6.TripleShift(16)) & 511L);
+                values[valuesOffset++] = (int)((block6.TripleShift(7)) & 511L);
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block6 & 127L) << 2) | ((long)((ulong)block7 >> 62)));
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 53)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 44)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 35)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 26)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 17)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block7 >> 8)) & 511L);
+                values[valuesOffset++] = (int)(((block6 & 127L) << 2) | (block7.TripleShift(62)));
+                values[valuesOffset++] = (int)((block7.TripleShift(53)) & 511L);
+                values[valuesOffset++] = (int)((block7.TripleShift(44)) & 511L);
+                values[valuesOffset++] = (int)((block7.TripleShift(35)) & 511L);
+                values[valuesOffset++] = (int)((block7.TripleShift(26)) & 511L);
+                values[valuesOffset++] = (int)((block7.TripleShift(17)) & 511L);
+                values[valuesOffset++] = (int)((block7.TripleShift(8)) & 511L);
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = (int)(((block7 & 255L) << 1) | ((long)((ulong)block8 >> 63)));
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 54)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 45)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 36)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 27)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 18)) & 511L);
-                values[valuesOffset++] = (int)(((long)((ulong)block8 >> 9)) & 511L);
+                values[valuesOffset++] = (int)(((block7 & 255L) << 1) | (block8.TripleShift(63)));
+                values[valuesOffset++] = (int)((block8.TripleShift(54)) & 511L);
+                values[valuesOffset++] = (int)((block8.TripleShift(45)) & 511L);
+                values[valuesOffset++] = (int)((block8.TripleShift(36)) & 511L);
+                values[valuesOffset++] = (int)((block8.TripleShift(27)) & 511L);
+                values[valuesOffset++] = (int)((block8.TripleShift(18)) & 511L);
+                values[valuesOffset++] = (int)((block8.TripleShift(9)) & 511L);
                 values[valuesOffset++] = (int)(block8 & 511L);
             }
         }
@@ -115,19 +117,19 @@
             {
                 int byte0 = blocks[blocksOffset++] & 0xFF;
                 int byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 1) | ((int)((uint)byte1 >> 7));
+                values[valuesOffset++] = (byte0 << 1) | (byte1.TripleShift(7));
                 int byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 127) << 2) | ((int)((uint)byte2 >> 6));
+                values[valuesOffset++] = ((byte1 & 127) << 2) | (byte2.TripleShift(6));
                 int byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 63) << 3) | ((int)((uint)byte3 >> 5));
+                values[valuesOffset++] = ((byte2 & 63) << 3) | (byte3.TripleShift(5));
                 int byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte3 & 31) << 4) | ((int)((uint)byte4 >> 4));
+                values[valuesOffset++] = ((byte3 & 31) << 4) | (byte4.TripleShift(4));
                 int byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 15) << 5) | ((int)((uint)byte5 >> 3));
+                values[valuesOffset++] = ((byte4 & 15) << 5) | (byte5.TripleShift(3));
                 int byte6 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 7) << 6) | ((int)((uint)byte6 >> 2));
+                values[valuesOffset++] = ((byte5 & 7) << 6) | (byte6.TripleShift(2));
                 int byte7 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte6 & 3) << 7) | ((int)((uint)byte7 >> 1));
+                values[valuesOffset++] = ((byte6 & 3) << 7) | (byte7.TripleShift(1));
                 int byte8 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte7 & 1) << 8) | byte8;
             }
@@ -139,76 +141,76 @@
             {
                 long block0 = blocks[blocksOffset++];
                 values[valuesOffset++] = (long)((ulong)block0 >> 55);
-                values[valuesOffset++] = ((long)((ulong)block0 >> 46)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 37)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 28)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 19)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 10)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block0 >> 1)) & 511L;
+                values[valuesOffset++] = (block0.TripleShift(46)) & 511L;
+                values[valuesOffset++] = (block0.TripleShift(37)) & 511L;
+                values[valuesOffset++] = (block0.TripleShift(28)) & 511L;
+                values[valuesOffset++] = (block0.TripleShift(19)) & 511L;
+                values[valuesOffset++] = (block0.TripleShift(10)) & 511L;
+                values[valuesOffset++] = (block0.TripleShift(1)) & 511L;
                 long block1 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block0 & 1L) << 8) | ((long)((ulong)block1 >> 56));
-                values[valuesOffset++] = ((long)((ulong)block1 >> 47)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 38)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 29)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 20)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 11)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block1 >> 2)) & 511L;
+                values[valuesOffset++] = ((block0 & 1L) << 8) | (block1.TripleShift(56));
+                values[valuesOffset++] = (block1.TripleShift(47)) & 511L;
+                values[valuesOffset++] = (block1.TripleShift(38)) & 511L;
+                values[valuesOffset++] = (block1.TripleShift(29)) & 511L;
+                values[valuesOffset++] = (block1.TripleShift(20)) & 511L;
+                values[valuesOffset++] = (block1.TripleShift(11)) & 511L;
+                values[valuesOffset++] = (block1.TripleShift(2)) & 511L;
                 long block2 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block1 & 3L) << 7) | ((long)((ulong)block2 >> 57));
-                values[valuesOffset++] = ((long)((ulong)block2 >> 48)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 39)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 30)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 21)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 12)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block2 >> 3)) & 511L;
+                values[valuesOffset++] = ((block1 & 3L) << 7) | (block2.TripleShift(57));
+                values[valuesOffset++] = (block2.TripleShift(48)) & 511L;
+                values[valuesOffset++] = (block2.TripleShift(39)) & 511L;
+                values[valuesOffset++] = (block2.TripleShift(30)) & 511L;
+                values[valuesOffset++] = (block2.TripleShift(21)) & 511L;
+                values[valuesOffset++] = (block2.TripleShift(12)) & 511L;
+                values[valuesOffset++] = (block2.TripleShift(3)) & 511L;
                 long block3 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block2 & 7L) << 6) | ((long)((ulong)block3 >> 58));
-                values[valuesOffset++] = ((long)((ulong)block3 >> 49)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 40)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 31)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 22)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 13)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block3 >> 4)) & 511L;
+                values[valuesOffset++] = ((block2 & 7L) << 6) | (block3.TripleShift(58));
+                values[valuesOffset++] = (block3.TripleShift(49)) & 511L;
+                values[valuesOffset++] = (block3.TripleShift(40)) & 511L;
+                values[valuesOffset++] = (block3.TripleShift(31)) & 511L;
+                values[valuesOffset++] = (block3.TripleShift(22)) & 511L;
+                values[valuesOffset++] = (block3.TripleShift(13)) & 511L;
+                values[valuesOffset++] = (block3.TripleShift(4)) & 511L;
                 long block4 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block3 & 15L) << 5) | ((long)((ulong)block4 >> 59));
-                values[valuesOffset++] = ((long)((ulong)block4 >> 50)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 41)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 32)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 23)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 14)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block4 >> 5)) & 511L;
+                values[valuesOffset++] = ((block3 & 15L) << 5) | (block4.TripleShift(59));
+                values[valuesOffset++] = (block4.TripleShift(50)) & 511L;
+                values[valuesOffset++] = (block4.TripleShift(41)) & 511L;
+                values[valuesOffset++] = (block4.TripleShift(32)) & 511L;
+                values[valuesOffset++] = (block4.TripleShift(23)) & 511L;
+                values[valuesOffset++] = (block4.TripleShift(14)) & 511L;
+                values[valuesOffset++] = (block4.TripleShift(5)) & 511L;
                 long block5 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block4 & 31L) << 4) | ((long)((ulong)block5 >> 60));
-                values[valuesOffset++] = ((long)((ulong)block5 >> 51)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 42)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 33)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 24)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 15)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block5 >> 6)) & 511L;
+                values[valuesOffset++] = ((block4 & 31L) << 4) | (block5.TripleShift(60));
+                values[valuesOffset++] = (block5.TripleShift(51)) & 511L;
+                values[valuesOffset++] = (block5.TripleShift(42)) & 511L;
+                values[valuesOffset++] = (block5.TripleShift(33)) & 511L;
+                values[valuesOffset++] = (block5.TripleShift(24)) & 511L;
+                values[valuesOffset++] = (block5.TripleShift(15)) & 511L;
+                values[valuesOffset++] = (block5.TripleShift(6)) & 511L;
                 long block6 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block5 & 63L) << 3) | ((long)((ulong)block6 >> 61));
-                values[valuesOffset++] = ((long)((ulong)block6 >> 52)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 43)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 34)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 25)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 16)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block6 >> 7)) & 511L;
+                values[valuesOffset++] = ((block5 & 63L) << 3) | (block6.TripleShift(61));
+                values[valuesOffset++] = (block6.TripleShift(52)) & 511L;
+                values[valuesOffset++] = (block6.TripleShift(43)) & 511L;
+                values[valuesOffset++] = (block6.TripleShift(34)) & 511L;
+                values[valuesOffset++] = (block6.TripleShift(25)) & 511L;
+                values[valuesOffset++] = (block6.TripleShift(16)) & 511L;
+                values[valuesOffset++] = (block6.TripleShift(7)) & 511L;
                 long block7 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block6 & 127L) << 2) | ((long)((ulong)block7 >> 62));
-                values[valuesOffset++] = ((long)((ulong)block7 >> 53)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 44)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 35)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 26)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 17)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block7 >> 8)) & 511L;
+                values[valuesOffset++] = ((block6 & 127L) << 2) | (block7.TripleShift(62));
+                values[valuesOffset++] = (block7.TripleShift(53)) & 511L;
+                values[valuesOffset++] = (block7.TripleShift(44)) & 511L;
+                values[valuesOffset++] = (block7.TripleShift(35)) & 511L;
+                values[valuesOffset++] = (block7.TripleShift(26)) & 511L;
+                values[valuesOffset++] = (block7.TripleShift(17)) & 511L;
+                values[valuesOffset++] = (block7.TripleShift(8)) & 511L;
                 long block8 = blocks[blocksOffset++];
-                values[valuesOffset++] = ((block7 & 255L) << 1) | ((long)((ulong)block8 >> 63));
-                values[valuesOffset++] = ((long)((ulong)block8 >> 54)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 45)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 36)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 27)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 18)) & 511L;
-                values[valuesOffset++] = ((long)((ulong)block8 >> 9)) & 511L;
+                values[valuesOffset++] = ((block7 & 255L) << 1) | (block8.TripleShift(63));
+                values[valuesOffset++] = (block8.TripleShift(54)) & 511L;
+                values[valuesOffset++] = (block8.TripleShift(45)) & 511L;
+                values[valuesOffset++] = (block8.TripleShift(36)) & 511L;
+                values[valuesOffset++] = (block8.TripleShift(27)) & 511L;
+                values[valuesOffset++] = (block8.TripleShift(18)) & 511L;
+                values[valuesOffset++] = (block8.TripleShift(9)) & 511L;
                 values[valuesOffset++] = block8 & 511L;
             }
         }
@@ -219,19 +221,19 @@
             {
                 long byte0 = blocks[blocksOffset++] & 0xFF;
                 long byte1 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = (byte0 << 1) | ((long)((ulong)byte1 >> 7));
+                values[valuesOffset++] = (byte0 << 1) | (byte1.TripleShift(7));
                 long byte2 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte1 & 127) << 2) | ((long)((ulong)byte2 >> 6));
+                values[valuesOffset++] = ((byte1 & 127) << 2) | (byte2.TripleShift(6));
                 long byte3 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte2 & 63) << 3) | ((long)((ulong)byte3 >> 5));
+                values[valuesOffset++] = ((byte2 & 63) << 3) | (byte3.TripleShift(5));
                 long byte4 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte3 & 31) << 4) | ((long)((ulong)byte4 >> 4));
+                values[valuesOffset++] = ((byte3 & 31) << 4) | (byte4.TripleShift(4));
                 long byte5 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte4 & 15) << 5) | ((long)((ulong)byte5 >> 3));
+                values[valuesOffset++] = ((byte4 & 15) << 5) | (byte5.TripleShift(3));
                 long byte6 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte5 & 7) << 6) | ((long)((ulong)byte6 >> 2));
+                values[valuesOffset++] = ((byte5 & 7) << 6) | (byte6.TripleShift(2));
                 long byte7 = blocks[blocksOffset++] & 0xFF;
-                values[valuesOffset++] = ((byte6 & 3) << 7) | ((long)((ulong)byte7 >> 1));
+                values[valuesOffset++] = ((byte6 & 3) << 7) | (byte7.TripleShift(1));
                 long byte8 = blocks[blocksOffset++] & 0xFF;
                 values[valuesOffset++] = ((byte7 & 1) << 8) | byte8;
             }
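
For reference, the generated decoders above walk a big-endian packed layout: each value sits `bitsPerValue` bits further down in the current 64-bit block, and a value that straddles a block boundary is stitched together from the low bits of one block and the high bits of the next (e.g. `((block0 & 1L) << 8) | block1.TripleShift(56)` above). A rough generic sketch of that walk, written with the plain cast idiom so it has no J2N dependency; the names are illustrative, and the real classes stay fully unrolled per `bitsPerValue` for speed:

```csharp
internal static class PackedDecodeSketch
{
    // Decodes 'valueCount' big-endian packed values of 'bitsPerValue' bits each
    // (1 <= bitsPerValue <= 63); assumes 'blocks' holds enough 64-bit longs.
    internal static long[] Decode(long[] blocks, int bitsPerValue, int valueCount)
    {
        long mask = (1L << bitsPerValue) - 1L;
        var values = new long[valueCount];
        int blockIndex = 0;
        int bitsLeft = 64; // bits not yet consumed in blocks[blockIndex]

        for (int i = 0; i < valueCount; i++)
        {
            if (bitsLeft >= bitsPerValue)
            {
                bitsLeft -= bitsPerValue;
                values[i] = (long)((ulong)blocks[blockIndex] >> bitsLeft) & mask;
            }
            else
            {
                // Straddles a boundary: low bits of this block + high bits of the next.
                int bitsFromNext = bitsPerValue - bitsLeft;
                long high = (blocks[blockIndex] & ((1L << bitsLeft) - 1L)) << bitsFromNext;
                blockIndex++;
                bitsLeft = 64 - bitsFromNext;
                values[i] = high | (long)((ulong)blocks[blockIndex] >> bitsLeft);
            }
        }
        return values;
    }
}
```

For `bitsPerValue = 9` this reproduces the shift sequence 55, 46, 37, … seen in the hunk above.
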
diff --git a/src/Lucene.Net/Util/Packed/BulkOperationPackedSingleBlock.cs b/src/Lucene.Net/Util/Packed/BulkOperationPackedSingleBlock.cs
index c7b9bb3..b09fcbf 100644
--- a/src/Lucene.Net/Util/Packed/BulkOperationPackedSingleBlock.cs
+++ b/src/Lucene.Net/Util/Packed/BulkOperationPackedSingleBlock.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using System;
 
 namespace Lucene.Net.Util.Packed
@@ -67,7 +68,7 @@
             values[valuesOffset++] = block & mask;
             for (int j = 1; j < valueCount; ++j)
             {
-                block = (long)((ulong)block >> bitsPerValue);
+                block = block.TripleShift(bitsPerValue);
                 values[valuesOffset++] = block & mask;
             }
             return valuesOffset;
@@ -78,7 +79,7 @@
             values[valuesOffset++] = (int)(block & mask);
             for (int j = 1; j < valueCount; ++j)
             {
-                block = (long)((ulong)block >> bitsPerValue);
+                block = block.TripleShift(bitsPerValue);
                 values[valuesOffset++] = (int)(block & mask);
             }
             return valuesOffset;
diff --git a/src/Lucene.Net/Util/Packed/DirectPacked64SingleBlockReader.cs b/src/Lucene.Net/Util/Packed/DirectPacked64SingleBlockReader.cs
index b4ecd33..4c618b6 100644
--- a/src/Lucene.Net/Util/Packed/DirectPacked64SingleBlockReader.cs
+++ b/src/Lucene.Net/Util/Packed/DirectPacked64SingleBlockReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using System;
 using System.IO;
 using System.Runtime.CompilerServices;
@@ -49,7 +50,7 @@
 
                 long block = @in.ReadInt64();
                 int offsetInBlock = index % valuesPerBlock;
-                return ((long)((ulong)block >> (offsetInBlock * m_bitsPerValue))) & mask;
+                return (block.TripleShift(offsetInBlock * m_bitsPerValue)) & mask;
             }
             catch (IOException e)
             {
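
The single-block format read by `DirectPacked64SingleBlockReader` lays values out low-to-high inside each 64-bit block, `64 / bitsPerValue` of them per block, so a lookup is a division, a modulo, a shift and a mask. A hypothetical in-memory stand-in for the extraction step visible in the hunk (the real reader seeks an `IndexInput` to the block instead of indexing an array, and handles I/O errors):

```csharp
internal static class SingleBlockSketch
{
    // Sketch only: mirrors the addressing shown above, not the reader's I/O.
    internal static long Get(long[] blocks, int bitsPerValue, int index)
    {
        int valuesPerBlock = 64 / bitsPerValue;
        long mask = (1L << bitsPerValue) - 1L;

        long block = blocks[index / valuesPerBlock]; // the reader seeks to this block instead
        int offsetInBlock = index % valuesPerBlock;
        return (long)((ulong)block >> (offsetInBlock * bitsPerValue)) & mask;
    }
}
```
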
diff --git a/src/Lucene.Net/Util/Packed/DirectPackedReader.cs b/src/Lucene.Net/Util/Packed/DirectPackedReader.cs
index 6829ed6..754ca7c 100644
--- a/src/Lucene.Net/Util/Packed/DirectPackedReader.cs
+++ b/src/Lucene.Net/Util/Packed/DirectPackedReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using System;
 using System.IO;
 using System.Runtime.CompilerServices;
@@ -50,7 +51,7 @@
         public override long Get(int index)
         {
             long majorBitPos = (long)index * m_bitsPerValue;
-            long elementPos = (long)((ulong)majorBitPos >> 3);
+            long elementPos = majorBitPos.TripleShift(3);
             try
             {
                 @in.Seek(startPointer + elementPos);
@@ -62,7 +63,7 @@
                 int shiftRightBits = roundedBits - bitPos - m_bitsPerValue;
 
                 long rawValue;
-                switch ((int)((uint)roundedBits >> 3))
+                switch (roundedBits.TripleShift(3))
                 {
                     case 1:
                         rawValue = @in.ReadByte();
@@ -106,7 +107,7 @@
                     default:
                         throw new InvalidOperationException("bitsPerValue too large: " + m_bitsPerValue);
                 }
-                return ((long)((ulong)rawValue >> shiftRightBits)) & valueMask;
+                return (rawValue.TripleShift(shiftRightBits)) & valueMask;
             }
             catch (IOException ioe)
             {
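
`DirectPackedReader.Get` addresses values at bit granularity in a byte stream: `majorBitPos` is the value's absolute bit position, `elementPos` (`majorBitPos.TripleShift(3)`) is the byte offset to seek to, and the `switch` reads just enough whole bytes before shifting off the trailing bits. A small worked example of that arithmetic, assuming the value's bit span is rounded up to whole bytes to obtain `roundedBits` (that rounding is not part of the hunk above):

```csharp
using System;

// Illustrative numbers only: bitsPerValue = 7, index = 3.
const int bitsPerValue = 7;
const int index = 3;

long majorBitPos = (long)index * bitsPerValue;            // 21: absolute bit position of the value
long elementPos  = (long)((ulong)majorBitPos >> 3);       // 2:  byte offset to seek to
int bitPos       = (int)(majorBitPos & 7);                // 5:  bit offset within that byte
int roundedBits  = ((bitPos + bitsPerValue + 7) / 8) * 8; // 16: value fits in two whole bytes
int shiftRight   = roundedBits - bitPos - bitsPerValue;   // 4:  trailing bits to discard

Console.WriteLine($"seek +{elementPos}, read {roundedBits / 8} byte(s), shift right by {shiftRight}");
```
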
diff --git a/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs
index 12a00a3..acdf86e 100644
--- a/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs
+++ b/src/Lucene.Net/Util/Packed/EliasFanoDecoder.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Globalization;
@@ -115,9 +115,9 @@
                 return 0;
             }
             long bitPos = packIndex * numBits;
-            int index = (int)((long)((ulong)bitPos >> LOG2_INT64_SIZE));
+            int index = (int)(bitPos.TripleShift(LOG2_INT64_SIZE));
             int bitPosAtIndex = (int)(bitPos & ((sizeof(long) * 8) - 1));
-            long value = (long)((ulong)longArray[index] >> bitPosAtIndex);
+            long value = longArray[index].TripleShift(bitPosAtIndex);
             if ((bitPosAtIndex + numBits) > (sizeof(long) * 8))
             {
                 value |= (longArray[index + 1] << ((sizeof(long) * 8) - bitPosAtIndex));
@@ -190,8 +190,8 @@
                 return false;
             }
             setBitForIndex += 1;
-            int highIndex = (int)((long)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
-            curHighLong = (long)((ulong)efEncoder.upperLongs[highIndex] >> CurrentRightShift);
+            int highIndex = (int)(setBitForIndex.TripleShift(LOG2_INT64_SIZE));
+            curHighLong = efEncoder.upperLongs[highIndex].TripleShift(CurrentRightShift);
             return true;
         }
 
@@ -205,7 +205,7 @@
         {
             setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1));
             //assert getCurrentRightShift() == 0;
-            int highIndex = (int)((long)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
+            int highIndex = (int)(setBitForIndex.TripleShift(LOG2_INT64_SIZE));
             curHighLong = efEncoder.upperLongs[highIndex];
         }
 
@@ -304,12 +304,12 @@
             }
             setBitForIndex += 1; // the high bit at setBitForIndex belongs to the unary code for efIndex
 
-            int highIndex = (int)((long)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
+            int highIndex = (int)(setBitForIndex.TripleShift(LOG2_INT64_SIZE));
             long upperLong = efEncoder.upperLongs[highIndex];
-            curHighLong = (long)((ulong)upperLong >> ((int)(setBitForIndex & ((sizeof(long) * 8) - 1)))); // may contain the unary 1 bit for efIndex
+            curHighLong = upperLong.TripleShift(((int)(setBitForIndex & ((sizeof(long) * 8) - 1)))); // may contain the unary 1 bit for efIndex
 
             // determine index entry to advance to
-            long highTarget = (long)((ulong)target >> efEncoder.numLowBits);
+            long highTarget = target.TripleShift(efEncoder.numLowBits);
 
             long indexEntryIndex = (highTarget / efEncoder.indexInterval) - 1;
             if (indexEntryIndex >= 0) // not before first index entry
@@ -324,9 +324,9 @@
                 {
                     setBitForIndex = UnPackValue(efEncoder.upperZeroBitPositionIndex, efEncoder.nIndexEntryBits, indexEntryIndex, indexMask);
                     efIndex = setBitForIndex - indexHighValue; // the high bit at setBitForIndex belongs to the unary code for efIndex
-                    highIndex = (int)(((ulong)setBitForIndex >> LOG2_INT64_SIZE));
+                    highIndex = (int)setBitForIndex.TripleShift(LOG2_INT64_SIZE);
                     upperLong = efEncoder.upperLongs[highIndex];
-                    curHighLong = (long)((ulong)upperLong >> ((int)(setBitForIndex & ((sizeof(long) * 8) - 1)))); // may contain the unary 1 bit for efIndex
+                    curHighLong = upperLong.TripleShift((int)(setBitForIndex & ((sizeof(long) * 8) - 1))); // may contain the unary 1 bit for efIndex
                 }
                 if (Debugging.AssertsEnabled) Debugging.Assert(efIndex < numEncoded); // there is a high value to be found.
             }
@@ -344,7 +344,7 @@
                 }
                 setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1));
                 // highIndex = (int)(setBitForIndex >>> LOG2_LONG_SIZE);
-                if (Debugging.AssertsEnabled) Debugging.Assert((highIndex + 1) == (int)((long)((ulong)setBitForIndex >> LOG2_INT64_SIZE)));
+                if (Debugging.AssertsEnabled) Debugging.Assert((highIndex + 1) == (int)(setBitForIndex.TripleShift(LOG2_INT64_SIZE)));
                 highIndex += 1;
                 upperLong = efEncoder.upperLongs[highIndex];
                 curHighLong = upperLong;
@@ -355,7 +355,7 @@
             while (curHighLong == 0L)
             {
                 setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1));
-                if (Debugging.AssertsEnabled) Debugging.Assert((highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
+                if (Debugging.AssertsEnabled) Debugging.Assert((highIndex + 1) == setBitForIndex.TripleShift(LOG2_INT64_SIZE));
                 highIndex += 1;
                 upperLong = efEncoder.upperLongs[highIndex];
                 curHighLong = upperLong;
@@ -379,22 +379,22 @@
 
                 if ((setBitForIndex & ((sizeof(long) * 8) - 1)) == 0L) // exhausted curHighLong
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert((highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
+                    if (Debugging.AssertsEnabled) Debugging.Assert((highIndex + 1) == setBitForIndex.TripleShift(LOG2_INT64_SIZE));
                     highIndex += 1;
                     upperLong = efEncoder.upperLongs[highIndex];
                     curHighLong = upperLong;
                 }
                 else
                 {
-                    if (Debugging.AssertsEnabled) Debugging.Assert(highIndex == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
-                    curHighLong = (long)((ulong)upperLong >> ((int)(setBitForIndex & ((sizeof(long) * 8) - 1))));
+                    if (Debugging.AssertsEnabled) Debugging.Assert(highIndex == setBitForIndex.TripleShift(LOG2_INT64_SIZE));
+                    curHighLong = upperLong.TripleShift(((int)(setBitForIndex & ((sizeof(long) * 8) - 1))));
                 }
                 // curHighLong has enough clear bits to reach highTarget, but may not have enough set bits.
 
                 while (curHighLong == 0L)
                 {
                     setBitForIndex += (sizeof(long) * 8) - (setBitForIndex & ((sizeof(long) * 8) - 1));
-                    if (Debugging.AssertsEnabled) Debugging.Assert((highIndex + 1) == (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE));
+                    if (Debugging.AssertsEnabled) Debugging.Assert((highIndex + 1) == setBitForIndex.TripleShift(LOG2_INT64_SIZE));
                     highIndex += 1;
                     upperLong = efEncoder.upperLongs[highIndex];
                     curHighLong = upperLong;
@@ -425,7 +425,7 @@
         public virtual void ToAfterSequence()
         {
             efIndex = numEncoded; // just after last index
-            setBitForIndex = ((long)((ulong)efEncoder.lastEncoded >> efEncoder.numLowBits)) + numEncoded;
+            setBitForIndex = (efEncoder.lastEncoded.TripleShift(efEncoder.numLowBits)) + numEncoded;
         }
 
         /// <returns> the number of bits in a long before (<see cref="setBitForIndex"/> modulo <c>sizeof(long)</c>) </returns>
@@ -450,7 +450,7 @@
                 return false;
             }
             setBitForIndex -= 1;
-            int highIndex = (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE);
+            int highIndex = (int)setBitForIndex.TripleShift(LOG2_INT64_SIZE);
             curHighLong = efEncoder.upperLongs[highIndex] << CurrentLeftShift;
             return true;
         }
@@ -465,7 +465,7 @@
         {
             setBitForIndex -= (setBitForIndex & ((sizeof(long) * 8) - 1)) + 1;
             //assert getCurrentLeftShift() == 0;
-            int highIndex = (int)((ulong)setBitForIndex >> LOG2_INT64_SIZE);
+            int highIndex = (int)setBitForIndex.TripleShift(LOG2_INT64_SIZE);
             curHighLong = efEncoder.upperLongs[highIndex];
         }
 
@@ -549,7 +549,7 @@
             {
                 return NO_MORE_VALUES;
             }
-            long highTarget = (long)((ulong)target >> efEncoder.numLowBits);
+            long highTarget = target.TripleShift(efEncoder.numLowBits);
             long highValue = BackToHighValue(highTarget);
             if (highValue == NO_MORE_VALUES)
             {
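
The recurring change in these hunks replaces the cast chain `(long)((ulong)x >> n)` with J2N's `TripleShift` extension method, which emulates Java's `>>>` (logical right shift). A minimal sketch of the equivalence, assuming only the `J2N.Numerics` namespace these files already import:

```
// Sketch only: why x.TripleShift(n) can stand in for the old cast pattern.
// TripleShift is a logical shift: zero bits come in from the left, which matters for negative values.
using J2N.Numerics;

long setBitForIndex = -1L;                           // all 64 bits set
long viaCasts = (long)((ulong)setBitForIndex >> 6);  // old pattern from the "-" lines
long viaJ2N = setBitForIndex.TripleShift(6);         // new pattern from the "+" lines
System.Console.WriteLine(viaCasts == viaJ2N);        // True
System.Console.WriteLine(viaCasts);                  // 288230376151711743 (58 low bits set)
```
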
diff --git a/src/Lucene.Net/Util/Packed/EliasFanoDocIdSet.cs b/src/Lucene.Net/Util/Packed/EliasFanoDocIdSet.cs
index ddcd907..bf79801 100644
--- a/src/Lucene.Net/Util/Packed/EliasFanoDocIdSet.cs
+++ b/src/Lucene.Net/Util/Packed/EliasFanoDocIdSet.cs
@@ -79,12 +79,12 @@
             {
                 throw new NotSupportedException("Highest encoded value too high for DocIdSetIterator.NO_MORE_DOCS: " + efEncoder.lastEncoded);
             }
-            return new DocIdSetIteratorAnonymousInnerClassHelper(this);
+            return new DocIdSetIteratorAnonymousClass(this);
         }
 
-        private class DocIdSetIteratorAnonymousInnerClassHelper : DocIdSetIterator
+        private class DocIdSetIteratorAnonymousClass : DocIdSetIterator
         {
-            public DocIdSetIteratorAnonymousInnerClassHelper(EliasFanoDocIdSet outerInstance)
+            public DocIdSetIteratorAnonymousClass(EliasFanoDocIdSet outerInstance)
             {
                 curDocId = -1;
                 efDecoder = outerInstance.efEncoder.GetDecoder();
diff --git a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs
index 6457e84..0d438c1 100644
--- a/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs
+++ b/src/Lucene.Net/Util/Packed/EliasFanoEncoder.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -81,7 +81,7 @@
     ///
     /// <para/>The articles originally describing the Elias-Fano representation are:
     /// <para/>Peter Elias, "Efficient storage and retrieval by content and address of static files",
-    /// J. Assoc. Comput. Mach., 21(2):246–260, 1974.
+    /// J. Assoc. Comput. Mach., 21(2):246–260, 1974.
     /// <para/>Robert M. Fano, "On the number of bits required to implement an associative memory",
     ///  Memorandum 61, Computer Structures Group, Project MAC, MIT, Cambridge, Mass., 1971.
     /// <para/>
@@ -169,7 +169,7 @@
                 }
             }
             this.numLowBits = nLowBits;
-            this.lowerBitsMask = (long)(unchecked((ulong)long.MaxValue) >> (sizeof(long) * 8 - 1 - this.numLowBits));
+            this.lowerBitsMask = long.MaxValue.TripleShift(sizeof(long) * 8 - 1 - this.numLowBits);
 
             long numLongsForLowBits = NumInt64sForBits(numValues * numLowBits);
             if (numLongsForLowBits > int.MaxValue)
@@ -178,7 +178,7 @@
             }
             this.lowerLongs = new long[(int)numLongsForLowBits];
 
-            long numHighBitsClear = (long)((ulong)((this.upperBound > 0) ? this.upperBound : 0) >> this.numLowBits);
+            long numHighBitsClear = ((this.upperBound > 0) ? this.upperBound : 0).TripleShift(this.numLowBits);
             if (Debugging.AssertsEnabled) Debugging.Assert(numHighBitsClear <= (2 * this.numValues));
             long numHighBitsSet = this.numValues;
 
@@ -193,7 +193,7 @@
                 throw new ArgumentException("indexInterval should at least 2: " + indexInterval);
             }
             // For the index:
-            long maxHighValue = (long)((ulong)upperBound >> this.numLowBits);
+            long maxHighValue = upperBound.TripleShift(this.numLowBits);
             long nIndexEntries = maxHighValue / indexInterval; // no zero value index entry
             this.numIndexEntries = (nIndexEntries >= 0) ? nIndexEntries : 0;
             long maxIndexEntry = maxHighValue + numValues - 1; // clear upper bits, set upper bits, start at zero
@@ -223,7 +223,7 @@
         private static long NumInt64sForBits(long numBits) // Note: int version in FixedBitSet.bits2words()
         {
             if (Debugging.AssertsEnabled) Debugging.Assert(numBits >= 0, "{0}", numBits);
-            return (long)((ulong)(numBits + (sizeof(long) * 8 - 1)) >> LOG2_INT64_SIZE);
+            return (numBits + (sizeof(long) * 8 - 1)).TripleShift(LOG2_INT64_SIZE);
         }
 
         /// <summary>
@@ -249,7 +249,7 @@
             {
                 throw new ArgumentException(x + " larger than upperBound " + upperBound);
             }
-            long highValue = (long)((ulong)x >> numLowBits);
+            long highValue = x.TripleShift(numLowBits);
             EncodeUpperBits(highValue);
             EncodeLowerBits(x & lowerBitsMask);
             lastEncoded = x;
@@ -269,7 +269,7 @@
         private void EncodeUpperBits(long highValue)
         {
             long nextHighBitNum = numEncoded + highValue; // sequence of unary gaps
-            upperLongs[(int)((long)((ulong)nextHighBitNum >> LOG2_INT64_SIZE))] |= (1L << (int)(nextHighBitNum & ((sizeof(long) * 8) - 1)));
+            upperLongs[(int)(nextHighBitNum.TripleShift(LOG2_INT64_SIZE))] |= (1L << (int)(nextHighBitNum & ((sizeof(long) * 8) - 1)));
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -284,12 +284,12 @@
             if (numBits != 0)
             {
                 long bitPos = numBits * packIndex;
-                int index = (int)((long)((ulong)bitPos >> LOG2_INT64_SIZE));
+                int index = (int)(bitPos.TripleShift(LOG2_INT64_SIZE));
                 int bitPosAtIndex = (int)(bitPos & ((sizeof(long) * 8) - 1));
                 longArray[index] |= (value << bitPosAtIndex);
                 if ((bitPosAtIndex + numBits) > (sizeof(long) * 8))
                 {
-                    longArray[index + 1] = ((long)((ulong)value >> ((sizeof(long) * 8) - bitPosAtIndex)));
+                    longArray[index + 1] = value.TripleShift((sizeof(long) * 8) - bitPosAtIndex);
                 }
             }
         }
diff --git a/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs b/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs
index ffb8afb..a59a619 100644
--- a/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs
+++ b/src/Lucene.Net/Util/Packed/MonotonicAppendingLongBuffer.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -36,7 +37,7 @@
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         internal static long ZigZagDecode(long n)
         {
-            return (((long)((ulong)n >> 1)) ^ -(n & 1));
+            return (n.TripleShift(1) ^ -(n & 1));
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
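
`ZigZagDecode` above undoes zig-zag coding, mapping small unsigned values back to small signed deltas. A round-trip sketch; the matching encoder is not part of this hunk and is written here from the standard zig-zag scheme, so treat it as an assumption:

```
// Sketch: zig-zag round trip. The encoder below is the standard (n << 1) ^ (n >> 63) form,
// assumed here as the counterpart of the patched decoder; it is not part of this hunk.
//   0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4, ...
using J2N.Numerics;

static long ZigZagEncode(long n) => (n << 1) ^ (n >> 63);         // assumed counterpart
static long ZigZagDecode(long n) => n.TripleShift(1) ^ -(n & 1);  // as in the patched method

foreach (long v in new[] { 0L, -1L, 1L, -2L, 123456789L, long.MinValue })
    System.Console.WriteLine(ZigZagDecode(ZigZagEncode(v)) == v); // True for each value
```
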
diff --git a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs
index 3acd831..cca6ea4 100644
--- a/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs
+++ b/src/Lucene.Net/Util/Packed/MonotonicBlockPackedReader.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Store;
 using System;
@@ -79,7 +80,7 @@
         public override long Get(long index)
         {
             if (Debugging.AssertsEnabled) Debugging.Assert(index >= 0 && index < valueCount);
-            int block = (int)((long)((ulong)index >> blockShift));
+            int block = (int)(index.TripleShift(blockShift));
             int idx = (int)(index & blockMask);
             // LUCENENET NOTE: IMPORTANT: The cast to float is critical here for it to work in x86
             return minValues[block] + (long)(float)(idx * averages[block]) + BlockPackedReaderIterator.ZigZagDecode(subReaders[block].Get(idx));
diff --git a/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs b/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs
index a19570e..e22793a 100644
--- a/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs
+++ b/src/Lucene.Net/Util/Packed/Packed16ThreeBlocks.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -89,8 +90,8 @@
         public override void Set(int index, long value)
         {
             int o = index * 3;
-            blocks[o] = (short)((long)((ulong)value >> 32));
-            blocks[o + 1] = (short)((long)((ulong)value >> 16));
+            blocks[o] = (short)(value.TripleShift(32));
+            blocks[o + 1] = (short)(value.TripleShift(16));
             blocks[o + 2] = (short)value;
         }
 
@@ -107,8 +108,8 @@
             for (int i = off, o = index * 3, end = off + sets; i < end; ++i)
             {
                 long value = arr[i];
-                blocks[o++] = (short)((long)((ulong)value >> 32));
-                blocks[o++] = (short)((long)((ulong)value >> 16));
+                blocks[o++] = (short)(value.TripleShift(32));
+                blocks[o++] = (short)(value.TripleShift(16));
                 blocks[o++] = (short)value;
             }
             return sets;
@@ -116,8 +117,8 @@
 
         public override void Fill(int fromIndex, int toIndex, long val)
         {
-            short block1 = (short)((long)((ulong)val >> 32));
-            short block2 = (short)((long)((ulong)val >> 16));
+            short block1 = (short)(val.TripleShift(32));
+            short block2 = (short)(val.TripleShift(16));
             short block3 = (short)val;
             for (int i = fromIndex * 3, end = toIndex * 3; i < end; i += 3)
             {
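
`Packed16ThreeBlocks` stores each value as three consecutive 16-bit blocks, which is what the three shifts above slice out. A small sketch of the split, with a hedged reconstruction (the corresponding `Get` is not shown in this hunk and is assumed to mask each block back to 16 unsigned bits):

```
// Sketch: splitting a 48-bit value into three shorts, mirroring the patched Set/Fill,
// plus a hedged reconstruction (assumes Get masks each block back to 16 unsigned bits).
using J2N.Numerics;

long value = 0x1234_5678_9ABCL;                  // fits in 48 bits
short b0 = (short)(value.TripleShift(32));       // 0x1234 (high block)
short b1 = (short)(value.TripleShift(16));       // 0x5678 (middle block)
short b2 = (short)value;                         // 0x9ABC (low block, stored as a negative short)

long roundTrip = ((b0 & 0xFFFFL) << 32) | ((b1 & 0xFFFFL) << 16) | (b2 & 0xFFFFL);
System.Console.WriteLine(roundTrip == value);    // True
```
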
diff --git a/src/Lucene.Net/Util/Packed/Packed64.cs b/src/Lucene.Net/Util/Packed/Packed64.cs
index 9c21f70..ef57134 100644
--- a/src/Lucene.Net/Util/Packed/Packed64.cs
+++ b/src/Lucene.Net/Util/Packed/Packed64.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -75,7 +76,7 @@
             PackedInt32s.Format format = PackedInt32s.Format.PACKED;
             int longCount = format.Int64Count(PackedInt32s.VERSION_CURRENT, valueCount, bitsPerValue);
             this.blocks = new long[longCount];
-            maskRight = (long)((ulong)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue));
+            maskRight = (~0L << (BLOCK_SIZE - bitsPerValue)).TripleShift(BLOCK_SIZE - bitsPerValue);
             bpvMinusBlockSize = bitsPerValue - BLOCK_SIZE;
         }
 
@@ -109,7 +110,7 @@
                 }
                 blocks[blocks.Length - 1] = lastLong;
             }
-            maskRight = (long)((ulong)(~0L << (BLOCK_SIZE - bitsPerValue)) >> (BLOCK_SIZE - bitsPerValue));
+            maskRight = (~0L << (BLOCK_SIZE - bitsPerValue)).TripleShift(BLOCK_SIZE - bitsPerValue);
             bpvMinusBlockSize = bitsPerValue - BLOCK_SIZE;
         }
 
@@ -120,16 +121,16 @@
             // The abstract index in a bit stream
             long majorBitPos = (long)index * m_bitsPerValue;
             // The index in the backing long-array
-            int elementPos = (int)(((ulong)majorBitPos) >> BLOCK_BITS);
+            int elementPos = (int)majorBitPos.TripleShift(BLOCK_BITS);
             // The number of value-bits in the second long
             long endBits = (majorBitPos & MOD_MASK) + bpvMinusBlockSize;
 
             if (endBits <= 0) // Single block
             {
-                return ((long)((ulong)blocks[elementPos] >> (int)-endBits)) & maskRight;
+                return (blocks[elementPos].TripleShift((int)-endBits)) & maskRight;
             }
             // Two blocks
-            return ((blocks[elementPos] << (int)endBits) | ((long)((ulong)blocks[elementPos + 1] >> (int)(BLOCK_SIZE - endBits)))) & maskRight;
+            return ((blocks[elementPos] << (int)endBits) | (blocks[elementPos + 1].TripleShift((int)(BLOCK_SIZE - endBits)))) & maskRight;
         }
 
         public override int Get(int index, long[] arr, int off, int len)
@@ -159,7 +160,7 @@
 
             // bulk get
             if (Debugging.AssertsEnabled) Debugging.Assert(index % decoder.Int64ValueCount == 0);
-            int blockIndex = (int)((ulong)((long)index * m_bitsPerValue) >> BLOCK_BITS);
+            int blockIndex = (int)(((long)index * m_bitsPerValue).TripleShift(BLOCK_BITS));
             if (Debugging.AssertsEnabled) Debugging.Assert((((long)index * m_bitsPerValue) & MOD_MASK) == 0);
             int iterations = len / decoder.Int64ValueCount;
             decoder.Decode(blocks, blockIndex, arr, off, iterations);
@@ -186,7 +187,7 @@
             // The abstract index in a contiguous bit stream
             long majorBitPos = (long)index * m_bitsPerValue;
             // The index in the backing long-array
-            int elementPos = (int)((long)((ulong)majorBitPos >> BLOCK_BITS)); // / BLOCK_SIZE
+            int elementPos = (int)(majorBitPos.TripleShift(BLOCK_BITS)); // / BLOCK_SIZE
             // The number of value-bits in the second long
             long endBits = (majorBitPos & MOD_MASK) + bpvMinusBlockSize;
 
@@ -196,8 +197,10 @@
                 return;
             }
             // Two blocks
-            blocks[elementPos] = blocks[elementPos] & ~((long)((ulong)maskRight >> (int)endBits)) | ((long)((ulong)value >> (int)endBits));
-            blocks[elementPos + 1] = blocks[elementPos + 1] & ((long)(unchecked((ulong)~0L) >> (int)endBits)) | (value << (int)(BLOCK_SIZE - endBits));
+            blocks[elementPos] = blocks[elementPos] & ~(maskRight.TripleShift((int)endBits))
+                | (value.TripleShift((int)endBits));
+            blocks[elementPos + 1] = blocks[elementPos + 1] & (~0L).TripleShift((int)endBits)
+                | (value << (int)(BLOCK_SIZE - endBits));
         }
 
         public override int Set(int index, long[] arr, int off, int len)
@@ -227,7 +230,7 @@
 
             // bulk set
             if (Debugging.AssertsEnabled) Debugging.Assert(index % encoder.Int64ValueCount == 0);
-            int blockIndex = (int)((ulong)((long)index * m_bitsPerValue) >> BLOCK_BITS);
+            int blockIndex = (int)(((long)index * m_bitsPerValue).TripleShift(BLOCK_BITS));
             if (Debugging.AssertsEnabled) Debugging.Assert((((long)index * m_bitsPerValue) & MOD_MASK) == 0);
             int iterations = len / encoder.Int64ValueCount;
             encoder.Encode(arr, off, blocks, blockIndex, iterations);
@@ -308,8 +311,8 @@
                 nAlignedValuesBlocks = values.blocks;
                 if (Debugging.AssertsEnabled) Debugging.Assert(nAlignedBlocks <= nAlignedValuesBlocks.Length);
             }
-            int startBlock = (int)((ulong)((long)fromIndex * m_bitsPerValue) >> 6);
-            int endBlock = (int)((ulong)((long)toIndex * m_bitsPerValue) >> 6);
+            int startBlock = (int)(((long)fromIndex * m_bitsPerValue).TripleShift(6));
+            int endBlock = (int)(((long)toIndex * m_bitsPerValue).TripleShift(6));
             for (int block = startBlock; block < endBlock; ++block)
             {
                 long blockValue = nAlignedValuesBlocks[block % nAlignedBlocks];
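
In the `Packed64` hunks, `majorBitPos.TripleShift(BLOCK_BITS)` is simply the index of the 64-bit block that holds the first bit of the value. A worked sketch of the index arithmetic, assuming `BLOCK_SIZE = 64`, `BLOCK_BITS = 6` and `MOD_MASK = 63` as used by this class:

```
// Sketch of the Packed64 index math, assuming BLOCK_SIZE = 64, BLOCK_BITS = 6, MOD_MASK = 63.
// Example: bitsPerValue = 21, index = 5.
using J2N.Numerics;

int bitsPerValue = 21;
long index = 5;

long majorBitPos = index * bitsPerValue;                  // 105: absolute bit position of the value
int elementPos = (int)majorBitPos.TripleShift(6);         // 1: the value starts in blocks[1]
long endBits = (majorBitPos & 63) + (bitsPerValue - 64);  // 41 - 43 = -2

// endBits <= 0 means the whole value sits in blocks[elementPos];
// a positive endBits means it spills that many bits into blocks[elementPos + 1].
System.Console.WriteLine($"{elementPos}, {endBits}");     // 1, -2
```
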
diff --git a/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs b/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs
index 29de919..9ad4d5f 100644
--- a/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs
+++ b/src/Lucene.Net/Util/Packed/Packed64SingleBlock.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -309,15 +310,15 @@
 
             public override long Get(int index)
             {
-                int o = (int)((uint)index >> 6);
+                int o = index.TripleShift(6);
                 int b = index & 63;
                 int shift = b << 0;
-                return ((long)((ulong)blocks[o] >> shift)) & 1L;
+                return (blocks[o].TripleShift(shift)) & 1L;
             }
 
             public override void Set(int index, long value)
             {
-                int o = (int)((uint)index >> 6);
+                int o = index.TripleShift(6);
                 int b = index & 63;
                 int shift = b << 0;
                 blocks[o] = (blocks[o] & ~(1L << shift)) | (value << shift);
@@ -333,15 +334,15 @@
 
             public override long Get(int index)
             {
-                int o = (int)((uint)index >> 5);
+                int o = index.TripleShift(5);
                 int b = index & 31;
                 int shift = b << 1;
-                return ((long)((ulong)blocks[o] >> shift)) & 3L;
+                return (blocks[o].TripleShift(shift)) & 3L;
             }
 
             public override void Set(int index, long value)
             {
-                int o = (int)((uint)index >> 5);
+                int o = index.TripleShift(5);
                 int b = index & 31;
                 int shift = b << 1;
                 blocks[o] = (blocks[o] & ~(3L << shift)) | (value << shift);
@@ -360,7 +361,7 @@
                 int o = index / 21;
                 int b = index % 21;
                 int shift = b * 3;
-                return ((long)((ulong)blocks[o] >> shift)) & 7L;
+                return (blocks[o].TripleShift(shift)) & 7L;
             }
 
             public override void Set(int index, long value)
@@ -381,15 +382,15 @@
 
             public override long Get(int index)
             {
-                int o = (int)((uint)index >> 4);
+                int o = index.TripleShift(4);
                 int b = index & 15;
                 int shift = b << 2;
-                return ((long)((ulong)blocks[o] >> shift)) & 15L;
+                return (blocks[o].TripleShift(shift)) & 15L;
             }
 
             public override void Set(int index, long value)
             {
-                int o = (int)((uint)index >> 4);
+                int o = index.TripleShift(4);
                 int b = index & 15;
                 int shift = b << 2;
                 blocks[o] = (blocks[o] & ~(15L << shift)) | (value << shift);
@@ -408,7 +409,7 @@
                 int o = index / 12;
                 int b = index % 12;
                 int shift = b * 5;
-                return ((long)((ulong)blocks[o] >> shift)) & 31L;
+                return (blocks[o].TripleShift(shift)) & 31L;
             }
 
             public override void Set(int index, long value)
@@ -432,7 +433,7 @@
                 int o = index / 10;
                 int b = index % 10;
                 int shift = b * 6;
-                return ((long)((ulong)blocks[o] >> shift)) & 63L;
+                return (blocks[o].TripleShift(shift)) & 63L;
             }
 
             public override void Set(int index, long value)
@@ -456,7 +457,7 @@
                 int o = index / 9;
                 int b = index % 9;
                 int shift = b * 7;
-                return ((long)((ulong)blocks[o] >> shift)) & 127L;
+                return (blocks[o].TripleShift(shift)) & 127L;
             }
 
             public override void Set(int index, long value)
@@ -477,15 +478,15 @@
 
             public override long Get(int index)
             {
-                int o = (int)((uint)index >> 3);
+                int o = index.TripleShift(3);
                 int b = index & 7;
                 int shift = b << 3;
-                return ((long)((ulong)blocks[o] >> shift)) & 255L;
+                return (blocks[o].TripleShift(shift)) & 255L;
             }
 
             public override void Set(int index, long value)
             {
-                int o = (int)((uint)index >> 3);
+                int o = index.TripleShift(3);
                 int b = index & 7;
                 int shift = b << 3;
                 blocks[o] = (blocks[o] & ~(255L << shift)) | (value << shift);
@@ -504,7 +505,7 @@
                 int o = index / 7;
                 int b = index % 7;
                 int shift = b * 9;
-                return ((long)((ulong)blocks[o] >> shift)) & 511L;
+                return (blocks[o].TripleShift(shift)) & 511L;
             }
 
             public override void Set(int index, long value)
@@ -528,7 +529,7 @@
                 int o = index / 6;
                 int b = index % 6;
                 int shift = b * 10;
-                return ((long)((ulong)blocks[o] >> shift)) & 1023L;
+                return (blocks[o].TripleShift(shift)) & 1023L;
             }
 
             public override void Set(int index, long value)
@@ -552,7 +553,7 @@
                 int o = index / 5;
                 int b = index % 5;
                 int shift = b * 12;
-                return ((long)((ulong)blocks[o] >> shift)) & 4095L;
+                return (blocks[o].TripleShift(shift)) & 4095L;
             }
 
             public override void Set(int index, long value)
@@ -573,15 +574,15 @@
 
             public override long Get(int index)
             {
-                int o = (int)((uint)index >> 2);
+                int o = index.TripleShift(2);
                 int b = index & 3;
                 int shift = b << 4;
-                return ((long)((ulong)blocks[o] >> shift)) & 65535L;
+                return (blocks[o].TripleShift(shift)) & 65535L;
             }
 
             public override void Set(int index, long value)
             {
-                int o = (int)((uint)index >> 2);
+                int o = index.TripleShift(2);
                 int b = index & 3;
                 int shift = b << 4;
                 blocks[o] = (blocks[o] & ~(65535L << shift)) | (value << shift);
@@ -600,7 +601,7 @@
                 int o = index / 3;
                 int b = index % 3;
                 int shift = b * 21;
-                return ((long)((ulong)blocks[o] >> shift)) & 2097151L;
+                return (blocks[o].TripleShift(shift)) & 2097151L;
             }
 
             public override void Set(int index, long value)
@@ -621,15 +622,15 @@
 
             public override long Get(int index)
             {
-                int o = (int)((uint)index >> 1);
+                int o = index.TripleShift(1);
                 int b = index & 1;
                 int shift = b << 5;
-                return ((long)((ulong)blocks[o] >> shift)) & 4294967295L;
+                return (blocks[o].TripleShift(shift)) & 4294967295L;
             }
 
             public override void Set(int index, long value)
             {
-                int o = (int)((uint)index >> 1);
+                int o = index.TripleShift(1);
                 int b = index & 1;
                 int shift = b << 5;
                 blocks[o] = (blocks[o] & ~(4294967295L << shift)) | (value << shift);
diff --git a/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs b/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs
index 4ea8619..7539500 100644
--- a/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs
+++ b/src/Lucene.Net/Util/Packed/Packed8ThreeBlocks.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -86,8 +87,8 @@
         public override void Set(int index, long value)
         {
             int o = index * 3;
-            blocks[o] = (byte)((long)((ulong)value >> 16));
-            blocks[o + 1] = (byte)((long)((ulong)value >> 8));
+            blocks[o] = (byte)(value.TripleShift(16));
+            blocks[o + 1] = (byte)(value.TripleShift(8));
             blocks[o + 2] = (byte)value;
         }
 
@@ -104,8 +105,8 @@
             for (int i = off, o = index * 3, end = off + sets; i < end; ++i)
             {
                 long value = arr[i];
-                blocks[o++] = (byte)((long)((ulong)value >> 16));
-                blocks[o++] = (byte)((long)((ulong)value >> 8));
+                blocks[o++] = (byte)(value.TripleShift(16));
+                blocks[o++] = (byte)(value.TripleShift(8));
                 blocks[o++] = (byte)value;
             }
             return sets;
@@ -113,8 +114,8 @@
 
         public override void Fill(int fromIndex, int toIndex, long val)
         {
-            var block1 = (byte)((long)((ulong)val >> 16));
-            var block2 = (byte)((long)((ulong)val >> 8));
+            var block1 = (byte)(val.TripleShift(16));
+            var block2 = (byte)(val.TripleShift(8));
             var block3 = (byte)val;
             for (int i = fromIndex * 3, end = toIndex * 3; i < end; i += 3)
             {
diff --git a/src/Lucene.Net/Util/Packed/PackedDataInput.cs b/src/Lucene.Net/Util/Packed/PackedDataInput.cs
index 3a64ffd..c859f25 100644
--- a/src/Lucene.Net/Util/Packed/PackedDataInput.cs
+++ b/src/Lucene.Net/Util/Packed/PackedDataInput.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Runtime.CompilerServices;
@@ -63,7 +64,7 @@
                     remainingBits = 8;
                 }
                 int bits = Math.Min(bitsPerValue, remainingBits);
-                r = (r << bits) | (((long)((ulong)current >> (remainingBits - bits))) & ((1L << bits) - 1));
+                r = (r << bits) | ((current.TripleShift((remainingBits - bits))) & ((1L << bits) - 1));
                 bitsPerValue -= bits;
                 remainingBits -= bits;
             }
diff --git a/src/Lucene.Net/Util/Packed/PackedDataOutput.cs b/src/Lucene.Net/Util/Packed/PackedDataOutput.cs
index 39e13da..48e193a 100644
--- a/src/Lucene.Net/Util/Packed/PackedDataOutput.cs
+++ b/src/Lucene.Net/Util/Packed/PackedDataOutput.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Runtime.CompilerServices;
@@ -58,12 +59,12 @@
             {
                 if (remainingBits == 0)
                 {
-                    @out.WriteByte((byte)(sbyte)current);
+                    @out.WriteByte((byte)current);
                     current = 0L;
                     remainingBits = 8;
                 }
                 int bits = Math.Min(remainingBits, bitsPerValue);
-                current = current | ((((long)((ulong)value >> (bitsPerValue - bits))) & ((1L << bits) - 1)) << (remainingBits - bits));
+                current = current | (((value.TripleShift((bitsPerValue - bits))) & ((1L << bits) - 1)) << (remainingBits - bits));
                 bitsPerValue -= bits;
                 remainingBits -= bits;
             }
@@ -77,7 +78,7 @@
         {
             if (remainingBits < 8)
             {
-                @out.WriteByte((byte)(sbyte)current);
+                @out.WriteByte((byte)current);
             }
             remainingBits = 8;
             current = 0L;
diff --git a/src/Lucene.Net/Util/Packed/PackedInts.cs b/src/Lucene.Net/Util/Packed/PackedInts.cs
index 579d963..c2f3662 100644
--- a/src/Lucene.Net/Util/Packed/PackedInts.cs
+++ b/src/Lucene.Net/Util/Packed/PackedInts.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -1085,7 +1085,7 @@
                     // stream, but this is not true because packed ints storage used to be
                     // long-aligned and is now byte-aligned, hence this additional
                     // condition when reading the last value
-                    return new DirectPackedReaderAnonymousInnerClassHelper(bitsPerValue, valueCount, @in, endPointer);
+                    return new DirectPackedReaderAnonymousClass(bitsPerValue, valueCount, @in, endPointer);
                 }
                 else
                 {
@@ -1098,13 +1098,13 @@
             }
         }
 
-        private class DirectPackedReaderAnonymousInnerClassHelper : DirectPackedReader
+        private class DirectPackedReaderAnonymousClass : DirectPackedReader
         {
             private readonly IndexInput @in;
             private readonly int valueCount;
             private readonly long endPointer;
 
-            public DirectPackedReaderAnonymousInnerClassHelper(int bitsPerValue, int valueCount, IndexInput @in, long endPointer)
+            public DirectPackedReaderAnonymousClass(int bitsPerValue, int valueCount, IndexInput @in, long endPointer)
                 : base(bitsPerValue, valueCount, @in)
             {
                 this.@in = @in;
@@ -1385,7 +1385,7 @@
                 Debugging.Assert(srcPos + len <= src.Count);
                 Debugging.Assert(destPos + len <= dest.Count);
             }
-            int capacity = (int)((uint)mem >> 3);
+            int capacity = mem.TripleShift(3);
             if (capacity == 0)
             {
                 for (int i = 0; i < len; ++i)
diff --git a/src/Lucene.Net/Util/PriorityQueue.cs b/src/Lucene.Net/Util/PriorityQueue.cs
index f2270a3..f331ae2 100644
--- a/src/Lucene.Net/Util/PriorityQueue.cs
+++ b/src/Lucene.Net/Util/PriorityQueue.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Support;
 using System;
 using System.Collections.Generic;
@@ -291,12 +292,12 @@
         {
             int i = size;
             T node = heap[i]; // save bottom node
-            int j = (int)((uint)i >> 1);
+            int j = i.TripleShift(1);
             while (j > 0 && LessThan(node, heap[j]))
             {
                 heap[i] = heap[j]; // shift parents down
                 i = j;
-                j = (int)((uint)j >> 1);
+                j = j.TripleShift(1);
             }
             heap[i] = node; // install saved node
         }
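
The `UpHeap` change relies on the queue's 1-based heap layout (root at `heap[1]`, `heap[0]` unused), which the `j > 0` guard above suggests: the parent of slot `i` is `i / 2`, and `i.TripleShift(1)` is that division for non-negative `i`. A small sketch under that layout assumption:

```
// Sketch: 1-based binary heap indexing as assumed by UpHeap.
// parent(i) = i >>> 1; children of i are 2i and 2i + 1; heap[0] is unused.
using J2N.Numerics;

static int Parent(int i) => i.TripleShift(1);

for (int i = 2; i <= 7; i++)
    System.Console.WriteLine($"parent of {i} is {Parent(i)}");
// parent of 2 is 1, parent of 3 is 1, parent of 4 is 2, parent of 5 is 2, ...
```
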
diff --git a/src/Lucene.Net/Util/RamUsageEstimator.cs b/src/Lucene.Net/Util/RamUsageEstimator.cs
index 6252b31..697d263 100644
--- a/src/Lucene.Net/Util/RamUsageEstimator.cs
+++ b/src/Lucene.Net/Util/RamUsageEstimator.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using J2N.Runtime.CompilerServices;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
@@ -929,7 +929,7 @@
                 if (Debugging.AssertsEnabled)
                 {
                     Debugging.Assert(current > 0 && ((current & (current - 1)) == 0), "Capacity must be a power of two.");
-                    Debugging.Assert((current << 1) > 0, "Maximum capacity exceeded ({0}).", ((int)((uint)0x80000000 >> 1)));
+                    Debugging.Assert((current << 1) > 0, "Maximum capacity exceeded ({0}).", ((int)(0x80000000 >> 1))); // LUCENENET: No need to cast to uint because it already is
                 }
 
                 if (current < MIN_CAPACITY / 2)
@@ -945,9 +945,9 @@
             private int RoundCapacity(int requestedCapacity) // LUCENENET NOTE: made private, since protected is not valid in a sealed class
             {
                 // Maximum positive integer that is a power of two.
-                if (requestedCapacity > ((int)((uint)0x80000000 >> 1)))
+                if (requestedCapacity > ((int)(0x80000000 >> 1))) // LUCENENET: No need to cast to uint because it already is
                 {
-                    return ((int)((uint)0x80000000 >> 1));
+                    return ((int)(0x80000000 >> 1)); // LUCENENET: No need to cast to uint because it already is
                 }
 
                 int capacity = MIN_CAPACITY;
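
The LUCENENET comments above lean on a C# literal rule: `0x80000000` does not fit in `int`, so the compiler already types it as `uint`, and shifting it right by one gives `0x40000000` without any extra cast. A quick check of that claim:

```
// Sketch: 0x80000000 is already a uint literal in C#, so no extra cast is needed before shifting.
var literal = 0x80000000;
System.Console.WriteLine(literal.GetType());       // System.UInt32
System.Console.WriteLine((int)(0x80000000 >> 1));  // 1073741824 (0x40000000, the largest power-of-two int)
```
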
@@ -979,7 +979,7 @@
             [MethodImpl(MethodImplOptions.AggressiveInlining)]
             public IEnumerator<KType> GetEnumerator()
             {
-                return new IteratorAnonymousInnerClassHelper(this);
+                return new IteratorAnonymousClass(this);
             }
 
             System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
@@ -987,11 +987,11 @@
                 return GetEnumerator();
             }
 
-            private class IteratorAnonymousInnerClassHelper : IEnumerator<KType>
+            private class IteratorAnonymousClass : IEnumerator<KType>
             {
                 private readonly IdentityHashSet<KType> outerInstance;
 
-                public IteratorAnonymousInnerClassHelper(IdentityHashSet<KType> outerInstance)
+                public IteratorAnonymousClass(IdentityHashSet<KType> outerInstance)
                 {
                     this.outerInstance = outerInstance;
                     pos = -1;
diff --git a/src/Lucene.Net/Util/Sorter.cs b/src/Lucene.Net/Util/Sorter.cs
index 3c116d0..f26ad62 100644
--- a/src/Lucene.Net/Util/Sorter.cs
+++ b/src/Lucene.Net/Util/Sorter.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Runtime.CompilerServices;
@@ -86,14 +87,14 @@
             int len11, len22;
             if (mid - from > to - mid)
             {
-                len11 = (int)((uint)(mid - from) >> 1);
+                len11 = (mid - from).TripleShift(1);
                 first_cut = from + len11;
                 second_cut = Lower(mid, to, first_cut);
                 len22 = second_cut - mid;
             }
             else
             {
-                len22 = (int)((uint)(to - mid) >> 1);
+                len22 = (to - mid).TripleShift(1);
                 second_cut = mid + len22;
                 first_cut = Upper(from, mid, second_cut);
                 //len11 = first_cut - from; // LUCENENET: Unnecessary assignment
@@ -110,7 +111,7 @@
             int len = to - from;
             while (len > 0)
             {
-                int half = (int)((uint)len >> 1);
+                int half = len.TripleShift(1);
                 int mid = from + half;
                 if (Compare(mid, val) < 0)
                 {
@@ -131,7 +132,7 @@
             int len = to - from;
             while (len > 0)
             {
-                int half = (int)((uint)len >> 1);
+                int half = len.TripleShift(1);
                 int mid = from + half;
                 if (Compare(val, mid) < 0)
                 {
@@ -255,7 +256,7 @@
                 int h = i - 1;
                 while (l <= h)
                 {
-                    int mid = (int)((uint)(l + h) >> 1);
+                    int mid = (l + h).TripleShift(1);
                     int cmp = Compare(i, mid);
                     if (cmp < 0)
                     {
@@ -348,7 +349,7 @@
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         internal static int HeapParent(int from, int i)
         {
-            return ((int)((uint)(i - 1 - from) >> 1)) + from;
+            return ((i - 1 - from).TripleShift(1)) + from;
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
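
The binary-search midpoints above use the unsigned shift instead of `(l + h) / 2`; for non-negative bounds this is the classic guard against the sum overflowing into a negative `int`. A short demonstration with hypothetical bounds chosen to trigger the overflow:

```
// Sketch: why the midpoint is computed with a logical shift rather than (l + h) / 2.
// With large non-negative indices the sum can wrap negative; the logical shift still
// recovers the correct midpoint (hypothetical bounds below).
using J2N.Numerics;

int l = 1_500_000_000, h = 1_600_000_000;   // l + h overflows int
int signedMid = (l + h) / 2;                // -597483648 (wrong)
int unsignedMid = (l + h).TripleShift(1);   // 1550000000 (correct)
System.Console.WriteLine($"{signedMid} vs {unsignedMid}");
```
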
diff --git a/src/Lucene.Net/Util/StringHelper.cs b/src/Lucene.Net/Util/StringHelper.cs
index 862efbe..50aff05 100644
--- a/src/Lucene.Net/Util/StringHelper.cs
+++ b/src/Lucene.Net/Util/StringHelper.cs
@@ -1,4 +1,4 @@
-using J2N.Numerics;
+using J2N.Numerics;
 using J2N.Text;
 using System;
 using System.Collections.Generic;
@@ -248,11 +248,11 @@
             h1 ^= len;
 
             // fmix(h1);
-            h1 ^= (int)((uint)h1 >> 16);
+            h1 ^= h1.TripleShift(16);
             h1 *= unchecked((int)0x85ebca6b);
-            h1 ^= (int)((uint)h1 >> 13);
+            h1 ^= h1.TripleShift(13);
             h1 *= unchecked((int)0xc2b2ae35);
-            h1 ^= (int)((uint)h1 >> 16);
+            h1 ^= h1.TripleShift(16);
 
             return h1;
         }
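
The three xor-shift/multiply steps above are MurmurHash3's 32-bit finalizer (`fmix`, as the comment in the hunk notes); they avalanche the bits so that nearby inputs hash far apart. A self-contained sketch of just that finalizer:

```
// Sketch: MurmurHash3's 32-bit finalizer, exactly the three steps shown in the hunk.
using J2N.Numerics;

static int Fmix(int h1)
{
    h1 ^= h1.TripleShift(16);
    h1 *= unchecked((int)0x85ebca6b);
    h1 ^= h1.TripleShift(13);
    h1 *= unchecked((int)0xc2b2ae35);
    h1 ^= h1.TripleShift(16);
    return h1;
}

// Nearby inputs avalanche to very different hashes:
System.Console.WriteLine($"{Fmix(1):x8}");
System.Console.WriteLine($"{Fmix(2):x8}");
```
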
diff --git a/src/Lucene.Net/Util/TimSorter.cs b/src/Lucene.Net/Util/TimSorter.cs
index c950e38..66f960a 100644
--- a/src/Lucene.Net/Util/TimSorter.cs
+++ b/src/Lucene.Net/Util/TimSorter.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using System;
 using System.Runtime.CompilerServices;
@@ -74,7 +75,7 @@
             while (n >= 64)
             {
                 r |= n & 1;
-                n = (int)((uint)n >> 1);
+                n = n.TripleShift(1);
             }
             int minRun = n + r;
             if (Debugging.AssertsEnabled) Debugging.Assert(minRun >= MINRUN && minRun <= THRESHOLD);
@@ -401,7 +402,7 @@
             int len = to - from;
             while (len > 0)
             {
-                int half = (int)((uint)len >> 1);
+                int half = len.TripleShift(1);
                 int mid = from + half;
                 if (CompareSaved(val, mid) > 0)
                 {
@@ -422,7 +423,7 @@
             int len = to - from;
             while (len > 0)
             {
-                int half = (int)((uint)len >> 1);
+                int half = len.TripleShift(1);
                 int mid = from + half;
                 if (CompareSaved(val, mid) < 0)
                 {
diff --git a/src/Lucene.Net/Util/ToStringUtils.cs b/src/Lucene.Net/Util/ToStringUtils.cs
index be6074e..a53c093 100644
--- a/src/Lucene.Net/Util/ToStringUtils.cs
+++ b/src/Lucene.Net/Util/ToStringUtils.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using System.Globalization;
 using System.Text;
 
@@ -59,7 +60,7 @@
         public static string Int64Hex(long x)
         {
             char[] asHex = new char[16];
-            for (int i = 16; --i >= 0; x = (long)((ulong)x >> 4))
+            for (int i = 16; --i >= 0; x = x.TripleShift(4))
             {
                 asHex[i] = HEX[(int)x & 0x0F];
             }
diff --git a/src/Lucene.Net/Util/WAH8DocIdSet.cs b/src/Lucene.Net/Util/WAH8DocIdSet.cs
index 844aaac..b9455e2 100644
--- a/src/Lucene.Net/Util/WAH8DocIdSet.cs
+++ b/src/Lucene.Net/Util/WAH8DocIdSet.cs
@@ -1,3 +1,4 @@
+using J2N.Numerics;
 using Lucene.Net.Diagnostics;
 using Lucene.Net.Support;
 using System;
@@ -200,7 +201,7 @@
             }
             // The logic below is very similar to DisjunctionScorer
             int numSets = docIdSets.Count;
-            PriorityQueue<Iterator> iterators = new PriorityQueueAnonymousInnerClassHelper(numSets);
+            PriorityQueue<Iterator> iterators = new PriorityQueueAnonymousClass(numSets);
             foreach (WAH8DocIdSet set in docIdSets)
             {
                 Iterator iterator = (Iterator)set.GetIterator();
@@ -239,9 +240,9 @@
             return builder.Build();
         }
 
-        private class PriorityQueueAnonymousInnerClassHelper : PriorityQueue<WAH8DocIdSet.Iterator>
+        private class PriorityQueueAnonymousClass : PriorityQueue<WAH8DocIdSet.Iterator>
         {
-            public PriorityQueueAnonymousInnerClassHelper(int numSets)
+            public PriorityQueueAnonymousClass(int numSets)
                 : base(numSets)
             {
             }
@@ -257,7 +258,7 @@
         internal static int WordNum(int docID)
         {
             if (Debugging.AssertsEnabled) Debugging.Assert(docID >= 0);
-            return (int)((uint)docID >> 3);
+            return docID.TripleShift(3);
         }
 
         /// <summary>
@@ -324,14 +325,14 @@
                 {
                     token |= 1 << 3;
                 }
-                @out.WriteByte((byte)(sbyte)token);
+                @out.WriteByte((byte)token);
                 if (cleanLengthMinus2 > 0x03)
                 {
-                    @out.WriteVInt32((int)((uint)cleanLengthMinus2 >> 2));
+                    @out.WriteVInt32(cleanLengthMinus2.TripleShift(2));
                 }
                 if (dirtyLength > 0x07)
                 {
-                    @out.WriteVInt32((int)((uint)dirtyLength >> 3));
+                    @out.WriteVInt32(dirtyLength.TripleShift(3));
                 }
             }
 
@@ -611,7 +612,7 @@
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         internal static int ReadCleanLength(ByteArrayDataInput @in, int token)
         {
-            int len = ((int)((uint)token >> 4)) & 0x07;
+            int len = (token.TripleShift(4)) & 0x07;
             int startPosition = @in.Position;
             if ((len & 0x04) != 0)
             {
@@ -792,7 +793,7 @@
                 // we found a window containing our target, let's binary search now
                 while (lo <= hi)
                 {
-                    int mid = (int)((uint)(lo + hi) >> 1);
+                    int mid = (lo + hi).TripleShift(1);
                     int midWordNum = (int)wordNums.Get(mid);
                     if (midWordNum <= targetWordNum)
                     {
@@ -865,7 +866,7 @@
                 if (bitList != 0) // there are remaining bits in the current word
                 {
                     docID = (wordNum << 3) | ((bitList & 0x0F) - 1);
-                    bitList = (int)((uint)bitList >> 4);
+                    bitList = bitList.TripleShift(4);
                     return docID;
                 }
                 NextWord();
@@ -876,7 +877,7 @@
                 bitList = BitUtil.BitList(word);
                 if (Debugging.AssertsEnabled) Debugging.Assert(bitList != 0);
                 docID = (wordNum << 3) | ((bitList & 0x0F) - 1);
-                bitList = (int)((uint)bitList >> 4);
+                bitList = bitList.TripleShift(4);
                 return docID;
             }
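
`WordNum` maps a doc ID to the 8-bit word that contains it, and the iterator lines reverse the mapping with `(wordNum << 3) | bit`. A tiny sketch of that packing, using hypothetical doc IDs:

```
// Sketch: WAH8 packs 8 document IDs per byte-sized word.
// wordNum = docID >>> 3 picks the word; the low 3 bits pick the bit inside it.
using J2N.Numerics;

foreach (int docID in new[] { 0, 7, 8, 27 })
{
    int wordNum = docID.TripleShift(3);  // 0, 0, 1, 3
    int bit = docID & 7;                 // 0, 7, 0, 3
    System.Console.WriteLine($"doc {docID} -> word {wordNum}, bit {bit}, back = {(wordNum << 3) | bit}");
}
```
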
 
diff --git a/src/Lucene.Net/Util/WeakIdentityMap.cs b/src/Lucene.Net/Util/WeakIdentityMap.cs
index 3371207..306e5ee 100644
--- a/src/Lucene.Net/Util/WeakIdentityMap.cs
+++ b/src/Lucene.Net/Util/WeakIdentityMap.cs
@@ -192,7 +192,7 @@
 //            public IEnumerator<TKey> GetEnumerator()
 //            {
 //                outerInstance.Reap();
-//                return new IteratorAnonymousInnerClassHelper(outerInstance);
+//                return new IteratorAnonymousClass(outerInstance);
 //            }
 
 //            IEnumerator IEnumerable.GetEnumerator()
@@ -259,12 +259,12 @@
 //            }
 //        }
 
-//        private class IteratorAnonymousInnerClassHelper : IEnumerator<TKey>
+//        private class IteratorAnonymousClass : IEnumerator<TKey>
 //        {
 //            private readonly WeakIdentityMap<TKey, TValue> outerInstance;
 //            private readonly IEnumerator<KeyValuePair<IdentityWeakReference, TValue>> enumerator;
 
-//            public IteratorAnonymousInnerClassHelper(WeakIdentityMap<TKey, TValue> outerInstance)
+//            public IteratorAnonymousClass(WeakIdentityMap<TKey, TValue> outerInstance)
 //            {
 //                this.outerInstance = outerInstance;
 //                enumerator = outerInstance.backingStore.GetEnumerator();
diff --git a/src/dotnet/tools/lucene-cli/docs/index.md b/src/dotnet/tools/lucene-cli/docs/index.md
index 303f5d7..66f4071 100644
--- a/src/dotnet/tools/lucene-cli/docs/index.md
+++ b/src/dotnet/tools/lucene-cli/docs/index.md
@@ -1,6 +1,6 @@
-# Lucene.Net command line interface (CLI) tools
+# Lucene.NET command line interface (CLI) tools
 
-The Lucene.Net command line interface (CLI) is a new cross-platform toolchain with utilities for maintaining Lucene.Net and demos for learning basic Lucene.Net functionality.
+The Lucene.NET command line interface (CLI) is a new cross-platform toolchain with utilities for maintaining Lucene.NET and demos for learning basic Lucene.NET functionality.
 
 ## Prerequisites
 
@@ -14,7 +14,9 @@
 dotnet tool install lucene-cli -g --version [EnvVar:LuceneNetVersion]
 ```
 
-You may then use the lucene-cli tool to analyze and update Lucene.Net indexes and use its demos.
+> NOTE: The version of the CLI you install should match the version of Lucene.NET you use.
+
+You may then use the lucene-cli tool to analyze and update Lucene.NET indexes and use its demos.
 
 ## CLI Commands
 
@@ -27,7 +29,7 @@
 
 ## Command structure
 
-CLI command structure consists of the driver ("lucene"), the command, and possibly command arguments and options. You see this pattern in most CLI operations, such as checking a Lucene.Net index for problematic segments and fixing (removing) them:
+CLI command structure consists of the driver ("lucene"), the command, and possibly command arguments and options. You see this pattern in most CLI operations, such as checking a Lucene.NET index for problematic segments and fixing (removing) them:
 
 ```
 lucene index check C:\my-index --verbose