SDAP-212 Update EDGE to Python 3.7
diff --git a/src/main/python/libraries/edge/elasticsearch/datasetwriter.py b/src/main/python/libraries/edge/elasticsearch/datasetwriter.py
index 34dd063..ef677f6 100644
--- a/src/main/python/libraries/edge/elasticsearch/datasetwriter.py
+++ b/src/main/python/libraries/edge/elasticsearch/datasetwriter.py
@@ -1,7 +1,7 @@
 from types import *
 import json
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 import requestresponder
 from edge.dateutility import DateUtility
@@ -111,7 +111,7 @@
         queries = []
         sort = None
         filterQuery = None
-        for key, value in variables.iteritems():
+        for key, value in variables.items():
             #query = ''
             if key == 'startTime':
                 startTime = DateUtility.convertISOToUTCTimestamp(value)
@@ -126,7 +126,7 @@
                     query += '[*%20TO%20'+str(stopTime)+']'
                     queries.append(query)
             elif key == 'keyword':
-                newValue = urllib.quote(value)
+                newValue = urllib.parse.quote(value)
 
                 query = newValue
                 queries.append(query)
@@ -153,7 +153,7 @@
                 queries.append(query)
             elif key == 'sortBy':
                 sortByMapping = {'timeDesc': 'start_time:desc', 'timeAsc': 'start_time:asc'}
-                if value in sortByMapping.keys():
+                if value in sortByMapping:
                     sort = sortByMapping[value]
             elif key == 'bbox':
                 filterQuery = self._constructBoundingBoxQuery(value)
diff --git a/src/main/python/libraries/edge/elasticsearch/granulewriter.py b/src/main/python/libraries/edge/elasticsearch/granulewriter.py
index d999fe1..6d82fad 100644
--- a/src/main/python/libraries/edge/elasticsearch/granulewriter.py
+++ b/src/main/python/libraries/edge/elasticsearch/granulewriter.py
@@ -1,6 +1,6 @@
 from types import *
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import json
 
 from edge.opensearch.responsewriter import ResponseWriter
@@ -86,7 +86,7 @@
         sort='desc'
         filterQuery = None
         queries = []
-        for key, value in variables.iteritems():
+        for key, value in variables.items():
             #query = ''
             if key == 'startTime':
                 startTime = DateUtility.convertISOToUTCTimestamp(value)
@@ -101,7 +101,7 @@
                     query += '[* TO '+str(stopTime)+']'
                     queries.append(query)
             elif key == 'keyword':
-                newValue = urllib.quote(value)
+                newValue = urllib.parse.quote(value)
 
                 query = 'SearchableText-LowerCased:('+newValue+')'
                 queries.append(query)
@@ -124,7 +124,7 @@
                 startIndex = 0
             elif key == 'sortBy':
                 sortByMapping = {'timeAsc': 'asc'}
-                if value in sortByMapping.keys():
+                if value in sortByMapping:
                     sort = sortByMapping[value]
             elif key == 'bbox':
                 filterQuery = self._constructBoundingBoxQuery(value)
diff --git a/src/main/python/libraries/edge/elasticsearch/opensearch/atomresponsebyelasticsearch.py b/src/main/python/libraries/edge/elasticsearch/opensearch/atomresponsebyelasticsearch.py
index a79c9c6..131f1c8 100644
--- a/src/main/python/libraries/edge/elasticsearch/opensearch/atomresponsebyelasticsearch.py
+++ b/src/main/python/libraries/edge/elasticsearch/opensearch/atomresponsebyelasticsearch.py
@@ -1,5 +1,5 @@
 import json
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.opensearch.atomresponse import AtomResponse
 from collections import defaultdict
@@ -27,7 +27,7 @@
                 {'namespace': 'opensearch', 'name': 'itemsPerPage', 'value': 1}
             )
             self.parameters['startIndex'] = 0
-            url = self.link + '?' + urllib.urlencode(self.parameters)
+            url = self.link + '?' + urllib.parse.urlencode(self.parameters)
             self.variables.append({'name': 'link', 'attribute': {'href': url, 'rel': 'self', 'type': 'application/atom+xml'}})
             self.variables.append({'name': 'link', 'attribute': {'href': url, 'rel': 'first', 'type': 'application/atom+xml'}})
             item = [
@@ -43,16 +43,16 @@
             rows = int(self.parameters['itemsPerPage'])
 
             self.parameters['startIndex'] = start
-            self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'self', 'type': 'application/atom+xml'}})
+            self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.parse.urlencode(self.parameters), 'rel': 'self', 'type': 'application/atom+xml'}})
             self.parameters['startIndex'] = 0
-            self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'first', 'type': 'application/atom+xml'}})
+            self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.parse.urlencode(self.parameters), 'rel': 'first', 'type': 'application/atom+xml'}})
             if start > 0:
                 if (start - rows > 0):
                     self.parameters['startIndex'] = start - rows
-                self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'previous', 'type': 'application/atom+xml'}})
+                self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.parse.urlencode(self.parameters), 'rel': 'previous', 'type': 'application/atom+xml'}})
             if start + rows < numFound:
                 self.parameters['startIndex'] = start + rows
-                self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'next', 'type': 'application/atom+xml'}})
+                self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.parse.urlencode(self.parameters), 'rel': 'next', 'type': 'application/atom+xml'}})
             
             self.variables.append(
                 {'namespace': 'opensearch', 'name': 'totalResults', 'value': numFound}
@@ -76,11 +76,11 @@
         pass
     
     def _populateItemWithAllMetadata(self, doc, item):
-        for docKey in doc.keys():
+        for docKey in list(doc.keys()):
             if isinstance(doc[docKey], list):
                 for child in doc[docKey]:
                     childItem = []
-                    for childKey in child.keys():
+                    for childKey in child:
                         childItem.append({'namespace': 'gibs', 'name': childKey, 'value': child[childKey]})
                     item.append({'namespace': 'gibs', 'name': docKey, 'value': childItem})
             else:
diff --git a/src/main/python/libraries/edge/elasticsearch/opensearch/datasetatomresponse.py b/src/main/python/libraries/edge/elasticsearch/opensearch/datasetatomresponse.py
index a8d10d8..8de5eda 100644
--- a/src/main/python/libraries/edge/elasticsearch/opensearch/datasetatomresponse.py
+++ b/src/main/python/libraries/edge/elasticsearch/opensearch/datasetatomresponse.py
@@ -1,6 +1,6 @@
 import logging
 import datetime
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.elasticsearch.opensearch.atomresponsebyelasticsearch import AtomResponseByElasticsearch
 from edge.dateutility import DateUtility
@@ -26,16 +26,16 @@
         item.append({'name': 'title', 'value': doc['_source']['title']})
         item.append({'name': 'content', 'value': doc['_source']['description']})
         
-        item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('full', 'true')])), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'GIBS Metadata' }})
+        item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'dataset?' + urllib.parse.urlencode(dict([idTuple, ('full', 'true')])), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'GIBS Metadata' }})
         """
         item.append({'name': 'link', 'attribute': {'href': self.url + self.metadataBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('format', 'iso')])), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'ISO-19115 Metadata' }})
         item.append({'name': 'link', 'attribute': {'href': self.url + self.metadataBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('format', 'gcmd')])), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'GCMD Metadata' }})
         """
         #Only generate granule search link if dataset has granules
         if (doc['_source']['identifier'].lower() in self.datasets):
-            supportedGranuleParams = dict([(key,value) for key,value in self.parameters.iteritems() if key in ['bbox', 'startTime', 'endTime']])
+            supportedGranuleParams = {key: value for key, value in self.parameters.items() if key in ['bbox', 'startTime', 'endTime']}
             supportedGranuleParams['identifier'] = persistentId
-            item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'granule?' + urllib.urlencode(supportedGranuleParams), 'rel': 'search', 'type': 'application/atom+xml', 'title': 'Product Search' }})
+            item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'granule?' + urllib.parse.urlencode(supportedGranuleParams), 'rel': 'search', 'type': 'application/atom+xml', 'title': 'Product Search' }})
         """
         if 'Dataset-ImageUrl' in doc and doc['Dataset-ImageUrl'][0] != '':
             item.append({'name': 'link', 'attribute': {'href': doc['Dataset-ImageUrl'][0], 'rel': 'enclosure', 'type': 'image/jpg', 'title': 'Thumbnail' }})
diff --git a/src/main/python/libraries/edge/elasticsearch/opensearch/granuleatomresponse.py b/src/main/python/libraries/edge/elasticsearch/opensearch/granuleatomresponse.py
index a4d8cb7..1ab3cd6 100644
--- a/src/main/python/libraries/edge/elasticsearch/opensearch/granuleatomresponse.py
+++ b/src/main/python/libraries/edge/elasticsearch/opensearch/granuleatomresponse.py
@@ -1,5 +1,5 @@
 import datetime
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.elasticsearch.opensearch.atomresponsebyelasticsearch import AtomResponseByElasticsearch
 from edge.dateutility import DateUtility
@@ -32,7 +32,7 @@
         
         parameters = {'identifier': doc['_source']['identifier'], 'name': doc['_source']['name']}
         parameters['full'] = 'true'
-        item.append({'name': 'link', 'attribute': {'href': self.url+self.searchBasePath + 'granule?' + urllib.urlencode(parameters), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'GIBS Metadata' }})
+        item.append({'name': 'link', 'attribute': {'href': self.url+self.searchBasePath + 'granule?' + urllib.parse.urlencode(parameters), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'GIBS Metadata' }})
         del parameters['full']
         '''
         parameters['format'] = 'iso'
diff --git a/src/main/python/libraries/edge/opensearch/atomresponse.py b/src/main/python/libraries/edge/opensearch/atomresponse.py
index ddf8bdb..58864cc 100644
--- a/src/main/python/libraries/edge/opensearch/atomresponse.py
+++ b/src/main/python/libraries/edge/opensearch/atomresponse.py
@@ -38,7 +38,7 @@
 
         document = Document()
         feed = document.createElement('feed')
-        for namespace in self.namespaces.keys():
+        for namespace in self.namespaces:
             namespaceAttr = 'xmlns'
             if namespace != '':
                 namespaceAttr += ':'+namespace
@@ -141,5 +141,5 @@
             else:
                 variableElement.appendChild(document.createTextNode(xml.sax.saxutils.escape(str(value))))
         if 'attribute' in itemEntry:
-            for attr in itemEntry['attribute'].keys():
+            for attr in itemEntry['attribute']:
                 variableElement.setAttribute(attr, itemEntry['attribute'][attr])
diff --git a/src/main/python/libraries/edge/opensearch/atomresponsebysolr.py b/src/main/python/libraries/edge/opensearch/atomresponsebysolr.py
index c63fd5f..3b72fec 100644
--- a/src/main/python/libraries/edge/opensearch/atomresponsebysolr.py
+++ b/src/main/python/libraries/edge/opensearch/atomresponsebysolr.py
@@ -1,5 +1,5 @@
 import json
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.opensearch.atomresponse import AtomResponse
 from collections import defaultdict
@@ -29,7 +29,7 @@
                 {'namespace': 'opensearch', 'name': 'itemsPerPage', 'value': 1}
             )
             self.parameters['startIndex'] = 0
-            url = self.link + '?' + urllib.urlencode(self.parameters)
+            url = self.link + '?' + urllib.parse.urlencode(self.parameters)
             self.variables.append({'name': 'link', 'attribute': {'href': url, 'rel': 'self', 'type': 'application/atom+xml'}})
             self.variables.append({'name': 'link', 'attribute': {'href': url, 'rel': 'first', 'type': 'application/atom+xml'}})
             item = [
@@ -45,16 +45,16 @@
             rows = int(solrJson['responseHeader']['params']['rows'])
 
             self.parameters['startIndex'] = start
-            self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'self', 'type': 'application/atom+xml'}})
+            self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.parse.urlencode(self.parameters), 'rel': 'self', 'type': 'application/atom+xml'}})
             self.parameters['startIndex'] = 0
-            self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'first', 'type': 'application/atom+xml'}})
+            self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.parse.urlencode(self.parameters), 'rel': 'first', 'type': 'application/atom+xml'}})
             if start > 0:
                 if (start - rows > 0):
                     self.parameters['startIndex'] = start - rows
-                self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'previous', 'type': 'application/atom+xml'}})
+                self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.parse.urlencode(self.parameters), 'rel': 'previous', 'type': 'application/atom+xml'}})
             if start + rows < numFound:
                 self.parameters['startIndex'] = start + rows
-                self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'next', 'type': 'application/atom+xml'}})
+                self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.parse.urlencode(self.parameters), 'rel': 'next', 'type': 'application/atom+xml'}})
             
             self.variables.append(
                 {'namespace': 'opensearch', 'name': 'totalResults', 'value': solrJson['response']['numFound']}
@@ -112,7 +112,7 @@
     def _populateItemWithPodaacMetadata(self, doc, item, multiValuedElementsKeys):
         ignoreElementsEndingWith = ('-Full', '-Long')
         multiValuedElements = defaultdict(list)
-        for docKey in doc.keys():
+        for docKey in list(doc.keys()):
             if docKey.startswith(multiValuedElementsKeys):
                 multiValuedElements[docKey.split('-', 1)[0]].append(docKey)
             elif not docKey.endswith(ignoreElementsEndingWith):
diff --git a/src/main/python/libraries/edge/opensearch/datasetatomresponse.py b/src/main/python/libraries/edge/opensearch/datasetatomresponse.py
index dc11a93..9af506a 100644
--- a/src/main/python/libraries/edge/opensearch/datasetatomresponse.py
+++ b/src/main/python/libraries/edge/opensearch/datasetatomresponse.py
@@ -1,5 +1,5 @@
 import datetime
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.opensearch.atomresponsebysolr import AtomResponseBySolr
 from edge.dateutility import DateUtility
@@ -23,24 +23,24 @@
         item.append({'name': 'title', 'value': doc['Dataset-LongName'][0]})
         item.append({'name': 'content', 'value': doc['Dataset-Description'][0]})
         
-        item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('full', 'true')])), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'PO.DAAC Metadata' }})
-        item.append({'name': 'link', 'attribute': {'href': self.url + self.metadataBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('format', 'iso')])), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'ISO-19115 Metadata' }})
-        item.append({'name': 'link', 'attribute': {'href': self.url + self.metadataBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('format', 'gcmd')])), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'GCMD Metadata' }})
+        item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'dataset?' + urllib.parse.urlencode(dict([idTuple, ('full', 'true')])), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'PO.DAAC Metadata' }})
+        item.append({'name': 'link', 'attribute': {'href': self.url + self.metadataBasePath + 'dataset?' + urllib.parse.urlencode(dict([idTuple, ('format', 'iso')])), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'ISO-19115 Metadata' }})
+        item.append({'name': 'link', 'attribute': {'href': self.url + self.metadataBasePath + 'dataset?' + urllib.parse.urlencode(dict([idTuple, ('format', 'gcmd')])), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'GCMD Metadata' }})
         
         #Only generate granule search link if dataset has granules
         if (doc['Dataset-ShortName'][0] in self.datasets):
-            supportedGranuleParams = dict([(key,value) for key,value in self.parameters.iteritems() if key in ['bbox', 'startTime', 'endTime']])
+            supportedGranuleParams = {key: value for key, value in self.parameters.items() if key in ['bbox', 'startTime', 'endTime']}
             if persistentId == '':
                 supportedGranuleParams['shortName'] = doc['Dataset-ShortName'][0]
             else:
                 supportedGranuleParams['datasetId'] = persistentId
-            item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'granule?' + urllib.urlencode(supportedGranuleParams), 'rel': 'search', 'type': 'application/atom+xml', 'title': 'Granule Search' }})
+            item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'granule?' + urllib.parse.urlencode(supportedGranuleParams), 'rel': 'search', 'type': 'application/atom+xml', 'title': 'Granule Search' }})
         
         if 'Dataset-ImageUrl' in doc and doc['Dataset-ImageUrl'][0] != '':
             item.append({'name': 'link', 'attribute': {'href': doc['Dataset-ImageUrl'][0], 'rel': 'enclosure', 'type': 'image/jpg', 'title': 'Thumbnail' }})
         
         if 'DatasetLocationPolicy-Type' in doc and 'DatasetLocationPolicy-BasePath' in doc:
-            url = dict(zip(doc['DatasetLocationPolicy-Type'], doc['DatasetLocationPolicy-BasePath']))
+            url = dict(zip(doc['DatasetLocationPolicy-Type'], doc['DatasetLocationPolicy-BasePath']))
             if 'LOCAL-OPENDAP' in url:
                 item.append({'name': 'link', 'attribute': {'href': url['LOCAL-OPENDAP'], 'rel': 'enclosure', 'type': 'text/html', 'title': 'OPeNDAP URL' }})
             elif 'REMOTE-OPENDAP' in url:
diff --git a/src/main/python/libraries/edge/opensearch/datasetgranulewriter.py b/src/main/python/libraries/edge/opensearch/datasetgranulewriter.py
index f9c62a1..d0686c9 100644
--- a/src/main/python/libraries/edge/opensearch/datasetgranulewriter.py
+++ b/src/main/python/libraries/edge/opensearch/datasetgranulewriter.py
@@ -1,8 +1,8 @@
 from types import *
 import logging
-import urllib
-import urlparse
-import httplib
+import urllib.request, urllib.error
+import urllib.parse
+import http.client
 from xml.dom.minidom import Document
 import json
 import xml.sax.saxutils
@@ -129,7 +129,7 @@
         #set default sort order
         sort='Granule-StartTimeLong+desc'
         queries = []
-        for key, value in variables.iteritems():
+        for key, value in variables.items():
             #query = ''
             if key == 'startTime':
                 startTime = DateUtility.convertISOToUTCTimestamp(value)
@@ -144,7 +144,7 @@
                     query += '[*%20TO%20'+str(stopTime)+']'
                     queries.append(query)
             elif key == 'keyword':
-                newValue = urllib.quote(value)
+                newValue = urllib.parse.quote(value)
 
                 query = 'SearchableText-LowerCased:('+newValue+')'
                 queries.append(query)
@@ -167,7 +167,7 @@
                 startIndex = 0
             elif key == 'sortBy':
                 sortByMapping = {'timeAsc': 'Granule-StartTimeLong+asc', 'archiveTimeDesc': 'Granule-ArchiveTimeLong+desc'}
-                if value in sortByMapping.keys():
+                if value in sortByMapping:
                     sort = sortByMapping[value]
             elif key == 'archiveTime':
                 query = 'Granule-ArchiveTimeLong:['+str(value)+'%20TO%20*]'
diff --git a/src/main/python/libraries/edge/opensearch/datasetrssresponse.py b/src/main/python/libraries/edge/opensearch/datasetrssresponse.py
index e9194bc..b078f38 100644
--- a/src/main/python/libraries/edge/opensearch/datasetrssresponse.py
+++ b/src/main/python/libraries/edge/opensearch/datasetrssresponse.py
@@ -1,4 +1,4 @@
-import urllib
+import urllib.request, urllib.parse, urllib.error
 from edge.opensearch.rssresponsebysolr import RssResponseBySolr
 from edge.dateutility import DateUtility
 
@@ -25,24 +25,24 @@
         item.append({'name': 'description', 'value': doc['Dataset-Description'][0]})
         item.append({'name': 'link', 'value': portalUrl})
         
-        item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.searchBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('full', 'true'), ('format', 'rss')])), 'type': 'application/rss+xml', 'length': '0'}})
-        item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.metadataBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('format', 'iso')])), 'type': 'text/xml', 'length': '0'}})
-        item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.metadataBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('format', 'gcmd')])), 'type': 'text/xml', 'length': '0'}})
+        item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.searchBasePath + 'dataset?' + urllib.parse.urlencode(dict([idTuple, ('full', 'true'), ('format', 'rss')])), 'type': 'application/rss+xml', 'length': '0'}})
+        item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.metadataBasePath + 'dataset?' + urllib.parse.urlencode(dict([idTuple, ('format', 'iso')])), 'type': 'text/xml', 'length': '0'}})
+        item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.metadataBasePath + 'dataset?' + urllib.parse.urlencode(dict([idTuple, ('format', 'gcmd')])), 'type': 'text/xml', 'length': '0'}})
         
         #Only generate granule search link if dataset has granules
         if (doc['Dataset-ShortName'][0] in self.datasets):
-            supportedGranuleParams = dict([(key,value) for key,value in self.parameters.iteritems() if key in ['bbox', 'startTime', 'endTime', 'format']])
+            supportedGranuleParams = {key: value for key, value in self.parameters.items() if key in ['bbox', 'startTime', 'endTime', 'format']}
             if persistentId == '':
                 supportedGranuleParams['shortName'] = doc['Dataset-ShortName'][0]
             else:
                 supportedGranuleParams['datasetId'] = persistentId
-            item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.searchBasePath + 'granule?' + urllib.urlencode(supportedGranuleParams), 'type': 'application/rss+xml', 'length': '0'}})
+            item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.searchBasePath + 'granule?' + urllib.parse.urlencode(supportedGranuleParams), 'type': 'application/rss+xml', 'length': '0'}})
         
         if 'Dataset-ImageUrl' in doc and doc['Dataset-ImageUrl'][0] != '':
             item.append({'name': 'enclosure', 'attribute': {'url': doc['Dataset-ImageUrl'][0], 'type': 'image/jpg', 'length': '0'}})
         
         if 'DatasetLocationPolicy-Type' in doc and 'DatasetLocationPolicy-BasePath' in doc:
-            url = dict(zip(doc['DatasetLocationPolicy-Type'], doc['DatasetLocationPolicy-BasePath']))
+            url = dict(zip(doc['DatasetLocationPolicy-Type'], doc['DatasetLocationPolicy-BasePath']))
             if 'LOCAL-OPENDAP' in url:
                 item.append({'name': 'enclosure', 'attribute': {'url': url['LOCAL-OPENDAP'], 'type': 'text/html', 'length': '0'}})
             elif 'REMOTE-OPENDAP' in url:
diff --git a/src/main/python/libraries/edge/opensearch/datasetwriter.py b/src/main/python/libraries/edge/opensearch/datasetwriter.py
index 3ec56cb..f49e5b7 100644
--- a/src/main/python/libraries/edge/opensearch/datasetwriter.py
+++ b/src/main/python/libraries/edge/opensearch/datasetwriter.py
@@ -1,7 +1,7 @@
 from types import *
 import json
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 import requestresponder
 from edge.dateutility import DateUtility
@@ -110,7 +110,7 @@
         queries = []
         sort = None
         filterQuery = None
-        for key, value in variables.iteritems():
+        for key, value in variables.items():
             #query = ''
             if key == 'startTime':
                 startTime = DateUtility.convertISOToUTCTimestamp(value)
@@ -125,7 +125,7 @@
                     query += '[*%20TO%20'+str(stopTime)+']'
                     queries.append(query)
             elif key == 'keyword':
-                newValue = urllib.quote(value)
+                newValue = urllib.parse.quote(value)
 
                 query = 'SearchableText-LowerCased:('+newValue+')'
                 queries.append(query)
@@ -153,7 +153,7 @@
             elif key == 'sortBy':
                 sortByMapping = {'timeDesc': 'DatasetCoverage-StartTimeLong-Long+desc', 'timeAsc': 'DatasetCoverage-StartTimeLong-Long+asc', 
                                  'popularityDesc': 'Dataset-AllTimePopularity+desc', 'popularityAsc': 'Dataset-AllTimePopularity+asc'}
-                if value in sortByMapping.keys():
+                if value in sortByMapping:
                     sort = sortByMapping[value]
             elif key == 'bbox':
                 filterQuery = self._constructBoundingBoxQuery(value)
diff --git a/src/main/python/libraries/edge/opensearch/fgdcresponse.py b/src/main/python/libraries/edge/opensearch/fgdcresponse.py
index c8738ce..8394851 100644
--- a/src/main/python/libraries/edge/opensearch/fgdcresponse.py
+++ b/src/main/python/libraries/edge/opensearch/fgdcresponse.py
@@ -31,7 +31,7 @@
             tree = xml.etree.ElementTree.fromstring(fgdcStr)
             self._indent(tree)
             
-            for namespace in self.namespaces.keys():
+            for namespace in self.namespaces:
                 xml.etree.ElementTree.register_namespace(namespace, self.namespaces[namespace])
             
             return xmlDeclaration + xml.etree.ElementTree.tostring(tree, encoding='utf-8')
diff --git a/src/main/python/libraries/edge/opensearch/fgdcresponsebysolr.py b/src/main/python/libraries/edge/opensearch/fgdcresponsebysolr.py
index 562dc08..52aea7c 100644
--- a/src/main/python/libraries/edge/opensearch/fgdcresponsebysolr.py
+++ b/src/main/python/libraries/edge/opensearch/fgdcresponsebysolr.py
@@ -67,16 +67,16 @@
                 self.variables['UniqueDatasetSensor'] = {}
                 for i, x in enumerate(doc['DatasetSource-Sensor-ShortName']):
                     self.variables['UniqueDatasetSensor'][x] = i
-                self.variables['UniqueDatasetSensor'] = self.variables['UniqueDatasetSensor'].values()
+                self.variables['UniqueDatasetSensor'] = list(self.variables['UniqueDatasetSensor'].values())
                 
                 # Create list of unique dataset source
                 self.variables['UniqueDatasetSource'] = {}
                 for i, x in enumerate(doc['DatasetSource-Source-ShortName']):
                     self.variables['UniqueDatasetSource'][x] = i
-                self.variables['UniqueDatasetSource'] = self.variables['UniqueDatasetSource'].values()
+                self.variables['UniqueDatasetSource'] = list(self.variables['UniqueDatasetSource'].values())
                 
                 # Create dictionary for dataset_resource
-                self.variables['DatasetResource'] = dict(zip(doc['DatasetResource-Type'], doc['DatasetResource-Path']))
+                self.variables['DatasetResource'] = dict(zip(doc['DatasetResource-Type'], doc['DatasetResource-Path']))
                 
                 # Get index of dataset Technical Contact
                 self.variables['TechnicalContactIndex'] = -1
diff --git a/src/main/python/libraries/edge/opensearch/gcmdresponsebysolr.py b/src/main/python/libraries/edge/opensearch/gcmdresponsebysolr.py
index 588fc4a..a773eb6 100644
--- a/src/main/python/libraries/edge/opensearch/gcmdresponsebysolr.py
+++ b/src/main/python/libraries/edge/opensearch/gcmdresponsebysolr.py
@@ -28,8 +28,8 @@
                 
                 #Filter response from solr, if value contains none, N/A, null set to empty string
                 if not allowNone:
-                    for key, value in doc.iteritems():
-                        if key not in ['DatasetPolicy-AccessConstraint', 'DatasetPolicy-UseConstraint'] and isinstance(value[0], basestring) and len(value[0].strip()) <= 4 and value[0].strip().lower() in ['none', 'na', 'n/a', 'null']:
+                    for key, value in doc.items():
+                        if key not in ['DatasetPolicy-AccessConstraint', 'DatasetPolicy-UseConstraint'] and isinstance(value[0], str) and len(value[0].strip()) <= 4 and value[0].strip().lower() in ['none', 'na', 'n/a', 'null']:
                             doc[key][0] = ""
                 
                 self.variables['doc'] = doc
@@ -48,12 +48,12 @@
                             doc['DatasetCitation-ReleaseDateLong'][i] = datetime.utcfromtimestamp(float(x) / 1000).strftime('%Y-%m-%d')
                         except:
                             pass
-                    self.variables['Dataset_Citation'] = [dict(zip(datasetCitationCol,x)) for x in zip(doc['DatasetCitation-Creator'], doc['DatasetCitation-Title'], doc['DatasetCitation-SeriesName'], doc['DatasetCitation-ReleaseDateLong'], doc['DatasetCitation-ReleasePlace'], doc['DatasetCitation-Publisher'], doc['DatasetCitation-Version'], doc['DatasetCitation-CitationDetail'], doc['DatasetCitation-OnlineResource'])]
+                    self.variables['Dataset_Citation'] = [dict(zip(datasetCitationCol,x)) for x in zip(doc['DatasetCitation-Creator'], doc['DatasetCitation-Title'], doc['DatasetCitation-SeriesName'], doc['DatasetCitation-ReleaseDateLong'], doc['DatasetCitation-ReleasePlace'], doc['DatasetCitation-Publisher'], doc['DatasetCitation-Version'], doc['DatasetCitation-CitationDetail'], doc['DatasetCitation-OnlineResource'])]
                 
                 # Personnel
                 datasetPersonnelCol = ['Role', 'First_Name', 'Middle_Name', 'Last_Name', 'Email', 'Phone', 'Fax', 'Provider_Short_Name']
                 if 'DatasetContact-Contact-Role' in doc:
-                    self.variables['Personnel'] = [dict(zip(datasetPersonnelCol, x)) for x in zip(doc['DatasetContact-Contact-Role'], doc['DatasetContact-Contact-FirstName'], doc['DatasetContact-Contact-MiddleName'], doc['DatasetContact-Contact-LastName'], doc['DatasetContact-Contact-Email'], doc['DatasetContact-Contact-Phone'], doc['DatasetContact-Contact-Fax'], doc['DatasetContact-Contact-Provider-ShortName'])]
+                    self.variables['Personnel'] = [dict(zip(datasetPersonnelCol, x)) for x in zip(doc['DatasetContact-Contact-Role'], doc['DatasetContact-Contact-FirstName'], doc['DatasetContact-Contact-MiddleName'], doc['DatasetContact-Contact-LastName'], doc['DatasetContact-Contact-Email'], doc['DatasetContact-Contact-Phone'], doc['DatasetContact-Contact-Fax'], doc['DatasetContact-Contact-Provider-ShortName'])]
                 
                 # Locate dataset provider contact
                 self.variables['Provider_Personnel'] = next((item for item in self.variables['Personnel'] if item["Provider_Short_Name"] == doc['Dataset-Provider-ShortName'][0]), None)
@@ -63,7 +63,7 @@
                 if 'DatasetParameter-Category' in doc:
                     # Replace all none, None values with empty string
                     doc['DatasetParameter-VariableDetail'] = [self._filterString(variableDetail) for variableDetail in doc['DatasetParameter-VariableDetail']]
-                    self.variables['Parameters'] = [dict(zip(datasetParameterCol, x)) for x in zip(doc['DatasetParameter-Category'], doc['DatasetParameter-Topic'], doc['DatasetParameter-Term'], doc['DatasetParameter-Variable'], doc['DatasetParameter-VariableDetail'])]
+                    self.variables['Parameters'] = [dict(zip(datasetParameterCol, x)) for x in zip(doc['DatasetParameter-Category'], doc['DatasetParameter-Topic'], doc['DatasetParameter-Term'], doc['DatasetParameter-Variable'], doc['DatasetParameter-VariableDetail'])]
                 
                 # Format dates
                 try:
@@ -76,21 +76,21 @@
                 # Project
                 projectCol = ['Short_Name', 'Long_Name']
                 if 'DatasetProject-Project-ShortName' in doc:
-                    self.variables['Project'] = [dict(zip(projectCol, x)) for x in zip(doc['DatasetProject-Project-ShortName'], doc['DatasetProject-Project-LongName'])]
+                    self.variables['Project'] = [dict(zip(projectCol, x)) for x in zip(doc['DatasetProject-Project-ShortName'], doc['DatasetProject-Project-LongName'])]
                 
                 # Create list of unique dataset sensor
                 self.variables['UniqueDatasetSensor'] = {}
                 if 'DatasetSource-Sensor-ShortName' in doc:
                     for i, x in enumerate(doc['DatasetSource-Sensor-ShortName']):
                         self.variables['UniqueDatasetSensor'][x] = i
-                    self.variables['UniqueDatasetSensor'] = self.variables['UniqueDatasetSensor'].values()
+                    self.variables['UniqueDatasetSensor'] = list(self.variables['UniqueDatasetSensor'].values())
                 
                 # Create list of unique dataset source
                 self.variables['UniqueDatasetSource'] = {}
                 if 'DatasetSource-Source-ShortName' in doc:
                     for i, x in enumerate(doc['DatasetSource-Source-ShortName']):
                         self.variables['UniqueDatasetSource'][x] = i
-                    self.variables['UniqueDatasetSource'] = self.variables['UniqueDatasetSource'].values()
+                    self.variables['UniqueDatasetSource'] = list(self.variables['UniqueDatasetSource'].values())
                 
                 # Last_DIF_Revision_Date
                 self.variables['Last_DIF_Revision_Date'] = datetime.utcfromtimestamp(float(doc['DatasetMetaHistory-LastRevisionDateLong'][0]) / 1000).strftime('%Y-%m-%d')
diff --git a/src/main/python/libraries/edge/opensearch/granuleatomresponse.py b/src/main/python/libraries/edge/opensearch/granuleatomresponse.py
index 9b38347..69f96f0 100644
--- a/src/main/python/libraries/edge/opensearch/granuleatomresponse.py
+++ b/src/main/python/libraries/edge/opensearch/granuleatomresponse.py
@@ -1,5 +1,5 @@
 import datetime
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.opensearch.atomresponsebysolr import AtomResponseBySolr
 from edge.dateutility import DateUtility
@@ -32,12 +32,12 @@
         
         parameters = {'datasetId': doc['Dataset-PersistentId'][0], 'granuleName': doc['Granule-Name'][0]}
         parameters['full'] = 'true'
-        item.append({'name': 'link', 'attribute': {'href': self.url+self.searchBasePath + 'granule?' + urllib.urlencode(parameters), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'PO.DAAC Metadata' }})
+        item.append({'name': 'link', 'attribute': {'href': self.url+self.searchBasePath + 'granule?' + urllib.parse.urlencode(parameters), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'PO.DAAC Metadata' }})
         del parameters['full']
         parameters['format'] = 'iso'
-        item.append({'name': 'link', 'attribute': {'href': self.url+self.metadataBasePath + 'granule?' +  urllib.urlencode(parameters), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'ISO-19115 Metadata' }})
+        item.append({'name': 'link', 'attribute': {'href': self.url+self.metadataBasePath + 'granule?' +  urllib.parse.urlencode(parameters), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'ISO-19115 Metadata' }})
         parameters['format'] = 'fgdc'
-        item.append({'name': 'link', 'attribute': {'href': self.url+self.metadataBasePath + 'granule?' +  urllib.urlencode(parameters), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'FGDC Metadata' }})
+        item.append({'name': 'link', 'attribute': {'href': self.url+self.metadataBasePath + 'granule?' +  urllib.parse.urlencode(parameters), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'FGDC Metadata' }})
         
         #item.append({'name': 'description', 'value': doc['Dataset-Description'][0]})
         #item.append({'name': 'link', 'value': self.portalUrl+'/'+doc['Dataset-ShortName'][0]})
diff --git a/src/main/python/libraries/edge/opensearch/granuledatacastingresponse.py b/src/main/python/libraries/edge/opensearch/granuledatacastingresponse.py
index 24b5dc0..cd40e46 100644
--- a/src/main/python/libraries/edge/opensearch/granuledatacastingresponse.py
+++ b/src/main/python/libraries/edge/opensearch/granuledatacastingresponse.py
@@ -19,7 +19,7 @@
         
         doc['GranuleLink'] = self._getLinkToGranule(doc)
         
-        doc['GranuleFileSize'] = dict(zip(doc['GranuleArchive-Type'], doc['GranuleArchive-FileSize']))
+        doc['GranuleFileSize'] = dict(zip(doc['GranuleArchive-Type'], doc['GranuleArchive-FileSize']))
         
         if 'GranuleReference-Type' in doc:
             doc['GranuleReference'] = dict([(doc['GranuleReference-Type'][i], doc['GranuleReference-Path'][i]) for i,x in enumerate(doc['GranuleReference-Status']) if x=="ONLINE"])
@@ -28,7 +28,7 @@
         link = None
 
         if 'GranuleReference-Type' in doc and len(self.linkToGranule) > 0:
-            granuleRefDict = dict(zip(doc['GranuleReference-Type'], zip(doc['GranuleReference-Path'], doc['GranuleReference-Status'])))
+            granuleRefDict = dict(zip(doc['GranuleReference-Type'], zip(doc['GranuleReference-Path'], doc['GranuleReference-Status'])))
 
             for type in self.linkToGranule:
                 # check if reference type exists
diff --git a/src/main/python/libraries/edge/opensearch/granuleisoresponse.py b/src/main/python/libraries/edge/opensearch/granuleisoresponse.py
index 7b9b0a7..fd3ed16 100644
--- a/src/main/python/libraries/edge/opensearch/granuleisoresponse.py
+++ b/src/main/python/libraries/edge/opensearch/granuleisoresponse.py
@@ -20,7 +20,7 @@
         link = None
 
         if 'GranuleReference-Type' in doc and len(self.linkToGranule) > 0:
-            granuleRefDict = dict(zip(doc['GranuleReference-Type'], zip(doc['GranuleReference-Path'], doc['GranuleReference-Status'])))
+            granuleRefDict = dict(zip(doc['GranuleReference-Type'], zip(doc['GranuleReference-Path'], doc['GranuleReference-Status'])))
 
             for type in self.linkToGranule:
                 # check if reference type exists
diff --git a/src/main/python/libraries/edge/opensearch/granulerssresponse.py b/src/main/python/libraries/edge/opensearch/granulerssresponse.py
index a514cca..1b28a9c 100644
--- a/src/main/python/libraries/edge/opensearch/granulerssresponse.py
+++ b/src/main/python/libraries/edge/opensearch/granulerssresponse.py
@@ -1,4 +1,4 @@
-import urllib
+import urllib.request, urllib.parse, urllib.error
 from edge.opensearch.rssresponsebysolr import RssResponseBySolr
 from edge.dateutility import DateUtility
 
@@ -37,12 +37,12 @@
         parameters = {'datasetId': doc['Dataset-PersistentId'][0], 'granuleName': doc['Granule-Name'][0]}
         parameters['full'] = 'true'
         parameters['format'] = 'rss'
-        item.append({'name': 'enclosure', 'attribute': {'url': self.url+self.searchBasePath + 'granule?' + urllib.urlencode(parameters), 'type': 'application/rss+xml', 'length': '0'}})
+        item.append({'name': 'enclosure', 'attribute': {'url': self.url+self.searchBasePath + 'granule?' + urllib.parse.urlencode(parameters), 'type': 'application/rss+xml', 'length': '0'}})
         del parameters['full']
         parameters['format'] = 'iso'
-        item.append({'name': 'enclosure', 'attribute': {'url': self.url+self.metadataBasePath + 'granule?' +  urllib.urlencode(parameters), 'type': 'text/xml', 'length': '0'}})
+        item.append({'name': 'enclosure', 'attribute': {'url': self.url+self.metadataBasePath + 'granule?' +  urllib.parse.urlencode(parameters), 'type': 'text/xml', 'length': '0'}})
         parameters['format'] = 'fgdc'
-        item.append({'name': 'enclosure', 'attribute': {'url': self.url+self.metadataBasePath + 'granule?' +  urllib.urlencode(parameters), 'type': 'text/xml', 'length': '0'}})
+        item.append({'name': 'enclosure', 'attribute': {'url': self.url+self.metadataBasePath + 'granule?' +  urllib.parse.urlencode(parameters), 'type': 'text/xml', 'length': '0'}})
         
         if 'GranuleReference-Type' in doc:
             if 'Granule-DataFormat' in doc:
@@ -80,7 +80,7 @@
         link = None
 
         if 'GranuleReference-Type' in doc and len(self.linkToGranule) > 0:
-            granuleRefDict = dict(zip(doc['GranuleReference-Type'], zip(doc['GranuleReference-Path'], doc['GranuleReference-Status'])))
+            granuleRefDict = dict(zip(doc['GranuleReference-Type'], zip(doc['GranuleReference-Path'], doc['GranuleReference-Status'])))
 
             for type in self.linkToGranule:
                 # check if reference type exists
diff --git a/src/main/python/libraries/edge/opensearch/granulewriter.py b/src/main/python/libraries/edge/opensearch/granulewriter.py
index ddbb194..35d79a9 100644
--- a/src/main/python/libraries/edge/opensearch/granulewriter.py
+++ b/src/main/python/libraries/edge/opensearch/granulewriter.py
@@ -1,6 +1,6 @@
 from types import *
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import json
 
 from edge.opensearch.responsewriter import ResponseWriter
@@ -89,7 +89,7 @@
         sort='Granule-StartTimeLong+desc'
         filterQuery = None
         queries = []
-        for key, value in variables.iteritems():
+        for key, value in variables.items():
             #query = ''
             if key == 'startTime':
                 startTime = DateUtility.convertISOToUTCTimestamp(value)
@@ -104,7 +104,7 @@
                     query += '[*%20TO%20'+str(stopTime)+']'
                     queries.append(query)
             elif key == 'keyword':
-                newValue = urllib.quote(value)
+                newValue = urllib.parse.quote(value)
 
                 query = 'SearchableText-LowerCased:('+newValue+')'
                 queries.append(query)
@@ -127,7 +127,7 @@
                 startIndex = 0
             elif key == 'sortBy':
                 sortByMapping = {'timeAsc': 'Granule-StartTimeLong+asc'}
-                if value in sortByMapping.keys():
+                if value in sortByMapping:
                     sort = sortByMapping[value]
             elif key == 'bbox':
                 filterQuery = self._constructBoundingBoxQuery(value)
diff --git a/src/main/python/libraries/edge/opensearch/isoresponsebysolr.py b/src/main/python/libraries/edge/opensearch/isoresponsebysolr.py
index fd9090b..4af96c3 100644
--- a/src/main/python/libraries/edge/opensearch/isoresponsebysolr.py
+++ b/src/main/python/libraries/edge/opensearch/isoresponsebysolr.py
@@ -40,13 +40,13 @@
                     self.variables['UniqueDatasetSensor'] = {}
                     for i, x in enumerate(doc['DatasetSource-Sensor-ShortName']):
                         self.variables['UniqueDatasetSensor'][x] = i
-                    self.variables['UniqueDatasetSensor'] = self.variables['UniqueDatasetSensor'].values()
+                    self.variables['UniqueDatasetSensor'] = list(self.variables['UniqueDatasetSensor'].values())
                     
                     # Create list of unique dataset source
                     self.variables['UniqueDatasetSource'] = {}
                     for i, x in enumerate(doc['DatasetSource-Source-ShortName']):
                         self.variables['UniqueDatasetSource'][x] = i
-                    self.variables['UniqueDatasetSource'] = self.variables['UniqueDatasetSource'].values()
+                    self.variables['UniqueDatasetSource'] = list(self.variables['UniqueDatasetSource'].values())
                     
                     # Replace all none, None values with empty string
                     doc['DatasetParameter-VariableDetail'] = [self._filterString(variableDetail) for variableDetail in doc['DatasetParameter-VariableDetail']]
diff --git a/src/main/python/libraries/edge/opensearch/responsebysolr.py b/src/main/python/libraries/edge/opensearch/responsebysolr.py
index eb01661..26302e1 100644
--- a/src/main/python/libraries/edge/opensearch/responsebysolr.py
+++ b/src/main/python/libraries/edge/opensearch/responsebysolr.py
@@ -54,7 +54,7 @@
                 ]
                 """
                 item = []
-                for docKey in doc.keys():
+                for docKey in doc:
                     item.append({'namespace': 'podaac', 'name': docKey, 'value': doc[docKey]})
 
                 self._populateItem(solrResponse, doc, item)
diff --git a/src/main/python/libraries/edge/opensearch/responsewriter.py b/src/main/python/libraries/edge/opensearch/responsewriter.py
index 2277c65..b390e3b 100644
--- a/src/main/python/libraries/edge/opensearch/responsewriter.py
+++ b/src/main/python/libraries/edge/opensearch/responsewriter.py
@@ -1,6 +1,6 @@
 from types import *
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 import requestresponder
 from edge.httputility import HttpUtility
@@ -31,7 +31,7 @@
     
     def _constructSingleSolrDatasetQuery(self, variables):
         queries = []
-        for key, value in variables.iteritems():
+        for key, value in variables.items():
             # Only key used for ISO granule record is dataset
             if key == 'datasetId':
                 query = 'Dataset-PersistentId:'+self._urlEncodeSolrQueryValue(value)
@@ -56,7 +56,7 @@
         return httpUtility.getResponse(url+'/select/?'+query, callback)
     
     def _urlEncodeSolrQueryValue(self, value):
-        return urllib.quote('"'+value+'"')
+        return urllib.parse.quote('"'+value+'"')
     
     def _constructBoundingBoxQuery(self, value):
         coords = value.split(",")
diff --git a/src/main/python/libraries/edge/opensearch/rssresponse.py b/src/main/python/libraries/edge/opensearch/rssresponse.py
index d36a109..24a84ba 100644
--- a/src/main/python/libraries/edge/opensearch/rssresponse.py
+++ b/src/main/python/libraries/edge/opensearch/rssresponse.py
@@ -36,7 +36,7 @@
         document = Document()
         rss = document.createElement('rss')
         rss.setAttribute('version', '2.0')
-        for namespace in self.namespaces.keys():
+        for namespace in self.namespaces:
             rss.setAttribute('xmlns:'+namespace, self.namespaces[namespace])
         document.appendChild(rss)
 
@@ -122,5 +122,5 @@
             else:
                 variableElement.appendChild(document.createTextNode(xml.sax.saxutils.escape(str(value))))
         if 'attribute' in itemEntry:
-            for attr in itemEntry['attribute'].keys():
+            for attr in itemEntry['attribute']:
                 variableElement.setAttribute(attr, itemEntry['attribute'][attr])
diff --git a/src/main/python/libraries/edge/opensearch/rssresponsebysolr.py b/src/main/python/libraries/edge/opensearch/rssresponsebysolr.py
index fffe234..0ade8d1 100644
--- a/src/main/python/libraries/edge/opensearch/rssresponsebysolr.py
+++ b/src/main/python/libraries/edge/opensearch/rssresponsebysolr.py
@@ -1,5 +1,5 @@
 import json
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.opensearch.rssresponse import RssResponse
 from collections import defaultdict
@@ -29,7 +29,7 @@
                 {'namespace': 'opensearch', 'name': 'itemsPerPage', 'value': 1}
             )
             self.parameters['startIndex'] = 0
-            url = self.link + '?' + urllib.urlencode(self.parameters)
+            url = self.link + '?' + urllib.parse.urlencode(self.parameters)
             self.variables.append({'namespace': 'atom', 'name': 'link', 'attribute': {'href': url, 'rel': 'self', 'type': 'application/rss+xml'}})
             self.variables.append({'namespace': 'atom', 'name': 'link', 'attribute': {'href': url, 'rel': 'first', 'type': 'application/rss+xml'}})
             item = [
@@ -45,16 +45,16 @@
             rows = int(solrJson['responseHeader']['params']['rows'])
 
             self.parameters['startIndex'] = start
-            self.variables.append({'namespace': 'atom', 'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'self', 'type': 'application/rss+xml'}})
+            self.variables.append({'namespace': 'atom', 'name': 'link', 'attribute': {'href': self.link + '?' + urllib.parse.urlencode(self.parameters), 'rel': 'self', 'type': 'application/rss+xml'}})
             self.parameters['startIndex'] = 0
-            self.variables.append({'namespace': 'atom', 'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'first', 'type': 'application/rss+xml'}})
+            self.variables.append({'namespace': 'atom', 'name': 'link', 'attribute': {'href': self.link + '?' + urllib.parse.urlencode(self.parameters), 'rel': 'first', 'type': 'application/rss+xml'}})
             if start > 0:
                 if (start - rows > 0):
                     self.parameters['startIndex'] = start - rows
-                self.variables.append({'namespace': 'atom', 'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'previous', 'type': 'application/rss+xml'}})
+                self.variables.append({'namespace': 'atom', 'name': 'link', 'attribute': {'href': self.link + '?' + urllib.parse.urlencode(self.parameters), 'rel': 'previous', 'type': 'application/rss+xml'}})
             if start + rows < numFound:
                 self.parameters['startIndex'] = start + rows
-                self.variables.append({'namespace': 'atom', 'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'next', 'type': 'application/rss+xml'}})
+                self.variables.append({'namespace': 'atom', 'name': 'link', 'attribute': {'href': self.link + '?' + urllib.parse.urlencode(self.parameters), 'rel': 'next', 'type': 'application/rss+xml'}})
             
             self.variables.append(
                 {'namespace': 'opensearch', 'name': 'totalResults', 'value': solrJson['response']['numFound']}
@@ -112,7 +112,7 @@
     def _populateItemWithPodaacMetadata(self, doc, item, multiValuedElementsKeys):
         ignoreElementsEndingWith = ('-Full', '-Long')
         multiValuedElements = defaultdict(list)
-        for docKey in doc.keys():
+        for docKey in list(doc.keys()):
             if docKey.startswith(multiValuedElementsKeys):
                 multiValuedElements[docKey.split('-', 1)[0]].append(docKey)
             elif not docKey.endswith(ignoreElementsEndingWith):
diff --git a/src/main/python/libraries/edge/opensearch/solrcmrtemplateresponse.py b/src/main/python/libraries/edge/opensearch/solrcmrtemplateresponse.py
index 0dbcebb..cd98665 100644
--- a/src/main/python/libraries/edge/opensearch/solrcmrtemplateresponse.py
+++ b/src/main/python/libraries/edge/opensearch/solrcmrtemplateresponse.py
@@ -1,9 +1,9 @@
 import datetime
 import pycurl
-from StringIO import StringIO
+from io import StringIO
 import json
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import os.path
 
 from edge.opensearch.templateresponse import TemplateResponse
@@ -40,7 +40,7 @@
      
         # Format the output if there are errors.
         response = {}
-        if output.keys().__contains__('errors'):
+        if list(output.keys()).__contains__('errors'):
             response['entry'] = []
             response['updated'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
             response['id'] = url
@@ -54,26 +54,26 @@
 
         try:
 
-            if response.keys().__contains__('errors'):
+            if list(response.keys()).__contains__('errors'):
                 return(response)
     
-            if not(response.keys().__contains__('feed')):
+            if not(list(response.keys()).__contains__('feed')):
                 raise ValueError('no "feed" in the cmr response')
-            if not(response['feed'].keys().__contains__('entry')):
+            if not(list(response['feed'].keys()).__contains__('entry')):
                 raise ValueError('no "entry" in the cmr response')
-            if not(response['feed'].keys().__contains__('updated')):
+            if not(list(response['feed'].keys()).__contains__('updated')):
                 raise ValueError('no "updated" key in the cmr response')
-            if not(response['feed'].keys().__contains__('id')):
+            if not(list(response['feed'].keys()).__contains__('id')):
                 raise ValueError('no "id" key in the cmr response')
-            if not(response['feed'].keys().__contains__('title')):
+            if not(list(response['feed'].keys()).__contains__('title')):
                 raise ValueError('no "id" key in the cmr response')
       
             # Create lists if they do not exists.
-            if not(cmr.keys().__contains__('cmr_search_updated')):
+            if not(list(cmr.keys()).__contains__('cmr_search_updated')):
                 cmr['cmr_search_updated'] = []
-            if not(cmr.keys().__contains__('cmr_search_url')):
+            if not(list(cmr.keys()).__contains__('cmr_search_url')):
                 cmr['cmr_search_url'] = []
-            if not(cmr.keys().__contains__('cmr_search_title')):
+            if not(list(cmr.keys()).__contains__('cmr_search_title')):
                 cmr['cmr_search_title'] = []
 
             cmr['cmr_search_updated'].append(response['feed']['updated'])
@@ -85,13 +85,13 @@
                 entry = response['feed']['entry'][0]
                 for key in entry:
                     keyname = 'cmr_%s' %(key)
-                    if not(cmr.keys().__contains__(keyname)):
+                    if not(list(cmr.keys()).__contains__(keyname)):
                         cmr[keyname] = []
                     cmr[keyname].append(entry[key])
     
-        except ValueError, e:
+        except ValueError as e:
             msg = 'Error! parse error: %s.' %e
-            print '%s\n' %msg
+            print('%s\n' %msg)
     
         return(cmr)
 
@@ -128,7 +128,7 @@
                 # CMR: PRODUCT_TYPE
                 #------------------------------------------------------------------------------------------
 
-                if solrJson['response']['docs'][i].keys().__contains__('product_type_dataset_short_name_list'):
+                if list(solrJson['response']['docs'][i].keys()).__contains__('product_type_dataset_short_name_list'):
 
                     for j in range(len(doc['product_type_dataset_short_name_list'])):
 
@@ -153,8 +153,8 @@
                 # CMR: PRODUCT (Only search when the query contains 'id' - ie individual cmr search)
                 #------------------------------------------------------------------------------------------
 
-                elif solrJson['response']['docs'][i].keys().__contains__('product_granule_remote_granule_ur_list') and \
-                     self.parameters.keys().__contains__('id'):
+                elif list(solrJson['response']['docs'][i].keys()).__contains__('product_granule_remote_granule_ur_list') and \
+                     list(self.parameters.keys()).__contains__('id'):
 
                     for j in range(len(doc['product_granule_remote_granule_ur_list'])):
 
@@ -225,19 +225,19 @@
                 self.variables['facets'] = solrJson['facet_counts']
 
         self.parameters['startIndex'] = start
-        self.variables['myself'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+        self.variables['myself'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
         
         if rows != 0:
             self.parameters['startIndex'] = numFound - (numFound % rows)
-        self.variables['last'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+        self.variables['last'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
         
         self.parameters['startIndex'] = 0
-        self.variables['first'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+        self.variables['first'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
         if start > 0:
             if (start - rows > 0):
                 self.parameters['startIndex'] = start - rows
-            self.variables['prev'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+            self.variables['prev'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
             
         if start + rows < numFound:
             self.parameters['startIndex'] = start + rows
-            self.variables['next'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+            self.variables['next'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
diff --git a/src/main/python/libraries/edge/opensearch/solrtemplateresponse.py b/src/main/python/libraries/edge/opensearch/solrtemplateresponse.py
index 952220f..1af651f 100644
--- a/src/main/python/libraries/edge/opensearch/solrtemplateresponse.py
+++ b/src/main/python/libraries/edge/opensearch/solrtemplateresponse.py
@@ -1,7 +1,7 @@
 import datetime
 import json
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.opensearch.templateresponse import TemplateResponse
 
@@ -47,19 +47,19 @@
                 self.variables['facets'] = solrJson['facet_counts']
 
         self.parameters['startIndex'] = start
-        self.variables['myself'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+        self.variables['myself'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
         
         if rows != 0:
             self.parameters['startIndex'] = numFound - (numFound % rows)
-        self.variables['last'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+        self.variables['last'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
         
         self.parameters['startIndex'] = 0
-        self.variables['first'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+        self.variables['first'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
         if start > 0:
             if (start - rows > 0):
                 self.parameters['startIndex'] = start - rows
-            self.variables['prev'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+            self.variables['prev'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
             
         if start + rows < numFound:
             self.parameters['startIndex'] = start + rows
-            self.variables['next'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+            self.variables['next'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
diff --git a/src/main/python/libraries/edge/response/estemplateresponse.py b/src/main/python/libraries/edge/response/estemplateresponse.py
index 0bb443c..e72dee6 100644
--- a/src/main/python/libraries/edge/response/estemplateresponse.py
+++ b/src/main/python/libraries/edge/response/estemplateresponse.py
@@ -1,7 +1,7 @@
 import datetime
 import json
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.opensearch.templateresponse import TemplateResponse
 
@@ -36,19 +36,19 @@
 
 
         self.parameters['startIndex'] = start
-        self.variables['myself'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+        self.variables['myself'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
 
         if rows != 0:
             self.parameters['startIndex'] = numFound - (numFound % rows)
-        self.variables['last'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+        self.variables['last'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
 
         self.parameters['startIndex'] = 0
-        self.variables['first'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+        self.variables['first'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
         if start > 0:
             if (start - rows > 0):
                 self.parameters['startIndex'] = start - rows
-            self.variables['prev'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+            self.variables['prev'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
 
         if start + rows < numFound:
             self.parameters['startIndex'] = start + rows
-            self.variables['next'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+            self.variables['next'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
diff --git a/src/main/python/libraries/edge/response/solrjsontemplateresponse.py b/src/main/python/libraries/edge/response/solrjsontemplateresponse.py
index 76d988f..4936798 100644
--- a/src/main/python/libraries/edge/response/solrjsontemplateresponse.py
+++ b/src/main/python/libraries/edge/response/solrjsontemplateresponse.py
@@ -1,7 +1,7 @@
 import datetime
 import json
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.response.jsontemplateresponse import JsonTemplateResponse
 
@@ -42,19 +42,19 @@
 
 
         self.parameters['startIndex'] = start
-        self.variables['myself'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+        self.variables['myself'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
 
         if rows != 0:
             self.parameters['startIndex'] = numFound - (numFound % rows)
-        self.variables['last'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+        self.variables['last'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
 
         self.parameters['startIndex'] = 0
-        self.variables['first'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+        self.variables['first'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
         if start > 0:
             if (start - rows > 0):
                 self.parameters['startIndex'] = start - rows
-            self.variables['prev'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+            self.variables['prev'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
 
         if start + rows < numFound:
             self.parameters['startIndex'] = start + rows
-            self.variables['next'] = self.link + '?' + urllib.urlencode(self.parameters, True)
+            self.variables['next'] = self.link + '?' + urllib.parse.urlencode(self.parameters, True)
diff --git a/src/main/python/libraries/edge/writer/estemplateresponsewriter.py b/src/main/python/libraries/edge/writer/estemplateresponsewriter.py
index e947031..08fa528 100644
--- a/src/main/python/libraries/edge/writer/estemplateresponsewriter.py
+++ b/src/main/python/libraries/edge/writer/estemplateresponsewriter.py
@@ -1,6 +1,6 @@
 from types import *
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import json
 from collections import OrderedDict
 
@@ -59,7 +59,7 @@
         facets = {}
         if self._configuration.has_option('solr', 'facets'):
             self.facetDefs = json.loads(self._configuration.get('solr', 'facets'), object_pairs_hook=OrderedDict)
-            for facet in self.facetDefs.keys():
+            for facet in list(self.facetDefs.keys()):
                 try:
                     value = requestHandler.get_arguments(facet)
                     if len(value) > 0:
@@ -74,7 +74,7 @@
             logging.exception('Failed to get solr response.')
 
     def _urlEncodeSolrQueryValue(self, value):
-        return urllib.quote('"'+value+'"')
+        return urllib.parse.quote('"'+value+'"')
 
     def _onResponse(self, response):
         logging.debug(response)
diff --git a/src/main/python/libraries/edge/writer/genericproxywriter.py b/src/main/python/libraries/edge/writer/genericproxywriter.py
index dd7b1da..bc8e063 100644
--- a/src/main/python/libraries/edge/writer/genericproxywriter.py
+++ b/src/main/python/libraries/edge/writer/genericproxywriter.py
@@ -1,5 +1,5 @@
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.proxywriter import ProxyWriter
 
diff --git a/src/main/python/libraries/edge/writer/proxywriter.py b/src/main/python/libraries/edge/writer/proxywriter.py
index f747681..0fe735e 100644
--- a/src/main/python/libraries/edge/writer/proxywriter.py
+++ b/src/main/python/libraries/edge/writer/proxywriter.py
@@ -21,7 +21,7 @@
             self.requestHandler.write(str(response.error))
             self.requestHandler.finish()
         else:
-            for name, value in response.headers.iteritems():
+            for name, value in response.headers.items():
                 logging.debug('header: '+name+':'+value)
                 self.requestHandler.set_header(name, value)
             self.requestHandler.set_header('Access-Control-Allow-Origin', '*')
diff --git a/src/main/python/libraries/edge/writer/solrtemplateresponsewriter.py b/src/main/python/libraries/edge/writer/solrtemplateresponsewriter.py
index 636a21a..d54ede5 100644
--- a/src/main/python/libraries/edge/writer/solrtemplateresponsewriter.py
+++ b/src/main/python/libraries/edge/writer/solrtemplateresponsewriter.py
@@ -1,6 +1,6 @@
 from types import *
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import json
 from collections import OrderedDict
 
@@ -58,7 +58,7 @@
         facets = {}
         if self._configuration.has_option('solr', 'facets'):
             self.facetDefs = json.loads(self._configuration.get('solr', 'facets'), object_pairs_hook=OrderedDict)
-            for facet in self.facetDefs.keys():
+            for facet in list(self.facetDefs.keys()):
                 try:
                     value = requestHandler.get_arguments(facet)
                     if len(value) > 0:
@@ -73,7 +73,7 @@
             logging.exception('Failed to get solr response.')
 
     def _urlEncodeSolrQueryValue(self, value):
-        return urllib.quote('"'+value+'"')
+        return urllib.parse.quote('"'+value+'"')
 
     def _onSolrResponse(self, response):
         logging.debug(response)
diff --git a/src/main/python/pluginhandler.py b/src/main/python/pluginhandler.py
index 0b72788..37547f8 100644
--- a/src/main/python/pluginhandler.py
+++ b/src/main/python/pluginhandler.py
@@ -1,6 +1,7 @@
 import os
 import sys
 import logging
+import importlib
 
 class PluginHandler(object):
     def __init__(self, name, pluginPath, format=None):
@@ -37,7 +38,7 @@
             for moduleName in modulePath.split('.'):
                 currentModuleName += moduleName
                 #print('reloading: '+currentModuleName)
-                reload(sys.modules[currentModuleName])
+                importlib.reload(sys.modules[currentModuleName])
                 currentModuleName += '.'
 
         #print('modulePath: '+modulePath)
diff --git a/src/main/python/plugins/example/elastic/Writer.py b/src/main/python/plugins/example/elastic/Writer.py
index f02bda4..93f619f 100644
--- a/src/main/python/plugins/example/elastic/Writer.py
+++ b/src/main/python/plugins/example/elastic/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.estemplateresponsewriter import ESTemplateResponseWriter
 from edge.response.estemplateresponse import ESTemplateResponse
@@ -25,10 +25,10 @@
         filterQueries = []
         sort = None
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
-                    queries.append(urllib.quote(value))
+                    queries.append(urllib.parse.quote(value))
         if len(queries) == 0:
             queries.append('*')
 
diff --git a/src/main/python/plugins/icoads/json/Writer.py b/src/main/python/plugins/icoads/json/Writer.py
index 4d6d0de..8467cc8 100644
--- a/src/main/python/plugins/icoads/json/Writer.py
+++ b/src/main/python/plugins/icoads/json/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
 from edge.response.solrjsontemplateresponse import SolrJsonTemplateResponse
@@ -24,17 +24,17 @@
 
     def _constructSolrQuery(self, startIndex, entriesPerPage, parameters, facets):
         # if no QC flag is given, default to only good
-        if not "qualityFlag" in parameters.keys():
+        if "qualityFlag" not in parameters:
             parameters['qualityFlag'] = 1
 
         queries = []
         filterQueries = []
         sort = None
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
-                    queries.append(urllib.quote(value))
+                    queries.append(urllib.parse.quote(value))
                 elif key == 'startTime':
                     filterQueries.append('time:['+value+'%20TO%20*]')
                 elif key == 'endTime':
diff --git a/src/main/python/plugins/nexus/climatology/Writer.py b/src/main/python/plugins/nexus/climatology/Writer.py
index 0d78465..b05f209 100644
--- a/src/main/python/plugins/nexus/climatology/Writer.py
+++ b/src/main/python/plugins/nexus/climatology/Writer.py
@@ -1,5 +1,5 @@
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.genericproxywriter import GenericProxyWriter
 
diff --git a/src/main/python/plugins/nexus/solr/Writer.py b/src/main/python/plugins/nexus/solr/Writer.py
index 0d78465..b05f209 100644
--- a/src/main/python/plugins/nexus/solr/Writer.py
+++ b/src/main/python/plugins/nexus/solr/Writer.py
@@ -1,5 +1,5 @@
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.genericproxywriter import GenericProxyWriter
 
diff --git a/src/main/python/plugins/nexus/subsetter/Writer.py b/src/main/python/plugins/nexus/subsetter/Writer.py
index 0d78465..b05f209 100644
--- a/src/main/python/plugins/nexus/subsetter/Writer.py
+++ b/src/main/python/plugins/nexus/subsetter/Writer.py
@@ -1,5 +1,5 @@
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.genericproxywriter import GenericProxyWriter
 
diff --git a/src/main/python/plugins/oceanxtremes/datacasting/Writer.py b/src/main/python/plugins/oceanxtremes/datacasting/Writer.py
index e3a83b6..ef3d509 100644
--- a/src/main/python/plugins/oceanxtremes/datacasting/Writer.py
+++ b/src/main/python/plugins/oceanxtremes/datacasting/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import json
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
@@ -32,12 +32,12 @@
         start = '*'
         end = '*'
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
-                    queries.append(urllib.quote(value))
+                    queries.append(urllib.parse.quote(value))
                 elif key == 'shortName':
-                    queries.append("primary_dataset_short_name:" + urllib.quote(value))
+                    queries.append("primary_dataset_short_name:" + urllib.parse.quote(value))
 
         if len(queries) == 0:
             queries.append('*:*')
@@ -49,13 +49,13 @@
         
         if self.facet:
             query += '&rows=0&facet=true&facet.limit=-1&facet.mincount=1&'
-            query += '&'.join(['facet.field=' + facet for facet in self.facetDefs.values()])
+            query += '&'.join(['facet.field=' + facet for facet in self.facetDefs.values()])
         else:
             query += '&start='+str(startIndex)+'&rows='+str(entriesPerPage)
             if sort is not None:
-                query += '&sort=' + urllib.quote(sort + ' ' + sortDir + ",InternalVersion desc")
+                query += '&sort=' + urllib.parse.quote(sort + ' ' + sortDir + ",InternalVersion desc")
             else:
-                query += '&sort=' + urllib.quote("submit_date desc")
+                query += '&sort=' + urllib.parse.quote("submit_date desc")
 
         logging.debug('solr query: '+query)
 
diff --git a/src/main/python/plugins/oceanxtremes/post/Writer.py b/src/main/python/plugins/oceanxtremes/post/Writer.py
index fd5fad8..1ab80f6 100644
--- a/src/main/python/plugins/oceanxtremes/post/Writer.py
+++ b/src/main/python/plugins/oceanxtremes/post/Writer.py
@@ -1,6 +1,6 @@
 import logging
-import urllib2
-import urlparse
+import urllib.request, urllib.error
+import urllib.parse
 import uuid
 import json
 from datetime import datetime
diff --git a/src/main/python/plugins/oiip/json/Writer.py b/src/main/python/plugins/oiip/json/Writer.py
index 58d84e6..8d177b2 100644
--- a/src/main/python/plugins/oiip/json/Writer.py
+++ b/src/main/python/plugins/oiip/json/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
 from edge.response.solrjsontemplateresponse import SolrJsonTemplateResponse
@@ -25,14 +25,14 @@
     def _constructSolrQuery(self, startIndex, entriesPerPage, parameters, facets):
         queries = []
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
-                    queries.append(urllib.quote(value))
+                    queries.append(urllib.parse.quote(value))
                 elif key == 'necessity':
-                    queries.append("necessity:" + urllib.quote(value))
+                    queries.append("necessity:" + urllib.parse.quote(value))
                 elif key == 'source':
-                    queries.append("source_ss:\"" + urllib.quote(value) + "\"")
+                    queries.append("source_ss:\"" + urllib.parse.quote(value) + "\"")
 
         if len(queries) == 0:
             queries.append('*:*')
diff --git a/src/main/python/plugins/oiip/xml/Writer.py b/src/main/python/plugins/oiip/xml/Writer.py
index c5c9c6c..d3c0193 100644
--- a/src/main/python/plugins/oiip/xml/Writer.py
+++ b/src/main/python/plugins/oiip/xml/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
 from edge.response.solrjsontemplateresponse import SolrJsonTemplateResponse
@@ -23,14 +23,14 @@
     def _constructSolrQuery(self, startIndex, entriesPerPage, parameters, facets):
         queries = []
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
-                    queries.append(urllib.quote(value))
+                    queries.append(urllib.parse.quote(value))
                 elif key == 'necessity':
-                    queries.append("necessity:" + urllib.quote(value))
+                    queries.append("necessity:" + urllib.parse.quote(value))
                 elif key == 'source':
-                    queries.append("source_ss:\"" + urllib.quote(value) + "\"")
+                    queries.append("source_ss:\"" + urllib.parse.quote(value) + "\"")
 
         if len(queries) == 0:
             queries.append('*:*')
diff --git a/src/main/python/plugins/passthrough/pt/PassThroughWriter.py b/src/main/python/plugins/passthrough/pt/PassThroughWriter.py
index f4785b7..3dbdeb2 100644
--- a/src/main/python/plugins/passthrough/pt/PassThroughWriter.py
+++ b/src/main/python/plugins/passthrough/pt/PassThroughWriter.py
@@ -1,6 +1,6 @@
 import logging
-import urllib2
-import urlparse
+import urllib.request, urllib.error
+import urllib.parse
 
 import requestresponder
 from edge.httputility import HttpUtility
@@ -63,7 +63,7 @@
             self.requestHandler.write(str(response.error))
             self.requestHandler.finish()
         else:
-            for name, value in response.headers.iteritems():
+            for name, value in response.headers.items():
                 logging.debug('header: '+name+':'+value)
                 self.requestHandler.set_header(name, value)
             self.requestHandler.set_header('Access-Control-Allow-Origin', '*')
@@ -81,7 +81,7 @@
             logging.debug('allow: '+value)
         """
 
-        segments = urlparse.urlparse(url)
+        segments = urllib.parse.urlparse(url)
         netlocation = segments.netloc
 
         targets = [netlocation]
diff --git a/src/main/python/plugins/product/atom/Writer.py b/src/main/python/plugins/product/atom/Writer.py
index f4e085f..83a4451 100644
--- a/src/main/python/plugins/product/atom/Writer.py
+++ b/src/main/python/plugins/product/atom/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
 from edge.opensearch.solrcmrtemplateresponse import SolrCmrTemplateResponse
@@ -26,10 +26,10 @@
         queries = []
         filterQueries = []
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if key == 'keyword':
                 logging.debug('product/atom/Writer.py: keyword='+value)
-                queries.append(urllib.quote(value))
+                queries.append(urllib.parse.quote(value))
             elif key == 'product_pt_id':
                 filterQueries.append(key + ':' + self._urlEncodeSolrQueryValue(value))
             elif key == 'startTime':
@@ -42,7 +42,7 @@
             elif key == 'id':
                 queries.append('id:' + self._urlEncodeSolrQueryValue(value))
 
-        for key, value in facets.iteritems():
+        for key, value in facets.items():
             if type(value) is list:
                 if (len(value) == 1):
                     filterQueries.append(key + ':' + self._urlEncodeSolrQueryValue(value[0]))
diff --git a/src/main/python/plugins/product/iso/Writer.py b/src/main/python/plugins/product/iso/Writer.py
index a90552e..42a23ae 100644
--- a/src/main/python/plugins/product/iso/Writer.py
+++ b/src/main/python/plugins/product/iso/Writer.py
@@ -15,7 +15,7 @@
 
     def _generateOpenSearchResponse(self, solrResponse, searchText, searchUrl, searchParams, pretty):
 
-        print "iso product:seachParams = [%s]\n" %searchParams
+        print("iso product:seachParams = [%s]\n" %searchParams)
 
         response = SolrTemplateResponse(self._configuration, searchUrl, searchParams)
         response.setTemplate(self.template)
@@ -25,7 +25,7 @@
     def _constructSolrQuery(self, startIndex, entriesPerPage, parameters, facets):
         queries = []
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if key == 'id':
                 queries.append('id:' + self._urlEncodeSolrQueryValue(value))
             elif key == 'title':
diff --git a/src/main/python/plugins/product_type/atom/Writer.py b/src/main/python/plugins/product_type/atom/Writer.py
index b381f57..6d7e166 100644
--- a/src/main/python/plugins/product_type/atom/Writer.py
+++ b/src/main/python/plugins/product_type/atom/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
 from edge.opensearch.solrcmrtemplateresponse import SolrCmrTemplateResponse
@@ -15,7 +15,7 @@
         self.template = self._readTemplate(templatePath)
 
     def _generateOpenSearchResponse(self, solrResponse, searchText, searchUrl, searchParams, pretty):
-        print "product_type:seachParams = [%s]\n" %searchParams
+        print("product_type:seachParams = [%s]\n" %searchParams)
         response = SolrCmrTemplateResponse(self._configuration, searchUrl, searchParams)
         response.setTemplate(self.template)
         response.variables['serviceUrl'] = self._configuration.get('service', 'url')
@@ -26,9 +26,9 @@
         queries = []
         filterQueries = []
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if key == 'keyword':
-                queries.append(urllib.quote(value))
+                queries.append(urllib.parse.quote(value))
             elif key == 'layers' and value == 'true':
                 filterQueries.append('-product_type_identifier:*_SRC')
             elif key == 'layers' and value == 'false':
@@ -45,7 +45,7 @@
             elif key == 'id':
                 queries.append('id:' + self._urlEncodeSolrQueryValue(value))
 
-        for key, value in facets.iteritems():
+        for key, value in facets.items():
             if type(value) is list:
                 if (len(value) == 1):
                     filterQueries.append(key + ':' + self._urlEncodeSolrQueryValue(value[0]))
diff --git a/src/main/python/plugins/product_type/iso/Writer.py b/src/main/python/plugins/product_type/iso/Writer.py
index 84295cf..7e050c8 100644
--- a/src/main/python/plugins/product_type/iso/Writer.py
+++ b/src/main/python/plugins/product_type/iso/Writer.py
@@ -22,7 +22,7 @@
     def _constructSolrQuery(self, startIndex, entriesPerPage, parameters, facets):
         queries = []
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if key == 'id':
                 queries.append('id:' + self._urlEncodeSolrQueryValue(value))
             elif key == 'title':
diff --git a/src/main/python/plugins/samos/json/Writer.py b/src/main/python/plugins/samos/json/Writer.py
index 21dc337..bc59fca 100644
--- a/src/main/python/plugins/samos/json/Writer.py
+++ b/src/main/python/plugins/samos/json/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
 from edge.response.solrjsontemplateresponse import SolrJsonTemplateResponse
@@ -24,17 +24,17 @@
 
     def _constructSolrQuery(self, startIndex, entriesPerPage, parameters, facets):
         # if no QC flag is given, default to only good
-        if not "qualityFlag" in parameters.keys():
+        if "qualityFlag" not in parameters:
             parameters['qualityFlag'] = 1
 
         queries = []
         filterQueries = []
         sort = None
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
-                    queries.append(urllib.quote(value))
+                    queries.append(urllib.parse.quote(value))
                 elif key == 'startTime':
                     filterQueries.append('time:['+value+'%20TO%20*]')
                 elif key == 'endTime':
diff --git a/src/main/python/plugins/slcp/atom/Writer.py b/src/main/python/plugins/slcp/atom/Writer.py
index 7bdae8c..d5354d4 100644
--- a/src/main/python/plugins/slcp/atom/Writer.py
+++ b/src/main/python/plugins/slcp/atom/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import json
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
@@ -32,10 +32,10 @@
         start = '*'
         end = '*'
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
-                    queries.append(urllib.quote(value))
+                    queries.append(urllib.parse.quote(value))
                 elif key == 'startTime':
                     start = value
                 elif key == 'endTime':
@@ -46,7 +46,7 @@
                 elif key == 'concept_id':
                     queries.append('concept-id:' + self._urlEncodeSolrQueryValue(value))
                 elif key == 'sortKey':
-                    if value in sortKeys.keys():
+                    if value in sortKeys:
                         sort = sortKeys[value]
                 elif key == 'sortDir':
                     sortDir = value
@@ -54,7 +54,7 @@
                     filterQueries.append('InDAT:%s' % value)
         queries.append('(BeginningEndingDateTime:['+start+'%20TO%20' + end + ']+OR+(*:*%20NOT%20BeginningEndingDateTime:*))')
 
-        for key, value in facets.iteritems():
+        for key, value in facets.items():
             if type(value) is list:
                 if (len(value) == 1):
                     filterQueries.append(key + ':' + self._urlEncodeSolrQueryValue(value[0]))
@@ -73,13 +73,13 @@
         
         if self.facet:
             query += '&rows=0&facet=true&facet.limit=-1&facet.mincount=1&'
-            query += '&'.join(['facet.field=' + facet for facet in self.facetDefs.values()])
+            query += '&'.join(['facet.field=' + facet for facet in self.facetDefs.values()])
         else:
             query += '&start='+str(startIndex)+'&rows='+str(entriesPerPage)
             if sort is not None:
-                query += '&sort=' + urllib.quote(sort + ' ' + sortDir + ",InternalVersion desc")
+                query += '&sort=' + urllib.parse.quote(sort + ' ' + sortDir + ",InternalVersion desc")
             else:
-                query += '&sort=' + urllib.quote("score desc,InternalVersion desc")
+                query += '&sort=' + urllib.parse.quote("score desc,InternalVersion desc")
 
         logging.debug('solr query: '+query)
 
diff --git a/src/main/python/plugins/slcp/basin/Writer.py b/src/main/python/plugins/slcp/basin/Writer.py
index 77bfc9a..9e93d07 100644
--- a/src/main/python/plugins/slcp/basin/Writer.py
+++ b/src/main/python/plugins/slcp/basin/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
 from edge.response.solrjsontemplateresponse import SolrJsonTemplateResponse
diff --git a/src/main/python/plugins/slcp/content/Writer.py b/src/main/python/plugins/slcp/content/Writer.py
index c0e27ab..36973dc 100644
--- a/src/main/python/plugins/slcp/content/Writer.py
+++ b/src/main/python/plugins/slcp/content/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
 from edge.response.solrjsontemplateresponse import SolrJsonTemplateResponse
@@ -28,14 +28,14 @@
         filterQueries.append('status:1')
         sort = None
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
                     #Special case keyword search on glossary_items only match title
                     if 'table' in parameters and parameters['table'] == 'glossary_items':
-                        queries.append('title_t:('+urllib.quote(value) + ')')
+                        queries.append('title_t:('+urllib.parse.quote(value) + ')')
                     else:
-                        queries.append(urllib.quote(value))
+                        queries.append(urllib.parse.quote(value))
                 elif key == 'year':
                     start = value + "-01-01T00:00:00.000Z"
                     end = value + "-12-31T23:59:59.999Z"
@@ -46,7 +46,7 @@
                     range = value.lower().split('-')
                     filterQueries.append('{!frange%20l=' + range[0] + '%20u=' + range[1] + 'z}' + 'title_lc')
                 elif key == 'sort':
-                    sort = urllib.quote(value)
+                    sort = urllib.parse.quote(value)
                 elif key == 'topic_id':
                     filterQueries.append('categories_id:' + value)
                 elif key == 'mission_id':
diff --git a/src/main/python/plugins/slcp/dat/Writer.py b/src/main/python/plugins/slcp/dat/Writer.py
index 1bdfe55..29a5119 100644
--- a/src/main/python/plugins/slcp/dat/Writer.py
+++ b/src/main/python/plugins/slcp/dat/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
 from edge.response.solrjsontemplateresponse import SolrJsonTemplateResponse
@@ -26,7 +26,7 @@
         queries = []
         filterQueries = []
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if key == 'id':
                 queries.append('id:' + self._urlEncodeSolrQueryValue(value))
             elif key == 'slcpShortName':
@@ -46,7 +46,7 @@
         if len(filterQueries) > 0:
             query += '&fq='+'+AND+'.join(filterQueries)
 
-        query += '&sort=' + urllib.quote("DATOrder desc,ShortName asc")
+        query += '&sort=' + urllib.parse.quote("DATOrder desc,ShortName asc")
 
         logging.debug('solr query: '+query)
 
diff --git a/src/main/python/plugins/slcp/echo10/Writer.py b/src/main/python/plugins/slcp/echo10/Writer.py
index ebafc40..adcf258 100644
--- a/src/main/python/plugins/slcp/echo10/Writer.py
+++ b/src/main/python/plugins/slcp/echo10/Writer.py
@@ -22,7 +22,7 @@
     def _constructSolrQuery(self, startIndex, entriesPerPage, parameters, facets):
         queries = []
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if key == 'id':
                 queries.append('id:' + self._urlEncodeSolrQueryValue(value))
             elif key == 'slcpShortName':
diff --git a/src/main/python/plugins/slcp/facet/Writer.py b/src/main/python/plugins/slcp/facet/Writer.py
index 4194bde..f9b4e51 100644
--- a/src/main/python/plugins/slcp/facet/Writer.py
+++ b/src/main/python/plugins/slcp/facet/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
 from edge.response.solrfacettemplateresponse import SolrFacetTemplateResponse
@@ -27,10 +27,10 @@
         queries = []
         filterQueries = []
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
-                    queries.append(urllib.quote(value))
+                    queries.append(urllib.parse.quote(value))
                 elif key == 'startTime':
                     queries.append('EndingDateTime-Internal:['+value+'%20TO%20*]')
                 elif key == 'endTime':
@@ -41,7 +41,7 @@
                 elif key == 'concept_id':
                     queries.append('concept-id:' + self._urlEncodeSolrQueryValue(value))
 
-        for key, value in facets.iteritems():
+        for key, value in facets.items():
             tagKey = '{!tag=' + key + '}' + key
             if type(value) is list:
                 if (len(value) == 1):
@@ -61,7 +61,7 @@
         
         if self.facet:
             query += '&rows=0&facet=true&facet.limit=-1&'
-            query += '&'.join(['facet.field={!ex=' + facet +'}' + facet if facet in facets else 'facet.field=' + facet for facet in self.facetDefs.values()])
+            query += '&'.join(['facet.field={!ex=' + facet +'}' + facet if facet in facets else 'facet.field=' + facet for facet in self.facetDefs.values()])
         else:
             query += '&start='+str(startIndex)+'&rows='+str(entriesPerPage)
 
diff --git a/src/main/python/plugins/slcp/granule/Writer.py b/src/main/python/plugins/slcp/granule/Writer.py
index e462e1c..5dc9a88 100644
--- a/src/main/python/plugins/slcp/granule/Writer.py
+++ b/src/main/python/plugins/slcp/granule/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import json
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
@@ -30,10 +30,10 @@
         sort = None
         sortDir = 'asc'
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
-                    queries.append(urllib.quote(value))
+                    queries.append(urllib.parse.quote(value))
                 elif key == 'startTime':
                     queries.append('EndingDateTime:['+value+'%20TO%20*]')
                 elif key == 'endTime':
@@ -44,12 +44,12 @@
                 elif key == 'shortName':
                     queries.append('ShortName:' + self._urlEncodeSolrQueryValue(value))
                 elif key == 'sortKey':
-                    if value in sortKeys.keys():
+                    if value in list(sortKeys.keys()):
                         sort = sortKeys[value]
                 elif key == 'sortDir':
                     sortDir = value
 
-        for key, value in facets.iteritems():
+        for key, value in facets.items():
             if type(value) is list:
                 if (len(value) == 1):
                     filterQueries.append(key + ':' + self._urlEncodeSolrQueryValue(value[0]))
@@ -68,11 +68,11 @@
         
         if self.facet:
             query += '&rows=0&facet=true&facet.limit=-1&facet.mincount=1&'
-            query += '&'.join(['facet.field=' + facet for facet in self.facetDefs.values()])
+            query += '&'.join(['facet.field=' + facet for facet in list(self.facetDefs.values())])
         else:
             query += '&start='+str(startIndex)+'&rows='+str(entriesPerPage)
             if sort is not None:
-                query += '&sort=' + urllib.quote(sort + ' ' + sortDir)
+                query += '&sort=' + urllib.parse.quote(sort + ' ' + sortDir)
 
         logging.debug('solr query: '+query)
 
diff --git a/src/main/python/plugins/slcp/indicator/Writer.py b/src/main/python/plugins/slcp/indicator/Writer.py
index 2d02f45..148ee9b 100644
--- a/src/main/python/plugins/slcp/indicator/Writer.py
+++ b/src/main/python/plugins/slcp/indicator/Writer.py
@@ -1,5 +1,5 @@
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import json
 
 from edge.writer.proxywriter import ProxyWriter
@@ -23,7 +23,7 @@
                 parameters['fl'] = 'xLatest,yLatest,unit,abbrUnit,updated_at'
         except:
             pass
-        url += '/select?' + urllib.urlencode(parameters)
+        url += '/select?' + urllib.parse.urlencode(parameters)
         logging.debug("proxy to url : " + url)
         return url
 
@@ -33,7 +33,7 @@
             self.requestHandler.write(str(response.error))
             self.requestHandler.finish()
         else:
-            for name, value in response.headers.iteritems():
+            for name, value in response.headers.items():
                 logging.debug('header: '+name+':'+value)
                 self.requestHandler.set_header(name, value)
             self.requestHandler.set_header('Access-Control-Allow-Origin', '*')
diff --git a/src/main/python/plugins/slcp/stats/Writer.py b/src/main/python/plugins/slcp/stats/Writer.py
index f7675af..bbf1edf 100644
--- a/src/main/python/plugins/slcp/stats/Writer.py
+++ b/src/main/python/plugins/slcp/stats/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
 from edge.response.solrjsontemplateresponse import SolrJsonTemplateResponse
@@ -28,10 +28,10 @@
         start = '*'
         end = '*'
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
-                    queries.append(urllib.quote(value))
+                    queries.append(urllib.parse.quote(value))
                 elif key == 'ds':
                     filterQueries.append('datasetShortName:%s' % value)
                 elif key == 'startTime':
diff --git a/src/main/python/plugins/slcp/suggest/Writer.py b/src/main/python/plugins/slcp/suggest/Writer.py
index 5fec63d..fe0a4e1 100644
--- a/src/main/python/plugins/slcp/suggest/Writer.py
+++ b/src/main/python/plugins/slcp/suggest/Writer.py
@@ -1,5 +1,5 @@
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.writer.proxywriter import ProxyWriter
 
@@ -15,7 +15,7 @@
         #parameters['suggest.build'] = 'true'
         try:
             parameters['suggest.q'] = requestHandler.get_argument('keyword')
-            url += '/suggest?' + urllib.urlencode(parameters)
+            url += '/suggest?' + urllib.parse.urlencode(parameters)
         except:
             raise Exception('Missing keyword parameter.')
         logging.debug("proxy to url : " + url)
diff --git a/src/main/python/plugins/slcp/umm-json/Writer.py b/src/main/python/plugins/slcp/umm-json/Writer.py
index 388e610..f10275d 100644
--- a/src/main/python/plugins/slcp/umm-json/Writer.py
+++ b/src/main/python/plugins/slcp/umm-json/Writer.py
@@ -24,7 +24,7 @@
     def _constructSolrQuery(self, startIndex, entriesPerPage, parameters, facets):
         queries = []
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if key == 'id':
                 queries.append('id:' + self._urlEncodeSolrQueryValue(value))
             elif key == 'slcpShortName':
diff --git a/src/main/python/plugins/spurs/json/Writer.py b/src/main/python/plugins/spurs/json/Writer.py
index b86096a..98e09ba 100644
--- a/src/main/python/plugins/spurs/json/Writer.py
+++ b/src/main/python/plugins/spurs/json/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import json
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
@@ -27,17 +27,17 @@
         variable = json.loads(self._configuration.get('solr', 'variable'))
 
         # if no QC flag is given, default to only good
-        if not "qualityFlag" in parameters.keys():
+        if not "qualityFlag" in list(parameters.keys()):
             parameters['qualityFlag'] = 1
 
         queries = []
         filterQueries = []
         sort = None
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
-                    queries.append(urllib.quote(value))
+                    queries.append(urllib.parse.quote(value))
                 elif key == 'startTime':
                     filterQueries.append('time:['+value+'%20TO%20*]')
                 elif key == 'endTime':
diff --git a/src/main/python/plugins/spurs2/json/Writer.py b/src/main/python/plugins/spurs2/json/Writer.py
index b86096a..98e09ba 100644
--- a/src/main/python/plugins/spurs2/json/Writer.py
+++ b/src/main/python/plugins/spurs2/json/Writer.py
@@ -1,7 +1,7 @@
 import logging
 import os
 import os.path
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import json
 
 from edge.writer.solrtemplateresponsewriter import SolrTemplateResponseWriter
@@ -27,17 +27,17 @@
         variable = json.loads(self._configuration.get('solr', 'variable'))
 
         # if no QC flag is given, default to only good
-        if not "qualityFlag" in parameters.keys():
+        if not "qualityFlag" in list(parameters.keys()):
             parameters['qualityFlag'] = 1
 
         queries = []
         filterQueries = []
         sort = None
 
-        for key, value in parameters.iteritems():
+        for key, value in parameters.items():
             if value != "":
                 if key == 'keyword':
-                    queries.append(urllib.quote(value))
+                    queries.append(urllib.parse.quote(value))
                 elif key == 'startTime':
                     filterQueries.append('time:['+value+'%20TO%20*]')
                 elif key == 'endTime':
diff --git a/src/main/python/plugins/tie/collection/Writer.py b/src/main/python/plugins/tie/collection/Writer.py
index bad8827..e579a02 100644
--- a/src/main/python/plugins/tie/collection/Writer.py
+++ b/src/main/python/plugins/tie/collection/Writer.py
@@ -1,5 +1,5 @@
 import logging
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
 from edge.dateutility import DateUtility
 from edge.writer.proxywriter import ProxyWriter
@@ -49,6 +49,6 @@
         end = DateUtility.convertISOToUTCTimestamp(end) + 999
         parameters['fq'].append('time:[' + str(start) + ' TO ' + str(end) + ']')
 
-        url += '/select?' + urllib.urlencode(parameters, True)
+        url += '/select?' + urllib.parse.urlencode(parameters, True)
         logging.debug("proxy to url : " + url)
         return url
diff --git a/src/main/python/requestresponder.py b/src/main/python/requestresponder.py
index 5d27c48..97cad21 100644
--- a/src/main/python/requestresponder.py
+++ b/src/main/python/requestresponder.py
@@ -1,10 +1,10 @@
 import logging
-import ConfigParser
+import configparser
 
 class RequestResponder(object):
     def __init__(self, configFilePath):
         #logging.debug('config: '+configFilePath)
-        self._configuration = ConfigParser.RawConfigParser()
+        self._configuration = configparser.RawConfigParser()
         self._configuration.read(configFilePath)
         self.requestHandler = None
 
diff --git a/src/main/python/server.py b/src/main/python/server.py
index 409593f..d706c8f 100644
--- a/src/main/python/server.py
+++ b/src/main/python/server.py
@@ -5,7 +5,7 @@
 import logging
 import logging.config
 import os
-import ConfigParser
+import configparser
 import socket
 
 import pluginhandler
@@ -53,7 +53,7 @@
     #logging.basicConfig(filename="log.txt",level=logging.DEBUG)
     logging.config.fileConfig(r'./logging.conf')
 
-    configuration = ConfigParser.RawConfigParser()
+    configuration = configparser.RawConfigParser()
     configuration.read(r'./config.conf')
 
     settings = dict(static_path=os.path.join(os.path.dirname(__file__), "static"), static_url_prefix="/static/", template_path=os.path.join(os.path.dirname(__file__), "templates"))