diff --git a/.travis.yml b/.travis.yml
index 88fb87eb..c3d18fe4 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,6 +1,7 @@
 language: python
 python:
-  - "2.7"
+  - "2.7"
+  - "3.6"
 addons:
   apt:
     packages:
diff --git a/mx.py b/mx.py
index c062688e..1febc218 100755
--- a/mx.py
+++ b/mx.py
@@ -43,14 +43,15 @@
     from defusedxml.ElementTree import parse as etreeParse
 except ImportError:
     from xml.etree.ElementTree import parse as etreeParse
-
-import os, errno, time, subprocess, shlex, types, StringIO, zipfile, signal, tempfile, platform
-import __builtin__
+import os, errno, time, subprocess, shlex, zipfile, signal, tempfile, platform
+from mx_portable import StringIO, builtins, urllib_request, urllib_error, urllib_parse
+from mx_portable import _raw_input, _cmp, _unicode, _long, _basestring, _filter, _py3_decode, _func_code, _check_output
 import textwrap
 import socket
 import tarfile, gzip
 import hashlib
 import itertools
+from functools import cmp_to_key
 # TODO use defusedexpat?
 import xml.parsers.expat, xml.sax.saxutils, xml.dom.minidom
 from xml.dom.minidom import parseString as minidomParseString
@@ -58,7 +59,6 @@
 import pipes
 import difflib
 import glob
-import urllib2, urlparse
 import filecmp
 import json
 from collections import OrderedDict, namedtuple, deque
@@ -93,10 +93,10 @@ def update_commands(suite, new_commands):
     if any of the format args are instances of callable, then they are called
     with an 'env' arg before being used in the call to str.format().
     """
-    suite_name = suite if isinstance(suite, basestring) else suite.name
+    suite_name = suite if isinstance(suite, _basestring) else suite.name
     _length_of_command = 4
-    for command_name, command_list in new_commands.iteritems():
+    for command_name, command_list in new_commands.items():
         assert len(command_list) > 0 and command_list[0] is not None
         args = [suite_name, command_name] + command_list[1:_length_of_command]
         command_decorator = command(*args)
@@ -199,6 +199,26 @@ def no_suite_discovery(func):
 except ImportError:
     pass
 
+class Comparable(object):
+    def _checked_cmp(self, other, f):
+        compar = self.__cmp__(other) #pylint: disable=assignment-from-no-return
+        return f(compar, 0) if compar is not NotImplemented else _cmp(id(self), id(other))
+
+    def __lt__(self, other):
+        return self._checked_cmp(other, lambda a, b: a < b)
+    def __gt__(self, other):
+        return self._checked_cmp(other, lambda a, b: a > b)
+    def __eq__(self, other):
+        return self._checked_cmp(other, lambda a, b: a == b)
+    def __le__(self, other):
+        return self._checked_cmp(other, lambda a, b: a <= b)
+    def __ge__(self, other):
+        return self._checked_cmp(other, lambda a, b: a >= b)
+    def __ne__(self, other):
+        return self._checked_cmp(other, lambda a, b: a != b)
+
+    def __cmp__(self, other): # to override
+        raise TypeError("No override for compare")
 
 class DynamicVar(object):
     def __init__(self, initial_value):
@@ -241,20 +261,6 @@ def relpath_or_absolute(path, start, prefix=""):
         raise ValueError('can not find a relative path to dependency and path is not absolute: ' + path)
     return path
 
-# Support for Python 2.6
-def check_output(*popenargs, **kwargs):
-    process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
-    output, _ = process.communicate()
-    retcode = process.poll()
-    if retcode:
-        cmd = kwargs.get("args")
-        if cmd is None:
-            cmd = popenargs[0]
-        error = subprocess.CalledProcessError(retcode, cmd)
-        error.output = output
-        raise error
-    return output
-
 def cpu_count():
     cpus = multiprocessing.cpu_count()
     if _opts.cpu_count:
@@ -262,9 +268,6 @@
     else:
         return cpus
 
-try: subprocess.check_output
-except: subprocess.check_output = check_output
-
 try: zipfile.ZipFile.__enter__
 except: zipfile.ZipFile.__enter__ = lambda self: self
@@ -393,7 +396,7 @@ def path_len(self):
         return 1 + self.prev.path_len() if self.prev else 0
 
-class SuiteConstituent(object):
+class SuiteConstituent(Comparable):
     __metaclass__ = ABCMeta
 
     def __init__(self, suite, name):
@@ -445,7 +448,7 @@ def _comparison_key(self):
     def __cmp__(self, other):
         if not isinstance(other, self.__class__):
             return NotImplemented
-        return cmp(self._comparison_key(), other._comparison_key())
+        return _cmp(self._comparison_key(), other._comparison_key())
 
     def __eq__(self, other):
         if not isinstance(other, self.__class__):
@@ -1160,7 +1163,7 @@ def needsUpdate(self, newestInput):
     it will be used instead of the usual platform suffix (provided by platformName()).
     """
     def maven_artifact_id(self, platform=None):
-        if hasattr(self, 'maven') and isinstance(self.maven, types.DictType):
+        if hasattr(self, 'maven') and isinstance(self.maven, dict):
            artifact_id = self.maven.get('artifactId', None)
            if artifact_id:
                return artifact_id
@@ -1170,7 +1173,7 @@ def maven_artifact_id(self, platform=None):
     Provide maven groupId string for distribution.
     """
     def maven_group_id(self):
-        if hasattr(self, 'maven') and isinstance(self.maven, types.DictType):
+        if hasattr(self, 'maven') and isinstance(self.maven, dict):
            group_id = self.maven.get('groupId', None)
            if group_id:
                return group_id
@@ -1475,7 +1478,7 @@ def addFromJAR(jarPath):
                 if arcname.startswith('META-INF/services/') and not arcname == 'META-INF/services/':
                     service = arcname[len('META-INF/services/'):]
                     assert '/' not in service
-                    services.setdefault(service, []).extend(source_zf.read(arcname).splitlines())
+                    services.setdefault(service, []).extend(_py3_decode(source_zf.read(arcname)).splitlines())
                 else:
                     with ArchiveWriteGuard(self.original_path(), arc.zf, arcname, jarPath + '!' + arcname, source_zf=source_zf) as guard:
                         if guard:
@@ -1662,7 +1665,7 @@ def overlay_check(arcname):
         # accumulate services
         services_versions = sorted([v for v in services if isinstance(v, int)])
         if services_versions:
-            acummulated_services = {n: set(p) for n, p in services.items() if isinstance(n, basestring)}
+            acummulated_services = {n: set(p) for n, p in services.items() if isinstance(n, _basestring)}
             for v in services_versions:
                 for service, providers in services[v].items():
                     providers_set = frozenset(providers)
@@ -1678,10 +1681,10 @@ def add_service_providers(service, providers, archive_prefix=''):
             # Convert providers to a set before printing to remove duplicates
             arc.zf.writestr(arcname, '\n'.join(frozenset(providers)) + '\n')
 
-        for service_or_version, providers in services.iteritems():
+        for service_or_version, providers in services.items():
             if isinstance(service_or_version, int):
                 services_version = service_or_version
-                for service, providers_ in providers.iteritems():
+                for service, providers_ in providers.items():
                     add_service_providers(service, providers_, 'META-INF/_versions/' + str(services_version) + '/')
             else:
                 add_service_providers(service_or_version, providers)
@@ -1878,7 +1881,7 @@ def __addsrc__(self, arcname, contents):
         return False
 
     def __closing__(self):
-        for filename, content in self.meta_files.iteritems():
+        for filename, content in self.meta_files.items():
             if content is not None:
                 self.arc.zf.writestr(filename, content)
@@ -2237,7 +2240,7 @@ def _extract_deps(layout, suite, distribution_name):
     @staticmethod
     def _as_source_dict(source, distribution_name, destination, path_substitutions=None, string_substitutions=None, distribution_object=None, context=None):
-        if isinstance(source, basestring):
+        if isinstance(source, _basestring):
             if ':' not in source:
                 abort("Invalid source '{}' in layout for '{}': should be of the form '<type>:<path>'\n"
                       "Type could be `file`, `string`, `link`, `dependency` or `extracted-dependency`.".format(source, distribution_name), context=context)
@@ -2277,7 +2280,7 @@ def _as_source_dict(source, distribution_name, destination, path_substitutions=N
         elif source_type == 'string':
             source_dict['_str_'] = "string:" + source_dict['value']
         if 'exclude' in source_dict:
-            if isinstance(source_dict['exclude'], basestring):
+            if isinstance(source_dict['exclude'], _basestring):
                 source_dict['exclude'] = [source_dict['exclude']]
         if path_substitutions and source_dict.get("path"):
             source_dict["path"] = mx_subst.as_engine(path_substitutions).substitute(source_dict["path"], distribution=distribution_object)
@@ -2473,7 +2476,7 @@ def _filter_archive_name(name):
                     archiver.add(extracted_file, arcname, provenance)
                 if tarinfo.isdir():
                     # use a safe mode while extracting, fix later
-                    os.chmod(extracted_file, 0700)
+                    os.chmod(extracted_file, 0o700)
                     directories.append(tarinfo)
 
         # Reverse sort directories.
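[porting note] `ZipFile.read()` and the other byte-oriented APIs touched above return `str` on Python 2 but `bytes` on Python 3, so the patch funnels such values through `_py3_decode` before applying string methods like `splitlines()`. The `mx_portable` module is not part of this diff, so the following is only a sketch of what that helper presumably looks like:

    # Sketch of mx_portable._py3_decode (an assumption; mx_portable is not shown in this patch).
    import sys

    def _py3_decode(data, encoding='utf-8'):
        # Python 2: zipfile/subprocess already hand back str; pass it through unchanged.
        if sys.version_info[0] < 3:
            return data
        # Python 3: the same calls return bytes, which must be decoded before
        # str operations such as splitlines() or startswith().
        return data.decode(encoding) if isinstance(data, bytes) else data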
@@ -2531,14 +2534,14 @@ def _rel_arcname(_source_file):
     def _verify_layout(self):
         output = realpath(self.get_output())
         for destination, sources in self.layout.items():
-            if not isinstance(destination, basestring):
+            if not isinstance(destination, _basestring):
                 abort("Destination (layout keys) should be a string", context=self)
             if not isinstance(sources, list):
                 sources = [sources]
             if not destination:
                 abort("Destination (layout keys) can not be empty", context=self)
             for source in sources:
-                if not isinstance(source, (basestring, dict)):
+                if not isinstance(source, (_basestring, dict)):
                     abort("Error in '{}': sources should be strings or dicts".format(destination), context=self)
             if isabs(destination):
                 abort("Invalid destination: '{}': destination should not be absolute".format(destination), context=self)
@@ -2662,11 +2665,11 @@ def find_source_location(self, source, fatal_if_missing=True):
         return self._source_location_cache[source]
 
-class LayoutTARDistribution(LayoutDistribution, AbstractTARDistribution):
+class LayoutTARDistribution(LayoutDistribution, AbstractTARDistribution): #pylint: disable=too-many-ancestors
     pass
 
-class LayoutJARDistribution(LayoutDistribution, AbstractJARDistribution):
+class LayoutJARDistribution(LayoutDistribution, AbstractJARDistribution): #pylint: disable=too-many-ancestors
     def __init__(self, *args, **kw_args):
         # we have *args here because some subclasses in suites have been written passing positional args to
         # LayoutDistribution.__init__ instead of keyword args. We just forward it as-is to super(), it's risky but better
@@ -2786,7 +2789,7 @@ def canonical_deps(self):
         distances = dict()
         result = set()
         self._compute_max_dep_distances(self, distances, 0)
-        for n, d in distances.iteritems():
+        for n, d in distances.items():
             assert d > 0 or n is self
             if d == 1:
                 result.add(n)
@@ -3053,14 +3056,14 @@ def get_javac_lint_overrides(self):
         return self._javac_lint_overrides
 
     def eclipse_config_up_to_date(self, configZip):
-        for _, sources in self.eclipse_settings_sources().iteritems():
+        for _, sources in self.eclipse_settings_sources().items():
             for source in sources:
                 if configZip.isOlderThan(source):
                     return False
         return True
 
     def netbeans_config_up_to_date(self, configZip):
-        for _, sources in self.netbeans_settings_sources().iteritems():
+        for _, sources in self.netbeans_settings_sources().items():
             for source in sources:
                 if configZip.isOlderThan(source):
                     return False
@@ -3068,10 +3071,10 @@ def netbeans_config_up_to_date(self, configZip):
         if configZip.isOlderThan(join(self.dir, 'build.xml')):
             return False
 
-        if configZip.isOlderThan(join(self.dir, 'nbproject/project.xml')):
+        if configZip.isOlderThan(join(self.dir, 'nbproject', 'project.xml')):
             return False
 
-        if configZip.isOlderThan(join(self.dir, 'nbproject/project.properties')):
+        if configZip.isOlderThan(join(self.dir, 'nbproject', 'project.properties')):
             return False
         return True
@@ -3406,7 +3409,7 @@ def _find_version_base_project():
         if exists(self_package_src_dir):
             assert len(base.source_dirs()) != 0, '{} has no source directories!'.format(base)
             for base_package_src_dir in [join(s, relative_package_src_dir) for s in base.source_dirs()]:
-                if exists(base_package_src_dir) or not flatten_map.has_key(self_package_src_dir):
+                if exists(base_package_src_dir) or self_package_src_dir not in flatten_map:
                     flatten_map[self_package_src_dir] = base_package_src_dir
     assert len(self_packages) == len(flatten_map), 'could not find sources for all packages in ' + self.name
     return flatten_map
@@ -3437,7 +3440,7 @@ def get_concealed_imported_packages(self, jdk=None, modulepath=None):
         def visit(dep, edge):
             if dep is not self and dep.isJavaProject():
                 dep_concealed = dep.get_concealed_imported_packages(jdk=jdk, modulepath=modulepath)
-                for module, packages in dep_concealed.iteritems():
+                for module, packages in dep_concealed.items():
                     concealed.setdefault(module, set()).update(packages)
         self.walk_deps(visit=visit)
@@ -3911,7 +3914,7 @@ def addExportArgs(dep, exports=None, prefix='', jdk=None, observable_modules=Non
         :param JDKConfig jdk: the JDK to be searched for concealed packages
         :param observable_modules: only consider modules in this set if not None
         """
-        for module, packages in dep.get_concealed_imported_packages(jdk).iteritems():
+        for module, packages in dep.get_concealed_imported_packages(jdk).items():
             if observable_modules is not None and module not in observable_modules:
                 continue
             if module in jdk_modules_overridden_on_classpath:
@@ -3935,7 +3938,7 @@ def addRootModules(exports, prefix):
         have been added to `javacArgs`
         """
         if exports:
-            javacArgs.append(prefix + '--add-modules=' + ','.join(exports.iterkeys()))
+            javacArgs.append(prefix + '--add-modules=' + ','.join(exports.keys()))
 
     if compliance >= '9':
         exports = {}
@@ -4039,6 +4042,7 @@ def __init__(self, jdk, jvmArgs, mainClass, toolJar, buildArgs=None):
         pout = []
         def redirect(stream):
             for line in iter(stream.readline, ''):
+                line = _py3_decode(line)
                 pout.append(line)
                 self._noticePort(line)
             stream.close()
@@ -4088,7 +4092,7 @@ def compile(self, compilerArgs):
         commandLine = u'\x00'.join(compilerArgs)
         s.send((commandLine + '\n').encode('utf-8'))
         f = s.makefile()
-        response = f.readline().decode('utf-8')
+        response = _unicode(f.readline())
         if response == '':
             # Compiler server process probably crashed
             logv('[Compiler daemon process appears to have crashed]')
@@ -4467,7 +4471,7 @@ def _get_path_in_cache(name, sha1, urls, ext=None, sources=False, oldPath=False)
     if ext is None:
         for url in urls:
             # Use extension of first URL whose path component ends with a non-empty extension
-            o = urlparse.urlparse(url)
+            o = urllib_parse.urlparse(url)
             if o.path == "/remotecontent" and o.query.startswith("filepath"):
                 path = o.query
             else:
@@ -4503,8 +4507,8 @@ def on_timeout():
     while True:
         try:
-            return urllib2.urlopen(*args, **kwargs)
-        except (urllib2.HTTPError) as e:
+            return urllib_request.urlopen(*args, **kwargs)
+        except (urllib_error.HTTPError) as e:
             if e.code == 500:
                 if error500_attempts < error500_limit:
                     error500_attempts += 1
@@ -4513,12 +4517,12 @@ def on_timeout():
                     time.sleep(0.2)
                     continue
             raise
-        except urllib2.URLError as e:
+        except urllib_error.URLError as e:
             if isinstance(e.reason, socket.error):
                 if e.reason.errno == errno.EINTR and 'timeout' in kwargs and is_interactive():
                     warn("urlopen() failed with EINTR. Retrying without timeout.")
                     del kwargs['timeout']
-                    return urllib2.urlopen(*args, **kwargs)
+                    return urllib_request.urlopen(*args, **kwargs)
                 if e.reason.errno == errno.EINPROGRESS:
                     if on_timeout():
                         continue
@@ -5706,7 +5710,7 @@ def check(self, abortOnError=True):
     def check_for_hg(self, abortOnError=True):
         if HgConfig.has_hg is None:
             try:
-                subprocess.check_output(['hg'])
+                _check_output(['hg'])
                 HgConfig.has_hg = True
             except OSError:
                 HgConfig.has_hg = False
@@ -5755,7 +5759,7 @@ def tip(self, vcdir, abortOnError=True):
         self.check_for_hg()
         # We don't use run because this can be called very early before _opts is set
         try:
-            return subprocess.check_output(['hg', 'tip', '-R', vcdir, '--template', '{node}'])
+            return _check_output(['hg', 'tip', '-R', vcdir, '--template', '{node}'])
         except subprocess.CalledProcessError:
             if abortOnError:
                 abort('hg tip failed')
@@ -5766,7 +5770,7 @@ def parent(self, vcdir, abortOnError=True):
         self.check_for_hg()
         # We don't use run because this can be called very early before _opts is set
         try:
-            out = subprocess.check_output(['hg', '-R', vcdir, 'parents', '--template', '{node}\n'])
+            out = _check_output(['hg', '-R', vcdir, 'parents', '--template', '{node}\n'])
             parents = out.rstrip('\n').split('\n')
             if len(parents) != 1:
                 if abortOnError:
@@ -5793,9 +5797,9 @@ def parent_info(self, vcdir, abortOnError=True):
     def release_version_from_tags(self, vcdir, prefix, snapshotSuffix='dev', abortOnError=True):
         prefix = prefix + '-'
         try:
-            tagged_ids_out = subprocess.check_output(['hg', '-R', vcdir, 'log', '--rev', 'ancestors(.) and tag()', '--template', '{tags},{rev}\n'])
+            tagged_ids_out = _check_output(['hg', '-R', vcdir, 'log', '--rev', 'ancestors(.) and tag()', '--template', '{tags},{rev}\n'])
             tagged_ids = [x.split(',') for x in tagged_ids_out.split('\n') if x]
-            current_id = subprocess.check_output(['hg', '-R', vcdir, 'log', '--template', '{rev}\n', '--rev', '.']).strip()
+            current_id = _check_output(['hg', '-R', vcdir, 'log', '--template', '{rev}\n', '--rev', '.']).strip()
         except subprocess.CalledProcessError as e:
             if abortOnError:
                 abort('hg tags or hg tip failed: ' + str(e))
@@ -5820,7 +5824,7 @@ def first(it):
     def parent_tags(self, vcdir):
         try:
-            _tags = subprocess.check_output(['hg', '-R', vcdir, 'log', '--template', '{tags}', '--rev', '.']).strip().split(' ')
+            _tags = _check_output(['hg', '-R', vcdir, 'log', '--template', '{tags}', '--rev', '.']).strip().split(' ')
             return [tag for tag in _tags if tag != 'tip']
         except subprocess.CalledProcessError as e:
             abort('hg log failed: ' + str(e))
@@ -5960,7 +5964,7 @@ def locate(self, vcdir, patterns=None, abortOnError=True):
     def isDirty(self, vcdir, abortOnError=True):
         self.check_for_hg()
         try:
-            return len(subprocess.check_output(['hg', 'status', '-q', '-R', vcdir])) > 0
+            return len(_check_output(['hg', 'status', '-q', '-R', vcdir])) > 0
         except subprocess.CalledProcessError:
             if abortOnError:
                 abort('failed to get status')
@@ -5984,7 +5988,7 @@ def latest(self, vcdir, rev1, rev2, abortOnError=True):
         revs = [rev1, rev2]
         revsetIntersectAncestors = ' or '.join(('ancestors({})'.format(rev) for rev in revs))
         revset = 'heads({})'.format(revsetIntersectAncestors)
-        out = subprocess.check_output(['hg', '-R', vcdir, 'log', '-r', revset, '--template', '{node}\n'])
+        out = _check_output(['hg', '-R', vcdir, 'log', '-r', revset, '--template', '{node}\n'])
         parents = out.rstrip('\n').split('\n')
         if len(parents) != 1:
             if abortOnError:
@@ -6001,7 +6005,7 @@ def exists(self, vcdir, rev):
         self.check_for_hg()
         try:
             sentinel = 'exists'
-            out = subprocess.check_output(['hg', '-R', vcdir, 'log', '-r', 'present({})'.format(rev), '--template', sentinel])
+            out = _check_output(['hg', '-R', vcdir, 'log', '-r', 'present({})'.format(rev), '--template', sentinel])
             return sentinel in out
         except subprocess.CalledProcessError:
             abort('exists failed')
@@ -6010,7 +6014,7 @@ def root(self, directory, abortOnError=True):
         metadata = VC._find_metadata_dir(directory, '.hg')
         if metadata:
             try:
-                out = subprocess.check_output(['hg', 'root'], cwd=directory, stderr=subprocess.STDOUT)
+                out = _check_output(['hg', 'root'], cwd=directory, stderr=subprocess.STDOUT)
                 return out.strip()
             except subprocess.CalledProcessError:
                 if abortOnError:
@@ -6043,7 +6047,7 @@ def check(self, abortOnError=True):
     def check_for_git(self, abortOnError=True):
         if GitConfig.has_git is None:
             try:
-                subprocess.check_output(['git', '--version'])
+                _check_output(['git', '--version'])
                 GitConfig.has_git = True
             except OSError:
                 GitConfig.has_git = False
@@ -6105,7 +6109,7 @@ def tip(self, vcdir, abortOnError=True):
         self.check_for_git()
         # We don't use run because this can be called very early before _opts is set
         try:
-            return subprocess.check_output(['git', 'rev-list', 'HEAD', '-1'], cwd=vcdir)
+            return _check_output(['git', 'rev-list', 'HEAD', '-1'], cwd=vcdir)
         except subprocess.CalledProcessError:
             if abortOnError:
                 abort('git rev-list HEAD failed')
@@ -6129,7 +6133,7 @@ def parent(self, vcdir, abortOnError=True):
                 abort('More than one parent exist during merge')
             return None
         try:
-            out = subprocess.check_output(['git', 'show', '--pretty=format:%H', "-s", 'HEAD'], cwd=vcdir)
+            out = _check_output(['git', 'show', '--pretty=format:%H', "-s", 'HEAD'], cwd=vcdir)
             return out.strip()
         except subprocess.CalledProcessError:
             if abortOnError:
@@ -6159,7 +6163,7 @@ def _tags(self, vcdir, prefix, abortOnError=True):
         """
         _tags_prefix = 'tag: '
         try:
-            tags_out = subprocess.check_output(['git', 'log', '--simplify-by-decoration', '--pretty=format:%d', 'HEAD'], cwd=vcdir)
+            tags_out = _check_output(['git', 'log', '--simplify-by-decoration', '--pretty=format:%d', 'HEAD'], cwd=vcdir)
             tags_out = tags_out.strip()
             tags = []
             for line in tags_out.split('\n'):
@@ -6190,7 +6194,7 @@ def _commitish_revision(self, vcdir, commitish, abortOnError=True):
         try:
             if not commitish.endswith('^{commit}'):
                 commitish += '^{commit}'
-            rev = subprocess.check_output(['git', 'show', '-s', '--format=%H', commitish], cwd=vcdir)
+            rev = _check_output(['git', 'show', '-s', '--format=%H', commitish], cwd=vcdir)
             res = rev.strip()
             assert re.match(r'[0-9a-f]{40}', res) is not None, 'output is not a commit hash: ' + res
             return res
@@ -6232,7 +6236,7 @@ def release_version_from_tags(self, vcdir, prefix, snapshotSuffix='dev', abortOn
     def parent_tags(self, vcdir):
         try:
-            return subprocess.check_output(['git', 'tag', '--list', '--points-at', 'HEAD'], cwd=vcdir).strip().split('\r\n')
+            return _check_output(['git', 'tag', '--list', '--points-at', 'HEAD'], cwd=vcdir).strip().split('\r\n')
         except subprocess.CalledProcessError as e:
             abort('git tag failed: ' + str(e))
@@ -6278,7 +6282,7 @@ def get_matching_branches(cls, repository, brefs, vcdir=None):
         result = dict()
         try:
             head_ref_prefix_length = len(cls._head_to_ref(''))
-            for line in subprocess.check_output(command).splitlines():
+            for line in _check_output(command).splitlines():
                 commit_id, branch_name = line.split('\t')
                 result[branch_name[head_ref_prefix_length:]] = commit_id
         except subprocess.CalledProcessError:
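[porting note] Every direct `subprocess.check_output` call in the Mercurial and Git helpers is rewritten to `_check_output`. On Python 3, `check_output` returns `bytes`, while these call sites immediately apply `str` operations such as `.strip()` and `.split('\n')`. A wrapper along these lines would explain the substitution (an assumption, since `mx_portable` is not included in this diff):

    # Plausible definition of mx_portable._check_output (an assumption).
    import subprocess
    import sys

    def _check_output(*args, **kwargs):
        output = subprocess.check_output(*args, **kwargs)
        # Decode the Python 3 bytes result so callers can keep treating
        # command output as text on both interpreters.
        if sys.version_info[0] >= 3 and isinstance(output, bytes):
            output = output.decode('utf-8')
        return output

With that in place, a call such as `_check_output(['git', 'rev-parse', '--show-toplevel'], cwd=directory).strip()` behaves identically under Python 2 and 3.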
@@ -6677,7 +6681,7 @@ def isDirty(self, vcdir, abortOnError=True):
         """
         self.check_for_git()
         try:
-            output = subprocess.check_output(['git', 'status', '--porcelain', '--untracked-files=no'], cwd=vcdir)
+            output = _check_output(['git', 'status', '--porcelain', '--untracked-files=no'], cwd=vcdir)
             return len(output.strip()) > 0
         except subprocess.CalledProcessError:
             if abortOnError:
@@ -6723,7 +6727,7 @@ def latest(self, vcdir, rev1, rev2, abortOnError=True):
         """
         self.check_for_git()
         try:
-            out = subprocess.check_output(['git', 'rev-list', '-n', '1', '--date-order', rev1, rev2], cwd=vcdir)
+            out = _check_output(['git', 'rev-list', '-n', '1', '--date-order', rev1, rev2], cwd=vcdir)
             changesets = out.strip().split('\n')
             if len(changesets) != 1:
                 if abortOnError:
@@ -6747,7 +6751,7 @@ def exists(self, vcdir, rev):
         """
         self.check_for_git()
         try:
-            subprocess.check_output(['git', 'cat-file', '-e', rev], cwd=vcdir)
+            _check_output(['git', 'cat-file', '-e', rev], cwd=vcdir)
             return True
         except subprocess.CalledProcessError:
             return False
@@ -6756,7 +6760,7 @@ def root(self, directory, abortOnError=True):
         if VC._find_metadata_dir(directory, '.git'):
             if self.check_for_git(abortOnError=abortOnError):
                 try:
-                    out = subprocess.check_output(['git', 'rev-parse', '--show-toplevel'], cwd=directory, stderr=subprocess.STDOUT)
+                    out = _check_output(['git', 'rev-parse', '--show-toplevel'], cwd=directory, stderr=subprocess.STDOUT)
                     return out.strip()
                 except subprocess.CalledProcessError:
                     if abortOnError:
@@ -6980,10 +6984,10 @@ def update_to_branch(self, vcdir, branch, abortOnError=True):
 def _hashFromUrl(url):
     logvv('Retrieving SHA1 from {}'.format(url))
-    hashFile = urllib2.urlopen(url)
+    hashFile = urllib_request.urlopen(url)
     try:
         return hashFile.read()
-    except urllib2.URLError as e:
+    except urllib_error.URLError as e:
         _suggest_http_proxy_error(e)
         abort('Error while retrieving sha1 {}: {}'.format(url, str(e)))
     finally:
@@ -7087,7 +7091,7 @@ def getArtifactVersions(self, groupId, artifactId):
         logv('Retrieving and parsing {0}'.format(metadataUrl))
         try:
             metadataFile = _urlopen(metadataUrl, timeout=10)
-        except urllib2.HTTPError as e:
+        except urllib_error.HTTPError as e:
             _suggest_http_proxy_error(e)
             abort('Error while retrieving metadata for {}:{}: {}'.format(groupId, artifactId, str(e)))
         try:
@@ -7112,7 +7116,7 @@ def getArtifactVersions(self, groupId, artifactId):
                     snapshot_metadataUrl = self.getSnapshotUrl(groupId, artifactId, version_str)
                     try:
                         snapshot_metadataFile = _urlopen(snapshot_metadataUrl, timeout=10)
-                    except urllib2.HTTPError as e:
+                    except urllib_error.HTTPError as e:
                         logv('Version {0} not accessible. Try previous snapshot.'.format(metadataUrl))
                         snapshot_metadataFile = None
@@ -7123,7 +7127,7 @@ def getArtifactVersions(self, groupId, artifactId):
                         break
             return MavenArtifactVersions(latestVersionString, releaseVersionString, versionStrings)
-        except urllib2.URLError as e:
+        except urllib_error.URLError as e:
             abort('Error while retrieving versions for {0}:{1}: {2}'.format(groupId, artifactId, str(e)))
         finally:
             if metadataFile:
@@ -7138,8 +7142,8 @@ def getSnapshot(self, groupId, artifactId, version):
         logv('Retrieving and parsing {0}'.format(metadataUrl))
         try:
             metadataFile = _urlopen(metadataUrl, timeout=10)
-        except urllib2.URLError as e:
-            if isinstance(e, urllib2.HTTPError) and e.code == 404:
+        except urllib_error.URLError as e:
+            if isinstance(e, urllib_error.HTTPError) and e.code == 404:
                 return None
             _suggest_http_proxy_error(e)
             abort('Error while retrieving snapshot for {}:{}:{}: {}'.format(groupId, artifactId, version, str(e)))
@@ -7244,7 +7248,7 @@ def _mavenGroupId(suite):
             return group_id
         name = suite.name
     else:
-        assert isinstance(suite, types.StringTypes)
+        assert isinstance(suite, str)
         name = suite
     return 'com.oracle.' + _map_to_maven_dist_name(name)
@@ -7762,7 +7766,7 @@ def distMatcher(dist):
             return True
         if not dist.isJARDistribution() and not args.all_distribution_types:
             return False
-        return getattr(d, 'maven', False) and not dist.is_test_distribution()
+        return getattr(d, 'maven', False) and not dist.is_test_distribution() #pylint: disable=undefined-variable
 
     has_deployed_dist = False
     for s in _suites:
@@ -7984,7 +7988,7 @@ def set_primary_dir(self, d):
     def importee_dir(self, importer_dir, suite_import, check_alternate=True):
         suitename = suite_import.name
-        if self.suitenamemap.has_key(suitename):
+        if suitename in self.suitenamemap:
             suitename = self.suitenamemap[suitename]
 
         # Try use the URL first so that a big repo is cloned to a local
@@ -7994,7 +7998,7 @@ def importee_dir(self, importer_dir, suite_import, check_alternate=True):
             for urlinfo in suite_import.urlinfos:
                 if urlinfo.abs_kind() == 'source':
                     # 'https://github.com/graalvm/graal.git' -> 'graal'
-                    base, _ = os.path.splitext(basename(urlparse.urlparse(urlinfo.url).path))
+                    base, _ = os.path.splitext(basename(urllib_parse.urlparse(urlinfo.url).path))
                     if base: break
             if base:
                 path = join(SiblingSuiteModel.siblings_dir(importer_dir), base)
@@ -8057,7 +8061,7 @@ def find_suite_dir(self, suite_import):
     def importee_dir(self, importer_dir, suite_import, check_alternate=True):
         suitename = suite_import.name
-        if self.suitenamemap.has_key(suitename):
+        if suitename in self.suitenamemap:
             suitename = self.suitenamemap[suitename]
         if basename(importer_dir) == basename(self._primaryDir):
             # primary is importer
@@ -8202,7 +8206,7 @@ def get_source_urls(source, kind=None):
 def _validate_abolute_url(urlstr, acceptNone=False):
     if urlstr is None:
         return acceptNone
-    url = urlparse.urlsplit(urlstr)
+    url = urllib_parse.urlsplit(urlstr)
     return url.scheme and (url.netloc or url.path)
 
 class SCMMetadata(object):
@@ -8343,17 +8347,17 @@ def _preload_suite_dict(self):
             del sys.path[0]
 
         def expand(value, context):
-            if isinstance(value, types.DictionaryType):
-                for n, v in value.iteritems():
+            if isinstance(value, dict):
+                for n, v in value.items():
                     value[n] = expand(v, context + [n])
-            elif isinstance(value, types.ListType):
+            elif isinstance(value, list):
                 for i in range(len(value)):
                     value[i] = expand(value[i], context + [str(i)])
-            elif isinstance(value, types.StringTypes):
+            elif isinstance(value, str):
                 value = expandvars(value)
                 if '$' in value or '%' in value:
                     abort('value of ' + '.'.join(context) + ' contains an undefined environment variable: ' + value)
-            elif isinstance(value, types.BooleanType):
+            elif isinstance(value, bool):
                 pass
             else:
                 abort('value of ' + '.'.join(context) + ' is of unexpected type ' + str(type(value)))
@@ -8498,8 +8502,8 @@ def _load_suite_dict(self):
         unknown = set(d.keys()) - frozenset(supported)
 
         suiteExtensionAttributePrefix = self.name + ':'
-        suiteSpecific = {n[len(suiteExtensionAttributePrefix):]: d[n] for n in d.iterkeys() if n.startswith(suiteExtensionAttributePrefix) and n != suiteExtensionAttributePrefix}
-        for n, v in suiteSpecific.iteritems():
+        suiteSpecific = {n[len(suiteExtensionAttributePrefix):]: d[n] for n in d.keys() if n.startswith(suiteExtensionAttributePrefix) and n != suiteExtensionAttributePrefix}
+        for n, v in suiteSpecific.items():
             if hasattr(self, n):
                 abort('Cannot override built-in suite attribute "' + n + '"', context=self)
             setattr(self, n, v)
@@ -8604,7 +8608,7 @@ def _load_metadata(self):
             url = scmDict.pop('url', read)
             self.scm = SCMMetadata(url, read, write)
 
-        for name, attrs in sorted(jreLibsMap.iteritems()):
+        for name, attrs in sorted(jreLibsMap.items()):
             jar = attrs.pop('jar')
             # JRE libraries are optional by default
             optional = attrs.pop('optional', 'true') != 'false'
@@ -8612,7 +8616,7 @@ def _load_metadata(self):
             l = JreLibrary(self, name, jar, optional, theLicense, **attrs)
             self.jreLibs.append(l)
 
-        for name, attrs in sorted(jdkLibsMap.iteritems()):
+        for name, attrs in sorted(jdkLibsMap.items()):
             path = attrs.pop('path')
             deps = Suite._pop_list(attrs, 'dependencies', context='jdklibrary ' + name)
             # JRE libraries are optional by default
@@ -8624,7 +8628,7 @@ def _load_metadata(self):
             l = JdkLibrary(self, name, path, deps, optional, theLicense, jdkStandardizedSince=jdkStandardizedSince, **attrs)
             self.jdkLibs.append(l)
 
-        for name, attrs in sorted(importsMap.iteritems()):
+        for name, attrs in sorted(importsMap.items()):
             if name == 'suites':
                 pass
             elif name == 'libraries':
@@ -8749,7 +8753,7 @@ def re_init_imports(self):
         self._init_imports()
 
     def _load_distributions(self, distsMap):
-        for name, attrs in sorted(distsMap.iteritems()):
+        for name, attrs in sorted(distsMap.items()):
             if '<' in name:
                 parameters = re.findall(r'<(.+?)>', name)
                 self.distTemplates.append(DistributionTemplate(self, name, attrs, parameters))
@@ -8795,7 +8799,7 @@ def _load_distribution(self, name, attrs):
         maven = attrs.pop('maven', True)
         stripConfigFileNames = attrs.pop('strip', None)
         assert stripConfigFileNames is None or isinstance(stripConfigFileNames, list)
-        if isinstance(maven, types.DictType) and maven.get('version', None):
+        if isinstance(maven, dict) and maven.get('version', None):
             abort("'version' is not supported in maven specification for distributions")
         if attrs.pop('buildDependencies', None):
             abort("'buildDependencies' is not supported for JAR distributions")
@@ -8842,13 +8846,13 @@ def _pop_os_arch(attrs, context):
     @staticmethod
     def _merge_os_arch_attrs(attrs, os_arch_attrs, context, path=''):
         if os_arch_attrs:
-            for k, v in os_arch_attrs.iteritems():
+            for k, v in os_arch_attrs.items():
                 if k in attrs:
                     other = attrs[k]
                     key_path = path + '.' + str(k)
-                    if isinstance(v, types.DictType) and isinstance(other, types.DictType):
+                    if isinstance(v, dict) and isinstance(other, dict):
                         Suite._merge_os_arch_attrs(other, v, context, key_path)
-                    elif isinstance(v, types.ListType) and isinstance(other, types.ListType):
+                    elif isinstance(v, list) and isinstance(other, list):
                         attrs[k] = v + other
                     else:
                         abort("OS/Arch attribute must not override non-OS/Arch attribute '{}' in {}".format(key_path, context))
@@ -8856,7 +8860,7 @@ def _merge_os_arch_attrs(attrs, os_arch_attrs, context, path=''):
                     attrs[k] = v
 
     def _load_libraries(self, libsMap):
-        for name, attrs in sorted(libsMap.iteritems()):
+        for name, attrs in sorted(libsMap.items()):
             context = 'library ' + name
             attrs.pop('native', False) # TODO use to make non-classpath libraries
             os_arch = Suite._pop_os_arch(attrs, context)
@@ -9240,7 +9244,7 @@ def _load_projects(self):
         """projects are unique to source suites"""
         projsMap = self._check_suiteDict('projects')
 
-        for name, attrs in sorted(projsMap.iteritems()):
+        for name, attrs in sorted(projsMap.items()):
             try:
                 context = 'project ' + name
                 className = attrs.pop('class', None)
@@ -9421,7 +9425,7 @@ def _register_metadata(self):
     @staticmethod
     def _projects_recursive(importing_suite, imported_suite, projects, visitmap):
-        if visitmap.has_key(imported_suite.name):
+        if imported_suite.name in visitmap:
             return
         projects += imported_suite.projects
         visitmap[imported_suite.name] = True
@@ -9627,8 +9631,7 @@ def writexml(self, writer, indent="", addindent="", newl=""):
         writer.write(indent + "<" + self.tagName)
 
         attrs = self._get_attributes()
-        a_names = attrs.keys()
-        a_names.sort()
+        a_names = sorted(attrs.keys())
 
         for a_name in a_names:
             writer.write(" %s=\"" % a_name)
@@ -9689,7 +9692,7 @@ def element(self, tag, attributes=None, data=None):
     def xml(self, indent='', newl='', escape=False, standalone=None):
         assert self.current == self
-        result = self.toprettyxml(indent, newl, encoding="UTF-8")
+        result = _py3_decode(self.toprettyxml(indent, newl, encoding="UTF-8"))
         if not result.startswith('<?xml'):
             result = '<?xml version="1.0" encoding="UTF-8"?>\n' + result
@@ -9742,7 +9745,7 @@ def _cygpathU2W(p):
     """
     if p is None or get_os() != "cygwin":
         return p
-    return subprocess.check_output(['cygpath', '-a', '-w', p]).strip()
+    return _check_output(['cygpath', '-a', '-w', p]).strip()
 
 def _cygpathW2U(p):
     """
@@ -9751,7 +9754,7 @@ def _cygpathW2U(p):
     """
     if p is None or get_os() != "cygwin":
         return p
-    return subprocess.check_output(['cygpath', '-a', '-u', p]).strip()
+    return _check_output(['cygpath', '-a', '-u', p]).strip()
 
 def _separatedCygpathU2W(p):
     """
@@ -9784,7 +9787,7 @@ def get_arch():
     if machine == 'i386' and get_os() == 'darwin':
         try:
             # Support for Snow Leopard and earlier version of MacOSX
-            if subprocess.check_output(['sysctl', '-n', 'hw.cpu64bit_capable']).strip() == '1':
+            if _check_output(['sysctl', '-n', 'hw.cpu64bit_capable']).strip() == '1':
                 return 'amd64'
         except OSError:
             # sysctl is not available
@@ -9853,7 +9856,7 @@ def projects(opt_limit_to_suite=False, limit_to_primary=False):
     Get the list of all loaded projects limited by --suite option if opt_limit_to_suite == True and by primary suite if limit_to_primary == True
     """
-    sortedProjects = sorted((p for p in _projects.itervalues() if not p.suite.internal))
+    sortedProjects = sorted((p for p in _projects.values() if not p.suite.internal))
     if opt_limit_to_suite:
         sortedProjects = _dependencies_opt_limit_to_suites(sortedProjects)
     if limit_to_primary:
@@ -9959,11 +9962,11 @@ def instantiateDistribution(templateName, args, fatalIfMissing=True, context=Non
         abort('Missing parameters while instantiating distribution template ' + t.name + ': ' + ', '.join(missingParams), context=t)
 
     def _patch(v):
-        if isinstance(v, types.StringType):
+        if isinstance(v, str):
             return _patchTemplateString(v, args, context)
-        elif isinstance(v, types.DictType):
+        elif isinstance(v, dict):
             return {kk: _patch(vv) for kk, vv in v.items()}
-        elif isinstance(v, types.ListType):
+        elif isinstance(v, list):
             return [_patch(e) for e in v]
         else:
             return v
@@ -10109,7 +10112,7 @@ def classpath_entries(names=None, includeSelf=True, preferProjects=False, exclud
     if names is None:
         roots = set(dependencies())
     else:
-        if isinstance(names, types.StringTypes):
+        if isinstance(names, str):
             names = [names]
         elif isinstance(names, Dependency):
             names = [names]
@@ -10124,7 +10127,7 @@ def classpath_entries(names=None, includeSelf=True, preferProjects=False, exclud
     if excludes is None:
         excludes = []
     else:
-        if isinstance(excludes, types.StringTypes):
+        if isinstance(excludes, str):
             excludes = [excludes]
         elif isinstance(excludes, Dependency):
             excludes = [excludes]
@@ -10297,10 +10300,10 @@ def dependencies(opt_limit_to_suite=False):
     dependencies during iteration, the behavior of the iterator is undefined.
    If 'types' is not None, only dependencies of a type in 'types
     """
-    it = itertools.chain(_projects.itervalues(), _libs.itervalues(), _dists.itervalues(), _jdkLibs.itervalues(), _jreLibs.itervalues())
+    it = itertools.chain(_projects.values(), _libs.values(), _dists.values(), _jdkLibs.values(), _jreLibs.values())
     if opt_limit_to_suite and _opts.specific_suites:
-        it = itertools.ifilter(lambda d: d.suite.name in _opts.specific_suites, it)
-    itertools.ifilter(lambda d: not d.suite.internal, it)
+        it = _filter(lambda d: d.suite.name in _opts.specific_suites, it)
+    _filter(lambda d: not d.suite.internal, it)
     return it
 
 def defaultDependencies(opt_limit_to_suite=False):
@@ -10360,7 +10363,7 @@ def add_dist(dist):
         if not dist in dists:
             dists.append(dist)
 
-    for d in _dists.itervalues():
+    for d in _dists.values():
         add_dist(d)
     return dists
@@ -10405,7 +10408,7 @@ def extract_VM_args(args, useDoubleDash=False, allowClasspath=False, defaultAllV
 def _format_commands():
     msg = '\navailable commands:\n'
     commands = _mx_commands.commands()
-    sorted_commands = sorted([k for k in commands.iterkeys() if ':' not in k]) + sorted([k for k in commands.iterkeys() if ':' in k])
+    sorted_commands = sorted([k for k in commands.keys() if ':' not in k]) + sorted([k for k in commands.keys() if ':' in k])
     msg += _mx_commands.list_commands(sorted_commands)
     return msg + '\n'
@@ -10590,7 +10593,7 @@ def _parse_cmd_line(self, opts, firstParse):
 """
 A factory for creating JDKConfig objects.
 """
-class JDKFactory(object):
+class JDKFactory:
     def getJDKConfig(self):
         nyi('getJDKConfig', self)
@@ -10638,7 +10641,7 @@ def _getJDKFactory(tag, versionCheck):
     if tag not in _jdkFactories:
         return None
     complianceMap = _jdkFactories[tag]
-    for compliance in sorted(complianceMap.iterkeys(), reverse=True):
+    for compliance in sorted(complianceMap.keys(), reverse=True):
         if not versionCheck or versionCheck(VersionSpec(str(compliance))):
             return complianceMap[compliance]
     return None
@@ -10693,7 +10696,7 @@ def get_jdk_option():
         if len(_jdkFactories) == 0:
             abort("No JDK providers available")
         available = []
-        for t, m in _jdkFactories.iteritems():
+        for t, m in _jdkFactories.items():
             for c in m:
                 available.append('{}:{}'.format(t, c))
         abort("No provider for '{}:{}' JDK (available: {})".format(jdktag, jdkCompliance if jdkCompliance else '*', ', '.join(available)))
@@ -10713,7 +10716,7 @@ def _is_supported_by_jdt(jdk):
     :type jdk: :class:`mx.JDKConfig` or string
     :rtype: bool
     """
-    if isinstance(jdk, basestring):
+    if isinstance(jdk, _basestring):
         jdk = get_jdk(tag=jdk)
     else:
         assert isinstance(jdk, JDKConfig)
@@ -10757,7 +10760,7 @@ def get_jdk(versionCheck=None, purpose=None, cancel=None, versionDescription=Non
         defaultJdk = kwargs['defaultJdk']
 
     # interpret string and compliance as compliance check
-    if isinstance(versionCheck, types.StringTypes):
+    if isinstance(versionCheck, str):
         versionCheck = JavaCompliance(versionCheck)
     if isinstance(versionCheck, JavaCompliance):
         versionCheck, versionDescription = _convert_compliance_to_version_check(versionCheck)
@@ -10901,7 +10904,7 @@ def _compare_configs(c1, c2):
             elif c2 in _extra_java_homes:
                 return -1
         return VersionSpec.__cmp__(c1.version, c2.version)
-    return sorted(unique_configs, cmp=_compare_configs, reverse=True)
+    return sorted(unique_configs, key=cmp_to_key(_compare_configs), reverse=True)
 
 def is_interactive():
     if get_env('CONTINUOUS_INTEGRATION'):
@@ -11052,7 +11055,7 @@ def _waitpid(pid):
     while True:
         try:
             return os.waitpid(pid, os.WNOHANG)
-        except OSError, e:
+        except OSError as e:
             if e.errno == errno.EINTR:
                 continue
             raise
@@ -11190,7 +11193,7 @@ def run_mx(args, suite=None, mxpy=None, nonZeroIsFatal=True, out=None, err=None,
     commands = [sys.executable, '-u', mxpy, '--java-home=' + get_jdk().home]
     cwd = None
     if suite:
-        if isinstance(suite, basestring):
+        if isinstance(suite, _basestring):
             commands += ['-p', suite]
             cwd = suite
         else:
@@ -11230,9 +11233,9 @@ def run(args, nonZeroIsFatal=True, out=None, err=None, cwd=None, timeout=None, e
     out and err if they are callable objects.
     """
-    assert isinstance(args, types.ListType), "'args' must be a list: " + str(args)
+    assert isinstance(args, list), "'args' must be a list: " + str(args)
     for arg in args:
-        assert isinstance(arg, types.StringTypes), 'argument is not a string: ' + str(arg)
+        assert isinstance(arg, str), 'argument is not a string: ' + str(arg)
 
     if env is None:
         env = os.environ.copy()
@@ -11287,8 +11290,8 @@ def run(args, nonZeroIsFatal=True, out=None, err=None, cwd=None, timeout=None, e
         preexec_fn, creationflags = (None, 0)
 
         def redirect(stream, f):
-            for line in iter(stream.readline, ''):
-                f(line)
+            for line in iter(stream.readline, b''):
+                f(_py3_decode(line))
             stream.close()
         stdout = out if not callable(out) else subprocess.PIPE
         stderr = err if not callable(err) else subprocess.PIPE
@@ -11447,7 +11450,7 @@ def write(self, line):
 """
 A JavaCompliance simplifies comparing Java compliance values extracted from a JDK version string.
 """
-class JavaCompliance:
+class JavaCompliance(Comparable):
     def __init__(self, ver):
         ver = str(ver)
         pattern = r'(?:1\.)?(\d+)(.*)'
@@ -11477,15 +11480,15 @@ def __repr__(self):
         return str(self) + '..' + str(self._upper_bound)
 
     def __cmp__(self, other):
-        if isinstance(other, types.StringType):
+        if isinstance(other, str):
             other = JavaCompliance(other)
-        r = cmp(self.value, other.value)
+        r = _cmp(self.value, other.value)
         if r == 0:
             if self._upper_bound is None:
                 return 0 if other._upper_bound is None else 1
             if other._upper_bound is None:
                 return -1
-            r = cmp(self._upper_bound, other._upper_bound)
+            r = _cmp(self._upper_bound, other._upper_bound)
         return r
 
     def __hash__(self):
@@ -11513,7 +11516,7 @@ def _exact_match(self, version):
 """
 A version specification as defined in JSR-56
 """
-class VersionSpec:
+class VersionSpec(Comparable):
     def __init__(self, versionString):
         validChar = r'[\x21-\x25\x27-\x29\x2c\x2f-\x5e\x60-\x7f]'
         separator = r'[.\-_]'
@@ -11530,7 +11533,7 @@ def __str__(self):
         return self.versionString
 
     def __cmp__(self, other):
-        return cmp(self.strippedParts, other.strippedParts)
+        return _cmp(self.strippedParts, other.strippedParts)
 
     def __hash__(self):
         return self.parts.__hash__()
@@ -11562,7 +11565,7 @@ def java_debug_args():
     return debug_args
 
-class JDKConfig:
+class JDKConfig(Comparable):
     """
     A JDKConfig object encapsulates info about an installed or deployed JDK.
     """
@@ -11601,13 +11604,13 @@ def __init__(self, home, tag=None):
 
         # Prepend the -d64 VM option only if the java command supports it
        try:
-            output = subprocess.check_output([self.java, '-d64', '-version'], stderr=subprocess.STDOUT)
+            output = _check_output([self.java, '-d64', '-version'], stderr=subprocess.STDOUT)
            self.java_args = ['-d64'] + self.java_args
        except OSError as e:
            raise JDKConfigException('{}: {}'.format(e.errno, e.strerror))
        except subprocess.CalledProcessError as e:
            try:
-                output = subprocess.check_output([self.java, '-version'], stderr=subprocess.STDOUT)
+                output = _check_output([self.java, '-version'], stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError as e:
                raise JDKConfigException('{}: {}'.format(e.returncode, e.output))
@@ -11641,7 +11644,7 @@ def _init_classpaths(self):
         if not self._classpaths_initialized:
             _, binDir = _compile_mx_class('ClasspathDump', jdk=self)
             if self.javaCompliance <= JavaCompliance('1.8'):
-                self._bootclasspath, self._extdirs, self._endorseddirs = [x if x != 'null' else None for x in subprocess.check_output([self.java, '-cp', _cygpathU2W(binDir), 'ClasspathDump'], stderr=subprocess.PIPE).split('|')]
+                self._bootclasspath, self._extdirs, self._endorseddirs = [x if x != 'null' else None for x in _check_output([self.java, '-cp', _cygpathU2W(binDir), 'ClasspathDump'], stderr=subprocess.PIPE).split('|')]
                 # All 3 system properties accessed by ClasspathDump are expected to exist
                 if not self._bootclasspath or not self._extdirs or not self._endorseddirs:
                     warn("Could not find all classpaths: boot='" + str(self._bootclasspath) + "' extdirs='" + str(self._extdirs) + "' endorseddirs='" + str(self._endorseddirs) + "'")
@@ -11668,13 +11671,13 @@ def __cmp__(self, other):
         if other is None:
             return False
         if isinstance(other, JDKConfig):
-            compilanceCmp = cmp(self.javaCompliance, other.javaCompliance)
+            compilanceCmp = _cmp(self.javaCompliance, other.javaCompliance)
             if compilanceCmp:
                 return compilanceCmp
-            versionCmp = cmp(self.version, other.version)
+            versionCmp = _cmp(self.version, other.version)
             if versionCmp:
                 return versionCmp
-            return cmp(self.home, other.home)
+            return _cmp(self.home, other.home)
         raise TypeError()
 
     def processArgs(self, args, addDefaultArgs=True):
@@ -11766,7 +11769,7 @@ def getKnownJavacLints(self):
         """
         if self._knownJavacLints is None:
             try:
-                out = subprocess.check_output([self.javac, '-X'], stderr=subprocess.STDOUT)
+                out = _check_output([self.javac, '-X'], stderr=subprocess.STDOUT)
             except subprocess.CalledProcessError as e:
                 if e.output:
                     log(e.output)
@@ -11913,7 +11916,7 @@ def get_root_modules(self):
         for mod in modules:
             # no java.se => add all java.*
             if not mod.name.startswith('java.') or not has_java_dot_se:
-                if any((len(to) == 0 for _, to in mod.exports.iteritems())):
+                if any((len(to) == 0 for _, to in mod.exports.items())):
                     result.append(mod)
         return result
@@ -12048,7 +12051,7 @@ def colorize(msg, color='red', bright=True, stream=sys.stderr):
         return None
     code = _ansi_color_table.get(color, None)
     if code is None:
-        abort('Unsupported color: ' + color + '.\nSupported colors are: ' + ', '.join(_ansi_color_table.iterkeys()))
+        abort('Unsupported color: ' + color + '.\nSupported colors are: ' + ', '.join(_ansi_color_table.keys()))
    if bright:
        code += ';1'
    color_on = '\033[' + code + 'm'
@@ -12130,7 +12133,7 @@ def gmake_cmd():
    if _gmake_cmd == '':
        for a in ['make', 'gmake', 'gnumake']:
            try:
-                output = subprocess.check_output([a, '--version'], stderr=subprocess.STDOUT)
+                output = _check_output([a, '--version'], stderr=subprocess.STDOUT)
                if 'GNU' in output:
                    _gmake_cmd = a
                    break
@@ -12244,12 +12247,12 @@ def _suggest_http_proxy_error(e):
    Displays a message related to http proxies that may explain the reason for the exception `e`.
    """
    proxyVars = ['http_proxy', 'https_proxy', 'HTTP_PROXY', 'HTTPS_PROXY']
-    proxyDefs = {k : _original_environ[k] for k in proxyVars if k in _original_environ.iterkeys()}
+    proxyDefs = {k : _original_environ[k] for k in proxyVars if k in _original_environ.keys()}
    if not proxyDefs:
        warn('** If behind a firewall without direct internet access, use the http_proxy environment variable ' \
             '(e.g. "env http_proxy=proxy.company.com:80 mx ...") or download manually with a web browser.')
    else:
-        defs = [i[0] + '=' + i[1] for i in proxyDefs.iteritems()]
+        defs = [i[0] + '=' + i[1] for i in proxyDefs.items()]
        warn('** You have the following environment variable(s) set which may be the cause of the URL error:\n ' + '\n '.join(defs))

def _suggest_tlsv1_error(e):
@@ -12320,12 +12323,12 @@ def _attempt_download(url, path, jarEntryName=None):

        return True

-    except (IOError, socket.timeout, urllib2.HTTPError) as e:
+    except (IOError, socket.timeout, urllib_error.HTTPError) as e:
        # In case of an exception the temp file is removed automatically, so no cleanup is necessary
        log_error("Error reading from " + url + ": " + str(e))
        _suggest_http_proxy_error(e)
        _suggest_tlsv1_error(e)
-        if isinstance(e, urllib2.HTTPError) and e.code == 500:
+        if isinstance(e, urllib_error.HTTPError) and e.code == 500:
            return "retry"
    finally:
        if conn:
@@ -12377,7 +12380,7 @@ def download(path, urls, verbose=False, abortOnError=True, verifyOnly=False):
    if abortOnError:
        msg = 'Could not download to ' + path + ' from any of the following URLs: ' + ', '.join(urls)
        if verifyOnly:
-            for url, e in verify_errors.iteritems():
+            for url, e in verify_errors.items():
                msg += '\n ' + url + ': ' + str(e)
        abort(msg)
    else:
@@ -12392,7 +12395,7 @@ def update_file(path, content, showDiff=False):
    try:
        old = None
        if existed:
-            with open(path, 'rb') as f:
+            with open(path, 'r') as f:
                old = f.read()

        if old == content:
@@ -12401,7 +12404,7 @@ def update_file(path, content, showDiff=False):
        if existed and _opts.backup_modified:
            shutil.move(path, path + '.orig')

-        with open(path, 'wb') as f:
+        with open(path, 'w') as f:
            f.write(content)

        if existed:
@@ -12411,7 +12414,6 @@ def update_file(path, content, showDiff=False):
            if showDiff:
                log('diff: ' + path)
                log(''.join(difflib.unified_diff(old.splitlines(1), content.splitlines(1))))
-
        else:
            log('created ' + path)
        return True
@@ -12539,7 +12541,7 @@ def build(cmd_args, parser=None):
    # ... and the dependencies that *will not* be built
    if _removedDeps:
        log('Dependencies removed from build:')
-        for _, reason in _removedDeps.iteritems():
+        for _, reason in _removedDeps.items():
            if isinstance(reason, tuple):
                reason, _ = reason
            log(' {}'.format(reason))
@@ -12700,7 +12702,7 @@ def depsDone(task):
    if len(failed):
        for t in failed:
            log_error('{0} failed'.format(t))
-        for daemon in daemons.itervalues():
+        for daemon in daemons.values():
            daemon.shutdown()
        abort('{0} build tasks failed'.format(len(failed)))

@@ -12709,7 +12711,7 @@ def depsDone(task):
            t.prepare(daemons)
            t.execute()

-    for daemon in daemons.itervalues():
+    for daemon in daemons.values():
        daemon.shutdown()

# TODO check for distributions overlap (while loading suites?)
@@ -12912,7 +12914,7 @@ def update(self, removeTrailingWhitespace, restore):
         log("we have: " + str(len(batches)) + " batches")
         batch_num = 0
-        for batch, javafiles in batches.iteritems():
+        for batch, javafiles in batches.items():
             batch_num += 1
             log("Processing batch {0} ({1} files)...".format(batch_num, len(javafiles)))
 
@@ -13003,7 +13005,7 @@ def pylint(args):
     ver = (-1, -1)
 
     try:
-        output = subprocess.check_output(['pylint', '--version'], stderr=subprocess.STDOUT)
+        output = _check_output(['pylint', '--version'], stderr=subprocess.STDOUT)
         m = re.match(r'.*pylint (\d+)\.(\d+)\.(\d+).*', output, re.DOTALL)
         if not m:
             log_error('could not determine pylint version from ' + output)
@@ -13206,7 +13208,7 @@ def _add_str_tar(self, data, archive_name, provenance):
         tarinfo.name = archive_name
         tarinfo.size = len(data)
         tarinfo.mtime = calendar.timegm(datetime.now().utctimetuple())
-        self.zf.addfile(self._tarinfo_filter(tarinfo), StringIO.StringIO(data))
+        self.zf.addfile(self._tarinfo_filter(tarinfo), StringIO(data))
 
     def _add_link_tar(self, target, archive_name, provenance):
         self._add_provenance(archive_name, provenance)
@@ -13483,14 +13485,14 @@ def newest(paths):
     def isOlderThan(self, arg):
         if not self.timestamp:
             return True
-        if isinstance(arg, (types.IntType, types.LongType, types.FloatType)):
+        if isinstance(arg, (int, _long, float)):
             return self.timestamp < arg
         if isinstance(arg, TimeStampFile):
             if arg.timestamp is None:
                 return False
             else:
                 return arg.timestamp > self.timestamp
-        elif isinstance(arg, types.ListType):
+        elif isinstance(arg, list):
             files = arg
         else:
             files = [arg]
@@ -13502,14 +13504,14 @@ def isOlderThan(self, arg):
     def isNewerThan(self, arg):
         if not self.timestamp:
             return False
-        if isinstance(arg, (types.IntType, types.LongType, types.FloatType)):
+        if isinstance(arg, (int, _long, float)):
             return self.timestamp > arg
         if isinstance(arg, TimeStampFile):
             if arg.timestamp is None:
                 return False
             else:
                 return arg.timestamp < self.timestamp
-        elif isinstance(arg, types.ListType):
+        elif isinstance(arg, list):
             files = arg
         else:
             files = [arg]
@@ -13533,7 +13535,7 @@ def touch(self):
             os.utime(self.path, None)
         else:
             ensure_dir_exists(dirname(self.path))
-            file(self.path, 'a')
+            open(self.path, 'a')
         self.timestamp = getmtime(self.path)
 
 def checkstyle(args):
@@ -13616,7 +13618,7 @@ def match(name):
             batch.sources.extend(javafilelist)
 
-    for key, batch in batches.iteritems():
+    for key, batch in batches.items():
         if len(batch.sources) == 0:
             continue
         config, checkstyleVersion = key
@@ -13639,7 +13641,7 @@ def start_element(name, attrs):
                 xp = xml.parsers.expat.ParserCreate()
                 xp.StartElementHandler = start_element
-                with open(auditfileName) as fp:
+                with open(auditfileName, 'rb') as fp:
                     xp.ParseFile(fp)
                 if len(errors) != 0:
                     map(log_error, errors)
@@ -13674,7 +13676,7 @@ def _safe_path(path):
             path = '\\\\?\\UNC' + path
         else:
             path = '\\\\?\\' + path
-        path = unicode(path)
+        path = _unicode(path)
     return path
 
 def getmtime(name):
@@ -13699,7 +13701,7 @@ def open(name, mode='r'): # pylint: disable=redefined-builtin
     """
     Wrapper for builtin open function that handles long path names on Windows.
     """
-    return __builtin__.open(_safe_path(name), mode=mode)
+    return builtins.open(_safe_path(name), mode=mode)
 
 def copytree(src, dst, symlinks=False, ignore=None):
     shutil.copytree(_safe_path(src), _safe_path(dst), symlinks, ignore)
@@ -13787,7 +13789,7 @@ def help_(args):
     name = args[0]
     if name not in _mx_commands.commands():
-        hits = [c for c in _mx_commands.commands().iterkeys() if c.startswith(name)]
+        hits = [c for c in _mx_commands.commands().keys() if c.startswith(name)]
         if len(hits) == 1:
             name = hits[0]
         elif len(hits) == 0:
@@ -13838,7 +13840,7 @@ def flattenMultiReleaseSources(args):
     # multi-release jars are resolved.
     for version, maps in sorted(versions.items()):
         for flatten_map in maps:
-            for src_dir, dst_dir in flatten_map.iteritems():
+            for src_dir, dst_dir in flatten_map.items():
                 if not args.commands:
                     print(src_dir, dst_dir)
                 else:
@@ -14371,7 +14373,7 @@ def processDep(dep, edge):
     # Ignore modules (such as jdk.internal.vm.compiler) that define packages
     # that are also defined by project deps as the latter will have the most
     # recent API.
-    exports = sorted([(module, pkgs) for module, pkgs in moduleDeps.iteritems() if allProjectPackages.isdisjoint(pkgs)])
+    exports = sorted([(module, pkgs) for module, pkgs in moduleDeps.items() if allProjectPackages.isdisjoint(pkgs)])
     if exports:
         addExportsValue = []
         exported_modules = []
@@ -14511,11 +14513,11 @@ def processDep(dep, edge):
         if dep.isJavaProject():
             concealed = dep.get_concealed_imported_packages(jdk)
             if concealed:
-                for module, pkgs in concealed.iteritems():
+                for module, pkgs in concealed.items():
                     concealedAPDeps.setdefault(module, []).extend(pkgs)
     if concealedAPDeps:
         exports = []
-        for module, pkgs in concealedAPDeps.iteritems():
+        for module, pkgs in concealedAPDeps.items():
             for pkg in pkgs:
                 exports.append('--add-exports=' + module + '/' + pkg + '=ALL-UNNAMED')
         warn('Annotation processor(s) for ' + p.name + ' uses non-exported module packages, requiring ' +
@@ -14553,7 +14555,7 @@ def _get_ide_envvars():
         'JAVA_HOME' : get_env('JAVA_HOME') or get_jdk().home,
         'EXTRA_JAVA_HOMES' : get_env('EXTRA_JAVA_HOMES'),
     }
-    for name, value in _ide_envvars.iteritems():
+    for name, value in _ide_envvars.items():
         if value is None:
             value = get_env(name)
         if value is not None:
settings = 'logToConsole=%s\n' % logToConsole settings = settings + 'absolutePaths=%s\n' % absolutePaths - for name, value in _get_ide_envvars().iteritems(): + for name, value in _get_ide_envvars().items(): settings = settings + '%s=%s\n' % (name, value) return settings @@ -14593,7 +14595,7 @@ def _eclipseinit_suite(s, buildProcessorJars=True, refreshOnly=False, logToConso files += _processorjars_suite(s) for p in s.projects: - code = p._eclipseinit.func_code + code = _func_code(p._eclipseinit) if 'absolutePaths' in code.co_varnames[:code.co_argcount]: p._eclipseinit(files, libFiles, absolutePaths=absolutePaths) else: @@ -14728,7 +14730,7 @@ def _genEclipseBuilder(dotProjectDoc, p, name, mxCommand, refresh=True, refreshF launchOut.open('launchConfiguration', {'type' : 'org.eclipse.ui.externaltools.ProgramBuilderLaunchConfigurationType'}) launchOut.element('booleanAttribute', {'key' : 'org.eclipse.debug.core.capture_output', 'value': consoleOn}) launchOut.open('mapAttribute', {'key' : 'org.eclipse.debug.core.environmentVariables'}) - for key, value in _get_ide_envvars().iteritems(): + for key, value in _get_ide_envvars().items(): launchOut.element('mapEntry', {'key' : key, 'value' : value}) launchOut.close('mapAttribute') @@ -14811,7 +14813,7 @@ def generate_eclipse_workingsets(): # identify the location where to look for workingsets.xml wsfilename = 'workingsets.xml' wsloc = '.metadata/.plugins/org.eclipse.ui.workbench' - if os.environ.has_key('WORKSPACE'): + if 'WORKSPACE' in os.environ: expected_wsroot = os.environ['WORKSPACE'] else: expected_wsroot = primary_suite().dir @@ -14828,7 +14830,7 @@ def generate_eclipse_workingsets(): wspath = join(wsdir, wsfilename) def _add_to_working_set(key, value): - if not workingSets.has_key(key): + if key not in workingSets: workingSets[key] = [value] else: workingSets[key].append(value) @@ -14897,12 +14899,12 @@ def __init__(self): # parsing logic def _ws_start(name, attributes): if name == 'workingSet': - if attributes.has_key('name'): + if 'name' in attributes: ps.current_ws_name = attributes['name'] - if attributes.has_key('aggregate') and attributes['aggregate'] == 'true': + if 'aggregate' in attributes and attributes['aggregate'] == 'true': ps.aggregate_ws = True ps.current_ws = None - elif workingSets.has_key(ps.current_ws_name): + elif ps.current_ws_name in workingSets: ps.current_ws = workingSets[ps.current_ws_name] ps.seen_ws.append(ps.current_ws_name) ps.seen_projects = list() @@ -14942,7 +14944,7 @@ def _ws_item(name, attributes): if name == 'item': if ps.current_ws is None: target.element(name, attributes) - elif not attributes.has_key('elementID') and attributes.has_key('factoryID') and attributes.has_key('path') and attributes.has_key('type'): + elif not 'elementID' in attributes and 'factoryID' in attributes and 'path' in attributes and 'type' in attributes: target.element(name, attributes) p_name = attributes['path'][1:] # strip off the leading '/' ps.seen_projects.append(p_name) @@ -14957,7 +14959,7 @@ def _ws_item(name, attributes): # process document parser.StartElementHandler = _ws_start parser.EndElementHandler = _ws_end - with open(wspath, 'r') as wsfile: + with open(wspath, 'rb') as wsfile: parser.ParseFile(wsfile) target.close('workingSetManager') @@ -15210,7 +15212,7 @@ def processDep(dep, edge): if files is not None: files.append(join(p.dir, 'nbproject', 'project.xml')) - out = StringIO.StringIO() + out = StringIO() jdkPlatform = 'JDK_' + str(jdk.version) annotationProcessorEnabled = "false" @@ -15596,7 +15598,7 @@ def 
intellij_get_python_sdk_name(sdks): return "Python {v[0]}.{v[1]} ({exe})".format(v=sys.version_info, exe=exe) def intellij_get_ruby_sdk_name(sdks): - for sdk in sdks.itervalues(): + for sdk in sdks.values(): if sdk['type'] == intellij_ruby_sdk_type: return sdk['name'] return "truffleruby" @@ -15641,7 +15643,7 @@ def _complianceToIntellijLanguageLevel(compliance): def _intellij_external_project(externalProjects, sdks, host): if externalProjects: - for project_name, project_definition in externalProjects.iteritems(): + for project_name, project_definition in externalProjects.items(): if not project_definition.get('path', None): abort("external project {} is missing path attribute".format(project_name)) if not project_definition.get('type', None): @@ -15803,7 +15805,7 @@ def processDep(dep, edge): if compilerXml and jdk.javaCompliance >= '9': moduleDeps = p.get_concealed_imported_packages(jdk=jdk) if moduleDeps: - exports = sorted([(m, pkgs) for m, pkgs in moduleDeps.iteritems() if dependencies_project_packages.isdisjoint(pkgs)]) + exports = sorted([(m, pkgs) for m, pkgs in moduleDeps.items() if dependencies_project_packages.isdisjoint(pkgs)]) if exports: args = [] exported_modules = set() @@ -15979,7 +15981,7 @@ def make_library(name, path, source_path, suite_dir): compilerXml.close('wildcardResourcePatterns') if annotationProcessorProfiles: compilerXml.open('annotationProcessing') - for t, modules in sorted(annotationProcessorProfiles.iteritems()): + for t, modules in sorted(annotationProcessorProfiles.items()): source_gen_dir = t[0] processors = t[1:] compilerXml.open('profile', attributes={'default': 'false', 'name': '-'.join([ap.name for ap in processors]) + "-" + source_gen_dir, 'enabled': 'true'}) @@ -16053,7 +16055,7 @@ def processApDep(dep, edge): if corePrefsSources: miscXml = XMLDoc() miscXml.open('project', attributes={'version' : '4'}) - out = StringIO.StringIO() + out = StringIO() print('# GENERATED -- DO NOT EDIT', file=out) for source in corePrefsSources: print('# Source:', source, file=out) @@ -16065,7 +16067,7 @@ def processApDep(dep, edge): update_file(formatterConfigFile, out.getvalue()) importConfigFile = None if uiPrefsSources: - out = StringIO.StringIO() + out = StringIO() print('# GENERATED -- DO NOT EDIT', file=out) for source in uiPrefsSources: print('# Source:', source, file=out) @@ -16299,7 +16301,7 @@ def rm(path): except: log_error("Error removing {0}".format(p.name + '.jar')) - for d in _dists.itervalues(): + for d in _dists.values(): if not d.isJARDistribution(): continue if d.get_ide_project_dir(): @@ -16455,7 +16457,7 @@ def ignorePath(path, whitelist): unmanagedSources.setdefault(suite.vc_dir, []).extend(javaSourcesInVC) # also check for files that are outside of suites - for vcDir, vc in suiteVcDirs.iteritems(): + for vcDir, vc in suiteVcDirs.items(): for dirpath, dirnames, files in os.walk(vcDir): if dirpath in suiteDirs: # skip known suites @@ -16477,7 +16479,7 @@ def ignorePath(path, whitelist): retcode = 0 if len(unmanagedSources) > 0: log('The following files are managed but not in any project:') - for vc_dir, sources in unmanagedSources.iteritems(): + for vc_dir, sources in unmanagedSources.items(): for source in sources: log(source) if suiteWhitelists.get(vc_dir) is not None: @@ -16738,11 +16740,11 @@ def find_group(pkg): g = find_group(p) if g is None: continue - if not groups.has_key(g): + if g not in groups: groups[g] = set() groups[g].add(p) groupargs = list() - for k, v in groups.iteritems(): + for k, v in groups.items(): if len(v) == 0: 
continue groupargs.append('-group')
@@ -16895,7 +16897,7 @@ def site(args): if args.dot_output_base is not None: dotErr = None try: - if 'version' not in subprocess.check_output(['dot', '-V'], stderr=subprocess.STDOUT): + if 'version' not in _check_output(['dot', '-V'], stderr=subprocess.STDOUT): dotErr = 'dot -V does not print a string containing "version"' except subprocess.CalledProcessError as e: dotErr = 'error calling "dot -V": {0}'.format(e)
@@ -17334,9 +17336,9 @@ def select_items(items, descriptions=None, allowMultiple=True): log('\n'.join(wrapper.wrap(('[{0:>' + numlen + '}] {1} - {2}').format(i + 1, items[i], descriptions[i])))) while True: if allowMultiple: - s = raw_input('Enter number(s) of selection (separate multiple choices with spaces): ').split() + s = _raw_input('Enter number(s) of selection (separate multiple choices with spaces): ').split() else: - s = [raw_input('Enter number of selection: ')] + s = [_raw_input('Enter number of selection: ')] try: s = [int(x) for x in s] except:
@@ -17376,7 +17378,7 @@ def createArchive(addMethod): entries = {} def add(path, arcname): apath = os.path.abspath(path) - if not entries.has_key(arcname): + if arcname not in entries: entries[arcname] = apath logv('[adding ' + path + ']') addMethod(path, arcname=arcname)
@@ -17387,18 +17389,18 @@ def add(path, arcname): libsToExport = set() if args.include_all: - for lib in _libs.itervalues(): + for lib in _libs.values(): libsToExport.add(lib) else: def isValidLibrary(dep): - if dep in _libs.iterkeys(): + if dep in _libs: lib = _libs[dep] if len(lib.urls) != 0 or args.include_system_libs: return lib return None # iterate over all project dependencies and find used libraries - for p in _projects.itervalues(): + for p in _projects.values(): for dep in p.deps: r = isValidLibrary(dep) if r:
@@ -17756,7 +17758,7 @@ def format_help(self): return ArgumentParser.format_help(self) + self._get_program_help() def _get_program_help(self): - help_output = subprocess.check_output([get_jdk().java, '-cp', classpath('com.oracle.mxtool.checkcopy'), 'com.oracle.mxtool.checkcopy.CheckCopyright', '--help']) + help_output = _check_output([get_jdk().java, '-cp', classpath('com.oracle.mxtool.checkcopy'), 'com.oracle.mxtool.checkcopy.CheckCopyright', '--help']) return '\nother arguments preceded by --, e.g. mx checkcopyright --primary -- --all\n' + help_output # ensure compiled form of code is up to date
@@ -17839,8 +17841,8 @@ def _copy_eclipse_settings(p, files=None): settingsDir = join(p.dir, ".settings") ensure_dir_exists(settingsDir) - for name, sources in p.eclipse_settings_sources().iteritems(): - out = StringIO.StringIO() + for name, sources in p.eclipse_settings_sources().items(): + out = StringIO() print('# GENERATED -- DO NOT EDIT', file=out) for source in sources: print('# Source:', source, file=out)
@@ -17925,7 +17927,7 @@ def show_envs(args): parser.add_argument('--all', action='store_true', help='show all variables, not just those starting with "MX"') args = parser.parse_args(args) - for key, value in os.environ.iteritems(): + for key, value in os.environ.items(): if args.all or key.startswith('MX'): print('{0}: {1}'.format(key, value))
@@ -17976,9 +17978,9 @@ def ask_question(question, options, default=None, answer=None): print(question + questionMark + answer) else: if is_interactive(): - answer = raw_input(question + questionMark) or default + answer = _raw_input(question + questionMark) or default while not answer: - answer = raw_input(question + questionMark) + answer = _raw_input(question + questionMark) else: if default: answer = default
@@ -18239,7 +18241,7 @@ def visit(dep, edge): if reasonAttr: abort('"ignore" attribute must be False/"false" or a non-empty string providing the reason the dependency is ignored', context=dep) else: - assert isinstance(reasonAttr, basestring) + assert isinstance(reasonAttr, _basestring) strippedReason = reasonAttr.strip() if len(strippedReason) != 0: if not strippedReason == "false":
@@ -18282,7 +18284,7 @@ def note_removal(dep, reason, details=None): walk_deps(visit=visit, ignoredEdges=[DEP_EXCLUDED]) res = OrderedDict() - for dep, reason in removedDeps.iteritems(): + for dep, reason in removedDeps.items(): if not isinstance(reason, str): assert isinstance(reason, tuple) res[dep.name] = reason
@@ -18327,7 +18329,7 @@ def _get_import_dir(url, mode): # Try to use the URL first so that a big repo is cloned to a local # directory whose name is based on the repo instead of a suite # nested in the big repo.
- root, _ = os.path.splitext(basename(urlparse.urlparse(url).path)) + root, _ = os.path.splitext(basename(urllib_parse.urlparse(url).path)) if root: import_dir = join(SiblingSuiteModel.siblings_dir(importing_suite.dir), root) else:
@@ -18724,8 +18726,8 @@ def _install_socks_proxy_opener(proxytype, proxyaddr, proxyport=None): else: abort("Unknown Socks Proxy type {0}".format(proxytype)) - opener = urllib2.build_opener(SocksiPyHandler(proxytype, proxyaddr, proxyport)) - urllib2.install_opener(opener) + opener = urllib_request.build_opener(SocksiPyHandler(proxytype, proxyaddr, proxyport)) + urllib_request.install_opener(opener) def main():
@@ -18896,7 +18898,7 @@ def _visit_and_find_jmh_dep(dst, edge): command_args = commandAndArgs[1:] if command not in _mx_commands.commands(): - hits = [c for c in _mx_commands.commands().iterkeys() if c.startswith(command)] + hits = [c for c in _mx_commands.commands().keys() if c.startswith(command)] if len(hits) == 1: command = hits[0] elif len(hits) == 0:
diff --git a/mx_benchmark.py b/mx_benchmark.py index a1c16ee4..55d8c343 100644 --- a/mx_benchmark.py +++ b/mx_benchmark.py
@@ -39,6 +39,7 @@ from collections import OrderedDict import mx +from mx_portable import _long _bm_suites = {} _benchmark_executor = None
@@ -377,7 +378,7 @@ def bm_suite_valid_keys(): def vm_registries(): res = set() - for bm_suite in _bm_suites.itervalues(): + for bm_suite in _bm_suites.values(): if isinstance(bm_suite, VmBenchmarkSuite): res.add(bm_suite.get_vm_registry()) return res
@@ -471,7 +472,7 @@ def parse(self, text): varpat = re.compile(r"\$([a-zA-Z_][0-9a-zA-Z_]*)") for iteration, m in enumerate(self.parseResults(text)): datapoint = {} - for key, value in self.replacement.iteritems(): + for key, value in self.replacement.items(): inst = value if isinstance(inst, tuple): v, vtype = inst
@@ -488,8 +489,8 @@ def var(name): inst = str(v) elif vtype is int: inst = int(v) - elif vtype is long: - inst = long(v) + elif vtype is _long: + inst = _long(v) elif vtype is float: inst = float(v) elif vtype is bool:
@@ -498,7 +499,7 @@ def var(name): inst = vtype(v) else: raise RuntimeError("Cannot handle object '{0}' of expected type {1}".format(v, vtype)) - if not isinstance(inst, (str, int, long, float, bool)): + if not isinstance(inst, (str, int, _long, float, bool)): raise RuntimeError("Object '{0}' has unknown type: {1}".format(inst, type(inst))) datapoint[key] = inst datapoints.append(datapoint)
@@ -680,9 +681,9 @@ def parse(self, text): if "params" in result: # add all parameters as a single string - d["extra.jmh.params"] = ", ".join(["=".join(kv) for kv in result["params"].iteritems()]) + d["extra.jmh.params"] = ", ".join(["=".join(kv) for kv in result["params"].items()]) # and also the individual values - for k, v in result["params"].iteritems(): + for k, v in result["params"].items(): d["extra.jmh.param." + k] = str(v) for k in self.getExtraJmhKeys():
@@ -1370,7 +1371,7 @@ class JMHRunnerBenchmarkSuite(JMHBenchmarkSuiteBase): #pylint: disable=too-many- def benchmarkList(self, bmSuiteArgs): """Return all different JMH versions found.""" - return list(JMHRunnerBenchmarkSuite.get_jmh_projects_dict().iterkeys()) + return list(JMHRunnerBenchmarkSuite.get_jmh_projects_dict().keys()) def createCommandLineArgs(self, benchmarks, bmSuiteArgs): if benchmarks is None:
@@ -1394,7 +1395,7 @@ def get_jmh_projects_dict(): jmhProjects = {} projects = mx.projects_opt_limit_to_suites() if mx.primary_suite() == mx._mx_suite: - projects = [p for p in mx._projects.itervalues() if p.suite == mx._mx_suite] + projects = [p for p in mx._projects.values() if p.suite == mx._mx_suite] for p in projects: for x in p.deps: if x.name.startswith('JMH'):
@@ -1825,7 +1826,7 @@ def benchmark(self, mxBenchmarkArgs, bmSuiteArgs): vmregToSuites.setdefault(vmreg, []).append(bm_suite_name) else: noVmRegSuites.append(bm_suite_name) - for vmreg, bm_suite_names in vmregToSuites.iteritems(): + for vmreg, bm_suite_names in vmregToSuites.items(): print("\nThe following {} benchmark suites are available:\n".format(vmreg.vm_type_name)) for name in bm_suite_names: print(" " + name)
@@ -1838,7 +1839,7 @@ def benchmark(self, mxBenchmarkArgs, bmSuiteArgs): if mxBenchmarkArgs.help or mxBenchmarkArgs.benchmark is None: parser.print_help() - for key, entry in parsers.iteritems(): + for key, entry in parsers.items(): if mxBenchmarkArgs.benchmark is None or key in suite.parserNames(): print(entry.description) entry.parser.print_help()
diff --git a/mx_benchplot.py b/mx_benchplot.py index e1814ac4..23102ce6 100644 --- a/mx_benchplot.py +++ b/mx_benchplot.py
@@ -365,7 +365,7 @@ def extract_results(files, names, last_n=None, selected_benchmarks=None): higher = entry['metric.better'] == 'higher' result[benchmark] = {'scores': [score], 'higher': higher, 'name': name} - for _, entry in result.iteritems(): + for _, entry in result.items(): if last_n and len(entry['scores']) > abs(last_n): if last_n < 0: entry['trimmed_scores'] = entry['scores'][:-last_n]
@@ -379,7 +379,7 @@ def extract_results(files, names, last_n=None, selected_benchmarks=None): # Compute a variance value. This is a percentage variance relative to the average score # which is easier to interpret than a raw number.
- for _, entry in result.iteritems(): + for _, entry in result.items(): variance = 0 for score in entry['scores']: variance = variance + (score - entry['score']) * (score - entry['score']) diff --git a/mx_compat.py b/mx_compat.py index 7700a0ec..374f5fe8 100644 --- a/mx_compat.py +++ b/mx_compat.py @@ -26,7 +26,7 @@ from __future__ import print_function -import sys, inspect, re, types, bisect +import sys, inspect, re, bisect from collections import OrderedDict from os.path import join import mx @@ -395,13 +395,13 @@ def check_checkstyle_config(self): def minVersion(): _ensureCompatLoaded() - return _versionsMap.keys()[0] + return list(_versionsMap)[0] def getMxCompatibility(version): """:rtype: MxCompatibility500""" if version < minVersion(): # ensures compat loaded return None - keys = _versionsMap.keys() + keys = list(_versionsMap.keys()) return _versionsMap[keys[bisect.bisect_right(keys, version)-1]] _versionsMap = OrderedDict() @@ -411,12 +411,12 @@ def _ensureCompatLoaded(): def flattenClassTree(tree): root = tree[0][0] - assert isinstance(root, types.TypeType), root + assert isinstance(root, type), root yield root if len(tree) > 1: assert len(tree) == 2 rest = tree[1] - assert isinstance(rest, types.ListType), rest + assert isinstance(rest, list), rest for c in flattenClassTree(rest): yield c diff --git a/mx_downstream.py b/mx_downstream.py index 01d5c810..9f23fd69 100644 --- a/mx_downstream.py +++ b/mx_downstream.py @@ -30,7 +30,7 @@ from os.path import join, exists, isabs, basename from argparse import ArgumentParser -from urlparse import urlparse +from mx_portable import urllib_parse import os import mx import mx_urlrewrites @@ -117,7 +117,7 @@ def ignore_output_root(d, names): targetDir = None for repoUrl in repoUrls: # Deduce a target name from the target URL - url = urlparse(repoUrl) + url = urllib_parse.urlparse(repoUrl) targetName = url.path if targetName.rfind('/') != -1: targetName = targetName[targetName.rfind('/') + 1:] diff --git a/mx_gate.py b/mx_gate.py index ac00a7df..65d96f6a 100644 --- a/mx_gate.py +++ b/mx_gate.py @@ -36,6 +36,8 @@ import mx import sys +from mx_portable import _basestring + """ Predefined Task tags. 
""" @@ -73,7 +75,7 @@ class Task: def tag_matches(self, _tags): for t in _tags: - assert isinstance(t, basestring), '{} is not a string and thus not a valid tag'.format(t) + assert isinstance(t, _basestring), '{} is not a string and thus not a valid tag'.format(t) if t in Task.tags: if t not in Task.tags_range: # no range restriction @@ -457,6 +459,7 @@ def _run_gate(cleanArgs, args, tasks): if t: mx.command_function('version')(['--oneline']) mx.command_function('sversions')([]) + mx.log("Python version: {}".format(sys.version_info)) with Task('JDKReleaseInfo', tasks, tags=[Tags.always]) as t: if t: diff --git a/mx_javamodules.py b/mx_javamodules.py index 7c1bb3a4..52f08178 100644 --- a/mx_javamodules.py +++ b/mx_javamodules.py @@ -30,7 +30,7 @@ import re import zipfile import pickle -import StringIO +from mx_portable import StringIO, _cmp, _viewkeys import shutil import itertools from os.path import join, exists, dirname, basename @@ -39,6 +39,7 @@ from zipfile import ZipFile import mx +from mx_portable import _basestring class JavaModuleDescriptor(object): @@ -66,7 +67,7 @@ def __init__(self, name, exports, requires, uses, provides, packages=None, conce self.concealedRequires = concealedRequires if concealedRequires else {} self.uses = frozenset(uses) self.provides = provides - exportedPackages = frozenset(exports.viewkeys()) + exportedPackages = frozenset(_viewkeys(exports)) self.packages = exportedPackages if packages is None else frozenset(packages) assert len(exports) == 0 or exportedPackages.issubset(self.packages), exportedPackages - self.packages self.conceals = self.packages - exportedPackages @@ -83,7 +84,7 @@ def __repr__(self): def __cmp__(self, other): assert isinstance(other, JavaModuleDescriptor) - return cmp(self.name, other.name) + return _cmp(self.name, other.name) @staticmethod def load(dist, jdk, fatalIfNotCreated=True): @@ -145,17 +146,17 @@ def as_module_info(self): """ Gets this module descriptor expressed as the contents of a ``module-info.java`` file. 
""" - out = StringIO.StringIO() + out = StringIO() print('module ' + self.name + ' {', file=out) - for dependency, modifiers in sorted(self.requires.iteritems()): + for dependency, modifiers in sorted(self.requires.items()): modifiers_string = (' '.join(sorted(modifiers)) + ' ') if len(modifiers) != 0 else '' print(' requires ' + modifiers_string + dependency + ';', file=out) - for source, targets in sorted(self.exports.iteritems()): + for source, targets in sorted(self.exports.items()): targets_string = (' to ' + ', '.join(sorted(targets))) if len(targets) != 0 else '' print(' exports ' + source + targets_string + ';', file=out) for use in sorted(self.uses): print(' uses ' + use + ';', file=out) - for service, providers in sorted(self.provides.iteritems()): + for service, providers in sorted(self.provides.items()): print(' provides ' + service + ' with ' + ', '.join((p for p in providers)) + ';', file=out) for pkg in sorted(self.conceals): print(' // conceals: ' + pkg, file=out) @@ -166,7 +167,7 @@ def as_module_info(self): if self.modulepath: print(' // modulepath: ' + ', '.join([jmd.name for jmd in self.modulepath]), file=out) if self.concealedRequires: - for dependency, packages in sorted(self.concealedRequires.iteritems()): + for dependency, packages in sorted(self.concealedRequires.items()): for package in sorted(packages): print(' // concealed-requires: ' + dependency + '/' + package, file=out) print('}', file=out) @@ -365,7 +366,7 @@ def is_valid_module_name(name): provides.setdefault(service, []).extend(providers) else: mx.abort('Cannot parse module descriptor line: ' + str(parts)) - packages.update(exports.viewkeys()) + packages.update(_viewkeys(exports)) if save: try: @@ -603,7 +604,7 @@ def make_java_module(dist, jdk): javacCmd.append('--upgrade-module-path') javacCmd.append(os.pathsep.join(upgrademodulepathJars)) if concealedRequires: - for module, packages_ in concealedRequires.iteritems(): + for module, packages_ in concealedRequires.items(): for package in packages_: javacCmd.append('--add-exports=' + module + '/' + package + '=' + moduleName) # https://blogs.oracle.com/darcy/new-javac-warning-for-setting-an-older-source-without-bootclasspath @@ -659,10 +660,10 @@ def lookup_module(name): def add_transitive(mod): if mod not in transitive_closure: transitive_closure.add(mod) - for name in mod.requires.iterkeys(): + for name in mod.requires.keys(): add_transitive(lookup_module(name)) for root in roots: - if isinstance(root, basestring): + if isinstance(root, _basestring): root = lookup_module(root) add_transitive(root) return transitive_closure diff --git a/mx_native.py b/mx_native.py index a1ee58d4..aef1b65d 100644 --- a/mx_native.py +++ b/mx_native.py @@ -148,7 +148,7 @@ def _ninja_deps(cls): # pylint: disable=no-self-argument Ninja.binary = mx.join(dep.get_path(False), 'ninja') try: - import ninja_syntax # pylint: disable=unused-variable + import ninja_syntax # pylint: disable=unused-variable, unused-import except ImportError: def raise_(e): raise e diff --git a/mx_portable.py b/mx_portable.py new file mode 100644 index 00000000..ec1641c3 --- /dev/null +++ b/mx_portable.py @@ -0,0 +1,59 @@ +import sys +import itertools +import subprocess + +if sys.version_info[0] < 3: + from StringIO import StringIO #pylint: disable=unused-import + import __builtin__ as builtins #pylint: disable=unused-import + import urllib2 #pylint: disable=unused-import + urllib_request = urllib2 + urllib_error = urllib2 + del urllib2 + import urlparse as urllib_parse #pylint: disable=unused-import + + 
_filter = itertools.ifilter + _cmp = cmp #pylint: disable=undefined-variable + _raw_input = raw_input #pylint: disable=undefined-variable + _unicode = unicode #pylint: disable=undefined-variable + _long = long #pylint: disable=undefined-variable + _basestring = basestring #pylint: disable=undefined-variable + + def _py3_decode(x): + return x + def _py3_encode(x): + return x + + def _func_code(f): + return f.func_code + + def _viewkeys(dictionary): + return dictionary.viewkeys() +else: + from io import StringIO #pylint: disable=unused-import + import builtins #pylint: disable=unused-import + import urllib.request as urllib_request #pylint: disable=unused-import,no-name-in-module + import urllib.error as urllib_error #pylint: disable=unused-import,no-name-in-module + import urllib.parse as urllib_parse #pylint: disable=unused-import,no-name-in-module + + _filter = filter + def _cmp(a, b): + return (a > b) - (a < b) + + _raw_input = input + _unicode = str + _long = int + _basestring = str + + def _py3_decode(x): + return x.decode() + def _py3_encode(x): + return x.encode() + + def _func_code(f): + return f.__code__ + + def _viewkeys(dictionary): + return dictionary.keys() + +def _check_output(*args, **kwargs): + return _py3_decode(subprocess.check_output(*args, **kwargs)) diff --git a/mx_unittest.py b/mx_unittest.py index 03f1d430..75db6d7b 100755 --- a/mx_unittest.py +++ b/mx_unittest.py @@ -187,14 +187,14 @@ def _run_tests(args, harness, vmLauncher, annotations, testfile, blacklist, whit mx.abort("Method specification is class#method: " + tests[0]) t, method = words - for c, p in candidates.iteritems(): + for c, p in candidates.items(): # prefer exact matches first if t == c: found = True classes.append(c) depsContainingTests.add(p) if not found: - for c, p in candidates.iteritems(): + for c, p in candidates.items(): if t in c: found = True classes.append(c) @@ -209,7 +209,7 @@ def _run_tests(args, harness, vmLauncher, annotations, testfile, blacklist, whit for t in tests: if '#' in t: mx.abort('Method specifications can only be used in a single test: ' + t) - for c, p in candidates.iteritems(): + for c, p in candidates.items(): if t in c: found = True classes.append(c) diff --git a/mx_urlrewrites.py b/mx_urlrewrites.py index 4ad7d613..ad90a29c 100644 --- a/mx_urlrewrites.py +++ b/mx_urlrewrites.py @@ -62,7 +62,7 @@ def _error(msg): return if not isinstance(urlrewrite, dict) or len(urlrewrite) != 1: onError('A URL rewrite rule must be a dict with a single entry') - for pattern, attrs in urlrewrite.iteritems(): + for pattern, attrs in urlrewrite.items(): replacement = attrs.pop('replacement', None) if replacement is None: raise Exception('URL rewrite for pattern "' + pattern + '" is missing "replacement" entry') diff --git a/select_jdk.py b/select_jdk.py index 8184e3d9..ed5224ce 100755 --- a/select_jdk.py +++ b/select_jdk.py @@ -29,7 +29,7 @@ import os, tempfile from argparse import ArgumentParser, REMAINDER from os.path import exists, expanduser, join, isdir, isfile, realpath, dirname, abspath -import StringIO +from mx_portable import StringIO, _raw_input def is_valid_jdk(jdk): """ @@ -87,7 +87,7 @@ def get_PATH_sep(shell): def get_shell_commands(args, jdk, extra_jdks): setvar_format = get_setvar_format(args.shell) - shell_commands = StringIO.StringIO() + shell_commands = StringIO() print(setvar_format % ('JAVA_HOME', jdk), file=shell_commands) if extra_jdks: print(setvar_format % ('EXTRA_JAVA_HOMES', os.pathsep.join(extra_jdks)), file=shell_commands) @@ -209,6 +209,6 @@ def 
apply_selection(args, jdk, extra_jdks): os.rename(tmp_cache_path, jdk_cache_path) choices = {str(index):jdk for index, jdk in choices} - jdks = [choices[n] for n in raw_input('Select JDK(s) (separate multiple choices by whitespace)> ').split() if n in choices] + jdks = [choices[n] for n in _raw_input('Select JDK(s) (separate multiple choices by whitespace)> ').split() if n in choices] if jdks: apply_selection(args, jdks[0], jdks[1:]) diff --git a/tag_version.py b/tag_version.py index 2aa054d5..800e28e6 100755 --- a/tag_version.py +++ b/tag_version.py @@ -30,6 +30,7 @@ import subprocess import re +from mx_portable import _check_output from argparse import ArgumentParser from os.path import realpath, dirname @@ -49,11 +50,11 @@ def get_parents(commit): - return subprocess.check_output(['git', 'rev-parse', commit + '^@']).strip().split() + return _check_output(['git', 'rev-parse', commit + '^@']).strip().split() def with_hash(commit): - h = subprocess.check_output(['git', 'rev-parse', commit]).strip() + h = _check_output(['git', 'rev-parse', commit]).strip() if h == commit: return h return '{} ({})'.format(commit, h) @@ -79,7 +80,7 @@ def with_hash(commit): if not args.ancestor: raise SystemExit('{} is not a merge or has no parent that is a merge'.format(with_hash(args.descendant))) -diff = subprocess.check_output(['git', 'diff', args.ancestor, args.descendant, '--', 'mx.py'], cwd=mx_home).strip() +diff = _check_output(['git', 'diff', args.ancestor, args.descendant, '--', 'mx.py'], cwd=mx_home).strip() new_version = new_version_re.match(diff) old_version = old_version_re.match(diff)
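
Note on usage (not part of the patch): the sketch below illustrates how code layered on mx is expected to consume the mx_portable shim introduced above, so that a single source tree runs unchanged on Python 2.7 and Python 3.6. The helper functions and sample data are hypothetical; only the imported names (StringIO, _basestring, _cmp, _check_output, urllib_parse) and functools.cmp_to_key are taken from the patch itself. It assumes mx_portable is on the Python path and git is installed.

# Hypothetical demo of the mx_portable compatibility layer (not in the patch).
from __future__ import print_function

import subprocess
from functools import cmp_to_key
from mx_portable import StringIO, _basestring, _cmp, _check_output, urllib_parse

def describe(value):
    # _basestring is basestring on Python 2 and str on Python 3, so
    # isinstance checks survive the port unchanged.
    if isinstance(value, _basestring):
        return 'string: ' + value
    return 'other: ' + repr(value)

def compare_names(a, b):
    # _cmp stands in for the builtin cmp() removed in Python 3; wrap it
    # with functools.cmp_to_key when a sort still needs comparator logic.
    return _cmp(a.lower(), b.lower())

out = StringIO()  # io.StringIO on Python 3, StringIO.StringIO on Python 2
print(describe('hello'), file=out)
print(describe(42), file=out)
print(sorted(['Beta', 'alpha'], key=cmp_to_key(compare_names)), file=out)

# _check_output decodes the bytes that subprocess.check_output returns on
# Python 3, so substring tests against str keep working on both versions.
banner = _check_output(['git', '--version'], stderr=subprocess.STDOUT)
print('git' in banner, file=out)

# urllib_parse unifies urlparse (Python 2) and urllib.parse (Python 3).
print(urllib_parse.urlparse('https://example.com/repo.git').path, file=out)

print(out.getvalue())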