author     Matt A. Tobin <email@mattatobin.com>   2021-10-30 19:05:33 -0400
committer  Matt A. Tobin <email@mattatobin.com>   2021-10-30 19:05:33 -0400
commit     d6ea3201b3f578d8590fddb395759483e75b6221 (patch)
tree       add1ee9c886bda910cb32df2ba6665dd50dfa172
parent     abc16648c3bcccd785ab5708f02f39947d7dd3fa (diff)
download   aura-central-d6ea3201b3f578d8590fddb395759483e75b6221.tar.gz
Issue %3031 - Remove FasterMake/FasterMake+RecursiveMake
- This includes anything to do with artifact builds.
- This also removes configure options to specify build-backends and hardcodes RecursiveMake.
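For reference, the following is a minimal sketch (not part of the patch) of how the now-hardcoded backend name resolves to a backend class through the backends registry in python/mozbuild/mozbuild/backend/__init__.py, assuming mozbuild is importable; the get_backend_class helper and the '<Name>Backend' class-naming convention are illustrative assumptions rather than the exact in-tree code:

    import importlib

    # Registry as it looks after this change (see the backend/__init__.py hunk below).
    backends = {
        'ChromeMap': 'mozbuild.codecoverage.chrome_map',
        'CompileDB': 'mozbuild.compilation.database',
        'CppEclipse': 'mozbuild.backend.cpp_eclipse',
        'RecursiveMake': 'mozbuild.backend.recursivemake',
        'Tup': 'mozbuild.backend.tup',
    }

    def get_backend_class(name):
        # e.g. 'RecursiveMake' -> mozbuild.backend.recursivemake.RecursiveMakeBackend
        module = importlib.import_module(backends[name])
        return getattr(module, '%sBackend' % name)

    BUILD_BACKENDS = ('RecursiveMake',)  # hardcoded by moz.configure after this change
    backend_classes = [get_backend_class(name) for name in BUILD_BACKENDS]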
-rw-r--r--  Makefile.in                                               30
-rw-r--r--  build/moz.build                                            7
-rw-r--r--  config/baseconfig.mk                                       2
-rw-r--r--  config/faster/rules.mk                                   110
-rw-r--r--  moz.configure                                             67
-rw-r--r--  python/mozbuild/mozbuild/artifacts.py                   1085
-rw-r--r--  python/mozbuild/mozbuild/backend/__init__.py               2
-rw-r--r--  python/mozbuild/mozbuild/backend/common.py                36
-rw-r--r--  python/mozbuild/mozbuild/backend/configenvironment.py      4
-rw-r--r--  python/mozbuild/mozbuild/backend/fastermake.py           165
-rw-r--r--  python/mozbuild/mozbuild/mach_commands.py                150
11 files changed, 7 insertions, 1651 deletions
diff --git a/Makefile.in b/Makefile.in
index 6c2327388..56befd9a0 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -137,16 +137,10 @@ default:: $(BUILD_BACKEND_FILES)
endif
install_manifests := \
- $(addprefix dist/,branding idl include public private sdk xpi-stage) \
+ $(addprefix dist/,branding bin idl include public private sdk xpi-stage) \
_tests \
$(NULL)
-# Skip the dist/bin install manifest when using the hybrid
-# FasterMake/RecursiveMake backend. This is a hack until bug 1241744 moves
-# xpidl handling to FasterMake in that case, mechanically making the dist/bin
-# install manifest non-existent (non-existent manifests being skipped)
-ifeq (,$(filter FasterMake+RecursiveMake,$(BUILD_BACKENDS)))
-install_manifests += dist/bin
-endif
+
install_manifest_depends = \
CLOBBER \
$(configure_dir)/configure \
@@ -166,17 +160,6 @@ endif
.PHONY: install-manifests
install-manifests: $(addprefix install-,$(install_manifests))
-# If we're using the hybrid FasterMake/RecursiveMake backend, we want
-# to recurse in the faster/ directory in parallel of install manifests.
-# But dist/idl needs to happen before (cf. dependencies in
-# config/faster/rules.mk)
-ifneq (,$(filter FasterMake+RecursiveMake,$(BUILD_BACKENDS)))
-install-manifests: faster
-.PHONY: faster
-faster: install-dist/idl
- $(MAKE) -C faster FASTER_RECURSIVE_MAKE=1
-endif
-
.PHONY: tup
tup:
$(call BUILDSTATUS,TIERS make tup)
@@ -198,17 +181,8 @@ endif
.PHONY: $(addprefix install-,$(subst /,_,$(install_manifests)))
$(addprefix install-,$(install_manifests)): install-%: $(install_manifest_depends)
-ifneq (,$(filter FasterMake+RecursiveMake,$(BUILD_BACKENDS)))
- @# If we're using the hybrid FasterMake/RecursiveMake backend, we want
- @# to ensure the FasterMake end doesn't have install manifests for the
- @# same directory, because that would blow up
- $(if $(wildcard _build_manifests/install/$(subst /,_,$*)),$(if $(wildcard faster/install_$(subst /,_,$*)*),$(error FasterMake and RecursiveMake ends of the hybrid build system want to handle $*)))
-endif
$(addprefix $(call py_action,process_install_manifest,$(if $(NO_REMOVE),--no-remove )$*) ,$(wildcard _build_manifests/install/$(subst /,_,$*)))
-# Dummy wrapper rule to allow the faster backend to piggy back
-$(addprefix install-,$(subst /,_,$(filter dist/%,$(install_manifests)))): install-dist_%: install-dist/% ;
-
.PHONY: install-tests
install-tests: install-test-files
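The install-% rules above key each install manifest base on a file under _build_manifests/install/ with slashes replaced by underscores. A small sketch of that mapping (the helper name is illustrative, not an in-tree function):

    import os

    def manifest_path(topobjdir, base):
        # 'dist/bin' -> '<objdir>/_build_manifests/install/dist_bin'
        return os.path.join(topobjdir, '_build_manifests', 'install',
                            base.replace('/', '_'))

    assert manifest_path('/obj', 'dist/bin') == '/obj/_build_manifests/install/dist_bin'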
diff --git a/build/moz.build b/build/moz.build
index 27f681369..6567dd944 100644
--- a/build/moz.build
+++ b/build/moz.build
@@ -97,10 +97,3 @@ if CONFIG['MOZ_VALGRIND']:
'valgrind/i386-redhat-linux-gnu.sup',
'valgrind/x86_64-redhat-linux-gnu.sup',
]
-
-if CONFIG['MOZ_ARTIFACT_BUILDS']:
- # Ensure a pre-built interfaces.xpt installed to the objdir by the artifact
- # code is included by the top-level chrome.manifest.
- EXTRA_COMPONENTS += [
- 'prebuilt-interfaces.manifest',
- ]
diff --git a/config/baseconfig.mk b/config/baseconfig.mk
index 47a12b16e..a125466ab 100644
--- a/config/baseconfig.mk
+++ b/config/baseconfig.mk
@@ -45,7 +45,7 @@ endif # WINNT
ifndef INCLUDED_AUTOCONF_MK
default::
else
-TIERS := $(if $(MOZ_ARTIFACT_BUILDS),artifact )pre-export export $(if $(COMPILE_ENVIRONMENT),compile )misc libs tools
+TIERS := pre-export export $(if $(COMPILE_ENVIRONMENT),compile )misc libs tools
endif
# These defines are used to support the twin-topsrcdir model for comm-central.
diff --git a/config/faster/rules.mk b/config/faster/rules.mk
deleted file mode 100644
index 9d7b322fa..000000000
--- a/config/faster/rules.mk
+++ /dev/null
@@ -1,110 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# /!\ Please make sure to update the following comment when you touch this
-# file. Thank you /!\
-
-# The traditional Mozilla build system relied on going through the entire
-# build tree a number of times with different targets, and many of the
-# things happening at each step required other things happening in previous
-# steps without any documentation of those dependencies.
-#
-# This new build system tries to start afresh by establishing what files or
-# operations are needed for the build, and applying the necessary rules to
-# have those in place, relying on make dependencies to get them going.
-#
-# As of writing, only building non-compiled parts of Firefox is supported
-# here (a few other things are also left out). This is a starting point, with
-# the intent to grow this build system to make it more complete.
-#
-# This file contains rules and dependencies to get things working. The intent
-# is for a Makefile to define some dependencies and variables, and include
-# this file. What needs to be defined there, and ends up being generated by
-# python/mozbuild/mozbuild/backend/fastermake.py is the following:
-# - TOPSRCDIR/TOPOBJDIR, respectively the top source directory and the top
-# object directory
-# - PYTHON, the path to the python executable
-# - ACDEFINES, which contains a set of -Dvar=name to be used during
-# preprocessing
-# - INSTALL_MANIFESTS, which defines the list of base directories handled
-# by install manifests, see further below
-#
-# A convention used between this file and the Makefile including it is that
-# global Make variables names are uppercase, while "local" Make variables
-# applied to specific targets are lowercase.
-
-# Targets to be triggered for a default build
-default: $(addprefix install-,$(INSTALL_MANIFESTS))
-
-ifndef NO_XPIDL
-# Targets from the recursive make backend to be built for a default build
-default: $(TOPOBJDIR)/config/makefiles/xpidl/xpidl
-endif
-
-# Mac builds require to copy things in dist/bin/*.app
-# TODO: remove the MOZ_WIDGET_TOOLKIT and MOZ_BUILD_APP variables from
-# faster/Makefile and python/mozbuild/mozbuild/test/backend/test_build.py
-# when this is not required anymore.
-# We however don't need to do this when using the hybrid
-# FasterMake/RecursiveMake backend (FASTER_RECURSIVE_MAKE is set when
-# recursing from the RecursiveMake backend)
-ifndef FASTER_RECURSIVE_MAKE
-ifeq (cocoa,$(MOZ_WIDGET_TOOLKIT))
-default:
- $(MAKE) -C $(TOPOBJDIR)/$(MOZ_BUILD_APP)/app repackage
-endif
-endif
-
-.PHONY: FORCE
-
-# Extra define to trigger some workarounds. We should strive to limit the
-# use of those. As of writing the only ones are in
-# toolkit/content/buildconfig.html and browser/locales/jar.mn.
-ACDEFINES += -DBUILD_FASTER
-
-# Files under the faster/ sub-directory, however, are not meant to use the
-# fallback
-$(TOPOBJDIR)/faster/%: ;
-
-# Generic rule to fall back to the recursive make backend.
-# This needs to stay after other $(TOPOBJDIR)/* rules because GNU Make
-# <3.82 apply pattern rules in definition order, not stem length like
-# modern GNU Make.
-$(TOPOBJDIR)/%: FORCE
- $(MAKE) -C $(dir $@) $(notdir $@)
-
-# Install files using install manifests
-#
-# The list of base directories is given in INSTALL_MANIFESTS. The
-# corresponding install manifests are named correspondingly, with forward
-# slashes replaced with underscores, and prefixed with `install_`. That is,
-# the install manifest for `dist/bin` would be `install_dist_bin`.
-$(addprefix install-,$(INSTALL_MANIFESTS)): install-%: $(addprefix $(TOPOBJDIR)/,buildid.h source-repo.h)
- @# For now, force preprocessed files to be reprocessed every time.
- @# The overhead is not that big, and this avoids waiting for proper
- @# support for defines tracking in process_install_manifest.
- @touch install_$(subst /,_,$*)
- @# BOOKMARKS_INCLUDE_DIR is for bookmarks.html only.
- $(PYTHON) -m mozbuild.action.process_install_manifest \
- --track install_$(subst /,_,$*).track \
- $(TOPOBJDIR)/$* \
- -DAB_CD=en-US \
- -DBOOKMARKS_INCLUDE_DIR=$(TOPSRCDIR)/browser/locales/en-US/profile \
- $(ACDEFINES) \
- install_$(subst /,_,$*)
-
-# ============================================================================
-# Below is a set of additional dependencies and variables used to build things
-# that are not supported by data in moz.build.
-
-# The xpidl target in config/makefiles/xpidl requires the install manifest for
-# dist/idl to have been processed. When using the hybrid
-# FasterMake/RecursiveMake backend, this dependency is handled in the top-level
-# Makefile.
-ifndef FASTER_RECURSIVE_MAKE
-$(TOPOBJDIR)/config/makefiles/xpidl/xpidl: $(TOPOBJDIR)/install-dist_idl
-endif
-# It also requires all the install manifests for dist/bin to have been processed
-# because it adds interfaces.manifest references with buildlist.py.
-$(TOPOBJDIR)/config/makefiles/xpidl/xpidl: $(addprefix install-,$(filter dist/bin%,$(INSTALL_MANIFESTS)))
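The install-% rule in the rules.mk removed above shells out to mozbuild.action.process_install_manifest for each manifest base. A rough Python equivalent of one such invocation, with the arguments taken directly from the rule (paths and the acdefines list are placeholders, and the helper name is illustrative):

    import subprocess
    import sys

    def process_install_manifest(topobjdir, topsrcdir, base, acdefines):
        manifest = 'install_%s' % base.replace('/', '_')  # e.g. install_dist_bin
        cmd = [
            sys.executable, '-m', 'mozbuild.action.process_install_manifest',
            '--track', manifest + '.track',
            '%s/%s' % (topobjdir, base),  # destination, e.g. <objdir>/dist/bin
            '-DAB_CD=en-US',
            '-DBOOKMARKS_INCLUDE_DIR=%s/browser/locales/en-US/profile' % topsrcdir,
        ] + list(acdefines) + [manifest]
        subprocess.check_call(cmd)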
diff --git a/moz.configure b/moz.configure
index 68450543f..633aaf689 100644
--- a/moz.configure
+++ b/moz.configure
@@ -11,33 +11,6 @@ include('build/moz.configure/init.configure')
# - Spidermonkey-specific options and rules should go in js/moz.configure.
# - etc.
-option('--enable-artifact-builds', env='MOZ_ARTIFACT_BUILDS',
- help='Download and use prebuilt binary artifacts.')
-
-@depends('--enable-artifact-builds')
-def artifact_builds(value):
- if value:
- return True
-
-set_config('MOZ_ARTIFACT_BUILDS', artifact_builds)
-
-imply_option('--enable-artifact-build-symbols',
- depends(artifact_builds)(lambda v: False if v is None else None),
- reason='--disable-artifact-builds')
-
-option('--enable-artifact-build-symbols',
- help='Download symbols when artifact builds are enabled.')
-
-set_config('MOZ_ARTIFACT_BUILD_SYMBOLS',
- depends_if('--enable-artifact-build-symbols')(lambda _: True))
-
-@depends('--enable-artifact-builds')
-def imply_disable_compile_environment(value):
- if value:
- return False
-
-imply_option('--enable-compile-environment', imply_disable_compile_environment)
-
option('--disable-compile-environment',
help='Disable compiler/library checks')
@@ -73,38 +46,8 @@ include('build/moz.configure/warnings.configure',
include(include_project_configure)
-@depends('--help')
-@imports(_from='mozbuild.backend', _import='backends')
-def build_backends_choices(_):
- return tuple(backends)
-
-
-@deprecated_option('--enable-build-backend', nargs='+',
- choices=build_backends_choices)
-def build_backend(backends):
- if backends:
- return tuple('+%s' % b for b in backends)
-
-imply_option('--build-backends', build_backend)
-
-
-@depends('--enable-artifact-builds', '--disable-compile-environment', '--help')
-@imports('sys')
-def build_backend_defaults(artifact_builds, compile_environment, _):
- if artifact_builds:
- all_backends = ['FasterMake+RecursiveMake']
- else:
- all_backends = ['RecursiveMake', 'FasterMake']
- return tuple(all_backends)
-
-option('--build-backends', nargs='+', default=build_backend_defaults,
- choices=build_backends_choices, help='Build backends to generate')
-
-@depends('--build-backends')
-def build_backends(backends):
- return backends
-
-set_config('BUILD_BACKENDS', build_backends)
+# We only support one build-backend, namely RecursiveMake.
+set_config('BUILD_BACKENDS', tuple(['RecursiveMake']))
# Awk detection
@@ -183,11 +126,7 @@ check_prog('GMAKE', possible_makes)
# tup detection
# ==============================================================
-@depends(build_backends)
-def tup_progs(build_backends):
- for backend in build_backends:
- if 'Tup' in backend:
- return ['tup']
+def tup_progs():
return None
tup = check_prog('TUP', tup_progs)
diff --git a/python/mozbuild/mozbuild/artifacts.py b/python/mozbuild/mozbuild/artifacts.py
deleted file mode 100644
index 563fcb9ff..000000000
--- a/python/mozbuild/mozbuild/artifacts.py
+++ /dev/null
@@ -1,1085 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-'''
-Fetch build artifacts from a Firefox tree.
-
-This provides an (at-the-moment special purpose) interface to download Android
-artifacts from Mozilla's Task Cluster.
-
-This module performs the following steps:
-
-* find a candidate hg parent revision. At one time we used the local pushlog,
- which required the mozext hg extension. This isn't feasible with git, and it
- is only mildly less efficient to not use the pushlog, so we don't use it even
- when querying hg.
-
-* map the candidate parent to candidate Task Cluster tasks and artifact
- locations. Pushlog entries might not correspond to tasks (yet), and those
- tasks might not produce the desired class of artifacts.
-
-* fetch fresh Task Cluster artifacts and purge old artifacts, using a simple
- Least Recently Used cache.
-
-* post-process fresh artifacts, to speed future installation. In particular,
- extract relevant files from Mac OS X DMG files into a friendly archive format
- so we don't have to mount DMG files frequently.
-
-The bulk of the complexity is in managing and persisting several caches. If
-we found a Python LRU cache that pickled cleanly, we could remove a lot of
-this code! Sadly, I found no such candidate implementations, so we pickle
-pylru caches manually.
-
-None of the instances (or the underlying caches) are safe for concurrent use.
-A future need, perhaps.
-
-This module requires certain modules be importable from the ambient Python
-environment. |mach artifact| ensures these modules are available, but other
-consumers will need to arrange this themselves.
-'''
-
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import collections
-import functools
-import glob
-import hashlib
-import logging
-import operator
-import os
-import pickle
-import re
-import requests
-import shutil
-import stat
-import subprocess
-import tarfile
-import tempfile
-import urlparse
-import zipfile
-
-import pylru
-import taskcluster
-
-from mozbuild.util import (
- ensureParentDir,
- FileAvoidWrite,
-)
-import mozinstall
-from mozpack.files import (
- JarFinder,
- TarFinder,
-)
-from mozpack.mozjar import (
- JarReader,
- JarWriter,
-)
-from mozpack.packager.unpack import UnpackFinder
-import mozpack.path as mozpath
-from mozregression.download_manager import (
- DownloadManager,
-)
-from mozregression.persist_limit import (
- PersistLimit,
-)
-
-NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50 # Number of candidate pushheads to cache per parent changeset.
-
-# Number of parent changesets to consider as possible pushheads.
-# There isn't really such a thing as a reasonable default here, because we don't
-# know how many pushheads we'll need to look at to find a build with our artifacts,
-# and we don't know how many changesets will be in each push. For now we assume
-# we'll find a build in the last 50 pushes, assuming each push contains 10 changesets.
-NUM_REVISIONS_TO_QUERY = 500
-
-MAX_CACHED_TASKS = 400 # Number of pushheads to cache Task Cluster task data for.
-
-# Number of downloaded artifacts to cache. Each artifact can be very large,
-# so don't make this to large! TODO: make this a size (like 500 megs) rather than an artifact count.
-MAX_CACHED_ARTIFACTS = 6
-
-# Downloaded artifacts are cached, and a subset of their contents extracted for
-# easy installation. This is most noticeable on Mac OS X: since mounting and
-# copying from DMG files is very slow, we extract the desired binaries to a
-# separate archive for fast re-installation.
-PROCESSED_SUFFIX = '.processed.jar'
-
-CANDIDATE_TREES = (
- 'mozilla-central',
- 'integration/mozilla-inbound',
- 'releases/mozilla-aurora'
-)
-
-class ArtifactJob(object):
- # These are a subset of TEST_HARNESS_BINS in testing/mochitest/Makefile.in.
- # Each item is a pair of (pattern, (src_prefix, dest_prefix), where src_prefix
- # is the prefix of the pattern relevant to its location in the archive, and
- # dest_prefix is the prefix to be added that will yield the final path relative
- # to dist/.
- test_artifact_patterns = {
- ('bin/BadCertServer', ('bin', 'bin')),
- ('bin/GenerateOCSPResponse', ('bin', 'bin')),
- ('bin/OCSPStaplingServer', ('bin', 'bin')),
- ('bin/certutil', ('bin', 'bin')),
- ('bin/fileid', ('bin', 'bin')),
- ('bin/pk12util', ('bin', 'bin')),
- ('bin/ssltunnel', ('bin', 'bin')),
- ('bin/xpcshell', ('bin', 'bin')),
- ('bin/plugins/*', ('bin/plugins', 'plugins'))
- }
-
- # We can tell our input is a test archive by this suffix, which happens to
- # be the same across platforms.
- _test_archive_suffix = '.common.tests.zip'
-
- def __init__(self, package_re, tests_re, log=None, download_symbols=False):
- self._package_re = re.compile(package_re)
- self._tests_re = None
- if tests_re:
- self._tests_re = re.compile(tests_re)
- self._log = log
- self._symbols_archive_suffix = None
- if download_symbols:
- self._symbols_archive_suffix = 'crashreporter-symbols.zip'
-
- def log(self, *args, **kwargs):
- if self._log:
- self._log(*args, **kwargs)
-
- def find_candidate_artifacts(self, artifacts):
- # TODO: Handle multiple artifacts, taking the latest one.
- tests_artifact = None
- for artifact in artifacts:
- name = artifact['name']
- if self._package_re and self._package_re.match(name):
- yield name
- elif self._tests_re and self._tests_re.match(name):
- tests_artifact = name
- yield name
- elif self._symbols_archive_suffix and name.endswith(self._symbols_archive_suffix):
- yield name
- else:
- self.log(logging.DEBUG, 'artifact',
- {'name': name},
- 'Not yielding artifact named {name} as a candidate artifact')
- if self._tests_re and not tests_artifact:
- raise ValueError('Expected tests archive matching "{re}", but '
- 'found none!'.format(re=self._tests_re))
-
- def process_artifact(self, filename, processed_filename):
- if filename.endswith(ArtifactJob._test_archive_suffix) and self._tests_re:
- return self.process_tests_artifact(filename, processed_filename)
- if self._symbols_archive_suffix and filename.endswith(self._symbols_archive_suffix):
- return self.process_symbols_archive(filename, processed_filename)
- return self.process_package_artifact(filename, processed_filename)
-
- def process_package_artifact(self, filename, processed_filename):
- raise NotImplementedError("Subclasses must specialize process_package_artifact!")
-
- def process_tests_artifact(self, filename, processed_filename):
- added_entry = False
-
- with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
- reader = JarReader(filename)
- for filename, entry in reader.entries.iteritems():
- for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
- if not mozpath.match(filename, pattern):
- continue
- destpath = mozpath.relpath(filename, src_prefix)
- destpath = mozpath.join(dest_prefix, destpath)
- self.log(logging.INFO, 'artifact',
- {'destpath': destpath},
- 'Adding {destpath} to processed archive')
- mode = entry['external_attr'] >> 16
- writer.add(destpath.encode('utf-8'), reader[filename], mode=mode)
- added_entry = True
-
- if not added_entry:
- raise ValueError('Archive format changed! No pattern from "{patterns}"'
- 'matched an archive path.'.format(
- patterns=LinuxArtifactJob.test_artifact_patterns))
-
- def process_symbols_archive(self, filename, processed_filename):
- with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
- reader = JarReader(filename)
- for filename in reader.entries:
- destpath = mozpath.join('crashreporter-symbols', filename)
- self.log(logging.INFO, 'artifact',
- {'destpath': destpath},
- 'Adding {destpath} to processed archive')
- writer.add(destpath.encode('utf-8'), reader[filename])
-
-class AndroidArtifactJob(ArtifactJob):
-
- product = 'mobile'
-
- package_artifact_patterns = {
- 'application.ini',
- 'platform.ini',
- '**/*.so',
- '**/interfaces.xpt',
- }
-
- def process_artifact(self, filename, processed_filename):
- # Extract all .so files into the root, which will get copied into dist/bin.
- with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
- for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
- if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
- continue
-
- dirname, basename = os.path.split(p)
- self.log(logging.INFO, 'artifact',
- {'basename': basename},
- 'Adding {basename} to processed archive')
-
- basedir = 'bin'
- if not basename.endswith('.so'):
- basedir = mozpath.join('bin', dirname.lstrip('assets/'))
- basename = mozpath.join(basedir, basename)
- writer.add(basename.encode('utf-8'), f.open())
-
-
-class LinuxArtifactJob(ArtifactJob):
-
- product = 'firefox'
-
- package_artifact_patterns = {
- 'firefox/application.ini',
- 'firefox/crashreporter',
- 'firefox/dependentlibs.list',
- 'firefox/firefox',
- 'firefox/firefox-bin',
- 'firefox/minidump-analyzer',
- 'firefox/platform.ini',
- 'firefox/plugin-container',
- 'firefox/updater',
- 'firefox/**/*.so',
- 'firefox/**/interfaces.xpt',
- }
-
- def process_package_artifact(self, filename, processed_filename):
- added_entry = False
-
- with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
- with tarfile.open(filename) as reader:
- for p, f in UnpackFinder(TarFinder(filename, reader)):
- if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
- continue
-
- # We strip off the relative "firefox/" bit from the path,
- # but otherwise preserve it.
- destpath = mozpath.join('bin',
- mozpath.relpath(p, "firefox"))
- self.log(logging.INFO, 'artifact',
- {'destpath': destpath},
- 'Adding {destpath} to processed archive')
- writer.add(destpath.encode('utf-8'), f.open(), mode=f.mode)
- added_entry = True
-
- if not added_entry:
- raise ValueError('Archive format changed! No pattern from "{patterns}" '
- 'matched an archive path.'.format(
- patterns=LinuxArtifactJob.package_artifact_patterns))
-
-
-class MacArtifactJob(ArtifactJob):
-
- product = 'firefox'
-
- def process_package_artifact(self, filename, processed_filename):
- tempdir = tempfile.mkdtemp()
- try:
- self.log(logging.INFO, 'artifact',
- {'tempdir': tempdir},
- 'Unpacking DMG into {tempdir}')
- mozinstall.install(filename, tempdir) # Doesn't handle already mounted DMG files nicely:
-
- # InstallError: Failed to install "/Users/nalexander/.mozbuild/package-frontend/b38eeeb54cdcf744-firefox-44.0a1.en-US.mac.dmg (local variable 'appDir' referenced before assignment)"
-
- # File "/Users/nalexander/Mozilla/gecko/mobile/android/mach_commands.py", line 250, in artifact_install
- # return artifacts.install_from(source, self.distdir)
- # File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 457, in install_from
- # return self.install_from_hg(source, distdir)
- # File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 445, in install_from_hg
- # return self.install_from_url(url, distdir)
- # File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 418, in install_from_url
- # return self.install_from_file(filename, distdir)
- # File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 336, in install_from_file
- # mozinstall.install(filename, tempdir)
- # File "/Users/nalexander/Mozilla/gecko/objdir-dce/_virtualenv/lib/python2.7/site-packages/mozinstall/mozinstall.py", line 117, in install
- # install_dir = _install_dmg(src, dest)
- # File "/Users/nalexander/Mozilla/gecko/objdir-dce/_virtualenv/lib/python2.7/site-packages/mozinstall/mozinstall.py", line 261, in _install_dmg
- # subprocess.call('hdiutil detach %s -quiet' % appDir,
-
- bundle_dirs = glob.glob(mozpath.join(tempdir, '*.app'))
- if len(bundle_dirs) != 1:
- raise ValueError('Expected one source bundle, found: {}'.format(bundle_dirs))
- [source] = bundle_dirs
-
- # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c".
- paths_no_keep_path = ('Contents/MacOS', [
- 'crashreporter.app/Contents/MacOS/crashreporter',
- 'crashreporter.app/Contents/MacOS/minidump-analyzer',
- 'firefox',
- 'firefox-bin',
- 'libfreebl3.dylib',
- 'liblgpllibs.dylib',
- # 'liblogalloc.dylib',
- 'libmozglue.dylib',
- 'libnss3.dylib',
- 'libnssckbi.dylib',
- 'libnssdbm3.dylib',
- 'libplugin_child_interpose.dylib',
- # 'libreplace_jemalloc.dylib',
- # 'libreplace_malloc.dylib',
- 'libmozavutil.dylib',
- 'libmozavcodec.dylib',
- 'libsoftokn3.dylib',
- 'plugin-container.app/Contents/MacOS/plugin-container',
- 'updater.app/Contents/MacOS/org.mozilla.updater',
- # 'xpcshell',
- 'XUL',
- ])
-
- # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c".
- paths_keep_path = ('Contents/Resources', [
- 'browser/components/libbrowsercomps.dylib',
- 'dependentlibs.list',
- # 'firefox',
- 'gmp-clearkey/0.1/libclearkey.dylib',
- # 'gmp-fake/1.0/libfake.dylib',
- # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
- '**/interfaces.xpt',
- ])
-
- with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
- root, paths = paths_no_keep_path
- finder = UnpackFinder(mozpath.join(source, root))
- for path in paths:
- for p, f in finder.find(path):
- self.log(logging.INFO, 'artifact',
- {'path': p},
- 'Adding {path} to processed archive')
- destpath = mozpath.join('bin', os.path.basename(p))
- writer.add(destpath.encode('utf-8'), f, mode=f.mode)
-
- root, paths = paths_keep_path
- finder = UnpackFinder(mozpath.join(source, root))
- for path in paths:
- for p, f in finder.find(path):
- self.log(logging.INFO, 'artifact',
- {'path': p},
- 'Adding {path} to processed archive')
- destpath = mozpath.join('bin', p)
- writer.add(destpath.encode('utf-8'), f.open(), mode=f.mode)
-
- finally:
- try:
- shutil.rmtree(tempdir)
- except (OSError, IOError):
- self.log(logging.WARN, 'artifact',
- {'tempdir': tempdir},
- 'Unable to delete {tempdir}')
- pass
-
-
-class WinArtifactJob(ArtifactJob):
- package_artifact_patterns = {
- 'firefox/dependentlibs.list',
- 'firefox/platform.ini',
- 'firefox/application.ini',
- 'firefox/**/*.dll',
- 'firefox/*.exe',
- 'firefox/**/interfaces.xpt',
- }
-
- product = 'firefox'
-
- # These are a subset of TEST_HARNESS_BINS in testing/mochitest/Makefile.in.
- test_artifact_patterns = {
- ('bin/BadCertServer.exe', ('bin', 'bin')),
- ('bin/GenerateOCSPResponse.exe', ('bin', 'bin')),
- ('bin/OCSPStaplingServer.exe', ('bin', 'bin')),
- ('bin/certutil.exe', ('bin', 'bin')),
- ('bin/fileid.exe', ('bin', 'bin')),
- ('bin/pk12util.exe', ('bin', 'bin')),
- ('bin/ssltunnel.exe', ('bin', 'bin')),
- ('bin/xpcshell.exe', ('bin', 'bin')),
- ('bin/plugins/*', ('bin/plugins', 'plugins'))
- }
-
- def process_package_artifact(self, filename, processed_filename):
- added_entry = False
- with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
- for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
- if not any(mozpath.match(p, pat) for pat in self.package_artifact_patterns):
- continue
-
- # strip off the relative "firefox/" bit from the path:
- basename = mozpath.relpath(p, "firefox")
- basename = mozpath.join('bin', basename)
- self.log(logging.INFO, 'artifact',
- {'basename': basename},
- 'Adding {basename} to processed archive')
- writer.add(basename.encode('utf-8'), f.open(), mode=f.mode)
- added_entry = True
-
- if not added_entry:
- raise ValueError('Archive format changed! No pattern from "{patterns}"'
- 'matched an archive path.'.format(
- patterns=self.artifact_patterns))
-
-# Keep the keys of this map in sync with the |mach artifact| --job
-# options. The keys of this map correspond to entries at
-# https://tools.taskcluster.net/index/artifacts/#gecko.v2.mozilla-central.latest/gecko.v2.mozilla-central.latest
-# The values correpsond to a pair of (<package regex>, <test archive regex>).
-JOB_DETAILS = {
- 'android-api-15-opt': (AndroidArtifactJob, ('public/build/target.apk',
- None)),
- 'android-api-15-debug': (AndroidArtifactJob, ('public/build/target.apk',
- None)),
- 'android-x86-opt': (AndroidArtifactJob, ('public/build/target.apk',
- None)),
- 'linux-opt': (LinuxArtifactJob, ('public/build/firefox-(.*)\.linux-i686\.tar\.bz2',
- 'public/build/firefox-(.*)\.common\.tests\.zip')),
- 'linux-debug': (LinuxArtifactJob, ('public/build/firefox-(.*)\.linux-i686\.tar\.bz2',
- 'public/build/firefox-(.*)\.common\.tests\.zip')),
- 'linux64-opt': (LinuxArtifactJob, ('public/build/firefox-(.*)\.linux-x86_64\.tar\.bz2',
- 'public/build/firefox-(.*)\.common\.tests\.zip')),
- 'linux64-debug': (LinuxArtifactJob, ('public/build/target\.tar\.bz2',
- 'public/build/target\.common\.tests\.zip')),
- 'macosx64-opt': (MacArtifactJob, ('public/build/firefox-(.*)\.mac\.dmg',
- 'public/build/firefox-(.*)\.common\.tests\.zip')),
- 'macosx64-debug': (MacArtifactJob, ('public/build/firefox-(.*)\.mac64\.dmg',
- 'public/build/firefox-(.*)\.common\.tests\.zip')),
- 'win32-opt': (WinArtifactJob, ('public/build/firefox-(.*)\.win32.zip',
- 'public/build/firefox-(.*)\.common\.tests\.zip')),
- 'win32-debug': (WinArtifactJob, ('public/build/firefox-(.*)\.win32.zip',
- 'public/build/firefox-(.*)\.common\.tests\.zip')),
- 'win64-opt': (WinArtifactJob, ('public/build/firefox-(.*)\.win64.zip',
- 'public/build/firefox-(.*)\.common\.tests\.zip')),
- 'win64-debug': (WinArtifactJob, ('public/build/firefox-(.*)\.win64.zip',
- 'public/build/firefox-(.*)\.common\.tests\.zip')),
-}
-
-
-
-def get_job_details(job, log=None, download_symbols=False):
- cls, (package_re, tests_re) = JOB_DETAILS[job]
- return cls(package_re, tests_re, log=log, download_symbols=download_symbols)
-
-def cachedmethod(cachefunc):
- '''Decorator to wrap a class or instance method with a memoizing callable that
- saves results in a (possibly shared) cache.
- '''
- def decorator(method):
- def wrapper(self, *args, **kwargs):
- mapping = cachefunc(self)
- if mapping is None:
- return method(self, *args, **kwargs)
- key = (method.__name__, args, tuple(sorted(kwargs.items())))
- try:
- value = mapping[key]
- return value
- except KeyError:
- pass
- result = method(self, *args, **kwargs)
- mapping[key] = result
- return result
- return functools.update_wrapper(wrapper, method)
- return decorator
-
-
-class CacheManager(object):
- '''Maintain an LRU cache. Provide simple persistence, including support for
- loading and saving the state using a "with" block. Allow clearing the cache
- and printing the cache for debugging.
-
- Provide simple logging.
- '''
-
- def __init__(self, cache_dir, cache_name, cache_size, cache_callback=None, log=None, skip_cache=False):
- self._skip_cache = skip_cache
- self._cache = pylru.lrucache(cache_size, callback=cache_callback)
- self._cache_filename = mozpath.join(cache_dir, cache_name + '-cache.pickle')
- self._log = log
-
- def log(self, *args, **kwargs):
- if self._log:
- self._log(*args, **kwargs)
-
- def load_cache(self):
- if self._skip_cache:
- self.log(logging.DEBUG, 'artifact',
- {},
- 'Skipping cache: ignoring load_cache!')
- return
-
- try:
- items = pickle.load(open(self._cache_filename, 'rb'))
- for key, value in items:
- self._cache[key] = value
- except Exception as e:
- # Corrupt cache, perhaps? Sadly, pickle raises many different
- # exceptions, so it's not worth trying to be fine grained here.
- # We ignore any exception, so the cache is effectively dropped.
- self.log(logging.INFO, 'artifact',
- {'filename': self._cache_filename, 'exception': repr(e)},
- 'Ignoring exception unpickling cache file {filename}: {exception}')
- pass
-
- def dump_cache(self):
- if self._skip_cache:
- self.log(logging.DEBUG, 'artifact',
- {},
- 'Skipping cache: ignoring dump_cache!')
- return
-
- ensureParentDir(self._cache_filename)
- pickle.dump(list(reversed(list(self._cache.items()))), open(self._cache_filename, 'wb'), -1)
-
- def clear_cache(self):
- if self._skip_cache:
- self.log(logging.DEBUG, 'artifact',
- {},
- 'Skipping cache: ignoring clear_cache!')
- return
-
- with self:
- self._cache.clear()
-
- def print_cache(self):
- with self:
- for item in self._cache.items():
- self.log(logging.INFO, 'artifact',
- {'item': item},
- '{item}')
-
- def print_last_item(self, args, sorted_kwargs, result):
- # By default, show nothing.
- pass
-
- def print_last(self):
- # We use the persisted LRU caches to our advantage. The first item is
- # most recent.
- with self:
- item = next(self._cache.items(), None)
- if item is not None:
- (name, args, sorted_kwargs), result = item
- self.print_last_item(args, sorted_kwargs, result)
- else:
- self.log(logging.WARN, 'artifact',
- {},
- 'No last cached item found.')
-
- def __enter__(self):
- self.load_cache()
- return self
-
- def __exit__(self, type, value, traceback):
- self.dump_cache()
-
-class PushheadCache(CacheManager):
- '''Helps map tree/revision pairs to parent pushheads according to the pushlog.'''
-
- def __init__(self, cache_dir, log=None, skip_cache=False):
- CacheManager.__init__(self, cache_dir, 'pushhead_cache', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
-
- @cachedmethod(operator.attrgetter('_cache'))
- def parent_pushhead_id(self, tree, revision):
- cset_url_tmpl = ('https://hg.mozilla.org/{tree}/json-pushes?'
- 'changeset={changeset}&version=2&tipsonly=1')
- req = requests.get(cset_url_tmpl.format(tree=tree, changeset=revision),
- headers={'Accept': 'application/json'})
- if req.status_code not in range(200, 300):
- raise ValueError
- result = req.json()
- [found_pushid] = result['pushes'].keys()
- return int(found_pushid)
-
- @cachedmethod(operator.attrgetter('_cache'))
- def pushid_range(self, tree, start, end):
- pushid_url_tmpl = ('https://hg.mozilla.org/{tree}/json-pushes?'
- 'startID={start}&endID={end}&version=2&tipsonly=1')
-
- req = requests.get(pushid_url_tmpl.format(tree=tree, start=start,
- end=end),
- headers={'Accept': 'application/json'})
- result = req.json()
- return [
- p['changesets'][-1] for p in result['pushes'].values()
- ]
-
-class TaskCache(CacheManager):
- '''Map candidate pushheads to Task Cluster task IDs and artifact URLs.'''
-
- def __init__(self, cache_dir, log=None, skip_cache=False):
- CacheManager.__init__(self, cache_dir, 'artifact_url', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
- self._index = taskcluster.Index()
- self._queue = taskcluster.Queue()
-
- @cachedmethod(operator.attrgetter('_cache'))
- def artifact_urls(self, tree, job, rev, download_symbols):
- try:
- artifact_job = get_job_details(job, log=self._log, download_symbols=download_symbols)
- except KeyError:
- self.log(logging.INFO, 'artifact',
- {'job': job},
- 'Unknown job {job}')
- raise KeyError("Unknown job")
-
- # Grab the second part of the repo name, which is generally how things
- # are indexed. Eg: 'integration/mozilla-inbound' is indexed as
- # 'mozilla-inbound'
- tree = tree.split('/')[1] if '/' in tree else tree
-
- namespace = 'gecko.v2.{tree}.revision.{rev}.{product}.{job}'.format(
- rev=rev,
- tree=tree,
- product=artifact_job.product,
- job=job,
- )
- self.log(logging.DEBUG, 'artifact',
- {'namespace': namespace},
- 'Searching Taskcluster index with namespace: {namespace}')
- try:
- task = self._index.findTask(namespace)
- except Exception:
- # Not all revisions correspond to pushes that produce the job we
- # care about; and even those that do may not have completed yet.
- raise ValueError('Task for {namespace} does not exist (yet)!'.format(namespace=namespace))
- taskId = task['taskId']
-
- artifacts = self._queue.listLatestArtifacts(taskId)['artifacts']
-
- urls = []
- for artifact_name in artifact_job.find_candidate_artifacts(artifacts):
- # We can easily extract the task ID from the URL. We can't easily
- # extract the build ID; we use the .ini files embedded in the
- # downloaded artifact for this. We could also use the uploaded
- # public/build/buildprops.json for this purpose.
- url = self._queue.buildUrl('getLatestArtifact', taskId, artifact_name)
- urls.append(url)
- if not urls:
- raise ValueError('Task for {namespace} existed, but no artifacts found!'.format(namespace=namespace))
- return urls
-
- def print_last_item(self, args, sorted_kwargs, result):
- tree, job, rev = args
- self.log(logging.INFO, 'artifact',
- {'rev': rev},
- 'Last installed binaries from hg parent revision {rev}')
-
-
-class ArtifactCache(CacheManager):
- '''Fetch Task Cluster artifact URLs and purge least recently used artifacts from disk.'''
-
- def __init__(self, cache_dir, log=None, skip_cache=False):
- # TODO: instead of storing N artifact packages, store M megabytes.
- CacheManager.__init__(self, cache_dir, 'fetch', MAX_CACHED_ARTIFACTS, cache_callback=self.delete_file, log=log, skip_cache=skip_cache)
- self._cache_dir = cache_dir
- size_limit = 1024 * 1024 * 1024 # 1Gb in bytes.
- file_limit = 4 # But always keep at least 4 old artifacts around.
- persist_limit = PersistLimit(size_limit, file_limit)
- self._download_manager = DownloadManager(self._cache_dir, persist_limit=persist_limit)
- self._last_dl_update = -1
-
- def delete_file(self, key, value):
- try:
- os.remove(value)
- self.log(logging.INFO, 'artifact',
- {'filename': value},
- 'Purged artifact {filename}')
- except (OSError, IOError):
- pass
-
- try:
- os.remove(value + PROCESSED_SUFFIX)
- self.log(logging.INFO, 'artifact',
- {'filename': value + PROCESSED_SUFFIX},
- 'Purged processed artifact {filename}')
- except (OSError, IOError):
- pass
-
- @cachedmethod(operator.attrgetter('_cache'))
- def fetch(self, url, force=False):
- # We download to a temporary name like HASH[:16]-basename to
- # differentiate among URLs with the same basenames. We used to then
- # extract the build ID from the downloaded artifact and use it to make a
- # human readable unique name, but extracting build IDs is time consuming
- # (especially on Mac OS X, where we must mount a large DMG file).
- hash = hashlib.sha256(url).hexdigest()[:16]
- fname = hash + '-' + os.path.basename(url)
-
- path = os.path.abspath(mozpath.join(self._cache_dir, fname))
- if self._skip_cache and os.path.exists(path):
- self.log(logging.DEBUG, 'artifact',
- {'path': path},
- 'Skipping cache: removing cached downloaded artifact {path}')
- os.remove(path)
-
- self.log(logging.INFO, 'artifact',
- {'path': path},
- 'Downloading to temporary location {path}')
- try:
- dl = self._download_manager.download(url, fname)
-
- def download_progress(dl, bytes_so_far, total_size):
- percent = (float(bytes_so_far) / total_size) * 100
- now = int(percent / 5)
- if now == self._last_dl_update:
- return
- self._last_dl_update = now
- self.log(logging.INFO, 'artifact',
- {'bytes_so_far': bytes_so_far, 'total_size': total_size, 'percent': percent},
- 'Downloading... {percent:02.1f} %')
-
- if dl:
- dl.set_progress(download_progress)
- dl.wait()
- self.log(logging.INFO, 'artifact',
- {'path': os.path.abspath(mozpath.join(self._cache_dir, fname))},
- 'Downloaded artifact to {path}')
- return os.path.abspath(mozpath.join(self._cache_dir, fname))
- finally:
- # Cancel any background downloads in progress.
- self._download_manager.cancel()
-
- def print_last_item(self, args, sorted_kwargs, result):
- url, = args
- self.log(logging.INFO, 'artifact',
- {'url': url},
- 'Last installed binaries from url {url}')
- self.log(logging.INFO, 'artifact',
- {'filename': result},
- 'Last installed binaries from local file {filename}')
- self.log(logging.INFO, 'artifact',
- {'filename': result + PROCESSED_SUFFIX},
- 'Last installed binaries from local processed file {filename}')
-
-
-class Artifacts(object):
- '''Maintain state to efficiently fetch build artifacts from a Firefox tree.'''
-
- def __init__(self, tree, substs, defines, job=None, log=None,
- cache_dir='.', hg=None, git=None, skip_cache=False,
- topsrcdir=None):
- if (hg and git) or (not hg and not git):
- raise ValueError("Must provide path to exactly one of hg and git")
-
- self._substs = substs
- self._download_symbols = self._substs.get('MOZ_ARTIFACT_BUILD_SYMBOLS', False)
- self._defines = defines
- self._tree = tree
- self._job = job or self._guess_artifact_job()
- self._log = log
- self._hg = hg
- self._git = git
- self._cache_dir = cache_dir
- self._skip_cache = skip_cache
- self._topsrcdir = topsrcdir
-
- try:
- self._artifact_job = get_job_details(self._job, log=self._log, download_symbols=self._download_symbols)
- except KeyError:
- self.log(logging.INFO, 'artifact',
- {'job': self._job},
- 'Unknown job {job}')
- raise KeyError("Unknown job")
-
- self._task_cache = TaskCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
- self._artifact_cache = ArtifactCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
- self._pushhead_cache = PushheadCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
-
- def log(self, *args, **kwargs):
- if self._log:
- self._log(*args, **kwargs)
-
- def _guess_artifact_job(self):
- # Add the "-debug" suffix to the guessed artifact job name
- # if MOZ_DEBUG is enabled.
- if self._substs.get('MOZ_DEBUG'):
- target_suffix = '-debug'
- else:
- target_suffix = '-opt'
-
- if self._substs.get('MOZ_BUILD_APP', '') == 'mobile/android':
- if self._substs['ANDROID_CPU_ARCH'] == 'x86':
- return 'android-x86-opt'
- return 'android-api-15' + target_suffix
-
- target_64bit = False
- if self._substs['target_cpu'] == 'x86_64':
- target_64bit = True
-
- if self._defines.get('XP_LINUX', False):
- return ('linux64' if target_64bit else 'linux') + target_suffix
- if self._defines.get('XP_WIN', False):
- return ('win64' if target_64bit else 'win32') + target_suffix
- raise Exception('Cannot determine default job for |mach artifact|!')
-
- def _pushheads_from_rev(self, rev, count):
- """Queries hg.mozilla.org's json-pushlog for pushheads that are nearby
- ancestors or `rev`. Multiple trees are queried, as the `rev` may
- already have been pushed to multiple repositories. For each repository
- containing `rev`, the pushhead introducing `rev` and the previous
- `count` pushheads from that point are included in the output.
- """
-
- with self._pushhead_cache as pushhead_cache:
- found_pushids = {}
- for tree in CANDIDATE_TREES:
- self.log(logging.INFO, 'artifact',
- {'tree': tree,
- 'rev': rev},
- 'Attempting to find a pushhead containing {rev} on {tree}.')
- try:
- pushid = pushhead_cache.parent_pushhead_id(tree, rev)
- found_pushids[tree] = pushid
- except ValueError:
- continue
-
- candidate_pushheads = collections.defaultdict(list)
-
- for tree, pushid in found_pushids.iteritems():
- end = pushid
- start = pushid - NUM_PUSHHEADS_TO_QUERY_PER_PARENT
-
- self.log(logging.INFO, 'artifact',
- {'tree': tree,
- 'pushid': pushid,
- 'num': NUM_PUSHHEADS_TO_QUERY_PER_PARENT},
- 'Retrieving the last {num} pushheads starting with id {pushid} on {tree}')
- for pushhead in pushhead_cache.pushid_range(tree, start, end):
- candidate_pushheads[pushhead].append(tree)
-
- return candidate_pushheads
-
- def _get_hg_revisions_from_git(self):
- rev_list = subprocess.check_output([
- self._git, 'rev-list', '--topo-order',
- '--max-count={num}'.format(num=NUM_REVISIONS_TO_QUERY),
- 'HEAD',
- ], cwd=self._topsrcdir)
-
- hg_hash_list = subprocess.check_output([
- self._git, 'cinnabar', 'git2hg'
- ] + rev_list.splitlines(), cwd=self._topsrcdir)
-
- zeroes = "0" * 40
-
- hashes = []
- for hg_hash in hg_hash_list.splitlines():
- hg_hash = hg_hash.strip()
- if not hg_hash or hg_hash == zeroes:
- continue
- hashes.append(hg_hash)
- return hashes
-
- def _get_recent_public_revisions(self):
- """Returns recent ancestors of the working parent that are likely to
- to be known to Mozilla automation.
-
- If we're using git, retrieves hg revisions from git-cinnabar.
- """
- if self._git:
- return self._get_hg_revisions_from_git()
-
- return subprocess.check_output([
- self._hg, 'log',
- '--template', '{node}\n',
- '-r', 'last(public() and ::., {num})'.format(
- num=NUM_REVISIONS_TO_QUERY)
- ], cwd=self._topsrcdir).splitlines()
-
- def _find_pushheads(self):
- """Returns an iterator of recent pushhead revisions, starting with the
- working parent.
- """
-
- last_revs = self._get_recent_public_revisions()
- candidate_pushheads = self._pushheads_from_rev(last_revs[0].rstrip(),
- NUM_PUSHHEADS_TO_QUERY_PER_PARENT)
- count = 0
- for rev in last_revs:
- rev = rev.rstrip()
- if not rev:
- continue
- if rev not in candidate_pushheads:
- continue
- count += 1
- yield candidate_pushheads[rev], rev
-
- if not count:
- raise Exception('Could not find any candidate pushheads in the last {num} revisions.\n'
- 'Search started with {rev}, which must be known to Mozilla automation.\n\n'
- 'see https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
- rev=last_revs[0], num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT))
-
- def find_pushhead_artifacts(self, task_cache, job, tree, pushhead):
- try:
- urls = task_cache.artifact_urls(tree, job, pushhead, self._download_symbols)
- except ValueError:
- return None
- if urls:
- self.log(logging.INFO, 'artifact',
- {'pushhead': pushhead,
- 'tree': tree},
- 'Installing from remote pushhead {pushhead} on {tree}')
- return urls
- return None
-
- def install_from_file(self, filename, distdir):
- self.log(logging.INFO, 'artifact',
- {'filename': filename},
- 'Installing from {filename}')
-
- # Do we need to post-process?
- processed_filename = filename + PROCESSED_SUFFIX
-
- if self._skip_cache and os.path.exists(processed_filename):
- self.log(logging.DEBUG, 'artifact',
- {'path': processed_filename},
- 'Skipping cache: removing cached processed artifact {path}')
- os.remove(processed_filename)
-
- if not os.path.exists(processed_filename):
- self.log(logging.INFO, 'artifact',
- {'filename': filename},
- 'Processing contents of {filename}')
- self.log(logging.INFO, 'artifact',
- {'processed_filename': processed_filename},
- 'Writing processed {processed_filename}')
- self._artifact_job.process_artifact(filename, processed_filename)
-
- self.log(logging.INFO, 'artifact',
- {'processed_filename': processed_filename},
- 'Installing from processed {processed_filename}')
-
- # Copy all .so files, avoiding modification where possible.
- ensureParentDir(mozpath.join(distdir, '.dummy'))
-
- with zipfile.ZipFile(processed_filename) as zf:
- for info in zf.infolist():
- if info.filename.endswith('.ini'):
- continue
- n = mozpath.join(distdir, info.filename)
- fh = FileAvoidWrite(n, mode='rb')
- shutil.copyfileobj(zf.open(info), fh)
- file_existed, file_updated = fh.close()
- self.log(logging.INFO, 'artifact',
- {'updating': 'Updating' if file_updated else 'Not updating', 'filename': n},
- '{updating} {filename}')
- if not file_existed or file_updated:
- # Libraries and binaries may need to be marked executable,
- # depending on platform.
- perms = info.external_attr >> 16 # See http://stackoverflow.com/a/434689.
- perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH # u+w, a+r.
- os.chmod(n, perms)
- return 0
-
- def install_from_url(self, url, distdir):
- self.log(logging.INFO, 'artifact',
- {'url': url},
- 'Installing from {url}')
- with self._artifact_cache as artifact_cache: # The with block handles persistence.
- filename = artifact_cache.fetch(url)
- return self.install_from_file(filename, distdir)
-
- def _install_from_hg_pushheads(self, hg_pushheads, distdir):
- """Iterate pairs (hg_hash, {tree-set}) associating hg revision hashes
- and tree-sets they are known to be in, trying to download and
- install from each.
- """
-
- urls = None
- count = 0
- # with blocks handle handle persistence.
- with self._task_cache as task_cache:
- for trees, hg_hash in hg_pushheads:
- for tree in trees:
- count += 1
- self.log(logging.DEBUG, 'artifact',
- {'hg_hash': hg_hash,
- 'tree': tree},
- 'Trying to find artifacts for hg revision {hg_hash} on tree {tree}.')
- urls = self.find_pushhead_artifacts(task_cache, self._job, tree, hg_hash)
- if urls:
- for url in urls:
- if self.install_from_url(url, distdir):
- return 1
- return 0
-
- self.log(logging.ERROR, 'artifact',
- {'count': count},
- 'Tried {count} pushheads, no built artifacts found.')
- return 1
-
- def install_from_recent(self, distdir):
- hg_pushheads = self._find_pushheads()
- return self._install_from_hg_pushheads(hg_pushheads, distdir)
-
- def install_from_revset(self, revset, distdir):
- if self._hg:
- revision = subprocess.check_output([self._hg, 'log', '--template', '{node}\n',
- '-r', revset], cwd=self._topsrcdir).strip()
- if len(revision.split('\n')) != 1:
- raise ValueError('hg revision specification must resolve to exactly one commit')
- else:
- revision = subprocess.check_output([self._git, 'rev-parse', revset], cwd=self._topsrcdir).strip()
- revision = subprocess.check_output([self._git, 'cinnabar', 'git2hg', revision], cwd=self._topsrcdir).strip()
- if len(revision.split('\n')) != 1:
- raise ValueError('hg revision specification must resolve to exactly one commit')
- if revision == "0" * 40:
- raise ValueError('git revision specification must resolve to a commit known to hg')
-
- self.log(logging.INFO, 'artifact',
- {'revset': revset,
- 'revision': revision},
- 'Will only accept artifacts from a pushhead at {revision} '
- '(matched revset "{revset}").')
- pushheads = [(list(CANDIDATE_TREES), revision)]
- return self._install_from_hg_pushheads(pushheads, distdir)
-
- def install_from(self, source, distdir):
- """Install artifacts from a ``source`` into the given ``distdir``.
- """
- if source and os.path.isfile(source):
- return self.install_from_file(source, distdir)
- elif source and urlparse.urlparse(source).scheme:
- return self.install_from_url(source, distdir)
- else:
- if source is None and 'MOZ_ARTIFACT_REVISION' in os.environ:
- source = os.environ['MOZ_ARTIFACT_REVISION']
-
- if source:
- return self.install_from_revset(source, distdir)
-
- return self.install_from_recent(distdir)
-
-
- def print_last(self):
- self.log(logging.INFO, 'artifact',
- {},
- 'Printing last used artifact details.')
- self._task_cache.print_last()
- self._artifact_cache.print_last()
- self._pushhead_cache.print_last()
-
- def clear_cache(self):
- self.log(logging.INFO, 'artifact',
- {},
- 'Deleting cached artifacts and caches.')
- self._task_cache.clear_cache()
- self._artifact_cache.clear_cache()
- self._pushhead_cache.clear_cache()
-
- def print_cache(self):
- self.log(logging.INFO, 'artifact',
- {},
- 'Printing cached artifacts and caches.')
- self._task_cache.print_cache()
- self._artifact_cache.print_cache()
- self._pushhead_cache.print_cache()
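At its core, the deleted TaskCache.artifact_urls mapped a (tree, revision, job) triple to a Taskcluster index namespace, resolved the namespace to a task, and listed the task's artifacts. A condensed sketch of that flow using the same taskcluster client calls as the removed code (candidate filtering and error handling are omitted):

    import taskcluster

    def artifact_urls(tree, rev, product, job):
        index, queue = taskcluster.Index(), taskcluster.Queue()
        # 'integration/mozilla-inbound' is indexed as 'mozilla-inbound'.
        tree = tree.split('/')[1] if '/' in tree else tree
        namespace = 'gecko.v2.{tree}.revision.{rev}.{product}.{job}'.format(
            tree=tree, rev=rev, product=product, job=job)
        task_id = index.findTask(namespace)['taskId']
        artifacts = queue.listLatestArtifacts(task_id)['artifacts']
        return [queue.buildUrl('getLatestArtifact', task_id, a['name'])
                for a in artifacts]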
diff --git a/python/mozbuild/mozbuild/backend/__init__.py b/python/mozbuild/mozbuild/backend/__init__.py
index 9e0e27d36..b5ef6f9f3 100644
--- a/python/mozbuild/mozbuild/backend/__init__.py
+++ b/python/mozbuild/mozbuild/backend/__init__.py
@@ -6,8 +6,6 @@ backends = {
'ChromeMap': 'mozbuild.codecoverage.chrome_map',
'CompileDB': 'mozbuild.compilation.database',
'CppEclipse': 'mozbuild.backend.cpp_eclipse',
- 'FasterMake': 'mozbuild.backend.fastermake',
- 'FasterMake+RecursiveMake': None,
'RecursiveMake': 'mozbuild.backend.recursivemake',
'Tup': 'mozbuild.backend.tup',
}
diff --git a/python/mozbuild/mozbuild/backend/common.py b/python/mozbuild/mozbuild/backend/common.py
index 12b2a27c4..a90aa1e5d 100644
--- a/python/mozbuild/mozbuild/backend/common.py
+++ b/python/mozbuild/mozbuild/backend/common.py
@@ -252,71 +252,35 @@ class CommonBackend(BuildBackend):
# We should consider aggregating WebIDL types in emitter.py.
elif isinstance(obj, WebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.sources.add(mozpath.join(obj.srcdir, obj.basename))
elif isinstance(obj, GeneratedEventWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.generated_events_sources.add(mozpath.join(
obj.srcdir, obj.basename))
elif isinstance(obj, TestWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.test_sources.add(mozpath.join(obj.srcdir,
obj.basename))
elif isinstance(obj, PreprocessedTestWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.preprocessed_test_sources.add(mozpath.join(
obj.srcdir, obj.basename))
elif isinstance(obj, GeneratedWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.generated_sources.add(mozpath.join(obj.srcdir,
obj.basename))
elif isinstance(obj, PreprocessedWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.preprocessed_sources.add(mozpath.join(
obj.srcdir, obj.basename))
elif isinstance(obj, ExampleWebIDLInterface):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.example_interfaces.add(obj.name)
elif isinstance(obj, IPDLFile):
- # IPDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._ipdl_sources.add(mozpath.join(obj.srcdir, obj.basename))
elif isinstance(obj, UnifiedSources):
- # Unified sources aren't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
if obj.have_unified_mapping:
self._write_unified_files(obj.unified_source_mapping, obj.objdir)
if hasattr(self, '_process_unified_sources'):
diff --git a/python/mozbuild/mozbuild/backend/configenvironment.py b/python/mozbuild/mozbuild/backend/configenvironment.py
index 331309af6..0edcf5366 100644
--- a/python/mozbuild/mozbuild/backend/configenvironment.py
+++ b/python/mozbuild/mozbuild/backend/configenvironment.py
@@ -187,10 +187,6 @@ class ConfigEnvironment(object):
self.substs_unicode = ReadOnlyDict(self.substs_unicode)
- @property
- def is_artifact_build(self):
- return self.substs.get('MOZ_ARTIFACT_BUILDS', False)
-
@staticmethod
def from_config_status(path):
config = BuildConfig.from_config_status(path)
diff --git a/python/mozbuild/mozbuild/backend/fastermake.py b/python/mozbuild/mozbuild/backend/fastermake.py
deleted file mode 100644
index d55928e8c..000000000
--- a/python/mozbuild/mozbuild/backend/fastermake.py
+++ /dev/null
@@ -1,165 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, unicode_literals, print_function
-
-from mozbuild.backend.base import PartialBackend
-from mozbuild.backend.common import CommonBackend
-from mozbuild.frontend.context import (
- ObjDirPath,
-)
-from mozbuild.frontend.data import (
- ChromeManifestEntry,
- FinalTargetPreprocessedFiles,
- FinalTargetFiles,
- JARManifest,
- XPIDLFile,
-)
-from mozbuild.makeutil import Makefile
-from mozbuild.util import OrderedDefaultDict
-from mozpack.manifests import InstallManifest
-import mozpack.path as mozpath
-
-
-class FasterMakeBackend(CommonBackend, PartialBackend):
- def _init(self):
- super(FasterMakeBackend, self)._init()
-
- self._manifest_entries = OrderedDefaultDict(set)
-
- self._install_manifests = OrderedDefaultDict(InstallManifest)
-
- self._dependencies = OrderedDefaultDict(list)
-
- self._has_xpidl = False
-
- def _add_preprocess(self, obj, path, dest, target=None, **kwargs):
- if target is None:
- target = mozpath.basename(path)
- # This matches what PP_TARGETS do in config/rules.
- if target.endswith('.in'):
- target = target[:-3]
- if target.endswith('.css'):
- kwargs['marker'] = '%'
- depfile = mozpath.join(
- self.environment.topobjdir, 'faster', '.deps',
- mozpath.join(obj.install_target, dest, target).replace('/', '_'))
- self._install_manifests[obj.install_target].add_preprocess(
- mozpath.join(obj.srcdir, path),
- mozpath.join(dest, target),
- depfile,
- **kwargs)
-
- def consume_object(self, obj):
- if isinstance(obj, JARManifest) and \
- obj.install_target.startswith('dist/bin'):
- self._consume_jar_manifest(obj)
-
- elif isinstance(obj, (FinalTargetFiles,
- FinalTargetPreprocessedFiles)) and \
- obj.install_target.startswith('dist/bin'):
- defines = obj.defines or {}
- if defines:
- defines = defines.defines
- for path, files in obj.files.walk():
- for f in files:
- if isinstance(obj, FinalTargetPreprocessedFiles):
- self._add_preprocess(obj, f.full_path, path,
- target=f.target_basename,
- defines=defines)
- elif '*' in f:
- def _prefix(s):
- for p in mozpath.split(s):
- if '*' not in p:
- yield p + '/'
- prefix = ''.join(_prefix(f.full_path))
-
- self._install_manifests[obj.install_target] \
- .add_pattern_symlink(
- prefix,
- f.full_path[len(prefix):],
- mozpath.join(path, f.target_basename))
- else:
- self._install_manifests[obj.install_target].add_symlink(
- f.full_path,
- mozpath.join(path, f.target_basename)
- )
- if isinstance(f, ObjDirPath):
- dep_target = 'install-%s' % obj.install_target
- self._dependencies[dep_target].append(
- mozpath.relpath(f.full_path,
- self.environment.topobjdir))
-
- elif isinstance(obj, ChromeManifestEntry) and \
- obj.install_target.startswith('dist/bin'):
- top_level = mozpath.join(obj.install_target, 'chrome.manifest')
- if obj.path != top_level:
- entry = 'manifest %s' % mozpath.relpath(obj.path,
- obj.install_target)
- self._manifest_entries[top_level].add(entry)
- self._manifest_entries[obj.path].add(str(obj.entry))
-
- elif isinstance(obj, XPIDLFile):
- self._has_xpidl = True
- # We're not actually handling XPIDL files.
- return False
-
- else:
- return False
-
- return True
-
- def consume_finished(self):
- mk = Makefile()
- # Add the default rule at the very beginning.
- mk.create_rule(['default'])
- mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
- mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)
- if not self._has_xpidl:
- mk.add_statement('NO_XPIDL = 1')
-
- # Add a few necessary variables inherited from configure
- for var in (
- 'PYTHON',
- 'ACDEFINES',
- 'MOZ_BUILD_APP',
- 'MOZ_WIDGET_TOOLKIT',
- ):
- value = self.environment.substs.get(var)
- if value is not None:
- mk.add_statement('%s = %s' % (var, value))
-
- install_manifests_bases = self._install_manifests.keys()
-
- # Add information for chrome manifest generation
- manifest_targets = []
-
- for target, entries in self._manifest_entries.iteritems():
- manifest_targets.append(target)
- install_target = mozpath.basedir(target, install_manifests_bases)
- self._install_manifests[install_target].add_content(
- ''.join('%s\n' % e for e in sorted(entries)),
- mozpath.relpath(target, install_target))
-
- # Add information for install manifests.
- mk.add_statement('INSTALL_MANIFESTS = %s'
- % ' '.join(self._install_manifests.keys()))
-
- # Add dependencies we inferred:
- for target, deps in self._dependencies.iteritems():
- mk.create_rule([target]).add_dependencies(
- '$(TOPOBJDIR)/%s' % d for d in deps)
-
- mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')
-
- for base, install_manifest in self._install_manifests.iteritems():
- with self._write_file(
- mozpath.join(self.environment.topobjdir, 'faster',
- 'install_%s' % base.replace('/', '_'))) as fh:
- install_manifest.write(fileobj=fh)
-
- with self._write_file(
- mozpath.join(self.environment.topobjdir, 'faster',
- 'Makefile')) as fh:
- mk.dump(fh, removal_guard=False)
diff --git a/python/mozbuild/mozbuild/mach_commands.py b/python/mozbuild/mozbuild/mach_commands.py
index e299fca18..c4ee73345 100644
--- a/python/mozbuild/mozbuild/mach_commands.py
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -509,7 +509,7 @@ class Build(MachCommandBase):
# to avoid accidentally disclosing PII.
telemetry_data['substs'] = {}
try:
- for key in ['MOZ_ARTIFACT_BUILDS', 'MOZ_USING_CCACHE']:
+ for key in ['MOZ_USING_CCACHE']:
value = self.substs.get(key, False)
telemetry_data['substs'][key] = value
except BuildEnvironmentNotFoundException:
@@ -1477,154 +1477,6 @@ class MachDebug(MachCommandBase):
return json.JSONEncoder.default(self, obj)
json.dump(self, cls=EnvironmentEncoder, sort_keys=True, fp=out)
-class ArtifactSubCommand(SubCommand):
- def __call__(self, func):
- after = SubCommand.__call__(self, func)
- jobchoices = {
- 'linux',
- 'linux64',
- 'macosx64',
- 'win32',
- 'win64'
- }
- args = [
- CommandArgument('--tree', metavar='TREE', type=str,
- help='Firefox tree.'),
- CommandArgument('--job', metavar='JOB', choices=jobchoices,
- help='Build job.'),
- CommandArgument('--verbose', '-v', action='store_true',
- help='Print verbose output.'),
- ]
- for arg in args:
- after = arg(after)
- return after
-
-
-@CommandProvider
-class PackageFrontend(MachCommandBase):
- """Fetch and install binary artifacts from Mozilla automation."""
-
- @Command('artifact', category='post-build',
- description='Use pre-built artifacts to build Firefox.')
- def artifact(self):
- '''Download, cache, and install pre-built binary artifacts to build Firefox.
-
- Use |mach build| as normal to freshen your installed binary libraries:
- artifact builds automatically download, cache, and install binary
- artifacts from Mozilla automation, replacing whatever may be in your
- object directory. Use |mach artifact last| to see what binary artifacts
- were last used.
-
- Never build libxul again!
-
- '''
- pass
-
- def _set_log_level(self, verbose):
- self.log_manager.terminal_handler.setLevel(logging.INFO if not verbose else logging.DEBUG)
-
- def _install_pip_package(self, package):
- if os.environ.get('MOZ_AUTOMATION'):
- self.virtualenv_manager._run_pip([
- 'install',
- package,
- '--no-index',
- '--find-links',
- 'http://pypi.pub.build.mozilla.org/pub',
- '--trusted-host',
- 'pypi.pub.build.mozilla.org',
- ])
- return
- self.virtualenv_manager.install_pip_package(package)
-
- def _make_artifacts(self, tree=None, job=None, skip_cache=False):
- # Undo PATH munging that will be done by activating the virtualenv,
- # so that invoked subprocesses expecting to find system python
- # (git cinnabar, in particular), will not find virtualenv python.
- original_path = os.environ.get('PATH', '')
- self._activate_virtualenv()
- os.environ['PATH'] = original_path
-
- for package in ('taskcluster==0.0.32',
- 'mozregression==1.0.2'):
- self._install_pip_package(package)
-
- state_dir = self._mach_context.state_dir
- cache_dir = os.path.join(state_dir, 'package-frontend')
-
- try:
- os.makedirs(cache_dir)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
- import which
-
- here = os.path.abspath(os.path.dirname(__file__))
- build_obj = MozbuildObject.from_environment(cwd=here)
-
- hg = None
- if conditions.is_hg(build_obj):
- if self._is_windows():
- hg = which.which('hg.exe')
- else:
- hg = which.which('hg')
-
- git = None
- if conditions.is_git(build_obj):
- if self._is_windows():
- git = which.which('git.exe')
- else:
- git = which.which('git')
-
- # Absolutely must come after the virtualenv is populated!
- from mozbuild.artifacts import Artifacts
- artifacts = Artifacts(tree, self.substs, self.defines, job,
- log=self.log, cache_dir=cache_dir,
- skip_cache=skip_cache, hg=hg, git=git,
- topsrcdir=self.topsrcdir)
- return artifacts
-
- @ArtifactSubCommand('artifact', 'install',
- 'Install a good pre-built artifact.')
- @CommandArgument('source', metavar='SRC', nargs='?', type=str,
- help='Where to fetch and install artifacts from. Can be omitted, in '
- 'which case the current hg repository is inspected; an hg revision; '
- 'a remote URL; or a local file.',
- default=None)
- @CommandArgument('--skip-cache', action='store_true',
- help='Skip all local caches to force re-fetching remote artifacts.',
- default=False)
- def artifact_install(self, source=None, skip_cache=False, tree=None, job=None, verbose=False):
- self._set_log_level(verbose)
- artifacts = self._make_artifacts(tree=tree, job=job, skip_cache=skip_cache)
-
- return artifacts.install_from(source, self.distdir)
-
- @ArtifactSubCommand('artifact', 'last',
- 'Print the last pre-built artifact installed.')
- def artifact_print_last(self, tree=None, job=None, verbose=False):
- self._set_log_level(verbose)
- artifacts = self._make_artifacts(tree=tree, job=job)
- artifacts.print_last()
- return 0
-
- @ArtifactSubCommand('artifact', 'print-cache',
- 'Print local artifact cache for debugging.')
- def artifact_print_cache(self, tree=None, job=None, verbose=False):
- self._set_log_level(verbose)
- artifacts = self._make_artifacts(tree=tree, job=job)
- artifacts.print_cache()
- return 0
-
- @ArtifactSubCommand('artifact', 'clear-cache',
- 'Delete local artifacts and reset local artifact cache.')
- def artifact_clear_cache(self, tree=None, job=None, verbose=False):
- self._set_log_level(verbose)
- artifacts = self._make_artifacts(tree=tree, job=job)
- artifacts.clear_cache()
- return 0
-
@CommandProvider
class Vendor(MachCommandBase):
"""Vendor third-party dependencies into the source repository."""