-rw-r--r--  Makefile.in | 40
-rw-r--r--  build.gradle | 129
-rw-r--r--  build/docs/cppeclipse.rst | 54
-rw-r--r--  build/docs/index.rst | 8
-rw-r--r--  build/docs/supported-configurations.rst | 46
-rw-r--r--  build/docs/visualstudio.rst | 100
-rw-r--r--  build/mach_bootstrap.py | 1
-rw-r--r--  build/moz.build | 7
-rw-r--r--  caps/tests/mochitest/browser_checkloaduri.js | 3
-rw-r--r--  config/baseconfig.mk | 2
-rw-r--r--  config/faster/rules.mk | 110
-rw-r--r--  gradle.properties | 2
-rw-r--r--  gradle/wrapper/gradle-wrapper.jar | bin 53638 -> 0 bytes
-rw-r--r--  gradle/wrapper/gradle-wrapper.properties | 7
-rwxr-xr-x  gradlew | 160
-rw-r--r--  moz.configure | 77
-rw-r--r--  old-configure.in | 1
-rw-r--r--  python/moz.build | 1
-rw-r--r--  python/mozbuild/mozbuild/backend/__init__.py | 11
-rw-r--r--  python/mozbuild/mozbuild/backend/base.py | 55
-rw-r--r--  python/mozbuild/mozbuild/backend/common.py | 36
-rw-r--r--  python/mozbuild/mozbuild/backend/configenvironment.py | 4
-rw-r--r--  python/mozbuild/mozbuild/backend/cpp_eclipse.py | 685
-rw-r--r--  python/mozbuild/mozbuild/backend/fastermake.py | 165
-rw-r--r--  python/mozbuild/mozbuild/backend/mach_commands.py | 123
-rw-r--r--  python/mozbuild/mozbuild/backend/visualstudio.py | 582
-rw-r--r--  python/mozbuild/mozbuild/codecoverage/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/codecoverage/chrome_map.py | 105
-rw-r--r--  python/mozbuild/mozbuild/codecoverage/packager.py | 43
-rw-r--r--  python/mozbuild/mozbuild/config_status.py | 17
-rw-r--r--  python/mozbuild/mozbuild/mach_commands.py | 150
-rw-r--r--  python/mozbuild/mozbuild/test/backend/test_visualstudio.py | 64
-rw-r--r--  security/apps/AppSignatureVerification.cpp | 1559
-rw-r--r--  security/apps/AppTrustDomain.cpp | 388
-rw-r--r--  security/apps/AppTrustDomain.h | 89
-rw-r--r--  security/apps/addons-public.crt | bin 1637 -> 0 bytes
-rw-r--r--  security/apps/addons-stage.crt | bin 1895 -> 0 bytes
-rw-r--r--  security/apps/gen_cert_header.py | 45
-rw-r--r--  security/apps/marketplace-dev-public.crt | bin 964 -> 0 bytes
-rw-r--r--  security/apps/marketplace-dev-reviewers.crt | bin 1012 -> 0 bytes
-rw-r--r--  security/apps/marketplace-prod-public.crt | bin 1177 -> 0 bytes
-rw-r--r--  security/apps/marketplace-prod-reviewers.crt | bin 1171 -> 0 bytes
-rw-r--r--  security/apps/marketplace-stage.crt | bin 1157 -> 0 bytes
-rw-r--r--  security/apps/moz.build | 43
-rw-r--r--  security/apps/privileged-package-root.der | bin 930 -> 0 bytes
-rw-r--r--  security/apps/trusted-app-public.der | 0
-rw-r--r--  security/manager/ssl/nsIX509CertDB.idl | 73
-rw-r--r--  settings.gradle | 1
-rw-r--r--  toolkit/components/moz.build | 1
-rw-r--r--  toolkit/components/mozprotocol/moz.build | 9
-rw-r--r--  toolkit/components/mozprotocol/mozProtocolHandler.js | 48
-rw-r--r--  toolkit/components/mozprotocol/mozProtocolHandler.manifest | 2
-rw-r--r--  toolkit/mozapps/installer/packager.mk | 13
-rw-r--r--  toolkit/mozapps/installer/upload-files.mk | 5
-rw-r--r--  toolkit/toolkit.mozbuild | 2
55 files changed, 36 insertions, 5030 deletions
diff --git a/Makefile.in b/Makefile.in
index 6c23273884..aec100ea0d 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -137,16 +137,10 @@ default:: $(BUILD_BACKEND_FILES)
endif
install_manifests := \
- $(addprefix dist/,branding idl include public private sdk xpi-stage) \
+ $(addprefix dist/,branding bin idl include public private sdk xpi-stage) \
_tests \
$(NULL)
-# Skip the dist/bin install manifest when using the hybrid
-# FasterMake/RecursiveMake backend. This is a hack until bug 1241744 moves
-# xpidl handling to FasterMake in that case, mechanically making the dist/bin
-# install manifest non-existent (non-existent manifests being skipped)
-ifeq (,$(filter FasterMake+RecursiveMake,$(BUILD_BACKENDS)))
-install_manifests += dist/bin
-endif
+
install_manifest_depends = \
CLOBBER \
$(configure_dir)/configure \
@@ -166,27 +160,6 @@ endif
.PHONY: install-manifests
install-manifests: $(addprefix install-,$(install_manifests))
-# If we're using the hybrid FasterMake/RecursiveMake backend, we want
-# to recurse in the faster/ directory in parallel of install manifests.
-# But dist/idl needs to happen before (cf. dependencies in
-# config/faster/rules.mk)
-ifneq (,$(filter FasterMake+RecursiveMake,$(BUILD_BACKENDS)))
-install-manifests: faster
-.PHONY: faster
-faster: install-dist/idl
- $(MAKE) -C faster FASTER_RECURSIVE_MAKE=1
-endif
-
-.PHONY: tup
-tup:
- $(call BUILDSTATUS,TIERS make tup)
- $(call BUILDSTATUS,TIER_START make)
- $(MAKE) install-manifests buildid.h source-repo.h
- $(call BUILDSTATUS,TIER_FINISH make)
- $(call BUILDSTATUS,TIER_START tup)
- @$(TUP) $(if $(findstring s,$(filter-out --%,$(MAKEFLAGS))),,--verbose)
- $(call BUILDSTATUS,TIER_FINISH tup)
-
# process_install_manifest needs to be invoked with --no-remove when building
# js as standalone because automated builds are building nspr separately and
# that would remove the resulting files.
@@ -198,17 +171,8 @@ endif
.PHONY: $(addprefix install-,$(subst /,_,$(install_manifests)))
$(addprefix install-,$(install_manifests)): install-%: $(install_manifest_depends)
-ifneq (,$(filter FasterMake+RecursiveMake,$(BUILD_BACKENDS)))
- @# If we're using the hybrid FasterMake/RecursiveMake backend, we want
- @# to ensure the FasterMake end doesn't have install manifests for the
- @# same directory, because that would blow up
- $(if $(wildcard _build_manifests/install/$(subst /,_,$*)),$(if $(wildcard faster/install_$(subst /,_,$*)*),$(error FasterMake and RecursiveMake ends of the hybrid build system want to handle $*)))
-endif
$(addprefix $(call py_action,process_install_manifest,$(if $(NO_REMOVE),--no-remove )$*) ,$(wildcard _build_manifests/install/$(subst /,_,$*)))
-# Dummy wrapper rule to allow the faster backend to piggy back
-$(addprefix install-,$(subst /,_,$(filter dist/%,$(install_manifests)))): install-dist_%: install-dist/% ;
-
.PHONY: install-tests
install-tests: install-test-files
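
For context, the install-% rules above locate their manifest by replacing slashes in the target name with underscores (the $(subst /,_,$*) expression). A minimal Python sketch of that mapping, with an illustrative objdir path::

    import os

    def install_manifest_path(topobjdir, base):
        # Mirrors $(subst /,_,$*) in the rules above:
        # "dist/bin" -> "<objdir>/_build_manifests/install/dist_bin"
        return os.path.join(topobjdir, '_build_manifests', 'install',
                            base.replace('/', '_'))

    for base in ('dist/branding', 'dist/bin', 'dist/idl', '_tests'):
        print(base, '->', install_manifest_path('/builds/objdir', base))
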
diff --git a/build.gradle b/build.gradle
deleted file mode 100644
index d31d07cdcd..0000000000
--- a/build.gradle
+++ /dev/null
@@ -1,129 +0,0 @@
-import java.util.regex.Pattern
-
-allprojects {
- // Expose the per-object-directory configuration to all projects.
- ext {
- mozconfig = gradle.mozconfig
- topsrcdir = gradle.mozconfig.topsrcdir
- topobjdir = gradle.mozconfig.topobjdir
- }
-
- repositories {
- if (gradle.mozconfig.substs.GRADLE_MAVEN_REPOSITORY) {
- maven {
- url gradle.mozconfig.substs.GRADLE_MAVEN_REPOSITORY
- }
- }
- }
-}
-
-buildDir "${topobjdir}/gradle/build"
-
-buildscript {
- repositories {
- if (gradle.mozconfig.substs.GRADLE_MAVEN_REPOSITORY) {
- maven {
- url gradle.mozconfig.substs.GRADLE_MAVEN_REPOSITORY
- }
- }
- // For android-sdk-manager SNAPSHOT releases.
- maven {
- url "file://${gradle.mozconfig.topsrcdir}/mobile/android/gradle/m2repo"
- }
- }
-
- dependencies {
- classpath 'com.android.tools.build:gradle:2.1.3'
- classpath('com.stanfy.spoon:spoon-gradle-plugin:1.0.4') {
- // Without these, we get errors linting.
- exclude module: 'guava'
- }
- // Provided in tree.
- classpath 'com.jakewharton.sdkmanager:gradle-plugin:1.5.0-SNAPSHOT'
- }
-}
-
-task generateCodeAndResources(type:Exec) {
- workingDir "${topobjdir}"
-
- commandLine mozconfig.substs.GMAKE
- args '-C'
- args "${topobjdir}/mobile/android/base"
- args 'gradle-targets'
-
- // Only show the output if something went wrong.
- ignoreExitValue = true
- standardOutput = new ByteArrayOutputStream()
- errorOutput = standardOutput
- doLast {
- if (execResult.exitValue != 0) {
- throw new GradleException("Process '${commandLine}' finished with non-zero exit value ${execResult.exitValue}:\n\n${standardOutput.toString()}")
- }
- }
-}
-
-// Skip unit test for all build variants, unless if it was specifically requested by user.
-// The enabled property for the unit test tasks is reset based on the command line task names just before the task execution.
-// I bet there is a easier/cleaner way to do this, but this gets the job done for now.
-Pattern pattern = Pattern.compile('.*test(.+UnitTest)?.*')
-boolean startTasksIncludeTest = gradle.startParameter.taskNames.any {
- taskName ->
- taskName.matches(pattern)
-}
-gradle.taskGraph.beforeTask {
- Task task ->
- if (task.name.matches(pattern)) {
- task.enabled = startTasksIncludeTest
- }
-}
-
-afterEvaluate {
- subprojects {
- if (!hasProperty('android')) {
- return
- }
- android.applicationVariants.all {
- preBuild.dependsOn rootProject.generateCodeAndResources
- }
- android.libraryVariants.all {
- preBuild.dependsOn rootProject.generateCodeAndResources
- }
- }
-}
-
-apply plugin: 'idea'
-
-idea {
- project {
- languageLevel = '1.7'
- }
-
- module {
- // Object directories take a huge amount of time for IntelliJ to index.
- // Exclude them. Convention is that object directories start with obj.
- // IntelliJ is clever and will not exclude the parts of the object
- // directory that are referenced, if there are any. In practice,
- // indexing the entirety of the tree is taking too long, so exclude all
- // but mobile/.
- def topsrcdirURI = file(topsrcdir).toURI()
- excludeDirs += files(file(topsrcdir)
- .listFiles({it.isDirectory()} as FileFilter)
- .collect({topsrcdirURI.relativize(it.toURI()).toString()}) // Relative paths.
- .findAll({!it.equals('mobile/')}))
-
- // If topobjdir is below topsrcdir, hide only some portions of that tree.
- def topobjdirURI = file(topobjdir).toURI()
- if (!topsrcdirURI.relativize(topobjdirURI).isAbsolute()) {
- excludeDirs -= file(topobjdir)
- excludeDirs += files(file(topobjdir).listFiles())
- excludeDirs -= file("${topobjdir}/gradle")
- }
-
- if (!mozconfig.substs.MOZ_INSTALL_TRACKING) {
- excludeDirs += file("${topsrcdir}/mobile/android/thirdparty/com/adjust")
- }
- }
-}
-
-task wrapper(type: Wrapper) {
-}
diff --git a/build/docs/cppeclipse.rst b/build/docs/cppeclipse.rst
deleted file mode 100644
index 3596a2f9a6..0000000000
--- a/build/docs/cppeclipse.rst
+++ /dev/null
@@ -1,54 +0,0 @@
-.. _build_cppeclipse:
-
-=====================
-Cpp Eclipse Projects
-=====================
-
-For additional information on using Eclipse CDT see
-`the MDN page
-<https://developer.mozilla.org/en-US/docs/Eclipse_CDT>`_.
-
-The build system contains alpha support for generating C++ Eclipse
-project files to aid with development.
-
-Please report bugs to bugzilla and make them depend on bug 973770.
-
-To generate a C++ Eclipse project files, you'll need to have a fully
-built tree::
-
- mach build
-
-Then, simply generate the Eclipse build backend::
-
- mach build-backend -b CppEclipse
-
-If all goes well, the path to the generated workspace should be
-printed.
-
-To use the generated Eclipse project files, you'll need to
-have a Eclipse CDT 8.3 (We plan to follow the latest Eclipse release)
-`Eclipse CDT plugin
-<https://www.eclipse.org/cdt/>`_
-installed. You can then import all the projects into Eclipse using
-*File > Import ... > General > Existing Projects into Workspace*
--only- if you have not ran the background indexer.
-
-Updating Project Files
-======================
-
-As you pull and update the source tree, your C++ Eclipse files may
-fall out of sync with the build configuration. The tree should still
-build fine from within Eclipse, but source files may be missing and in
-rare circumstances Eclipse's index may not have the proper build
-configuration.
-
-To account for this, you'll want to periodically regenerate the
-Eclipse project files. You can do this by running ``mach build
-&& mach build-backend -b CppEclipse`` from the
-command line.
-
-Currently, regeneration rewrites the original project files. **If
-you've made any customizations to the projects, they will likely get
-overwritten.** We would like to improve this user experience in the
-future.
-
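
For reference, the workspace path that this backend prints follows the convention implemented in cpp_eclipse.py further down in this patch: the workspace is placed next to the source directory and named after the objdir. A rough sketch with illustrative paths::

    import os

    def eclipse_workspace_path(topsrcdir, topobjdir):
        # See CppEclipseBackend.get_workspace_path() below: the workspace
        # lives alongside the srcdir, named "eclipse_" + basename(objdir).
        return os.path.join(os.path.dirname(topsrcdir),
                            'eclipse_' + os.path.basename(topobjdir))

    # e.g. /home/user/eclipse_obj-x86_64
    print(eclipse_workspace_path('/home/user/mozilla-central',
                                 '/home/user/mozilla-central/obj-x86_64'))
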
diff --git a/build/docs/index.rst b/build/docs/index.rst
index 75000aff33..fa1434b852 100644
--- a/build/docs/index.rst
+++ b/build/docs/index.rst
@@ -28,14 +28,6 @@ Important Concepts
locales
rust
-integrated development environment (IDE)
-========================================
-.. toctree::
- :maxdepth: 1
-
- cppeclipse
- visualstudio
-
mozbuild
========
diff --git a/build/docs/supported-configurations.rst b/build/docs/supported-configurations.rst
index cc2c1ea728..cfd1c98264 100644
--- a/build/docs/supported-configurations.rst
+++ b/build/docs/supported-configurations.rst
@@ -5,41 +5,44 @@ Supported Configurations
========================
This page attempts to document supported build configurations.
+For more up-to-date information please go to http://developer.palemoon.org/
Windows
=======
-We support building on Windows XP and newer operating systems using
-Visual Studio 2010 and newer.
+We support building on Windows 7 and newer operating systems using
+Visual Studio 2015 U3.
-The following are not fully supported by Mozilla (but may work):
+The following are not fully supported (but may work):
-* Building without the latest *MozillaBuild* Windows development
- environment
+* Building with a *MozillaBuild* Windows development
+ environment not mentioned on the developer documentation site.
* Building with Mingw or any other non-Visual Studio toolchain.
OS X
====
-
-We support building on OS X 10.6 and newer with the OS X 10.6 SDK.
+(This section needs updating)
+We support building on OS X 10.8 and newer with the OS X 10.8 SDK.
The tree should build with the following OS X releases and SDK versions:
-* 10.6 Snow Leopard
-* 10.7 Lion
* 10.8 Mountain Lion
* 10.9 Mavericks
-
-The tree requires building with Clang 3.3 and newer. This corresponds to
-version of 4.2 of Apple's Clang that ships with Xcode. This corresponds
-to Xcode 4.6 and newer. Xcode 4.6 only runs on OS X 10.7.4 and newer.
-So, OS X 10.6 users will need to install a non-Apple toolchain. Running
-``mach bootstrap`` should install an appropriate toolchain from Homebrew
-or MacPorts automatically.
-
-The tree should build with GCC 4.4 and newer on OS X. However, this
+* 10.10 Yosemite
+* 10.11 El Capitan
+* 10.12 Sierra
+* 10.13 High Sierra
+* 10.14 Mojave
+* 10.15 Catalina
+* 11 Big Sur (Including Apple ARM SoC)
+
+The tree requires building with Apple's Clang 4.2 that ships with Xcode.
+This corresponds to Xcode 4.6 and newer. Xcode 4.6 only runs on OS X 10.7.4
+and newer.
+
+The tree should build with GCC 7.1 and newer on OS X. However, this
build configuration isn't as widely used (and differs from what Mozilla
-uses to produce OS X builds), so it's recommended to stick with Clang.
+uses to produce OS X builds).
Linux
=====
@@ -47,9 +50,6 @@ Linux
Linux 2.6 and later kernels are supported.
Most distributions are supported as long as the proper package
-dependencies are in place. Running ``mach bootstrap`` should install
-packages for popular Linux distributions. ``configure`` will typically
+dependencies are in place. ``configure`` will typically
detect missing dependencies and inform you how to disable features to
work around unsatisfied dependencies.
-
-Clang 3.3 or GCC 4.4 is required to build the tree.
diff --git a/build/docs/visualstudio.rst b/build/docs/visualstudio.rst
deleted file mode 100644
index 3fbf28e94b..0000000000
--- a/build/docs/visualstudio.rst
+++ /dev/null
@@ -1,100 +0,0 @@
-.. _build_visualstudio:
-
-======================
-Visual Studio Projects
-======================
-
-The build system contains alpha support for generating Visual Studio
-project files to aid with development.
-
-To generate Visual Studio project files, you'll need to have a configured tree::
-
- mach configure
-
-(If you have built recently, your tree is already configured.)
-
-Then, simply generate the Visual Studio build backend::
-
- mach build-backend -b VisualStudio
-
-If all goes well, the path to the generated Solution (``.sln``) file should be
-printed. You should be able to open that solution with Visual Studio 2010 or
-newer.
-
-Currently, output is hard-coded to the Visual Studio 2010 format. If you open
-the solution in a newer Visual Studio release, you will be prompted to upgrade
-projects. Simply click through the wizard to do that.
-
-Structure of Solution
-=====================
-
-The Visual Studio solution consists of hundreds of projects spanning thousands
-of files. To help with organization, the solution is divided into the following
-trees/folders:
-
-Build Targets
- This folder contains common build targets. The *full* project is used to
- perform a full build. The *binaries* project is used to build just binaries.
- The *visual-studio* project can be built to regenerate the Visual Studio
- project files.
-
- Performing the *clean* action on any of these targets will clean the
- *entire* build output.
-
-Binaries
- This folder contains common binaries that can be executed from within
- Visual Studio. If you are building the Firefox desktop application,
- the *firefox* project will launch firefox.exe. You probably want one of
- these set to your startup project.
-
-Libraries
- This folder contains entries for each static library that is produced as
- part of the build. These roughly correspond to each directory in the tree
- containing C/C++. e.g. code from ``dom/base`` will be contained in the
- ``dom_base`` project.
-
- These projects don't do anything when built. If you build a project here,
- the *binaries* build target project is built.
-
-Updating Project Files
-======================
-
-As you pull and update the source tree, your Visual Studio files may fall out
-of sync with the build configuration. The tree should still build fine from
-within Visual Studio. But source files may be missing and IntelliSense may not
-have the proper build configuration.
-
-To account for this, you'll want to periodically regenerate the Visual Studio
-project files. You can do this within Visual Studio by building the
-``Build Targets :: visual-studio`` project or by running
-``mach build-backend -b VisualStudio`` from the command line.
-
-Currently, regeneration rewrites the original project files. **If you've made
-any customizations to the solution or projects, they will likely get
-overwritten.** We would like to improve this user experience in the
-future.
-
-Moving Project Files Around
-===========================
-
-The produced Visual Studio solution and project files should be portable.
-If you want to move them to a non-default directory, they should continue
-to work from wherever they are. If they don't, please file a bug.
-
-Invoking mach through Visual Studio
-===================================
-
-It's possible to build the tree via Visual Studio. There is some light magic
-involved here.
-
-Alongside the Visual Studio project files is a batch script named ``mach.bat``.
-This batch script sets the environment variables present in your *MozillaBuild*
-development environment at the time of Visual Studio project generation
-and invokes *mach* inside an msys shell with the arguments specified to the
-batch script. This script essentially allows you to invoke mach commands
-inside the MozillaBuild environment without having to load MozillaBuild.
-
-While projects currently only utilize the ``mach build`` command, the batch
-script does not limit it's use: any mach command can be invoked. Developers
-may abuse this fact to add custom projects and commands that invoke other
-mach commands.
diff --git a/build/mach_bootstrap.py b/build/mach_bootstrap.py
index 0443eedda2..22eaa3425a 100644
--- a/build/mach_bootstrap.py
+++ b/build/mach_bootstrap.py
@@ -100,7 +100,6 @@ MACH_MODULES = [
'python/mach/mach/commands/settings.py',
'python/compare-locales/mach_commands.py',
'python/mozbuild/mozbuild/mach_commands.py',
- 'python/mozbuild/mozbuild/backend/mach_commands.py',
'python/mozbuild/mozbuild/compilation/codecomplete.py',
'python/mozbuild/mozbuild/frontend/mach_commands.py',
'services/common/tests/mach_commands.py',
diff --git a/build/moz.build b/build/moz.build
index 27f681369d..6567dd944c 100644
--- a/build/moz.build
+++ b/build/moz.build
@@ -97,10 +97,3 @@ if CONFIG['MOZ_VALGRIND']:
'valgrind/i386-redhat-linux-gnu.sup',
'valgrind/x86_64-redhat-linux-gnu.sup',
]
-
-if CONFIG['MOZ_ARTIFACT_BUILDS']:
- # Ensure a pre-built interfaces.xpt installed to the objdir by the artifact
- # code is included by the top-level chrome.manifest.
- EXTRA_COMPONENTS += [
- 'prebuilt-interfaces.manifest',
- ]
diff --git a/caps/tests/mochitest/browser_checkloaduri.js b/caps/tests/mochitest/browser_checkloaduri.js
index 24a97c1c45..1fac5c97c7 100644
--- a/caps/tests/mochitest/browser_checkloaduri.js
+++ b/caps/tests/mochitest/browser_checkloaduri.js
@@ -58,7 +58,6 @@ const URLs = new Map([
["data:text/html,Hi", true, false, true],
["view-source:data:text/html,Hi", false, false, true],
["javascript:alert('hi')", true, false, true],
- ["moz://a", false, false, true],
["about:test-chrome-privs", false, false, true],
["about:test-unknown-unlinkable", false, false, true],
["about:test-content-unlinkable", false, false, true],
@@ -80,7 +79,6 @@ const URLs = new Map([
["data:text/html,Hi", true, false, true],
["view-source:data:text/html,Hi", false, false, true],
["javascript:alert('hi')", true, false, true],
- ["moz://a", false, false, true],
["about:test-chrome-privs", false, false, true],
["about:test-unknown-unlinkable", false, false, true],
["about:test-content-unlinkable", false, false, true],
@@ -102,7 +100,6 @@ const URLs = new Map([
["data:text/html,Hi", true, false, true],
["view-source:data:text/html,Hi", true, false, true],
["javascript:alert('hi')", true, false, true],
- ["moz://a", false, false, true],
["about:test-chrome-privs", false, false, true],
["about:test-unknown-unlinkable", false, false, true],
["about:test-content-unlinkable", false, false, true],
diff --git a/config/baseconfig.mk b/config/baseconfig.mk
index 47a12b16e4..a125466abf 100644
--- a/config/baseconfig.mk
+++ b/config/baseconfig.mk
@@ -45,7 +45,7 @@ endif # WINNT
ifndef INCLUDED_AUTOCONF_MK
default::
else
-TIERS := $(if $(MOZ_ARTIFACT_BUILDS),artifact )pre-export export $(if $(COMPILE_ENVIRONMENT),compile )misc libs tools
+TIERS := pre-export export $(if $(COMPILE_ENVIRONMENT),compile )misc libs tools
endif
# These defines are used to support the twin-topsrcdir model for comm-central.
diff --git a/config/faster/rules.mk b/config/faster/rules.mk
deleted file mode 100644
index 9d7b322fa2..0000000000
--- a/config/faster/rules.mk
+++ /dev/null
@@ -1,110 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# /!\ Please make sure to update the following comment when you touch this
-# file. Thank you /!\
-
-# The traditional Mozilla build system relied on going through the entire
-# build tree a number of times with different targets, and many of the
-# things happening at each step required other things happening in previous
-# steps without any documentation of those dependencies.
-#
-# This new build system tries to start afresh by establishing what files or
-# operations are needed for the build, and applying the necessary rules to
-# have those in place, relying on make dependencies to get them going.
-#
-# As of writing, only building non-compiled parts of Firefox is supported
-# here (a few other things are also left out). This is a starting point, with
-# the intent to grow this build system to make it more complete.
-#
-# This file contains rules and dependencies to get things working. The intent
-# is for a Makefile to define some dependencies and variables, and include
-# this file. What needs to be defined there, and ends up being generated by
-# python/mozbuild/mozbuild/backend/fastermake.py is the following:
-# - TOPSRCDIR/TOPOBJDIR, respectively the top source directory and the top
-# object directory
-# - PYTHON, the path to the python executable
-# - ACDEFINES, which contains a set of -Dvar=name to be used during
-# preprocessing
-# - INSTALL_MANIFESTS, which defines the list of base directories handled
-# by install manifests, see further below
-#
-# A convention used between this file and the Makefile including it is that
-# global Make variables names are uppercase, while "local" Make variables
-# applied to specific targets are lowercase.
-
-# Targets to be triggered for a default build
-default: $(addprefix install-,$(INSTALL_MANIFESTS))
-
-ifndef NO_XPIDL
-# Targets from the recursive make backend to be built for a default build
-default: $(TOPOBJDIR)/config/makefiles/xpidl/xpidl
-endif
-
-# Mac builds require to copy things in dist/bin/*.app
-# TODO: remove the MOZ_WIDGET_TOOLKIT and MOZ_BUILD_APP variables from
-# faster/Makefile and python/mozbuild/mozbuild/test/backend/test_build.py
-# when this is not required anymore.
-# We however don't need to do this when using the hybrid
-# FasterMake/RecursiveMake backend (FASTER_RECURSIVE_MAKE is set when
-# recursing from the RecursiveMake backend)
-ifndef FASTER_RECURSIVE_MAKE
-ifeq (cocoa,$(MOZ_WIDGET_TOOLKIT))
-default:
- $(MAKE) -C $(TOPOBJDIR)/$(MOZ_BUILD_APP)/app repackage
-endif
-endif
-
-.PHONY: FORCE
-
-# Extra define to trigger some workarounds. We should strive to limit the
-# use of those. As of writing the only ones are in
-# toolkit/content/buildconfig.html and browser/locales/jar.mn.
-ACDEFINES += -DBUILD_FASTER
-
-# Files under the faster/ sub-directory, however, are not meant to use the
-# fallback
-$(TOPOBJDIR)/faster/%: ;
-
-# Generic rule to fall back to the recursive make backend.
-# This needs to stay after other $(TOPOBJDIR)/* rules because GNU Make
-# <3.82 apply pattern rules in definition order, not stem length like
-# modern GNU Make.
-$(TOPOBJDIR)/%: FORCE
- $(MAKE) -C $(dir $@) $(notdir $@)
-
-# Install files using install manifests
-#
-# The list of base directories is given in INSTALL_MANIFESTS. The
-# corresponding install manifests are named correspondingly, with forward
-# slashes replaced with underscores, and prefixed with `install_`. That is,
-# the install manifest for `dist/bin` would be `install_dist_bin`.
-$(addprefix install-,$(INSTALL_MANIFESTS)): install-%: $(addprefix $(TOPOBJDIR)/,buildid.h source-repo.h)
- @# For now, force preprocessed files to be reprocessed every time.
- @# The overhead is not that big, and this avoids waiting for proper
- @# support for defines tracking in process_install_manifest.
- @touch install_$(subst /,_,$*)
- @# BOOKMARKS_INCLUDE_DIR is for bookmarks.html only.
- $(PYTHON) -m mozbuild.action.process_install_manifest \
- --track install_$(subst /,_,$*).track \
- $(TOPOBJDIR)/$* \
- -DAB_CD=en-US \
- -DBOOKMARKS_INCLUDE_DIR=$(TOPSRCDIR)/browser/locales/en-US/profile \
- $(ACDEFINES) \
- install_$(subst /,_,$*)
-
-# ============================================================================
-# Below is a set of additional dependencies and variables used to build things
-# that are not supported by data in moz.build.
-
-# The xpidl target in config/makefiles/xpidl requires the install manifest for
-# dist/idl to have been processed. When using the hybrid
-# FasterMake/RecursiveMake backend, this dependency is handled in the top-level
-# Makefile.
-ifndef FASTER_RECURSIVE_MAKE
-$(TOPOBJDIR)/config/makefiles/xpidl/xpidl: $(TOPOBJDIR)/install-dist_idl
-endif
-# It also requires all the install manifests for dist/bin to have been processed
-# because it adds interfaces.manifest references with buildlist.py.
-$(TOPOBJDIR)/config/makefiles/xpidl/xpidl: $(addprefix install-,$(filter dist/bin%,$(INSTALL_MANIFESTS)))
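
To make the naming convention described in the comments above concrete, a short sketch of how an illustrative INSTALL_MANIFESTS value expands into targets and manifest names::

    # Illustrative value; the real list is generated by fastermake.py.
    install_manifests = ['dist/bin', 'dist/idl', 'dist/xpi-stage']

    # "default" depends on one install-<dir> target per entry...
    targets = ['install-%s' % m for m in install_manifests]

    # ...and each target processes the manifest named install_<dir>,
    # with forward slashes replaced by underscores.
    manifests = ['install_%s' % m.replace('/', '_') for m in install_manifests]

    print(targets)    # ['install-dist/bin', 'install-dist/idl', 'install-dist/xpi-stage']
    print(manifests)  # ['install_dist_bin', 'install_dist_idl', 'install_dist_xpi-stage']
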
diff --git a/gradle.properties b/gradle.properties
deleted file mode 100644
index 40ca366b2f..0000000000
--- a/gradle.properties
+++ /dev/null
@@ -1,2 +0,0 @@
-org.gradle.parallel=true
-org.gradle.daemon=true
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
deleted file mode 100644
index e8c6bf7bb4..0000000000
--- a/gradle/wrapper/gradle-wrapper.jar
+++ /dev/null
Binary files differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
deleted file mode 100644
index 8964ccd444..0000000000
--- a/gradle/wrapper/gradle-wrapper.properties
+++ /dev/null
@@ -1,7 +0,0 @@
-#Fri Sep 16 15:41:50 PDT 2016
-distributionBase=GRADLE_USER_HOME
-distributionPath=wrapper/dists
-zipStoreBase=GRADLE_USER_HOME
-zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-2.14.1-all.zip
-distributionSha256Sum=88a910cdf2e03ebbb5fe90f7ecf534fc9ac22e12112dc9a2fee810c598a76091
diff --git a/gradlew b/gradlew
deleted file mode 100755
index 97fac783e1..0000000000
--- a/gradlew
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env bash
-
-##############################################################################
-##
-## Gradle start up script for UN*X
-##
-##############################################################################
-
-# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-DEFAULT_JVM_OPTS=""
-
-APP_NAME="Gradle"
-APP_BASE_NAME=`basename "$0"`
-
-# Use the maximum available, or set MAX_FD != -1 to use that value.
-MAX_FD="maximum"
-
-warn ( ) {
- echo "$*"
-}
-
-die ( ) {
- echo
- echo "$*"
- echo
- exit 1
-}
-
-# OS specific support (must be 'true' or 'false').
-cygwin=false
-msys=false
-darwin=false
-case "`uname`" in
- CYGWIN* )
- cygwin=true
- ;;
- Darwin* )
- darwin=true
- ;;
- MINGW* )
- msys=true
- ;;
-esac
-
-# Attempt to set APP_HOME
-# Resolve links: $0 may be a link
-PRG="$0"
-# Need this for relative symlinks.
-while [ -h "$PRG" ] ; do
- ls=`ls -ld "$PRG"`
- link=`expr "$ls" : '.*-> \(.*\)$'`
- if expr "$link" : '/.*' > /dev/null; then
- PRG="$link"
- else
- PRG=`dirname "$PRG"`"/$link"
- fi
-done
-SAVED="`pwd`"
-cd "`dirname \"$PRG\"`/" >&-
-APP_HOME="`pwd -P`"
-cd "$SAVED" >&-
-
-CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
-
-# Determine the Java command to use to start the JVM.
-if [ -n "$JAVA_HOME" ] ; then
- if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
- # IBM's JDK on AIX uses strange locations for the executables
- JAVACMD="$JAVA_HOME/jre/sh/java"
- else
- JAVACMD="$JAVA_HOME/bin/java"
- fi
- if [ ! -x "$JAVACMD" ] ; then
- die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
- fi
-else
- JAVACMD="java"
- which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
-fi
-
-# Increase the maximum file descriptors if we can.
-if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
- MAX_FD_LIMIT=`ulimit -H -n`
- if [ $? -eq 0 ] ; then
- if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
- MAX_FD="$MAX_FD_LIMIT"
- fi
- ulimit -n $MAX_FD
- if [ $? -ne 0 ] ; then
- warn "Could not set maximum file descriptor limit: $MAX_FD"
- fi
- else
- warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
- fi
-fi
-
-# For Darwin, add options to specify how the application appears in the dock
-if $darwin; then
- GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
-fi
-
-# For Cygwin, switch paths to Windows format before running java
-if $cygwin ; then
- APP_HOME=`cygpath --path --mixed "$APP_HOME"`
- CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
- JAVACMD=`cygpath --unix "$JAVACMD"`
-
- # We build the pattern for arguments to be converted via cygpath
- ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
- SEP=""
- for dir in $ROOTDIRSRAW ; do
- ROOTDIRS="$ROOTDIRS$SEP$dir"
- SEP="|"
- done
- OURCYGPATTERN="(^($ROOTDIRS))"
- # Add a user-defined pattern to the cygpath arguments
- if [ "$GRADLE_CYGPATTERN" != "" ] ; then
- OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
- fi
- # Now convert the arguments - kludge to limit ourselves to /bin/sh
- i=0
- for arg in "$@" ; do
- CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
- CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
-
- if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
- eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
- else
- eval `echo args$i`="\"$arg\""
- fi
- i=$((i+1))
- done
- case $i in
- (0) set -- ;;
- (1) set -- "$args0" ;;
- (2) set -- "$args0" "$args1" ;;
- (3) set -- "$args0" "$args1" "$args2" ;;
- (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
- (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
- (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
- (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
- (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
- (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
- esac
-fi
-
-# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
-function splitJvmOpts() {
- JVM_OPTS=("$@")
-}
-eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
-JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
-
-exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
diff --git a/moz.configure b/moz.configure
index a4bba5bc3b..e3ee68bac3 100644
--- a/moz.configure
+++ b/moz.configure
@@ -11,33 +11,6 @@ include('build/moz.configure/init.configure')
# - Spidermonkey-specific options and rules should go in js/moz.configure.
# - etc.
-option('--enable-artifact-builds', env='MOZ_ARTIFACT_BUILDS',
- help='Download and use prebuilt binary artifacts.')
-
-@depends('--enable-artifact-builds')
-def artifact_builds(value):
- if value:
- return True
-
-set_config('MOZ_ARTIFACT_BUILDS', artifact_builds)
-
-imply_option('--enable-artifact-build-symbols',
- depends(artifact_builds)(lambda v: False if v is None else None),
- reason='--disable-artifact-builds')
-
-option('--enable-artifact-build-symbols',
- help='Download symbols when artifact builds are enabled.')
-
-set_config('MOZ_ARTIFACT_BUILD_SYMBOLS',
- depends_if('--enable-artifact-build-symbols')(lambda _: True))
-
-@depends('--enable-artifact-builds')
-def imply_disable_compile_environment(value):
- if value:
- return False
-
-imply_option('--enable-compile-environment', imply_disable_compile_environment)
-
option('--disable-compile-environment',
help='Disable compiler/library checks')
@@ -73,43 +46,8 @@ include('build/moz.configure/warnings.configure',
include(include_project_configure)
-@depends('--help')
-@imports(_from='mozbuild.backend', _import='backends')
-def build_backends_choices(_):
- return tuple(backends)
-
-
-@deprecated_option('--enable-build-backend', nargs='+',
- choices=build_backends_choices)
-def build_backend(backends):
- if backends:
- return tuple('+%s' % b for b in backends)
-
-imply_option('--build-backends', build_backend)
-
-
-@depends('--enable-artifact-builds', '--disable-compile-environment', '--help')
-@imports('sys')
-def build_backend_defaults(artifact_builds, compile_environment, _):
- if artifact_builds:
- all_backends = ['FasterMake+RecursiveMake']
- else:
- all_backends = ['RecursiveMake', 'FasterMake']
- # Normally, we'd use target.os == 'WINNT', but a dependency on target
- # would require target to depend on --help, as well as host and shell,
- # and this is not a can of worms we can open at the moment.
- if sys.platform == 'win32' and compile_environment:
- all_backends.append('VisualStudio')
- return tuple(all_backends)
-
-option('--build-backends', nargs='+', default=build_backend_defaults,
- choices=build_backends_choices, help='Build backends to generate')
-
-@depends('--build-backends')
-def build_backends(backends):
- return backends
-
-set_config('BUILD_BACKENDS', build_backends)
+# We only support one build-backend, namely RecursiveMake.
+set_config('BUILD_BACKENDS', tuple(['RecursiveMake']))
# Awk detection
@@ -186,17 +124,6 @@ def possible_makes(make, host):
check_prog('GMAKE', possible_makes)
-# tup detection
-# ==============================================================
-@depends(build_backends)
-def tup_progs(build_backends):
- for backend in build_backends:
- if 'Tup' in backend:
- return ['tup']
- return None
-
-tup = check_prog('TUP', tup_progs)
-
# Miscellaneous programs
# ==============================================================
check_prog('DOXYGEN', ('doxygen',), allow_missing=True)
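
With the backend selection options gone, BUILD_BACKENDS is always ('RecursiveMake',). A rough sketch of how a driver such as config.status can resolve and run the configured backends, using the get_backend_class() helper shown later in this patch (the function and argument names here are illustrative)::

    from mozbuild.backend import get_backend_class

    def run_backends(config, objs):
        # config: a ConfigEnvironment; objs: the moz.build emitter output.
        for name in config.substs['BUILD_BACKENDS']:
            backend = get_backend_class(name)(config)
            backend.consume(objs)
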
diff --git a/old-configure.in b/old-configure.in
index 0ed6984ca9..6ed4fc1f3d 100644
--- a/old-configure.in
+++ b/old-configure.in
@@ -4961,7 +4961,6 @@ AC_SUBST_LIST(VPX_ASFLAGS)
AC_SUBST(VPX_AS_CONVERSION)
AC_SUBST(VPX_X86_ASM)
AC_SUBST(VPX_ARM_ASM)
-AC_SUBST(MOZ_CODE_COVERAGE)
AC_SUBST(LIBJPEG_TURBO_USE_YASM)
AC_SUBST_LIST(LIBJPEG_TURBO_ASFLAGS)
AC_SUBST(MOZ_LIBAV_FFT)
diff --git a/python/moz.build b/python/moz.build
index 108b986b55..819d1db9d8 100644
--- a/python/moz.build
+++ b/python/moz.build
@@ -27,7 +27,6 @@ PYTHON_UNIT_TESTS += [
'mozbuild/mozbuild/test/backend/test_build.py',
'mozbuild/mozbuild/test/backend/test_configenvironment.py',
'mozbuild/mozbuild/test/backend/test_recursivemake.py',
- 'mozbuild/mozbuild/test/backend/test_visualstudio.py',
'mozbuild/mozbuild/test/compilation/test_warnings.py',
'mozbuild/mozbuild/test/configure/lint.py',
'mozbuild/mozbuild/test/configure/test_checks_configure.py',
diff --git a/python/mozbuild/mozbuild/backend/__init__.py b/python/mozbuild/mozbuild/backend/__init__.py
index 7093e0c83f..fede9cf9c7 100644
--- a/python/mozbuild/mozbuild/backend/__init__.py
+++ b/python/mozbuild/mozbuild/backend/__init__.py
@@ -3,23 +3,12 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
backends = {
- 'ChromeMap': 'mozbuild.codecoverage.chrome_map',
'CompileDB': 'mozbuild.compilation.database',
- 'CppEclipse': 'mozbuild.backend.cpp_eclipse',
- 'FasterMake': 'mozbuild.backend.fastermake',
- 'FasterMake+RecursiveMake': None,
'RecursiveMake': 'mozbuild.backend.recursivemake',
- 'Tup': 'mozbuild.backend.tup',
- 'VisualStudio': 'mozbuild.backend.visualstudio',
}
def get_backend_class(name):
- if '+' in name:
- from mozbuild.backend.base import HybridBackend
- return HybridBackend(*(get_backend_class(name)
- for name in name.split('+')))
-
class_name = '%sBackend' % name
module = __import__(backends[name], globals(), locals(), [class_name])
return getattr(module, class_name)
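
With the '+' handling removed, only the plain names left in the backends dict resolve. A small usage sketch::

    from mozbuild.backend import get_backend_class

    cls = get_backend_class('RecursiveMake')   # -> RecursiveMakeBackend
    # get_backend_class('FasterMake+RecursiveMake') would now raise KeyError,
    # since that entry was removed and '+' is no longer treated specially.
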
diff --git a/python/mozbuild/mozbuild/backend/base.py b/python/mozbuild/mozbuild/backend/base.py
index f5e0c2d3c8..c46a3b1397 100644
--- a/python/mozbuild/mozbuild/backend/base.py
+++ b/python/mozbuild/mozbuild/backend/base.py
@@ -125,13 +125,11 @@ class BuildBackend(LoggingMixin):
for obj in objs:
obj_start = time.time()
- if (not self.consume_object(obj) and
- not isinstance(self, PartialBackend)):
+ if (not self.consume_object(obj)):
raise Exception('Unhandled object of type %s' % type(obj))
self._execution_time += time.time() - obj_start
- if (isinstance(obj, ContextDerived) and
- not isinstance(self, PartialBackend)):
+ if (isinstance(obj, ContextDerived)):
self.backend_input_files |= obj.context_all_paths
# Pull in all loaded Python as dependencies so any Python changes that
@@ -266,52 +264,3 @@ class BuildBackend(LoggingMixin):
with self._write_file(obj.output_path) as fh:
pp.out = fh
yield pp
-
-
-class PartialBackend(BuildBackend):
- """A PartialBackend is a BuildBackend declaring that its consume_object
- method may not handle all build configuration objects it's passed, and
- that it's fine."""
-
-
-def HybridBackend(*backends):
- """A HybridBackend is the combination of one or more PartialBackends
- with a non-partial BuildBackend.
-
- Build configuration objects are passed to each backend, stopping at the
- first of them that declares having handled them.
- """
- assert len(backends) >= 2
- assert all(issubclass(b, PartialBackend) for b in backends[:-1])
- assert not(issubclass(backends[-1], PartialBackend))
- assert all(issubclass(b, BuildBackend) for b in backends)
-
- class TheHybridBackend(BuildBackend):
- def __init__(self, environment):
- self._backends = [b(environment) for b in backends]
- super(TheHybridBackend, self).__init__(environment)
-
- def consume_object(self, obj):
- return any(b.consume_object(obj) for b in self._backends)
-
- def consume_finished(self):
- for backend in self._backends:
- backend.consume_finished()
-
- for attr in ('_execution_time', '_created_count', '_updated_count',
- '_unchanged_count', '_deleted_count'):
- setattr(self, attr,
- sum(getattr(b, attr) for b in self._backends))
-
- for b in self._backends:
- self.file_diffs.update(b.file_diffs)
- for attr in ('backend_input_files', '_backend_output_files'):
- files = getattr(self, attr)
- files |= getattr(b, attr)
-
- name = '+'.join(itertools.chain(
- (b.__name__.replace('Backend', '') for b in backends[:1]),
- (b.__name__ for b in backends[-1:])
- ))
-
- return type(str(name), (TheHybridBackend,), {})
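
Since PartialBackend is gone, consume_object() is expected to handle, and return a truthy value for, every object a backend is handed; otherwise consume() raises the 'Unhandled object' exception shown above. A deliberately minimal, illustrative subclass of that contract (not a usable backend)::

    from mozbuild.backend.base import BuildBackend

    class NoopBackend(BuildBackend):
        # Illustrative only: accept every object so consume() never raises
        # the 'Unhandled object of type ...' exception above.
        def consume_object(self, obj):
            return True
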
diff --git a/python/mozbuild/mozbuild/backend/common.py b/python/mozbuild/mozbuild/backend/common.py
index 12b2a27c45..a90aa1e5d5 100644
--- a/python/mozbuild/mozbuild/backend/common.py
+++ b/python/mozbuild/mozbuild/backend/common.py
@@ -252,71 +252,35 @@ class CommonBackend(BuildBackend):
# We should consider aggregating WebIDL types in emitter.py.
elif isinstance(obj, WebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.sources.add(mozpath.join(obj.srcdir, obj.basename))
elif isinstance(obj, GeneratedEventWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.generated_events_sources.add(mozpath.join(
obj.srcdir, obj.basename))
elif isinstance(obj, TestWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.test_sources.add(mozpath.join(obj.srcdir,
obj.basename))
elif isinstance(obj, PreprocessedTestWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.preprocessed_test_sources.add(mozpath.join(
obj.srcdir, obj.basename))
elif isinstance(obj, GeneratedWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.generated_sources.add(mozpath.join(obj.srcdir,
obj.basename))
elif isinstance(obj, PreprocessedWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.preprocessed_sources.add(mozpath.join(
obj.srcdir, obj.basename))
elif isinstance(obj, ExampleWebIDLInterface):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.example_interfaces.add(obj.name)
elif isinstance(obj, IPDLFile):
- # IPDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._ipdl_sources.add(mozpath.join(obj.srcdir, obj.basename))
elif isinstance(obj, UnifiedSources):
- # Unified sources aren't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
if obj.have_unified_mapping:
self._write_unified_files(obj.unified_source_mapping, obj.objdir)
if hasattr(self, '_process_unified_sources'):
diff --git a/python/mozbuild/mozbuild/backend/configenvironment.py b/python/mozbuild/mozbuild/backend/configenvironment.py
index 331309af6d..0edcf53660 100644
--- a/python/mozbuild/mozbuild/backend/configenvironment.py
+++ b/python/mozbuild/mozbuild/backend/configenvironment.py
@@ -187,10 +187,6 @@ class ConfigEnvironment(object):
self.substs_unicode = ReadOnlyDict(self.substs_unicode)
- @property
- def is_artifact_build(self):
- return self.substs.get('MOZ_ARTIFACT_BUILDS', False)
-
@staticmethod
def from_config_status(path):
config = BuildConfig.from_config_status(path)
diff --git a/python/mozbuild/mozbuild/backend/cpp_eclipse.py b/python/mozbuild/mozbuild/backend/cpp_eclipse.py
deleted file mode 100644
index ae89df5b20..0000000000
--- a/python/mozbuild/mozbuild/backend/cpp_eclipse.py
+++ /dev/null
@@ -1,685 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import
-
-import errno
-import random
-import os
-import subprocess
-import types
-import xml.etree.ElementTree as ET
-from .common import CommonBackend
-
-from ..frontend.data import (
- Defines,
-)
-from mozbuild.base import ExecutionSummary
-
-# TODO Have ./mach eclipse generate the workspace and index it:
-# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -application org.eclipse.cdt.managedbuilder.core.headlessbuild -data $PWD/workspace -importAll $PWD/eclipse
-# Open eclipse:
-# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -data $PWD/workspace
-
-class CppEclipseBackend(CommonBackend):
- """Backend that generates Cpp Eclipse project files.
- """
-
- def __init__(self, environment):
- if os.name == 'nt':
- raise Exception('Eclipse is not supported on Windows. '
- 'Consider using Visual Studio instead.')
- super(CppEclipseBackend, self).__init__(environment)
-
- def _init(self):
- CommonBackend._init(self)
-
- self._paths_to_defines = {}
- self._project_name = 'Gecko'
- self._workspace_dir = self._get_workspace_path()
- self._project_dir = os.path.join(self._workspace_dir, self._project_name)
- self._overwriting_workspace = os.path.isdir(self._workspace_dir)
-
- self._macbundle = self.environment.substs['MOZ_MACBUNDLE_NAME']
- self._appname = self.environment.substs['MOZ_APP_NAME']
- self._bin_suffix = self.environment.substs['BIN_SUFFIX']
- self._cxx = self.environment.substs['CXX']
- # Note: We need the C Pre Processor (CPP) flags, not the CXX flags
- self._cppflags = self.environment.substs.get('CPPFLAGS', '')
-
- def summary(self):
- return ExecutionSummary(
- 'CppEclipse backend executed in {execution_time:.2f}s\n'
- 'Generated Cpp Eclipse workspace in "{workspace:s}".\n'
- 'If missing, import the project using File > Import > General > Existing Project into workspace\n'
- '\n'
- 'Run with: eclipse -data {workspace:s}\n',
- execution_time=self._execution_time,
- workspace=self._workspace_dir)
-
- def _get_workspace_path(self):
- return CppEclipseBackend.get_workspace_path(self.environment.topsrcdir, self.environment.topobjdir)
-
- @staticmethod
- def get_workspace_path(topsrcdir, topobjdir):
- # Eclipse doesn't support having the workspace inside the srcdir.
- # Since most people have their objdir inside their srcdir it's easier
- # and more consistent to just put the workspace along side the srcdir
- srcdir_parent = os.path.dirname(topsrcdir)
- workspace_dirname = "eclipse_" + os.path.basename(topobjdir)
- return os.path.join(srcdir_parent, workspace_dirname)
-
- def consume_object(self, obj):
- reldir = getattr(obj, 'relativedir', None)
-
- # Note that unlike VS, Eclipse' indexer seem to crawl the headers and
- # isn't picky about the local includes.
- if isinstance(obj, Defines):
- self._paths_to_defines.setdefault(reldir, {}).update(obj.defines)
-
- return True
-
- def consume_finished(self):
- settings_dir = os.path.join(self._project_dir, '.settings')
- launch_dir = os.path.join(self._project_dir, 'RunConfigurations')
- workspace_settings_dir = os.path.join(self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')
- workspace_language_dir = os.path.join(self._workspace_dir, '.metadata/.plugins/org.eclipse.cdt.core')
-
- for dir_name in [self._project_dir, settings_dir, launch_dir, workspace_settings_dir, workspace_language_dir]:
- try:
- os.makedirs(dir_name)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
- project_path = os.path.join(self._project_dir, '.project')
- with open(project_path, 'wb') as fh:
- self._write_project(fh)
-
- cproject_path = os.path.join(self._project_dir, '.cproject')
- with open(cproject_path, 'wb') as fh:
- self._write_cproject(fh)
-
- language_path = os.path.join(settings_dir, 'language.settings.xml')
- with open(language_path, 'wb') as fh:
- self._write_language_settings(fh)
-
- workspace_language_path = os.path.join(workspace_language_dir, 'language.settings.xml')
- with open(workspace_language_path, 'wb') as fh:
- workspace_lang_settings = WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE
- workspace_lang_settings = workspace_lang_settings.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags);
- fh.write(workspace_lang_settings)
-
- self._write_launch_files(launch_dir)
-
- # This will show up as an 'unmanged' formatter. This can be named by generating
- # another file.
- formatter_prefs_path = os.path.join(settings_dir, 'org.eclipse.cdt.core.prefs')
- with open(formatter_prefs_path, 'wb') as fh:
- fh.write(FORMATTER_SETTINGS);
-
- editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs");
- with open(editor_prefs_path, 'wb') as fh:
- fh.write(EDITOR_SETTINGS);
-
- # Now import the project into the workspace
- self._import_project()
-
- def _import_project(self):
- # If the workspace already exists then don't import the project again because
- # eclipse doesn't handle this properly
- if self._overwriting_workspace:
- return
-
- # We disable the indexer otherwise we're forced to index
- # the whole codebase when importing the project. Indexing the project can take 20 minutes.
- self._write_noindex()
-
- try:
- process = subprocess.check_call(
- ["eclipse", "-application", "-nosplash",
- "org.eclipse.cdt.managedbuilder.core.headlessbuild",
- "-data", self._workspace_dir, "-importAll", self._project_dir])
- finally:
- self._remove_noindex()
-
- def _write_noindex(self):
- noindex_path = os.path.join(self._project_dir, '.settings/org.eclipse.cdt.core.prefs')
- with open(noindex_path, 'wb') as fh:
- fh.write(NOINDEX_TEMPLATE);
-
- def _remove_noindex(self):
- noindex_path = os.path.join(self._project_dir, '.settings/org.eclipse.cdt.core.prefs')
- os.remove(noindex_path)
-
- def _define_entry(self, name, value):
- define = ET.Element('entry')
- define.set('kind', 'macro')
- define.set('name', name)
- define.set('value', value)
- return ET.tostring(define)
-
- def _write_language_settings(self, fh):
- settings = LANGUAGE_SETTINGS_TEMPLATE
-
- settings = settings.replace('@GLOBAL_INCLUDE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include'))
- settings = settings.replace('@NSPR_INCLUDE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include/nspr'))
- settings = settings.replace('@IPDL_INCLUDE_PATH@', os.path.join(self.environment.topobjdir, 'ipc/ipdl/_ipdlheaders'))
- settings = settings.replace('@PREINCLUDE_FILE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include/mozilla-config.h'))
- settings = settings.replace('@DEFINE_MOZILLA_INTERNAL_API@', self._define_entry('MOZILLA_INTERNAL_API', '1'))
- settings = settings.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags);
-
- fh.write(settings)
-
- def _write_launch_files(self, launch_dir):
- bin_dir = os.path.join(self.environment.topobjdir, 'dist')
-
- # TODO Improve binary detection
- if self._macbundle:
- exe_path = os.path.join(bin_dir, self._macbundle, 'Contents/MacOS')
- else:
- exe_path = os.path.join(bin_dir, 'bin')
-
- exe_path = os.path.join(exe_path, self._appname + self._bin_suffix)
-
- main_gecko_launch = os.path.join(launch_dir, 'gecko.launch')
- with open(main_gecko_launch, 'wb') as fh:
- launch = GECKO_LAUNCH_CONFIG_TEMPLATE
- launch = launch.replace('@LAUNCH_PROGRAM@', exe_path)
- launch = launch.replace('@LAUNCH_ARGS@', '-P -no-remote')
- fh.write(launch)
-
- #TODO Add more launch configs (and delegate calls to mach)
-
- def _write_project(self, fh):
- project = PROJECT_TEMPLATE;
-
- project = project.replace('@PROJECT_NAME@', self._project_name)
- project = project.replace('@PROJECT_TOPSRCDIR@', self.environment.topsrcdir)
- fh.write(project)
-
- def _write_cproject(self, fh):
- cproject_header = CPROJECT_TEMPLATE_HEADER
- cproject_header = cproject_header.replace('@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
- cproject_header = cproject_header.replace('@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
- fh.write(cproject_header)
-
- for path, defines in self._paths_to_defines.items():
- folderinfo = CPROJECT_TEMPLATE_FOLDER_INFO_HEADER
- folderinfo = folderinfo.replace('@FOLDER_ID@', str(random.randint(1000000, 99999999999)))
- folderinfo = folderinfo.replace('@FOLDER_NAME@', 'tree/' + path)
- fh.write(folderinfo)
- for k, v in defines.items():
- define = ET.Element('listOptionValue')
- define.set('builtIn', 'false')
- define.set('value', str(k) + "=" + str(v))
- fh.write(ET.tostring(define))
- fh.write(CPROJECT_TEMPLATE_FOLDER_INFO_FOOTER)
-
-
- fh.write(CPROJECT_TEMPLATE_FOOTER)
-
-
-PROJECT_TEMPLATE = """<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>@PROJECT_NAME@</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.eclipse.cdt.managedbuilder.core.genmakebuilder</name>
- <triggers>clean,full,incremental,</triggers>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder</name>
- <triggers></triggers>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.eclipse.cdt.core.cnature</nature>
- <nature>org.eclipse.cdt.core.ccnature</nature>
- <nature>org.eclipse.cdt.managedbuilder.core.managedBuildNature</nature>
- <nature>org.eclipse.cdt.managedbuilder.core.ScannerConfigNature</nature>
- </natures>
- <linkedResources>
- <link>
- <name>tree</name>
- <type>2</type>
- <location>@PROJECT_TOPSRCDIR@</location>
- </link>
- </linkedResources>
- <filteredResources>
- <filter>
- <id>17111971</id>
- <name>tree</name>
- <type>30</type>
- <matcher>
- <id>org.eclipse.ui.ide.multiFilter</id>
- <arguments>1.0-name-matches-false-false-obj-*</arguments>
- </matcher>
- </filter>
- <filter>
- <id>14081994</id>
- <name>tree</name>
- <type>22</type>
- <matcher>
- <id>org.eclipse.ui.ide.multiFilter</id>
- <arguments>1.0-name-matches-false-false-*.rej</arguments>
- </matcher>
- </filter>
- <filter>
- <id>25121970</id>
- <name>tree</name>
- <type>22</type>
- <matcher>
- <id>org.eclipse.ui.ide.multiFilter</id>
- <arguments>1.0-name-matches-false-false-*.orig</arguments>
- </matcher>
- </filter>
- <filter>
- <id>10102004</id>
- <name>tree</name>
- <type>10</type>
- <matcher>
- <id>org.eclipse.ui.ide.multiFilter</id>
- <arguments>1.0-name-matches-false-false-.hg</arguments>
- </matcher>
- </filter>
- <filter>
- <id>23122002</id>
- <name>tree</name>
- <type>22</type>
- <matcher>
- <id>org.eclipse.ui.ide.multiFilter</id>
- <arguments>1.0-name-matches-false-false-*.pyc</arguments>
- </matcher>
- </filter>
- </filteredResources>
-</projectDescription>
-"""
-
-CPROJECT_TEMPLATE_HEADER = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<?fileVersion 4.0.0?>
-
-<cproject storage_type_id="org.eclipse.cdt.core.XmlProjectDescriptionStorage">
- <storageModule moduleId="org.eclipse.cdt.core.settings">
- <cconfiguration id="0.1674256904">
- <storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="0.1674256904" moduleId="org.eclipse.cdt.core.settings" name="Default">
- <externalSettings/>
- <extensions>
- <extension id="org.eclipse.cdt.core.VCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
- <extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
- <extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
- <extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
- <extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
- <extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
- </extensions>
- </storageModule>
- <storageModule moduleId="cdtBuildSystem" version="4.0.0">
- <configuration artifactName="${ProjName}" buildProperties="" description="" id="0.1674256904" name="Default" parent="org.eclipse.cdt.build.core.prefbase.cfg">
- <folderInfo id="0.1674256904." name="/" resourcePath="">
- <toolChain id="cdt.managedbuild.toolchain.gnu.cross.exe.debug.1276586933" name="Cross GCC" superClass="cdt.managedbuild.toolchain.gnu.cross.exe.debug">
- <targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.ELF" id="cdt.managedbuild.targetPlatform.gnu.cross.710759961" isAbstract="false" osList="all" superClass="cdt.managedbuild.targetPlatform.gnu.cross"/>
- <builder arguments="--log-no-times build" buildPath="@PROJECT_TOPSRCDIR@" command="@MACH_COMMAND@" enableCleanBuild="false" incrementalBuildTarget="binaries" id="org.eclipse.cdt.build.core.settings.default.builder.1437267827" keepEnvironmentInBuildfile="false" name="Gnu Make Builder" superClass="org.eclipse.cdt.build.core.settings.default.builder"/>
- </toolChain>
- </folderInfo>
-"""
-CPROJECT_TEMPLATE_FOLDER_INFO_HEADER = """
- <folderInfo id="0.1674256904.@FOLDER_ID@" name="/" resourcePath="@FOLDER_NAME@">
- <toolChain id="org.eclipse.cdt.build.core.prefbase.toolchain.1022318069" name="No ToolChain" superClass="org.eclipse.cdt.build.core.prefbase.toolchain" unusedChildren="">
- <tool id="org.eclipse.cdt.build.core.settings.holder.libs.1259030812" name="holder for library settings" superClass="org.eclipse.cdt.build.core.settings.holder.libs.1800697532"/>
- <tool id="org.eclipse.cdt.build.core.settings.holder.1407291069" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder.582514939">
- <option id="org.eclipse.cdt.build.core.settings.holder.symbols.1907658087" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" valueType="definedSymbols">
-"""
-CPROJECT_TEMPLATE_FOLDER_INFO_DEFINE = """
- <listOptionValue builtIn="false" value="@FOLDER_DEFINE@"/>
-"""
-CPROJECT_TEMPLATE_FOLDER_INFO_FOOTER = """
- </option>
- <inputType id="org.eclipse.cdt.build.core.settings.holder.inType.440601711" languageId="org.eclipse.cdt.core.g++" languageName="GNU C++" sourceContentType="org.eclipse.cdt.core.cxxSource,org.eclipse.cdt.core.cxxHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
- </tool>
- </toolChain>
- </folderInfo>
-"""
-CPROJECT_TEMPLATE_FILEINFO = """ <fileInfo id="0.1674256904.474736658" name="Layers.cpp" rcbsApplicability="disable" resourcePath="tree/gfx/layers/Layers.cpp" toolsToInvoke="org.eclipse.cdt.build.core.settings.holder.582514939.463639939">
- <tool id="org.eclipse.cdt.build.core.settings.holder.582514939.463639939" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder.582514939">
- <option id="org.eclipse.cdt.build.core.settings.holder.symbols.232300236" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" valueType="definedSymbols">
- <listOptionValue builtIn="false" value="BENWA=BENWAVAL"/>
- </option>
- <inputType id="org.eclipse.cdt.build.core.settings.holder.inType.1942876228" languageId="org.eclipse.cdt.core.g++" languageName="GNU C++" sourceContentType="org.eclipse.cdt.core.cxxSource,org.eclipse.cdt.core.cxxHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
- </tool>
- </fileInfo>
-"""
-CPROJECT_TEMPLATE_FOOTER = """ </configuration>
- </storageModule>
- <storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
- </cconfiguration>
- </storageModule>
- <storageModule moduleId="cdtBuildSystem" version="4.0.0">
- <project id="Empty.null.1281234804" name="Empty"/>
- </storageModule>
- <storageModule moduleId="scannerConfiguration">
- <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
- <scannerConfigBuildInfo instanceId="0.1674256904">
- <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
- </scannerConfigBuildInfo>
- </storageModule>
- <storageModule moduleId="refreshScope" versionNumber="2">
- <configuration configurationName="Default"/>
- </storageModule>
- <storageModule moduleId="org.eclipse.cdt.core.LanguageSettingsProviders"/>
-</cproject>
-"""
-
-WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<plugin>
- <extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
- <provider class="org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector" console="true" id="org.eclipse.cdt.managedbuilder.core.GCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD &quot;${INPUTS}&quot;">
- <language-scope id="org.eclipse.cdt.core.gcc"/>
- <language-scope id="org.eclipse.cdt.core.g++"/>
- </provider>
- </extension>
-</plugin>
-"""
-
-LANGUAGE_SETTINGS_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<project>
- <configuration id="0.1674256904" name="Default">
- <extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
- <provider class="org.eclipse.cdt.core.language.settings.providers.LanguageSettingsGenericProvider" id="org.eclipse.cdt.ui.UserLanguageSettingsProvider" name="CDT User Setting Entries" prefer-non-shared="true" store-entries-with-project="true">
- <language id="org.eclipse.cdt.core.g++">
- <resource project-relative-path="">
- <entry kind="includePath" name="@GLOBAL_INCLUDE_PATH@">
- <flag value="LOCAL"/>
- </entry>
- <entry kind="includePath" name="@NSPR_INCLUDE_PATH@">
- <flag value="LOCAL"/>
- </entry>
- <entry kind="includePath" name="@IPDL_INCLUDE_PATH@">
- <flag value="LOCAL"/>
- </entry>
- <entry kind="includeFile" name="@PREINCLUDE_FILE_PATH@">
- <flag value="LOCAL"/>
- </entry>
- <!--
- Because of https://developer.mozilla.org/en-US/docs/Eclipse_CDT#Headers_are_only_parsed_once
- we need to make sure headers are parsed with MOZILLA_INTERNAL_API to make sure
-              the indexer gets the version that is used in most of the tree. This means that
- MOZILLA_EXTERNAL_API code will suffer.
- -->
- @DEFINE_MOZILLA_INTERNAL_API@
- </resource>
- </language>
- </provider>
- <provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-859273372804152468" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD &quot;${INPUTS}&quot; -std=c++11" prefer-non-shared="true" store-entries-with-project="true">
- <language-scope id="org.eclipse.cdt.core.gcc"/>
- <language-scope id="org.eclipse.cdt.core.g++"/>
- </provider>
- <provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
- </extension>
- </configuration>
-</project>
-"""
-
-GECKO_LAUNCH_CONFIG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<launchConfiguration type="org.eclipse.cdt.launch.applicationLaunchType">
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB" value="true"/>
-<listAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB_LIST"/>
-<stringAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_NAME" value="lldb"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_ON_FORK" value="false"/>
-<stringAttribute key="org.eclipse.cdt.dsf.gdb.GDB_INIT" value=""/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.NON_STOP" value="false"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.REVERSE" value="false"/>
-<listAttribute key="org.eclipse.cdt.dsf.gdb.SOLIB_PATH"/>
-<stringAttribute key="org.eclipse.cdt.dsf.gdb.TRACEPOINT_MODE" value="TP_NORMAL_ONLY"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.UPDATE_THREADLIST_ON_SUSPEND" value="false"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.internal.ui.launching.LocalApplicationCDebuggerTab.DEFAULTS_SET" value="true"/>
-<intAttribute key="org.eclipse.cdt.launch.ATTR_BUILD_BEFORE_LAUNCH_ATTR" value="2"/>
-<stringAttribute key="org.eclipse.cdt.launch.COREFILE_PATH" value=""/>
-<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/>
-<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/>
-<booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="false"/>
-<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value="main"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_ARGUMENTS" value="@LAUNCH_ARGS@"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="@LAUNCH_PROGRAM@"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="Gecko"/>
-<booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="true"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_ID_ATTR" value=""/>
-<booleanAttribute key="org.eclipse.cdt.launch.use_terminal" value="true"/>
-<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
-<listEntry value="/gecko"/>
-</listAttribute>
-<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
-<listEntry value="4"/>
-</listAttribute>
-<booleanAttribute key="org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND" value="false"/>
-<stringAttribute key="process_factory_id" value="org.eclipse.cdt.dsf.gdb.GdbProcessFactory"/>
-</launchConfiguration>
-"""
-
-B2GFLASH_LAUNCH_CONFIG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<launchConfiguration type="org.eclipse.cdt.launch.applicationLaunchType">
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB" value="true"/>
-<listAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB_LIST"/>
-<stringAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_NAME" value="lldb"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_ON_FORK" value="false"/>
-<stringAttribute key="org.eclipse.cdt.dsf.gdb.GDB_INIT" value=""/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.NON_STOP" value="false"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.REVERSE" value="false"/>
-<listAttribute key="org.eclipse.cdt.dsf.gdb.SOLIB_PATH"/>
-<stringAttribute key="org.eclipse.cdt.dsf.gdb.TRACEPOINT_MODE" value="TP_NORMAL_ONLY"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.UPDATE_THREADLIST_ON_SUSPEND" value="false"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.internal.ui.launching.LocalApplicationCDebuggerTab.DEFAULTS_SET" value="true"/>
-<intAttribute key="org.eclipse.cdt.launch.ATTR_BUILD_BEFORE_LAUNCH_ATTR" value="2"/>
-<stringAttribute key="org.eclipse.cdt.launch.COREFILE_PATH" value=""/>
-<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/>
-<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/>
-<booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="false"/>
-<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value="main"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="@LAUNCH_PROGRAM@"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="Gecko"/>
-<booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="true"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_ID_ATTR" value=""/>
-<stringAttribute key="org.eclipse.cdt.launch.WORKING_DIRECTORY" value="@OBJDIR@"/>
-<booleanAttribute key="org.eclipse.cdt.launch.use_terminal" value="true"/>
-<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
-<listEntry value="/gecko"/>
-</listAttribute>
-<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
-<listEntry value="4"/>
-</listAttribute>
-<booleanAttribute key="org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND" value="false"/>
-<stringAttribute key="process_factory_id" value="org.eclipse.cdt.dsf.gdb.GdbProcessFactory"/>
-</launchConfiguration>
-"""
-
-
-EDITOR_SETTINGS = """eclipse.preferences.version=1
-lineNumberRuler=true
-overviewRuler_migration=migrated_3.1
-printMargin=true
-printMarginColumn=80
-showCarriageReturn=false
-showEnclosedSpaces=false
-showLeadingSpaces=false
-showLineFeed=false
-showWhitespaceCharacters=true
-spacesForTabs=true
-tabWidth=2
-undoHistorySize=200
-"""
-
-FORMATTER_SETTINGS = """eclipse.preferences.version=1
-org.eclipse.cdt.core.formatter.alignment_for_arguments_in_method_invocation=16
-org.eclipse.cdt.core.formatter.alignment_for_assignment=16
-org.eclipse.cdt.core.formatter.alignment_for_base_clause_in_type_declaration=80
-org.eclipse.cdt.core.formatter.alignment_for_binary_expression=16
-org.eclipse.cdt.core.formatter.alignment_for_compact_if=16
-org.eclipse.cdt.core.formatter.alignment_for_conditional_expression=34
-org.eclipse.cdt.core.formatter.alignment_for_conditional_expression_chain=18
-org.eclipse.cdt.core.formatter.alignment_for_constructor_initializer_list=48
-org.eclipse.cdt.core.formatter.alignment_for_declarator_list=16
-org.eclipse.cdt.core.formatter.alignment_for_enumerator_list=48
-org.eclipse.cdt.core.formatter.alignment_for_expression_list=0
-org.eclipse.cdt.core.formatter.alignment_for_expressions_in_array_initializer=16
-org.eclipse.cdt.core.formatter.alignment_for_member_access=0
-org.eclipse.cdt.core.formatter.alignment_for_overloaded_left_shift_chain=16
-org.eclipse.cdt.core.formatter.alignment_for_parameters_in_method_declaration=16
-org.eclipse.cdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
-org.eclipse.cdt.core.formatter.brace_position_for_array_initializer=end_of_line
-org.eclipse.cdt.core.formatter.brace_position_for_block=end_of_line
-org.eclipse.cdt.core.formatter.brace_position_for_block_in_case=next_line_shifted
-org.eclipse.cdt.core.formatter.brace_position_for_method_declaration=next_line
-org.eclipse.cdt.core.formatter.brace_position_for_namespace_declaration=end_of_line
-org.eclipse.cdt.core.formatter.brace_position_for_switch=end_of_line
-org.eclipse.cdt.core.formatter.brace_position_for_type_declaration=next_line
-org.eclipse.cdt.core.formatter.comment.min_distance_between_code_and_line_comment=1
-org.eclipse.cdt.core.formatter.comment.never_indent_line_comments_on_first_column=true
-org.eclipse.cdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=true
-org.eclipse.cdt.core.formatter.compact_else_if=true
-org.eclipse.cdt.core.formatter.continuation_indentation=2
-org.eclipse.cdt.core.formatter.continuation_indentation_for_array_initializer=2
-org.eclipse.cdt.core.formatter.format_guardian_clause_on_one_line=false
-org.eclipse.cdt.core.formatter.indent_access_specifier_compare_to_type_header=false
-org.eclipse.cdt.core.formatter.indent_access_specifier_extra_spaces=0
-org.eclipse.cdt.core.formatter.indent_body_declarations_compare_to_access_specifier=true
-org.eclipse.cdt.core.formatter.indent_body_declarations_compare_to_namespace_header=false
-org.eclipse.cdt.core.formatter.indent_breaks_compare_to_cases=true
-org.eclipse.cdt.core.formatter.indent_declaration_compare_to_template_header=true
-org.eclipse.cdt.core.formatter.indent_empty_lines=false
-org.eclipse.cdt.core.formatter.indent_statements_compare_to_block=true
-org.eclipse.cdt.core.formatter.indent_statements_compare_to_body=true
-org.eclipse.cdt.core.formatter.indent_switchstatements_compare_to_cases=true
-org.eclipse.cdt.core.formatter.indent_switchstatements_compare_to_switch=false
-org.eclipse.cdt.core.formatter.indentation.size=2
-org.eclipse.cdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_after_template_declaration=insert
-org.eclipse.cdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_before_colon_in_constructor_initializer_list=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_before_identifier_in_function_declaration=insert
-org.eclipse.cdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_in_empty_block=insert
-org.eclipse.cdt.core.formatter.insert_space_after_assignment_operator=insert
-org.eclipse.cdt.core.formatter.insert_space_after_binary_operator=insert
-org.eclipse.cdt.core.formatter.insert_space_after_closing_angle_bracket_in_template_arguments=insert
-org.eclipse.cdt.core.formatter.insert_space_after_closing_angle_bracket_in_template_parameters=insert
-org.eclipse.cdt.core.formatter.insert_space_after_closing_brace_in_block=insert
-org.eclipse.cdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
-org.eclipse.cdt.core.formatter.insert_space_after_colon_in_base_clause=insert
-org.eclipse.cdt.core.formatter.insert_space_after_colon_in_case=insert
-org.eclipse.cdt.core.formatter.insert_space_after_colon_in_conditional=insert
-org.eclipse.cdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_base_types=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_declarator_list=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_expression_list=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_template_arguments=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_template_parameters=insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_angle_bracket_in_template_arguments=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_angle_bracket_in_template_parameters=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_bracket=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_exception_specification=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_postfix_operator=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_prefix_operator=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_question_in_conditional=insert
-org.eclipse.cdt.core.formatter.insert_space_after_semicolon_in_for=insert
-org.eclipse.cdt.core.formatter.insert_space_after_unary_operator=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_assignment_operator=insert
-org.eclipse.cdt.core.formatter.insert_space_before_binary_operator=insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_angle_bracket_in_template_arguments=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_angle_bracket_in_template_parameters=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_bracket=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_exception_specification=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_colon_in_base_clause=insert
-org.eclipse.cdt.core.formatter.insert_space_before_colon_in_case=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_colon_in_conditional=insert
-org.eclipse.cdt.core.formatter.insert_space_before_colon_in_default=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_base_types=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_declarator_list=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_expression_list=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_template_arguments=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_template_parameters=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_angle_bracket_in_template_arguments=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_angle_bracket_in_template_parameters=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_block=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_namespace_declaration=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_bracket=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_exception_specification=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_for=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_if=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_while=insert
-org.eclipse.cdt.core.formatter.insert_space_before_postfix_operator=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_prefix_operator=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_question_in_conditional=insert
-org.eclipse.cdt.core.formatter.insert_space_before_semicolon=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_unary_operator=do not insert
-org.eclipse.cdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
-org.eclipse.cdt.core.formatter.insert_space_between_empty_brackets=do not insert
-org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_exception_specification=do not insert
-org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
-org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
-org.eclipse.cdt.core.formatter.join_wrapped_lines=false
-org.eclipse.cdt.core.formatter.keep_else_statement_on_same_line=false
-org.eclipse.cdt.core.formatter.keep_empty_array_initializer_on_one_line=false
-org.eclipse.cdt.core.formatter.keep_imple_if_on_one_line=false
-org.eclipse.cdt.core.formatter.keep_then_statement_on_same_line=false
-org.eclipse.cdt.core.formatter.lineSplit=80
-org.eclipse.cdt.core.formatter.number_of_empty_lines_to_preserve=1
-org.eclipse.cdt.core.formatter.put_empty_statement_on_new_line=true
-org.eclipse.cdt.core.formatter.tabulation.char=space
-org.eclipse.cdt.core.formatter.tabulation.size=2
-org.eclipse.cdt.core.formatter.use_tabs_only_for_leading_indentations=false
-"""
-
-NOINDEX_TEMPLATE = """eclipse.preferences.version=1
-indexer/indexerId=org.eclipse.cdt.core.nullIndexer
-"""
diff --git a/python/mozbuild/mozbuild/backend/fastermake.py b/python/mozbuild/mozbuild/backend/fastermake.py
deleted file mode 100644
index d55928e8c7..0000000000
--- a/python/mozbuild/mozbuild/backend/fastermake.py
+++ /dev/null
@@ -1,165 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, unicode_literals, print_function
-
-from mozbuild.backend.base import PartialBackend
-from mozbuild.backend.common import CommonBackend
-from mozbuild.frontend.context import (
- ObjDirPath,
-)
-from mozbuild.frontend.data import (
- ChromeManifestEntry,
- FinalTargetPreprocessedFiles,
- FinalTargetFiles,
- JARManifest,
- XPIDLFile,
-)
-from mozbuild.makeutil import Makefile
-from mozbuild.util import OrderedDefaultDict
-from mozpack.manifests import InstallManifest
-import mozpack.path as mozpath
-
-
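-# Partial backend generating the standalone "faster" make tree: it only
-# handles dist/bin install manifests (JAR manifests, chrome manifest entries
-# and final target files) and declines everything else.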
-class FasterMakeBackend(CommonBackend, PartialBackend):
- def _init(self):
- super(FasterMakeBackend, self)._init()
-
- self._manifest_entries = OrderedDefaultDict(set)
-
- self._install_manifests = OrderedDefaultDict(InstallManifest)
-
- self._dependencies = OrderedDefaultDict(list)
-
- self._has_xpidl = False
-
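-    # Add a preprocessing entry to the install manifest for obj's install
-    # target, with a per-file depfile under $(TOPOBJDIR)/faster/.deps.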
- def _add_preprocess(self, obj, path, dest, target=None, **kwargs):
- if target is None:
- target = mozpath.basename(path)
-        # This matches what PP_TARGETS does in config/rules.mk.
- if target.endswith('.in'):
- target = target[:-3]
- if target.endswith('.css'):
- kwargs['marker'] = '%'
- depfile = mozpath.join(
- self.environment.topobjdir, 'faster', '.deps',
- mozpath.join(obj.install_target, dest, target).replace('/', '_'))
- self._install_manifests[obj.install_target].add_preprocess(
- mozpath.join(obj.srcdir, path),
- mozpath.join(dest, target),
- depfile,
- **kwargs)
-
- def consume_object(self, obj):
- if isinstance(obj, JARManifest) and \
- obj.install_target.startswith('dist/bin'):
- self._consume_jar_manifest(obj)
-
- elif isinstance(obj, (FinalTargetFiles,
- FinalTargetPreprocessedFiles)) and \
- obj.install_target.startswith('dist/bin'):
- defines = obj.defines or {}
- if defines:
- defines = defines.defines
- for path, files in obj.files.walk():
- for f in files:
- if isinstance(obj, FinalTargetPreprocessedFiles):
- self._add_preprocess(obj, f.full_path, path,
- target=f.target_basename,
- defines=defines)
- elif '*' in f:
- def _prefix(s):
- for p in mozpath.split(s):
- if '*' not in p:
- yield p + '/'
- prefix = ''.join(_prefix(f.full_path))
-
- self._install_manifests[obj.install_target] \
- .add_pattern_symlink(
- prefix,
- f.full_path[len(prefix):],
- mozpath.join(path, f.target_basename))
- else:
- self._install_manifests[obj.install_target].add_symlink(
- f.full_path,
- mozpath.join(path, f.target_basename)
- )
- if isinstance(f, ObjDirPath):
- dep_target = 'install-%s' % obj.install_target
- self._dependencies[dep_target].append(
- mozpath.relpath(f.full_path,
- self.environment.topobjdir))
-
- elif isinstance(obj, ChromeManifestEntry) and \
- obj.install_target.startswith('dist/bin'):
- top_level = mozpath.join(obj.install_target, 'chrome.manifest')
- if obj.path != top_level:
- entry = 'manifest %s' % mozpath.relpath(obj.path,
- obj.install_target)
- self._manifest_entries[top_level].add(entry)
- self._manifest_entries[obj.path].add(str(obj.entry))
-
- elif isinstance(obj, XPIDLFile):
- self._has_xpidl = True
- # We're not actually handling XPIDL files.
- return False
-
- else:
- return False
-
- return True
-
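-    # Emit faster/Makefile and one install manifest per install target,
-    # wiring in the chrome manifest entries and dependencies collected in
-    # consume_object().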
- def consume_finished(self):
- mk = Makefile()
- # Add the default rule at the very beginning.
- mk.create_rule(['default'])
- mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
- mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)
- if not self._has_xpidl:
- mk.add_statement('NO_XPIDL = 1')
-
- # Add a few necessary variables inherited from configure
- for var in (
- 'PYTHON',
- 'ACDEFINES',
- 'MOZ_BUILD_APP',
- 'MOZ_WIDGET_TOOLKIT',
- ):
- value = self.environment.substs.get(var)
- if value is not None:
- mk.add_statement('%s = %s' % (var, value))
-
- install_manifests_bases = self._install_manifests.keys()
-
- # Add information for chrome manifest generation
- manifest_targets = []
-
- for target, entries in self._manifest_entries.iteritems():
- manifest_targets.append(target)
- install_target = mozpath.basedir(target, install_manifests_bases)
- self._install_manifests[install_target].add_content(
- ''.join('%s\n' % e for e in sorted(entries)),
- mozpath.relpath(target, install_target))
-
- # Add information for install manifests.
- mk.add_statement('INSTALL_MANIFESTS = %s'
- % ' '.join(self._install_manifests.keys()))
-
-        # Add dependencies we inferred:
- for target, deps in self._dependencies.iteritems():
- mk.create_rule([target]).add_dependencies(
- '$(TOPOBJDIR)/%s' % d for d in deps)
-
- mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')
-
- for base, install_manifest in self._install_manifests.iteritems():
- with self._write_file(
- mozpath.join(self.environment.topobjdir, 'faster',
- 'install_%s' % base.replace('/', '_'))) as fh:
- install_manifest.write(fileobj=fh)
-
- with self._write_file(
- mozpath.join(self.environment.topobjdir, 'faster',
- 'Makefile')) as fh:
- mk.dump(fh, removal_guard=False)
diff --git a/python/mozbuild/mozbuild/backend/mach_commands.py b/python/mozbuild/mozbuild/backend/mach_commands.py
deleted file mode 100644
index f2448b2f44..0000000000
--- a/python/mozbuild/mozbuild/backend/mach_commands.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import argparse
-import os
-import sys
-import subprocess
-import which
-
-from mozbuild.base import (
- MachCommandBase,
-)
-
-from mach.decorators import (
- CommandArgument,
- CommandProvider,
- Command,
-)
-
-@CommandProvider
-class MachCommands(MachCommandBase):
- @Command('ide', category='devenv',
- description='Generate a project and launch an IDE.')
- @CommandArgument('ide', choices=['eclipse', 'visualstudio', 'intellij'])
- @CommandArgument('args', nargs=argparse.REMAINDER)
- def eclipse(self, ide, args):
- if ide == 'eclipse':
- backend = 'CppEclipse'
- elif ide == 'visualstudio':
- backend = 'VisualStudio'
-
- if ide == 'eclipse':
- try:
- which.which('eclipse')
- except which.WhichError:
- print('Eclipse CDT 8.4 or later must be installed in your PATH.')
- print('Download: http://www.eclipse.org/cdt/downloads.php')
- return 1
-        elif ide == 'intellij':
- studio = ['idea']
- if sys.platform != 'darwin':
- try:
- which.which(studio[0])
-                except which.WhichError:
- self.print_ide_error(ide)
- return 1
- else:
- # In order of preference!
- for d in self.get_mac_ide_preferences(ide):
- if os.path.isdir(d):
- studio = ['open', '-a', d]
- break
- else:
- print('IntelliJ IDEA 14 is not installed in /Applications.')
- return 1
-
- # Here we refresh the whole build. 'build export' is sufficient here and is probably more
- # correct but it's also nice having a single target to get a fully built and indexed
-        # project (gives an easy target to use before going out to lunch).
- res = self._mach_context.commands.dispatch('build', self._mach_context)
- if res != 0:
- return 1
-
-        if ide == 'intellij':
- res = self._mach_context.commands.dispatch('package', self._mach_context)
- if res != 0:
- return 1
- else:
- # Generate or refresh the IDE backend.
- python = self.virtualenv_manager.python_path
- config_status = os.path.join(self.topobjdir, 'config.status')
- args = [python, config_status, '--backend=%s' % backend]
- res = self._run_command_in_objdir(args=args, pass_thru=True, ensure_exit_code=False)
- if res != 0:
- return 1
-
-
- if ide == 'eclipse':
- eclipse_workspace_dir = self.get_eclipse_workspace_path()
- process = subprocess.check_call(['eclipse', '-data', eclipse_workspace_dir])
- elif ide == 'visualstudio':
- visual_studio_workspace_dir = self.get_visualstudio_workspace_path()
- process = subprocess.check_call(['explorer.exe', visual_studio_workspace_dir])
- elif ide == 'intellij':
- gradle_dir = None
- if self.is_gradle_project_already_imported():
- gradle_dir = self.get_gradle_project_path()
- else:
- gradle_dir = self.get_gradle_import_path()
- process = subprocess.check_call(studio + [gradle_dir])
-
- def get_eclipse_workspace_path(self):
- from mozbuild.backend.cpp_eclipse import CppEclipseBackend
- return CppEclipseBackend.get_workspace_path(self.topsrcdir, self.topobjdir)
-
- def get_visualstudio_workspace_path(self):
- return os.path.join(self.topobjdir, 'msvc', 'mozilla.sln')
-
- def get_gradle_project_path(self):
- return os.path.join(self.topobjdir, 'mobile', 'gradle')
-
- def get_gradle_import_path(self):
- return os.path.join(self.get_gradle_project_path(), 'build.gradle')
-
- def is_gradle_project_already_imported(self):
- gradle_project_path = os.path.join(self.get_gradle_project_path(), '.idea')
- return os.path.exists(gradle_project_path)
-
- def get_mac_ide_preferences(self, ide):
- if sys.platform == 'darwin':
- return [
- '/Applications/IntelliJ IDEA 14 EAP.app',
- '/Applications/IntelliJ IDEA 14.app',
- '/Applications/IntelliJ IDEA 14 CE EAP.app',
- '/Applications/IntelliJ IDEA 14 CE.app']
-
- def print_ide_error(self, ide):
- if ide == 'intellij':
- print('IntelliJ is not installed in your PATH.')
- print('You can generate a command-line launcher from IntelliJ IDEA->Tools->Create Command-line launcher with script name \'idea\'')
diff --git a/python/mozbuild/mozbuild/backend/visualstudio.py b/python/mozbuild/mozbuild/backend/visualstudio.py
deleted file mode 100644
index 86e97d13d3..0000000000
--- a/python/mozbuild/mozbuild/backend/visualstudio.py
+++ /dev/null
@@ -1,582 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# This file contains a build backend for generating Visual Studio project
-# files.
-
-from __future__ import absolute_import, unicode_literals
-
-import errno
-import os
-import re
-import types
-import uuid
-
-from xml.dom import getDOMImplementation
-
-from mozpack.files import FileFinder
-
-from .common import CommonBackend
-from ..frontend.data import (
- Defines,
- GeneratedSources,
- HostProgram,
- HostSources,
- Library,
- LocalInclude,
- Program,
- Sources,
- UnifiedSources,
-)
-from mozbuild.base import ExecutionSummary
-
-
-MSBUILD_NAMESPACE = 'http://schemas.microsoft.com/developer/msbuild/2003'
-
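-# Project GUIDs are derived deterministically from the project name so that
-# regenerating the backend keeps them stable across runs.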
-def get_id(name):
- return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper()
-
-def visual_studio_product_to_solution_version(version):
- if version == '2015':
- return '12.00', '14'
- else:
- raise Exception('Unknown version seen: %s' % version)
-
-def visual_studio_product_to_platform_toolset_version(version):
- if version == '2015':
- return 'v140'
- else:
- raise Exception('Unknown version seen: %s' % version)
-
-class VisualStudioBackend(CommonBackend):
- """Generate Visual Studio project files.
-
- This backend is used to produce Visual Studio projects and a solution
- to foster developing Firefox with Visual Studio.
-
- This backend is currently considered experimental. There are many things
- not optimal about how it works.
- """
-
- def _init(self):
- CommonBackend._init(self)
-
- # These should eventually evolve into parameters.
- self._out_dir = os.path.join(self.environment.topobjdir, 'msvc')
- self._projsubdir = 'projects'
-
- self._version = self.environment.substs.get('MSVS_VERSION', '2015')
-
- self._paths_to_sources = {}
- self._paths_to_includes = {}
- self._paths_to_defines = {}
- self._paths_to_configs = {}
- self._libs_to_paths = {}
- self._progs_to_paths = {}
-
- def summary(self):
- return ExecutionSummary(
- 'VisualStudio backend executed in {execution_time:.2f}s\n'
- 'Generated Visual Studio solution at {path:s}',
- execution_time=self._execution_time,
- path=os.path.join(self._out_dir, 'mozilla.sln'))
-
- def consume_object(self, obj):
- reldir = getattr(obj, 'relativedir', None)
-
- if hasattr(obj, 'config') and reldir not in self._paths_to_configs:
- self._paths_to_configs[reldir] = obj.config
-
- if isinstance(obj, Sources):
- self._add_sources(reldir, obj)
-
- elif isinstance(obj, HostSources):
- self._add_sources(reldir, obj)
-
- elif isinstance(obj, GeneratedSources):
- self._add_sources(reldir, obj)
-
- elif isinstance(obj, UnifiedSources):
- # XXX we should be letting CommonBackend.consume_object call this
- # for us instead.
-            self._process_unified_sources(obj)
-
- elif isinstance(obj, Library):
- self._libs_to_paths[obj.basename] = reldir
-
- elif isinstance(obj, Program) or isinstance(obj, HostProgram):
- self._progs_to_paths[obj.program] = reldir
-
- elif isinstance(obj, Defines):
- self._paths_to_defines.setdefault(reldir, {}).update(obj.defines)
-
- elif isinstance(obj, LocalInclude):
- includes = self._paths_to_includes.setdefault(reldir, [])
- includes.append(obj.path.full_path)
-
- # Just acknowledge everything.
- return True
-
- def _add_sources(self, reldir, obj):
- s = self._paths_to_sources.setdefault(reldir, set())
- s.update(obj.files)
-
- def _process_unified_sources(self, obj):
- reldir = getattr(obj, 'relativedir', None)
-
- s = self._paths_to_sources.setdefault(reldir, set())
- s.update(obj.files)
-
- def consume_finished(self):
- out_dir = self._out_dir
- out_proj_dir = os.path.join(self._out_dir, self._projsubdir)
-
- projects = self._write_projects_for_sources(self._libs_to_paths,
- "library", out_proj_dir)
- projects.update(self._write_projects_for_sources(self._progs_to_paths,
- "binary", out_proj_dir))
-
- # Generate projects that can be used to build common targets.
- for target in ('export', 'binaries', 'tools', 'full'):
- basename = 'target_%s' % target
- command = '$(SolutionDir)\\mach.bat build'
- if target != 'full':
- command += ' %s' % target
-
- project_id = self._write_vs_project(out_proj_dir, basename, target,
- build_command=command,
- clean_command='$(SolutionDir)\\mach.bat build clean')
-
- projects[basename] = (project_id, basename, target)
-
- # A project that can be used to regenerate the visual studio projects.
- basename = 'target_vs'
- project_id = self._write_vs_project(out_proj_dir, basename, 'visual-studio',
- build_command='$(SolutionDir)\\mach.bat build-backend -b VisualStudio')
- projects[basename] = (project_id, basename, 'visual-studio')
-
- # Write out a shared property file with common variables.
- props_path = os.path.join(out_proj_dir, 'mozilla.props')
- with self._write_file(props_path, mode='rb') as fh:
- self._write_props(fh)
-
- # Generate some wrapper scripts that allow us to invoke mach inside
- # a MozillaBuild-like environment. We currently only use the batch
- # script. We'd like to use the PowerShell script. However, it seems
- # to buffer output from within Visual Studio (surely this is
- # configurable) and the default execution policy of PowerShell doesn't
- # allow custom scripts to be executed.
- with self._write_file(os.path.join(out_dir, 'mach.bat'), mode='rb') as fh:
- self._write_mach_batch(fh)
-
- with self._write_file(os.path.join(out_dir, 'mach.ps1'), mode='rb') as fh:
- self._write_mach_powershell(fh)
-
- # Write out a solution file to tie it all together.
- solution_path = os.path.join(out_dir, 'mozilla.sln')
- with self._write_file(solution_path, mode='rb') as fh:
- self._write_solution(fh, projects)
-
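-    # Emit one .vcxproj per library or binary, using the sources, headers,
-    # local includes and defines recorded during consume_object().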
- def _write_projects_for_sources(self, sources, prefix, out_dir):
- projects = {}
- for item, path in sorted(sources.items()):
- config = self._paths_to_configs.get(path, None)
- sources = self._paths_to_sources.get(path, set())
- sources = set(os.path.join('$(TopSrcDir)', path, s) for s in sources)
- sources = set(os.path.normpath(s) for s in sources)
-
- finder = FileFinder(os.path.join(self.environment.topsrcdir, path),
- find_executables=False)
-
- headers = [t[0] for t in finder.find('*.h')]
- headers = [os.path.normpath(os.path.join('$(TopSrcDir)',
- path, f)) for f in headers]
-
- includes = [
- os.path.join('$(TopSrcDir)', path),
- os.path.join('$(TopObjDir)', path),
- ]
- includes.extend(self._paths_to_includes.get(path, []))
- includes.append('$(TopObjDir)\\dist\\include\\nss')
- includes.append('$(TopObjDir)\\dist\\include')
-
- for v in ('NSPR_CFLAGS', 'NSS_CFLAGS', 'MOZ_JPEG_CFLAGS',
- 'MOZ_PNG_CFLAGS', 'MOZ_ZLIB_CFLAGS', 'MOZ_PIXMAN_CFLAGS'):
- if not config:
- break
-
- args = config.substs.get(v, [])
-
- for i, arg in enumerate(args):
- if arg.startswith('-I'):
- includes.append(os.path.normpath(arg[2:]))
-
- # Pull in system defaults.
- includes.append('$(DefaultIncludes)')
-
- includes = [os.path.normpath(i) for i in includes]
-
- defines = []
- for k, v in self._paths_to_defines.get(path, {}).items():
- if v is True:
- defines.append(k)
- else:
- defines.append('%s=%s' % (k, v))
-
- debugger=None
- if prefix == 'binary':
- if item.startswith(self.environment.substs['MOZ_APP_NAME']):
- debugger = ('$(TopObjDir)\\dist\\bin\\%s' % item, '-no-remote')
- else:
- debugger = ('$(TopObjDir)\\dist\\bin\\%s' % item, '')
-
- basename = '%s_%s' % (prefix, item)
-
- project_id = self._write_vs_project(out_dir, basename, item,
- includes=includes,
- forced_includes=['$(TopObjDir)\\dist\\include\\mozilla-config.h'],
- defines=defines,
- headers=headers,
- sources=sources,
- debugger=debugger)
-
- projects[basename] = (project_id, basename, item)
-
- return projects
-
- def _write_solution(self, fh, projects):
- # Visual Studio appears to write out its current version in the
- # solution file. Instead of trying to figure out what version it will
- # write, try to parse the version out of the existing file and use it
- # verbatim.
- vs_version = None
- try:
- with open(fh.name, 'rb') as sfh:
- for line in sfh:
- if line.startswith(b'VisualStudioVersion = '):
- vs_version = line.split(b' = ', 1)[1].strip()
- except IOError as e:
- if e.errno != errno.ENOENT:
- raise
-
- format_version, comment_version = visual_studio_product_to_solution_version(self._version)
- # This is a Visual C++ Project type.
- project_type = '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
-
- # Visual Studio seems to require this header.
- fh.write('Microsoft Visual Studio Solution File, Format Version %s\r\n' %
- format_version)
- fh.write('# Visual Studio %s\r\n' % comment_version)
-
- if vs_version:
- fh.write('VisualStudioVersion = %s\r\n' % vs_version)
-
- # Corresponds to VS2013.
- fh.write('MinimumVisualStudioVersion = 12.0.31101.0\r\n')
-
- binaries_id = projects['target_binaries'][0]
-
- # Write out entries for each project.
- for key in sorted(projects):
- project_id, basename, name = projects[key]
- path = os.path.join(self._projsubdir, '%s.vcxproj' % basename)
-
- fh.write('Project("{%s}") = "%s", "%s", "{%s}"\r\n' % (
- project_type, name, path, project_id))
-
- # Make all libraries depend on the binaries target.
- if key.startswith('library_'):
- fh.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
- fh.write('\t\t{%s} = {%s}\r\n' % (binaries_id, binaries_id))
- fh.write('\tEndProjectSection\r\n')
-
- fh.write('EndProject\r\n')
-
- # Write out solution folders for organizing things.
-
- # This is the UUID you use for solution folders.
- container_id = '2150E333-8FDC-42A3-9474-1A3956D46DE8'
-
- def write_container(desc):
- cid = get_id(desc.encode('utf-8'))
- fh.write('Project("{%s}") = "%s", "%s", "{%s}"\r\n' % (
- container_id, desc, desc, cid))
- fh.write('EndProject\r\n')
-
- return cid
-
- library_id = write_container('Libraries')
- target_id = write_container('Build Targets')
- binary_id = write_container('Binaries')
-
- fh.write('Global\r\n')
-
- # Make every project a member of our one configuration.
- fh.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
- fh.write('\t\tBuild|Win32 = Build|Win32\r\n')
- fh.write('\tEndGlobalSection\r\n')
-
- # Set every project's active configuration to the one configuration and
- # set up the default build project.
- fh.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
- for name, project in sorted(projects.items()):
- fh.write('\t\t{%s}.Build|Win32.ActiveCfg = Build|Win32\r\n' % project[0])
-
- # Only build the full build target by default.
- # It's important we don't write multiple entries here because they
- # conflict!
- if name == 'target_full':
- fh.write('\t\t{%s}.Build|Win32.Build.0 = Build|Win32\r\n' % project[0])
-
- fh.write('\tEndGlobalSection\r\n')
-
- fh.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
- fh.write('\t\tHideSolutionNode = FALSE\r\n')
- fh.write('\tEndGlobalSection\r\n')
-
- # Associate projects with containers.
- fh.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
- for key in sorted(projects):
- project_id = projects[key][0]
-
- if key.startswith('library_'):
- container_id = library_id
- elif key.startswith('target_'):
- container_id = target_id
- elif key.startswith('binary_'):
- container_id = binary_id
- else:
- raise Exception('Unknown project type: %s' % key)
-
- fh.write('\t\t{%s} = {%s}\r\n' % (project_id, container_id))
- fh.write('\tEndGlobalSection\r\n')
-
- fh.write('EndGlobal\r\n')
-
- def _write_props(self, fh):
- impl = getDOMImplementation()
- doc = impl.createDocument(MSBUILD_NAMESPACE, 'Project', None)
-
- project = doc.documentElement
- project.setAttribute('xmlns', MSBUILD_NAMESPACE)
- project.setAttribute('ToolsVersion', '4.0')
-
- ig = project.appendChild(doc.createElement('ImportGroup'))
- ig.setAttribute('Label', 'PropertySheets')
-
- pg = project.appendChild(doc.createElement('PropertyGroup'))
- pg.setAttribute('Label', 'UserMacros')
-
- ig = project.appendChild(doc.createElement('ItemGroup'))
-
- def add_var(k, v):
- e = pg.appendChild(doc.createElement(k))
- e.appendChild(doc.createTextNode(v))
-
- e = ig.appendChild(doc.createElement('BuildMacro'))
- e.setAttribute('Include', k)
-
- e = e.appendChild(doc.createElement('Value'))
- e.appendChild(doc.createTextNode('$(%s)' % k))
-
- add_var('TopObjDir', os.path.normpath(self.environment.topobjdir))
- add_var('TopSrcDir', os.path.normpath(self.environment.topsrcdir))
- add_var('PYTHON', '$(TopObjDir)\\_virtualenv\\Scripts\\python.exe')
- add_var('MACH', '$(TopSrcDir)\\mach')
-
- # From MozillaBuild.
- add_var('DefaultIncludes', os.environ.get('INCLUDE', ''))
-
- fh.write(b'\xef\xbb\xbf')
- doc.writexml(fh, addindent=' ', newl='\r\n')
-
- def _relevant_environment_variables(self):
- # Write out the environment variables, presumably coming from
- # MozillaBuild.
- for k, v in sorted(os.environ.items()):
- if not re.match('^[a-zA-Z0-9_]+$', k):
- continue
-
- if k in ('OLDPWD', 'PS1'):
- continue
-
- if k.startswith('_'):
- continue
-
- yield k, v
-
- yield 'TOPSRCDIR', self.environment.topsrcdir
- yield 'TOPOBJDIR', self.environment.topobjdir
-
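-    # PowerShell counterpart of mach.bat: export the captured environment,
-    # then run mach through the MozillaBuild msys bash. Currently unused in
-    # favor of the batch script (see consume_finished).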
- def _write_mach_powershell(self, fh):
- for k, v in self._relevant_environment_variables():
- fh.write(b'$env:%s = "%s"\r\n' % (k, v))
-
- relpath = os.path.relpath(self.environment.topsrcdir,
- self.environment.topobjdir).replace('\\', '/')
-
- fh.write(b'$bashargs = "%s/mach", "--log-no-times"\r\n' % relpath)
- fh.write(b'$bashargs = $bashargs + $args\r\n')
-
- fh.write(b"$expanded = $bashargs -join ' '\r\n")
- fh.write(b'$procargs = "-c", $expanded\r\n')
-
- fh.write(b'Start-Process -WorkingDirectory $env:TOPOBJDIR '
- b'-FilePath $env:MOZILLABUILD\\msys\\bin\\bash '
- b'-ArgumentList $procargs '
- b'-Wait -NoNewWindow\r\n')
-
- def _write_mach_batch(self, fh):
- """Write out a batch script that builds the tree.
-
- The script "bootstraps" into the MozillaBuild environment by setting
- the environment variables that are active in the current MozillaBuild
- environment. Then, it builds the tree.
- """
- for k, v in self._relevant_environment_variables():
- fh.write(b'SET "%s=%s"\r\n' % (k, v))
-
- fh.write(b'cd %TOPOBJDIR%\r\n')
-
- # We need to convert Windows-native paths to msys paths. Easiest way is
- # relative paths, since munging c:\ to /c/ is slightly more
- # complicated.
- relpath = os.path.relpath(self.environment.topsrcdir,
- self.environment.topobjdir).replace('\\', '/')
-
- # We go through mach because it has the logic for choosing the most
- # appropriate build tool.
- fh.write(b'"%%MOZILLABUILD%%\\msys\\bin\\bash" '
- b'-c "%s/mach --log-no-times %%1 %%2 %%3 %%4 %%5 %%6 %%7"' % relpath)
-
- def _write_vs_project(self, out_dir, basename, name, **kwargs):
- root = '%s.vcxproj' % basename
- project_id = get_id(basename.encode('utf-8'))
-
- with self._write_file(os.path.join(out_dir, root), mode='rb') as fh:
- project_id, name = VisualStudioBackend.write_vs_project(fh,
- self._version, project_id, name, **kwargs)
-
- with self._write_file(os.path.join(out_dir, '%s.user' % root), mode='rb') as fh:
- fh.write('<?xml version="1.0" encoding="utf-8"?>\r\n')
- fh.write('<Project ToolsVersion="4.0" xmlns="%s">\r\n' %
- MSBUILD_NAMESPACE)
- fh.write('</Project>\r\n')
-
- return project_id
-
- @staticmethod
- def write_vs_project(fh, version, project_id, name, includes=[],
- forced_includes=[], defines=[],
- build_command=None, clean_command=None,
- debugger=None, headers=[], sources=[]):
-
- impl = getDOMImplementation()
- doc = impl.createDocument(MSBUILD_NAMESPACE, 'Project', None)
-
- project = doc.documentElement
- project.setAttribute('DefaultTargets', 'Build')
- project.setAttribute('ToolsVersion', '4.0')
- project.setAttribute('xmlns', MSBUILD_NAMESPACE)
-
- ig = project.appendChild(doc.createElement('ItemGroup'))
- ig.setAttribute('Label', 'ProjectConfigurations')
-
- pc = ig.appendChild(doc.createElement('ProjectConfiguration'))
- pc.setAttribute('Include', 'Build|Win32')
-
- c = pc.appendChild(doc.createElement('Configuration'))
- c.appendChild(doc.createTextNode('Build'))
-
- p = pc.appendChild(doc.createElement('Platform'))
- p.appendChild(doc.createTextNode('Win32'))
-
- pg = project.appendChild(doc.createElement('PropertyGroup'))
- pg.setAttribute('Label', 'Globals')
-
- n = pg.appendChild(doc.createElement('ProjectName'))
- n.appendChild(doc.createTextNode(name))
-
- k = pg.appendChild(doc.createElement('Keyword'))
- k.appendChild(doc.createTextNode('MakeFileProj'))
-
- g = pg.appendChild(doc.createElement('ProjectGuid'))
- g.appendChild(doc.createTextNode('{%s}' % project_id))
-
- rn = pg.appendChild(doc.createElement('RootNamespace'))
- rn.appendChild(doc.createTextNode('mozilla'))
-
- pts = pg.appendChild(doc.createElement('PlatformToolset'))
- pts.appendChild(doc.createTextNode(visual_studio_product_to_platform_toolset_version(version)))
-
- i = project.appendChild(doc.createElement('Import'))
- i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.Default.props')
-
- ig = project.appendChild(doc.createElement('ImportGroup'))
- ig.setAttribute('Label', 'ExtensionTargets')
-
- ig = project.appendChild(doc.createElement('ImportGroup'))
- ig.setAttribute('Label', 'ExtensionSettings')
-
- ig = project.appendChild(doc.createElement('ImportGroup'))
- ig.setAttribute('Label', 'PropertySheets')
- i = ig.appendChild(doc.createElement('Import'))
- i.setAttribute('Project', 'mozilla.props')
-
- pg = project.appendChild(doc.createElement('PropertyGroup'))
- pg.setAttribute('Label', 'Configuration')
- ct = pg.appendChild(doc.createElement('ConfigurationType'))
- ct.appendChild(doc.createTextNode('Makefile'))
-
- pg = project.appendChild(doc.createElement('PropertyGroup'))
- pg.setAttribute('Condition', "'$(Configuration)|$(Platform)'=='Build|Win32'")
-
- if build_command:
- n = pg.appendChild(doc.createElement('NMakeBuildCommandLine'))
- n.appendChild(doc.createTextNode(build_command))
-
- if clean_command:
- n = pg.appendChild(doc.createElement('NMakeCleanCommandLine'))
- n.appendChild(doc.createTextNode(clean_command))
-
- if includes:
- n = pg.appendChild(doc.createElement('NMakeIncludeSearchPath'))
- n.appendChild(doc.createTextNode(';'.join(includes)))
-
- if forced_includes:
- n = pg.appendChild(doc.createElement('NMakeForcedIncludes'))
- n.appendChild(doc.createTextNode(';'.join(forced_includes)))
-
- if defines:
- n = pg.appendChild(doc.createElement('NMakePreprocessorDefinitions'))
- n.appendChild(doc.createTextNode(';'.join(defines)))
-
- if debugger:
- n = pg.appendChild(doc.createElement('LocalDebuggerCommand'))
- n.appendChild(doc.createTextNode(debugger[0]))
-
- n = pg.appendChild(doc.createElement('LocalDebuggerCommandArguments'))
- n.appendChild(doc.createTextNode(debugger[1]))
-
- i = project.appendChild(doc.createElement('Import'))
- i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.props')
-
- i = project.appendChild(doc.createElement('Import'))
- i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.targets')
-
- # Now add files to the project.
- ig = project.appendChild(doc.createElement('ItemGroup'))
- for header in sorted(headers or []):
- n = ig.appendChild(doc.createElement('ClInclude'))
- n.setAttribute('Include', header)
-
- ig = project.appendChild(doc.createElement('ItemGroup'))
- for source in sorted(sources or []):
- n = ig.appendChild(doc.createElement('ClCompile'))
- n.setAttribute('Include', source)
-
- fh.write(b'\xef\xbb\xbf')
- doc.writexml(fh, addindent=' ', newl='\r\n')
-
- return project_id, name
diff --git a/python/mozbuild/mozbuild/codecoverage/__init__.py b/python/mozbuild/mozbuild/codecoverage/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/python/mozbuild/mozbuild/codecoverage/__init__.py
+++ /dev/null
diff --git a/python/mozbuild/mozbuild/codecoverage/chrome_map.py b/python/mozbuild/mozbuild/codecoverage/chrome_map.py
deleted file mode 100644
index 81c3c9a071..0000000000
--- a/python/mozbuild/mozbuild/codecoverage/chrome_map.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from collections import defaultdict
-import json
-import os
-import urlparse
-
-from mach.config import ConfigSettings
-from mach.logging import LoggingManager
-from mozbuild.backend.common import CommonBackend
-from mozbuild.base import MozbuildObject
-from mozbuild.frontend.data import (
- FinalTargetFiles,
- FinalTargetPreprocessedFiles,
-)
-from mozbuild.frontend.data import JARManifest, ChromeManifestEntry
-from mozpack.chrome.manifest import (
- Manifest,
- ManifestChrome,
- ManifestOverride,
- ManifestResource,
- parse_manifest,
-)
-import mozpack.path as mozpath
-
-
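-# Follows chrome manifest entries (recursing into nested 'manifest' entries)
-# and records how chrome:// and resource:// URLs map onto objdir paths, along
-# with any overrides.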
-class ChromeManifestHandler(object):
- def __init__(self):
- self.overrides = {}
- self.chrome_mapping = defaultdict(set)
-
- def handle_manifest_entry(self, entry):
- format_strings = {
- "content": "chrome://%s/content/",
- "resource": "resource://%s/",
- "locale": "chrome://%s/locale/",
- "skin": "chrome://%s/skin/",
- }
-
- if isinstance(entry, (ManifestChrome, ManifestResource)):
- if isinstance(entry, ManifestResource):
- dest = entry.target
- url = urlparse.urlparse(dest)
- if not url.scheme:
- dest = mozpath.normpath(mozpath.join(entry.base, dest))
- if url.scheme == 'file':
- dest = mozpath.normpath(url.path)
- else:
- dest = mozpath.normpath(entry.path)
-
- base_uri = format_strings[entry.type] % entry.name
- self.chrome_mapping[base_uri].add(dest)
- if isinstance(entry, ManifestOverride):
- self.overrides[entry.overloaded] = entry.overload
- if isinstance(entry, Manifest):
- for e in parse_manifest(None, entry.path):
- self.handle_manifest_entry(e)
-
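-# Backend emitting chrome-map.json, which maps chrome:// URLs, overrides and
-# installed files back to their source paths for code coverage tooling.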
-class ChromeMapBackend(CommonBackend):
- def _init(self):
- CommonBackend._init(self)
-
- log_manager = LoggingManager()
- self._cmd = MozbuildObject(self.environment.topsrcdir, ConfigSettings(),
- log_manager, self.environment.topobjdir)
- self._install_mapping = {}
- self.manifest_handler = ChromeManifestHandler()
-
- def consume_object(self, obj):
- if isinstance(obj, JARManifest):
- self._consume_jar_manifest(obj)
- if isinstance(obj, ChromeManifestEntry):
- self.manifest_handler.handle_manifest_entry(obj.entry)
- if isinstance(obj, (FinalTargetFiles,
- FinalTargetPreprocessedFiles)):
- self._handle_final_target_files(obj)
- return True
-
- def _handle_final_target_files(self, obj):
- for path, files in obj.files.walk():
- for f in files:
- dest = mozpath.join(obj.install_target, path, f.target_basename)
- is_pp = isinstance(obj,
- FinalTargetPreprocessedFiles)
- self._install_mapping[dest] = f.full_path, is_pp
-
- def consume_finished(self):
- # Our result has three parts:
- # A map from url prefixes to objdir directories:
- # { "chrome://mozapps/content/": [ "dist/bin/chrome/toolkit/content/mozapps" ], ... }
- # A map of overrides.
- # A map from objdir paths to sourcedir paths, and a flag for whether the source was preprocessed:
- # { "dist/bin/browser/chrome/browser/content/browser/aboutSessionRestore.js":
- # [ "$topsrcdir/browser/components/sessionstore/content/aboutSessionRestore.js", false ], ... }
- outputfile = os.path.join(self.environment.topobjdir, 'chrome-map.json')
- with self._write_file(outputfile) as fh:
- chrome_mapping = self.manifest_handler.chrome_mapping
- overrides = self.manifest_handler.overrides
- json.dump([
- {k: list(v) for k, v in chrome_mapping.iteritems()},
- overrides,
- self._install_mapping,
- ], fh, sort_keys=True, indent=2)
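
For reference, the chrome-map.json written above is a three-element JSON array: chrome URL prefixes to objdir directories, overrides, and an objdir-to-srcdir install mapping. A minimal sketch of reading it back (the chrome URL is illustrative):

    import json

    with open('chrome-map.json') as fh:
        chrome_mapping, overrides, install_mapping = json.load(fh)

    # Map a chrome:// prefix to the objdir directories it is served from.
    prefix = 'chrome://mozapps/content/'
    for objdir in chrome_mapping.get(prefix, []):
        print(prefix, '->', objdir)

    # install_mapping values are [source_path, was_preprocessed] pairs.
    for objdir_path, (src_path, was_preprocessed) in install_mapping.items():
        if was_preprocessed:
            print(objdir_path, 'was preprocessed from', src_path)
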
diff --git a/python/mozbuild/mozbuild/codecoverage/packager.py b/python/mozbuild/mozbuild/codecoverage/packager.py
deleted file mode 100644
index 3a4f359f6a..0000000000
--- a/python/mozbuild/mozbuild/codecoverage/packager.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function
-
-import argparse
-import sys
-
-from mozpack.files import FileFinder
-from mozpack.copier import Jarrer
-
-def package_gcno_tree(root, output_file):
- # XXX JarWriter doesn't support unicode strings, see bug 1056859
- if isinstance(root, unicode):
- root = root.encode('utf-8')
-
- finder = FileFinder(root)
- jarrer = Jarrer(optimize=False)
- for p, f in finder.find("**/*.gcno"):
- jarrer.add(p, f)
- jarrer.copy(output_file)
-
-
-def cli(args=sys.argv[1:]):
- parser = argparse.ArgumentParser()
- parser.add_argument('-o', '--output-file',
- dest='output_file',
- help='Path to save packaged data to.')
- parser.add_argument('--root',
- dest='root',
- default=None,
- help='Root directory to search from.')
- args = parser.parse_args(args)
-
- if not args.root:
- from buildconfig import topobjdir
- args.root = topobjdir
-
- return package_gcno_tree(args.root, args.output_file)
-
-if __name__ == '__main__':
- sys.exit(cli())
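
For reference, before this removal the helper above could also be driven directly from Python rather than through its argparse front end; a minimal usage sketch (the paths are illustrative):

    from mozbuild.codecoverage.packager import package_gcno_tree

    # Collect every .gcno file under the object directory into a single
    # zip/jar so the coverage notes can be shipped alongside the build.
    package_gcno_tree('/path/to/objdir', 'code-coverage-gcno.zip')
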
diff --git a/python/mozbuild/mozbuild/config_status.py b/python/mozbuild/mozbuild/config_status.py
index 0b4e6e41dd..ac91a65afe 100644
--- a/python/mozbuild/mozbuild/config_status.py
+++ b/python/mozbuild/mozbuild/config_status.py
@@ -32,19 +32,6 @@ from mozbuild.backend import (
log_manager = LoggingManager()
-VISUAL_STUDIO_ADVERTISEMENT = '''
-===============================
-Visual Studio Support Available
-
-You are building Firefox on Windows. You can generate Visual Studio
-files by running:
-
- mach build-backend --backend=VisualStudio
-
-===============================
-'''.strip()
-
-
def config_status(topobjdir='.', topsrcdir='.', defines=None,
non_global_defines=None, substs=None, source=None,
mozconfig=None, args=sys.argv[1:]):
@@ -173,7 +160,3 @@ def config_status(topobjdir='.', topsrcdir='.', defines=None,
for path, diff in sorted(the_backend.file_diffs.items()):
print('\n'.join(diff))
- # Advertise Visual Studio if appropriate.
- if os.name == 'nt' and 'VisualStudio' not in options.backend:
- print(VISUAL_STUDIO_ADVERTISEMENT)
-
diff --git a/python/mozbuild/mozbuild/mach_commands.py b/python/mozbuild/mozbuild/mach_commands.py
index 6616b24931..38a98b7432 100644
--- a/python/mozbuild/mozbuild/mach_commands.py
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -509,7 +509,7 @@ class Build(MachCommandBase):
# to avoid accidentally disclosing PII.
telemetry_data['substs'] = {}
try:
- for key in ['MOZ_ARTIFACT_BUILDS', 'MOZ_USING_CCACHE']:
+ for key in ['MOZ_USING_CCACHE']:
value = self.substs.get(key, False)
telemetry_data['substs'][key] = value
except BuildEnvironmentNotFoundException:
@@ -1493,154 +1493,6 @@ class MachDebug(MachCommandBase):
return json.JSONEncoder.default(self, obj)
json.dump(self, cls=EnvironmentEncoder, sort_keys=True, fp=out)
-class ArtifactSubCommand(SubCommand):
- def __call__(self, func):
- after = SubCommand.__call__(self, func)
- jobchoices = {
- 'linux',
- 'linux64',
- 'macosx64',
- 'win32',
- 'win64'
- }
- args = [
- CommandArgument('--tree', metavar='TREE', type=str,
- help='Firefox tree.'),
- CommandArgument('--job', metavar='JOB', choices=jobchoices,
- help='Build job.'),
- CommandArgument('--verbose', '-v', action='store_true',
- help='Print verbose output.'),
- ]
- for arg in args:
- after = arg(after)
- return after
-
-
-@CommandProvider
-class PackageFrontend(MachCommandBase):
- """Fetch and install binary artifacts from Mozilla automation."""
-
- @Command('artifact', category='post-build',
- description='Use pre-built artifacts to build Firefox.')
- def artifact(self):
- '''Download, cache, and install pre-built binary artifacts to build Firefox.
-
- Use |mach build| as normal to freshen your installed binary libraries:
- artifact builds automatically download, cache, and install binary
- artifacts from Mozilla automation, replacing whatever may be in your
- object directory. Use |mach artifact last| to see what binary artifacts
- were last used.
-
- Never build libxul again!
-
- '''
- pass
-
- def _set_log_level(self, verbose):
- self.log_manager.terminal_handler.setLevel(logging.INFO if not verbose else logging.DEBUG)
-
- def _install_pip_package(self, package):
- if os.environ.get('MOZ_AUTOMATION'):
- self.virtualenv_manager._run_pip([
- 'install',
- package,
- '--no-index',
- '--find-links',
- 'http://pypi.pub.build.mozilla.org/pub',
- '--trusted-host',
- 'pypi.pub.build.mozilla.org',
- ])
- return
- self.virtualenv_manager.install_pip_package(package)
-
- def _make_artifacts(self, tree=None, job=None, skip_cache=False):
- # Undo PATH munging that will be done by activating the virtualenv,
- # so that invoked subprocesses expecting to find system python
-        # (git cinnabar, in particular) will not find virtualenv python.
- original_path = os.environ.get('PATH', '')
- self._activate_virtualenv()
- os.environ['PATH'] = original_path
-
- for package in ('taskcluster==0.0.32',
- 'mozregression==1.0.2'):
- self._install_pip_package(package)
-
- state_dir = self._mach_context.state_dir
- cache_dir = os.path.join(state_dir, 'package-frontend')
-
- try:
- os.makedirs(cache_dir)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
- import which
-
- here = os.path.abspath(os.path.dirname(__file__))
- build_obj = MozbuildObject.from_environment(cwd=here)
-
- hg = None
- if conditions.is_hg(build_obj):
- if self._is_windows():
- hg = which.which('hg.exe')
- else:
- hg = which.which('hg')
-
- git = None
- if conditions.is_git(build_obj):
- if self._is_windows():
- git = which.which('git.exe')
- else:
- git = which.which('git')
-
- # Absolutely must come after the virtualenv is populated!
- from mozbuild.artifacts import Artifacts
- artifacts = Artifacts(tree, self.substs, self.defines, job,
- log=self.log, cache_dir=cache_dir,
- skip_cache=skip_cache, hg=hg, git=git,
- topsrcdir=self.topsrcdir)
- return artifacts
-
- @ArtifactSubCommand('artifact', 'install',
- 'Install a good pre-built artifact.')
- @CommandArgument('source', metavar='SRC', nargs='?', type=str,
- help='Where to fetch and install artifacts from. Can be omitted, in '
- 'which case the current hg repository is inspected; an hg revision; '
- 'a remote URL; or a local file.',
- default=None)
- @CommandArgument('--skip-cache', action='store_true',
- help='Skip all local caches to force re-fetching remote artifacts.',
- default=False)
- def artifact_install(self, source=None, skip_cache=False, tree=None, job=None, verbose=False):
- self._set_log_level(verbose)
- artifacts = self._make_artifacts(tree=tree, job=job, skip_cache=skip_cache)
-
- return artifacts.install_from(source, self.distdir)
-
- @ArtifactSubCommand('artifact', 'last',
- 'Print the last pre-built artifact installed.')
- def artifact_print_last(self, tree=None, job=None, verbose=False):
- self._set_log_level(verbose)
- artifacts = self._make_artifacts(tree=tree, job=job)
- artifacts.print_last()
- return 0
-
- @ArtifactSubCommand('artifact', 'print-cache',
- 'Print local artifact cache for debugging.')
- def artifact_print_cache(self, tree=None, job=None, verbose=False):
- self._set_log_level(verbose)
- artifacts = self._make_artifacts(tree=tree, job=job)
- artifacts.print_cache()
- return 0
-
- @ArtifactSubCommand('artifact', 'clear-cache',
- 'Delete local artifacts and reset local artifact cache.')
- def artifact_clear_cache(self, tree=None, job=None, verbose=False):
- self._set_log_level(verbose)
- artifacts = self._make_artifacts(tree=tree, job=job)
- artifacts.clear_cache()
- return 0
-
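
The removed ArtifactSubCommand above applies a shared set of CommandArgument decorators to each subcommand by looping over them in __call__. A generic sketch of that decorator-stacking pattern, with plain functions standing in for the mach decorators (names are illustrative):

    def with_common_args(*decorators):
        # Apply a fixed set of argument decorators to every subcommand, the
        # same way ArtifactSubCommand.__call__ loops over its argument list.
        def wrap(func):
            for dec in reversed(decorators):
                func = dec(func)
            return func
        return wrap

    def verbose_arg(func):
        func.arguments = getattr(func, 'arguments', []) + ['--verbose']
        return func

    @with_common_args(verbose_arg)
    def artifact_install():
        return 'install'

    print(artifact_install.arguments)  # ['--verbose']
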
@CommandProvider
class Vendor(MachCommandBase):
"""Vendor third-party dependencies into the source repository."""
diff --git a/python/mozbuild/mozbuild/test/backend/test_visualstudio.py b/python/mozbuild/mozbuild/test/backend/test_visualstudio.py
deleted file mode 100644
index bfc95e5527..0000000000
--- a/python/mozbuild/mozbuild/test/backend/test_visualstudio.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import unicode_literals
-
-from xml.dom.minidom import parse
-import os
-import unittest
-
-from mozbuild.backend.visualstudio import VisualStudioBackend
-from mozbuild.test.backend.common import BackendTester
-
-from mozunit import main
-
-
-class TestVisualStudioBackend(BackendTester):
- @unittest.skip('Failing inconsistently in automation.')
- def test_basic(self):
- """Ensure we can consume our stub project."""
-
- env = self._consume('visual-studio', VisualStudioBackend)
-
- msvc = os.path.join(env.topobjdir, 'msvc')
- self.assertTrue(os.path.isdir(msvc))
-
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'mozilla.sln')))
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'mozilla.props')))
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'mach.bat')))
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'binary_my_app.vcxproj')))
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'target_full.vcxproj')))
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'library_dir1.vcxproj')))
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'library_dir1.vcxproj.user')))
-
- d = parse(os.path.join(msvc, 'library_dir1.vcxproj'))
- self.assertEqual(d.documentElement.tagName, 'Project')
- els = d.getElementsByTagName('ClCompile')
- self.assertEqual(len(els), 2)
-
- # mozilla-config.h should be explicitly listed as an include.
- els = d.getElementsByTagName('NMakeForcedIncludes')
- self.assertEqual(len(els), 1)
- self.assertEqual(els[0].firstChild.nodeValue,
- '$(TopObjDir)\\dist\\include\\mozilla-config.h')
-
- # LOCAL_INCLUDES get added to the include search path.
- els = d.getElementsByTagName('NMakeIncludeSearchPath')
- self.assertEqual(len(els), 1)
- includes = els[0].firstChild.nodeValue.split(';')
- self.assertIn(os.path.normpath('$(TopSrcDir)/includeA/foo'), includes)
- self.assertIn(os.path.normpath('$(TopSrcDir)/dir1'), includes)
- self.assertIn(os.path.normpath('$(TopObjDir)/dir1'), includes)
- self.assertIn(os.path.normpath('$(TopObjDir)\\dist\\include'), includes)
-
- # DEFINES get added to the project.
- els = d.getElementsByTagName('NMakePreprocessorDefinitions')
- self.assertEqual(len(els), 1)
- defines = els[0].firstChild.nodeValue.split(';')
- self.assertIn('DEFINEFOO', defines)
- self.assertIn('DEFINEBAR=bar', defines)
-
-
-if __name__ == '__main__':
- main()
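
The removed test above inspects the generated project files with xml.dom.minidom; a minimal sketch of the same kind of check (the project file name is illustrative):

    from xml.dom.minidom import parse

    doc = parse('library_dir1.vcxproj')
    # List the NMake preprocessor defines recorded in the project, as the
    # deleted test did when asserting DEFINEFOO / DEFINEBAR=bar were present.
    for el in doc.getElementsByTagName('NMakePreprocessorDefinitions'):
        print(el.firstChild.nodeValue.split(';'))
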
diff --git a/security/apps/AppSignatureVerification.cpp b/security/apps/AppSignatureVerification.cpp
deleted file mode 100644
index aed0b70c1c..0000000000
--- a/security/apps/AppSignatureVerification.cpp
+++ /dev/null
@@ -1,1559 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "nsNSSCertificateDB.h"
-
-#include "AppTrustDomain.h"
-#include "CryptoTask.h"
-#include "NSSCertDBTrustDomain.h"
-#include "ScopedNSSTypes.h"
-#include "base64.h"
-#include "certdb.h"
-#include "mozilla/Casting.h"
-#include "mozilla/Logging.h"
-#include "mozilla/RefPtr.h"
-#include "mozilla/UniquePtr.h"
-#include "nsCOMPtr.h"
-#include "nsComponentManagerUtils.h"
-#include "nsDataSignatureVerifier.h"
-#include "nsHashKeys.h"
-#include "nsIDirectoryEnumerator.h"
-#include "nsIFile.h"
-#include "nsIFileStreams.h"
-#include "nsIInputStream.h"
-#include "nsIStringEnumerator.h"
-#include "nsIZipReader.h"
-#include "nsNSSCertificate.h"
-#include "nsNetUtil.h"
-#include "nsProxyRelease.h"
-#include "nsString.h"
-#include "nsTHashtable.h"
-#include "nssb64.h"
-#include "pkix/pkix.h"
-#include "pkix/pkixnss.h"
-#include "plstr.h"
-#include "secmime.h"
-
-
-using namespace mozilla::pkix;
-using namespace mozilla;
-using namespace mozilla::psm;
-
-extern mozilla::LazyLogModule gPIPNSSLog;
-
-namespace {
-
-// Reads a maximum of 1MB from a stream into the supplied buffer.
-// The reason for the 1MB limit is that this function is used to read
-// signature-related files and we want to avoid OOM. The uncompressed length of
-// an entry can be hundreds of times larger than the compressed version,
-// especially if someone has specifically crafted the entry to cause OOM or to
-// consume massive amounts of disk space.
-//
-// @param stream The input stream to read from.
-// @param buf The buffer that we read the stream into; this function
-//            allocates it, with one extra byte for null-termination.
-nsresult
-ReadStream(const nsCOMPtr<nsIInputStream>& stream, /*out*/ SECItem& buf)
-{
- // The size returned by Available() might be inaccurate so we need
- // to check that Available() matches up with the actual length of
- // the file.
- uint64_t length;
- nsresult rv = stream->Available(&length);
- if (NS_WARN_IF(NS_FAILED(rv))) {
- return rv;
- }
-
- // Cap the maximum accepted size of signature-related files at 1MB (which is
- // still crazily huge) to avoid OOM. The uncompressed length of an entry can be
- // hundreds of times larger than the compressed version, especially if
-  // someone has specifically crafted the entry to cause OOM or to consume
- // massive amounts of disk space.
- static const uint32_t MAX_LENGTH = 1024 * 1024;
- if (length > MAX_LENGTH) {
- return NS_ERROR_FILE_TOO_BIG;
- }
-
- // With bug 164695 in mind we +1 to leave room for null-terminating
- // the buffer.
- SECITEM_AllocItem(buf, static_cast<uint32_t>(length + 1));
-
- // buf.len == length + 1. We attempt to read length + 1 bytes
- // instead of length, so that we can check whether the metadata for
- // the entry is incorrect.
- uint32_t bytesRead;
- rv = stream->Read(BitwiseCast<char*, unsigned char*>(buf.data), buf.len,
- &bytesRead);
- if (NS_WARN_IF(NS_FAILED(rv))) {
- return rv;
- }
- if (bytesRead != length) {
- return NS_ERROR_FILE_CORRUPTED;
- }
-
- buf.data[buf.len - 1] = 0; // null-terminate
-
- return NS_OK;
-}
-
-// Finds exactly one (signature metadata) JAR entry that matches the given
-// search pattern, and then loads it. Fails if there are no matches or if
-// there is more than one match. If bufDigest is not null then on success
-// bufDigest will contain the SHA-1 digest of the entry.
-nsresult
-FindAndLoadOneEntry(nsIZipReader * zip,
- const nsACString & searchPattern,
- /*out*/ nsACString & filename,
- /*out*/ SECItem & buf,
- /*optional, out*/ Digest * bufDigest)
-{
- nsCOMPtr<nsIUTF8StringEnumerator> files;
- nsresult rv = zip->FindEntries(searchPattern, getter_AddRefs(files));
- if (NS_FAILED(rv) || !files) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- bool more;
- rv = files->HasMore(&more);
- NS_ENSURE_SUCCESS(rv, rv);
- if (!more) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- rv = files->GetNext(filename);
- NS_ENSURE_SUCCESS(rv, rv);
-
- // Check if there is more than one match, if so then error!
- rv = files->HasMore(&more);
- NS_ENSURE_SUCCESS(rv, rv);
- if (more) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- nsCOMPtr<nsIInputStream> stream;
- rv = zip->GetInputStream(filename, getter_AddRefs(stream));
- NS_ENSURE_SUCCESS(rv, rv);
-
- rv = ReadStream(stream, buf);
- if (NS_WARN_IF(NS_FAILED(rv))) {
- return NS_ERROR_SIGNED_JAR_ENTRY_INVALID;
- }
-
- if (bufDigest) {
- rv = bufDigest->DigestBuf(SEC_OID_SHA1, buf.data, buf.len - 1);
- NS_ENSURE_SUCCESS(rv, rv);
- }
-
- return NS_OK;
-}
-
-// Verify the digest of an entry. We avoid loading the entire entry into memory
-// at once, which would require memory in proportion to the size of the largest
-// entry. Instead, we require only a small, fixed amount of memory.
-//
-// @param stream an input stream from a JAR entry or file depending on whether
-// it is from a signed archive or unpacked into a directory
-// @param digestFromManifest The digest that we're supposed to check the file's
-// contents against, from the manifest
-// @param buf A scratch buffer that we use for doing the I/O, which must have
-// already been allocated. The size of this buffer is the unit
-// size of our I/O.
-nsresult
-VerifyStreamContentDigest(nsIInputStream* stream,
- const SECItem& digestFromManifest, SECItem& buf)
-{
- MOZ_ASSERT(buf.len > 0);
- if (digestFromManifest.len != SHA1_LENGTH)
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
-
- nsresult rv;
- uint64_t len64;
- rv = stream->Available(&len64);
- NS_ENSURE_SUCCESS(rv, rv);
- if (len64 > UINT32_MAX) {
- return NS_ERROR_SIGNED_JAR_ENTRY_TOO_LARGE;
- }
-
- UniquePK11Context digestContext(PK11_CreateDigestContext(SEC_OID_SHA1));
- if (!digestContext) {
- return mozilla::psm::GetXPCOMFromNSSError(PR_GetError());
- }
-
- rv = MapSECStatus(PK11_DigestBegin(digestContext.get()));
- NS_ENSURE_SUCCESS(rv, rv);
-
- uint64_t totalBytesRead = 0;
- for (;;) {
- uint32_t bytesRead;
- rv = stream->Read(BitwiseCast<char*, unsigned char*>(buf.data), buf.len,
- &bytesRead);
- NS_ENSURE_SUCCESS(rv, rv);
-
- if (bytesRead == 0) {
- break; // EOF
- }
-
- totalBytesRead += bytesRead;
- if (totalBytesRead >= UINT32_MAX) {
- return NS_ERROR_SIGNED_JAR_ENTRY_TOO_LARGE;
- }
-
- rv = MapSECStatus(PK11_DigestOp(digestContext.get(), buf.data, bytesRead));
- NS_ENSURE_SUCCESS(rv, rv);
- }
-
- if (totalBytesRead != len64) {
- // The metadata we used for Available() doesn't match the actual size of
- // the entry.
- return NS_ERROR_SIGNED_JAR_ENTRY_INVALID;
- }
-
- // Verify that the digests match.
- Digest digest;
- rv = digest.End(SEC_OID_SHA1, digestContext);
- NS_ENSURE_SUCCESS(rv, rv);
-
- if (SECITEM_CompareItem(&digestFromManifest, &digest.get()) != SECEqual) {
- return NS_ERROR_SIGNED_JAR_MODIFIED_ENTRY;
- }
-
- return NS_OK;
-}
-
-nsresult
-VerifyEntryContentDigest(nsIZipReader* zip, const nsACString& aFilename,
- const SECItem& digestFromManifest, SECItem& buf)
-{
- nsCOMPtr<nsIInputStream> stream;
- nsresult rv = zip->GetInputStream(aFilename, getter_AddRefs(stream));
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_ENTRY_MISSING;
- }
-
- return VerifyStreamContentDigest(stream, digestFromManifest, buf);
-}
-
-// @param aDir directory containing the unpacked signed archive
-// @param aFilename path of the target file relative to aDir
-// @param digestFromManifest The digest that we're supposed to check the file's
-// contents against, from the manifest
-// @param buf A scratch buffer that we use for doing the I/O
-nsresult
-VerifyFileContentDigest(nsIFile* aDir, const nsAString& aFilename,
- const SECItem& digestFromManifest, SECItem& buf)
-{
- // Find the file corresponding to the manifest path
- nsCOMPtr<nsIFile> file;
- nsresult rv = aDir->Clone(getter_AddRefs(file));
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // We don't know how to handle JARs with signed directory entries.
- // It's technically possible in the manifest but makes no sense on disk.
- // Inside an archive we just ignore them, but here we have to treat it
- // as an error because the signed bytes never got unpacked.
- int32_t pos = 0;
- int32_t slash;
- int32_t namelen = aFilename.Length();
- if (namelen == 0 || aFilename[namelen - 1] == '/') {
- return NS_ERROR_SIGNED_JAR_ENTRY_INVALID;
- }
-
- // Append path segments one by one
- do {
- slash = aFilename.FindChar('/', pos);
- int32_t segend = (slash == kNotFound) ? namelen : slash;
- rv = file->Append(Substring(aFilename, pos, (segend - pos)));
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_ENTRY_INVALID;
- }
- pos = slash + 1;
- } while (pos < namelen && slash != kNotFound);
-
- bool exists;
- rv = file->Exists(&exists);
- if (NS_FAILED(rv) || !exists) {
- return NS_ERROR_SIGNED_JAR_ENTRY_MISSING;
- }
-
- bool isDir;
- rv = file->IsDirectory(&isDir);
- if (NS_FAILED(rv) || isDir) {
- // We only support signed files, not directory entries
- return NS_ERROR_SIGNED_JAR_ENTRY_INVALID;
- }
-
- // Open an input stream for that file and verify it.
- nsCOMPtr<nsIInputStream> stream;
- rv = NS_NewLocalFileInputStream(getter_AddRefs(stream), file, -1, -1,
- nsIFileInputStream::CLOSE_ON_EOF);
- if (NS_FAILED(rv) || !stream) {
- return NS_ERROR_SIGNED_JAR_ENTRY_MISSING;
- }
-
- return VerifyStreamContentDigest(stream, digestFromManifest, buf);
-}
-
-// On input, nextLineStart is the start of the current line. On output,
-// nextLineStart is the start of the next line.
-nsresult
-ReadLine(/*in/out*/ const char* & nextLineStart, /*out*/ nsCString & line,
- bool allowContinuations = true)
-{
- line.Truncate();
- size_t previousLength = 0;
- size_t currentLength = 0;
- for (;;) {
- const char* eol = PL_strpbrk(nextLineStart, "\r\n");
-
- if (!eol) { // Reached end of file before newline
- eol = nextLineStart + strlen(nextLineStart);
- }
-
- previousLength = currentLength;
- line.Append(nextLineStart, eol - nextLineStart);
- currentLength = line.Length();
-
- // The spec says "No line may be longer than 72 bytes (not characters)"
- // in its UTF8-encoded form.
- static const size_t lineLimit = 72;
- if (currentLength - previousLength > lineLimit) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // The spec says: "Implementations should support 65535-byte
- // (not character) header values..."
- if (currentLength > 65535) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (*eol == '\r') {
- ++eol;
- }
- if (*eol == '\n') {
- ++eol;
- }
-
- nextLineStart = eol;
-
- if (*eol != ' ') {
- // not a continuation
- return NS_OK;
- }
-
- // continuation
- if (!allowContinuations) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- ++nextLineStart; // skip space and keep appending
- }
-}
-
-// The header strings are defined in the JAR specification.
-#define JAR_MF_SEARCH_STRING "(M|/M)ETA-INF/(M|m)(ANIFEST|anifest).(MF|mf)$"
-#define JAR_SF_SEARCH_STRING "(M|/M)ETA-INF/*.(SF|sf)$"
-#define JAR_RSA_SEARCH_STRING "(M|/M)ETA-INF/*.(RSA|rsa)$"
-#define JAR_META_DIR "META-INF"
-#define JAR_MF_HEADER "Manifest-Version: 1.0"
-#define JAR_SF_HEADER "Signature-Version: 1.0"
-
-nsresult
-ParseAttribute(const nsAutoCString & curLine,
- /*out*/ nsAutoCString & attrName,
- /*out*/ nsAutoCString & attrValue)
-{
- // Find the colon that separates the name from the value.
- int32_t colonPos = curLine.FindChar(':');
- if (colonPos == kNotFound) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // set attrName to the name, skipping spaces between the name and colon
- int32_t nameEnd = colonPos;
- for (;;) {
- if (nameEnd == 0) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID; // colon with no name
- }
- if (curLine[nameEnd - 1] != ' ')
- break;
- --nameEnd;
- }
- curLine.Left(attrName, nameEnd);
-
- // Set attrValue to the value, skipping spaces between the colon and the
- // value. The value may be empty.
- int32_t valueStart = colonPos + 1;
- int32_t curLineLength = curLine.Length();
- while (valueStart != curLineLength && curLine[valueStart] == ' ') {
- ++valueStart;
- }
- curLine.Right(attrValue, curLineLength - valueStart);
-
- return NS_OK;
-}
-
-// Parses the version line of the MF or SF header.
-nsresult
-CheckManifestVersion(const char* & nextLineStart,
- const nsACString & expectedHeader)
-{
- // The JAR spec says: "Manifest-Version and Signature-Version must be first,
- // and in exactly that case (so that they can be recognized easily as magic
- // strings)."
- nsAutoCString curLine;
- nsresult rv = ReadLine(nextLineStart, curLine, false);
- if (NS_FAILED(rv)) {
- return rv;
- }
- if (!curLine.Equals(expectedHeader)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
- return NS_OK;
-}
-
-// Parses a signature file (SF) as defined in the JDK 8 JAR Specification.
-//
-// The SF file *must* contain exactly one SHA1-Digest-Manifest attribute in
-// the main section. All other sections are ignored. This means that this will
-// NOT parse old-style signature files that have separate digests per entry.
-// The JDK8 x-Digest-Manifest variant is better because:
-//
-// (1) It allows us to follow the principle that we should minimize the
-// processing of data that we do before we verify its signature. In
-// particular, with the x-Digest-Manifest style, we can verify the digest
-// of MANIFEST.MF before we parse it, which prevents malicious JARs
-// exploiting our MANIFEST.MF parser.
-// (2) It is more time-efficient and space-efficient to have one
-// x-Digest-Manifest instead of multiple x-Digest values.
-//
-// In order to get benefit (1), we do NOT implement the fallback to the older
-// mechanism as the spec requires/suggests. Also, for simplicity's sake, we only
-// support exactly one SHA1-Digest-Manifest attribute, and no other
-// algorithms.
-//
-// filebuf must be null-terminated. On output, mfDigest will contain the
-// decoded value of SHA1-Digest-Manifest.
-nsresult
-ParseSF(const char* filebuf, /*out*/ SECItem & mfDigest)
-{
- nsresult rv;
-
- const char* nextLineStart = filebuf;
- rv = CheckManifestVersion(nextLineStart, NS_LITERAL_CSTRING(JAR_SF_HEADER));
- if (NS_FAILED(rv))
- return rv;
-
- // Find SHA1-Digest-Manifest
- for (;;) {
- nsAutoCString curLine;
- rv = ReadLine(nextLineStart, curLine);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- if (curLine.Length() == 0) {
- // End of main section (blank line or end-of-file), and no
- // SHA1-Digest-Manifest found.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- nsAutoCString attrName;
- nsAutoCString attrValue;
- rv = ParseAttribute(curLine, attrName, attrValue);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- if (attrName.LowerCaseEqualsLiteral("sha1-digest-manifest")) {
- rv = MapSECStatus(ATOB_ConvertAsciiToItem(&mfDigest, attrValue.get()));
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // There could be multiple SHA1-Digest-Manifest attributes, which
- // would be an error, but it's better to just skip any erroneous
- // duplicate entries rather than trying to detect them, because:
- //
- // (1) It's simpler, and simpler generally means more secure
- // (2) An attacker can't make us accept a JAR we would otherwise
- // reject just by adding additional SHA1-Digest-Manifest
- // attributes.
- break;
- }
-
- // ignore unrecognized attributes
- }
-
- return NS_OK;
-}
-
-// Parses MANIFEST.MF. The filenames of all entries will be returned in
-// mfItems. buf must be a pre-allocated scratch buffer that is used for doing
-// I/O.
-nsresult
-ParseMF(const char* filebuf, nsIZipReader * zip,
- /*out*/ nsTHashtable<nsCStringHashKey> & mfItems,
- ScopedAutoSECItem & buf)
-{
- nsresult rv;
-
- const char* nextLineStart = filebuf;
-
- rv = CheckManifestVersion(nextLineStart, NS_LITERAL_CSTRING(JAR_MF_HEADER));
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Skip the rest of the header section, which ends with a blank line.
- {
- nsAutoCString line;
- do {
- rv = ReadLine(nextLineStart, line);
- if (NS_FAILED(rv)) {
- return rv;
- }
- } while (line.Length() > 0);
-
- // Manifest containing no file entries is OK, though useless.
- if (*nextLineStart == '\0') {
- return NS_OK;
- }
- }
-
- nsAutoCString curItemName;
- ScopedAutoSECItem digest;
-
- for (;;) {
- nsAutoCString curLine;
- rv = ReadLine(nextLineStart, curLine);
- NS_ENSURE_SUCCESS(rv, rv);
-
- if (curLine.Length() == 0) {
- // end of section (blank line or end-of-file)
-
- if (curItemName.Length() == 0) {
- // '...Each section must start with an attribute with the name as
- // "Name",...', so every section must have a Name attribute.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (digest.len == 0) {
- // We require every entry to have a digest, since we require every
- // entry to be signed and we don't allow duplicate entries.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (mfItems.Contains(curItemName)) {
- // Duplicate entry
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // Verify that the entry's content digest matches the digest from this
- // MF section.
- rv = VerifyEntryContentDigest(zip, curItemName, digest, buf);
- if (NS_FAILED(rv))
- return rv;
-
- mfItems.PutEntry(curItemName);
-
- if (*nextLineStart == '\0') // end-of-file
- break;
-
- // reset so we know we haven't encountered either of these for the next
- // item yet.
- curItemName.Truncate();
- digest.reset();
-
- continue; // skip the rest of the loop below
- }
-
- nsAutoCString attrName;
- nsAutoCString attrValue;
- rv = ParseAttribute(curLine, attrName, attrValue);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Lines to look for:
-
- // (1) Digest:
- if (attrName.LowerCaseEqualsLiteral("sha1-digest"))
- {
- if (digest.len > 0) // multiple SHA1 digests in section
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
-
- rv = MapSECStatus(ATOB_ConvertAsciiToItem(&digest, attrValue.get()));
- if (NS_FAILED(rv))
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
-
- continue;
- }
-
- // (2) Name: associates this manifest section with a file in the jar.
- if (attrName.LowerCaseEqualsLiteral("name"))
- {
- if (MOZ_UNLIKELY(curItemName.Length() > 0)) // multiple names in section
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
-
- if (MOZ_UNLIKELY(attrValue.Length() == 0))
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
-
- curItemName = attrValue;
-
- continue;
- }
-
- // (3) Magic: the only other must-understand attribute
- if (attrName.LowerCaseEqualsLiteral("magic")) {
- // We don't understand any magic, so we can't verify an entry that
- // requires magic. Since we require every entry to have a valid
- // signature, we have no choice but to reject the entry.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // unrecognized attributes must be ignored
- }
-
- return NS_OK;
-}
-
-struct VerifyCertificateContext {
- AppTrustedRoot trustedRoot;
- UniqueCERTCertList& builtChain;
-};
-
-nsresult
-VerifyCertificate(CERTCertificate* signerCert, void* voidContext, void* pinArg)
-{
- // TODO: null pinArg is tolerated.
- if (NS_WARN_IF(!signerCert) || NS_WARN_IF(!voidContext)) {
- return NS_ERROR_INVALID_ARG;
- }
- const VerifyCertificateContext& context =
- *static_cast<const VerifyCertificateContext*>(voidContext);
-
- AppTrustDomain trustDomain(context.builtChain, pinArg);
- if (trustDomain.SetTrustedRoot(context.trustedRoot) != SECSuccess) {
- return MapSECStatus(SECFailure);
- }
- Input certDER;
- mozilla::pkix::Result rv = certDER.Init(signerCert->derCert.data,
- signerCert->derCert.len);
- if (rv != Success) {
- return mozilla::psm::GetXPCOMFromNSSError(MapResultToPRErrorCode(rv));
- }
-
- rv = BuildCertChain(trustDomain, certDER, Now(),
- EndEntityOrCA::MustBeEndEntity,
- KeyUsage::digitalSignature,
- KeyPurposeId::id_kp_codeSigning,
- CertPolicyId::anyPolicy,
- nullptr/*stapledOCSPResponse*/);
- if (rv == mozilla::pkix::Result::ERROR_EXPIRED_CERTIFICATE) {
- // For code-signing you normally need trusted 3rd-party timestamps to
- // handle expiration properly. The signer could always mess with their
- // system clock so you can't trust the certificate was un-expired when
- // the signing took place. The choice is either to ignore expiration
- // or to enforce expiration at time of use. The latter leads to the
- // user-hostile result that perfectly good code stops working.
- //
- // Our package format doesn't support timestamps (nor do we have a
- // trusted 3rd party timestamper), but since we sign all of our apps and
- // add-ons ourselves we can trust ourselves not to mess with the clock
- // on the signing systems. We also have a revocation mechanism if we
- // need it. It's OK to ignore cert expiration under these conditions.
- //
- // This is an invalid approach if
- // * we issue certs to let others sign their own packages
- // * mozilla::pkix returns "expired" when there are "worse" problems
- // with the certificate or chain.
- // (see bug 1267318)
- rv = Success;
- }
- if (rv != Success) {
- return mozilla::psm::GetXPCOMFromNSSError(MapResultToPRErrorCode(rv));
- }
-
- return NS_OK;
-}
-
-nsresult
-VerifySignature(AppTrustedRoot trustedRoot, const SECItem& buffer,
- const SECItem& detachedDigest,
- /*out*/ UniqueCERTCertList& builtChain)
-{
- // Currently, this function is only called within the CalculateResult() method
- // of CryptoTasks. As such, NSS should not be shut down at this point and the
- // CryptoTask implementation should already hold a nsNSSShutDownPreventionLock.
-  // We acquire a nsNSSShutDownPreventionLock here solely so that we can prove
-  // to VerifyCMSDetachedSignatureIncludingCertificate() that we hold one.
- nsNSSShutDownPreventionLock locker;
- VerifyCertificateContext context = { trustedRoot, builtChain };
- // XXX: missing pinArg
- return VerifyCMSDetachedSignatureIncludingCertificate(buffer, detachedDigest,
- VerifyCertificate,
- &context, nullptr,
- locker);
-}
-
-NS_IMETHODIMP
-OpenSignedAppFile(AppTrustedRoot aTrustedRoot, nsIFile* aJarFile,
- /*out, optional */ nsIZipReader** aZipReader,
- /*out, optional */ nsIX509Cert** aSignerCert)
-{
- NS_ENSURE_ARG_POINTER(aJarFile);
-
- if (aZipReader) {
- *aZipReader = nullptr;
- }
-
- if (aSignerCert) {
- *aSignerCert = nullptr;
- }
-
- nsresult rv;
-
- static NS_DEFINE_CID(kZipReaderCID, NS_ZIPREADER_CID);
- nsCOMPtr<nsIZipReader> zip = do_CreateInstance(kZipReaderCID, &rv);
- NS_ENSURE_SUCCESS(rv, rv);
-
- rv = zip->Open(aJarFile);
- NS_ENSURE_SUCCESS(rv, rv);
-
- // Signature (RSA) file
- nsAutoCString sigFilename;
- ScopedAutoSECItem sigBuffer;
- rv = FindAndLoadOneEntry(zip, nsLiteralCString(JAR_RSA_SEARCH_STRING),
- sigFilename, sigBuffer, nullptr);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_NOT_SIGNED;
- }
-
- // Signature (SF) file
- nsAutoCString sfFilename;
- ScopedAutoSECItem sfBuffer;
- Digest sfCalculatedDigest;
- rv = FindAndLoadOneEntry(zip, NS_LITERAL_CSTRING(JAR_SF_SEARCH_STRING),
- sfFilename, sfBuffer, &sfCalculatedDigest);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- sigBuffer.type = siBuffer;
- UniqueCERTCertList builtChain;
- rv = VerifySignature(aTrustedRoot, sigBuffer, sfCalculatedDigest.get(),
- builtChain);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- ScopedAutoSECItem mfDigest;
- rv = ParseSF(BitwiseCast<char*, unsigned char*>(sfBuffer.data), mfDigest);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Manifest (MF) file
- nsAutoCString mfFilename;
- ScopedAutoSECItem manifestBuffer;
- Digest mfCalculatedDigest;
- rv = FindAndLoadOneEntry(zip, NS_LITERAL_CSTRING(JAR_MF_SEARCH_STRING),
- mfFilename, manifestBuffer, &mfCalculatedDigest);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- if (SECITEM_CompareItem(&mfDigest, &mfCalculatedDigest.get()) != SECEqual) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // Allocate the I/O buffer only once per JAR, instead of once per entry, in
- // order to minimize malloc/free calls and in order to avoid fragmenting
- // memory.
- ScopedAutoSECItem buf(128 * 1024);
-
- nsTHashtable<nsCStringHashKey> items;
-
- rv = ParseMF(BitwiseCast<char*, unsigned char*>(manifestBuffer.data), zip,
- items, buf);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Verify every entry in the file.
- nsCOMPtr<nsIUTF8StringEnumerator> entries;
- rv = zip->FindEntries(EmptyCString(), getter_AddRefs(entries));
- if (NS_SUCCEEDED(rv) && !entries) {
- rv = NS_ERROR_UNEXPECTED;
- }
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- for (;;) {
- bool hasMore;
- rv = entries->HasMore(&hasMore);
- NS_ENSURE_SUCCESS(rv, rv);
-
- if (!hasMore) {
- break;
- }
-
- nsAutoCString entryFilename;
- rv = entries->GetNext(entryFilename);
- NS_ENSURE_SUCCESS(rv, rv);
-
- MOZ_LOG(gPIPNSSLog, LogLevel::Debug, ("Verifying digests for %s",
- entryFilename.get()));
-
- // The files that comprise the signature mechanism are not covered by the
- // signature.
- //
- // XXX: This is OK for a single signature, but doesn't work for
- // multiple signatures, because the metadata for the other signatures
- // is not signed either.
- if (entryFilename == mfFilename ||
- entryFilename == sfFilename ||
- entryFilename == sigFilename) {
- continue;
- }
-
- if (entryFilename.Length() == 0) {
- return NS_ERROR_SIGNED_JAR_ENTRY_INVALID;
- }
-
- // Entries with names that end in "/" are directory entries, which are not
- // signed.
- //
- // XXX: As long as we don't unpack the JAR into the filesystem, the "/"
- // entries are harmless. But, it is not clear what the security
- // implications of directory entries are if/when we were to unpackage the
- // JAR into the filesystem.
- if (entryFilename[entryFilename.Length() - 1] == '/') {
- continue;
- }
-
- nsCStringHashKey * item = items.GetEntry(entryFilename);
- if (!item) {
- return NS_ERROR_SIGNED_JAR_UNSIGNED_ENTRY;
- }
-
- // Remove the item so we can check for leftover items later
- items.RemoveEntry(item);
- }
-
- // We verified that every entry that we require to be signed is signed. But,
- // were there any missing entries--that is, entries that are mentioned in the
- // manifest but missing from the archive?
- if (items.Count() != 0) {
- return NS_ERROR_SIGNED_JAR_ENTRY_MISSING;
- }
-
- // Return the reader to the caller if they want it
- if (aZipReader) {
- zip.forget(aZipReader);
- }
-
- // Return the signer's certificate to the reader if they want it.
- // XXX: We should return an nsIX509CertList with the whole validated chain.
- if (aSignerCert) {
- CERTCertListNode* signerCertNode = CERT_LIST_HEAD(builtChain);
- if (!signerCertNode || CERT_LIST_END(signerCertNode, builtChain) ||
- !signerCertNode->cert) {
- return NS_ERROR_FAILURE;
- }
- nsCOMPtr<nsIX509Cert> signerCert =
- nsNSSCertificate::Create(signerCertNode->cert);
- NS_ENSURE_TRUE(signerCert, NS_ERROR_OUT_OF_MEMORY);
- signerCert.forget(aSignerCert);
- }
-
- return NS_OK;
-}
-
-nsresult
-VerifySignedManifest(AppTrustedRoot aTrustedRoot,
- nsIInputStream* aManifestStream,
- nsIInputStream* aSignatureStream,
- /*out, optional */ nsIX509Cert** aSignerCert)
-{
- NS_ENSURE_ARG(aManifestStream);
- NS_ENSURE_ARG(aSignatureStream);
-
- if (aSignerCert) {
- *aSignerCert = nullptr;
- }
-
- // Load signature file in buffer
- ScopedAutoSECItem signatureBuffer;
- nsresult rv = ReadStream(aSignatureStream, signatureBuffer);
- if (NS_FAILED(rv)) {
- return rv;
- }
- signatureBuffer.type = siBuffer;
-
- // Load manifest file in buffer
- ScopedAutoSECItem manifestBuffer;
- rv = ReadStream(aManifestStream, manifestBuffer);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Calculate SHA1 digest of the manifest buffer
- Digest manifestCalculatedDigest;
- rv = manifestCalculatedDigest.DigestBuf(SEC_OID_SHA1,
- manifestBuffer.data,
- manifestBuffer.len - 1); // buffer is null terminated
- if (NS_WARN_IF(NS_FAILED(rv))) {
- return rv;
- }
-
- // Get base64 encoded string from manifest buffer digest
- UniquePORTString
- base64EncDigest(NSSBase64_EncodeItem(nullptr, nullptr, 0,
- const_cast<SECItem*>(&manifestCalculatedDigest.get())));
- if (NS_WARN_IF(!base64EncDigest)) {
- return NS_ERROR_OUT_OF_MEMORY;
- }
-
- // Calculate SHA1 digest of the base64 encoded string
- Digest doubleDigest;
- rv = doubleDigest.DigestBuf(SEC_OID_SHA1,
- BitwiseCast<uint8_t*, char*>(base64EncDigest.get()),
- strlen(base64EncDigest.get()));
- if (NS_WARN_IF(NS_FAILED(rv))) {
- return rv;
- }
-
- // Verify the manifest signature (signed digest of the base64 encoded string)
- UniqueCERTCertList builtChain;
- rv = VerifySignature(aTrustedRoot, signatureBuffer,
- doubleDigest.get(), builtChain);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Return the signer's certificate to the reader if they want it.
- if (aSignerCert) {
- CERTCertListNode* signerCertNode = CERT_LIST_HEAD(builtChain);
- if (!signerCertNode || CERT_LIST_END(signerCertNode, builtChain) ||
- !signerCertNode->cert) {
- return NS_ERROR_FAILURE;
- }
- nsCOMPtr<nsIX509Cert> signerCert =
- nsNSSCertificate::Create(signerCertNode->cert);
- if (NS_WARN_IF(!signerCert)) {
- return NS_ERROR_OUT_OF_MEMORY;
- }
-
- signerCert.forget(aSignerCert);
- }
-
- return NS_OK;
-}
-
-class OpenSignedAppFileTask final : public CryptoTask
-{
-public:
- OpenSignedAppFileTask(AppTrustedRoot aTrustedRoot, nsIFile* aJarFile,
- nsIOpenSignedAppFileCallback* aCallback)
- : mTrustedRoot(aTrustedRoot)
- , mJarFile(aJarFile)
- , mCallback(new nsMainThreadPtrHolder<nsIOpenSignedAppFileCallback>(aCallback))
- {
- }
-
-private:
- virtual nsresult CalculateResult() override
- {
- return OpenSignedAppFile(mTrustedRoot, mJarFile,
- getter_AddRefs(mZipReader),
- getter_AddRefs(mSignerCert));
- }
-
- // nsNSSCertificate implements nsNSSShutdownObject, so there's nothing that
- // needs to be released
- virtual void ReleaseNSSResources() override { }
-
- virtual void CallCallback(nsresult rv) override
- {
- (void) mCallback->OpenSignedAppFileFinished(rv, mZipReader, mSignerCert);
- }
-
- const AppTrustedRoot mTrustedRoot;
- const nsCOMPtr<nsIFile> mJarFile;
- nsMainThreadPtrHandle<nsIOpenSignedAppFileCallback> mCallback;
- nsCOMPtr<nsIZipReader> mZipReader; // out
- nsCOMPtr<nsIX509Cert> mSignerCert; // out
-};
-
-class VerifySignedmanifestTask final : public CryptoTask
-{
-public:
- VerifySignedmanifestTask(AppTrustedRoot aTrustedRoot,
- nsIInputStream* aManifestStream,
- nsIInputStream* aSignatureStream,
- nsIVerifySignedManifestCallback* aCallback)
- : mTrustedRoot(aTrustedRoot)
- , mManifestStream(aManifestStream)
- , mSignatureStream(aSignatureStream)
- , mCallback(
- new nsMainThreadPtrHolder<nsIVerifySignedManifestCallback>(aCallback))
- {
- }
-
-private:
- virtual nsresult CalculateResult() override
- {
- return VerifySignedManifest(mTrustedRoot, mManifestStream,
- mSignatureStream, getter_AddRefs(mSignerCert));
- }
-
- // nsNSSCertificate implements nsNSSShutdownObject, so there's nothing that
- // needs to be released
- virtual void ReleaseNSSResources() override { }
-
- virtual void CallCallback(nsresult rv) override
- {
- (void) mCallback->VerifySignedManifestFinished(rv, mSignerCert);
- }
-
- const AppTrustedRoot mTrustedRoot;
- const nsCOMPtr<nsIInputStream> mManifestStream;
- const nsCOMPtr<nsIInputStream> mSignatureStream;
- nsMainThreadPtrHandle<nsIVerifySignedManifestCallback> mCallback;
- nsCOMPtr<nsIX509Cert> mSignerCert; // out
-};
-
-} // unnamed namespace
-
-NS_IMETHODIMP
-nsNSSCertificateDB::OpenSignedAppFileAsync(
- AppTrustedRoot aTrustedRoot, nsIFile* aJarFile,
- nsIOpenSignedAppFileCallback* aCallback)
-{
- NS_ENSURE_ARG_POINTER(aJarFile);
- NS_ENSURE_ARG_POINTER(aCallback);
- RefPtr<OpenSignedAppFileTask> task(new OpenSignedAppFileTask(aTrustedRoot,
- aJarFile,
- aCallback));
- return task->Dispatch("SignedJAR");
-}
-
-NS_IMETHODIMP
-nsNSSCertificateDB::VerifySignedManifestAsync(
- AppTrustedRoot aTrustedRoot, nsIInputStream* aManifestStream,
- nsIInputStream* aSignatureStream, nsIVerifySignedManifestCallback* aCallback)
-{
- NS_ENSURE_ARG_POINTER(aManifestStream);
- NS_ENSURE_ARG_POINTER(aSignatureStream);
- NS_ENSURE_ARG_POINTER(aCallback);
-
- RefPtr<VerifySignedmanifestTask> task(
- new VerifySignedmanifestTask(aTrustedRoot, aManifestStream,
- aSignatureStream, aCallback));
- return task->Dispatch("SignedManifest");
-}
-
-
-//
-// Signature verification for archives unpacked into a file structure
-//
-
-// Finds the "*.rsa" signature file in the META-INF directory and returns
-// the name. It is an error if there is no .rsa file or more than one.
-nsresult
-FindSignatureFilename(nsIFile* aMetaDir,
- /*out*/ nsAString& aFilename)
-{
- nsCOMPtr<nsISimpleEnumerator> entries;
- nsresult rv = aMetaDir->GetDirectoryEntries(getter_AddRefs(entries));
- nsCOMPtr<nsIDirectoryEnumerator> files = do_QueryInterface(entries);
- if (NS_FAILED(rv) || !files) {
- return NS_ERROR_SIGNED_JAR_NOT_SIGNED;
- }
-
- bool found = false;
- nsCOMPtr<nsIFile> file;
- rv = files->GetNextFile(getter_AddRefs(file));
-
- while (NS_SUCCEEDED(rv) && file) {
- nsAutoString leafname;
- rv = file->GetLeafName(leafname);
- if (NS_SUCCEEDED(rv)) {
- if (StringEndsWith(leafname, NS_LITERAL_STRING(".rsa"))) {
- if (!found) {
- found = true;
- aFilename = leafname;
- } else {
- // second signature file is an error
- rv = NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- break;
- }
- }
- rv = files->GetNextFile(getter_AddRefs(file));
- }
- }
-
- if (!found) {
- rv = NS_ERROR_SIGNED_JAR_NOT_SIGNED;
- }
-
- files->Close();
- return rv;
-}
-
-// Loads the signature metadata file that matches the given filename in
-// the passed-in META-INF directory. If aBufDigest is not null then on
-// success aBufDigest will contain the SHA-1 digest of the entry.
-nsresult
-LoadOneMetafile(nsIFile* aMetaDir,
- const nsAString& aFilename,
- /*out*/ SECItem& aBuf,
- /*optional, out*/ Digest* aBufDigest)
-{
- nsCOMPtr<nsIFile> metafile;
- nsresult rv = aMetaDir->Clone(getter_AddRefs(metafile));
- NS_ENSURE_SUCCESS(rv, rv);
-
- rv = metafile->Append(aFilename);
- NS_ENSURE_SUCCESS(rv, rv);
-
- bool exists;
- rv = metafile->Exists(&exists);
- if (NS_FAILED(rv) || !exists) {
-    // A missing .rsa file would just mean "unsigned", but FindSignatureFilename()
-    // already found one, so a missing metadata file here means a broken signature.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- nsCOMPtr<nsIInputStream> stream;
- rv = NS_NewLocalFileInputStream(getter_AddRefs(stream), metafile);
- NS_ENSURE_SUCCESS(rv, rv);
-
- rv = ReadStream(stream, aBuf);
- stream->Close();
- NS_ENSURE_SUCCESS(rv, rv);
-
- if (aBufDigest) {
- rv = aBufDigest->DigestBuf(SEC_OID_SHA1, aBuf.data, aBuf.len - 1);
- NS_ENSURE_SUCCESS(rv, rv);
- }
-
- return NS_OK;
-}
-
-// Parses MANIFEST.MF and verifies the contents of the unpacked files
-// listed in the manifest.
-// The filenames of all entries will be returned in aMfItems. aBuf must
-// be a pre-allocated scratch buffer that is used for doing I/O.
-nsresult
-ParseMFUnpacked(const char* aFilebuf, nsIFile* aDir,
- /*out*/ nsTHashtable<nsStringHashKey>& aMfItems,
- ScopedAutoSECItem& aBuf)
-{
- nsresult rv;
-
- const char* nextLineStart = aFilebuf;
-
- rv = CheckManifestVersion(nextLineStart, NS_LITERAL_CSTRING(JAR_MF_HEADER));
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Skip the rest of the header section, which ends with a blank line.
- {
- nsAutoCString line;
- do {
- rv = ReadLine(nextLineStart, line);
- if (NS_FAILED(rv)) {
- return rv;
- }
- } while (line.Length() > 0);
-
- // Manifest containing no file entries is OK, though useless.
- if (*nextLineStart == '\0') {
- return NS_OK;
- }
- }
-
- nsAutoString curItemName;
- ScopedAutoSECItem digest;
-
- for (;;) {
- nsAutoCString curLine;
- rv = ReadLine(nextLineStart, curLine);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- if (curLine.Length() == 0) {
- // end of section (blank line or end-of-file)
-
- if (curItemName.Length() == 0) {
- // '...Each section must start with an attribute with the name as
- // "Name",...', so every section must have a Name attribute.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (digest.len == 0) {
- // We require every entry to have a digest, since we require every
- // entry to be signed and we don't allow duplicate entries.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (aMfItems.Contains(curItemName)) {
- // Duplicate entry
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // Verify that the file's content digest matches the digest from this
- // MF section.
- rv = VerifyFileContentDigest(aDir, curItemName, digest, aBuf);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- aMfItems.PutEntry(curItemName);
-
- if (*nextLineStart == '\0') {
- // end-of-file
- break;
- }
-
- // reset so we know we haven't encountered either of these for the next
- // item yet.
- curItemName.Truncate();
- digest.reset();
-
- continue; // skip the rest of the loop below
- }
-
- nsAutoCString attrName;
- nsAutoCString attrValue;
- rv = ParseAttribute(curLine, attrName, attrValue);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Lines to look for:
-
- // (1) Digest:
- if (attrName.LowerCaseEqualsLiteral("sha1-digest")) {
- if (digest.len > 0) {
- // multiple SHA1 digests in section
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- rv = MapSECStatus(ATOB_ConvertAsciiToItem(&digest, attrValue.get()));
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- continue;
- }
-
- // (2) Name: associates this manifest section with a file in the jar.
- if (attrName.LowerCaseEqualsLiteral("name")) {
- if (MOZ_UNLIKELY(curItemName.Length() > 0)) {
- // multiple names in section
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (MOZ_UNLIKELY(attrValue.Length() == 0)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- curItemName = NS_ConvertUTF8toUTF16(attrValue);
-
- continue;
- }
-
- // (3) Magic: the only other must-understand attribute
- if (attrName.LowerCaseEqualsLiteral("magic")) {
- // We don't understand any magic, so we can't verify an entry that
- // requires magic. Since we require every entry to have a valid
- // signature, we have no choice but to reject the entry.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // unrecognized attributes must be ignored
- }
-
- return NS_OK;
-}
-
-// Recursively check a directory tree for files not in the list of
-// verified files we found in the manifest. Each file we find is checked
-// against the entries from the manifest: if it is not there, it is
-// unsigned and we can stop looking. Otherwise we remove it from the
-// collection so we can check for leftovers later.
-//
-// @param aDir Directory to check
-// @param aPath Relative path to that directory (to check against aItems)
-// @param aItems All the files found
-// @param *Filename signature files that won't be in the manifest
-nsresult
-CheckDirForUnsignedFiles(nsIFile* aDir,
- const nsString& aPath,
- /* in/out */ nsTHashtable<nsStringHashKey>& aItems,
- const nsAString& sigFilename,
- const nsAString& sfFilename,
- const nsAString& mfFilename)
-{
- nsCOMPtr<nsISimpleEnumerator> entries;
- nsresult rv = aDir->GetDirectoryEntries(getter_AddRefs(entries));
- nsCOMPtr<nsIDirectoryEnumerator> files = do_QueryInterface(entries);
- if (NS_FAILED(rv) || !files) {
- return NS_ERROR_SIGNED_JAR_ENTRY_MISSING;
- }
-
- bool inMeta = StringBeginsWith(aPath, NS_LITERAL_STRING(JAR_META_DIR));
-
- while (NS_SUCCEEDED(rv)) {
- nsCOMPtr<nsIFile> file;
- rv = files->GetNextFile(getter_AddRefs(file));
- if (NS_FAILED(rv) || !file) {
- break;
- }
-
- nsAutoString leafname;
- rv = file->GetLeafName(leafname);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- nsAutoString curName(aPath + leafname);
-
- bool isDir;
- rv = file->IsDirectory(&isDir);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // if it's a directory we need to recurse
- if (isDir) {
- curName.Append(NS_LITERAL_STRING("/"));
- rv = CheckDirForUnsignedFiles(file, curName, aItems,
- sigFilename, sfFilename, mfFilename);
- } else {
- // The files that comprise the signature mechanism are not covered by the
- // signature.
- //
- // XXX: This is OK for a single signature, but doesn't work for
- // multiple signatures because the metadata for the other signatures
- // is not signed either.
- if (inMeta && ( leafname == sigFilename ||
- leafname == sfFilename ||
- leafname == mfFilename )) {
- continue;
- }
-
- // make sure the current file was found in the manifest
- nsStringHashKey* item = aItems.GetEntry(curName);
- if (!item) {
- return NS_ERROR_SIGNED_JAR_UNSIGNED_ENTRY;
- }
-
- // Remove the item so we can check for leftover items later
- aItems.RemoveEntry(item);
- }
- }
- files->Close();
- return rv;
-}
-
-/*
- * Verify the signature of a directory structure as if it were a
- * signed JAR file (used for unpacked JARs)
- */
-nsresult
-VerifySignedDirectory(AppTrustedRoot aTrustedRoot,
- nsIFile* aDirectory,
- /*out, optional */ nsIX509Cert** aSignerCert)
-{
- NS_ENSURE_ARG_POINTER(aDirectory);
-
- if (aSignerCert) {
- *aSignerCert = nullptr;
- }
-
- // Make sure there's a META-INF directory
-
- nsCOMPtr<nsIFile> metaDir;
- nsresult rv = aDirectory->Clone(getter_AddRefs(metaDir));
- if (NS_FAILED(rv)) {
- return rv;
- }
- rv = metaDir->Append(NS_LITERAL_STRING(JAR_META_DIR));
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- bool exists;
- rv = metaDir->Exists(&exists);
- if (NS_FAILED(rv) || !exists) {
- return NS_ERROR_SIGNED_JAR_NOT_SIGNED;
- }
- bool isDirectory;
- rv = metaDir->IsDirectory(&isDirectory);
- if (NS_FAILED(rv) || !isDirectory) {
- return NS_ERROR_SIGNED_JAR_NOT_SIGNED;
- }
-
- // Find and load the Signature (RSA) file
-
- nsAutoString sigFilename;
- rv = FindSignatureFilename(metaDir, sigFilename);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- ScopedAutoSECItem sigBuffer;
- rv = LoadOneMetafile(metaDir, sigFilename, sigBuffer, nullptr);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_NOT_SIGNED;
- }
-
- // Load the signature (SF) file and verify the signature.
- // The .sf and .rsa files must have the same name apart from the extension.
-
- nsAutoString sfFilename(Substring(sigFilename, 0, sigFilename.Length() - 3)
- + NS_LITERAL_STRING("sf"));
-
- ScopedAutoSECItem sfBuffer;
- Digest sfCalculatedDigest;
- rv = LoadOneMetafile(metaDir, sfFilename, sfBuffer, &sfCalculatedDigest);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- sigBuffer.type = siBuffer;
- UniqueCERTCertList builtChain;
- rv = VerifySignature(aTrustedRoot, sigBuffer, sfCalculatedDigest.get(),
- builtChain);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // Get the expected manifest hash from the signed .sf file
-
- ScopedAutoSECItem mfDigest;
- rv = ParseSF(BitwiseCast<char*, unsigned char*>(sfBuffer.data), mfDigest);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // Load manifest (MF) file and verify signature
-
- nsAutoString mfFilename(NS_LITERAL_STRING("manifest.mf"));
- ScopedAutoSECItem manifestBuffer;
- Digest mfCalculatedDigest;
- rv = LoadOneMetafile(metaDir, mfFilename, manifestBuffer, &mfCalculatedDigest);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (SECITEM_CompareItem(&mfDigest, &mfCalculatedDigest.get()) != SECEqual) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // Parse manifest and verify signed hash of all listed files
-
- // Allocate the I/O buffer only once per JAR, instead of once per entry, in
- // order to minimize malloc/free calls and in order to avoid fragmenting
- // memory.
- ScopedAutoSECItem buf(128 * 1024);
-
- nsTHashtable<nsStringHashKey> items;
- rv = ParseMFUnpacked(BitwiseCast<char*, unsigned char*>(manifestBuffer.data),
- aDirectory, items, buf);
- if (NS_FAILED(rv)){
- return rv;
- }
-
- // We've checked that everything listed in the manifest exists and is signed
- // correctly. Now check on disk for extra (unsigned) files.
- // Deletes found entries from items as it goes.
- rv = CheckDirForUnsignedFiles(aDirectory, EmptyString(), items,
- sigFilename, sfFilename, mfFilename);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // We verified that every entry that we require to be signed is signed. But,
- // were there any missing entries--that is, entries that are mentioned in the
- // manifest but missing from the directory tree? (There shouldn't be given
- // ParseMFUnpacked() checking them all, but it's a cheap sanity check.)
- if (items.Count() != 0) {
- return NS_ERROR_SIGNED_JAR_ENTRY_MISSING;
- }
-
- // Return the signer's certificate to the reader if they want it.
- // XXX: We should return an nsIX509CertList with the whole validated chain.
- if (aSignerCert) {
- CERTCertListNode* signerCertNode = CERT_LIST_HEAD(builtChain);
- if (!signerCertNode || CERT_LIST_END(signerCertNode, builtChain) ||
- !signerCertNode->cert) {
- return NS_ERROR_FAILURE;
- }
- nsCOMPtr<nsIX509Cert> signerCert =
- nsNSSCertificate::Create(signerCertNode->cert);
- NS_ENSURE_TRUE(signerCert, NS_ERROR_OUT_OF_MEMORY);
- signerCert.forget(aSignerCert);
- }
-
- return NS_OK;
-}
-
-class VerifySignedDirectoryTask final : public CryptoTask
-{
-public:
- VerifySignedDirectoryTask(AppTrustedRoot aTrustedRoot, nsIFile* aUnpackedJar,
- nsIVerifySignedDirectoryCallback* aCallback)
- : mTrustedRoot(aTrustedRoot)
- , mDirectory(aUnpackedJar)
- , mCallback(new nsMainThreadPtrHolder<nsIVerifySignedDirectoryCallback>(aCallback))
- {
- }
-
-private:
- virtual nsresult CalculateResult() override
- {
- return VerifySignedDirectory(mTrustedRoot,
- mDirectory,
- getter_AddRefs(mSignerCert));
- }
-
- // This class doesn't directly hold NSS resources so there's nothing that
- // needs to be released
- virtual void ReleaseNSSResources() override { }
-
- virtual void CallCallback(nsresult rv) override
- {
- (void) mCallback->VerifySignedDirectoryFinished(rv, mSignerCert);
- }
-
- const AppTrustedRoot mTrustedRoot;
- const nsCOMPtr<nsIFile> mDirectory;
- nsMainThreadPtrHandle<nsIVerifySignedDirectoryCallback> mCallback;
- nsCOMPtr<nsIX509Cert> mSignerCert; // out
-};
-
-NS_IMETHODIMP
-nsNSSCertificateDB::VerifySignedDirectoryAsync(
- AppTrustedRoot aTrustedRoot, nsIFile* aUnpackedJar,
- nsIVerifySignedDirectoryCallback* aCallback)
-{
- NS_ENSURE_ARG_POINTER(aUnpackedJar);
- NS_ENSURE_ARG_POINTER(aCallback);
- RefPtr<VerifySignedDirectoryTask> task(new VerifySignedDirectoryTask(aTrustedRoot,
- aUnpackedJar,
- aCallback));
- return task->Dispatch("UnpackedJar");
-}
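
The removed VerifySignedDirectory above walks an unpacked JAR: it requires a META-INF directory, locates the .rsa signature, derives the matching .sf name by swapping the extension, verifies the signed digests, and finally rejects any file on disk that the manifest does not list. The sketch below is a rough Python rendition of only the non-cryptographic layout checks (the NSS signature and digest verification is omitted); the function names and error strings are illustrative, not part of the deleted code.

    import os

    JAR_META_DIR = "META-INF"

    def check_unpacked_layout(directory):
        # Mirror the structural checks above: require a META-INF directory
        # holding exactly one *.rsa signature, a matching *.sf file, and a
        # manifest.mf.
        meta = os.path.join(directory, JAR_META_DIR)
        if not os.path.isdir(meta):
            raise ValueError("not signed: no META-INF directory")
        rsa_files = [f for f in os.listdir(meta) if f.lower().endswith(".rsa")]
        if len(rsa_files) != 1:
            raise ValueError("expected exactly one .rsa signature file")
        sig_name = rsa_files[0]
        # The .sf and .rsa files must share a basename; the C++ above swaps
        # the last three characters of the signature filename for "sf".
        sf_name = sig_name[:-3] + "sf"
        if not os.path.isfile(os.path.join(meta, sf_name)):
            raise ValueError("manifest invalid: missing " + sf_name)
        if not os.path.isfile(os.path.join(meta, "manifest.mf")):
            raise ValueError("manifest invalid: missing manifest.mf")
        return sig_name, sf_name

    def find_unsigned_files(directory, manifest_entries, meta_files):
        # Analogue of CheckDirForUnsignedFiles: report files on disk that the
        # manifest does not list; the signature metafiles themselves are
        # exempt. manifest_entries is a set of relative paths parsed from
        # manifest.mf (parsing not shown here).
        extras = []
        for root, _dirs, files in os.walk(directory):
            for name in files:
                rel = os.path.relpath(os.path.join(root, name), directory)
                rel = rel.replace(os.sep, "/")
                if rel.startswith(JAR_META_DIR + "/") and name in meta_files:
                    continue
                if rel not in manifest_entries:
                    extras.append(rel)
        return extras
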
diff --git a/security/apps/AppTrustDomain.cpp b/security/apps/AppTrustDomain.cpp
deleted file mode 100644
index 35be4ebd97..0000000000
--- a/security/apps/AppTrustDomain.cpp
+++ /dev/null
@@ -1,388 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "AppTrustDomain.h"
-#include "MainThreadUtils.h"
-#include "certdb.h"
-#include "mozilla/ArrayUtils.h"
-#include "mozilla/Casting.h"
-#include "mozilla/Preferences.h"
-#include "nsComponentManagerUtils.h"
-#include "nsIFile.h"
-#include "nsIFileStreams.h"
-#include "nsIX509CertDB.h"
-#include "nsNSSCertificate.h"
-#include "nsNetUtil.h"
-#include "pkix/pkixnss.h"
-#include "prerror.h"
-#include "secerr.h"
-
-// Generated in Makefile.in
-#include "marketplace-prod-public.inc"
-#include "marketplace-prod-reviewers.inc"
-#include "marketplace-dev-public.inc"
-#include "marketplace-dev-reviewers.inc"
-#include "marketplace-stage.inc"
-#include "xpcshell.inc"
-// Trusted Hosted Apps Certificates
-#include "manifest-signing-root.inc"
-#include "manifest-signing-test-root.inc"
-// Add-on signing Certificates
-#include "addons-public.inc"
-#include "addons-stage.inc"
-// Privileged Package Certificates
-#include "privileged-package-root.inc"
-
-using namespace mozilla::pkix;
-
-extern mozilla::LazyLogModule gPIPNSSLog;
-
-static const unsigned int DEFAULT_MIN_RSA_BITS = 2048;
-static char kDevImportedDER[] =
- "network.http.signed-packages.developer-root";
-
-namespace mozilla { namespace psm {
-
-StaticMutex AppTrustDomain::sMutex;
-UniquePtr<unsigned char[]> AppTrustDomain::sDevImportedDERData;
-unsigned int AppTrustDomain::sDevImportedDERLen = 0;
-
-AppTrustDomain::AppTrustDomain(UniqueCERTCertList& certChain, void* pinArg)
- : mCertChain(certChain)
- , mPinArg(pinArg)
- , mMinRSABits(DEFAULT_MIN_RSA_BITS)
-{
-}
-
-SECStatus
-AppTrustDomain::SetTrustedRoot(AppTrustedRoot trustedRoot)
-{
- SECItem trustedDER;
-
- // Load the trusted certificate into the in-memory NSS database so that
- // CERT_CreateSubjectCertList can find it.
-
- switch (trustedRoot)
- {
- case nsIX509CertDB::AppMarketplaceProdPublicRoot:
- trustedDER.data = const_cast<uint8_t*>(marketplaceProdPublicRoot);
- trustedDER.len = mozilla::ArrayLength(marketplaceProdPublicRoot);
- break;
-
- case nsIX509CertDB::AppMarketplaceProdReviewersRoot:
- trustedDER.data = const_cast<uint8_t*>(marketplaceProdReviewersRoot);
- trustedDER.len = mozilla::ArrayLength(marketplaceProdReviewersRoot);
- break;
-
- case nsIX509CertDB::AppMarketplaceDevPublicRoot:
- trustedDER.data = const_cast<uint8_t*>(marketplaceDevPublicRoot);
- trustedDER.len = mozilla::ArrayLength(marketplaceDevPublicRoot);
- break;
-
- case nsIX509CertDB::AppMarketplaceDevReviewersRoot:
- trustedDER.data = const_cast<uint8_t*>(marketplaceDevReviewersRoot);
- trustedDER.len = mozilla::ArrayLength(marketplaceDevReviewersRoot);
- break;
-
- case nsIX509CertDB::AppMarketplaceStageRoot:
- trustedDER.data = const_cast<uint8_t*>(marketplaceStageRoot);
- trustedDER.len = mozilla::ArrayLength(marketplaceStageRoot);
- // The staging root was generated with a 1024-bit key.
- mMinRSABits = 1024u;
- break;
-
- case nsIX509CertDB::AppXPCShellRoot:
- trustedDER.data = const_cast<uint8_t*>(xpcshellRoot);
- trustedDER.len = mozilla::ArrayLength(xpcshellRoot);
- break;
-
- case nsIX509CertDB::AddonsPublicRoot:
- trustedDER.data = const_cast<uint8_t*>(addonsPublicRoot);
- trustedDER.len = mozilla::ArrayLength(addonsPublicRoot);
- break;
-
- case nsIX509CertDB::AddonsStageRoot:
- trustedDER.data = const_cast<uint8_t*>(addonsStageRoot);
- trustedDER.len = mozilla::ArrayLength(addonsStageRoot);
- break;
-
- case nsIX509CertDB::PrivilegedPackageRoot:
- trustedDER.data = const_cast<uint8_t*>(privilegedPackageRoot);
- trustedDER.len = mozilla::ArrayLength(privilegedPackageRoot);
- break;
-
- case nsIX509CertDB::DeveloperImportedRoot: {
- StaticMutexAutoLock lock(sMutex);
- if (!sDevImportedDERData) {
- MOZ_ASSERT(!NS_IsMainThread());
- nsCOMPtr<nsIFile> file(do_CreateInstance("@mozilla.org/file/local;1"));
- if (!file) {
- PR_SetError(SEC_ERROR_IO, 0);
- return SECFailure;
- }
- nsresult rv = file->InitWithNativePath(
- Preferences::GetCString(kDevImportedDER));
- if (NS_FAILED(rv)) {
- PR_SetError(SEC_ERROR_IO, 0);
- return SECFailure;
- }
-
- nsCOMPtr<nsIInputStream> inputStream;
- NS_NewLocalFileInputStream(getter_AddRefs(inputStream), file, -1, -1,
- nsIFileInputStream::CLOSE_ON_EOF);
- if (!inputStream) {
- PR_SetError(SEC_ERROR_IO, 0);
- return SECFailure;
- }
-
- uint64_t length;
- rv = inputStream->Available(&length);
- if (NS_FAILED(rv)) {
- PR_SetError(SEC_ERROR_IO, 0);
- return SECFailure;
- }
-
- auto data = MakeUnique<char[]>(length);
- rv = inputStream->Read(data.get(), length, &sDevImportedDERLen);
- if (NS_FAILED(rv)) {
- PR_SetError(SEC_ERROR_IO, 0);
- return SECFailure;
- }
-
- MOZ_ASSERT(length == sDevImportedDERLen);
- sDevImportedDERData.reset(
- BitwiseCast<unsigned char*, char*>(data.release()));
- }
-
- trustedDER.data = sDevImportedDERData.get();
- trustedDER.len = sDevImportedDERLen;
- break;
- }
-
- default:
- PR_SetError(SEC_ERROR_INVALID_ARGS, 0);
- return SECFailure;
- }
-
- mTrustedRoot.reset(CERT_NewTempCertificate(CERT_GetDefaultCertDB(),
- &trustedDER, nullptr, false, true));
- if (!mTrustedRoot) {
- return SECFailure;
- }
-
- return SECSuccess;
-}
-
-Result
-AppTrustDomain::FindIssuer(Input encodedIssuerName, IssuerChecker& checker,
- Time)
-
-{
- MOZ_ASSERT(mTrustedRoot);
- if (!mTrustedRoot) {
- return Result::FATAL_ERROR_INVALID_STATE;
- }
-
- // TODO(bug 1035418): If/when mozilla::pkix relaxes the restriction that
- // FindIssuer must only pass certificates with a matching subject name to
- // checker.Check, we can stop using CERT_CreateSubjectCertList and instead
- // use logic like this:
- //
- // 1. First, try the trusted trust anchor.
- // 2. Secondly, iterate through the certificates that were stored in the CMS
- // message, passing each one to checker.Check.
- SECItem encodedIssuerNameSECItem =
- UnsafeMapInputToSECItem(encodedIssuerName);
- UniqueCERTCertList
- candidates(CERT_CreateSubjectCertList(nullptr, CERT_GetDefaultCertDB(),
- &encodedIssuerNameSECItem, 0,
- false));
- if (candidates) {
- for (CERTCertListNode* n = CERT_LIST_HEAD(candidates);
- !CERT_LIST_END(n, candidates); n = CERT_LIST_NEXT(n)) {
- Input certDER;
- Result rv = certDER.Init(n->cert->derCert.data, n->cert->derCert.len);
- if (rv != Success) {
- continue; // probably too big
- }
-
- bool keepGoing;
- rv = checker.Check(certDER, nullptr/*additionalNameConstraints*/,
- keepGoing);
- if (rv != Success) {
- return rv;
- }
- if (!keepGoing) {
- break;
- }
- }
- }
-
- return Success;
-}
-
-Result
-AppTrustDomain::GetCertTrust(EndEntityOrCA endEntityOrCA,
- const CertPolicyId& policy,
- Input candidateCertDER,
- /*out*/ TrustLevel& trustLevel)
-{
- MOZ_ASSERT(policy.IsAnyPolicy());
- MOZ_ASSERT(mTrustedRoot);
- if (!policy.IsAnyPolicy()) {
- return Result::FATAL_ERROR_INVALID_ARGS;
- }
- if (!mTrustedRoot) {
- return Result::FATAL_ERROR_INVALID_STATE;
- }
-
- // Handle active distrust of the certificate.
-
- // XXX: This would be cleaner and more efficient if we could get the trust
- // information without constructing a CERTCertificate here, but NSS doesn't
- // expose it in any other easy-to-use fashion.
- SECItem candidateCertDERSECItem =
- UnsafeMapInputToSECItem(candidateCertDER);
- UniqueCERTCertificate candidateCert(
- CERT_NewTempCertificate(CERT_GetDefaultCertDB(), &candidateCertDERSECItem,
- nullptr, false, true));
- if (!candidateCert) {
- return MapPRErrorCodeToResult(PR_GetError());
- }
-
- CERTCertTrust trust;
- if (CERT_GetCertTrust(candidateCert.get(), &trust) == SECSuccess) {
- uint32_t flags = SEC_GET_TRUST_FLAGS(&trust, trustObjectSigning);
-
- // For DISTRUST, we use the CERTDB_TRUSTED or CERTDB_TRUSTED_CA bit,
- // because we can have active distrust for either type of cert. Note that
- // CERTDB_TERMINAL_RECORD means "stop trying to inherit trust" so if the
- // relevant trust bit isn't set then that means the cert must be considered
- // distrusted.
- uint32_t relevantTrustBit = endEntityOrCA == EndEntityOrCA::MustBeCA
- ? CERTDB_TRUSTED_CA
- : CERTDB_TRUSTED;
- if (((flags & (relevantTrustBit | CERTDB_TERMINAL_RECORD)))
- == CERTDB_TERMINAL_RECORD) {
- trustLevel = TrustLevel::ActivelyDistrusted;
- return Success;
- }
- }
-
- // mTrustedRoot is the only trust anchor for this validation.
- if (CERT_CompareCerts(mTrustedRoot.get(), candidateCert.get())) {
- trustLevel = TrustLevel::TrustAnchor;
- return Success;
- }
-
- trustLevel = TrustLevel::InheritsTrust;
- return Success;
-}
-
-Result
-AppTrustDomain::DigestBuf(Input item,
- DigestAlgorithm digestAlg,
- /*out*/ uint8_t* digestBuf,
- size_t digestBufLen)
-{
- return DigestBufNSS(item, digestAlg, digestBuf, digestBufLen);
-}
-
-Result
-AppTrustDomain::CheckRevocation(EndEntityOrCA, const CertID&, Time, Duration,
- /*optional*/ const Input*,
- /*optional*/ const Input*,
- /*optional*/ const Input*)
-{
- // We don't currently do revocation checking. If we need to distrust an Apps
- // certificate, we will use the active distrust mechanism.
- return Success;
-}
-
-Result
-AppTrustDomain::IsChainValid(const DERArray& certChain, Time time,
- const CertPolicyId& requiredPolicy)
-{
- SECStatus srv = ConstructCERTCertListFromReversedDERArray(certChain,
- mCertChain);
- if (srv != SECSuccess) {
- return MapPRErrorCodeToResult(PR_GetError());
- }
- return Success;
-}
-
-Result
-AppTrustDomain::CheckSignatureDigestAlgorithm(DigestAlgorithm,
- EndEntityOrCA,
- Time)
-{
- // TODO: We should restrict signatures to SHA-256 or better.
- return Success;
-}
-
-Result
-AppTrustDomain::CheckRSAPublicKeyModulusSizeInBits(
- EndEntityOrCA /*endEntityOrCA*/, unsigned int modulusSizeInBits)
-{
- if (modulusSizeInBits < mMinRSABits) {
- return Result::ERROR_INADEQUATE_KEY_SIZE;
- }
- return Success;
-}
-
-Result
-AppTrustDomain::VerifyRSAPKCS1SignedDigest(const SignedDigest& signedDigest,
- Input subjectPublicKeyInfo)
-{
- // TODO: We should restrict signatures to SHA-256 or better.
- return VerifyRSAPKCS1SignedDigestNSS(signedDigest, subjectPublicKeyInfo,
- mPinArg);
-}
-
-Result
-AppTrustDomain::CheckECDSACurveIsAcceptable(EndEntityOrCA /*endEntityOrCA*/,
- NamedCurve curve)
-{
- switch (curve) {
- case NamedCurve::secp256r1: // fall through
- case NamedCurve::secp384r1: // fall through
- case NamedCurve::secp521r1:
- return Success;
- }
-
- return Result::ERROR_UNSUPPORTED_ELLIPTIC_CURVE;
-}
-
-Result
-AppTrustDomain::VerifyECDSASignedDigest(const SignedDigest& signedDigest,
- Input subjectPublicKeyInfo)
-{
- return VerifyECDSASignedDigestNSS(signedDigest, subjectPublicKeyInfo,
- mPinArg);
-}
-
-Result
-AppTrustDomain::CheckValidityIsAcceptable(Time /*notBefore*/, Time /*notAfter*/,
- EndEntityOrCA /*endEntityOrCA*/,
- KeyPurposeId /*keyPurpose*/)
-{
- return Success;
-}
-
-Result
-AppTrustDomain::NetscapeStepUpMatchesServerAuth(Time /*notBefore*/,
- /*out*/ bool& matches)
-{
- matches = false;
- return Success;
-}
-
-void
-AppTrustDomain::NoteAuxiliaryExtension(AuxiliaryExtension /*extension*/,
- Input /*extensionData*/)
-{
-}
-
-} } // namespace mozilla::psm
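
The deleted GetCertTrust above encodes active distrust with NSS trust flags: it picks CERTDB_TRUSTED_CA or CERTDB_TRUSTED depending on whether the candidate must be a CA, treats a terminal record without that bit as distrust, and accepts the configured root as the only trust anchor. A minimal sketch of that decision, using placeholder bit values rather than NSS's actual constants:

    # Placeholder bit values standing in for NSS's CERTDB_* trust flags; the
    # relationships between the bits, not the exact numbers, are what the
    # deleted code relies on.
    CERTDB_TERMINAL_RECORD = 1 << 0
    CERTDB_TRUSTED = 1 << 1
    CERTDB_TRUSTED_CA = 1 << 4

    def classify_trust(flags, must_be_ca, is_configured_root):
        # A terminal record without the relevant "trusted" bit means active
        # distrust; the configured root is the sole trust anchor; everything
        # else inherits trust from whatever chains up to that root.
        relevant = CERTDB_TRUSTED_CA if must_be_ca else CERTDB_TRUSTED
        if (flags & (relevant | CERTDB_TERMINAL_RECORD)) == CERTDB_TERMINAL_RECORD:
            return "ActivelyDistrusted"
        if is_configured_root:
            return "TrustAnchor"
        return "InheritsTrust"
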
diff --git a/security/apps/AppTrustDomain.h b/security/apps/AppTrustDomain.h
deleted file mode 100644
index e4a8ec5e50..0000000000
--- a/security/apps/AppTrustDomain.h
+++ /dev/null
@@ -1,89 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef AppTrustDomain_h
-#define AppTrustDomain_h
-
-#include "pkix/pkixtypes.h"
-#include "mozilla/StaticMutex.h"
-#include "mozilla/UniquePtr.h"
-#include "nsDebug.h"
-#include "nsIX509CertDB.h"
-#include "ScopedNSSTypes.h"
-
-namespace mozilla { namespace psm {
-
-class AppTrustDomain final : public mozilla::pkix::TrustDomain
-{
-public:
- typedef mozilla::pkix::Result Result;
-
- AppTrustDomain(UniqueCERTCertList& certChain, void* pinArg);
-
- SECStatus SetTrustedRoot(AppTrustedRoot trustedRoot);
-
- virtual Result GetCertTrust(mozilla::pkix::EndEntityOrCA endEntityOrCA,
- const mozilla::pkix::CertPolicyId& policy,
- mozilla::pkix::Input candidateCertDER,
- /*out*/ mozilla::pkix::TrustLevel& trustLevel)
- override;
- virtual Result FindIssuer(mozilla::pkix::Input encodedIssuerName,
- IssuerChecker& checker,
- mozilla::pkix::Time time) override;
- virtual Result CheckRevocation(mozilla::pkix::EndEntityOrCA endEntityOrCA,
- const mozilla::pkix::CertID& certID,
- mozilla::pkix::Time time,
- mozilla::pkix::Duration validityDuration,
- /*optional*/ const mozilla::pkix::Input* stapledOCSPresponse,
- /*optional*/ const mozilla::pkix::Input* aiaExtension,
- /*optional*/ const mozilla::pkix::Input* sctExtension) override;
- virtual Result IsChainValid(const mozilla::pkix::DERArray& certChain,
- mozilla::pkix::Time time,
- const mozilla::pkix::CertPolicyId& requiredPolicy) override;
- virtual Result CheckSignatureDigestAlgorithm(
- mozilla::pkix::DigestAlgorithm digestAlg,
- mozilla::pkix::EndEntityOrCA endEntityOrCA,
- mozilla::pkix::Time notBefore) override;
- virtual Result CheckRSAPublicKeyModulusSizeInBits(
- mozilla::pkix::EndEntityOrCA endEntityOrCA,
- unsigned int modulusSizeInBits) override;
- virtual Result VerifyRSAPKCS1SignedDigest(
- const mozilla::pkix::SignedDigest& signedDigest,
- mozilla::pkix::Input subjectPublicKeyInfo) override;
- virtual Result CheckECDSACurveIsAcceptable(
- mozilla::pkix::EndEntityOrCA endEntityOrCA,
- mozilla::pkix::NamedCurve curve) override;
- virtual Result VerifyECDSASignedDigest(
- const mozilla::pkix::SignedDigest& signedDigest,
- mozilla::pkix::Input subjectPublicKeyInfo) override;
- virtual Result CheckValidityIsAcceptable(
- mozilla::pkix::Time notBefore, mozilla::pkix::Time notAfter,
- mozilla::pkix::EndEntityOrCA endEntityOrCA,
- mozilla::pkix::KeyPurposeId keyPurpose) override;
- virtual Result NetscapeStepUpMatchesServerAuth(
- mozilla::pkix::Time notBefore,
- /*out*/ bool& matches) override;
- virtual void NoteAuxiliaryExtension(
- mozilla::pkix::AuxiliaryExtension extension,
- mozilla::pkix::Input extensionData) override;
- virtual Result DigestBuf(mozilla::pkix::Input item,
- mozilla::pkix::DigestAlgorithm digestAlg,
- /*out*/ uint8_t* digestBuf,
- size_t digestBufLen) override;
-
-private:
- /*out*/ UniqueCERTCertList& mCertChain;
- void* mPinArg; // non-owning!
- UniqueCERTCertificate mTrustedRoot;
- unsigned int mMinRSABits;
-
- static StaticMutex sMutex;
- static UniquePtr<unsigned char[]> sDevImportedDERData;
- static unsigned int sDevImportedDERLen;
-};
-
-} } // namespace mozilla::psm
-
-#endif // AppTrustDomain_h
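
AppTrustDomain also constrained key strength: CheckECDSACurveIsAcceptable admits only secp256r1, secp384r1, and secp521r1, and CheckRSAPublicKeyModulusSizeInBits rejects moduli below mMinRSABits (2048 by default, relaxed to 1024 only for the legacy marketplace staging root). A compact sketch of those two checks, with an illustrative helper name:

    ALLOWED_CURVES = {"secp256r1", "secp384r1", "secp521r1"}
    DEFAULT_MIN_RSA_BITS = 2048

    def key_is_acceptable(kind, curve=None, modulus_bits=None,
                          min_rsa_bits=DEFAULT_MIN_RSA_BITS):
        # Only the three NIST curves pass CheckECDSACurveIsAcceptable; RSA
        # moduli below the configured minimum are rejected with
        # ERROR_INADEQUATE_KEY_SIZE in the removed C++.
        if kind == "ec":
            return curve in ALLOWED_CURVES
        if kind == "rsa":
            return modulus_bits >= min_rsa_bits
        raise ValueError("unknown key kind: %r" % kind)
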
diff --git a/security/apps/addons-public.crt b/security/apps/addons-public.crt
deleted file mode 100644
index 6ab711b996..0000000000
--- a/security/apps/addons-public.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/addons-stage.crt b/security/apps/addons-stage.crt
deleted file mode 100644
index 73e48cadfe..0000000000
--- a/security/apps/addons-stage.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/gen_cert_header.py b/security/apps/gen_cert_header.py
deleted file mode 100644
index 0ffe25cf4e..0000000000
--- a/security/apps/gen_cert_header.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-import binascii
-
-def _file_byte_generator(filename):
- with open(filename, "rb") as f:
- contents = f.read()
-
- # Treat empty files the same as a file containing a lone 0;
- # a single-element array will fail cert verification just as an
- # empty array would.
- if not contents:
- return ['\0']
-
- return contents
-
-def _create_header(array_name, cert_bytes):
- hexified = ["0x" + binascii.hexlify(byte) for byte in cert_bytes]
- substs = { 'array_name': array_name, 'bytes': ', '.join(hexified) }
- return "const uint8_t %(array_name)s[] = {\n%(bytes)s\n};\n" % substs
-
-# Create functions named the same as the data arrays that we're going to
-# write to the headers, so we don't have to duplicate the names like so:
-#
-# def arrayName(header, cert_filename):
-# header.write(_create_header("arrayName", cert_filename))
-array_names = [
- 'marketplaceProdPublicRoot',
- 'marketplaceProdReviewersRoot',
- 'marketplaceDevPublicRoot',
- 'marketplaceDevReviewersRoot',
- 'marketplaceStageRoot',
- 'trustedAppPublicRoot',
- 'trustedAppTestRoot',
- 'xpcshellRoot',
- 'addonsPublicRoot',
- 'addonsStageRoot',
- 'privilegedPackageRoot',
-]
-
-for n in array_names:
- # Make sure the lambda captures the right string.
- globals()[n] = lambda header, cert_filename, name=n: header.write(_create_header(name, _file_byte_generator(cert_filename)))
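
gen_cert_header.py above is Python 2 code: iterating the file contents yields one-character strings that binascii.hexlify accepts. Under Python 3, iterating bytes yields ints, so a port would format each byte directly. A minimal Python 3 sketch of the same header generation (function names are illustrative, not the build's actual code):

    def create_header_py3(array_name, cert_bytes):
        # Emit a C array of the certificate's bytes, or a lone 0 for an
        # empty file, which fails verification just like an empty array.
        data = cert_bytes or b"\0"
        hexified = ", ".join("0x%02x" % b for b in data)
        return "const uint8_t %s[] = {\n%s\n};\n" % (array_name, hexified)

    def write_header(header_path, array_name, cert_path):
        with open(cert_path, "rb") as f:
            contents = f.read()
        with open(header_path, "w") as out:
            out.write(create_header_py3(array_name, contents))
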
diff --git a/security/apps/marketplace-dev-public.crt b/security/apps/marketplace-dev-public.crt
deleted file mode 100644
index 490b8682b7..0000000000
--- a/security/apps/marketplace-dev-public.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/marketplace-dev-reviewers.crt b/security/apps/marketplace-dev-reviewers.crt
deleted file mode 100644
index 5b8bde9337..0000000000
--- a/security/apps/marketplace-dev-reviewers.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/marketplace-prod-public.crt b/security/apps/marketplace-prod-public.crt
deleted file mode 100644
index 85c2fed92a..0000000000
--- a/security/apps/marketplace-prod-public.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/marketplace-prod-reviewers.crt b/security/apps/marketplace-prod-reviewers.crt
deleted file mode 100644
index 53be8c81ed..0000000000
--- a/security/apps/marketplace-prod-reviewers.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/marketplace-stage.crt b/security/apps/marketplace-stage.crt
deleted file mode 100644
index 84504f3574..0000000000
--- a/security/apps/marketplace-stage.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/moz.build b/security/apps/moz.build
deleted file mode 100644
index 365379881b..0000000000
--- a/security/apps/moz.build
+++ /dev/null
@@ -1,43 +0,0 @@
-# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-UNIFIED_SOURCES += [
- 'AppSignatureVerification.cpp',
- 'AppTrustDomain.cpp',
-]
-
-FINAL_LIBRARY = 'xul'
-
-LOCAL_INCLUDES += [
- '/security/certverifier',
- '/security/manager/ssl',
- '/security/pkix/include',
-]
-
-DEFINES['NSS_ENABLE_ECC'] = 'True'
-for var in ('DLL_PREFIX', 'DLL_SUFFIX'):
- DEFINES[var] = '"%s"' % CONFIG[var]
-
-test_ssl_path = '/security/manager/ssl/tests/unit'
-
-headers_arrays_certs = [
- ('marketplace-prod-public.inc', 'marketplaceProdPublicRoot', 'marketplace-prod-public.crt'),
- ('marketplace-prod-reviewers.inc', 'marketplaceProdReviewersRoot', 'marketplace-prod-reviewers.crt'),
- ('marketplace-dev-public.inc', 'marketplaceDevPublicRoot', 'marketplace-dev-public.crt'),
- ('marketplace-dev-reviewers.inc', 'marketplaceDevReviewersRoot', 'marketplace-dev-reviewers.crt'),
- ('marketplace-stage.inc', 'marketplaceStageRoot', 'marketplace-stage.crt'),
- ('manifest-signing-root.inc', 'trustedAppPublicRoot', 'trusted-app-public.der'),
- ('manifest-signing-test-root.inc', 'trustedAppTestRoot', test_ssl_path + '/test_signed_manifest/trusted_ca1.der'),
- ('xpcshell.inc', 'xpcshellRoot', test_ssl_path + '/test_signed_apps/trusted_ca1.der'),
- ('addons-public.inc', 'addonsPublicRoot', 'addons-public.crt'),
- ('addons-stage.inc', 'addonsStageRoot', 'addons-stage.crt'),
- ('privileged-package-root.inc', 'privilegedPackageRoot', 'privileged-package-root.der'),
-]
-
-for header, array_name, cert in headers_arrays_certs:
- GENERATED_FILES += [header]
- h = GENERATED_FILES[header]
- h.script = 'gen_cert_header.py:' + array_name
- h.inputs = [cert]
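
The GENERATED_FILES loop above uses the 'script:function' form, so for each .inc header the build invokes the function in gen_cert_header.py named after the array, passing it the output file and the certificate path. Outside the build system, that dispatch amounts to roughly the following (illustrative only; it assumes the deleted gen_cert_header.py is importable under the Python 2 interpreter the build used):

    import gen_cert_header  # the deleted script shown above

    headers_arrays_certs = [
        ("addons-public.inc", "addonsPublicRoot", "addons-public.crt"),
        ("addons-stage.inc", "addonsStageRoot", "addons-stage.crt"),
    ]

    for header, array_name, cert in headers_arrays_certs:
        with open(header, "w") as out:
            # Calls the lambda registered under the array name, which writes
            # the generated C array into the open header file.
            getattr(gen_cert_header, array_name)(out, cert)
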
diff --git a/security/apps/privileged-package-root.der b/security/apps/privileged-package-root.der
deleted file mode 100644
index 9f77af5823..0000000000
--- a/security/apps/privileged-package-root.der
+++ /dev/null
Binary files differ
diff --git a/security/apps/trusted-app-public.der b/security/apps/trusted-app-public.der
deleted file mode 100644
index e69de29bb2..0000000000
--- a/security/apps/trusted-app-public.der
+++ /dev/null
diff --git a/security/manager/ssl/nsIX509CertDB.idl b/security/manager/ssl/nsIX509CertDB.idl
index 44d8e0588c..1dbef22fbd 100644
--- a/security/manager/ssl/nsIX509CertDB.idl
+++ b/security/manager/ssl/nsIX509CertDB.idl
@@ -265,74 +265,11 @@ interface nsIX509CertDB : nsISupports {
*/
nsIX509Cert constructX509(in string certDER, in unsigned long length);
- /**
- * Verifies the signature on the given JAR file to verify that it has a
- * valid signature. To be considered valid, there must be exactly one
- * signature on the JAR file and that signature must have signed every
- * entry. Further, the signature must come from a certificate that
- * is trusted for code signing.
- *
- * On success, NS_OK, a nsIZipReader, and the trusted certificate that
- * signed the JAR are returned.
- *
- * On failure, an error code is returned.
- *
- * This method returns a nsIZipReader, instead of taking an nsIZipReader
- * as input, to encourage users of the API to verify the signature as the
- * first step in opening the JAR.
- */
- const AppTrustedRoot AppMarketplaceProdPublicRoot = 1;
- const AppTrustedRoot AppMarketplaceProdReviewersRoot = 2;
- const AppTrustedRoot AppMarketplaceDevPublicRoot = 3;
- const AppTrustedRoot AppMarketplaceDevReviewersRoot = 4;
- const AppTrustedRoot AppMarketplaceStageRoot = 5;
- const AppTrustedRoot AppXPCShellRoot = 6;
- const AppTrustedRoot AddonsPublicRoot = 7;
- const AppTrustedRoot AddonsStageRoot = 8;
- const AppTrustedRoot PrivilegedPackageRoot = 9;
- /*
- * If DeveloperImportedRoot is set as trusted root, a CA from local file
- * system will be imported. Only used when preference
- * "network.http.packaged-apps-developer-mode" is set.
- * The path of the CA is specified by preference
- * "network.http.packaged-apps-developer-trusted-root".
- */
- const AppTrustedRoot DeveloperImportedRoot = 10;
- void openSignedAppFileAsync(in AppTrustedRoot trustedRoot,
- in nsIFile aJarFile,
- in nsIOpenSignedAppFileCallback callback);
-
- /**
- * Verifies the signature on a directory representing an unpacked signed
- * JAR file. To be considered valid, there must be exactly one signature
- * on the directory structure and that signature must have signed every
- * entry. Further, the signature must come from a certificate that
- * is trusted for code signing.
- *
- * On success NS_OK and the trusted certificate that signed the
- * unpacked JAR are returned.
- *
- * On failure, an error code is returned.
- */
- void verifySignedDirectoryAsync(in AppTrustedRoot trustedRoot,
- in nsIFile aUnpackedDir,
- in nsIVerifySignedDirectoryCallback callback);
-
- /**
- * Given streams containing a signature and a manifest file, verifies
- * that the signature is valid for the manifest. The signature must
- * come from a certificate that is trusted for code signing and that
- * was issued by the given trusted root.
- *
- * On success, NS_OK and the trusted certificate that signed the
- * Manifest are returned.
- *
- * On failure, an error code is returned.
- */
- void verifySignedManifestAsync(in AppTrustedRoot trustedRoot,
- in nsIInputStream aManifestStream,
- in nsIInputStream aSignatureStream,
- in nsIVerifySignedManifestCallback callback);
+ // Flags to indicate the type of cert root for signed extensions
+ // This can probably be removed eventually.
+ const AppTrustedRoot AddonsPublicRoot = 1;
+ const AppTrustedRoot AddonsStageRoot = 2;
+ const AppTrustedRoot PrivilegedPackageRoot = 3;
/*
* Add a cert to a cert DB from a binary string.
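
Note that the surviving AppTrustedRoot constants are renumbered by this change: AddonsPublicRoot moves from 7 to 1, AddonsStageRoot from 8 to 2, and PrivilegedPackageRoot from 9 to 3, so anything that hard-coded or stored the old numeric values needs updating. A hypothetical helper for such a migration:

    # Hypothetical mapping from the removed numbering to the one kept in
    # nsIX509CertDB.idl above; the remaining roots existed under both.
    OLD_TO_NEW_TRUSTED_ROOT = {
        7: 1,  # AddonsPublicRoot
        8: 2,  # AddonsStageRoot
        9: 3,  # PrivilegedPackageRoot
    }

    def migrate_trusted_root(old_value):
        try:
            return OLD_TO_NEW_TRUSTED_ROOT[old_value]
        except KeyError:
            raise ValueError("trusted root %d no longer exists" % old_value)
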
diff --git a/settings.gradle b/settings.gradle
deleted file mode 100644
index 9d75e3242b..0000000000
--- a/settings.gradle
+++ /dev/null
@@ -1 +0,0 @@
-throw new GradleException("Building with Gradle is not supported.")
diff --git a/toolkit/components/moz.build b/toolkit/components/moz.build
index cd80ca1197..759b07e18e 100644
--- a/toolkit/components/moz.build
+++ b/toolkit/components/moz.build
@@ -33,7 +33,6 @@ DIRS += [
'lz4',
'mediasniffer',
'microformats',
- 'mozprotocol',
'osfile',
'parentalcontrols',
'passwordmgr',
diff --git a/toolkit/components/mozprotocol/moz.build b/toolkit/components/mozprotocol/moz.build
deleted file mode 100644
index b96a64ec2a..0000000000
--- a/toolkit/components/mozprotocol/moz.build
+++ /dev/null
@@ -1,9 +0,0 @@
-# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-EXTRA_COMPONENTS += [
- 'mozProtocolHandler.js',
- 'mozProtocolHandler.manifest',
-]
diff --git a/toolkit/components/mozprotocol/mozProtocolHandler.js b/toolkit/components/mozprotocol/mozProtocolHandler.js
deleted file mode 100644
index 97bfb737e4..0000000000
--- a/toolkit/components/mozprotocol/mozProtocolHandler.js
+++ /dev/null
@@ -1,48 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-"use strict";
-
-const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
-
-Cu.import("resource://gre/modules/XPCOMUtils.jsm");
-Cu.import("resource://gre/modules/Services.jsm");
-Cu.import("resource://gre/modules/NetUtil.jsm");
-
-function mozProtocolHandler() {
- XPCOMUtils.defineLazyPreferenceGetter(this, "urlToLoad", "toolkit.mozprotocol.url",
- "http://thereisonlyxul.org/");
-}
-
-mozProtocolHandler.prototype = {
- scheme: "moz",
- defaultPort: -1,
- protocolFlags: Ci.nsIProtocolHandler.URI_DANGEROUS_TO_LOAD,
-
- newURI(spec, charset, base) {
- let uri = Cc["@mozilla.org/network/simple-uri;1"].createInstance(Ci.nsIURI);
- if (base) {
- uri.spec = base.resolve(spec);
- } else {
- uri.spec = spec;
- }
- return uri;
- },
-
- newChannel2(uri, loadInfo) {
- let realURL = NetUtil.newURI(this.urlToLoad);
- let channel = Services.io.newChannelFromURIWithLoadInfo(realURL, loadInfo);
- channel.loadFlags |= Ci.nsIChannel.LOAD_REPLACE;
- return channel;
- },
-
- newChannel(uri) {
- return this.newChannel2(uri, null);
- },
-
- classID: Components.ID("{47a45e5f-691e-4799-8686-14f8d3fc0f8c}"),
-
- QueryInterface: XPCOMUtils.generateQI([Ci.nsIProtocolHandler]),
-};
-
-this.NSGetFactory = XPCOMUtils.generateNSGetFactory([mozProtocolHandler]);
diff --git a/toolkit/components/mozprotocol/mozProtocolHandler.manifest b/toolkit/components/mozprotocol/mozProtocolHandler.manifest
deleted file mode 100644
index bbfdf780af..0000000000
--- a/toolkit/components/mozprotocol/mozProtocolHandler.manifest
+++ /dev/null
@@ -1,2 +0,0 @@
-component {47a45e5f-691e-4799-8686-14f8d3fc0f8c} mozProtocolHandler.js
-contract @mozilla.org/network/protocol;1?name=moz {47a45e5f-691e-4799-8686-14f8d3fc0f8c}
diff --git a/toolkit/mozapps/installer/packager.mk b/toolkit/mozapps/installer/packager.mk
index 6cae156580..99bb0de911 100644
--- a/toolkit/mozapps/installer/packager.mk
+++ b/toolkit/mozapps/installer/packager.mk
@@ -62,19 +62,6 @@ ifdef MOZ_PACKAGE_JSSHELL
$(RM) $(PKG_JSSHELL)
$(MAKE_JSSHELL)
endif # MOZ_PACKAGE_JSSHELL
-ifdef MOZ_ARTIFACT_BUILD_SYMBOLS
- @echo 'Packaging existing crashreporter symbols from artifact build...'
- $(NSINSTALL) -D $(DIST)/$(PKG_PATH)
- cd $(DIST)/crashreporter-symbols && \
- zip -r5D '../$(PKG_PATH)$(SYMBOL_ARCHIVE_BASENAME).zip' . -i '*.sym' -i '*.txt'
-endif # MOZ_ARTIFACT_BUILD_SYMBOLS
-ifdef MOZ_CODE_COVERAGE
- # Package code coverage gcno tree
- @echo 'Packaging code coverage data...'
- $(RM) $(CODE_COVERAGE_ARCHIVE_BASENAME).zip
- $(PYTHON) -mmozbuild.codecoverage.packager \
- --output-file='$(DIST)/$(PKG_PATH)$(CODE_COVERAGE_ARCHIVE_BASENAME).zip'
-endif
ifeq (Darwin, $(OS_ARCH))
ifdef MOZ_ASAN
@echo "Rewriting ASan runtime dylib paths for all binaries in $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH) ..."
diff --git a/toolkit/mozapps/installer/upload-files.mk b/toolkit/mozapps/installer/upload-files.mk
index 1bbccecb2f..3f0d1f7061 100644
--- a/toolkit/mozapps/installer/upload-files.mk
+++ b/toolkit/mozapps/installer/upload-files.mk
@@ -446,11 +446,6 @@ UPLOAD_FILES= \
$(call QUOTED_WILDCARD,$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip) \
$(if $(UPLOAD_EXTRA_FILES), $(foreach f, $(UPLOAD_EXTRA_FILES), $(wildcard $(DIST)/$(f))))
-ifdef MOZ_CODE_COVERAGE
- UPLOAD_FILES += \
- $(call QUOTED_WILDCARD,$(DIST)/$(PKG_PATH)$(CODE_COVERAGE_ARCHIVE_BASENAME).zip)
-endif
-
ifdef UNIFY_DIST
UNIFY_ARCH := $(notdir $(patsubst %/,%,$(dir $(UNIFY_DIST))))
UPLOAD_FILES += \
diff --git a/toolkit/toolkit.mozbuild b/toolkit/toolkit.mozbuild
index b8f30d64a9..bb5cac7cba 100644
--- a/toolkit/toolkit.mozbuild
+++ b/toolkit/toolkit.mozbuild
@@ -13,8 +13,6 @@ if CONFIG['MOZ_MAILNEWS']:
DIRS += [
# Depends on NSS and NSPR
'/security/certverifier',
- # Depends on certverifier
- '/security/apps',
]
# MAR support at all times.