-rw-r--r--  Makefile.in | 40
-rw-r--r--  build.gradle | 129
-rw-r--r--  build/docs/cppeclipse.rst | 54
-rw-r--r--  build/docs/index.rst | 8
-rw-r--r--  build/docs/supported-configurations.rst | 46
-rw-r--r--  build/docs/visualstudio.rst | 100
-rw-r--r--  build/mach_bootstrap.py | 1
-rw-r--r--  build/moz.build | 7
-rw-r--r--  caps/tests/mochitest/browser_checkloaduri.js | 3
-rw-r--r--  config/baseconfig.mk | 2
-rw-r--r--  config/faster/rules.mk | 110
-rw-r--r--  gradle.properties | 2
-rw-r--r--  gradle/wrapper/gradle-wrapper.jar | bin 53638 -> 0 bytes
-rw-r--r--  gradle/wrapper/gradle-wrapper.properties | 7
-rwxr-xr-x  gradlew | 160
-rw-r--r--  media/libtheora/AUTHORS | 14
-rw-r--r--  media/libtheora/LICENSE | 8
-rw-r--r--  media/libtheora/README.md (renamed from media/libtheora/README) | 110
-rw-r--r--  media/libtheora/README_MCP | 7
-rw-r--r--  media/libtheora/README_MOZILLA | 5
-rw-r--r--  media/libtheora/bug468275-r18219.patch | 22
-rw-r--r--  media/libtheora/bug625773-r17780.patch | 23
-rw-r--r--  media/libtheora/bug752139-r18031.patch | 53
-rw-r--r--  media/libtheora/bug752668-r18268.patch | 28
-rw-r--r--  media/libtheora/bug920992.patch | 103
-rw-r--r--  media/libtheora/include/theora/codec.h | 77
-rw-r--r--  media/libtheora/include/theora/theora.h | 112
-rw-r--r--  media/libtheora/include/theora/theoradec.h | 19
-rw-r--r--  media/libtheora/include/theora/theoraenc.h | 540
-rw-r--r--  media/libtheora/lib/apiwrapper.c | 166
-rw-r--r--  media/libtheora/lib/apiwrapper.h | 54
-rwxr-xr-x[-rw-r--r--]  media/libtheora/lib/arm/arm2gnu.pl | 55
-rw-r--r--  media/libtheora/lib/arm/armbits.s | 8
-rw-r--r--  media/libtheora/lib/arm/armcpu.c | 40
-rw-r--r--  media/libtheora/lib/arm/armfrag.s | 11
-rw-r--r--  media/libtheora/lib/arm/armidct.s | 177
-rw-r--r--  media/libtheora/lib/arm/armloop.s | 8
-rw-r--r--  media/libtheora/lib/arm/armopts.s | 2
-rw-r--r--  media/libtheora/lib/bitpack.c | 2
-rw-r--r--  media/libtheora/lib/config.h | 41
-rw-r--r--  media/libtheora/lib/dct.h | 2
-rw-r--r--  media/libtheora/lib/decapiwrapper.c | 193
-rw-r--r--  media/libtheora/lib/decinfo.c | 36
-rw-r--r--  media/libtheora/lib/decint.h | 3
-rw-r--r--  media/libtheora/lib/decode.c | 1349
-rw-r--r--  media/libtheora/lib/dequant.c | 2
-rw-r--r--  media/libtheora/lib/dequant.h | 2
-rw-r--r--  media/libtheora/lib/fragment.c | 2
-rw-r--r--  media/libtheora/lib/huffdec.c | 2
-rw-r--r--  media/libtheora/lib/huffdec.h | 2
-rw-r--r--  media/libtheora/lib/huffman.h | 4
-rw-r--r--  media/libtheora/lib/idct.c | 13
-rw-r--r--  media/libtheora/lib/info.c | 10
-rw-r--r--  media/libtheora/lib/internal.c | 4
-rw-r--r--  media/libtheora/lib/internal.h | 2
-rw-r--r--  media/libtheora/lib/ocintrin.h | 2
-rw-r--r--  media/libtheora/lib/quant.c | 2
-rw-r--r--  media/libtheora/lib/quant.h | 2
-rw-r--r--  media/libtheora/lib/state.c | 19
-rw-r--r--  media/libtheora/lib/x86/mmxfrag.c | 4
-rw-r--r--  media/libtheora/lib/x86/mmxidct.c | 42
-rw-r--r--  media/libtheora/lib/x86/mmxstate.c | 2
-rw-r--r--  media/libtheora/lib/x86/sse2idct.c | 44
-rw-r--r--  media/libtheora/lib/x86/x86cpu.c | 2
-rw-r--r--  media/libtheora/lib/x86/x86cpu.h | 2
-rw-r--r--  media/libtheora/lib/x86/x86int.h | 2
-rw-r--r--  media/libtheora/lib/x86/x86state.c | 4
-rw-r--r--  media/libtheora/lib/x86_vc/mmxfrag.c | 2
-rw-r--r--  media/libtheora/lib/x86_vc/mmxidct.c | 45
-rw-r--r--  media/libtheora/lib/x86_vc/mmxstate.c | 2
-rw-r--r--  media/libtheora/lib/x86_vc/x86cpu.c | 2
-rw-r--r--  media/libtheora/lib/x86_vc/x86cpu.h | 2
-rw-r--r--  media/libtheora/lib/x86_vc/x86int.h | 2
-rw-r--r--  media/libtheora/lib/x86_vc/x86state.c | 2
-rw-r--r--  media/libtheora/moz.build | 52
-rwxr-xr-x[-rw-r--r--]  media/libtheora/update.sh | 14
-rw-r--r--  moz.configure | 77
-rw-r--r--  old-configure.in | 1
-rw-r--r--  python/moz.build | 1
-rw-r--r--  python/mozbuild/mozbuild/backend/__init__.py | 11
-rw-r--r--  python/mozbuild/mozbuild/backend/base.py | 55
-rw-r--r--  python/mozbuild/mozbuild/backend/common.py | 36
-rw-r--r--  python/mozbuild/mozbuild/backend/configenvironment.py | 4
-rw-r--r--  python/mozbuild/mozbuild/backend/cpp_eclipse.py | 685
-rw-r--r--  python/mozbuild/mozbuild/backend/fastermake.py | 165
-rw-r--r--  python/mozbuild/mozbuild/backend/mach_commands.py | 123
-rw-r--r--  python/mozbuild/mozbuild/backend/visualstudio.py | 582
-rw-r--r--  python/mozbuild/mozbuild/codecoverage/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/codecoverage/chrome_map.py | 105
-rw-r--r--  python/mozbuild/mozbuild/codecoverage/packager.py | 43
-rw-r--r--  python/mozbuild/mozbuild/config_status.py | 17
-rw-r--r--  python/mozbuild/mozbuild/mach_commands.py | 150
-rw-r--r--  python/mozbuild/mozbuild/test/backend/test_visualstudio.py | 64
-rw-r--r--  security/apps/AppSignatureVerification.cpp | 1559
-rw-r--r--  security/apps/AppTrustDomain.cpp | 388
-rw-r--r--  security/apps/AppTrustDomain.h | 89
-rw-r--r--  security/apps/addons-public.crt | bin 1637 -> 0 bytes
-rw-r--r--  security/apps/addons-stage.crt | bin 1895 -> 0 bytes
-rw-r--r--  security/apps/gen_cert_header.py | 45
-rw-r--r--  security/apps/marketplace-dev-public.crt | bin 964 -> 0 bytes
-rw-r--r--  security/apps/marketplace-dev-reviewers.crt | bin 1012 -> 0 bytes
-rw-r--r--  security/apps/marketplace-prod-public.crt | bin 1177 -> 0 bytes
-rw-r--r--  security/apps/marketplace-prod-reviewers.crt | bin 1171 -> 0 bytes
-rw-r--r--  security/apps/marketplace-stage.crt | bin 1157 -> 0 bytes
-rw-r--r--  security/apps/moz.build | 43
-rw-r--r--  security/apps/privileged-package-root.der | bin 930 -> 0 bytes
-rw-r--r--  security/apps/trusted-app-public.der | 0
-rw-r--r--  security/manager/ssl/nsIX509CertDB.idl | 73
-rw-r--r--  settings.gradle | 1
-rw-r--r--  toolkit/components/moz.build | 1
-rw-r--r--  toolkit/components/mozprotocol/moz.build | 9
-rw-r--r--  toolkit/components/mozprotocol/mozProtocolHandler.js | 48
-rw-r--r--  toolkit/components/mozprotocol/mozProtocolHandler.manifest | 2
-rw-r--r--  toolkit/mozapps/installer/packager.mk | 13
-rw-r--r--  toolkit/mozapps/installer/upload-files.mk | 5
-rw-r--r--  toolkit/toolkit.mozbuild | 2
116 files changed, 1248 insertions, 7379 deletions
diff --git a/Makefile.in b/Makefile.in
index 6c23273884..aec100ea0d 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -137,16 +137,10 @@ default:: $(BUILD_BACKEND_FILES)
endif
install_manifests := \
- $(addprefix dist/,branding idl include public private sdk xpi-stage) \
+ $(addprefix dist/,branding bin idl include public private sdk xpi-stage) \
_tests \
$(NULL)
-# Skip the dist/bin install manifest when using the hybrid
-# FasterMake/RecursiveMake backend. This is a hack until bug 1241744 moves
-# xpidl handling to FasterMake in that case, mechanically making the dist/bin
-# install manifest non-existent (non-existent manifests being skipped)
-ifeq (,$(filter FasterMake+RecursiveMake,$(BUILD_BACKENDS)))
-install_manifests += dist/bin
-endif
+
install_manifest_depends = \
CLOBBER \
$(configure_dir)/configure \
@@ -166,27 +160,6 @@ endif
.PHONY: install-manifests
install-manifests: $(addprefix install-,$(install_manifests))
-# If we're using the hybrid FasterMake/RecursiveMake backend, we want
-# to recurse in the faster/ directory in parallel of install manifests.
-# But dist/idl needs to happen before (cf. dependencies in
-# config/faster/rules.mk)
-ifneq (,$(filter FasterMake+RecursiveMake,$(BUILD_BACKENDS)))
-install-manifests: faster
-.PHONY: faster
-faster: install-dist/idl
- $(MAKE) -C faster FASTER_RECURSIVE_MAKE=1
-endif
-
-.PHONY: tup
-tup:
- $(call BUILDSTATUS,TIERS make tup)
- $(call BUILDSTATUS,TIER_START make)
- $(MAKE) install-manifests buildid.h source-repo.h
- $(call BUILDSTATUS,TIER_FINISH make)
- $(call BUILDSTATUS,TIER_START tup)
- @$(TUP) $(if $(findstring s,$(filter-out --%,$(MAKEFLAGS))),,--verbose)
- $(call BUILDSTATUS,TIER_FINISH tup)
-
# process_install_manifest needs to be invoked with --no-remove when building
# js as standalone because automated builds are building nspr separately and
# that would remove the resulting files.
@@ -198,17 +171,8 @@ endif
.PHONY: $(addprefix install-,$(subst /,_,$(install_manifests)))
$(addprefix install-,$(install_manifests)): install-%: $(install_manifest_depends)
-ifneq (,$(filter FasterMake+RecursiveMake,$(BUILD_BACKENDS)))
- @# If we're using the hybrid FasterMake/RecursiveMake backend, we want
- @# to ensure the FasterMake end doesn't have install manifests for the
- @# same directory, because that would blow up
- $(if $(wildcard _build_manifests/install/$(subst /,_,$*)),$(if $(wildcard faster/install_$(subst /,_,$*)*),$(error FasterMake and RecursiveMake ends of the hybrid build system want to handle $*)))
-endif
$(addprefix $(call py_action,process_install_manifest,$(if $(NO_REMOVE),--no-remove )$*) ,$(wildcard _build_manifests/install/$(subst /,_,$*)))
-# Dummy wrapper rule to allow the faster backend to piggy back
-$(addprefix install-,$(subst /,_,$(filter dist/%,$(install_manifests)))): install-dist_%: install-dist/% ;
-
.PHONY: install-tests
install-tests: install-test-files
diff --git a/build.gradle b/build.gradle
deleted file mode 100644
index d31d07cdcd..0000000000
--- a/build.gradle
+++ /dev/null
@@ -1,129 +0,0 @@
-import java.util.regex.Pattern
-
-allprojects {
- // Expose the per-object-directory configuration to all projects.
- ext {
- mozconfig = gradle.mozconfig
- topsrcdir = gradle.mozconfig.topsrcdir
- topobjdir = gradle.mozconfig.topobjdir
- }
-
- repositories {
- if (gradle.mozconfig.substs.GRADLE_MAVEN_REPOSITORY) {
- maven {
- url gradle.mozconfig.substs.GRADLE_MAVEN_REPOSITORY
- }
- }
- }
-}
-
-buildDir "${topobjdir}/gradle/build"
-
-buildscript {
- repositories {
- if (gradle.mozconfig.substs.GRADLE_MAVEN_REPOSITORY) {
- maven {
- url gradle.mozconfig.substs.GRADLE_MAVEN_REPOSITORY
- }
- }
- // For android-sdk-manager SNAPSHOT releases.
- maven {
- url "file://${gradle.mozconfig.topsrcdir}/mobile/android/gradle/m2repo"
- }
- }
-
- dependencies {
- classpath 'com.android.tools.build:gradle:2.1.3'
- classpath('com.stanfy.spoon:spoon-gradle-plugin:1.0.4') {
- // Without these, we get errors linting.
- exclude module: 'guava'
- }
- // Provided in tree.
- classpath 'com.jakewharton.sdkmanager:gradle-plugin:1.5.0-SNAPSHOT'
- }
-}
-
-task generateCodeAndResources(type:Exec) {
- workingDir "${topobjdir}"
-
- commandLine mozconfig.substs.GMAKE
- args '-C'
- args "${topobjdir}/mobile/android/base"
- args 'gradle-targets'
-
- // Only show the output if something went wrong.
- ignoreExitValue = true
- standardOutput = new ByteArrayOutputStream()
- errorOutput = standardOutput
- doLast {
- if (execResult.exitValue != 0) {
- throw new GradleException("Process '${commandLine}' finished with non-zero exit value ${execResult.exitValue}:\n\n${standardOutput.toString()}")
- }
- }
-}
-
-// Skip unit test for all build variants, unless if it was specifically requested by user.
-// The enabled property for the unit test tasks is reset based on the command line task names just before the task execution.
-// I bet there is a easier/cleaner way to do this, but this gets the job done for now.
-Pattern pattern = Pattern.compile('.*test(.+UnitTest)?.*')
-boolean startTasksIncludeTest = gradle.startParameter.taskNames.any {
- taskName ->
- taskName.matches(pattern)
-}
-gradle.taskGraph.beforeTask {
- Task task ->
- if (task.name.matches(pattern)) {
- task.enabled = startTasksIncludeTest
- }
-}
-
-afterEvaluate {
- subprojects {
- if (!hasProperty('android')) {
- return
- }
- android.applicationVariants.all {
- preBuild.dependsOn rootProject.generateCodeAndResources
- }
- android.libraryVariants.all {
- preBuild.dependsOn rootProject.generateCodeAndResources
- }
- }
-}
-
-apply plugin: 'idea'
-
-idea {
- project {
- languageLevel = '1.7'
- }
-
- module {
- // Object directories take a huge amount of time for IntelliJ to index.
- // Exclude them. Convention is that object directories start with obj.
- // IntelliJ is clever and will not exclude the parts of the object
- // directory that are referenced, if there are any. In practice,
- // indexing the entirety of the tree is taking too long, so exclude all
- // but mobile/.
- def topsrcdirURI = file(topsrcdir).toURI()
- excludeDirs += files(file(topsrcdir)
- .listFiles({it.isDirectory()} as FileFilter)
- .collect({topsrcdirURI.relativize(it.toURI()).toString()}) // Relative paths.
- .findAll({!it.equals('mobile/')}))
-
- // If topobjdir is below topsrcdir, hide only some portions of that tree.
- def topobjdirURI = file(topobjdir).toURI()
- if (!topsrcdirURI.relativize(topobjdirURI).isAbsolute()) {
- excludeDirs -= file(topobjdir)
- excludeDirs += files(file(topobjdir).listFiles())
- excludeDirs -= file("${topobjdir}/gradle")
- }
-
- if (!mozconfig.substs.MOZ_INSTALL_TRACKING) {
- excludeDirs += file("${topsrcdir}/mobile/android/thirdparty/com/adjust")
- }
- }
-}
-
-task wrapper(type: Wrapper) {
-}
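
The build.gradle removed above skips unit-test tasks unless a test task was explicitly named on the command line, by matching task names against the pattern `.*test(.+UnitTest)?.*`. A minimal Python sketch of that filter, with hypothetical task names, in case the Groovy block is unfamiliar:

    import re

    # Same pattern the removed build.gradle used to recognise (unit) test tasks.
    TEST_TASK = re.compile(r'.*test(.+UnitTest)?.*')

    def enabled_tasks(requested, all_tasks):
        """Disable test tasks unless the user explicitly requested one.

        `requested` mirrors gradle.startParameter.taskNames; `all_tasks`
        stands in for the task graph (the names used below are hypothetical).
        """
        tests_requested = any(TEST_TASK.match(t) for t in requested)
        return {t: (tests_requested if TEST_TASK.match(t) else True)
                for t in all_tasks}

    tasks = ['assembleDebug', 'testDebugUnitTest', 'lint']
    print(enabled_tasks(['assembleDebug'], tasks))      # test task disabled
    print(enabled_tasks(['testDebugUnitTest'], tasks))  # test task enabled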
diff --git a/build/docs/cppeclipse.rst b/build/docs/cppeclipse.rst
deleted file mode 100644
index 3596a2f9a6..0000000000
--- a/build/docs/cppeclipse.rst
+++ /dev/null
@@ -1,54 +0,0 @@
-.. _build_cppeclipse:
-
-=====================
-Cpp Eclipse Projects
-=====================
-
-For additional information on using Eclipse CDT see
-`the MDN page
-<https://developer.mozilla.org/en-US/docs/Eclipse_CDT>`_.
-
-The build system contains alpha support for generating C++ Eclipse
-project files to aid with development.
-
-Please report bugs to bugzilla and make them depend on bug 973770.
-
-To generate a C++ Eclipse project files, you'll need to have a fully
-built tree::
-
- mach build
-
-Then, simply generate the Eclipse build backend::
-
- mach build-backend -b CppEclipse
-
-If all goes well, the path to the generated workspace should be
-printed.
-
-To use the generated Eclipse project files, you'll need to
-have a Eclipse CDT 8.3 (We plan to follow the latest Eclipse release)
-`Eclipse CDT plugin
-<https://www.eclipse.org/cdt/>`_
-installed. You can then import all the projects into Eclipse using
-*File > Import ... > General > Existing Projects into Workspace*
--only- if you have not ran the background indexer.
-
-Updating Project Files
-======================
-
-As you pull and update the source tree, your C++ Eclipse files may
-fall out of sync with the build configuration. The tree should still
-build fine from within Eclipse, but source files may be missing and in
-rare circumstances Eclipse's index may not have the proper build
-configuration.
-
-To account for this, you'll want to periodically regenerate the
-Eclipse project files. You can do this by running ``mach build
-&& mach build-backend -b CppEclipse`` from the
-command line.
-
-Currently, regeneration rewrites the original project files. **If
-you've made any customizations to the projects, they will likely get
-overwritten.** We would like to improve this user experience in the
-future.
-
diff --git a/build/docs/index.rst b/build/docs/index.rst
index 75000aff33..fa1434b852 100644
--- a/build/docs/index.rst
+++ b/build/docs/index.rst
@@ -28,14 +28,6 @@ Important Concepts
locales
rust
-integrated development environment (IDE)
-========================================
-.. toctree::
- :maxdepth: 1
-
- cppeclipse
- visualstudio
-
mozbuild
========
diff --git a/build/docs/supported-configurations.rst b/build/docs/supported-configurations.rst
index cc2c1ea728..cfd1c98264 100644
--- a/build/docs/supported-configurations.rst
+++ b/build/docs/supported-configurations.rst
@@ -5,41 +5,44 @@ Supported Configurations
========================
This page attempts to document supported build configurations.
+For more up-to-date information please go to http://developer.palemoon.org/
Windows
=======
-We support building on Windows XP and newer operating systems using
-Visual Studio 2010 and newer.
+We support building on Windows 7 and newer operating systems using
+Visual Studio 2015 U3.
-The following are not fully supported by Mozilla (but may work):
+The following are not fully supported (but may work):
-* Building without the latest *MozillaBuild* Windows development
- environment
+* Building with a *MozillaBuild* Windows development
+ environment not mentioned on the developer documentation site.
* Building with Mingw or any other non-Visual Studio toolchain.
OS X
====
-
-We support building on OS X 10.6 and newer with the OS X 10.6 SDK.
+(This section needs updating)
+We support building on OS X 10.8 and newer with the OS X 10.8 SDK.
The tree should build with the following OS X releases and SDK versions:
-* 10.6 Snow Leopard
-* 10.7 Lion
* 10.8 Mountain Lion
* 10.9 Mavericks
-
-The tree requires building with Clang 3.3 and newer. This corresponds to
-version of 4.2 of Apple's Clang that ships with Xcode. This corresponds
-to Xcode 4.6 and newer. Xcode 4.6 only runs on OS X 10.7.4 and newer.
-So, OS X 10.6 users will need to install a non-Apple toolchain. Running
-``mach bootstrap`` should install an appropriate toolchain from Homebrew
-or MacPorts automatically.
-
-The tree should build with GCC 4.4 and newer on OS X. However, this
+* 10.10 Yosemite
+* 10.11 El Capitan
+* 10.12 Sierra
+* 10.13 High Sierra
+* 10.14 Mojave
+* 10.15 Catalina
+* 11 Big Sur (Including Apple ARM SoC)
+
+The tree requires building with Apple's Clang 4.2 that ships with Xcode.
+This corresponds to Xcode 4.6 and newer. Xcode 4.6 only runs on OS X 10.7.4
+and newer.
+
+The tree should build with GCC 7.1 and newer on OS X. However, this
build configuration isn't as widely used (and differs from what Mozilla
-uses to produce OS X builds), so it's recommended to stick with Clang.
+uses to produce OS X builds).
Linux
=====
@@ -47,9 +50,6 @@ Linux
Linux 2.6 and later kernels are supported.
Most distributions are supported as long as the proper package
-dependencies are in place. Running ``mach bootstrap`` should install
-packages for popular Linux distributions. ``configure`` will typically
+dependencies are in place. ``configure`` will typically
detect missing dependencies and inform you how to disable features to
work around unsatisfied dependencies.
-
-Clang 3.3 or GCC 4.4 is required to build the tree.
diff --git a/build/docs/visualstudio.rst b/build/docs/visualstudio.rst
deleted file mode 100644
index 3fbf28e94b..0000000000
--- a/build/docs/visualstudio.rst
+++ /dev/null
@@ -1,100 +0,0 @@
-.. _build_visualstudio:
-
-======================
-Visual Studio Projects
-======================
-
-The build system contains alpha support for generating Visual Studio
-project files to aid with development.
-
-To generate Visual Studio project files, you'll need to have a configured tree::
-
- mach configure
-
-(If you have built recently, your tree is already configured.)
-
-Then, simply generate the Visual Studio build backend::
-
- mach build-backend -b VisualStudio
-
-If all goes well, the path to the generated Solution (``.sln``) file should be
-printed. You should be able to open that solution with Visual Studio 2010 or
-newer.
-
-Currently, output is hard-coded to the Visual Studio 2010 format. If you open
-the solution in a newer Visual Studio release, you will be prompted to upgrade
-projects. Simply click through the wizard to do that.
-
-Structure of Solution
-=====================
-
-The Visual Studio solution consists of hundreds of projects spanning thousands
-of files. To help with organization, the solution is divided into the following
-trees/folders:
-
-Build Targets
- This folder contains common build targets. The *full* project is used to
- perform a full build. The *binaries* project is used to build just binaries.
- The *visual-studio* project can be built to regenerate the Visual Studio
- project files.
-
- Performing the *clean* action on any of these targets will clean the
- *entire* build output.
-
-Binaries
- This folder contains common binaries that can be executed from within
- Visual Studio. If you are building the Firefox desktop application,
- the *firefox* project will launch firefox.exe. You probably want one of
- these set to your startup project.
-
-Libraries
- This folder contains entries for each static library that is produced as
- part of the build. These roughly correspond to each directory in the tree
- containing C/C++. e.g. code from ``dom/base`` will be contained in the
- ``dom_base`` project.
-
- These projects don't do anything when built. If you build a project here,
- the *binaries* build target project is built.
-
-Updating Project Files
-======================
-
-As you pull and update the source tree, your Visual Studio files may fall out
-of sync with the build configuration. The tree should still build fine from
-within Visual Studio. But source files may be missing and IntelliSense may not
-have the proper build configuration.
-
-To account for this, you'll want to periodically regenerate the Visual Studio
-project files. You can do this within Visual Studio by building the
-``Build Targets :: visual-studio`` project or by running
-``mach build-backend -b VisualStudio`` from the command line.
-
-Currently, regeneration rewrites the original project files. **If you've made
-any customizations to the solution or projects, they will likely get
-overwritten.** We would like to improve this user experience in the
-future.
-
-Moving Project Files Around
-===========================
-
-The produced Visual Studio solution and project files should be portable.
-If you want to move them to a non-default directory, they should continue
-to work from wherever they are. If they don't, please file a bug.
-
-Invoking mach through Visual Studio
-===================================
-
-It's possible to build the tree via Visual Studio. There is some light magic
-involved here.
-
-Alongside the Visual Studio project files is a batch script named ``mach.bat``.
-This batch script sets the environment variables present in your *MozillaBuild*
-development environment at the time of Visual Studio project generation
-and invokes *mach* inside an msys shell with the arguments specified to the
-batch script. This script essentially allows you to invoke mach commands
-inside the MozillaBuild environment without having to load MozillaBuild.
-
-While projects currently only utilize the ``mach build`` command, the batch
-script does not limit it's use: any mach command can be invoked. Developers
-may abuse this fact to add custom projects and commands that invoke other
-mach commands.
diff --git a/build/mach_bootstrap.py b/build/mach_bootstrap.py
index 0443eedda2..22eaa3425a 100644
--- a/build/mach_bootstrap.py
+++ b/build/mach_bootstrap.py
@@ -100,7 +100,6 @@ MACH_MODULES = [
'python/mach/mach/commands/settings.py',
'python/compare-locales/mach_commands.py',
'python/mozbuild/mozbuild/mach_commands.py',
- 'python/mozbuild/mozbuild/backend/mach_commands.py',
'python/mozbuild/mozbuild/compilation/codecomplete.py',
'python/mozbuild/mozbuild/frontend/mach_commands.py',
'services/common/tests/mach_commands.py',
diff --git a/build/moz.build b/build/moz.build
index 27f681369d..6567dd944c 100644
--- a/build/moz.build
+++ b/build/moz.build
@@ -97,10 +97,3 @@ if CONFIG['MOZ_VALGRIND']:
'valgrind/i386-redhat-linux-gnu.sup',
'valgrind/x86_64-redhat-linux-gnu.sup',
]
-
-if CONFIG['MOZ_ARTIFACT_BUILDS']:
- # Ensure a pre-built interfaces.xpt installed to the objdir by the artifact
- # code is included by the top-level chrome.manifest.
- EXTRA_COMPONENTS += [
- 'prebuilt-interfaces.manifest',
- ]
diff --git a/caps/tests/mochitest/browser_checkloaduri.js b/caps/tests/mochitest/browser_checkloaduri.js
index 24a97c1c45..1fac5c97c7 100644
--- a/caps/tests/mochitest/browser_checkloaduri.js
+++ b/caps/tests/mochitest/browser_checkloaduri.js
@@ -58,7 +58,6 @@ const URLs = new Map([
["data:text/html,Hi", true, false, true],
["view-source:data:text/html,Hi", false, false, true],
["javascript:alert('hi')", true, false, true],
- ["moz://a", false, false, true],
["about:test-chrome-privs", false, false, true],
["about:test-unknown-unlinkable", false, false, true],
["about:test-content-unlinkable", false, false, true],
@@ -80,7 +79,6 @@ const URLs = new Map([
["data:text/html,Hi", true, false, true],
["view-source:data:text/html,Hi", false, false, true],
["javascript:alert('hi')", true, false, true],
- ["moz://a", false, false, true],
["about:test-chrome-privs", false, false, true],
["about:test-unknown-unlinkable", false, false, true],
["about:test-content-unlinkable", false, false, true],
@@ -102,7 +100,6 @@ const URLs = new Map([
["data:text/html,Hi", true, false, true],
["view-source:data:text/html,Hi", true, false, true],
["javascript:alert('hi')", true, false, true],
- ["moz://a", false, false, true],
["about:test-chrome-privs", false, false, true],
["about:test-unknown-unlinkable", false, false, true],
["about:test-content-unlinkable", false, false, true],
diff --git a/config/baseconfig.mk b/config/baseconfig.mk
index 47a12b16e4..a125466abf 100644
--- a/config/baseconfig.mk
+++ b/config/baseconfig.mk
@@ -45,7 +45,7 @@ endif # WINNT
ifndef INCLUDED_AUTOCONF_MK
default::
else
-TIERS := $(if $(MOZ_ARTIFACT_BUILDS),artifact )pre-export export $(if $(COMPILE_ENVIRONMENT),compile )misc libs tools
+TIERS := pre-export export $(if $(COMPILE_ENVIRONMENT),compile )misc libs tools
endif
# These defines are used to support the twin-topsrcdir model for comm-central.
diff --git a/config/faster/rules.mk b/config/faster/rules.mk
deleted file mode 100644
index 9d7b322fa2..0000000000
--- a/config/faster/rules.mk
+++ /dev/null
@@ -1,110 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# /!\ Please make sure to update the following comment when you touch this
-# file. Thank you /!\
-
-# The traditional Mozilla build system relied on going through the entire
-# build tree a number of times with different targets, and many of the
-# things happening at each step required other things happening in previous
-# steps without any documentation of those dependencies.
-#
-# This new build system tries to start afresh by establishing what files or
-# operations are needed for the build, and applying the necessary rules to
-# have those in place, relying on make dependencies to get them going.
-#
-# As of writing, only building non-compiled parts of Firefox is supported
-# here (a few other things are also left out). This is a starting point, with
-# the intent to grow this build system to make it more complete.
-#
-# This file contains rules and dependencies to get things working. The intent
-# is for a Makefile to define some dependencies and variables, and include
-# this file. What needs to be defined there, and ends up being generated by
-# python/mozbuild/mozbuild/backend/fastermake.py is the following:
-# - TOPSRCDIR/TOPOBJDIR, respectively the top source directory and the top
-# object directory
-# - PYTHON, the path to the python executable
-# - ACDEFINES, which contains a set of -Dvar=name to be used during
-# preprocessing
-# - INSTALL_MANIFESTS, which defines the list of base directories handled
-# by install manifests, see further below
-#
-# A convention used between this file and the Makefile including it is that
-# global Make variables names are uppercase, while "local" Make variables
-# applied to specific targets are lowercase.
-
-# Targets to be triggered for a default build
-default: $(addprefix install-,$(INSTALL_MANIFESTS))
-
-ifndef NO_XPIDL
-# Targets from the recursive make backend to be built for a default build
-default: $(TOPOBJDIR)/config/makefiles/xpidl/xpidl
-endif
-
-# Mac builds require to copy things in dist/bin/*.app
-# TODO: remove the MOZ_WIDGET_TOOLKIT and MOZ_BUILD_APP variables from
-# faster/Makefile and python/mozbuild/mozbuild/test/backend/test_build.py
-# when this is not required anymore.
-# We however don't need to do this when using the hybrid
-# FasterMake/RecursiveMake backend (FASTER_RECURSIVE_MAKE is set when
-# recursing from the RecursiveMake backend)
-ifndef FASTER_RECURSIVE_MAKE
-ifeq (cocoa,$(MOZ_WIDGET_TOOLKIT))
-default:
- $(MAKE) -C $(TOPOBJDIR)/$(MOZ_BUILD_APP)/app repackage
-endif
-endif
-
-.PHONY: FORCE
-
-# Extra define to trigger some workarounds. We should strive to limit the
-# use of those. As of writing the only ones are in
-# toolkit/content/buildconfig.html and browser/locales/jar.mn.
-ACDEFINES += -DBUILD_FASTER
-
-# Files under the faster/ sub-directory, however, are not meant to use the
-# fallback
-$(TOPOBJDIR)/faster/%: ;
-
-# Generic rule to fall back to the recursive make backend.
-# This needs to stay after other $(TOPOBJDIR)/* rules because GNU Make
-# <3.82 apply pattern rules in definition order, not stem length like
-# modern GNU Make.
-$(TOPOBJDIR)/%: FORCE
- $(MAKE) -C $(dir $@) $(notdir $@)
-
-# Install files using install manifests
-#
-# The list of base directories is given in INSTALL_MANIFESTS. The
-# corresponding install manifests are named correspondingly, with forward
-# slashes replaced with underscores, and prefixed with `install_`. That is,
-# the install manifest for `dist/bin` would be `install_dist_bin`.
-$(addprefix install-,$(INSTALL_MANIFESTS)): install-%: $(addprefix $(TOPOBJDIR)/,buildid.h source-repo.h)
- @# For now, force preprocessed files to be reprocessed every time.
- @# The overhead is not that big, and this avoids waiting for proper
- @# support for defines tracking in process_install_manifest.
- @touch install_$(subst /,_,$*)
- @# BOOKMARKS_INCLUDE_DIR is for bookmarks.html only.
- $(PYTHON) -m mozbuild.action.process_install_manifest \
- --track install_$(subst /,_,$*).track \
- $(TOPOBJDIR)/$* \
- -DAB_CD=en-US \
- -DBOOKMARKS_INCLUDE_DIR=$(TOPSRCDIR)/browser/locales/en-US/profile \
- $(ACDEFINES) \
- install_$(subst /,_,$*)
-
-# ============================================================================
-# Below is a set of additional dependencies and variables used to build things
-# that are not supported by data in moz.build.
-
-# The xpidl target in config/makefiles/xpidl requires the install manifest for
-# dist/idl to have been processed. When using the hybrid
-# FasterMake/RecursiveMake backend, this dependency is handled in the top-level
-# Makefile.
-ifndef FASTER_RECURSIVE_MAKE
-$(TOPOBJDIR)/config/makefiles/xpidl/xpidl: $(TOPOBJDIR)/install-dist_idl
-endif
-# It also requires all the install manifests for dist/bin to have been processed
-# because it adds interfaces.manifest references with buildlist.py.
-$(TOPOBJDIR)/config/makefiles/xpidl/xpidl: $(addprefix install-,$(filter dist/bin%,$(INSTALL_MANIFESTS)))
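
The deleted rules.mk spells out the convention used throughout this patch: each base directory listed in INSTALL_MANIFESTS maps to an install manifest whose name has slashes replaced by underscores and an `install_` prefix (so `dist/bin` becomes `install_dist_bin`), the same `$(subst /,_,$*)` substitution the surviving Makefile.in rules rely on. A small Python sketch of that mapping, using a hypothetical manifest list:

    def manifest_name(base_dir):
        # dist/bin -> install_dist_bin, mirroring $(subst /,_,$*) in the make rules.
        return 'install_' + base_dir.replace('/', '_')

    def make_target(base_dir):
        # The top-level Makefile.in drives these as install-<base dir> targets.
        return 'install-' + base_dir

    INSTALL_MANIFESTS = ['dist/bin', 'dist/idl', '_tests']  # hypothetical list

    for base in INSTALL_MANIFESTS:
        print(make_target(base), '->', manifest_name(base))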
diff --git a/gradle.properties b/gradle.properties
deleted file mode 100644
index 40ca366b2f..0000000000
--- a/gradle.properties
+++ /dev/null
@@ -1,2 +0,0 @@
-org.gradle.parallel=true
-org.gradle.daemon=true
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
deleted file mode 100644
index e8c6bf7bb4..0000000000
--- a/gradle/wrapper/gradle-wrapper.jar
+++ /dev/null
Binary files differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
deleted file mode 100644
index 8964ccd444..0000000000
--- a/gradle/wrapper/gradle-wrapper.properties
+++ /dev/null
@@ -1,7 +0,0 @@
-#Fri Sep 16 15:41:50 PDT 2016
-distributionBase=GRADLE_USER_HOME
-distributionPath=wrapper/dists
-zipStoreBase=GRADLE_USER_HOME
-zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-2.14.1-all.zip
-distributionSha256Sum=88a910cdf2e03ebbb5fe90f7ecf534fc9ac22e12112dc9a2fee810c598a76091
diff --git a/gradlew b/gradlew
deleted file mode 100755
index 97fac783e1..0000000000
--- a/gradlew
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env bash
-
-##############################################################################
-##
-## Gradle start up script for UN*X
-##
-##############################################################################
-
-# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-DEFAULT_JVM_OPTS=""
-
-APP_NAME="Gradle"
-APP_BASE_NAME=`basename "$0"`
-
-# Use the maximum available, or set MAX_FD != -1 to use that value.
-MAX_FD="maximum"
-
-warn ( ) {
- echo "$*"
-}
-
-die ( ) {
- echo
- echo "$*"
- echo
- exit 1
-}
-
-# OS specific support (must be 'true' or 'false').
-cygwin=false
-msys=false
-darwin=false
-case "`uname`" in
- CYGWIN* )
- cygwin=true
- ;;
- Darwin* )
- darwin=true
- ;;
- MINGW* )
- msys=true
- ;;
-esac
-
-# Attempt to set APP_HOME
-# Resolve links: $0 may be a link
-PRG="$0"
-# Need this for relative symlinks.
-while [ -h "$PRG" ] ; do
- ls=`ls -ld "$PRG"`
- link=`expr "$ls" : '.*-> \(.*\)$'`
- if expr "$link" : '/.*' > /dev/null; then
- PRG="$link"
- else
- PRG=`dirname "$PRG"`"/$link"
- fi
-done
-SAVED="`pwd`"
-cd "`dirname \"$PRG\"`/" >&-
-APP_HOME="`pwd -P`"
-cd "$SAVED" >&-
-
-CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
-
-# Determine the Java command to use to start the JVM.
-if [ -n "$JAVA_HOME" ] ; then
- if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
- # IBM's JDK on AIX uses strange locations for the executables
- JAVACMD="$JAVA_HOME/jre/sh/java"
- else
- JAVACMD="$JAVA_HOME/bin/java"
- fi
- if [ ! -x "$JAVACMD" ] ; then
- die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
- fi
-else
- JAVACMD="java"
- which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
-fi
-
-# Increase the maximum file descriptors if we can.
-if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
- MAX_FD_LIMIT=`ulimit -H -n`
- if [ $? -eq 0 ] ; then
- if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
- MAX_FD="$MAX_FD_LIMIT"
- fi
- ulimit -n $MAX_FD
- if [ $? -ne 0 ] ; then
- warn "Could not set maximum file descriptor limit: $MAX_FD"
- fi
- else
- warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
- fi
-fi
-
-# For Darwin, add options to specify how the application appears in the dock
-if $darwin; then
- GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
-fi
-
-# For Cygwin, switch paths to Windows format before running java
-if $cygwin ; then
- APP_HOME=`cygpath --path --mixed "$APP_HOME"`
- CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
- JAVACMD=`cygpath --unix "$JAVACMD"`
-
- # We build the pattern for arguments to be converted via cygpath
- ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
- SEP=""
- for dir in $ROOTDIRSRAW ; do
- ROOTDIRS="$ROOTDIRS$SEP$dir"
- SEP="|"
- done
- OURCYGPATTERN="(^($ROOTDIRS))"
- # Add a user-defined pattern to the cygpath arguments
- if [ "$GRADLE_CYGPATTERN" != "" ] ; then
- OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
- fi
- # Now convert the arguments - kludge to limit ourselves to /bin/sh
- i=0
- for arg in "$@" ; do
- CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
- CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
-
- if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
- eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
- else
- eval `echo args$i`="\"$arg\""
- fi
- i=$((i+1))
- done
- case $i in
- (0) set -- ;;
- (1) set -- "$args0" ;;
- (2) set -- "$args0" "$args1" ;;
- (3) set -- "$args0" "$args1" "$args2" ;;
- (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
- (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
- (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
- (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
- (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
- (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
- esac
-fi
-
-# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
-function splitJvmOpts() {
- JVM_OPTS=("$@")
-}
-eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
-JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
-
-exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
diff --git a/media/libtheora/AUTHORS b/media/libtheora/AUTHORS
index 077d4e588c..8a4e529b5a 100644
--- a/media/libtheora/AUTHORS
+++ b/media/libtheora/AUTHORS
@@ -1,24 +1,25 @@
Monty <monty@xiph.org>
- Original VP3 port
-Ralph Giles
Timothy B. Terriberry
+Gregory Maxwell
+Ralph Giles
Monty
- Ongoing development
-
+
Dan B. Miller
- Pre alpha3 development
-
+
Rudolf Marek
Wim Tayman
Dan Lenski
Nils Pipenbrinck
Monty
- MMX optimized functions
-
+
David Schleef
- C64x port
-
+
Aaron Colwell
Thomas Vander Stichele
Jan Gerber
@@ -26,8 +27,9 @@ Conrad Parker
Cristian Adam
Sebastian Pippin
Simon Hosie
+Brad Smith
- Bug fixes, enhancements, build systems.
-
+
Mauricio Piacentini
- Original win32 projects and example ports
- VP3->Theora transcoder
diff --git a/media/libtheora/LICENSE b/media/libtheora/LICENSE
index 5e5ec08469..97e8431790 100644
--- a/media/libtheora/LICENSE
+++ b/media/libtheora/LICENSE
@@ -4,13 +4,13 @@ In addition to and irrespective of the copyright license associated
with this software, On2 Technologies, Inc. makes the following statement
regarding technology used in this software:
- On2 represents and warrants that it shall not assert any rights
+ On2 represents and warrants that it shall not assert any rights
relating to infringement of On2's registered patents, nor initiate
any litigation asserting such rights, against any person who, or
- entity which utilizes the On2 VP3 Codec Software, including any
- use, distribution, and sale of said Software; which make changes,
+ entity which utilizes the On2 VP3 Codec Software, including any
+ use, distribution, and sale of said Software; which make changes,
modifications, and improvements in said Software; and to use,
- distribute, and sell said changes as well as applications for other
+ distribute, and sell said changes as well as applications for other
fields of use.
This reference implementation is originally derived from the On2 VP3
diff --git a/media/libtheora/README b/media/libtheora/README.md
index 7663f9ff8c..b136f7114c 100644
--- a/media/libtheora/README
+++ b/media/libtheora/README.md
@@ -1,13 +1,11 @@
--------------------------------------------------------------------------
- The Xiph.org Foundation's libtheora 1.2
--------------------------------------------------------------------------
+# Xiph.org Foundation's libtheora
-*** What is Theora?
+### What is Theora?
-Theora is Xiph.Org's first publicly released video codec, intended
+Theora was Xiph.Org's first publicly released video codec, intended
for use within the Foundation's Ogg multimedia streaming system.
Theora is derived directly from On2's VP3 codec, adds new features
-while allow it a longer useful lifetime as an competitive codec.
+while allowing it a longer useful lifetime.
The 1.0 release decoder supported all the new features, but the
encoder is nearly identical to the VP3 code.
@@ -20,46 +18,43 @@ The 1.2 release features significant additional improvements in
compression and performance. Files produced by newer encoders can
be decoded by earlier releases.
-*** Where is Theora?
+### Where is Theora?
-Theora's main site is www.theora.org. Theora and related libraries
-can be gotten from www.theora.org or the main Xiph.Org site at
-www.xiph.org. Development source is kept in an open subversion
-repository, see http://theora.org/svn/ for instructions.
+Theora's main site is https://www.theora.org. Releases of Theora
+and related libraries can be found on the
+[download page](https://www.theora.org/downloads/) or the
+[main Xiph.Org site](https://xiph.org/downloads/).
--------------------------------------------------------------------------
-Getting started with the code
--------------------------------------------------------------------------
+Development source is kept at https://gitlab.xiph.org/xiph/theora.
-*** What do I need to build the source?
+## Getting started with the code
-Requirements summary:
-
- For libtheora:
-
- libogg 1.1 or newer.
+### What do I need to build the source?
- For example encoder:
+Requirements summary:
- as above,
+For libtheora:
- libvorbis and libvorbisenc 1.0.1 or newer.
- (libvorbis 1.3.1 or newer for 5.1 audio)
+* libogg 1.1 or newer.
- For creating a source distribution package:
+For example encoder:
- as above,
+* as above,
+* libvorbis and libvorbisenc 1.0.1 or newer.
+ (libvorbis 1.3.1 or newer for 5.1 audio)
- Doxygen to build the API documentation,
- pdflatex and fig2dev to build the format specification
- (transfig package in Ubuntu).
+For creating a source distribution package:
- For the player only:
+* as above,
+* Doxygen to build the API documentation,
+* pdflatex and fig2dev to build the format specification
+ (transfig package in Ubuntu).
- as above,
+For the player only:
- SDL (Simple Direct media Layer) libraries and headers,
- OSS audio driver and development headers.
+* as above,
+* SDL (Simple Direct media Layer) libraries and headers,
+* OSS audio driver and development headers.
The provided build system is the GNU automake/autoconf system, and
the main library, libtheora, should already build smoothly on any
@@ -72,11 +67,11 @@ Project files for Apple XCode are included in the macosx directory.
There is also a more limited scons build.
-*** How do I use the sample encoder?
+### How do I use the sample encoder?
The sample encoder takes raw video in YUV4MPEG2 format, as used by
lavtools, mjpeg-tools and other packages. The encoder expects audio,
-if any, in a separate wave WAV file. Try 'encoder_example -h' for a
+if any, in a separate wave WAV file. Try 'encoder_example -h' for a
complete list of options.
An easy way to get raw video and audio files is to use MPlayer as an
@@ -85,7 +80,7 @@ wav file named audiodump.wav and a YUV video file in the correct
format for encoder_example as stream.yuv. Be careful when exporting
video alone; MPlayer may drop frames to 'keep up' with the audio
timer. The example encoder can't properly synchronize input audio and
-video file that aren't in sync to begin with.
+video file that aren't in sync to begin with.
The encoder will also take video or audio on stdin if '-' is specified
as the input file name.
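
As the README text above says, encoder_example wants YUV4MPEG2 video plus audio in a separate WAV, and MPlayer can be used to dump `audiodump.wav` and `stream.yuv`. A hedged sketch of gluing the two steps together from Python; the MPlayer output options and the encoder's `-o` flag and argument order are assumptions from memory, so check `encoder_example -h` before relying on them:

    import subprocess

    # Dump audio (audiodump.wav) and YUV4MPEG2 video (stream.yuv) with MPlayer,
    # then feed both to the sample encoder. File names follow the README;
    # the exact flags are assumptions, not taken from this patch.
    subprocess.run(['mplayer', '-ao', 'pcm', '-vo', 'yuv4mpeg', 'input.avi'],
                   check=True)
    subprocess.run(['encoder_example', '-o', 'output.ogv',
                    'audiodump.wav', 'stream.yuv'],
                   check=True)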
@@ -93,64 +88,61 @@ as the input file name.
There is also a 'png2theora' example which accepts a set of image
files in that format.
-*** How do I use the sample player?
+### How do I use the sample player?
The sample player takes an Ogg file on standard in; the file may be
audio alone, video alone or video with audio.
-*** What other tools are available?
+### What other tools are available?
-The programs in the examples directory are intended as tutorial source
-for developers using the library. As such they sacrifice features and
-robustness in the interests of comprehension and should not be
+The programs in the examples directory are intended as tutorial source
+for developers using the library. As such they sacrifice features and
+robustness in the interests of comprehension and should not be
considered serious applications.
If you're wanting to just use theora, consider the programs linked
-from http://www.theora.org/. There is playback support in a number
+from https://www.theora.org/. There is playback support in a number
of common free players, and plugins for major media frameworks.
Jan Gerber's ffmpeg2theora is an excellent encoding front end.
--------------------------------------------------------------------------
-Troubleshooting the build process
--------------------------------------------------------------------------
+## Troubleshooting the build process
-*** Compile error, such as:
+### Compile error, such as:
-encoder_internal.h:664: parse error before `ogg_uint16_t'
+encoder_internal.h:664: parse error before `ogg_uint16_t`
-This means you have version of libogg prior to 1.1. A *complete* new Ogg
+This means you have version of libogg prior to 1.1. A *complete* new Ogg
install, libs and headers is needed.
Also be sure that there aren't multiple copies of Ogg installed in
/usr and /usr/local; an older one might be first on the search path
for libs and headers.
-*** Link error, such as:
+### Link error, such as:
-undefined reference to `oggpackB_stream'
+undefined reference to `oggpackB_stream`
See above; you need libogg 1.1 or later.
-*** Link error, such as:
+### Link error, such as:
-undefined reference to `vorbis_granule_time'
+undefined reference to `vorbis_granule_time`
You need libvorbis and libvorbisenc from the 1.0.1 release or later.
-*** Link error, such as:
+### Link error, such as:
-/usr/lib/libSDL.a(SDL_esdaudio.lo): In function `ESD_OpenAudio':
-SDL_esdaudio.lo(.text+0x25d): undefined reference to `esd_play_stream'
+/usr/lib/libSDL.a(SDL_esdaudio.lo): In function `ESD_OpenAudio`:
+SDL_esdaudio.lo(.text+0x25d): undefined reference to `esd_play_stream`
Be sure to use an SDL that's built to work with OSS. If you use an
SDL that is also built with ESD and/or ALSA support, it will try to
suck in all those extra libraries at link time too. That will only
work if the extra libraries are also installed.
-*** Link warning, such as:
+### Link warning, such as:
-libtool: link: warning: library `/usr/lib/libogg.la' was moved.
-libtool: link: warning: library `/usr/lib/libogg.la' was moved.
+libtool: link: warning: library `/usr/lib/libogg.la` was moved.
+libtool: link: warning: library `/usr/lib/libogg.la` was moved.
Re-run theora/autogen.sh after an Ogg or Vorbis rebuild/reinstall
-
diff --git a/media/libtheora/README_MCP b/media/libtheora/README_MCP
new file mode 100644
index 0000000000..d0b00002ef
--- /dev/null
+++ b/media/libtheora/README_MCP
@@ -0,0 +1,7 @@
+The source from this directory was copied from the theora
+git repository using the update.sh script. The only changes
+made were those applied by update.sh and the addition/update of
+Makefile.in files for the UXP build system.
+
+The upstream release used was https://gitlab.xiph.org/xiph/theora
+The git revision used was 7180717276af1ebc7da15c83162d6c5d6203aabf.
diff --git a/media/libtheora/README_MOZILLA b/media/libtheora/README_MOZILLA
deleted file mode 100644
index d48dbfa6f6..0000000000
--- a/media/libtheora/README_MOZILLA
+++ /dev/null
@@ -1,5 +0,0 @@
-The source from this directory was copied from the theora subversion trunk
-using the update.sh script. The changes made were those applied by update.sh,
-the addition/update of Makefile.in files for the Mozilla build system.
-
-The subversion revision used was r17578.
diff --git a/media/libtheora/bug468275-r18219.patch b/media/libtheora/bug468275-r18219.patch
deleted file mode 100644
index 7b64b4195a..0000000000
--- a/media/libtheora/bug468275-r18219.patch
+++ /dev/null
@@ -1,22 +0,0 @@
-diff --git a/media/libtheora/lib/state.c b/media/libtheora/lib/state.c
---- a/media/libtheora/lib/state.c
-+++ b/media/libtheora/lib/state.c
-@@ -583,17 +583,17 @@ static int oc_state_ref_bufs_init(oc_the
- ref_frame_sz<yplane_sz||ref_frame_data_sz/_nrefs!=ref_frame_sz){
- return TH_EIMPL;
- }
- ref_frame_data=oc_aligned_malloc(ref_frame_data_sz,16);
- frag_buf_offs=_state->frag_buf_offs=
- _ogg_malloc(_state->nfrags*sizeof(*frag_buf_offs));
- if(ref_frame_data==NULL||frag_buf_offs==NULL){
- _ogg_free(frag_buf_offs);
-- _ogg_free(ref_frame_data);
-+ oc_aligned_free(ref_frame_data);
- return TH_EFAULT;
- }
- /*Set up the width, height and stride for the image buffers.*/
- _state->ref_frame_bufs[0][0].width=info->frame_width;
- _state->ref_frame_bufs[0][0].height=info->frame_height;
- _state->ref_frame_bufs[0][0].stride=yhstride;
- _state->ref_frame_bufs[0][1].width=_state->ref_frame_bufs[0][2].width=
- info->frame_width>>hdec;
diff --git a/media/libtheora/bug625773-r17780.patch b/media/libtheora/bug625773-r17780.patch
deleted file mode 100644
index af112560b0..0000000000
--- a/media/libtheora/bug625773-r17780.patch
+++ /dev/null
@@ -1,23 +0,0 @@
-diff --git a/media/libtheora/lib/decode.c b/media/libtheora/lib/decode.c
---- a/media/libtheora/lib/decode.c
-+++ b/media/libtheora/lib/decode.c
-@@ -2072,18 +2072,17 @@ static void oc_dec_init_dummy_frame(th_d
- sizeof(_dec->pp_frame_buf[0])*3);
- info=&_dec->state.info;
- yhstride=abs(_dec->state.ref_ystride[0]);
- yheight=info->frame_height+2*OC_UMV_PADDING;
- chstride=abs(_dec->state.ref_ystride[1]);
- cheight=yheight>>!(info->pixel_fmt&2);
- yplane_sz=yhstride*(size_t)yheight+16;
- cplane_sz=chstride*(size_t)cheight;
-- yoffset=_dec->state.ref_ystride[0]*(yheight-1)-
-- (OC_UMV_PADDING+OC_UMV_PADDING*(ptrdiff_t)yhstride);
-+ yoffset=yhstride*(ptrdiff_t)(yheight-OC_UMV_PADDING-1)+OC_UMV_PADDING;
- memset(_dec->state.ref_frame_data[0]-yoffset,0x80,yplane_sz+2*cplane_sz);
- }
-
- int th_decode_packetin(th_dec_ctx *_dec,const ogg_packet *_op,
- ogg_int64_t *_granpos){
- int ret;
- if(_dec==NULL||_op==NULL)return TH_EFAULT;
- /*A completely empty packet indicates a dropped frame and is treated exactly
diff --git a/media/libtheora/bug752139-r18031.patch b/media/libtheora/bug752139-r18031.patch
deleted file mode 100644
index 62ea87632b..0000000000
--- a/media/libtheora/bug752139-r18031.patch
+++ /dev/null
@@ -1,53 +0,0 @@
-diff --git a/media/libtheora/lib/arm/arm2gnu.pl b/media/libtheora/lib/arm/arm2gnu.pl
---- a/media/libtheora/lib/arm/arm2gnu.pl
-+++ b/media/libtheora/lib/arm/arm2gnu.pl
-@@ -1,11 +1,14 @@
- #!/usr/bin/perl
-
- my $bigend; # little/big endian
-+my $nxstack;
-+
-+$nxstack = 0;
-
- eval 'exec /usr/local/bin/perl -S $0 ${1+"$@"}'
- if $running_under_some_shell;
-
- while ($ARGV[0] =~ /^-/) {
- $_ = shift;
- last if /^--/;
- if (/^-n/) {
-@@ -77,16 +80,19 @@ while (<>) {
- if ( /\bMEND\b/ ) {
- s/\bMEND\b/.endm/;
- $n=0;
- }
-
- # ".rdata" doesn't work in 'as' version 2.13.2, as it is ".rodata" there.
- #
- if ( /\bAREA\b/ ) {
-+ if ( /CODE/ ) {
-+ $nxstack = 1;
-+ }
- s/^(.+)CODE(.+)READONLY(.*)/ .text/;
- s/^(.+)DATA(.+)READONLY(.*)/ .section .rdata\n .align 2/;
- s/^(.+)\|\|\.data\|\|(.+)/ .data\n .align 2/;
- s/^(.+)\|\|\.bss\|\|(.+)/ .bss/;
- }
-
- s/\|\|\.constdata\$(\d+)\|\|/.L_CONST$1/; # ||.constdata$3||
- s/\|\|\.bss\$(\d+)\|\|/.L_BSS$1/; # ||.bss$2||
-@@ -263,9 +269,13 @@ while (<>) {
- } continue {
- printf ("%s", $_) if $printit;
- if ($addPadding != 0)
- {
- printf (" mov r0,r0\n");
- $addPadding = 0;
- }
- }
--
-+#If we had a code section, mark that this object doesn't need an executable
-+# stack.
-+if ($nxstack) {
-+ printf (" .section\t.note.GNU-stack,\"\",\%\%progbits\n");
-+}
diff --git a/media/libtheora/bug752668-r18268.patch b/media/libtheora/bug752668-r18268.patch
deleted file mode 100644
index 6931e07faf..0000000000
--- a/media/libtheora/bug752668-r18268.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-diff -r 1092c1a3ac50 media/libtheora/lib/decode.c
---- a/media/libtheora/lib/decode.c Tue May 08 08:53:50 2012 +0200
-+++ b/media/libtheora/lib/decode.c Tue May 08 01:00:59 2012 -0700
-@@ -395,20 +395,20 @@ static int oc_dec_init(oc_dec_ctx *_dec,
- _dec->state.dequant_table_data[qi][pli][qti];
- }
- oc_dequant_tables_init(_dec->state.dequant_tables,_dec->pp_dc_scale,
- &_setup->qinfo);
- for(qi=0;qi<64;qi++){
- int qsum;
- qsum=0;
- for(qti=0;qti<2;qti++)for(pli=0;pli<3;pli++){
-- qsum+=_dec->state.dequant_tables[qti][pli][qi][12]+
-- _dec->state.dequant_tables[qti][pli][qi][17]+
-- _dec->state.dequant_tables[qti][pli][qi][18]+
-- _dec->state.dequant_tables[qti][pli][qi][24]<<(pli==0);
-+ qsum+=_dec->state.dequant_tables[qi][pli][qti][12]+
-+ _dec->state.dequant_tables[qi][pli][qti][17]+
-+ _dec->state.dequant_tables[qi][pli][qti][18]+
-+ _dec->state.dequant_tables[qi][pli][qti][24]<<(pli==0);
- }
- _dec->pp_sharp_mod[qi]=-(qsum>>11);
- }
- memcpy(_dec->state.loop_filter_limits,_setup->qinfo.loop_filter_limits,
- sizeof(_dec->state.loop_filter_limits));
- oc_dec_accel_init(_dec);
- _dec->pp_level=OC_PP_LEVEL_DISABLED;
- _dec->dc_qis=NULL;
diff --git a/media/libtheora/bug920992.patch b/media/libtheora/bug920992.patch
deleted file mode 100644
index eada85a49b..0000000000
--- a/media/libtheora/bug920992.patch
+++ /dev/null
@@ -1,103 +0,0 @@
-diff --git a/media/libtheora/lib/arm/armbits.s b/media/libtheora/lib/arm/armbits.s
---- a/media/libtheora/lib/arm/armbits.s
-+++ b/media/libtheora/lib/arm/armbits.s
-@@ -12,16 +12,22 @@
- ;
- ; function:
- ; last mod: $Id: armbits.s 17481 2010-10-03 22:49:42Z tterribe $
- ;
- ;********************************************************************
-
- AREA |.text|, CODE, READONLY
-
-+ ; Explicitly specifying alignment here because some versions of
-+ ; gas don't align code correctly. See
-+ ; http://lists.gnu.org/archive/html/bug-binutils/2011-06/msg00199.html
-+ ; https://bugzilla.mozilla.org/show_bug.cgi?id=920992
-+ ALIGN
-+
- EXPORT oc_pack_read_arm
- EXPORT oc_pack_read1_arm
- EXPORT oc_huff_token_decode_arm
-
- oc_pack_read1_arm PROC
- ; r0 = oc_pack_buf *_b
- ADD r12,r0,#8
- LDMIA r12,{r2,r3} ; r2 = window
-diff --git a/media/libtheora/lib/arm/armfrag.s b/media/libtheora/lib/arm/armfrag.s
---- a/media/libtheora/lib/arm/armfrag.s
-+++ b/media/libtheora/lib/arm/armfrag.s
-@@ -11,16 +11,22 @@
- ;********************************************************************
- ; Original implementation:
- ; Copyright (C) 2009 Robin Watts for Pinknoise Productions Ltd
- ; last mod: $Id: armfrag.s 17481 2010-10-03 22:49:42Z tterribe $
- ;********************************************************************
-
- AREA |.text|, CODE, READONLY
-
-+ ; Explicitly specifying alignment here because some versions of
-+ ; gas don't align code correctly. See
-+ ; http://lists.gnu.org/archive/html/bug-binutils/2011-06/msg00199.html
-+ ; https://bugzilla.mozilla.org/show_bug.cgi?id=920992
-+ ALIGN
-+
- GET armopts.s
-
- ; Vanilla ARM v4 versions
- EXPORT oc_frag_copy_list_arm
- EXPORT oc_frag_recon_intra_arm
- EXPORT oc_frag_recon_inter_arm
- EXPORT oc_frag_recon_inter2_arm
-
-diff --git a/media/libtheora/lib/arm/armidct.s b/media/libtheora/lib/arm/armidct.s
---- a/media/libtheora/lib/arm/armidct.s
-+++ b/media/libtheora/lib/arm/armidct.s
-@@ -11,16 +11,22 @@
- ;********************************************************************
- ; Original implementation:
- ; Copyright (C) 2009 Robin Watts for Pinknoise Productions Ltd
- ; last mod: $Id: armidct.s 17481 2010-10-03 22:49:42Z tterribe $
- ;********************************************************************
-
- AREA |.text|, CODE, READONLY
-
-+ ; Explicitly specifying alignment here because some versions of
-+ ; gas don't align code correctly. See
-+ ; http://lists.gnu.org/archive/html/bug-binutils/2011-06/msg00199.html
-+ ; https://bugzilla.mozilla.org/show_bug.cgi?id=920992
-+ ALIGN
-+
- GET armopts.s
-
- EXPORT oc_idct8x8_1_arm
- EXPORT oc_idct8x8_arm
-
- oc_idct8x8_1_arm PROC
- ; r0 = ogg_int16_t *_y
- ; r1 = ogg_uint16_t _dc
-diff --git a/media/libtheora/lib/arm/armloop.s b/media/libtheora/lib/arm/armloop.s
---- a/media/libtheora/lib/arm/armloop.s
-+++ b/media/libtheora/lib/arm/armloop.s
-@@ -11,16 +11,22 @@
- ;********************************************************************
- ; Original implementation:
- ; Copyright (C) 2009 Robin Watts for Pinknoise Productions Ltd
- ; last mod: $Id: armloop.s 17481 2010-10-03 22:49:42Z tterribe $
- ;********************************************************************
-
- AREA |.text|, CODE, READONLY
-
-+ ; Explicitly specifying alignment here because some versions of
-+ ; gas don't align code correctly. See
-+ ; http://lists.gnu.org/archive/html/bug-binutils/2011-06/msg00199.html
-+ ; https://bugzilla.mozilla.org/show_bug.cgi?id=920992
-+ ALIGN
-+
- GET armopts.s
-
- EXPORT oc_loop_filter_frag_rows_arm
-
- ; Which bit this is depends on the order of packing within a bitfield.
- ; Hopefully that doesn't change among any of the relevant compilers.
- OC_FRAG_CODED_FLAG * 1
diff --git a/media/libtheora/include/theora/codec.h b/media/libtheora/include/theora/codec.h
index 5c2669630c..29b8602325 100644
--- a/media/libtheora/include/theora/codec.h
+++ b/media/libtheora/include/theora/codec.h
@@ -16,11 +16,12 @@
********************************************************************/
/**\mainpage
- *
+ *
* \section intro Introduction
*
- * This is the documentation for <tt>libtheora</tt> C API.
- * The current reference
+ * This is the documentation for the <tt>libtheora</tt> C API.
+ *
+ * The \c libtheora package is the current reference
* implementation for <a href="http://www.theora.org/">Theora</a>, a free,
* patent-unencumbered video codec.
* Theora is derived from On2's VP3 codec with additional features and
@@ -30,29 +31,31 @@
* <a href="http://www.theora.org/doc/Theora.pdf">the Theora
* specification</a>.
*
- * \subsection Organization
+ * \section Organization
*
- * The functions documented here are actually subdivided into three
+ * The functions documented here are divided between two
* separate libraries:
- * - <tt>libtheoraenc</tt> contains the encoder interface,
+ * - \c libtheoraenc contains the encoder interface,
* described in \ref encfuncs.
- * - <tt>libtheoradec</tt> contains the decoder interface and
- * routines shared with the encoder.
- * You must also link to this if you link to <tt>libtheoraenc</tt>.
- * The routines in this library are described in \ref decfuncs and
- * \ref basefuncs.
- * - <tt>libtheora</tt> contains the \ref oldfuncs.
+ * - \c libtheoradec contains the decoder interface,
+ * described in \ref decfuncs, \n
+ * and additional \ref basefuncs.
+ *
+ * New code should link to \c libtheoradec. If using encoder
+ * features, it must also link to \c libtheoraenc.
*
- * New code should link to <tt>libtheoradec</tt> and, if using encoder
- * features, <tt>libtheoraenc</tt>. Together these two export both
- * the standard and the legacy API, so this is all that is needed by
- * any code. The older <tt>libtheora</tt> library is provided just for
- * compatibility with older build configurations.
+ * During initial development, prior to the 1.0 release,
+ * \c libtheora exported a different \ref oldfuncs which
+ * combined both encode and decode functions.
+ * In general, legacy API symbols can be identified
+ * by their \c theora_ or \c OC_ namespace prefixes.
+ * The current API uses \c th_ or \c TH_ instead.
*
- * In general the recommended 1.x API symbols can be distinguished
- * by their <tt>th_</tt> or <tt>TH_</tt> namespace prefix.
- * The older, legacy API uses <tt>theora_</tt> or <tt>OC_</tt>
- * prefixes instead.
+ * While deprecated, \c libtheoraenc and \c libtheoradec
+ * together export the legacy API as well as the one documented above.
+ * Likewise, the legacy \c libtheora included with this package
+ * exports the new 1.x API. Older code and build scripts can therefore
+ * be updated independently to the current scheme.
*/
/**\file
@@ -168,7 +171,7 @@ typedef struct{
typedef th_img_plane th_ycbcr_buffer[3];
/**Theora bitstream information.
- * This contains the basic playback parameters for a stream, and corresponds to
+ * This contains the basic playback parameters for a stream, and corresponds to
* the initial 'info' header packet.
* To initialize an encoder, the application fills in this structure and
* passes it to th_encode_alloc().
@@ -317,7 +320,7 @@ typedef struct{
* In filling in this structure, th_decode_headerin() will null-terminate
* the user_comment strings for safety.
* However, the bitstream format itself treats them as 8-bit clean vectors,
- * possibly containing null characters, and so the length array should be
+ * possibly containing null characters, so the length array should be
* treated as their authoritative length.
*/
typedef struct th_comment{
@@ -448,7 +451,13 @@ typedef struct{
/**\defgroup basefuncs Functions Shared by Encode and Decode*/
/*@{*/
-/**\name Basic shared functions*/
+/**\name Basic shared functions
+ * These functions return information about the library itself,
+ * or provide high-level information about codec state
+ * and packet type.
+ *
+ * You must link to \c libtheoradec if you use any of the
+ * functions in this section.*/
/*@{*/
/**Retrieves a human-readable string to identify the library vendor and
* version.
@@ -510,7 +519,12 @@ extern int th_packet_iskeyframe(ogg_packet *_op);
/*@}*/
-/**\name Functions for manipulating header data*/
+/**\name Functions for manipulating header data
+ * These functions manipulate the #th_info and #th_comment structures
+ * which describe video parameters and key-value metadata, respectively.
+ *
+ * You must link to \c libtheoradec if you use any of the
+ * functions in this section.*/
/*@{*/
/**Initializes a th_info structure.
* This should be called on a freshly allocated #th_info structure before
@@ -537,7 +551,7 @@ extern void th_comment_init(th_comment *_tc);
* \param _tc The #th_comment struct to add the comment to.
* \param _comment Must be a null-terminated UTF-8 string containing the
* comment in "TAG=the value" form.*/
-extern void th_comment_add(th_comment *_tc, char *_comment);
+extern void th_comment_add(th_comment *_tc,const char *_comment);
/**Add a comment to an initialized #th_comment structure.
* \note Neither th_comment_add() nor th_comment_add_tag() support
* comments containing null values, although the bitstream format does
@@ -545,10 +559,11 @@ extern void th_comment_add(th_comment *_tc, char *_comment);
* To add such comments you will need to manipulate the #th_comment
* structure directly.
* \param _tc The #th_comment struct to add the comment to.
- * \param _tag A null-terminated string containing the tag associated with
+ * \param _tag A null-terminated string containing the tag associated with
* the comment.
* \param _val The corresponding value as a null-terminated string.*/
-extern void th_comment_add_tag(th_comment *_tc,char *_tag,char *_val);
+extern void th_comment_add_tag(th_comment *_tc,const char *_tag,
+ const char *_val);
/**Look up a comment value by its tag.
* \param _tc An initialized #th_comment structure.
* \param _tag The tag to look up.
@@ -564,15 +579,15 @@ extern void th_comment_add_tag(th_comment *_tc,char *_tag,char *_val);
* It should not be modified or freed by the application, and
* modifications to the structure may invalidate the pointer.
* \retval NULL If no matching tag is found.*/
-extern char *th_comment_query(th_comment *_tc,char *_tag,int _count);
+extern char *th_comment_query(th_comment *_tc,const char *_tag,int _count);
/**Look up the number of instances of a tag.
* Call this first when querying for a specific tag and then iterate over the
* number of instances with separate calls to th_comment_query() to
* retrieve all the values for that tag in order.
* \param _tc An initialized #th_comment structure.
* \param _tag The tag to look up.
- * \return The number on instances of this particular tag.*/
-extern int th_comment_query_count(th_comment *_tc,char *_tag);
+ * \return The number of instances of this particular tag.*/
+extern int th_comment_query_count(th_comment *_tc,const char *_tag);
/**Clears a #th_comment structure.
* This should be called on a #th_comment structure after it is no longer
* needed.
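
The hunks above constify the th_comment setter and query prototypes. A minimal sketch of how those calls fit together, assuming only the declarations in codec.h and a link against libtheoradec (the tag names and values are illustrative):

  #include <stdio.h>
  #include <theora/codec.h>

  int main(void){
    th_comment tc;
    int n, i;
    /* A th_comment must be initialized before use and cleared afterwards. */
    th_comment_init(&tc);
    /* With the const-qualified prototypes, string literals can be passed
       directly, without casts or -Wwrite-strings warnings. */
    th_comment_add_tag(&tc, "TITLE", "Example clip");
    th_comment_add_tag(&tc, "ENCODER", "example-encoder 0.1");
    /* Count the instances of a tag, then iterate over them in order. */
    n = th_comment_query_count(&tc, "TITLE");
    for(i = 0; i < n; i++){
      printf("TITLE[%d] = %s\n", i, th_comment_query(&tc, "TITLE", i));
    }
    th_comment_clear(&tc);
    return 0;
  }
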
diff --git a/media/libtheora/include/theora/theora.h b/media/libtheora/include/theora/theora.h
index 575a889b71..a729a76890 100644
--- a/media/libtheora/include/theora/theora.h
+++ b/media/libtheora/include/theora/theora.h
@@ -34,41 +34,41 @@ extern "C"
*
* \section intro Introduction
*
- * This is the documentation for the libtheora legacy C API, declared in
+ * This is the documentation for the libtheora legacy C API, declared in
* the theora.h header, which describes the old interface used before
* the 1.0 release. This API was widely deployed for several years and
- * remains supported, but for new code we recommend the cleaner API
+ * remains supported, but for new code we recommend the cleaner API
* declared in theoradec.h and theoraenc.h.
*
* libtheora is the reference implementation for
* <a href="http://www.theora.org/">Theora</a>, a free video codec.
* Theora is derived from On2's VP3 codec with improved integration with
* Ogg multimedia formats by <a href="http://www.xiph.org/">Xiph.Org</a>.
- *
+ *
* \section overview Overview
*
- * This library will both decode and encode theora packets to/from raw YUV
+ * This library will both decode and encode theora packets to/from raw YUV
* frames. In either case, the packets will most likely either come from or
- * need to be embedded in an Ogg stream. Use
- * <a href="http://xiph.org/ogg/">libogg</a> or
+ * need to be embedded in an Ogg stream. Use
+ * <a href="http://xiph.org/ogg/">libogg</a> or
* <a href="http://www.annodex.net/software/liboggz/index.html">liboggz</a>
* to extract/package these packets.
*
* \section decoding Decoding Process
*
* Decoding can be separated into the following steps:
- * -# initialise theora_info and theora_comment structures using
+ * -# initialise theora_info and theora_comment structures using
* theora_info_init() and theora_comment_init():
\verbatim
theora_info info;
theora_comment comment;
-
+
theora_info_init(&info);
theora_comment_init(&comment);
\endverbatim
- * -# retrieve header packets from Ogg stream (there should be 3) and decode
- * into theora_info and theora_comment structures using
- * theora_decode_header(). See \ref identification for more information on
+ * -# retrieve header packets from Ogg stream (there should be 3) and decode
+ * into theora_info and theora_comment structures using
+ * theora_decode_header(). See \ref identification for more information on
* identifying which packets are theora packets.
\verbatim
int i;
@@ -79,14 +79,14 @@ extern "C"
}
\endverbatim
* -# initialise the decoder based on the information retrieved into the
- * theora_info struct by theora_decode_header(). You will need a
+ * theora_info struct by theora_decode_header(). You will need a
* theora_state struct.
\verbatim
theora_state state;
-
+
theora_decode_init(&state, &info);
\endverbatim
- * -# pass in packets and retrieve decoded frames! See the yuv_buffer
+ * -# pass in packets and retrieve decoded frames! See the yuv_buffer
* documentation for information on how to retrieve raw YUV data.
\verbatim
yuf_buffer buffer;
@@ -96,20 +96,20 @@ extern "C"
theora_decode_YUVout(&state, &buffer);
}
\endverbatim
- *
+ *
*
* \subsection identification Identifying Theora Packets
*
- * All streams inside an Ogg file have a unique serial_no attached to the
- * stream. Typically, you will want to
- * - retrieve the serial_no for each b_o_s (beginning of stream) page
- * encountered within the Ogg file;
- * - test the first (only) packet on that page to determine if it is a theora
+ * All streams inside an Ogg file have a unique serial_no attached to the
+ * stream. Typically, you will want to
+ * - retrieve the serial_no for each b_o_s (beginning of stream) page
+ * encountered within the Ogg file;
+ * - test the first (only) packet on that page to determine if it is a theora
* packet;
- * - once you have found a theora b_o_s page then use the retrieved serial_no
+ * - once you have found a theora b_o_s page then use the retrieved serial_no
* to identify future packets belonging to the same theora stream.
- *
- * Note that you \e cannot use theora_packet_isheader() to determine if a
+ *
+ * Note that you \e cannot use theora_packet_isheader() to determine if a
* packet is a theora packet or not, as this function does not perform any
* checking beyond whether a header bit is present. Instead, use the
* theora_decode_header() function and check the return value; or examine the
@@ -124,9 +124,9 @@ extern "C"
* A YUV buffer for passing uncompressed frames to and from the codec.
* This holds a Y'CbCr frame in planar format. The CbCr planes can be
* subsampled and have their own separate dimensions and row stride
- * offsets. Note that the strides may be negative in some
+ * offsets. Note that the strides may be negative in some
* configurations. For theora the width and height of the largest plane
- * must be a multiple of 16. The actual meaningful picture size and
+ * must be a multiple of 16. The actual meaningful picture size and
* offset are stored in the theora_info structure; frames returned by
* the decoder may need to be cropped for display.
*
@@ -135,8 +135,8 @@ extern "C"
* are ordered from left to right.
*
* During decode, the yuv_buffer struct is allocated by the user, but all
- * fields (including luma and chroma pointers) are filled by the library.
- * These pointers address library-internal memory and their contents should
+ * fields (including luma and chroma pointers) are filled by the library.
+ * These pointers address library-internal memory and their contents should
* not be modified.
*
* Conversely, during encode the user allocates the struct and fills out all
@@ -186,7 +186,7 @@ typedef enum {
* Theora bitstream info.
* Contains the basic playback parameters for a stream,
* corresponding to the initial 'info' header packet.
- *
+ *
* Encoded theora frames must be a multiple of 16 in width and height.
* To handle other frame sizes, a crop rectangle is specified in
* frame_height and frame_width, offset_x and * offset_y. The offset
@@ -198,10 +198,10 @@ typedef enum {
* fraction. Aspect ratio is also stored as a rational fraction, and
* refers to the aspect ratio of the frame pixels, not of the
* overall frame itself.
- *
+ *
* See <a href="http://svn.xiph.org/trunk/theora/examples/encoder_example.c">
* examples/encoder_example.c</a> for usage examples of the
- * other paramters and good default settings for the encoder parameters.
+ * other parameters and good default settings for the encoder parameters.
*/
typedef struct {
ogg_uint32_t width; /**< encoded frame width */
@@ -253,14 +253,14 @@ typedef struct{
} theora_state;
-/**
+/**
* Comment header metadata.
*
* This structure holds the in-stream metadata corresponding to
* the 'comment' header packet.
*
* Meta data is stored as a series of (tag, value) pairs, in
- * length-encoded string vectors. The first occurence of the
+ * length-encoded string vectors. The first occurrence of the
* '=' character delimits the tag and value. A particular tag
* may occur more than once. The character set encoding for
* the strings is always UTF-8, but the tag names are limited
@@ -285,7 +285,7 @@ typedef struct theora_comment{
/* \anchor decctlcodes_old
* These are the available request codes for theora_control()
* when called with a decoder instance.
- * By convention decoder control codes are odd, to distinguish
+ * By convention decoder control codes are odd, to distinguish
* them from \ref encctlcodes_old "encoder control codes" which
* are even.
*
@@ -306,7 +306,7 @@ typedef struct theora_comment{
#define TH_DECCTL_GET_PPLEVEL_MAX (1)
/**Set the post-processing level.
- * Sets the level of post-processing to use when decoding the
+ * Sets the level of post-processing to use when decoding the
* compressed stream. This must be a value between zero (off)
* and the maximum returned by TH_DECCTL_GET_PPLEVEL_MAX.
*/
@@ -345,9 +345,9 @@ typedef struct theora_comment{
* \param[in] buf #th_quant_info
* \retval OC_FAULT \a theora_state is <tt>NULL</tt>.
* \retval OC_EINVAL Encoding has already begun, the quantization parameters
- * are not acceptable to this version of the encoder,
- * \a buf is <tt>NULL</tt> and \a buf_sz is not zero,
- * or \a buf is non-<tt>NULL</tt> and \a buf_sz is
+ * are not acceptable to this version of the encoder,
+ * \a buf is <tt>NULL</tt> and \a buf_sz is not zero,
+ * or \a buf is non-<tt>NULL</tt> and \a buf_sz is
* not <tt>sizeof(#th_quant_info)</tt>.
* \retval OC_IMPL Not supported by this implementation.*/
#define TH_ENCCTL_SET_QUANT_PARAMS (2)
@@ -424,7 +424,7 @@ typedef struct theora_comment{
#define OC_NEWPACKET -25 /**< Packet is an (ignorable) unhandled extension */
#define OC_DUPFRAME 1 /**< Packet is a dropped frame */
-/**
+/**
* Retrieve a human-readable string to identify the encoder vendor and version.
* \returns A version string.
*/
@@ -462,7 +462,7 @@ extern int theora_encode_init(theora_state *th, theora_info *ti);
extern int theora_encode_YUVin(theora_state *t, yuv_buffer *yuv);
/**
- * Request the next packet of encoded video.
+ * Request the next packet of encoded video.
* The encoded data is placed in a user-provided ogg_packet structure.
* \param t A theora_state handle previously initialized for encoding.
* \param last_p whether this is the last packet the encoder should produce.
@@ -496,7 +496,11 @@ extern int theora_encode_header(theora_state *t, ogg_packet *op);
* \param op An ogg_packet structure to fill. libtheora will set all
* elements of this structure, including a pointer to the encoded
* comment data. The memory for the comment data is owned by
- * libtheora.
+ * the application, and must be freed by it using _ogg_free().
+ * On some systems (such as Windows when using dynamic linking), this
+ * may mean the free is executed in a different module from the
+ * malloc, which will crash; there is no way to free this memory on
+ * such systems.
* \retval 0 Success
*/
extern int theora_encode_comment(theora_comment *tc, ogg_packet *op);
@@ -581,8 +585,8 @@ extern int theora_decode_packetin(theora_state *th,ogg_packet *op);
* \param th A theora_state handle previously initialized for decoding.
* \param yuv A yuv_buffer in which libtheora should place the decoded data.
* Note that the buffer struct itself is allocated by the user, but
- * that the luma and chroma pointers will be filled in by the
- * library. Also note that these luma and chroma regions should be
+ * that the luma and chroma pointers will be filled in by the
+ * library. Also note that these luma and chroma regions should be
* considered read-only by the user.
* \retval 0 Success
*/
@@ -617,22 +621,22 @@ extern int theora_packet_iskeyframe(ogg_packet *op);
/**
* Report the granulepos shift radix
*
- * When embedded in Ogg, Theora uses a two-part granulepos,
+ * When embedded in Ogg, Theora uses a two-part granulepos,
* splitting the 64-bit field into two pieces. The more-significant
* section represents the frame count at the last keyframe,
* and the less-significant section represents the count of
* frames since the last keyframe. In this way the overall
* field is still non-decreasing with time, but usefully encodes
* a pointer to the last keyframe, which is necessary for
- * correctly restarting decode after a seek.
+ * correctly restarting decode after a seek.
*
* This function reports the number of bits used to represent
* the distance to the last keyframe, and thus how the granulepos
* field must be shifted or masked to obtain the two parts.
- *
+ *
* Since libtheora returns compressed data in an ogg_packet
* structure, this may be generally useful even if the Theora
- * packets are not being used in an Ogg container.
+ * packets are not being used in an Ogg container.
*
* \param ti A previously initialized theora_info struct
* \returns The bit shift dividing the two granulepos fields
@@ -644,7 +648,7 @@ int theora_granule_shift(theora_info *ti);
/**
* Convert a granulepos to an absolute frame index, starting at 0.
* The granulepos is interpreted in the context of a given theora_state handle.
- *
+ *
* Note that while the granulepos encodes the frame count (i.e. starting
* from 1) this call returns the frame index, starting from zero. Thus
* One can calculate the presentation time by multiplying the index by
@@ -670,9 +674,7 @@ extern ogg_int64_t theora_granule_frame(theora_state *th,ogg_int64_t granulepos)
* This is the "end time" for the frame, or the latest time it should
* be displayed.
* It is not the presentation time.
- * \retval -1. The given granulepos is undefined (i.e. negative), or
- * \retval -1. The function has been disabled because floating
- * point support is not available.
+ * \retval -1. The given granulepos is undefined (i.e. negative).
*/
extern double theora_granule_time(theora_state *th,ogg_int64_t granulepos);
@@ -699,7 +701,7 @@ extern void theora_clear(theora_state *t);
/**
* Initialize an allocated theora_comment structure
- * \param tc An allocated theora_comment structure
+ * \param tc An allocated theora_comment structure
**/
extern void theora_comment_init(theora_comment *tc);
@@ -720,7 +722,7 @@ extern void theora_comment_add(theora_comment *tc, char *comment);
/**
* Add a comment to an initialized theora_comment structure.
* \param tc A previously initialized theora comment structure
- * \param tag A null-terminated string containing the tag
+ * \param tag A null-terminated string containing the tag
* associated with the comment.
* \param value The corresponding value as a null-terminated string
*
@@ -752,9 +754,9 @@ extern char *theora_comment_query(theora_comment *tc, char *tag, int count);
* \param tc An initialized theora_comment structure
* \param tag The tag to look up
* \returns The number on instances of a particular tag.
- *
+ *
* Call this first when querying for a specific tag and then interate
- * over the number of instances with separate calls to
+ * over the number of instances with separate calls to
* theora_comment_query() to retrieve all instances in order.
**/
extern int theora_comment_query_count(theora_comment *tc, char *tag);
@@ -769,7 +771,7 @@ extern void theora_comment_clear(theora_comment *tc);
* This is used to provide advanced control the encoding process.
* \param th A #theora_state handle.
* \param req The control code to process.
- * See \ref encctlcodes_old "the list of available
+ * See \ref encctlcodes_old "the list of available
* control codes" for details.
* \param buf The parameters for this control code.
* \param buf_sz The size of the parameter buffer.*/
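
The granulepos discussion above is easier to follow with the arithmetic written out. A small sketch under the legacy API, where split_granulepos is a hypothetical helper (not part of libtheora) and ti is assumed to have been filled in by theora_decode_header():

  #include <theora/theora.h>

  /* Split a two-part Theora granulepos into the frame count at the last
     keyframe and the number of frames since it, using the bit shift
     reported by theora_granule_shift(). */
  static void split_granulepos(theora_info *ti, ogg_int64_t gp,
                               ogg_int64_t *keyframe, ogg_int64_t *delta){
    int shift = theora_granule_shift(ti);
    *keyframe = gp >> shift;
    *delta = gp - (*keyframe << shift);
  }

When a theora_state handle is available, theora_granule_frame() already folds the two parts back into an absolute frame index, so the manual split is mainly useful for seeking.
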
diff --git a/media/libtheora/include/theora/theoradec.h b/media/libtheora/include/theora/theoradec.h
index 6177af3216..77bef81909 100644
--- a/media/libtheora/include/theora/theoradec.h
+++ b/media/libtheora/include/theora/theoradec.h
@@ -92,13 +92,17 @@ extern "C" {
* <tt>sizeof(th_stripe_callback)</tt>.*/
#define TH_DECCTL_SET_STRIPE_CB (7)
-/**Enables telemetry and sets the macroblock display mode */
+/**Sets the macroblock display mode. Set to 0 to disable displaying
+ * macroblocks.*/
#define TH_DECCTL_SET_TELEMETRY_MBMODE (9)
-/**Enables telemetry and sets the motion vector display mode */
+/**Sets the motion vector display mode. Set to 0 to disable displaying motion
+ * vectors.*/
#define TH_DECCTL_SET_TELEMETRY_MV (11)
-/**Enables telemetry and sets the adaptive quantization display mode */
+/**Sets the adaptive quantization display mode. Set to 0 to disable displaying
+ * adaptive quantization. */
#define TH_DECCTL_SET_TELEMETRY_QI (13)
-/**Enables telemetry and sets the bitstream breakdown visualization mode */
+/**Sets the bitstream breakdown visualization mode. Set to 0 to disable
+ * displaying bitstream breakdown.*/
#define TH_DECCTL_SET_TELEMETRY_BITS (15)
/*@}*/
@@ -171,7 +175,7 @@ typedef struct th_setup_info th_setup_info;
/**\defgroup decfuncs Functions for Decoding*/
/*@{*/
/**\name Functions for decoding
- * You must link to <tt>libtheoradec</tt> if you use any of the
+ * You must link to <tt>libtheoradec</tt> if you use any of the
* functions in this section.
*
* The functions are listed in the order they are used in a typical decode.
@@ -267,7 +271,10 @@ extern void th_setup_free(th_setup_info *_setup);
* See \ref decctlcodes "the list of available control codes"
* for details.
* \param _buf The parameters for this control code.
- * \param _buf_sz The size of the parameter buffer.*/
+ * \param _buf_sz The size of the parameter buffer.
+ * \return Possible return values depend on the control code used.
+ * See \ref decctlcodes "the list of control codes" for
+ * specific values. Generally 0 indicates success.*/
extern int th_decode_ctl(th_dec_ctx *_dec,int _req,void *_buf,
size_t _buf_sz);
/**Submits a packet containing encoded video data to the decoder.
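
The reworded TH_DECCTL_SET_TELEMETRY_* comments above all follow the same pattern: the control takes an int through th_decode_ctl(), with 0 turning the overlay off. A minimal sketch, assuming an already-allocated decoder; set_mv_telemetry is a hypothetical wrapper, and builds of libtheoradec without telemetry support may simply return TH_EIMPL:

  #include <theora/theoradec.h>

  /* Toggle the motion-vector telemetry overlay on an existing decoder.
     Pass 0 to disable it, a non-zero mode value to enable it. */
  static int set_mv_telemetry(th_dec_ctx *dec, int mode){
    return th_decode_ctl(dec, TH_DECCTL_SET_TELEMETRY_MV, &mode, sizeof(mode));
  }
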
diff --git a/media/libtheora/include/theora/theoraenc.h b/media/libtheora/include/theora/theoraenc.h
deleted file mode 100644
index 721ea89b9d..0000000000
--- a/media/libtheora/include/theora/theoraenc.h
+++ /dev/null
@@ -1,540 +0,0 @@
-/********************************************************************
- * *
- * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
- * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
- * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
- * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
- * *
- * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
- * by the Xiph.Org Foundation http://www.xiph.org/ *
- * *
- ********************************************************************
-
- function:
- last mod: $Id: theora.h,v 1.8 2004/03/15 22:17:32 derf Exp $
-
- ********************************************************************/
-
-/**\file
- * The <tt>libtheoraenc</tt> C encoding API.*/
-
-#if !defined(_O_THEORA_THEORAENC_H_)
-# define _O_THEORA_THEORAENC_H_ (1)
-# include <stddef.h>
-# include <ogg/ogg.h>
-# include "codec.h"
-
-#if defined(__cplusplus)
-extern "C" {
-#endif
-
-
-
-/**\name th_encode_ctl() codes
- * \anchor encctlcodes
- * These are the available request codes for th_encode_ctl().
- * By convention, these are even, to distinguish them from the
- * \ref decctlcodes "decoder control codes".
- * Keep any experimental or vendor-specific values above \c 0x8000.*/
-/*@{*/
-/**Sets the Huffman tables to use.
- * The tables are copied, not stored by reference, so they can be freed after
- * this call.
- * <tt>NULL</tt> may be specified to revert to the default tables.
- *
- * \param[in] _buf <tt>#th_huff_code[#TH_NHUFFMAN_TABLES][#TH_NDCT_TOKENS]</tt>
- * \retval TH_EFAULT \a _enc is <tt>NULL</tt>.
- * \retval TH_EINVAL Encoding has already begun or one or more of the given
- * tables is not full or prefix-free, \a _buf is
- * <tt>NULL</tt> and \a _buf_sz is not zero, or \a _buf is
- * non-<tt>NULL</tt> and \a _buf_sz is not
- * <tt>sizeof(#th_huff_code)*#TH_NHUFFMAN_TABLES*#TH_NDCT_TOKENS</tt>.
- * \retval TH_EIMPL Not supported by this implementation.*/
-#define TH_ENCCTL_SET_HUFFMAN_CODES (0)
-/**Sets the quantization parameters to use.
- * The parameters are copied, not stored by reference, so they can be freed
- * after this call.
- * <tt>NULL</tt> may be specified to revert to the default parameters.
- *
- * \param[in] _buf #th_quant_info
- * \retval TH_EFAULT \a _enc is <tt>NULL</tt>.
- * \retval TH_EINVAL Encoding has already begun, \a _buf is
- * <tt>NULL</tt> and \a _buf_sz is not zero,
- * or \a _buf is non-<tt>NULL</tt> and
- * \a _buf_sz is not <tt>sizeof(#th_quant_info)</tt>.
- * \retval TH_EIMPL Not supported by this implementation.*/
-#define TH_ENCCTL_SET_QUANT_PARAMS (2)
-/**Sets the maximum distance between key frames.
- * This can be changed during an encode, but will be bounded by
- * <tt>1<<th_info#keyframe_granule_shift</tt>.
- * If it is set before encoding begins, th_info#keyframe_granule_shift will
- * be enlarged appropriately.
- *
- * \param[in] _buf <tt>ogg_uint32_t</tt>: The maximum distance between key
- * frames.
- * \param[out] _buf <tt>ogg_uint32_t</tt>: The actual maximum distance set.
- * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
- * \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(ogg_uint32_t)</tt>.
- * \retval TH_EIMPL Not supported by this implementation.*/
-#define TH_ENCCTL_SET_KEYFRAME_FREQUENCY_FORCE (4)
-/**Disables any encoder features that would prevent lossless transcoding back
- * to VP3.
- * This primarily means disabling block-adaptive quantization and always coding
- * all four luma blocks in a macro block when 4MV is used.
- * It also includes using the VP3 quantization tables and Huffman codes; if you
- * set them explicitly after calling this function, the resulting stream will
- * not be VP3-compatible.
- * If you enable VP3-compatibility when encoding 4:2:2 or 4:4:4 source
- * material, or when using a picture region smaller than the full frame (e.g.
- * a non-multiple-of-16 width or height), then non-VP3 bitstream features will
- * still be disabled, but the stream will still not be VP3-compatible, as VP3
- * was not capable of encoding such formats.
- * If you call this after encoding has already begun, then the quantization
- * tables and codebooks cannot be changed, but the frame-level features will
- * be enabled or disabled as requested.
- *
- * \param[in] _buf <tt>int</tt>: a non-zero value to enable VP3 compatibility,
- * or 0 to disable it (the default).
- * \param[out] _buf <tt>int</tt>: 1 if all bitstream features required for
- * VP3-compatibility could be set, and 0 otherwise.
- * The latter will be returned if the pixel format is not
- * 4:2:0, the picture region is smaller than the full frame,
- * or if encoding has begun, preventing the quantization
- * tables and codebooks from being set.
- * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
- * \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt>.
- * \retval TH_EIMPL Not supported by this implementation.*/
-#define TH_ENCCTL_SET_VP3_COMPATIBLE (10)
-/**Gets the maximum speed level.
- * Higher speed levels favor quicker encoding over better quality per bit.
- * Depending on the encoding mode, and the internal algorithms used, quality
- * may actually improve, but in this case bitrate will also likely increase.
- * In any case, overall rate/distortion performance will probably decrease.
- * The maximum value, and the meaning of each value, may change depending on
- * the current encoding mode (VBR vs. constant quality, etc.).
- *
- * \param[out] _buf <tt>int</tt>: The maximum encoding speed level.
- * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
- * \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt>.
- * \retval TH_EIMPL Not supported by this implementation in the current
- * encoding mode.*/
-#define TH_ENCCTL_GET_SPLEVEL_MAX (12)
-/**Sets the speed level.
- * The current speed level may be retrieved using #TH_ENCCTL_GET_SPLEVEL.
- *
- * \param[in] _buf <tt>int</tt>: The new encoding speed level.
- * 0 is slowest, larger values use less CPU.
- * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
- * \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt>, or the
- * encoding speed level is out of bounds.
- * The maximum encoding speed level may be
- * implementation- and encoding mode-specific, and can be
- * obtained via #TH_ENCCTL_GET_SPLEVEL_MAX.
- * \retval TH_EIMPL Not supported by this implementation in the current
- * encoding mode.*/
-#define TH_ENCCTL_SET_SPLEVEL (14)
-/**Gets the current speed level.
- * The default speed level may vary according to encoder implementation, but if
- * this control code is not supported (it returns #TH_EIMPL), the default may
- * be assumed to be the slowest available speed (0).
- * The maximum encoding speed level may be implementation- and encoding
- * mode-specific, and can be obtained via #TH_ENCCTL_GET_SPLEVEL_MAX.
- *
- * \param[out] _buf <tt>int</tt>: The current encoding speed level.
- * 0 is slowest, larger values use less CPU.
- * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
- * \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt>.
- * \retval TH_EIMPL Not supported by this implementation in the current
- * encoding mode.*/
-#define TH_ENCCTL_GET_SPLEVEL (16)
-/**Sets the number of duplicates of the next frame to produce.
- * Although libtheora can encode duplicate frames very cheaply, it costs some
- * amount of CPU to detect them, and a run of duplicates cannot span a
- * keyframe boundary.
- * This control code tells the encoder to produce the specified number of extra
- * duplicates of the next frame.
- * This allows the encoder to make smarter keyframe placement decisions and
- * rate control decisions, and reduces CPU usage as well, when compared to
- * just submitting the same frame for encoding multiple times.
- * This setting only applies to the next frame submitted for encoding.
- * You MUST call th_encode_packetout() repeatedly until it returns 0, or the
- * extra duplicate frames will be lost.
- *
- * \param[in] _buf <tt>int</tt>: The number of duplicates to produce.
- * If this is negative or zero, no duplicates will be produced.
- * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
- * \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt>, or the
- * number of duplicates is greater than or equal to the
- * maximum keyframe interval.
- * In the latter case, NO duplicate frames will be produced.
- * You must ensure that the maximum keyframe interval is set
- * larger than the maximum number of duplicates you will
- * ever wish to insert prior to encoding.
- * \retval TH_EIMPL Not supported by this implementation in the current
- * encoding mode.*/
-#define TH_ENCCTL_SET_DUP_COUNT (18)
-/**Modifies the default bitrate management behavior.
- * Use to allow or disallow frame dropping, and to enable or disable capping
- * bit reservoir overflows and underflows.
- * See \ref encctlcodes "the list of available flags".
- * The flags are set by default to
- * <tt>#TH_RATECTL_DROP_FRAMES|#TH_RATECTL_CAP_OVERFLOW</tt>.
- *
- * \param[in] _buf <tt>int</tt>: Any combination of
- * \ref ratectlflags "the available flags":
- * - #TH_RATECTL_DROP_FRAMES: Enable frame dropping.
- * - #TH_RATECTL_CAP_OVERFLOW: Don't bank excess bits for later
- * use.
- * - #TH_RATECTL_CAP_UNDERFLOW: Don't try to make up shortfalls
- * later.
- * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
- * \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt> or rate control
- * is not enabled.
- * \retval TH_EIMPL Not supported by this implementation in the current
- * encoding mode.*/
-#define TH_ENCCTL_SET_RATE_FLAGS (20)
-/**Sets the size of the bitrate management bit reservoir as a function
- * of number of frames.
- * The reservoir size affects how quickly bitrate management reacts to
- * instantaneous changes in the video complexity.
- * Larger reservoirs react more slowly, and provide better overall quality, but
- * require more buffering by a client, adding more latency to live streams.
- * By default, libtheora sets the reservoir to the maximum distance between
- * keyframes, subject to a minimum and maximum limit.
- * This call may be used to increase or decrease the reservoir, increasing or
- * decreasing the allowed temporary variance in bitrate.
- * An implementation may impose some limits on the size of a reservoir it can
- * handle, in which case the actual reservoir size may not be exactly what was
- * requested.
- * The actual value set will be returned.
- *
- * \param[in] _buf <tt>int</tt>: Requested size of the reservoir measured in
- * frames.
- * \param[out] _buf <tt>int</tt>: The actual size of the reservoir set.
- * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
- * \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(int)</tt>, or rate control
- * is not enabled. The buffer has an implementation
- * defined minimum and maximum size and the value in _buf
- * will be adjusted to match the actual value set.
- * \retval TH_EIMPL Not supported by this implementation in the current
- * encoding mode.*/
-#define TH_ENCCTL_SET_RATE_BUFFER (22)
-/**Enable pass 1 of two-pass encoding mode and retrieve the first pass metrics.
- * Pass 1 mode must be enabled before the first frame is encoded, and a target
- * bitrate must have already been specified to the encoder.
- * Although this does not have to be the exact rate that will be used in the
- * second pass, closer values may produce better results.
- * The first call returns the size of the two-pass header data, along with some
- * placeholder content, and sets the encoder into pass 1 mode implicitly.
- * This call sets the encoder to pass 1 mode implicitly.
- * Then, a subsequent call must be made after each call to
- * th_encode_ycbcr_in() to retrieve the metrics for that frame.
- * An additional, final call must be made to retrieve the summary data,
- * containing such information as the total number of frames, etc.
- * This must be stored in place of the placeholder data that was returned
- * in the first call, before the frame metrics data.
- * All of this data must be presented back to the encoder during pass 2 using
- * #TH_ENCCTL_2PASS_IN.
- *
- * \param[out] <tt>char *</tt>_buf: Returns a pointer to internal storage
- * containing the two pass metrics data.
- * This storage is only valid until the next call, or until the
- * encoder context is freed, and must be copied by the
- * application.
- * \retval >=0 The number of bytes of metric data available in the
- * returned buffer.
- * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
- * \retval TH_EINVAL \a _buf_sz is not <tt>sizeof(char *)</tt>, no target
- * bitrate has been set, or the first call was made after
- * the first frame was submitted for encoding.
- * \retval TH_EIMPL Not supported by this implementation.*/
-#define TH_ENCCTL_2PASS_OUT (24)
-/**Submits two-pass encoding metric data collected the first encoding pass to
- * the second pass.
- * The first call must be made before the first frame is encoded, and a target
- * bitrate must have already been specified to the encoder.
- * It sets the encoder to pass 2 mode implicitly; this cannot be disabled.
- * The encoder may require reading data from some or all of the frames in
- * advance, depending on, e.g., the reservoir size used in the second pass.
- * You must call this function repeatedly before each frame to provide data
- * until either a) it fails to consume all of the data presented or b) all of
- * the pass 1 data has been consumed.
- * In the first case, you must save the remaining data to be presented after
- * the next frame.
- * You can call this function with a NULL argument to get an upper bound on
- * the number of bytes that will be required before the next frame.
- *
- * When pass 2 is first enabled, the default bit reservoir is set to the entire
- * file; this gives maximum flexibility but can lead to very high peak rates.
- * You can subsequently set it to another value with #TH_ENCCTL_SET_RATE_BUFFER
- * (e.g., to set it to the keyframe interval for non-live streaming), however,
- * you may then need to provide more data before the next frame.
- *
- * \param[in] _buf <tt>char[]</tt>: A buffer containing the data returned by
- * #TH_ENCCTL_2PASS_OUT in pass 1.
- * You may pass <tt>NULL</tt> for \a _buf to return an upper
- * bound on the number of additional bytes needed before the
- * next frame.
- * The summary data returned at the end of pass 1 must be at
- * the head of the buffer on the first call with a
- * non-<tt>NULL</tt> \a _buf, and the placeholder data
- * returned at the start of pass 1 should be omitted.
- * After each call you should advance this buffer by the number
- * of bytes consumed.
- * \retval >0 The number of bytes of metric data required/consumed.
- * \retval 0 No more data is required before the next frame.
- * \retval TH_EFAULT \a _enc is <tt>NULL</tt>.
- * \retval TH_EINVAL No target bitrate has been set, or the first call was
- * made after the first frame was submitted for
- * encoding.
- * \retval TH_ENOTFORMAT The data did not appear to be pass 1 from a compatible
- * implementation of this library.
- * \retval TH_EBADHEADER The data was invalid; this may be returned when
- * attempting to read an aborted pass 1 file that still
- * has the placeholder data in place of the summary
- * data.
- * \retval TH_EIMPL Not supported by this implementation.*/
-#define TH_ENCCTL_2PASS_IN (26)
-/**Sets the current encoding quality.
- * This is only valid so long as no bitrate has been specified, either through
- * the #th_info struct used to initialize the encoder or through
- * #TH_ENCCTL_SET_BITRATE (this restriction may be relaxed in a future
- * version).
- * If it is set before the headers are emitted, the target quality encoded in
- * them will be updated.
- *
- * \param[in] _buf <tt>int</tt>: The new target quality, in the range 0...63,
- * inclusive.
- * \retval 0 Success.
- * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
- * \retval TH_EINVAL A target bitrate has already been specified, or the
- * quality index was not in the range 0...63.
- * \retval TH_EIMPL Not supported by this implementation.*/
-#define TH_ENCCTL_SET_QUALITY (28)
-/**Sets the current encoding bitrate.
- * Once a bitrate is set, the encoder must use a rate-controlled mode for all
- * future frames (this restriction may be relaxed in a future version).
- * If it is set before the headers are emitted, the target bitrate encoded in
- * them will be updated.
- * Due to the buffer delay, the exact bitrate of each section of the encode is
- * not guaranteed.
- * The encoder may have already used more bits than allowed for the frames it
- * has encoded, expecting to make them up in future frames, or it may have
- * used fewer, holding the excess in reserve.
- * The exact transition between the two bitrates is not well-defined by this
- * API, but may be affected by flags set with #TH_ENCCTL_SET_RATE_FLAGS.
- * After a number of frames equal to the buffer delay, one may expect further
- * output to average at the target bitrate.
- *
- * \param[in] _buf <tt>long</tt>: The new target bitrate, in bits per second.
- * \retval 0 Success.
- * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
- * \retval TH_EINVAL The target bitrate was not positive.
- * \retval TH_EIMPL Not supported by this implementation.*/
-#define TH_ENCCTL_SET_BITRATE (30)
-/**Sets the configuration to be compatible with that from the given setup
- * header.
- * This sets the Huffman codebooks and quantization parameters to match those
- * found in the given setup header.
- * This guarantees that packets encoded by this encoder will be decodable using
- * a decoder configured with the passed-in setup header.
- * It does <em>not</em> guarantee that th_encode_flushheader() will produce a
- * bit-identical setup header, only that they will be compatible.
- * If you need a bit-identical setup header, then use the one you passed into
- * this command, and not the one returned by th_encode_flushheader().
- *
- * This also does <em>not</em> enable or disable VP3 compatibility; that is not
- * signaled in the setup header (or anywhere else in the encoded stream), and
- * is controlled independently by the #TH_ENCCTL_SET_VP3_COMPATIBLE function.
- * If you wish to enable VP3 compatibility mode <em>and</em> want the codebooks
- * and quantization parameters to match the given setup header, you should
- * enable VP3 compatibility before invoking this command, otherwise the
- * codebooks and quantization parameters will be reset to the VP3 defaults.
- *
- * The current encoder does not support Huffman codebooks which do not contain
- * codewords for all 32 tokens.
- * Such codebooks are legal, according to the specification, but cannot be
- * configured with this function.
- *
- * \param[in] _buf <tt>unsigned char[]</tt>: The encoded setup header to copy
- * the configuration from.
- * This should be the original,
- * undecoded setup header packet,
- * and <em>not</em> a #th_setup_info
- * structure filled in by
- * th_decode_headerin().
- * \retval TH_EFAULT \a _enc or \a _buf is <tt>NULL</tt>.
- * \retval TH_EINVAL Encoding has already begun, so the codebooks and
- * quantization parameters cannot be changed, or the
- * data in the setup header was not supported by this
- * encoder.
- * \retval TH_EBADHEADER \a _buf did not contain a valid setup header packet.
- * \retval TH_ENOTFORMAT \a _buf did not contain a Theora header at all.
- * \retval TH_EIMPL Not supported by this implementation.*/
-#define TH_ENCCTL_SET_COMPAT_CONFIG (32)
-
-/*@}*/
-
-
-/**\name TH_ENCCTL_SET_RATE_FLAGS flags
- * \anchor ratectlflags
- * These are the flags available for use with #TH_ENCCTL_SET_RATE_FLAGS.*/
-/*@{*/
-/**Drop frames to keep within bitrate buffer constraints.
- * This can have a severe impact on quality, but is the only way to ensure that
- * bitrate targets are met at low rates during sudden bursts of activity.*/
-#define TH_RATECTL_DROP_FRAMES (0x1)
-/**Ignore bitrate buffer overflows.
- * If the encoder uses so few bits that the reservoir of available bits
- * overflows, ignore the excess.
- * The encoder will not try to use these extra bits in future frames.
- * At high rates this may cause the result to be undersized, but allows a
- * client to play the stream using a finite buffer; it should normally be
- * enabled.*/
-#define TH_RATECTL_CAP_OVERFLOW (0x2)
-/**Ignore bitrate buffer underflows.
- * If the encoder uses so many bits that the reservoir of available bits
- * underflows, ignore the deficit.
- * The encoder will not try to make up these extra bits in future frames.
- * At low rates this may cause the result to be oversized; it should normally
- * be disabled.*/
-#define TH_RATECTL_CAP_UNDERFLOW (0x4)
-/*@}*/
-
-
-
-/**The quantization parameters used by VP3.*/
-extern const th_quant_info TH_VP31_QUANT_INFO;
-
-/**The Huffman tables used by VP3.*/
-extern const th_huff_code
- TH_VP31_HUFF_CODES[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS];
-
-
-
-/**\name Encoder state
- The following data structure is opaque, and its contents are not publicly
- defined by this API.
- Referring to its internals directly is unsupported, and may break without
- warning.*/
-/*@{*/
-/**The encoder context.*/
-typedef struct th_enc_ctx th_enc_ctx;
-/*@}*/
-
-
-
-/**\defgroup encfuncs Functions for Encoding*/
-/*@{*/
-/**\name Functions for encoding
- * You must link to <tt>libtheoraenc</tt> and <tt>libtheoradec</tt>
- * if you use any of the functions in this section.
- *
- * The functions are listed in the order they are used in a typical encode.
- * The basic steps are:
- * - Fill in a #th_info structure with details on the format of the video you
- * wish to encode.
- * - Allocate a #th_enc_ctx handle with th_encode_alloc().
- * - Perform any additional encoder configuration required with
- * th_encode_ctl().
- * - Repeatedly call th_encode_flushheader() to retrieve all the header
- * packets.
- * - For each uncompressed frame:
- * - Submit the uncompressed frame via th_encode_ycbcr_in()
- * - Repeatedly call th_encode_packetout() to retrieve any video data packets
- * that are ready.
- * - Call th_encode_free() to release all encoder memory.*/
-/*@{*/
-/**Allocates an encoder instance.
- * \param _info A #th_info struct filled with the desired encoding parameters.
- * \return The initialized #th_enc_ctx handle.
- * \retval NULL If the encoding parameters were invalid.*/
-extern th_enc_ctx *th_encode_alloc(const th_info *_info);
-/**Encoder control function.
- * This is used to provide advanced control the encoding process.
- * \param _enc A #th_enc_ctx handle.
- * \param _req The control code to process.
- * See \ref encctlcodes "the list of available control codes"
- * for details.
- * \param _buf The parameters for this control code.
- * \param _buf_sz The size of the parameter buffer.*/
-extern int th_encode_ctl(th_enc_ctx *_enc,int _req,void *_buf,size_t _buf_sz);
-/**Outputs the next header packet.
- * This should be called repeatedly after encoder initialization until it
- * returns 0 in order to get all of the header packets, in order, before
- * encoding actual video data.
- * \param _enc A #th_enc_ctx handle.
- * \param _comments The metadata to place in the comment header, when it is
- * encoded.
- * \param _op An <tt>ogg_packet</tt> structure to fill.
- * All of the elements of this structure will be set,
- * including a pointer to the header data.
- * The memory for the header data is owned by
- * <tt>libtheoraenc</tt>, and may be invalidated when the
- * next encoder function is called.
- * \return A positive value indicates that a header packet was successfully
- * produced.
- * \retval 0 No packet was produced, and no more header packets remain.
- * \retval TH_EFAULT \a _enc, \a _comments, or \a _op was <tt>NULL</tt>.*/
-extern int th_encode_flushheader(th_enc_ctx *_enc,
- th_comment *_comments,ogg_packet *_op);
-/**Submits an uncompressed frame to the encoder.
- * \param _enc A #th_enc_ctx handle.
- * \param _ycbcr A buffer of Y'CbCr data to encode.
- * If the width and height of the buffer matches the frame size
- * the encoder was initialized with, the encoder will only
- * reference the portion inside the picture region.
- * Any data outside this region will be ignored, and need not map
- * to a valid address.
- * Alternatively, you can pass a buffer equal to the size of the
- * picture region, if this is less than the full frame size.
- * When using subsampled chroma planes, odd picture sizes or odd
- * picture offsets may require an unexpected chroma plane size,
- * and their use is generally discouraged, as they will not be
- * well-supported by players and other media frameworks.
- * See Section 4.4 of
- * <a href="http://www.theora.org/doc/Theora.pdf">the Theora
- * specification</a> for details if you wish to use them anyway.
- * \retval 0 Success.
- * \retval TH_EFAULT \a _enc or \a _ycbcr is <tt>NULL</tt>.
- * \retval TH_EINVAL The buffer size matches neither the frame size nor the
- * picture size the encoder was initialized with, or
- * encoding has already completed.*/
-extern int th_encode_ycbcr_in(th_enc_ctx *_enc,th_ycbcr_buffer _ycbcr);
-/**Retrieves encoded video data packets.
- * This should be called repeatedly after each frame is submitted to flush any
- * encoded packets, until it returns 0.
- * The encoder will not buffer these packets as subsequent frames are
- * compressed, so a failure to do so will result in lost video data.
- * \note Currently the encoder operates in a one-frame-in, one-packet-out
- * manner.
- * However, this may be changed in the future.
- * \param _enc A #th_enc_ctx handle.
- * \param _last Set this flag to a non-zero value if no more uncompressed
- * frames will be submitted.
- * This ensures that a proper EOS flag is set on the last packet.
- * \param _op An <tt>ogg_packet</tt> structure to fill.
- * All of the elements of this structure will be set, including a
- * pointer to the video data.
- * The memory for the video data is owned by
- * <tt>libtheoraenc</tt>, and may be invalidated when the next
- * encoder function is called.
- * \return A positive value indicates that a video data packet was successfully
- * produced.
- * \retval 0 No packet was produced, and no more encoded video data
- * remains.
- * \retval TH_EFAULT \a _enc or \a _op was <tt>NULL</tt>.*/
-extern int th_encode_packetout(th_enc_ctx *_enc,int _last,ogg_packet *_op);
-/**Frees an allocated encoder instance.
- * \param _enc A #th_enc_ctx handle.*/
-extern void th_encode_free(th_enc_ctx *_enc);
-/*@}*/
-/*@}*/
-
-
-
-#if defined(__cplusplus)
-}
-#endif
-
-#endif
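
Although the in-tree copy of theoraenc.h is removed above, the encode sequence its comments lay out (allocate, flush headers, submit frames, drain packets, free) is easier to see end to end. A condensed sketch of that flow; get_frame is a hypothetical callback that fills a frame buffer, and Ogg muxing plus error handling are omitted:

  #include <theora/theoraenc.h>

  /* Condensed sketch of the encode flow described in theoraenc.h.
     'info' is assumed to be a fully filled-in th_info; get_frame() is a
     hypothetical callback that fills in the planes for frame 'idx'. */
  static void encode_all(const th_info *info, int nframes,
                         void (*get_frame)(int idx, th_ycbcr_buffer ycbcr)){
    th_enc_ctx *enc = th_encode_alloc(info);
    th_comment tc;
    ogg_packet op;
    int i;
    th_comment_init(&tc);
    /* Emit every header packet before any video data. */
    while(th_encode_flushheader(enc, &tc, &op) > 0){
      /* write op to the output stream */
    }
    for(i = 0; i < nframes; i++){
      th_ycbcr_buffer ycbcr;
      int last = (i + 1 == nframes);
      get_frame(i, ycbcr);
      th_encode_ycbcr_in(enc, ycbcr);
      /* Drain packets after every frame; 'last' sets EOS on the final one. */
      while(th_encode_packetout(enc, last, &op) > 0){
        /* write op to the output stream */
      }
    }
    th_comment_clear(&tc);
    th_encode_free(enc);
  }
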
diff --git a/media/libtheora/lib/apiwrapper.c b/media/libtheora/lib/apiwrapper.c
deleted file mode 100644
index dc959b8d13..0000000000
--- a/media/libtheora/lib/apiwrapper.c
+++ /dev/null
@@ -1,166 +0,0 @@
-/********************************************************************
- * *
- * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
- * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
- * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
- * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
- * *
- * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
- * by the Xiph.Org Foundation and contributors http://www.xiph.org/ *
- * *
- ********************************************************************
-
- function:
- last mod: $Id: apiwrapper.c 16503 2009-08-22 18:14:02Z giles $
-
- ********************************************************************/
-
-#include <stdlib.h>
-#include <string.h>
-#include <limits.h>
-#include "apiwrapper.h"
-
-
-
-const char *theora_version_string(void){
- return th_version_string();
-}
-
-ogg_uint32_t theora_version_number(void){
- return th_version_number();
-}
-
-void theora_info_init(theora_info *_ci){
- memset(_ci,0,sizeof(*_ci));
-}
-
-void theora_info_clear(theora_info *_ci){
- th_api_wrapper *api;
- api=(th_api_wrapper *)_ci->codec_setup;
- memset(_ci,0,sizeof(*_ci));
- if(api!=NULL){
- if(api->clear!=NULL)(*api->clear)(api);
- _ogg_free(api);
- }
-}
-
-void theora_clear(theora_state *_th){
- /*Provide compatibility with mixed encoder and decoder shared lib versions.*/
- if(_th->internal_decode!=NULL){
- (*((oc_state_dispatch_vtable *)_th->internal_decode)->clear)(_th);
- }
- if(_th->internal_encode!=NULL){
- (*((oc_state_dispatch_vtable *)_th->internal_encode)->clear)(_th);
- }
- if(_th->i!=NULL)theora_info_clear(_th->i);
- memset(_th,0,sizeof(*_th));
-}
-
-int theora_control(theora_state *_th,int _req,void *_buf,size_t _buf_sz){
- /*Provide compatibility with mixed encoder and decoder shared lib versions.*/
- if(_th->internal_decode!=NULL){
- return (*((oc_state_dispatch_vtable *)_th->internal_decode)->control)(_th,
- _req,_buf,_buf_sz);
- }
- else if(_th->internal_encode!=NULL){
- return (*((oc_state_dispatch_vtable *)_th->internal_encode)->control)(_th,
- _req,_buf,_buf_sz);
- }
- else return TH_EINVAL;
-}
-
-ogg_int64_t theora_granule_frame(theora_state *_th,ogg_int64_t _gp){
- /*Provide compatibility with mixed encoder and decoder shared lib versions.*/
- if(_th->internal_decode!=NULL){
- return (*((oc_state_dispatch_vtable *)_th->internal_decode)->granule_frame)(
- _th,_gp);
- }
- else if(_th->internal_encode!=NULL){
- return (*((oc_state_dispatch_vtable *)_th->internal_encode)->granule_frame)(
- _th,_gp);
- }
- else return -1;
-}
-
-double theora_granule_time(theora_state *_th, ogg_int64_t _gp){
- /*Provide compatibility with mixed encoder and decoder shared lib versions.*/
- if(_th->internal_decode!=NULL){
- return (*((oc_state_dispatch_vtable *)_th->internal_decode)->granule_time)(
- _th,_gp);
- }
- else if(_th->internal_encode!=NULL){
- return (*((oc_state_dispatch_vtable *)_th->internal_encode)->granule_time)(
- _th,_gp);
- }
- else return -1;
-}
-
-void oc_theora_info2th_info(th_info *_info,const theora_info *_ci){
- _info->version_major=_ci->version_major;
- _info->version_minor=_ci->version_minor;
- _info->version_subminor=_ci->version_subminor;
- _info->frame_width=_ci->width;
- _info->frame_height=_ci->height;
- _info->pic_width=_ci->frame_width;
- _info->pic_height=_ci->frame_height;
- _info->pic_x=_ci->offset_x;
- _info->pic_y=_ci->offset_y;
- _info->fps_numerator=_ci->fps_numerator;
- _info->fps_denominator=_ci->fps_denominator;
- _info->aspect_numerator=_ci->aspect_numerator;
- _info->aspect_denominator=_ci->aspect_denominator;
- switch(_ci->colorspace){
- case OC_CS_ITU_REC_470M:_info->colorspace=TH_CS_ITU_REC_470M;break;
- case OC_CS_ITU_REC_470BG:_info->colorspace=TH_CS_ITU_REC_470BG;break;
- default:_info->colorspace=TH_CS_UNSPECIFIED;break;
- }
- switch(_ci->pixelformat){
- case OC_PF_420:_info->pixel_fmt=TH_PF_420;break;
- case OC_PF_422:_info->pixel_fmt=TH_PF_422;break;
- case OC_PF_444:_info->pixel_fmt=TH_PF_444;break;
- default:_info->pixel_fmt=TH_PF_RSVD;
- }
- _info->target_bitrate=_ci->target_bitrate;
- _info->quality=_ci->quality;
- _info->keyframe_granule_shift=_ci->keyframe_frequency_force>0?
- OC_MINI(31,oc_ilog(_ci->keyframe_frequency_force-1)):0;
-}
-
-int theora_packet_isheader(ogg_packet *_op){
- return th_packet_isheader(_op);
-}
-
-int theora_packet_iskeyframe(ogg_packet *_op){
- return th_packet_iskeyframe(_op);
-}
-
-int theora_granule_shift(theora_info *_ci){
- /*This breaks when keyframe_frequency_force is not positive or is larger than
- 2**31 (if your int is more than 32 bits), but that's what the original
- function does.*/
- return oc_ilog(_ci->keyframe_frequency_force-1);
-}
-
-void theora_comment_init(theora_comment *_tc){
- th_comment_init((th_comment *)_tc);
-}
-
-char *theora_comment_query(theora_comment *_tc,char *_tag,int _count){
- return th_comment_query((th_comment *)_tc,_tag,_count);
-}
-
-int theora_comment_query_count(theora_comment *_tc,char *_tag){
- return th_comment_query_count((th_comment *)_tc,_tag);
-}
-
-void theora_comment_clear(theora_comment *_tc){
- th_comment_clear((th_comment *)_tc);
-}
-
-void theora_comment_add(theora_comment *_tc,char *_comment){
- th_comment_add((th_comment *)_tc,_comment);
-}
-
-void theora_comment_add_tag(theora_comment *_tc, char *_tag, char *_value){
- th_comment_add_tag((th_comment *)_tc,_tag,_value);
-}
diff --git a/media/libtheora/lib/apiwrapper.h b/media/libtheora/lib/apiwrapper.h
deleted file mode 100644
index ff45e0a4d6..0000000000
--- a/media/libtheora/lib/apiwrapper.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/********************************************************************
- * *
- * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
- * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
- * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
- * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
- * *
- * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
- * by the Xiph.Org Foundation and contributors http://www.xiph.org/ *
- * *
- ********************************************************************
-
- function:
- last mod: $Id: apiwrapper.h 13596 2007-08-23 20:05:38Z tterribe $
-
- ********************************************************************/
-
-#if !defined(_apiwrapper_H)
-# define _apiwrapper_H (1)
-# include <ogg/ogg.h>
-# include <theora/theora.h>
-# include "theora/theoradec.h"
-# include "theora/theoraenc.h"
-# include "state.h"
-
-typedef struct th_api_wrapper th_api_wrapper;
-typedef struct th_api_info th_api_info;
-
-/*Provide an entry point for the codec setup to clear itself in case we ever
- want to break pieces off into a common base library shared by encoder and
- decoder.
- In addition, this makes several other pieces of the API wrapper cleaner.*/
-typedef void (*oc_setup_clear_func)(void *_ts);
-
-/*Generally only one of these pointers will be non-NULL in any given instance.
- Technically we do not even really need this struct, since we should be able
- to figure out which one from "context", but doing it this way makes sure we
- don't flub it up.*/
-struct th_api_wrapper{
- oc_setup_clear_func clear;
- th_setup_info *setup;
- th_dec_ctx *decode;
- th_enc_ctx *encode;
-};
-
-struct th_api_info{
- th_api_wrapper api;
- theora_info info;
-};
-
-
-void oc_theora_info2th_info(th_info *_info,const theora_info *_ci);
-
-#endif
diff --git a/media/libtheora/lib/arm/arm2gnu.pl b/media/libtheora/lib/arm/arm2gnu.pl
index 5831bd81e2..8cb68e4a9f 100644..100755
--- a/media/libtheora/lib/arm/arm2gnu.pl
+++ b/media/libtheora/lib/arm/arm2gnu.pl
@@ -23,6 +23,7 @@ $\ = "\n"; # automatically add newline on print
$n=0;
$thumb = 0; # ARM mode by default, not Thumb.
+@proc_stack = ();
LINE:
while (<>) {
@@ -85,13 +86,19 @@ while (<>) {
# ".rdata" doesn't work in 'as' version 2.13.2, as it is ".rodata" there.
#
if ( /\bAREA\b/ ) {
+ my $align;
+ $align = "2";
+ if ( /ALIGN=(\d+)/ ) {
+ $align = $1;
+ }
if ( /CODE/ ) {
$nxstack = 1;
}
s/^(.+)CODE(.+)READONLY(.*)/ .text/;
- s/^(.+)DATA(.+)READONLY(.*)/ .section .rdata\n .align 2/;
- s/^(.+)\|\|\.data\|\|(.+)/ .data\n .align 2/;
+ s/^(.+)DATA(.+)READONLY(.*)/ .section .rdata/;
+ s/^(.+)\|\|\.data\|\|(.+)/ .data/;
s/^(.+)\|\|\.bss\|\|(.+)/ .bss/;
+ s/$/; .p2align $align/;
}
s/\|\|\.constdata\$(\d+)\|\|/.L_CONST$1/; # ||.constdata$3||
@@ -105,12 +112,30 @@ while (<>) {
s/\bCODE16\b/.code 16/ && do {$thumb = 1};
if (/\bPROC\b/)
{
- print " .thumb_func" if ($thumb);
+ my $prefix;
+ my $proc;
+ /^([A-Za-z_\.]\w+)\b/;
+ $proc = $1;
+ $prefix = "";
+ if ($proc)
+ {
+ $prefix = $prefix.sprintf("\t.type\t%s, %%function; ",$proc);
+ push(@proc_stack, $proc);
+ s/^[A-Za-z_\.]\w+/$&:/;
+ }
+ $prefix = $prefix."\t.thumb_func; " if ($thumb);
s/\bPROC\b/@ $&/;
+ $_ = $prefix.$_;
}
s/^(\s*)(S|Q|SH|U|UQ|UH)ASX\b/$1$2ADDSUBX/;
s/^(\s*)(S|Q|SH|U|UQ|UH)SAX\b/$1$2SUBADDX/;
- s/\bENDP\b/@ $&/;
+ if (/\bENDP\b/)
+ {
+ my $proc;
+ s/\bENDP\b/@ $&/;
+ $proc = pop(@proc_stack);
+ $_ = "\t.size $proc, .-$proc".$_ if ($proc);
+ }
s/\bSUBT\b/@ $&/;
s/\bDATA\b/@ $&/; # DATA directive is deprecated -- Asm guide, p.7-25
s/\bKEEP\b/@ $&/;
@@ -223,6 +248,7 @@ while (<>) {
{
my $cmd=$_;
my $value;
+ my $prefix;
my $w1;
my $w2;
my $w3;
@@ -241,25 +267,22 @@ while (<>) {
if( $bigend ne "")
{
# big endian
-
- print " .byte 0x".$w1;
- print " .byte 0x".$w2;
- print " .byte 0x".$w3;
- print " .byte 0x".$w4;
+ $prefix = "\t.byte\t0x".$w1.";".
+ "\t.byte\t0x".$w2.";".
+ "\t.byte\t0x".$w3.";".
+ "\t.byte\t0x".$w4."; ";
}
else
{
# little endian
-
- print " .byte 0x".$w4;
- print " .byte 0x".$w3;
- print " .byte 0x".$w2;
- print " .byte 0x".$w1;
+ $prefix = "\t.byte\t0x".$w4.";".
+ "\t.byte\t0x".$w3.";".
+ "\t.byte\t0x".$w2.";".
+ "\t.byte\t0x".$w1."; ";
}
-
+ $_=$prefix.$_;
}
-
if ( /\badrl\b/i )
{
s/\badrl\s+(\w+)\s*,\s*(\w+)/ldr $1,=$2/i;
diff --git a/media/libtheora/lib/arm/armbits.s b/media/libtheora/lib/arm/armbits.s
index 0fdb6fdd37..9400722543 100644
--- a/media/libtheora/lib/arm/armbits.s
+++ b/media/libtheora/lib/arm/armbits.s
@@ -11,18 +11,12 @@
;********************************************************************
;
; function:
-; last mod: $Id: armbits.s 17481 2010-10-03 22:49:42Z tterribe $
+; last mod: $Id$
;
;********************************************************************
AREA |.text|, CODE, READONLY
- ; Explicitly specifying alignment here because some versions of
- ; gas don't align code correctly. See
- ; http://lists.gnu.org/archive/html/bug-binutils/2011-06/msg00199.html
- ; https://bugzilla.mozilla.org/show_bug.cgi?id=920992
- ALIGN
-
EXPORT oc_pack_read_arm
EXPORT oc_pack_read1_arm
EXPORT oc_huff_token_decode_arm
diff --git a/media/libtheora/lib/arm/armcpu.c b/media/libtheora/lib/arm/armcpu.c
index 8b0f9a8574..f1941bdc15 100644
--- a/media/libtheora/lib/arm/armcpu.c
+++ b/media/libtheora/lib/arm/armcpu.c
@@ -20,7 +20,7 @@
#include "armcpu.h"
#if !defined(OC_ARM_ASM)|| \
- !defined(OC_ARM_ASM_EDSP)&&!defined(OC_ARM_ASM_ARMV6)&& \
+ !defined(OC_ARM_ASM_EDSP)&&!defined(OC_ARM_ASM_MEDIA)&& \
!defined(OC_ARM_ASM_NEON)
ogg_uint32_t oc_cpu_flags_get(void){
return 0;
@@ -107,6 +107,44 @@ ogg_uint32_t oc_cpu_flags_get(void){
return flags;
}
+#elif defined(__riscos__)
+#include <kernel.h>
+#include <swis.h>
+
+ogg_uint32_t oc_cpu_flags_get(void) {
+  ogg_uint32_t flags = 0;
+  /* Declared here so the OC_ARM_ASM_NEON block below can still use it when
+     the EDSP/MEDIA checks are compiled out. */
+  _kernel_oserror* test;
+
+#if defined(OC_ARM_ASM_EDSP) || defined(OC_ARM_ASM_MEDIA)
+
+  if (_swi(OS_Byte,_IN(0)|_IN(2)|_RETURN(1), 129, 0xFF) <= 0xA9)
+    _swix(OS_Module, _INR(0,1), 1, "System:Modules.CallASWI");
+
+  ogg_uint32_t features;
+  test = _swix(OS_PlatformFeatures, _IN(0)|_OUT(0), 0, &features);
+ if (test == NULL) {
+#if defined(OC_ARM_ASM_EDSP)
+ if((features>>10 & 1) == 1)flags|=OC_CPU_ARM_EDSP;
+#endif
+
+#if defined(OC_ARM_ASM_MEDIA)
+ if ((features>>31 & 1) == 1) {
+ ogg_uint32_t shadd = 0;
+ test =_swix(OS_PlatformFeatures, _INR(0,1)|_OUT(0), 34, 29, &shadd);
+ if (test==NULL && shadd==1)flags|=OC_CPU_ARM_MEDIA;
+ }
+#endif
+ }
+#endif
+
+#if defined(OC_ARM_ASM_NEON)
+ ogg_uint32_t mvfr1;
+ test = _swix(VFPSupport_Features, _IN(0)|_OUT(2), 0, &mvfr1);
+ if (test==NULL && (mvfr1 & 0xFFF00)==0x11100)flags|=OC_CPU_ARM_NEON;
+#endif
+
+ return flags;
+}
+
#else
/*The feature registers which can tell us what the processor supports are
accessible in priveleged modes only, so we can't have a general user-space
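The RISC OS branch added above reports the same OC_CPU_ARM_* bits as the other platform-specific detectors in this file, so callers of oc_cpu_flags_get() need no changes. A hedged sketch of how such flags are typically consumed for dispatch; the select_idct() helper and the function-pointer type are hypothetical, and the bit values are placeholders standing in for the constants in lib/arm/armcpu.h:

    /* Placeholder bit values for this sketch only; the real constants come
       from lib/arm/armcpu.h. */
    #define OC_CPU_ARM_EDSP  (1<<0)
    #define OC_CPU_ARM_MEDIA (1<<1)
    #define OC_CPU_ARM_NEON  (1<<2)

    typedef void (*idct_fn)(short _y[64],const short _x[64]);

    /* Pick the most capable implementation the runtime flags allow. */
    static idct_fn select_idct(unsigned _flags,
     idct_fn _c_ver,idct_fn _v6_ver,idct_fn _neon_ver){
      if(_flags&OC_CPU_ARM_NEON)return _neon_ver;
      if(_flags&OC_CPU_ARM_MEDIA)return _v6_ver;
      return _c_ver;
    }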
diff --git a/media/libtheora/lib/arm/armfrag.s b/media/libtheora/lib/arm/armfrag.s
index e20579eee4..38627ed669 100644
--- a/media/libtheora/lib/arm/armfrag.s
+++ b/media/libtheora/lib/arm/armfrag.s
@@ -11,17 +11,11 @@
;********************************************************************
; Original implementation:
; Copyright (C) 2009 Robin Watts for Pinknoise Productions Ltd
-; last mod: $Id: armfrag.s 17481 2010-10-03 22:49:42Z tterribe $
+; last mod: $Id$
;********************************************************************
AREA |.text|, CODE, READONLY
- ; Explicitly specifying alignment here because some versions of
- ; gas don't align code correctly. See
- ; http://lists.gnu.org/archive/html/bug-binutils/2011-06/msg00199.html
- ; https://bugzilla.mozilla.org/show_bug.cgi?id=920992
- ALIGN
-
GET armopts.s
; Vanilla ARM v4 versions
@@ -516,8 +510,7 @@ oc_frag_recon_intra_neon PROC
; r0 = unsigned char *_dst
; r1 = int _ystride
; r2 = const ogg_int16_t _residue[64]
- MOV r3, #128
- VDUP.S16 Q0, r3
+ VMOV.I16 Q0, #128
VLDMIA r2, {D16-D31} ; D16= 3333222211110000 etc ; 9(8) cycles
VQADD.S16 Q8, Q8, Q0
VQADD.S16 Q9, Q9, Q0
diff --git a/media/libtheora/lib/arm/armidct.s b/media/libtheora/lib/arm/armidct.s
index babd846ecd..68530c7140 100644
--- a/media/libtheora/lib/arm/armidct.s
+++ b/media/libtheora/lib/arm/armidct.s
@@ -11,17 +11,11 @@
;********************************************************************
; Original implementation:
; Copyright (C) 2009 Robin Watts for Pinknoise Productions Ltd
-; last mod: $Id: armidct.s 17481 2010-10-03 22:49:42Z tterribe $
+; last mod: $Id$
;********************************************************************
AREA |.text|, CODE, READONLY
- ; Explicitly specifying alignment here because some versions of
- ; gas don't align code correctly. See
- ; http://lists.gnu.org/archive/html/bug-binutils/2011-06/msg00199.html
- ; https://bugzilla.mozilla.org/show_bug.cgi?id=920992
- ALIGN
-
GET armopts.s
EXPORT oc_idct8x8_1_arm
@@ -70,11 +64,8 @@ oc_idct8x8_slow_arm
BL idct8core_arm
BL idct8core_arm
LDR r0, [r13], #4 ; Write to the final destination.
- ; Clear input data for next block (decoder only).
SUB r2, r1, #8*16
- CMP r0, r2
- MOV r1, r13 ; And read from temp storage.
- BEQ oc_idct8x8_slow_arm_cols
+ ; Clear input data for next block.
MOV r4, #0
MOV r5, #0
MOV r6, #0
@@ -87,7 +78,7 @@ oc_idct8x8_slow_arm
STMIA r2!,{r4,r5,r6,r7}
STMIA r2!,{r4,r5,r6,r7}
STMIA r2!,{r4,r5,r6,r7}
-oc_idct8x8_slow_arm_cols
+ MOV r1, r13 ; And read from temp storage.
; Column transforms
BL idct8core_down_arm
BL idct8core_down_arm
@@ -111,18 +102,15 @@ oc_idct8x8_10_arm PROC
BL idct3core_arm
BL idct2core_arm
BL idct1core_arm
- ; Clear input data for next block (decoder only).
- SUB r0, r1, #4*16
- CMP r0, r2
- MOV r1, r13 ; Read from temp storage.
- BEQ oc_idct8x8_10_arm_cols
+ ; Clear input data for next block.
MOV r4, #0
- STR r4, [r0]
- STR r4, [r0,#4]
- STR r4, [r0,#16]
- STR r4, [r0,#20]
- STR r4, [r0,#32]
- STR r4, [r0,#48]
+ STR r4, [r1,#-4*16]!
+ STR r4, [r1,#4]
+ STR r4, [r1,#16]
+ STR r4, [r1,#20]
+ STR r4, [r1,#32]
+ STR r4, [r1,#48]
+ MOV r1, r13 ; Read from temp storage.
MOV r0, r2 ; Write to the final destination
oc_idct8x8_10_arm_cols
; Column transforms
@@ -147,18 +135,14 @@ oc_idct8x8_6_arm PROC
BL idct3core_arm
BL idct2core_arm
BL idct1core_arm
- ; Clear input data for next block (decoder only).
- SUB r0, r1, #3*16
- CMP r0, r2
- MOV r1, r13 ; Read from temp storage.
- BEQ oc_idct8x8_6_arm_cols
+ ; Clear input data for next block.
MOV r4, #0
- STR r4, [r0]
- STR r4, [r0,#4]
- STR r4, [r0,#16]
- STR r4, [r0,#32]
+ STR r4, [r1,#-3*16]!
+ STR r4, [r1,#4]
+ STR r4, [r1,#16]
+ STR r4, [r1,#32]
+ MOV r1, r13 ; Read from temp storage.
MOV r0, r2 ; Write to the final destination
-oc_idct8x8_6_arm_cols
; Column transforms
BL idct3core_down_arm
BL idct3core_down_arm
@@ -180,14 +164,12 @@ oc_idct8x8_3_arm PROC
MOV r0, r13 ; Write to temp storage.
BL idct2core_arm
BL idct1core_arm
- ; Clear input data for next block (decoder only).
- SUB r0, r1, #2*16
- CMP r0, r2
+ ; Clear input data for next block.
+ MOV r4, #0
+ STR r4, [r1,#-2*16]!
+ STR r4, [r1,#16]
MOV r1, r13 ; Read from temp storage.
- MOVNE r4, #0
- STRNE r4, [r0]
- STRNE r4, [r0,#16]
- MOVNE r0, r2 ; Write to the final destination
+ MOV r0, r2 ; Write to the final destination
; Column transforms
BL idct2core_down_arm
BL idct2core_down_arm
@@ -805,30 +787,26 @@ oc_idct8x8_slow_v6
BL idct8_8core_v6
BL idct8_8core_v6
LDR r0, [r13], #4 ; Write to the final destination.
- ; Clear input data for next block (decoder only).
- SUB r2, r1, #8*16
- CMP r0, r2
- MOV r1, r13 ; And read from temp storage.
- BEQ oc_idct8x8_slow_v6_cols
+ ; Clear input data for next block.
MOV r4, #0
MOV r5, #0
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
- STRD r4, [r2], #8
-oc_idct8x8_slow_v6_cols
+ STRD r4, [r1,#-8*16]!
+ STRD r4, [r1,#8]
+ STRD r4, [r1,#16]
+ STRD r4, [r1,#24]
+ STRD r4, [r1,#32]
+ STRD r4, [r1,#40]
+ STRD r4, [r1,#48]
+ STRD r4, [r1,#56]
+ STRD r4, [r1,#64]
+ STRD r4, [r1,#72]
+ STRD r4, [r1,#80]
+ STRD r4, [r1,#88]
+ STRD r4, [r1,#96]
+ STRD r4, [r1,#104]
+ STRD r4, [r1,#112]
+ STRD r4, [r1,#120]
+ MOV r1, r13 ; And read from temp storage.
; Column transforms
BL idct8_8core_down_v6
BL idct8_8core_down_v6
@@ -849,20 +827,16 @@ oc_idct8x8_10_v6 PROC
BL idct4_3core_v6
BL idct2_1core_v6
LDR r0, [r13], #4 ; Write to the final destination.
- ; Clear input data for next block (decoder only).
- SUB r2, r1, #4*16
- CMP r0, r2
- AND r1, r13,#4 ; Align the stack.
- BEQ oc_idct8x8_10_v6_cols
+ ; Clear input data for next block.
MOV r4, #0
MOV r5, #0
- STRD r4, [r2]
- STRD r4, [r2,#16]
- STR r4, [r2,#32]
- STR r4, [r2,#48]
-oc_idct8x8_10_v6_cols
-; Column transforms
+ STRD r4, [r1,#-4*16]!
+ STRD r4, [r1,#16]
+ STR r4, [r1,#32]
+ STR r4, [r1,#48]
+ AND r1, r13,#4 ; Align the stack.
ADD r1, r1, r13 ; And read from temp storage.
+; Column transforms
BL idct4_4core_down_v6
BL idct4_4core_down_v6
BL idct4_4core_down_v6
@@ -878,14 +852,12 @@ oc_idct8x8_3_v6 PROC
MOV r8, r0
MOV r0, r13 ; Write to temp storage.
BL idct2_1core_v6
- ; Clear input data for next block (decoder only).
- SUB r0, r1, #2*16
- CMP r0, r8
+ ; Clear input data for next block.
+ MOV r4, #0
+ STR r4, [r1,#-2*16]!
+ STR r4, [r1,#16]
MOV r1, r13 ; Read from temp storage.
- MOVNE r4, #0
- STRNE r4, [r0]
- STRNE r4, [r0,#16]
- MOVNE r0, r8 ; Write to the final destination.
+ MOV r0, r8 ; Write to the final destination.
; Column transforms
BL idct2_2core_down_v6
BL idct2_2core_down_v6
@@ -1041,20 +1013,16 @@ oc_idct8x8_6_v6 PROC
ADD r0, r0, r13 ; Write to temp storage.
BL idct3_2core_v6
BL idct1core_v6
- ; Clear input data for next block (decoder only).
- SUB r0, r1, #3*16
- CMP r0, r8
- AND r1, r13,#4 ; Align the stack.
- BEQ oc_idct8x8_6_v6_cols
+ ; Clear input data for next block.
MOV r4, #0
MOV r5, #0
- STRD r4, [r0]
- STR r4, [r0,#16]
- STR r4, [r0,#32]
+ STRD r4, [r1,#-3*16]!
+ STR r4, [r1,#16]
+ STR r4, [r1,#32]
+ AND r1, r13,#4 ; Align the stack.
MOV r0, r8 ; Write to the final destination.
-oc_idct8x8_6_v6_cols
-; Column transforms
ADD r1, r1, r13 ; And read from temp storage.
+; Column transforms
BL idct3_3core_down_v6
BL idct3_3core_down_v6
BL idct3_3core_down_v6
@@ -1596,7 +1564,6 @@ oc_idct8x8_slow_neon
VSWP D23,D30
; Column transforms
BL oc_idct8x8_stage123_neon
- CMP r0,r1
; We have to put the return address back in the LR, or the branch
; predictor will not recognize the function return and mis-predict the
; entire call stack.
@@ -1610,7 +1577,6 @@ oc_idct8x8_slow_neon
VADD.S16 Q10,Q10,Q5 ; Q10 = y[2]=t[2]'+t[5]''
VSUB.S16 Q12,Q11,Q4 ; Q12 = y[4]=t[3]'-t[4]'
VADD.S16 Q11,Q11,Q4 ; Q11 = y[3]=t[3]'+t[4]'
- BEQ oc_idct8x8_slow_neon_noclear
VMOV.I8 Q2,#0
VPOP {D8-D15}
VMOV.I8 Q3,#0
@@ -1628,19 +1594,6 @@ oc_idct8x8_slow_neon
VRSHR.S16 Q15,Q15,#4 ; Q15 = y[7]+8>>4
VSTMIA r0, {D16-D31}
MOV PC, r14
-
-oc_idct8x8_slow_neon_noclear
- VPOP {D8-D15}
- VRSHR.S16 Q8, Q8, #4 ; Q8 = y[0]+8>>4
- VRSHR.S16 Q9, Q9, #4 ; Q9 = y[1]+8>>4
- VRSHR.S16 Q10,Q10,#4 ; Q10 = y[2]+8>>4
- VRSHR.S16 Q11,Q11,#4 ; Q11 = y[3]+8>>4
- VRSHR.S16 Q12,Q12,#4 ; Q12 = y[4]+8>>4
- VRSHR.S16 Q13,Q13,#4 ; Q13 = y[5]+8>>4
- VRSHR.S16 Q14,Q14,#4 ; Q14 = y[6]+8>>4
- VRSHR.S16 Q15,Q15,#4 ; Q15 = y[7]+8>>4
- VSTMIA r0, {D16-D31}
- MOV PC, r14
ENDP
oc_idct8x8_stage123_neon PROC
@@ -1871,7 +1824,6 @@ oc_idct8x8_10_neon PROC
VADD.S16 Q10,Q1, Q2 ; Q10= t[1]'=t[0]+t[2]
VSUB.S16 Q2, Q1, Q2 ; Q2 = t[2]'=t[0]-t[2]
; Stage 4
- CMP r0, r1
VADD.S16 Q8, Q11,Q15 ; Q8 = y[0]=t[0]'+t[7]'
VADD.S16 Q9, Q10,Q14 ; Q9 = y[1]=t[1]'+t[6]''
VSUB.S16 Q15,Q11,Q15 ; Q15 = y[7]=t[0]'-t[7]'
@@ -1880,7 +1832,6 @@ oc_idct8x8_10_neon PROC
VADD.S16 Q11,Q3, Q12 ; Q11 = y[3]=t[3]'+t[4]'
VSUB.S16 Q12,Q3, Q12 ; Q12 = y[4]=t[3]'-t[4]'
VSUB.S16 Q13,Q2, Q13 ; Q13 = y[5]=t[2]'-t[5]''
- BEQ oc_idct8x8_10_neon_noclear
VMOV.I8 D2, #0
VRSHR.S16 Q8, Q8, #4 ; Q8 = y[0]+8>>4
VST1.64 {D2}, [r1@64], r12
@@ -1896,18 +1847,6 @@ oc_idct8x8_10_neon PROC
VRSHR.S16 Q15,Q15,#4 ; Q15 = y[7]+8>>4
VSTMIA r0, {D16-D31}
MOV PC, r14
-
-oc_idct8x8_10_neon_noclear
- VRSHR.S16 Q8, Q8, #4 ; Q8 = y[0]+8>>4
- VRSHR.S16 Q9, Q9, #4 ; Q9 = y[1]+8>>4
- VRSHR.S16 Q10,Q10,#4 ; Q10 = y[2]+8>>4
- VRSHR.S16 Q11,Q11,#4 ; Q11 = y[3]+8>>4
- VRSHR.S16 Q12,Q12,#4 ; Q12 = y[4]+8>>4
- VRSHR.S16 Q13,Q13,#4 ; Q13 = y[5]+8>>4
- VRSHR.S16 Q14,Q14,#4 ; Q14 = y[6]+8>>4
- VRSHR.S16 Q15,Q15,#4 ; Q15 = y[7]+8>>4
- VSTMIA r0, {D16-D31}
- MOV PC, r14
ENDP
]
diff --git a/media/libtheora/lib/arm/armloop.s b/media/libtheora/lib/arm/armloop.s
index 0a1d4705e7..bbd4d630ed 100644
--- a/media/libtheora/lib/arm/armloop.s
+++ b/media/libtheora/lib/arm/armloop.s
@@ -11,17 +11,11 @@
;********************************************************************
; Original implementation:
; Copyright (C) 2009 Robin Watts for Pinknoise Productions Ltd
-; last mod: $Id: armloop.s 17481 2010-10-03 22:49:42Z tterribe $
+; last mod: $Id$
;********************************************************************
AREA |.text|, CODE, READONLY
- ; Explicitly specifying alignment here because some versions of
- ; gas don't align code correctly. See
- ; http://lists.gnu.org/archive/html/bug-binutils/2011-06/msg00199.html
- ; https://bugzilla.mozilla.org/show_bug.cgi?id=920992
- ALIGN
-
GET armopts.s
EXPORT oc_loop_filter_frag_rows_arm
diff --git a/media/libtheora/lib/arm/armopts.s b/media/libtheora/lib/arm/armopts.s
index e4da429e47..4dfdca9608 100644
--- a/media/libtheora/lib/arm/armopts.s
+++ b/media/libtheora/lib/arm/armopts.s
@@ -11,7 +11,7 @@
;********************************************************************
; Original implementation:
; Copyright (C) 2009 Robin Watts for Pinknoise Productions Ltd
-; last mod: $Id: armopts.s.in 17430 2010-09-22 21:54:09Z tterribe $
+; last mod: $Id$
;********************************************************************
; Set the following to 1 if we have EDSP instructions
diff --git a/media/libtheora/lib/bitpack.c b/media/libtheora/lib/bitpack.c
index 8bfce4c3d0..5125dde6b0 100644
--- a/media/libtheora/lib/bitpack.c
+++ b/media/libtheora/lib/bitpack.c
@@ -11,7 +11,7 @@
********************************************************************
function: packing variable sized words into an octet stream
- last mod: $Id: bitpack.c 17410 2010-09-21 21:53:48Z tterribe $
+ last mod: $Id$
********************************************************************/
#include <string.h>
diff --git a/media/libtheora/lib/config.h b/media/libtheora/lib/config.h
index 49772ac7f3..b5c9aedfbc 100644
--- a/media/libtheora/lib/config.h
+++ b/media/libtheora/lib/config.h
@@ -13,8 +13,8 @@
/* Define to 1 if you have the <machine/soundcard.h> header file. */
/* #undef HAVE_MACHINE_SOUNDCARD_H */
-/* Define to 1 if you have the <memory.h> header file. */
-#define HAVE_MEMORY_H 1
+/* Abort if size exceeds 16384x16384 (for fuzzing only) */
+/* #undef HAVE_MEMORY_CONSTRAINT */
/* Define to 1 if you have the <soundcard.h> header file. */
/* #undef HAVE_SOUNDCARD_H */
@@ -22,6 +22,9 @@
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
+/* Define to 1 if you have the <stdio.h> header file. */
+#define HAVE_STDIO_H 1
+
/* Define to 1 if you have the <stdlib.h> header file. */
#define HAVE_STDLIB_H 1
@@ -43,56 +46,58 @@
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
-/* Define to 1 if your C compiler doesn't accept -c and -o together. */
-/* #undef NO_MINUS_C_MINUS_O */
+/* Define to the sub-directory where libtool stores uninstalled libraries. */
+#define LT_OBJDIR ".libs/"
/* make use of arm asm optimization */
-
+/* #undef OC_ARM_ASM */
/* Define if assembler supports EDSP instructions */
-
+/* #undef OC_ARM_ASM_EDSP */
/* Define if assembler supports ARMv6 media instructions */
-
+/* #undef OC_ARM_ASM_MEDIA */
/* Define if compiler supports NEON instructions */
-
+/* #undef OC_ARM_ASM_NEON */
/* make use of c64x+ asm optimization */
/* #undef OC_C64X_ASM */
/* make use of x86_64 asm optimization */
-/* #undef OC_X86_64_ASM */
+ /**/
/* make use of x86 asm optimization */
-/* #undef OC_X86_ASM */
+ /**/
/* Name of package */
#define PACKAGE "libtheora"
/* Define to the address where bug reports for this package should be sent. */
-#define PACKAGE_BUGREPORT ""
+#define PACKAGE_BUGREPORT "theora-dev@xiph.org"
/* Define to the full name of this package. */
#define PACKAGE_NAME "libtheora"
/* Define to the full name and version of this package. */
-#define PACKAGE_STRING "libtheora 1.2.0alpha1+svn"
+#define PACKAGE_STRING "libtheora 1.2.0alpha1+git"
/* Define to the one symbol short name of this package. */
#define PACKAGE_TARNAME "libtheora"
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
/* Define to the version of this package. */
-#define PACKAGE_VERSION "1.2.0alpha1+svn"
+#define PACKAGE_VERSION "1.2.0alpha1+git"
-/* Define to 1 if you have the ANSI C header files. */
+/* Define to 1 if all of the C90 standard headers exist (not just the ones
+ required in a freestanding environment). This macro is provided for
+ backward compatibility; new code need not use it. */
#define STDC_HEADERS 1
/* Define to exclude encode support from the build */
/* #undef THEORA_DISABLE_ENCODE */
-/* Define to exclude floating point code from the build */
-/* #undef THEORA_DISABLE_FLOAT */
-
/* Version number of package */
-#define VERSION "1.2.0alpha1+svn"
+#define VERSION "1.2.0alpha1+git"
diff --git a/media/libtheora/lib/dct.h b/media/libtheora/lib/dct.h
index 24ba6f111a..8052ea6bc1 100644
--- a/media/libtheora/lib/dct.h
+++ b/media/libtheora/lib/dct.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: dct.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/decapiwrapper.c b/media/libtheora/lib/decapiwrapper.c
deleted file mode 100644
index 12ea475d17..0000000000
--- a/media/libtheora/lib/decapiwrapper.c
+++ /dev/null
@@ -1,193 +0,0 @@
-/********************************************************************
- * *
- * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
- * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
- * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
- * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
- * *
- * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2009 *
- * by the Xiph.Org Foundation and contributors http://www.xiph.org/ *
- * *
- ********************************************************************
-
- function:
- last mod: $Id: decapiwrapper.c 13596 2007-08-23 20:05:38Z tterribe $
-
- ********************************************************************/
-
-#include <stdlib.h>
-#include <string.h>
-#include <limits.h>
-#include "apiwrapper.h"
-#include "decint.h"
-#include "theora/theoradec.h"
-
-static void th_dec_api_clear(th_api_wrapper *_api){
- if(_api->setup)th_setup_free(_api->setup);
- if(_api->decode)th_decode_free(_api->decode);
- memset(_api,0,sizeof(*_api));
-}
-
-static void theora_decode_clear(theora_state *_td){
- if(_td->i!=NULL)theora_info_clear(_td->i);
- memset(_td,0,sizeof(*_td));
-}
-
-static int theora_decode_control(theora_state *_td,int _req,
- void *_buf,size_t _buf_sz){
- return th_decode_ctl(((th_api_wrapper *)_td->i->codec_setup)->decode,
- _req,_buf,_buf_sz);
-}
-
-static ogg_int64_t theora_decode_granule_frame(theora_state *_td,
- ogg_int64_t _gp){
- return th_granule_frame(((th_api_wrapper *)_td->i->codec_setup)->decode,_gp);
-}
-
-static double theora_decode_granule_time(theora_state *_td,ogg_int64_t _gp){
- return th_granule_time(((th_api_wrapper *)_td->i->codec_setup)->decode,_gp);
-}
-
-static const oc_state_dispatch_vtable OC_DEC_DISPATCH_VTBL={
- (oc_state_clear_func)theora_decode_clear,
- (oc_state_control_func)theora_decode_control,
- (oc_state_granule_frame_func)theora_decode_granule_frame,
- (oc_state_granule_time_func)theora_decode_granule_time,
-};
-
-static void th_info2theora_info(theora_info *_ci,const th_info *_info){
- _ci->version_major=_info->version_major;
- _ci->version_minor=_info->version_minor;
- _ci->version_subminor=_info->version_subminor;
- _ci->width=_info->frame_width;
- _ci->height=_info->frame_height;
- _ci->frame_width=_info->pic_width;
- _ci->frame_height=_info->pic_height;
- _ci->offset_x=_info->pic_x;
- _ci->offset_y=_info->pic_y;
- _ci->fps_numerator=_info->fps_numerator;
- _ci->fps_denominator=_info->fps_denominator;
- _ci->aspect_numerator=_info->aspect_numerator;
- _ci->aspect_denominator=_info->aspect_denominator;
- switch(_info->colorspace){
- case TH_CS_ITU_REC_470M:_ci->colorspace=OC_CS_ITU_REC_470M;break;
- case TH_CS_ITU_REC_470BG:_ci->colorspace=OC_CS_ITU_REC_470BG;break;
- default:_ci->colorspace=OC_CS_UNSPECIFIED;break;
- }
- switch(_info->pixel_fmt){
- case TH_PF_420:_ci->pixelformat=OC_PF_420;break;
- case TH_PF_422:_ci->pixelformat=OC_PF_422;break;
- case TH_PF_444:_ci->pixelformat=OC_PF_444;break;
- default:_ci->pixelformat=OC_PF_RSVD;
- }
- _ci->target_bitrate=_info->target_bitrate;
- _ci->quality=_info->quality;
- _ci->keyframe_frequency_force=1<<_info->keyframe_granule_shift;
-}
-
-int theora_decode_init(theora_state *_td,theora_info *_ci){
- th_api_info *apiinfo;
- th_api_wrapper *api;
- th_info info;
- api=(th_api_wrapper *)_ci->codec_setup;
- /*Allocate our own combined API wrapper/theora_info struct.
- We put them both in one malloc'd block so that when the API wrapper is
- freed, the info struct goes with it.
- This avoids having to figure out whether or not we need to free the info
- struct in either theora_info_clear() or theora_clear().*/
- apiinfo=(th_api_info *)_ogg_calloc(1,sizeof(*apiinfo));
- if(apiinfo==NULL)return OC_FAULT;
- /*Make our own copy of the info struct, since its lifetime should be
- independent of the one we were passed in.*/
- *&apiinfo->info=*_ci;
- /*Convert the info struct now instead of saving the the one we decoded with
- theora_decode_header(), since the user might have modified values (i.e.,
- color space, aspect ratio, etc. can be specified from a higher level).
- The user also might be doing something "clever" with the header packets if
- they are not using an Ogg encapsulation.*/
- oc_theora_info2th_info(&info,_ci);
- /*Don't bother to copy the setup info; th_decode_alloc() makes its own copy
- of the stuff it needs.*/
- apiinfo->api.decode=th_decode_alloc(&info,api->setup);
- if(apiinfo->api.decode==NULL){
- _ogg_free(apiinfo);
- return OC_EINVAL;
- }
- apiinfo->api.clear=(oc_setup_clear_func)th_dec_api_clear;
- _td->internal_encode=NULL;
- /*Provide entry points for ABI compatibility with old decoder shared libs.*/
- _td->internal_decode=(void *)&OC_DEC_DISPATCH_VTBL;
- _td->granulepos=0;
- _td->i=&apiinfo->info;
- _td->i->codec_setup=&apiinfo->api;
- return 0;
-}
-
-int theora_decode_header(theora_info *_ci,theora_comment *_cc,ogg_packet *_op){
- th_api_wrapper *api;
- th_info info;
- int ret;
- api=(th_api_wrapper *)_ci->codec_setup;
- /*Allocate an API wrapper struct on demand, since it will not also include a
- theora_info struct like the ones that are used in a theora_state struct.*/
- if(api==NULL){
- _ci->codec_setup=_ogg_calloc(1,sizeof(*api));
- if(_ci->codec_setup==NULL)return OC_FAULT;
- api=(th_api_wrapper *)_ci->codec_setup;
- api->clear=(oc_setup_clear_func)th_dec_api_clear;
- }
- /*Convert from the theora_info struct instead of saving our own th_info
- struct between calls.
- The user might be doing something "clever" with the header packets if they
- are not using an Ogg encapsulation, and we don't want to break this.*/
- oc_theora_info2th_info(&info,_ci);
- /*We rely on the fact that theora_comment and th_comment structures are
- actually identical.
- Take care not to change this fact unless you change the code here as
- well!*/
- ret=th_decode_headerin(&info,(th_comment *)_cc,&api->setup,_op);
- /*We also rely on the fact that the error return code values are the same,
- and that the implementations of these two functions return the same set of
- them.
- Note that theora_decode_header() really can return OC_NOTFORMAT, even
- though it is not currently documented to do so.*/
- if(ret<0)return ret;
- th_info2theora_info(_ci,&info);
- return 0;
-}
-
-int theora_decode_packetin(theora_state *_td,ogg_packet *_op){
- th_api_wrapper *api;
- ogg_int64_t gp;
- int ret;
- if(!_td||!_td->i||!_td->i->codec_setup)return OC_FAULT;
- api=(th_api_wrapper *)_td->i->codec_setup;
- ret=th_decode_packetin(api->decode,_op,&gp);
- if(ret<0)return OC_BADPACKET;
- _td->granulepos=gp;
- return 0;
-}
-
-int theora_decode_YUVout(theora_state *_td,yuv_buffer *_yuv){
- th_api_wrapper *api;
- th_dec_ctx *decode;
- th_ycbcr_buffer buf;
- int ret;
- if(!_td||!_td->i||!_td->i->codec_setup)return OC_FAULT;
- api=(th_api_wrapper *)_td->i->codec_setup;
- decode=(th_dec_ctx *)api->decode;
- if(!decode)return OC_FAULT;
- ret=th_decode_ycbcr_out(decode,buf);
- if(ret>=0){
- _yuv->y_width=buf[0].width;
- _yuv->y_height=buf[0].height;
- _yuv->y_stride=buf[0].stride;
- _yuv->uv_width=buf[1].width;
- _yuv->uv_height=buf[1].height;
- _yuv->uv_stride=buf[1].stride;
- _yuv->y=buf[0].data;
- _yuv->u=buf[1].data;
- _yuv->v=buf[2].data;
- }
- return ret;
-}
diff --git a/media/libtheora/lib/decinfo.c b/media/libtheora/lib/decinfo.c
index 603b1f93e4..a91e740b15 100644
--- a/media/libtheora/lib/decinfo.c
+++ b/media/libtheora/lib/decinfo.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: decinfo.c 17276 2010-06-05 05:57:05Z tterribe $
+ last mod: $Id$
********************************************************************/
@@ -20,6 +20,11 @@
#include <limits.h>
#include "decint.h"
+/*Only used for fuzzing.*/
+#if defined(HAVE_MEMORY_CONSTRAINT)
+static const int MAX_FUZZING_WIDTH = 16384;
+static const int MAX_FUZZING_HEIGHT = 16384;
+#endif
/*Unpacks a series of octets from a given byte array into the pack buffer.
@@ -55,8 +60,8 @@ static int oc_info_unpack(oc_pack_buf *_opb,th_info *_info){
/*verify we can parse this bitstream version.
We accept earlier minors and all subminors, by spec*/
if(_info->version_major>TH_VERSION_MAJOR||
- _info->version_major==TH_VERSION_MAJOR&&
- _info->version_minor>TH_VERSION_MINOR){
+ (_info->version_major==TH_VERSION_MAJOR&&
+ _info->version_minor>TH_VERSION_MINOR)){
return TH_EVERSION;
}
/*Read the encoded frame description.*/
@@ -82,6 +87,11 @@ static int oc_info_unpack(oc_pack_buf *_opb,th_info *_info){
_info->fps_numerator==0||_info->fps_denominator==0){
return TH_EBADHEADER;
}
+#if defined(HAVE_MEMORY_CONSTRAINT)
+ if(_info->frame_width>=MAX_FUZZING_WIDTH&&_info->frame_height>=MAX_FUZZING_HEIGHT){
+ return TH_EBADHEADER;
+ }
+#endif
/*Note: The sense of pic_y is inverted in what we pass back to the
application compared to how it is stored in the bitstream.
This is because the bitstream uses a right-handed coordinate system, while
@@ -172,9 +182,23 @@ static int oc_dec_headerin(oc_pack_buf *_opb,th_info *_info,
int ret;
val=oc_pack_read(_opb,8);
packtype=(int)val;
- /*If we're at a data packet and we have received all three headers, we're
- done.*/
- if(!(packtype&0x80)&&_info->frame_width>0&&_tc->vendor!=NULL&&*_setup!=NULL){
+ /*If we're at a data packet...*/
+ if(!(packtype&0x80)){
+ /*Check to make sure we received all three headers...
+ If we haven't seen any valid headers, assume this is not actually
+ Theora.*/
+ if(_info->frame_width<=0)return TH_ENOTFORMAT;
+    /*Follow our documentation, which says we'll return TH_EFAULT if these
+      are NULL (_info was checked by our caller).*/
+ if(_tc==NULL)return TH_EFAULT;
+ /*And if any other headers were missing, declare this packet "out of
+ sequence" instead.*/
+ if(_tc->vendor==NULL)return TH_EBADHEADER;
+ /*Don't check this until it's needed, since we allow passing NULL for the
+ arguments that we're not expecting the next header to fill in yet.*/
+ if(_setup==NULL)return TH_EFAULT;
+ if(*_setup==NULL)return TH_EBADHEADER;
+ /*If we got everything, we're done.*/
return 0;
}
/*Check the codec string.*/
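The rewritten check above distinguishes three failure modes when a data packet shows up before the headers are complete. A small illustration of the resulting return values, assuming ti/tc/ts were freshly initialized and p_info/p_data are a valid info-header packet and a valid data packet from a hypothetical demuxer:

    #include <theora/theoradec.h>

    void header_order_examples(th_info *ti,th_comment *tc,th_setup_info **ts,
     ogg_packet *p_info,ogg_packet *p_data){
      int r;
      /*Data before any header: nothing recognizably Theora was seen.*/
      r=th_decode_headerin(ti,tc,ts,p_data);    /*r==TH_ENOTFORMAT*/
      /*The info header parses normally (positive return)...*/
      r=th_decode_headerin(ti,tc,ts,p_info);    /*r>0*/
      /*...but data before the comment/setup headers is "out of sequence".*/
      r=th_decode_headerin(ti,tc,ts,p_data);    /*r==TH_EBADHEADER*/
      /*A NULL comment struct with a data packet now reports TH_EFAULT.*/
      r=th_decode_headerin(ti,NULL,ts,p_data);  /*r==TH_EFAULT*/
      (void)r;
    }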
diff --git a/media/libtheora/lib/decint.h b/media/libtheora/lib/decint.h
index bd65222732..3cea6b1439 100644
--- a/media/libtheora/lib/decint.h
+++ b/media/libtheora/lib/decint.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: decint.h 17457 2010-09-24 02:05:49Z tterribe $
+ last mod: $Id$
********************************************************************/
@@ -162,7 +162,6 @@ struct th_dec_ctx{
# endif
# if defined(HAVE_CAIRO)
/*Output metrics for debugging.*/
- int telemetry;
int telemetry_mbmode;
int telemetry_mv;
int telemetry_qi;
diff --git a/media/libtheora/lib/decode.c b/media/libtheora/lib/decode.c
index 563782b7a2..fad26e0927 100644
--- a/media/libtheora/lib/decode.c
+++ b/media/libtheora/lib/decode.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: decode.c 17576 2010-10-29 01:07:51Z tterribe $
+ last mod: $Id$
********************************************************************/
@@ -417,7 +417,6 @@ static int oc_dec_init(oc_dec_ctx *_dec,const th_info *_info,
_dec->stripe_cb.ctx=NULL;
_dec->stripe_cb.stripe_decoded=NULL;
#if defined(HAVE_CAIRO)
- _dec->telemetry=0;
_dec->telemetry_bits=0;
_dec->telemetry_qi=0;
_dec->telemetry_mbmode=0;
@@ -1203,6 +1202,9 @@ static void oc_dec_residual_tokens_unpack(oc_dec_ctx *_dec){
static int oc_dec_postprocess_init(oc_dec_ctx *_dec){
+ /*musl libc malloc()/realloc() calls might use floating point, so make sure
+ we've cleared the MMX state for them.*/
+ oc_restore_fpu(&_dec->state);
/*pp_level 0: disabled; free any memory used and return*/
if(_dec->pp_level<=OC_PP_LEVEL_DISABLED){
if(_dec->dc_qis!=NULL){
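oc_restore_fpu() is effectively a no-op on builds without MMX; on x86/MMX builds it reduces to an EMMS so that libc's allocator can safely use the x87 FPU afterwards. A sketch of that variant, written here only to make the comment above concrete (libtheora's own version lives with the x86 MMX state code):

    #if defined(__GNUC__)&&(defined(__i386__)||defined(__x86_64__))
    /*Clear the MMX tag word so later x87 floating-point use (e.g. inside
       malloc()/realloc() on musl) starts from a clean FPU state.*/
    static void restore_fpu_mmx_sketch(void){
      __asm__ __volatile__("emms");
    }
    #endif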
@@ -2019,28 +2021,24 @@ int th_decode_ctl(th_dec_ctx *_dec,int _req,void *_buf,
case TH_DECCTL_SET_TELEMETRY_MBMODE:{
if(_dec==NULL||_buf==NULL)return TH_EFAULT;
if(_buf_sz!=sizeof(int))return TH_EINVAL;
- _dec->telemetry=1;
_dec->telemetry_mbmode=*(int *)_buf;
return 0;
}break;
case TH_DECCTL_SET_TELEMETRY_MV:{
if(_dec==NULL||_buf==NULL)return TH_EFAULT;
if(_buf_sz!=sizeof(int))return TH_EINVAL;
- _dec->telemetry=1;
_dec->telemetry_mv=*(int *)_buf;
return 0;
}break;
case TH_DECCTL_SET_TELEMETRY_QI:{
if(_dec==NULL||_buf==NULL)return TH_EFAULT;
if(_buf_sz!=sizeof(int))return TH_EINVAL;
- _dec->telemetry=1;
_dec->telemetry_qi=*(int *)_buf;
return 0;
}break;
case TH_DECCTL_SET_TELEMETRY_BITS:{
if(_dec==NULL||_buf==NULL)return TH_EFAULT;
if(_buf_sz!=sizeof(int))return TH_EINVAL;
- _dec->telemetry=1;
_dec->telemetry_bits=*(int *)_buf;
return 0;
}break;
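With the blanket telemetry flag gone, the overlays are controlled entirely by the per-feature masks passed through th_decode_ctl(). A minimal usage sketch; the mask values are illustrative, and the calls only have a visible effect in builds with Cairo telemetry support:

    #include <theora/theoradec.h>

    void enable_telemetry(th_dec_ctx *td){
      int mbmode=0xFF;  /*draw boxes for every macroblock mode*/
      int mv=0xFF;      /*draw motion vectors for every inter mode*/
      th_decode_ctl(td,TH_DECCTL_SET_TELEMETRY_MBMODE,&mbmode,sizeof(mbmode));
      th_decode_ctl(td,TH_DECCTL_SET_TELEMETRY_MV,&mv,sizeof(mv));
      /*Passing 0 for all four masks turns telemetry back off.*/
    }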
@@ -2081,6 +2079,664 @@ static void oc_dec_init_dummy_frame(th_dec_ctx *_dec){
memset(_dec->state.ref_frame_data[0]-yoffset,0x80,yplane_sz+2*cplane_sz);
}
+#if defined(HAVE_CAIRO)
+static void oc_render_telemetry(th_dec_ctx *_dec,th_ycbcr_buffer _ycbcr,
+ int _telemetry){
+ /*Stuff the plane into cairo.*/
+ cairo_surface_t *cs;
+ unsigned char *data;
+ unsigned char *y_row;
+ unsigned char *u_row;
+ unsigned char *v_row;
+ unsigned char *rgb_row;
+ int cstride;
+ int w;
+ int h;
+ int x;
+ int y;
+ int hdec;
+ int vdec;
+ w=_ycbcr[0].width;
+ h=_ycbcr[0].height;
+ hdec=!(_dec->state.info.pixel_fmt&1);
+ vdec=!(_dec->state.info.pixel_fmt&2);
+ /*Lazy data buffer init.
+ We could try to re-use the post-processing buffer, which would save
+ memory, but complicate the allocation logic there.
+ I don't think anyone cares about memory usage when using telemetry; it is
+ not meant for embedded devices.*/
+ if(_dec->telemetry_frame_data==NULL){
+ _dec->telemetry_frame_data=_ogg_malloc(
+ (w*h+2*(w>>hdec)*(h>>vdec))*sizeof(*_dec->telemetry_frame_data));
+ if(_dec->telemetry_frame_data==NULL)return;
+ }
+ cs=cairo_image_surface_create(CAIRO_FORMAT_RGB24,w,h);
+ /*Sadly, no YUV support in Cairo (yet); convert into the RGB buffer.*/
+ data=cairo_image_surface_get_data(cs);
+ if(data==NULL){
+ cairo_surface_destroy(cs);
+ return;
+ }
+ cstride=cairo_image_surface_get_stride(cs);
+ y_row=_ycbcr[0].data;
+ u_row=_ycbcr[1].data;
+ v_row=_ycbcr[2].data;
+ rgb_row=data;
+ for(y=0;y<h;y++){
+ for(x=0;x<w;x++){
+ int r;
+ int g;
+ int b;
+ r=(1904000*y_row[x]+2609823*v_row[x>>hdec]-363703744)/1635200;
+ g=(3827562*y_row[x]-1287801*u_row[x>>hdec]
+ -2672387*v_row[x>>hdec]+447306710)/3287200;
+ b=(952000*y_row[x]+1649289*u_row[x>>hdec]-225932192)/817600;
+ rgb_row[4*x+0]=OC_CLAMP255(b);
+ rgb_row[4*x+1]=OC_CLAMP255(g);
+ rgb_row[4*x+2]=OC_CLAMP255(r);
+ }
+ y_row+=_ycbcr[0].stride;
+ u_row+=_ycbcr[1].stride&-((y&1)|!vdec);
+ v_row+=_ycbcr[2].stride&-((y&1)|!vdec);
+ rgb_row+=cstride;
+ }
+ /*Draw coded identifier for each macroblock (stored in Hilbert order).*/
+ {
+ cairo_t *c;
+ const oc_fragment *frags;
+ oc_mv *frag_mvs;
+ const signed char *mb_modes;
+ oc_mb_map *mb_maps;
+ size_t nmbs;
+ size_t mbi;
+ int row2;
+ int col2;
+ int qim[3]={0,0,0};
+ if(_dec->state.nqis==2){
+ int bqi;
+ bqi=_dec->state.qis[0];
+ if(_dec->state.qis[1]>bqi)qim[1]=1;
+ if(_dec->state.qis[1]<bqi)qim[1]=-1;
+ }
+ if(_dec->state.nqis==3){
+ int bqi;
+ int cqi;
+ int dqi;
+ bqi=_dec->state.qis[0];
+ cqi=_dec->state.qis[1];
+ dqi=_dec->state.qis[2];
+ if(cqi>bqi&&dqi>bqi){
+ if(dqi>cqi){
+ qim[1]=1;
+ qim[2]=2;
+ }
+ else{
+ qim[1]=2;
+ qim[2]=1;
+ }
+ }
+ else if(cqi<bqi&&dqi<bqi){
+ if(dqi<cqi){
+ qim[1]=-1;
+ qim[2]=-2;
+ }
+ else{
+ qim[1]=-2;
+ qim[2]=-1;
+ }
+ }
+ else{
+ if(cqi<bqi)qim[1]=-1;
+ else qim[1]=1;
+ if(dqi<bqi)qim[2]=-1;
+ else qim[2]=1;
+ }
+ }
+ c=cairo_create(cs);
+ frags=_dec->state.frags;
+ frag_mvs=_dec->state.frag_mvs;
+ mb_modes=_dec->state.mb_modes;
+ mb_maps=_dec->state.mb_maps;
+ nmbs=_dec->state.nmbs;
+ row2=0;
+ col2=0;
+ for(mbi=0;mbi<nmbs;mbi++){
+ float x;
+ float y;
+ int bi;
+ y=h-(row2+((col2+1>>1)&1))*16-16;
+ x=(col2>>1)*16;
+ cairo_set_line_width(c,1.);
+ /*Keyframe (all intra) red box.*/
+ if(_dec->state.frame_type==OC_INTRA_FRAME){
+ if(_dec->telemetry_mbmode&0x02){
+ cairo_set_source_rgba(c,1.,0,0,.5);
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,1.,0,0,.25);
+ cairo_fill(c);
+ }
+ }
+ else{
+ ptrdiff_t fragi;
+ int frag_mvx;
+ int frag_mvy;
+ for(bi=0;bi<4;bi++){
+ fragi=mb_maps[mbi][0][bi];
+ if(fragi>=0&&frags[fragi].coded){
+ frag_mvx=OC_MV_X(frag_mvs[fragi]);
+ frag_mvy=OC_MV_Y(frag_mvs[fragi]);
+ break;
+ }
+ }
+ if(bi<4){
+ switch(mb_modes[mbi]){
+ case OC_MODE_INTRA:{
+ if(_dec->telemetry_mbmode&0x02){
+ cairo_set_source_rgba(c,1.,0,0,.5);
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,1.,0,0,.25);
+ cairo_fill(c);
+ }
+ }break;
+ case OC_MODE_INTER_NOMV:{
+ if(_dec->telemetry_mbmode&0x01){
+ cairo_set_source_rgba(c,0,0,1.,.5);
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,0,0,1.,.25);
+ cairo_fill(c);
+ }
+ }break;
+ case OC_MODE_INTER_MV:{
+ if(_dec->telemetry_mbmode&0x04){
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_set_source_rgba(c,0,1.,0,.5);
+ cairo_stroke(c);
+ }
+ if(_dec->telemetry_mv&0x04){
+ cairo_move_to(c,x+8+frag_mvx,y+8-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+8+frag_mvx*.66,y+8-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+8+frag_mvx*.33,y+8-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+8,y+8);
+ cairo_stroke(c);
+ }
+ }break;
+ case OC_MODE_INTER_MV_LAST:{
+ if(_dec->telemetry_mbmode&0x08){
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_set_source_rgba(c,0,1.,0,.5);
+ cairo_move_to(c,x+13.5,y+2.5);
+ cairo_line_to(c,x+2.5,y+8);
+ cairo_line_to(c,x+13.5,y+13.5);
+ cairo_stroke(c);
+ }
+ if(_dec->telemetry_mv&0x08){
+ cairo_move_to(c,x+8+frag_mvx,y+8-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+8+frag_mvx*.66,y+8-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+8+frag_mvx*.33,y+8-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+8,y+8);
+ cairo_stroke(c);
+ }
+ }break;
+ case OC_MODE_INTER_MV_LAST2:{
+ if(_dec->telemetry_mbmode&0x10){
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_set_source_rgba(c,0,1.,0,.5);
+ cairo_move_to(c,x+8,y+2.5);
+ cairo_line_to(c,x+2.5,y+8);
+ cairo_line_to(c,x+8,y+13.5);
+ cairo_move_to(c,x+13.5,y+2.5);
+ cairo_line_to(c,x+8,y+8);
+ cairo_line_to(c,x+13.5,y+13.5);
+ cairo_stroke(c);
+ }
+ if(_dec->telemetry_mv&0x10){
+ cairo_move_to(c,x+8+frag_mvx,y+8-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+8+frag_mvx*.66,y+8-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+8+frag_mvx*.33,y+8-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+8,y+8);
+ cairo_stroke(c);
+ }
+ }break;
+ case OC_MODE_GOLDEN_NOMV:{
+ if(_dec->telemetry_mbmode&0x20){
+ cairo_set_source_rgba(c,1.,1.,0,.5);
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,1.,1.,0,.25);
+ cairo_fill(c);
+ }
+ }break;
+ case OC_MODE_GOLDEN_MV:{
+ if(_dec->telemetry_mbmode&0x40){
+ cairo_rectangle(c,x+2.5,y+2.5,11,11);
+ cairo_set_source_rgba(c,1.,1.,0,.5);
+ cairo_stroke(c);
+ }
+ if(_dec->telemetry_mv&0x40){
+ cairo_move_to(c,x+8+frag_mvx,y+8-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+8+frag_mvx*.66,y+8-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+8+frag_mvx*.33,y+8-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+8,y+8);
+ cairo_stroke(c);
+ }
+ }break;
+ case OC_MODE_INTER_MV_FOUR:{
+ if(_dec->telemetry_mbmode&0x80){
+ cairo_rectangle(c,x+2.5,y+2.5,4,4);
+ cairo_rectangle(c,x+9.5,y+2.5,4,4);
+ cairo_rectangle(c,x+2.5,y+9.5,4,4);
+ cairo_rectangle(c,x+9.5,y+9.5,4,4);
+ cairo_set_source_rgba(c,0,1.,0,.5);
+ cairo_stroke(c);
+ }
+ /*4mv is odd, coded in raster order.*/
+ fragi=mb_maps[mbi][0][0];
+ if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
+ frag_mvx=OC_MV_X(frag_mvs[fragi]);
+            frag_mvy=OC_MV_Y(frag_mvs[fragi]);
+ cairo_move_to(c,x+4+frag_mvx,y+12-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+4+frag_mvx*.66,y+12-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+4+frag_mvx*.33,y+12-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+4,y+12);
+ cairo_stroke(c);
+ }
+ fragi=mb_maps[mbi][0][1];
+ if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
+ frag_mvx=OC_MV_X(frag_mvs[fragi]);
+            frag_mvy=OC_MV_Y(frag_mvs[fragi]);
+ cairo_move_to(c,x+12+frag_mvx,y+12-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+12+frag_mvx*.66,y+12-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+12+frag_mvx*.33,y+12-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+12,y+12);
+ cairo_stroke(c);
+ }
+ fragi=mb_maps[mbi][0][2];
+ if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
+ frag_mvx=OC_MV_X(frag_mvs[fragi]);
+            frag_mvy=OC_MV_Y(frag_mvs[fragi]);
+ cairo_move_to(c,x+4+frag_mvx,y+4-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+4+frag_mvx*.66,y+4-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+4+frag_mvx*.33,y+4-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+4,y+4);
+ cairo_stroke(c);
+ }
+ fragi=mb_maps[mbi][0][3];
+ if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
+ frag_mvx=OC_MV_X(frag_mvs[fragi]);
+            frag_mvy=OC_MV_Y(frag_mvs[fragi]);
+ cairo_move_to(c,x+12+frag_mvx,y+4-frag_mvy);
+ cairo_set_source_rgba(c,1.,1.,1.,.9);
+ cairo_set_line_width(c,3.);
+ cairo_line_to(c,x+12+frag_mvx*.66,y+4-frag_mvy*.66);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,2.);
+ cairo_line_to(c,x+12+frag_mvx*.33,y+4-frag_mvy*.33);
+ cairo_stroke_preserve(c);
+ cairo_set_line_width(c,1.);
+ cairo_line_to(c,x+12,y+4);
+ cairo_stroke(c);
+ }
+ }break;
+ }
+ }
+ }
+ /*qii illustration.*/
+ if(_dec->telemetry_qi&0x2){
+ cairo_set_line_cap(c,CAIRO_LINE_CAP_SQUARE);
+ for(bi=0;bi<4;bi++){
+ ptrdiff_t fragi;
+ int qiv;
+ int xp;
+ int yp;
+ xp=x+(bi&1)*8;
+ yp=y+8-(bi&2)*4;
+ fragi=mb_maps[mbi][0][bi];
+ if(fragi>=0&&frags[fragi].coded){
+ qiv=qim[frags[fragi].qii];
+ cairo_set_line_width(c,3.);
+ cairo_set_source_rgba(c,0.,0.,0.,.5);
+ switch(qiv){
+ /*Double plus:*/
+ case 2:{
+ if((bi&1)^((bi&2)>>1)){
+ cairo_move_to(c,xp+2.5,yp+1.5);
+ cairo_line_to(c,xp+2.5,yp+3.5);
+ cairo_move_to(c,xp+1.5,yp+2.5);
+ cairo_line_to(c,xp+3.5,yp+2.5);
+ cairo_move_to(c,xp+5.5,yp+4.5);
+ cairo_line_to(c,xp+5.5,yp+6.5);
+ cairo_move_to(c,xp+4.5,yp+5.5);
+ cairo_line_to(c,xp+6.5,yp+5.5);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,0.,1.,1.,1.);
+ }
+ else{
+ cairo_move_to(c,xp+5.5,yp+1.5);
+ cairo_line_to(c,xp+5.5,yp+3.5);
+ cairo_move_to(c,xp+4.5,yp+2.5);
+ cairo_line_to(c,xp+6.5,yp+2.5);
+ cairo_move_to(c,xp+2.5,yp+4.5);
+ cairo_line_to(c,xp+2.5,yp+6.5);
+ cairo_move_to(c,xp+1.5,yp+5.5);
+ cairo_line_to(c,xp+3.5,yp+5.5);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,0.,1.,1.,1.);
+ }
+ }break;
+ /*Double minus:*/
+ case -2:{
+ cairo_move_to(c,xp+2.5,yp+2.5);
+ cairo_line_to(c,xp+5.5,yp+2.5);
+ cairo_move_to(c,xp+2.5,yp+5.5);
+ cairo_line_to(c,xp+5.5,yp+5.5);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,1.,1.,1.,1.);
+ }break;
+ /*Plus:*/
+ case 1:{
+ if((bi&2)==0)yp-=2;
+ if((bi&1)==0)xp-=2;
+ cairo_move_to(c,xp+4.5,yp+2.5);
+ cairo_line_to(c,xp+4.5,yp+6.5);
+ cairo_move_to(c,xp+2.5,yp+4.5);
+ cairo_line_to(c,xp+6.5,yp+4.5);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,.1,1.,.3,1.);
+ break;
+ }
+ /*Fall through.*/
+ /*Minus:*/
+ case -1:{
+ cairo_move_to(c,xp+2.5,yp+4.5);
+ cairo_line_to(c,xp+6.5,yp+4.5);
+ cairo_stroke_preserve(c);
+ cairo_set_source_rgba(c,1.,.3,.1,1.);
+ }break;
+ default:continue;
+ }
+ cairo_set_line_width(c,1.);
+ cairo_stroke(c);
+ }
+ }
+ }
+ col2++;
+ if((col2>>1)>=_dec->state.nhmbs){
+ col2=0;
+ row2+=2;
+ }
+ }
+ /*Bit usage indicator[s]:*/
+ if(_dec->telemetry_bits){
+ int widths[6];
+ int fpsn;
+ int fpsd;
+ int mult;
+ int fullw;
+ int padw;
+ int i;
+ fpsn=_dec->state.info.fps_numerator;
+ fpsd=_dec->state.info.fps_denominator;
+ mult=(_dec->telemetry_bits>=0xFF?1:_dec->telemetry_bits);
+ fullw=250.f*h*fpsd*mult/fpsn;
+ padw=w-24;
+ /*Header and coded block bits.*/
+ if(_dec->telemetry_frame_bytes<0||
+ _dec->telemetry_frame_bytes==OC_LOTS_OF_BITS){
+ _dec->telemetry_frame_bytes=0;
+ }
+ if(_dec->telemetry_coding_bytes<0||
+ _dec->telemetry_coding_bytes>_dec->telemetry_frame_bytes){
+ _dec->telemetry_coding_bytes=0;
+ }
+ if(_dec->telemetry_mode_bytes<0||
+ _dec->telemetry_mode_bytes>_dec->telemetry_frame_bytes){
+ _dec->telemetry_mode_bytes=0;
+ }
+ if(_dec->telemetry_mv_bytes<0||
+ _dec->telemetry_mv_bytes>_dec->telemetry_frame_bytes){
+ _dec->telemetry_mv_bytes=0;
+ }
+ if(_dec->telemetry_qi_bytes<0||
+ _dec->telemetry_qi_bytes>_dec->telemetry_frame_bytes){
+ _dec->telemetry_qi_bytes=0;
+ }
+ if(_dec->telemetry_dc_bytes<0||
+ _dec->telemetry_dc_bytes>_dec->telemetry_frame_bytes){
+ _dec->telemetry_dc_bytes=0;
+ }
+ widths[0]=padw*
+ (_dec->telemetry_frame_bytes-_dec->telemetry_coding_bytes)/fullw;
+ widths[1]=padw*
+ (_dec->telemetry_coding_bytes-_dec->telemetry_mode_bytes)/fullw;
+ widths[2]=padw*
+ (_dec->telemetry_mode_bytes-_dec->telemetry_mv_bytes)/fullw;
+ widths[3]=padw*(_dec->telemetry_mv_bytes-_dec->telemetry_qi_bytes)/fullw;
+ widths[4]=padw*(_dec->telemetry_qi_bytes-_dec->telemetry_dc_bytes)/fullw;
+ widths[5]=padw*(_dec->telemetry_dc_bytes)/fullw;
+ for(i=0;i<6;i++)if(widths[i]>w)widths[i]=w;
+ cairo_set_source_rgba(c,.0,.0,.0,.6);
+ cairo_rectangle(c,10,h-33,widths[0]+1,5);
+ cairo_rectangle(c,10,h-29,widths[1]+1,5);
+ cairo_rectangle(c,10,h-25,widths[2]+1,5);
+ cairo_rectangle(c,10,h-21,widths[3]+1,5);
+ cairo_rectangle(c,10,h-17,widths[4]+1,5);
+ cairo_rectangle(c,10,h-13,widths[5]+1,5);
+ cairo_fill(c);
+ cairo_set_source_rgb(c,1,0,0);
+ cairo_rectangle(c,10.5,h-32.5,widths[0],4);
+ cairo_fill(c);
+ cairo_set_source_rgb(c,0,1,0);
+ cairo_rectangle(c,10.5,h-28.5,widths[1],4);
+ cairo_fill(c);
+ cairo_set_source_rgb(c,0,0,1);
+ cairo_rectangle(c,10.5,h-24.5,widths[2],4);
+ cairo_fill(c);
+ cairo_set_source_rgb(c,.6,.4,.0);
+ cairo_rectangle(c,10.5,h-20.5,widths[3],4);
+ cairo_fill(c);
+ cairo_set_source_rgb(c,.3,.3,.3);
+ cairo_rectangle(c,10.5,h-16.5,widths[4],4);
+ cairo_fill(c);
+ cairo_set_source_rgb(c,.5,.5,.8);
+ cairo_rectangle(c,10.5,h-12.5,widths[5],4);
+ cairo_fill(c);
+ }
+ /*Master qi indicator[s]:*/
+ if(_dec->telemetry_qi&0x1){
+ cairo_text_extents_t extents;
+ char buffer[10];
+ int p;
+ int y;
+ p=0;
+ y=h-7.5;
+ if(_dec->state.qis[0]>=10)buffer[p++]=48+_dec->state.qis[0]/10;
+ buffer[p++]=48+_dec->state.qis[0]%10;
+ if(_dec->state.nqis>=2){
+ buffer[p++]=' ';
+ if(_dec->state.qis[1]>=10)buffer[p++]=48+_dec->state.qis[1]/10;
+ buffer[p++]=48+_dec->state.qis[1]%10;
+ }
+ if(_dec->state.nqis==3){
+ buffer[p++]=' ';
+ if(_dec->state.qis[2]>=10)buffer[p++]=48+_dec->state.qis[2]/10;
+ buffer[p++]=48+_dec->state.qis[2]%10;
+ }
+ buffer[p++]='\0';
+ cairo_select_font_face(c,"sans",
+ CAIRO_FONT_SLANT_NORMAL,CAIRO_FONT_WEIGHT_BOLD);
+ cairo_set_font_size(c,18);
+ cairo_text_extents(c,buffer,&extents);
+ cairo_set_source_rgb(c,1,1,1);
+ cairo_move_to(c,w-extents.x_advance-10,y);
+ cairo_show_text(c,buffer);
+ cairo_set_source_rgb(c,0,0,0);
+ cairo_move_to(c,w-extents.x_advance-10,y);
+ cairo_text_path(c,buffer);
+ cairo_set_line_width(c,.8);
+ cairo_set_line_join(c,CAIRO_LINE_JOIN_ROUND);
+ cairo_stroke(c);
+ }
+ cairo_destroy(c);
+ }
+ /*Out of the Cairo plane into the telemetry YUV buffer.*/
+ _ycbcr[0].data=_dec->telemetry_frame_data;
+ _ycbcr[0].stride=_ycbcr[0].width;
+ _ycbcr[1].data=_ycbcr[0].data+h*_ycbcr[0].stride;
+ _ycbcr[1].stride=_ycbcr[1].width;
+ _ycbcr[2].data=_ycbcr[1].data+(h>>vdec)*_ycbcr[1].stride;
+ _ycbcr[2].stride=_ycbcr[2].width;
+ y_row=_ycbcr[0].data;
+ u_row=_ycbcr[1].data;
+ v_row=_ycbcr[2].data;
+ rgb_row=data;
+ /*This is one of the few places it's worth handling chroma on a
+ case-by-case basis.*/
+ switch(_dec->state.info.pixel_fmt){
+ case TH_PF_420:{
+ for(y=0;y<h;y+=2){
+ unsigned char *y_row2;
+ unsigned char *rgb_row2;
+ y_row2=y_row+_ycbcr[0].stride;
+ rgb_row2=rgb_row+cstride;
+ for(x=0;x<w;x+=2){
+ int y;
+ int u;
+ int v;
+ y=(65481*rgb_row[4*x+2]+128553*rgb_row[4*x+1]
+ +24966*rgb_row[4*x+0]+4207500)/255000;
+ y_row[x]=OC_CLAMP255(y);
+ y=(65481*rgb_row[4*x+6]+128553*rgb_row[4*x+5]
+ +24966*rgb_row[4*x+4]+4207500)/255000;
+ y_row[x+1]=OC_CLAMP255(y);
+ y=(65481*rgb_row2[4*x+2]+128553*rgb_row2[4*x+1]
+ +24966*rgb_row2[4*x+0]+4207500)/255000;
+ y_row2[x]=OC_CLAMP255(y);
+ y=(65481*rgb_row2[4*x+6]+128553*rgb_row2[4*x+5]
+ +24966*rgb_row2[4*x+4]+4207500)/255000;
+ y_row2[x+1]=OC_CLAMP255(y);
+ u=(-8372*(rgb_row[4*x+2]+rgb_row[4*x+6]
+ +rgb_row2[4*x+2]+rgb_row2[4*x+6])
+ -16436*(rgb_row[4*x+1]+rgb_row[4*x+5]
+ +rgb_row2[4*x+1]+rgb_row2[4*x+5])
+ +24808*(rgb_row[4*x+0]+rgb_row[4*x+4]
+ +rgb_row2[4*x+0]+rgb_row2[4*x+4])+29032005)/225930;
+ v=(39256*(rgb_row[4*x+2]+rgb_row[4*x+6]
+ +rgb_row2[4*x+2]+rgb_row2[4*x+6])
+ -32872*(rgb_row[4*x+1]+rgb_row[4*x+5]
+ +rgb_row2[4*x+1]+rgb_row2[4*x+5])
+ -6384*(rgb_row[4*x+0]+rgb_row[4*x+4]
+ +rgb_row2[4*x+0]+rgb_row2[4*x+4])+45940035)/357510;
+ u_row[x>>1]=OC_CLAMP255(u);
+ v_row[x>>1]=OC_CLAMP255(v);
+ }
+ y_row+=_ycbcr[0].stride<<1;
+ u_row+=_ycbcr[1].stride;
+ v_row+=_ycbcr[2].stride;
+ rgb_row+=cstride<<1;
+ }
+ }break;
+ case TH_PF_422:{
+ for(y=0;y<h;y++){
+ for(x=0;x<w;x+=2){
+ int y;
+ int u;
+ int v;
+ y=(65481*rgb_row[4*x+2]+128553*rgb_row[4*x+1]
+ +24966*rgb_row[4*x+0]+4207500)/255000;
+ y_row[x]=OC_CLAMP255(y);
+ y=(65481*rgb_row[4*x+6]+128553*rgb_row[4*x+5]
+ +24966*rgb_row[4*x+4]+4207500)/255000;
+ y_row[x+1]=OC_CLAMP255(y);
+ u=(-16744*(rgb_row[4*x+2]+rgb_row[4*x+6])
+ -32872*(rgb_row[4*x+1]+rgb_row[4*x+5])
+ +49616*(rgb_row[4*x+0]+rgb_row[4*x+4])+29032005)/225930;
+ v=(78512*(rgb_row[4*x+2]+rgb_row[4*x+6])
+ -65744*(rgb_row[4*x+1]+rgb_row[4*x+5])
+ -12768*(rgb_row[4*x+0]+rgb_row[4*x+4])+45940035)/357510;
+ u_row[x>>1]=OC_CLAMP255(u);
+ v_row[x>>1]=OC_CLAMP255(v);
+ }
+ y_row+=_ycbcr[0].stride;
+ u_row+=_ycbcr[1].stride;
+ v_row+=_ycbcr[2].stride;
+ rgb_row+=cstride;
+ }
+ }break;
+ /*case TH_PF_444:*/
+ default:{
+ for(y=0;y<h;y++){
+ for(x=0;x<w;x++){
+ int y;
+ int u;
+ int v;
+ y=(65481*rgb_row[4*x+2]+128553*rgb_row[4*x+1]
+ +24966*rgb_row[4*x+0]+4207500)/255000;
+ u=(-33488*rgb_row[4*x+2]-65744*rgb_row[4*x+1]
+ +99232*rgb_row[4*x+0]+29032005)/225930;
+ v=(157024*rgb_row[4*x+2]-131488*rgb_row[4*x+1]
+ -25536*rgb_row[4*x+0]+45940035)/357510;
+ y_row[x]=OC_CLAMP255(y);
+ u_row[x]=OC_CLAMP255(u);
+ v_row[x]=OC_CLAMP255(v);
+ }
+ y_row+=_ycbcr[0].stride;
+ u_row+=_ycbcr[1].stride;
+ v_row+=_ycbcr[2].stride;
+ rgb_row+=cstride;
+ }
+ }break;
+ }
+ /*Finished.
+ Destroy the surface.*/
+ cairo_surface_destroy(cs);
+}
+#endif
+
int th_decode_packetin(th_dec_ctx *_dec,const ogg_packet *_op,
ogg_int64_t *_granpos){
int ret;
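For reference, the lazily allocated telemetry buffer above is sized for one full-resolution luma plane plus two chroma planes at the stream's decimation. A small sketch of the arithmetic, using the same w, h, hdec and vdec as the function:

    #include <stddef.h>

    /*Matches the allocation in oc_render_telemetry(): hdec/vdec are 1 when
       chroma is decimated horizontally/vertically, so 4:2:0 needs w*h*3/2
       bytes, 4:2:2 needs w*h*2 and 4:4:4 needs w*h*3.*/
    static size_t telemetry_buf_size(int w,int h,int hdec,int vdec){
      return (size_t)w*h+2*(size_t)(w>>hdec)*(size_t)(h>>vdec);
    }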
@@ -2121,6 +2777,15 @@ int th_decode_packetin(th_dec_ctx *_dec,const ogg_packet *_op,
int pli;
int notstart;
int notdone;
+#ifdef HAVE_CAIRO
+ int telemetry;
+ /*Save the current telemetry state.
+ This prevents it from being modified in the middle of decoding this
+ frame, which could cause us to skip calls to the striped decoding
+ callback.*/
+ telemetry=_dec->telemetry_mbmode||_dec->telemetry_mv||
+ _dec->telemetry_qi||_dec->telemetry_bits;
+#endif
/*Select a free buffer to use for the reconstructed version of this frame.*/
for(refi=0;refi==_dec->state.ref_frame_idx[OC_FRAME_GOLD]||
refi==_dec->state.ref_frame_idx[OC_FRAME_PREV];refi++);
@@ -2258,7 +2923,11 @@ int th_decode_packetin(th_dec_ctx *_dec,const ogg_packet *_op,
avail_fragy_end=OC_MINI(avail_fragy_end,
_dec->pipe.fragy_end[pli]-edelay<<frag_shift);
}
+#ifdef HAVE_CAIRO
+ if(_dec->stripe_cb.stripe_decoded!=NULL&&!telemetry){
+#else
if(_dec->stripe_cb.stripe_decoded!=NULL){
+#endif
/*The callback might want to use the FPU, so let's make sure they can.
We violate all kinds of ABI restrictions by not doing this until
now, but none of them actually matter since we don't use floating
@@ -2294,6 +2963,20 @@ int th_decode_packetin(th_dec_ctx *_dec,const ogg_packet *_op,
/*Restore the FPU before dump_frame, since that _does_ use the FPU (for PNG
gamma values, if nothing else).*/
oc_restore_fpu(&_dec->state);
+#ifdef HAVE_CAIRO
+ /*If telemetry ioctls are active, we need to draw to the output buffer.*/
+ if(telemetry){
+ oc_render_telemetry(_dec,stripe_buf,telemetry);
+ oc_ycbcr_buffer_flip(_dec->pp_frame_buf,stripe_buf);
+ /*If we had a striped decoding callback, we skipped calling it above
+ (because the telemetry wasn't rendered yet).
+ Call it now with the whole frame.*/
+ if(_dec->stripe_cb.stripe_decoded!=NULL){
+ (*_dec->stripe_cb.stripe_decoded)(_dec->stripe_cb.ctx,
+ stripe_buf,0,_dec->state.fplanes[0].nvfrags);
+ }
+ }
+#endif
#if defined(OC_DUMP_IMAGES)
/*We only dump images if there were some coded blocks.*/
oc_state_dump_frame(&_dec->state,OC_FRAME_SELF,"dec");
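The deferred callback above only matters when a striped-decode callback has been registered in the first place. A minimal registration sketch using the public API; the callback body and names are illustrative:

    #include <theora/theoradec.h>

    /*Called as bands of fragment rows finish decoding; with telemetry active
       the decoder now makes one call covering the whole frame instead.*/
    static void on_stripe(void *_ctx,th_ycbcr_buffer _buf,
     int _yfrag0,int _yfrag_end){
      (void)_ctx;(void)_buf;(void)_yfrag0;(void)_yfrag_end;
    }

    void register_stripe_cb(th_dec_ctx *td,void *app_ctx){
      th_stripe_callback cb;
      cb.ctx=app_ctx;
      cb.stripe_decoded=on_stripe;
      th_decode_ctl(td,TH_DECCTL_SET_STRIPE_CB,&cb,sizeof(cb));
    }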
@@ -2305,659 +2988,5 @@ int th_decode_packetin(th_dec_ctx *_dec,const ogg_packet *_op,
int th_decode_ycbcr_out(th_dec_ctx *_dec,th_ycbcr_buffer _ycbcr){
if(_dec==NULL||_ycbcr==NULL)return TH_EFAULT;
oc_ycbcr_buffer_flip(_ycbcr,_dec->pp_frame_buf);
-#if defined(HAVE_CAIRO)
- /*If telemetry ioctls are active, we need to draw to the output buffer.
- Stuff the plane into cairo.*/
- if(_dec->telemetry){
- cairo_surface_t *cs;
- unsigned char *data;
- unsigned char *y_row;
- unsigned char *u_row;
- unsigned char *v_row;
- unsigned char *rgb_row;
- int cstride;
- int w;
- int h;
- int x;
- int y;
- int hdec;
- int vdec;
- w=_ycbcr[0].width;
- h=_ycbcr[0].height;
- hdec=!(_dec->state.info.pixel_fmt&1);
- vdec=!(_dec->state.info.pixel_fmt&2);
- /*Lazy data buffer init.
- We could try to re-use the post-processing buffer, which would save
- memory, but complicate the allocation logic there.
- I don't think anyone cares about memory usage when using telemetry; it is
- not meant for embedded devices.*/
- if(_dec->telemetry_frame_data==NULL){
- _dec->telemetry_frame_data=_ogg_malloc(
- (w*h+2*(w>>hdec)*(h>>vdec))*sizeof(*_dec->telemetry_frame_data));
- if(_dec->telemetry_frame_data==NULL)return 0;
- }
- cs=cairo_image_surface_create(CAIRO_FORMAT_RGB24,w,h);
- /*Sadly, no YUV support in Cairo (yet); convert into the RGB buffer.*/
- data=cairo_image_surface_get_data(cs);
- if(data==NULL){
- cairo_surface_destroy(cs);
- return 0;
- }
- cstride=cairo_image_surface_get_stride(cs);
- y_row=_ycbcr[0].data;
- u_row=_ycbcr[1].data;
- v_row=_ycbcr[2].data;
- rgb_row=data;
- for(y=0;y<h;y++){
- for(x=0;x<w;x++){
- int r;
- int g;
- int b;
- r=(1904000*y_row[x]+2609823*v_row[x>>hdec]-363703744)/1635200;
- g=(3827562*y_row[x]-1287801*u_row[x>>hdec]
- -2672387*v_row[x>>hdec]+447306710)/3287200;
- b=(952000*y_row[x]+1649289*u_row[x>>hdec]-225932192)/817600;
- rgb_row[4*x+0]=OC_CLAMP255(b);
- rgb_row[4*x+1]=OC_CLAMP255(g);
- rgb_row[4*x+2]=OC_CLAMP255(r);
- }
- y_row+=_ycbcr[0].stride;
- u_row+=_ycbcr[1].stride&-((y&1)|!vdec);
- v_row+=_ycbcr[2].stride&-((y&1)|!vdec);
- rgb_row+=cstride;
- }
- /*Draw coded identifier for each macroblock (stored in Hilbert order).*/
- {
- cairo_t *c;
- const oc_fragment *frags;
- oc_mv *frag_mvs;
- const signed char *mb_modes;
- oc_mb_map *mb_maps;
- size_t nmbs;
- size_t mbi;
- int row2;
- int col2;
- int qim[3]={0,0,0};
- if(_dec->state.nqis==2){
- int bqi;
- bqi=_dec->state.qis[0];
- if(_dec->state.qis[1]>bqi)qim[1]=1;
- if(_dec->state.qis[1]<bqi)qim[1]=-1;
- }
- if(_dec->state.nqis==3){
- int bqi;
- int cqi;
- int dqi;
- bqi=_dec->state.qis[0];
- cqi=_dec->state.qis[1];
- dqi=_dec->state.qis[2];
- if(cqi>bqi&&dqi>bqi){
- if(dqi>cqi){
- qim[1]=1;
- qim[2]=2;
- }
- else{
- qim[1]=2;
- qim[2]=1;
- }
- }
- else if(cqi<bqi&&dqi<bqi){
- if(dqi<cqi){
- qim[1]=-1;
- qim[2]=-2;
- }
- else{
- qim[1]=-2;
- qim[2]=-1;
- }
- }
- else{
- if(cqi<bqi)qim[1]=-1;
- else qim[1]=1;
- if(dqi<bqi)qim[2]=-1;
- else qim[2]=1;
- }
- }
- c=cairo_create(cs);
- frags=_dec->state.frags;
- frag_mvs=_dec->state.frag_mvs;
- mb_modes=_dec->state.mb_modes;
- mb_maps=_dec->state.mb_maps;
- nmbs=_dec->state.nmbs;
- row2=0;
- col2=0;
- for(mbi=0;mbi<nmbs;mbi++){
- float x;
- float y;
- int bi;
- y=h-(row2+((col2+1>>1)&1))*16-16;
- x=(col2>>1)*16;
- cairo_set_line_width(c,1.);
- /*Keyframe (all intra) red box.*/
- if(_dec->state.frame_type==OC_INTRA_FRAME){
- if(_dec->telemetry_mbmode&0x02){
- cairo_set_source_rgba(c,1.,0,0,.5);
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,1.,0,0,.25);
- cairo_fill(c);
- }
- }
- else{
- ptrdiff_t fragi;
- int frag_mvx;
- int frag_mvy;
- for(bi=0;bi<4;bi++){
- fragi=mb_maps[mbi][0][bi];
- if(fragi>=0&&frags[fragi].coded){
- frag_mvx=OC_MV_X(frag_mvs[fragi]);
- frag_mvy=OC_MV_Y(frag_mvs[fragi]);
- break;
- }
- }
- if(bi<4){
- switch(mb_modes[mbi]){
- case OC_MODE_INTRA:{
- if(_dec->telemetry_mbmode&0x02){
- cairo_set_source_rgba(c,1.,0,0,.5);
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,1.,0,0,.25);
- cairo_fill(c);
- }
- }break;
- case OC_MODE_INTER_NOMV:{
- if(_dec->telemetry_mbmode&0x01){
- cairo_set_source_rgba(c,0,0,1.,.5);
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,0,0,1.,.25);
- cairo_fill(c);
- }
- }break;
- case OC_MODE_INTER_MV:{
- if(_dec->telemetry_mbmode&0x04){
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_set_source_rgba(c,0,1.,0,.5);
- cairo_stroke(c);
- }
- if(_dec->telemetry_mv&0x04){
- cairo_move_to(c,x+8+frag_mvx,y+8-frag_mvy);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+8+frag_mvx*.66,y+8-frag_mvy*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+8+frag_mvx*.33,y+8-frag_mvy*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+8,y+8);
- cairo_stroke(c);
- }
- }break;
- case OC_MODE_INTER_MV_LAST:{
- if(_dec->telemetry_mbmode&0x08){
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_set_source_rgba(c,0,1.,0,.5);
- cairo_move_to(c,x+13.5,y+2.5);
- cairo_line_to(c,x+2.5,y+8);
- cairo_line_to(c,x+13.5,y+13.5);
- cairo_stroke(c);
- }
- if(_dec->telemetry_mv&0x08){
- cairo_move_to(c,x+8+frag_mvx,y+8-frag_mvy);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+8+frag_mvx*.66,y+8-frag_mvy*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+8+frag_mvx*.33,y+8-frag_mvy*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+8,y+8);
- cairo_stroke(c);
- }
- }break;
- case OC_MODE_INTER_MV_LAST2:{
- if(_dec->telemetry_mbmode&0x10){
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_set_source_rgba(c,0,1.,0,.5);
- cairo_move_to(c,x+8,y+2.5);
- cairo_line_to(c,x+2.5,y+8);
- cairo_line_to(c,x+8,y+13.5);
- cairo_move_to(c,x+13.5,y+2.5);
- cairo_line_to(c,x+8,y+8);
- cairo_line_to(c,x+13.5,y+13.5);
- cairo_stroke(c);
- }
- if(_dec->telemetry_mv&0x10){
- cairo_move_to(c,x+8+frag_mvx,y+8-frag_mvy);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+8+frag_mvx*.66,y+8-frag_mvy*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+8+frag_mvx*.33,y+8-frag_mvy*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+8,y+8);
- cairo_stroke(c);
- }
- }break;
- case OC_MODE_GOLDEN_NOMV:{
- if(_dec->telemetry_mbmode&0x20){
- cairo_set_source_rgba(c,1.,1.,0,.5);
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,1.,1.,0,.25);
- cairo_fill(c);
- }
- }break;
- case OC_MODE_GOLDEN_MV:{
- if(_dec->telemetry_mbmode&0x40){
- cairo_rectangle(c,x+2.5,y+2.5,11,11);
- cairo_set_source_rgba(c,1.,1.,0,.5);
- cairo_stroke(c);
- }
- if(_dec->telemetry_mv&0x40){
- cairo_move_to(c,x+8+frag_mvx,y+8-frag_mvy);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+8+frag_mvx*.66,y+8-frag_mvy*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+8+frag_mvx*.33,y+8-frag_mvy*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+8,y+8);
- cairo_stroke(c);
- }
- }break;
- case OC_MODE_INTER_MV_FOUR:{
- if(_dec->telemetry_mbmode&0x80){
- cairo_rectangle(c,x+2.5,y+2.5,4,4);
- cairo_rectangle(c,x+9.5,y+2.5,4,4);
- cairo_rectangle(c,x+2.5,y+9.5,4,4);
- cairo_rectangle(c,x+9.5,y+9.5,4,4);
- cairo_set_source_rgba(c,0,1.,0,.5);
- cairo_stroke(c);
- }
- /*4mv is odd, coded in raster order.*/
- fragi=mb_maps[mbi][0][0];
- if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
- frag_mvx=OC_MV_X(frag_mvs[fragi]);
- frag_mvx=OC_MV_Y(frag_mvs[fragi]);
- cairo_move_to(c,x+4+frag_mvx,y+12-frag_mvy);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+4+frag_mvx*.66,y+12-frag_mvy*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+4+frag_mvx*.33,y+12-frag_mvy*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+4,y+12);
- cairo_stroke(c);
- }
- fragi=mb_maps[mbi][0][1];
- if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
- frag_mvx=OC_MV_X(frag_mvs[fragi]);
- frag_mvx=OC_MV_Y(frag_mvs[fragi]);
- cairo_move_to(c,x+12+frag_mvx,y+12-frag_mvy);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+12+frag_mvx*.66,y+12-frag_mvy*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+12+frag_mvx*.33,y+12-frag_mvy*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+12,y+12);
- cairo_stroke(c);
- }
- fragi=mb_maps[mbi][0][2];
- if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
- frag_mvx=OC_MV_X(frag_mvs[fragi]);
- frag_mvx=OC_MV_Y(frag_mvs[fragi]);
- cairo_move_to(c,x+4+frag_mvx,y+4-frag_mvy);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+4+frag_mvx*.66,y+4-frag_mvy*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+4+frag_mvx*.33,y+4-frag_mvy*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+4,y+4);
- cairo_stroke(c);
- }
- fragi=mb_maps[mbi][0][3];
- if(frags[fragi].coded&&_dec->telemetry_mv&0x80){
- frag_mvx=OC_MV_X(frag_mvs[fragi]);
- frag_mvx=OC_MV_Y(frag_mvs[fragi]);
- cairo_move_to(c,x+12+frag_mvx,y+4-frag_mvy);
- cairo_set_source_rgba(c,1.,1.,1.,.9);
- cairo_set_line_width(c,3.);
- cairo_line_to(c,x+12+frag_mvx*.66,y+4-frag_mvy*.66);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,2.);
- cairo_line_to(c,x+12+frag_mvx*.33,y+4-frag_mvy*.33);
- cairo_stroke_preserve(c);
- cairo_set_line_width(c,1.);
- cairo_line_to(c,x+12,y+4);
- cairo_stroke(c);
- }
- }break;
- }
- }
- }
- /*qii illustration.*/
- if(_dec->telemetry_qi&0x2){
- cairo_set_line_cap(c,CAIRO_LINE_CAP_SQUARE);
- for(bi=0;bi<4;bi++){
- ptrdiff_t fragi;
- int qiv;
- int xp;
- int yp;
- xp=x+(bi&1)*8;
- yp=y+8-(bi&2)*4;
- fragi=mb_maps[mbi][0][bi];
- if(fragi>=0&&frags[fragi].coded){
- qiv=qim[frags[fragi].qii];
- cairo_set_line_width(c,3.);
- cairo_set_source_rgba(c,0.,0.,0.,.5);
- switch(qiv){
- /*Double plus:*/
- case 2:{
- if((bi&1)^((bi&2)>>1)){
- cairo_move_to(c,xp+2.5,yp+1.5);
- cairo_line_to(c,xp+2.5,yp+3.5);
- cairo_move_to(c,xp+1.5,yp+2.5);
- cairo_line_to(c,xp+3.5,yp+2.5);
- cairo_move_to(c,xp+5.5,yp+4.5);
- cairo_line_to(c,xp+5.5,yp+6.5);
- cairo_move_to(c,xp+4.5,yp+5.5);
- cairo_line_to(c,xp+6.5,yp+5.5);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,0.,1.,1.,1.);
- }
- else{
- cairo_move_to(c,xp+5.5,yp+1.5);
- cairo_line_to(c,xp+5.5,yp+3.5);
- cairo_move_to(c,xp+4.5,yp+2.5);
- cairo_line_to(c,xp+6.5,yp+2.5);
- cairo_move_to(c,xp+2.5,yp+4.5);
- cairo_line_to(c,xp+2.5,yp+6.5);
- cairo_move_to(c,xp+1.5,yp+5.5);
- cairo_line_to(c,xp+3.5,yp+5.5);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,0.,1.,1.,1.);
- }
- }break;
- /*Double minus:*/
- case -2:{
- cairo_move_to(c,xp+2.5,yp+2.5);
- cairo_line_to(c,xp+5.5,yp+2.5);
- cairo_move_to(c,xp+2.5,yp+5.5);
- cairo_line_to(c,xp+5.5,yp+5.5);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,1.,1.,1.,1.);
- }break;
- /*Plus:*/
- case 1:{
- if(bi&2==0)yp-=2;
- if(bi&1==0)xp-=2;
- cairo_move_to(c,xp+4.5,yp+2.5);
- cairo_line_to(c,xp+4.5,yp+6.5);
- cairo_move_to(c,xp+2.5,yp+4.5);
- cairo_line_to(c,xp+6.5,yp+4.5);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,.1,1.,.3,1.);
- break;
- }
- /*Fall through.*/
- /*Minus:*/
- case -1:{
- cairo_move_to(c,xp+2.5,yp+4.5);
- cairo_line_to(c,xp+6.5,yp+4.5);
- cairo_stroke_preserve(c);
- cairo_set_source_rgba(c,1.,.3,.1,1.);
- }break;
- default:continue;
- }
- cairo_set_line_width(c,1.);
- cairo_stroke(c);
- }
- }
- }
- col2++;
- if((col2>>1)>=_dec->state.nhmbs){
- col2=0;
- row2+=2;
- }
- }
- /*Bit usage indicator[s]:*/
- if(_dec->telemetry_bits){
- int widths[6];
- int fpsn;
- int fpsd;
- int mult;
- int fullw;
- int padw;
- int i;
- fpsn=_dec->state.info.fps_numerator;
- fpsd=_dec->state.info.fps_denominator;
- mult=(_dec->telemetry_bits>=0xFF?1:_dec->telemetry_bits);
- fullw=250.f*h*fpsd*mult/fpsn;
- padw=w-24;
- /*Header and coded block bits.*/
- if(_dec->telemetry_frame_bytes<0||
- _dec->telemetry_frame_bytes==OC_LOTS_OF_BITS){
- _dec->telemetry_frame_bytes=0;
- }
- if(_dec->telemetry_coding_bytes<0||
- _dec->telemetry_coding_bytes>_dec->telemetry_frame_bytes){
- _dec->telemetry_coding_bytes=0;
- }
- if(_dec->telemetry_mode_bytes<0||
- _dec->telemetry_mode_bytes>_dec->telemetry_frame_bytes){
- _dec->telemetry_mode_bytes=0;
- }
- if(_dec->telemetry_mv_bytes<0||
- _dec->telemetry_mv_bytes>_dec->telemetry_frame_bytes){
- _dec->telemetry_mv_bytes=0;
- }
- if(_dec->telemetry_qi_bytes<0||
- _dec->telemetry_qi_bytes>_dec->telemetry_frame_bytes){
- _dec->telemetry_qi_bytes=0;
- }
- if(_dec->telemetry_dc_bytes<0||
- _dec->telemetry_dc_bytes>_dec->telemetry_frame_bytes){
- _dec->telemetry_dc_bytes=0;
- }
- widths[0]=padw*(_dec->telemetry_frame_bytes-_dec->telemetry_coding_bytes)/fullw;
- widths[1]=padw*(_dec->telemetry_coding_bytes-_dec->telemetry_mode_bytes)/fullw;
- widths[2]=padw*(_dec->telemetry_mode_bytes-_dec->telemetry_mv_bytes)/fullw;
- widths[3]=padw*(_dec->telemetry_mv_bytes-_dec->telemetry_qi_bytes)/fullw;
- widths[4]=padw*(_dec->telemetry_qi_bytes-_dec->telemetry_dc_bytes)/fullw;
- widths[5]=padw*(_dec->telemetry_dc_bytes)/fullw;
- for(i=0;i<6;i++)if(widths[i]>w)widths[i]=w;
- cairo_set_source_rgba(c,.0,.0,.0,.6);
- cairo_rectangle(c,10,h-33,widths[0]+1,5);
- cairo_rectangle(c,10,h-29,widths[1]+1,5);
- cairo_rectangle(c,10,h-25,widths[2]+1,5);
- cairo_rectangle(c,10,h-21,widths[3]+1,5);
- cairo_rectangle(c,10,h-17,widths[4]+1,5);
- cairo_rectangle(c,10,h-13,widths[5]+1,5);
- cairo_fill(c);
- cairo_set_source_rgb(c,1,0,0);
- cairo_rectangle(c,10.5,h-32.5,widths[0],4);
- cairo_fill(c);
- cairo_set_source_rgb(c,0,1,0);
- cairo_rectangle(c,10.5,h-28.5,widths[1],4);
- cairo_fill(c);
- cairo_set_source_rgb(c,0,0,1);
- cairo_rectangle(c,10.5,h-24.5,widths[2],4);
- cairo_fill(c);
- cairo_set_source_rgb(c,.6,.4,.0);
- cairo_rectangle(c,10.5,h-20.5,widths[3],4);
- cairo_fill(c);
- cairo_set_source_rgb(c,.3,.3,.3);
- cairo_rectangle(c,10.5,h-16.5,widths[4],4);
- cairo_fill(c);
- cairo_set_source_rgb(c,.5,.5,.8);
- cairo_rectangle(c,10.5,h-12.5,widths[5],4);
- cairo_fill(c);
- }
- /*Master qi indicator[s]:*/
- if(_dec->telemetry_qi&0x1){
- cairo_text_extents_t extents;
- char buffer[10];
- int p;
- int y;
- p=0;
- y=h-7.5;
- if(_dec->state.qis[0]>=10)buffer[p++]=48+_dec->state.qis[0]/10;
- buffer[p++]=48+_dec->state.qis[0]%10;
- if(_dec->state.nqis>=2){
- buffer[p++]=' ';
- if(_dec->state.qis[1]>=10)buffer[p++]=48+_dec->state.qis[1]/10;
- buffer[p++]=48+_dec->state.qis[1]%10;
- }
- if(_dec->state.nqis==3){
- buffer[p++]=' ';
- if(_dec->state.qis[2]>=10)buffer[p++]=48+_dec->state.qis[2]/10;
- buffer[p++]=48+_dec->state.qis[2]%10;
- }
- buffer[p++]='\0';
- cairo_select_font_face(c,"sans",
- CAIRO_FONT_SLANT_NORMAL,CAIRO_FONT_WEIGHT_BOLD);
- cairo_set_font_size(c,18);
- cairo_text_extents(c,buffer,&extents);
- cairo_set_source_rgb(c,1,1,1);
- cairo_move_to(c,w-extents.x_advance-10,y);
- cairo_show_text(c,buffer);
- cairo_set_source_rgb(c,0,0,0);
- cairo_move_to(c,w-extents.x_advance-10,y);
- cairo_text_path(c,buffer);
- cairo_set_line_width(c,.8);
- cairo_set_line_join(c,CAIRO_LINE_JOIN_ROUND);
- cairo_stroke(c);
- }
- cairo_destroy(c);
- }
- /*Out of the Cairo plane into the telemetry YUV buffer.*/
- _ycbcr[0].data=_dec->telemetry_frame_data;
- _ycbcr[0].stride=_ycbcr[0].width;
- _ycbcr[1].data=_ycbcr[0].data+h*_ycbcr[0].stride;
- _ycbcr[1].stride=_ycbcr[1].width;
- _ycbcr[2].data=_ycbcr[1].data+(h>>vdec)*_ycbcr[1].stride;
- _ycbcr[2].stride=_ycbcr[2].width;
- y_row=_ycbcr[0].data;
- u_row=_ycbcr[1].data;
- v_row=_ycbcr[2].data;
- rgb_row=data;
- /*This is one of the few places it's worth handling chroma on a
- case-by-case basis.*/
- switch(_dec->state.info.pixel_fmt){
- case TH_PF_420:{
- for(y=0;y<h;y+=2){
- unsigned char *y_row2;
- unsigned char *rgb_row2;
- y_row2=y_row+_ycbcr[0].stride;
- rgb_row2=rgb_row+cstride;
- for(x=0;x<w;x+=2){
- int y;
- int u;
- int v;
- y=(65481*rgb_row[4*x+2]+128553*rgb_row[4*x+1]
- +24966*rgb_row[4*x+0]+4207500)/255000;
- y_row[x]=OC_CLAMP255(y);
- y=(65481*rgb_row[4*x+6]+128553*rgb_row[4*x+5]
- +24966*rgb_row[4*x+4]+4207500)/255000;
- y_row[x+1]=OC_CLAMP255(y);
- y=(65481*rgb_row2[4*x+2]+128553*rgb_row2[4*x+1]
- +24966*rgb_row2[4*x+0]+4207500)/255000;
- y_row2[x]=OC_CLAMP255(y);
- y=(65481*rgb_row2[4*x+6]+128553*rgb_row2[4*x+5]
- +24966*rgb_row2[4*x+4]+4207500)/255000;
- y_row2[x+1]=OC_CLAMP255(y);
- u=(-8372*(rgb_row[4*x+2]+rgb_row[4*x+6]
- +rgb_row2[4*x+2]+rgb_row2[4*x+6])
- -16436*(rgb_row[4*x+1]+rgb_row[4*x+5]
- +rgb_row2[4*x+1]+rgb_row2[4*x+5])
- +24808*(rgb_row[4*x+0]+rgb_row[4*x+4]
- +rgb_row2[4*x+0]+rgb_row2[4*x+4])+29032005)/225930;
- v=(39256*(rgb_row[4*x+2]+rgb_row[4*x+6]
- +rgb_row2[4*x+2]+rgb_row2[4*x+6])
- -32872*(rgb_row[4*x+1]+rgb_row[4*x+5]
- +rgb_row2[4*x+1]+rgb_row2[4*x+5])
- -6384*(rgb_row[4*x+0]+rgb_row[4*x+4]
- +rgb_row2[4*x+0]+rgb_row2[4*x+4])+45940035)/357510;
- u_row[x>>1]=OC_CLAMP255(u);
- v_row[x>>1]=OC_CLAMP255(v);
- }
- y_row+=_ycbcr[0].stride<<1;
- u_row+=_ycbcr[1].stride;
- v_row+=_ycbcr[2].stride;
- rgb_row+=cstride<<1;
- }
- }break;
- case TH_PF_422:{
- for(y=0;y<h;y++){
- for(x=0;x<w;x+=2){
- int y;
- int u;
- int v;
- y=(65481*rgb_row[4*x+2]+128553*rgb_row[4*x+1]
- +24966*rgb_row[4*x+0]+4207500)/255000;
- y_row[x]=OC_CLAMP255(y);
- y=(65481*rgb_row[4*x+6]+128553*rgb_row[4*x+5]
- +24966*rgb_row[4*x+4]+4207500)/255000;
- y_row[x+1]=OC_CLAMP255(y);
- u=(-16744*(rgb_row[4*x+2]+rgb_row[4*x+6])
- -32872*(rgb_row[4*x+1]+rgb_row[4*x+5])
- +49616*(rgb_row[4*x+0]+rgb_row[4*x+4])+29032005)/225930;
- v=(78512*(rgb_row[4*x+2]+rgb_row[4*x+6])
- -65744*(rgb_row[4*x+1]+rgb_row[4*x+5])
- -12768*(rgb_row[4*x+0]+rgb_row[4*x+4])+45940035)/357510;
- u_row[x>>1]=OC_CLAMP255(u);
- v_row[x>>1]=OC_CLAMP255(v);
- }
- y_row+=_ycbcr[0].stride;
- u_row+=_ycbcr[1].stride;
- v_row+=_ycbcr[2].stride;
- rgb_row+=cstride;
- }
- }break;
- /*case TH_PF_444:*/
- default:{
- for(y=0;y<h;y++){
- for(x=0;x<w;x++){
- int y;
- int u;
- int v;
- y=(65481*rgb_row[4*x+2]+128553*rgb_row[4*x+1]
- +24966*rgb_row[4*x+0]+4207500)/255000;
- u=(-33488*rgb_row[4*x+2]-65744*rgb_row[4*x+1]
- +99232*rgb_row[4*x+0]+29032005)/225930;
- v=(157024*rgb_row[4*x+2]-131488*rgb_row[4*x+1]
- -25536*rgb_row[4*x+0]+45940035)/357510;
- y_row[x]=OC_CLAMP255(y);
- u_row[x]=OC_CLAMP255(u);
- v_row[x]=OC_CLAMP255(v);
- }
- y_row+=_ycbcr[0].stride;
- u_row+=_ycbcr[1].stride;
- v_row+=_ycbcr[2].stride;
- rgb_row+=cstride;
- }
- }break;
- }
- /*Finished.
- Destroy the surface.*/
- cairo_surface_destroy(cs);
- }
-#endif
return 0;
}
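The block removed above round-trips the decoded frame through RGB so Cairo can draw on it: a fixed-point YCbCr-to-RGB expansion on the way in, and a matching per-pixel-format RGB-to-YCbCr reduction on the way out. A minimal C sketch of the forward step, with the constants copied verbatim from the removed code; clamp255 and the function name are illustrative stand-ins, not libtheora API:

#include <stdio.h>

/*Stand-in for libtheora's OC_CLAMP255.*/
static unsigned char clamp255(int v){
  return (unsigned char)(v<0?0:v>255?255:v);
}

/*Expand studio-swing Y'CbCr to full-range RGB using the BT.601-style
  fixed-point factors from the removed telemetry path.*/
static void ycbcr_to_rgb_pixel(int y,int u,int v,
 unsigned char *r,unsigned char *g,unsigned char *b){
  *r=clamp255((1904000*y+2609823*v-363703744)/1635200);
  *g=clamp255((3827562*y-1287801*u-2672387*v+447306710)/3287200);
  *b=clamp255((952000*y+1649289*u-225932192)/817600);
}

int main(void){
  unsigned char r;
  unsigned char g;
  unsigned char b;
  /*Mid-grey in (Y=128, Cb=Cr=128) should come out as roughly equal RGB.*/
  ycbcr_to_rgb_pixel(128,128,128,&r,&g,&b);
  printf("%u %u %u\n",r,g,b);
  return 0;
}

The x>>hdec indexing and the y-dependent row stepping in the removed loops apply the same math at reduced chroma resolution for the 4:2:0 and 4:2:2 pixel formats.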
diff --git a/media/libtheora/lib/dequant.c b/media/libtheora/lib/dequant.c
index e554872d4e..860536f72d 100644
--- a/media/libtheora/lib/dequant.c
+++ b/media/libtheora/lib/dequant.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: dequant.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/dequant.h b/media/libtheora/lib/dequant.h
index ef25838e35..9d6cd6be56 100644
--- a/media/libtheora/lib/dequant.h
+++ b/media/libtheora/lib/dequant.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: dequant.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/fragment.c b/media/libtheora/lib/fragment.c
index 4ba6af1b71..14c38be507 100644
--- a/media/libtheora/lib/fragment.c
+++ b/media/libtheora/lib/fragment.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: fragment.c 17410 2010-09-21 21:53:48Z tterribe $
+ last mod: $Id$
********************************************************************/
#include <string.h>
diff --git a/media/libtheora/lib/huffdec.c b/media/libtheora/lib/huffdec.c
index fe013c611c..e227b40d71 100644
--- a/media/libtheora/lib/huffdec.c
+++ b/media/libtheora/lib/huffdec.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: huffdec.c 17577 2010-10-29 04:00:07Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/huffdec.h b/media/libtheora/lib/huffdec.h
index 2fd112a90b..03d25dcd1e 100644
--- a/media/libtheora/lib/huffdec.h
+++ b/media/libtheora/lib/huffdec.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: huffdec.h 17410 2010-09-21 21:53:48Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/huffman.h b/media/libtheora/lib/huffman.h
index 36cf7572e5..eb805866b9 100644
--- a/media/libtheora/lib/huffman.h
+++ b/media/libtheora/lib/huffman.h
@@ -11,12 +11,12 @@
********************************************************************
function:
- last mod: $Id: huffman.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
#if !defined(_huffman_H)
-# define _hufffman_H (1)
+# define _huffman_H (1)
# include "theora/codec.h"
# include "ocintrin.h"
diff --git a/media/libtheora/lib/idct.c b/media/libtheora/lib/idct.c
index c56eb94c5c..838e3ad8ca 100644
--- a/media/libtheora/lib/idct.c
+++ b/media/libtheora/lib/idct.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: idct.c 17410 2010-09-21 21:53:48Z tterribe $
+ last mod: $Id$
********************************************************************/
@@ -241,8 +241,8 @@ static void oc_idct8x8_3(ogg_int16_t _y[64],ogg_int16_t _x[64]){
for(i=0;i<8;i++)idct8_2(_y+i,w+i*8);
/*Adjust for the scale factor.*/
for(i=0;i<64;i++)_y[i]=(ogg_int16_t)(_y[i]+8>>4);
- /*Clear input data for next block (decoder only).*/
- if(_x!=_y)_x[0]=_x[1]=_x[8]=0;
+ /*Clear input data for next block.*/
+ _x[0]=_x[1]=_x[8]=0;
}
/*Performs an inverse 8x8 Type-II DCT transform.
@@ -272,8 +272,8 @@ static void oc_idct8x8_10(ogg_int16_t _y[64],ogg_int16_t _x[64]){
for(i=0;i<8;i++)idct8_4(_y+i,w+i*8);
/*Adjust for the scale factor.*/
for(i=0;i<64;i++)_y[i]=(ogg_int16_t)(_y[i]+8>>4);
- /*Clear input data for next block (decoder only).*/
- if(_x!=_y)_x[0]=_x[1]=_x[2]=_x[3]=_x[8]=_x[9]=_x[10]=_x[16]=_x[17]=_x[24]=0;
+ /*Clear input data for next block.*/
+ _x[0]=_x[1]=_x[2]=_x[3]=_x[8]=_x[9]=_x[10]=_x[16]=_x[17]=_x[24]=0;
}
/*Performs an inverse 8x8 Type-II DCT transform.
@@ -291,7 +291,8 @@ static void oc_idct8x8_slow(ogg_int16_t _y[64],ogg_int16_t _x[64]){
for(i=0;i<8;i++)idct8(_y+i,w+i*8);
/*Adjust for the scale factor.*/
for(i=0;i<64;i++)_y[i]=(ogg_int16_t)(_y[i]+8>>4);
- if(_x!=_y)for(i=0;i<64;i++)_x[i]=0;
+ /*Clear input data for next block.*/
+ for(i=0;i<64;i++)_x[i]=0;
}
/*Performs an inverse 8x8 Type-II DCT transform.
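The idct.c hunks above make the coefficient clearing unconditional. The _x!=_y guard appears to have existed for in-place (encoder-side) use; with the encoder paths dropped elsewhere in this patch, only the decoder remains, and it always transforms from a scratch coefficient buffer into a separate output. The MMX and SSE2 variants further down get the same simplification. A toy C sketch of that calling contract, with stand-in names rather than the real transform:

#include <string.h>

typedef short coeff_t;  /*stand-in for ogg_int16_t*/

static void idct8x8_stub(coeff_t _y[64],coeff_t _x[64]){
  int i;
  /*Placeholder for the real inverse transform.*/
  for(i=0;i<64;i++)_y[i]=_x[i];
  /*Decoder-only contract: _x never aliases _y, so it can always be
    cleared here, ready for the next block's coefficients.*/
  memset(_x,0,64*sizeof(*_x));
}

int main(void){
  coeff_t coeffs[64]={0};
  coeff_t pixels[64];
  coeffs[0]=1024;             /*only a DC term is present*/
  idct8x8_stub(pixels,coeffs);
  return coeffs[0];           /*0: scratch buffer is ready for reuse*/
}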
diff --git a/media/libtheora/lib/info.c b/media/libtheora/lib/info.c
index 6b9762978b..e5cecd2de5 100644
--- a/media/libtheora/lib/info.c
+++ b/media/libtheora/lib/info.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: info.c 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
@@ -54,7 +54,7 @@ void th_comment_init(th_comment *_tc){
memset(_tc,0,sizeof(*_tc));
}
-void th_comment_add(th_comment *_tc,char *_comment){
+void th_comment_add(th_comment *_tc,const char *_comment){
char **user_comments;
int *comment_lengths;
int comment_len;
@@ -75,7 +75,7 @@ void th_comment_add(th_comment *_tc,char *_comment){
_tc->user_comments[_tc->comments]=NULL;
}
-void th_comment_add_tag(th_comment *_tc,char *_tag,char *_val){
+void th_comment_add_tag(th_comment *_tc,const char *_tag,const char *_val){
char *comment;
int tag_len;
int val_len;
@@ -91,7 +91,7 @@ void th_comment_add_tag(th_comment *_tc,char *_tag,char *_val){
_ogg_free(comment);
}
-char *th_comment_query(th_comment *_tc,char *_tag,int _count){
+char *th_comment_query(th_comment *_tc,const char *_tag,int _count){
long i;
int found;
int tag_len;
@@ -107,7 +107,7 @@ char *th_comment_query(th_comment *_tc,char *_tag,int _count){
return NULL;
}
-int th_comment_query_count(th_comment *_tc,char *_tag){
+int th_comment_query_count(th_comment *_tc,const char *_tag){
long i;
int tag_len;
int count;
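The info.c hunks const-qualify the string parameters of the th_comment helpers. A short usage sketch of why that helps: string literals can now be passed without casts or -Wwrite-strings noise. It assumes the in-tree header layout for the include path:

#include <theora/codec.h>

int main(void){
  th_comment tc;
  int n;
  th_comment_init(&tc);
  /*With const char * parameters, literals need no cast.*/
  th_comment_add_tag(&tc,"ENCODER","example");
  th_comment_add(&tc,"TITLE=example clip");
  n=th_comment_query_count(&tc,"ENCODER");  /*1*/
  th_comment_clear(&tc);
  return n==1?0:1;
}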
diff --git a/media/libtheora/lib/internal.c b/media/libtheora/lib/internal.c
index 1b2611da15..afbb6efae7 100644
--- a/media/libtheora/lib/internal.c
+++ b/media/libtheora/lib/internal.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: internal.c 17506 2010-10-13 02:52:41Z tterribe $
+ last mod: $Id$
********************************************************************/
@@ -131,7 +131,6 @@ void **oc_malloc_2d(size_t _height,size_t _width,size_t _sz){
datsz=rowsz*_height;
/*Alloc array and row pointers.*/
ret=(char *)_ogg_malloc(datsz+colsz);
- if(ret==NULL)return NULL;
/*Initialize the array.*/
if(ret!=NULL){
size_t i;
@@ -154,7 +153,6 @@ void **oc_calloc_2d(size_t _height,size_t _width,size_t _sz){
datsz=rowsz*_height;
/*Alloc array and row pointers.*/
ret=(char *)_ogg_calloc(datsz+colsz,1);
- if(ret==NULL)return NULL;
/*Initialize the array.*/
if(ret!=NULL){
size_t i;
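The internal.c hunks drop an early NULL return that was redundant: the very next statement already guards initialization with ret!=NULL, and the (possibly NULL) pointer is what gets returned either way. For context, a generic sketch of the single-allocation 2-D array idiom these helpers implement, simplified (no alignment handling) and with illustrative names rather than the libtheora ones:

#include <stdlib.h>

static void **malloc_2d(size_t height,size_t width,size_t sz){
  size_t colsz=height*sizeof(void *);  /*row-pointer block*/
  size_t rowsz=sz*width;               /*one row of elements*/
  size_t datsz=rowsz*height;
  char *ret=(char *)malloc(datsz+colsz);
  if(ret!=NULL){
    size_t i;
    void **p=(void **)ret;
    char *datptr=ret+colsz;
    /*Point each row at its slice of the data region.*/
    for(i=0;i<height;i++,datptr+=rowsz)p[i]=(void *)datptr;
  }
  return (void **)ret;                 /*NULL simply propagates*/
}

int main(void){
  int **a=(int **)malloc_2d(4,8,sizeof(int));
  if(a!=NULL){
    a[3][7]=42;                        /*rows are directly indexable*/
    free(a);
  }
  return 0;
}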
diff --git a/media/libtheora/lib/internal.h b/media/libtheora/lib/internal.h
index 24e1b51252..53c77b88be 100644
--- a/media/libtheora/lib/internal.h
+++ b/media/libtheora/lib/internal.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: internal.h 17578 2010-10-29 04:21:26Z tterribe $
+ last mod: $Id$
********************************************************************/
#if !defined(_internal_H)
diff --git a/media/libtheora/lib/ocintrin.h b/media/libtheora/lib/ocintrin.h
index d49ebb2159..b200ceafce 100644
--- a/media/libtheora/lib/ocintrin.h
+++ b/media/libtheora/lib/ocintrin.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: ocintrin.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/quant.c b/media/libtheora/lib/quant.c
index c3f3f47713..e206202844 100644
--- a/media/libtheora/lib/quant.c
+++ b/media/libtheora/lib/quant.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: quant.c 17307 2010-06-27 06:02:15Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/quant.h b/media/libtheora/lib/quant.h
index 49ce13a65c..247210eaae 100644
--- a/media/libtheora/lib/quant.h
+++ b/media/libtheora/lib/quant.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: quant.h 16503 2009-08-22 18:14:02Z giles $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/state.c b/media/libtheora/lib/state.c
index 5e7b0ae651..f4c6240387 100644
--- a/media/libtheora/lib/state.c
+++ b/media/libtheora/lib/state.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: state.c 17576 2010-10-29 01:07:51Z tterribe $
+ last mod: $Id$
********************************************************************/
@@ -21,6 +21,7 @@
#if defined(OC_DUMP_IMAGES)
# include <stdio.h>
# include "png.h"
+# include "zlib.h"
#endif
/*The function used to fill in the chroma plane motion vectors for a macro
@@ -253,10 +254,14 @@ static void oc_mb_fill_cmapping10(oc_mb_map_plane _mb_map[3],
This version is for use with no chroma decimation (4:4:4).
This uses the already filled-in luma plane values.
_mb_map: The macro block map to fill.
- _fplanes: The descriptions of the fragment planes.*/
+ _fplanes: The descriptions of the fragment planes.
+ _xfrag0: The X location of the upper-left hand fragment in the luma plane.
+ _yfrag0: The Y location of the upper-left hand fragment in the luma plane.*/
static void oc_mb_fill_cmapping11(oc_mb_map_plane _mb_map[3],
- const oc_fragment_plane _fplanes[3]){
+ const oc_fragment_plane _fplanes[3],int _xfrag0,int _yfrag0){
int k;
+ (void)_xfrag0;
+ (void)_yfrag0;
for(k=0;k<4;k++){
_mb_map[1][k]=_mb_map[0][k]+_fplanes[1].froffset;
_mb_map[2][k]=_mb_map[0][k]+_fplanes[2].froffset;
@@ -278,7 +283,7 @@ static const oc_mb_fill_cmapping_func OC_MB_FILL_CMAPPING_TABLE[4]={
oc_mb_fill_cmapping00,
oc_mb_fill_cmapping01,
oc_mb_fill_cmapping10,
- (oc_mb_fill_cmapping_func)oc_mb_fill_cmapping11
+ oc_mb_fill_cmapping11
};
/*Fills in the mapping from macro blocks to their corresponding fragment
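The table hunk above removes a function-pointer cast by giving oc_mb_fill_cmapping11 the same signature as the other entries and explicitly discarding the extra arguments. Calling a function through a pointer of a different type is undefined behavior in C, so matching signatures plus (void) casts for unused parameters is the portable fix. A small illustrative sketch with made-up names:

typedef void (*fill_func)(int _map[4],int _xfrag0,int _yfrag0);

static void fill_with_offsets(int _map[4],int _xfrag0,int _yfrag0){
  int k;
  for(k=0;k<4;k++)_map[k]=_xfrag0+4*_yfrag0+k;
}

static void fill_no_offsets(int _map[4],int _xfrag0,int _yfrag0){
  int k;
  /*Unused here, but keeping the parameters lets this function share the
    table type without a cast.*/
  (void)_xfrag0;
  (void)_yfrag0;
  for(k=0;k<4;k++)_map[k]=k;
}

static const fill_func FILL_TABLE[2]={fill_with_offsets,fill_no_offsets};

int main(void){
  int map[4];
  FILL_TABLE[1](map,0,0);
  return map[0];  /*0*/
}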
@@ -702,7 +707,8 @@ int oc_state_init(oc_theora_state *_state,const th_info *_info,int _nrefs){
how it is specified in the bitstream, because the Y axis is flipped in
the bitstream.
The displayable frame must fit inside the encoded frame.
- The color space must be one known by the encoder.*/
+ The color space must be one known by the encoder.
+ The framerate ratio must not contain a zero value.*/
if((_info->frame_width&0xF)||(_info->frame_height&0xF)||
_info->frame_width<=0||_info->frame_width>=0x100000||
_info->frame_height<=0||_info->frame_height>=0x100000||
@@ -715,7 +721,8 @@ int oc_state_init(oc_theora_state *_state,const th_info *_info,int _nrefs){
but there are a number of compilers which will mis-optimize this.
It's better to live with the spurious warnings.*/
_info->colorspace<0||_info->colorspace>=TH_CS_NSPACES||
- _info->pixel_fmt<0||_info->pixel_fmt>=TH_PF_NFORMATS){
+ _info->pixel_fmt<0||_info->pixel_fmt>=TH_PF_NFORMATS||
+ _info->fps_numerator<1||_info->fps_denominator<1){
return TH_EINVAL;
}
memset(_state,0,sizeof(*_state));
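The final state.c hunk extends the existing th_info sanity checks with a frame-rate guard, so a zero numerator or denominator is rejected with TH_EINVAL before it can reach any timestamp arithmetic. A condensed sketch of the shape of the check; th_info_ok is a hypothetical helper, the real test is the inline condition shown above, and several picture-region checks are omitted here for brevity:

#include <theora/codec.h>

static int th_info_ok(const th_info *_info){
  return !((_info->frame_width&0xF)||(_info->frame_height&0xF)||
   _info->frame_width<=0||_info->frame_width>=0x100000||
   _info->frame_height<=0||_info->frame_height>=0x100000||
   _info->colorspace<0||_info->colorspace>=TH_CS_NSPACES||
   _info->pixel_fmt<0||_info->pixel_fmt>=TH_PF_NFORMATS||
   /*The added guard: both halves of the frame rate must be non-zero.*/
   _info->fps_numerator<1||_info->fps_denominator<1);
}

int main(void){
  th_info info;
  th_info_init(&info);
  info.frame_width=info.pic_width=320;
  info.frame_height=info.pic_height=240;
  info.fps_numerator=30;
  info.fps_denominator=0;   /*invalid on purpose*/
  return th_info_ok(&info); /*0: correctly flagged as invalid*/
}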
diff --git a/media/libtheora/lib/x86/mmxfrag.c b/media/libtheora/lib/x86/mmxfrag.c
index b7df1c1ec9..b3ec508956 100644
--- a/media/libtheora/lib/x86/mmxfrag.c
+++ b/media/libtheora/lib/x86/mmxfrag.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: mmxfrag.c 17410 2010-09-21 21:53:48Z tterribe $
+ last mod: $Id$
********************************************************************/
@@ -355,7 +355,7 @@ void oc_frag_recon_inter2_mmx(unsigned char *_dst,const unsigned char *_src1,
/*Advance dest ptr.*/
"lea (%[dst],%[ystride],2),%[dst]\n\t"
:[dst]"+r"(_dst),[residue]"+r"(_residue),
- [src1]"+%r"(_src1),[src2]"+r"(_src2)
+ [src1]"+r"(_src1),[src2]"+r"(_src2)
:[ystride]"r"((ptrdiff_t)_ystride)
:"memory"
);
diff --git a/media/libtheora/lib/x86/mmxidct.c b/media/libtheora/lib/x86/mmxidct.c
index 8d61bdfb16..b8e3077066 100644
--- a/media/libtheora/lib/x86/mmxidct.c
+++ b/media/libtheora/lib/x86/mmxidct.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: mmxidct.c 17446 2010-09-23 20:06:20Z tterribe $
+ last mod: $Id$
********************************************************************/
@@ -284,6 +284,7 @@
"#end OC_COLUMN_IDCT\n\t" \
static void oc_idct8x8_slow_mmx(ogg_int16_t _y[64],ogg_int16_t _x[64]){
+ int i;
/*This routine accepts an 8x8 matrix, but in partially transposed form.
Every 4x4 block is transposed.*/
__asm__ __volatile__(
@@ -313,18 +314,15 @@ static void oc_idct8x8_slow_mmx(ogg_int16_t _y[64],ogg_int16_t _x[64]){
:[x]"m"OC_CONST_ARRAY_OPERAND(ogg_int16_t,_x,64),
[c]"m"OC_CONST_ARRAY_OPERAND(ogg_int16_t,OC_IDCT_CONSTS,128)
);
- if(_x!=_y){
- int i;
- __asm__ __volatile__("pxor %%mm0,%%mm0\n\t"::);
- for(i=0;i<4;i++){
- __asm__ __volatile__(
- "movq %%mm0,"OC_MEM_OFFS(0x00,x)"\n\t"
- "movq %%mm0,"OC_MEM_OFFS(0x08,x)"\n\t"
- "movq %%mm0,"OC_MEM_OFFS(0x10,x)"\n\t"
- "movq %%mm0,"OC_MEM_OFFS(0x18,x)"\n\t"
- :[x]"=m"OC_ARRAY_OPERAND(ogg_int16_t,_x+16*i,16)
- );
- }
+ __asm__ __volatile__("pxor %%mm0,%%mm0\n\t"::);
+ for(i=0;i<4;i++){
+ __asm__ __volatile__(
+ "movq %%mm0,"OC_MEM_OFFS(0x00,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x08,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x10,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x18,x)"\n\t"
+ :[x]"=m"OC_ARRAY_OPERAND(ogg_int16_t,_x+16*i,16)
+ );
}
}
@@ -514,16 +512,14 @@ static void oc_idct8x8_10_mmx(ogg_int16_t _y[64],ogg_int16_t _x[64]){
:[x]"m"OC_CONST_ARRAY_OPERAND(ogg_int16_t,_x,64),
[c]"m"OC_CONST_ARRAY_OPERAND(ogg_int16_t,OC_IDCT_CONSTS,128)
);
- if(_x!=_y){
- __asm__ __volatile__(
- "pxor %%mm0,%%mm0\n\t"
- "movq %%mm0,"OC_MEM_OFFS(0x00,x)"\n\t"
- "movq %%mm0,"OC_MEM_OFFS(0x10,x)"\n\t"
- "movq %%mm0,"OC_MEM_OFFS(0x20,x)"\n\t"
- "movq %%mm0,"OC_MEM_OFFS(0x30,x)"\n\t"
- :[x]"+m"OC_ARRAY_OPERAND(ogg_int16_t,_x,28)
- );
- }
+ __asm__ __volatile__(
+ "pxor %%mm0,%%mm0\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x00,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x10,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x20,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x30,x)"\n\t"
+ :[x]"+m"OC_ARRAY_OPERAND(ogg_int16_t,_x,28)
+ );
}
/*Performs an inverse 8x8 Type-II DCT transform.
diff --git a/media/libtheora/lib/x86/mmxstate.c b/media/libtheora/lib/x86/mmxstate.c
index 0b9586f943..eebea14fba 100644
--- a/media/libtheora/lib/x86/mmxstate.c
+++ b/media/libtheora/lib/x86/mmxstate.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: mmxstate.c 17563 2010-10-25 17:40:54Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/x86/sse2idct.c b/media/libtheora/lib/x86/sse2idct.c
index 5f8523fa5f..4597ab074f 100644
--- a/media/libtheora/lib/x86/sse2idct.c
+++ b/media/libtheora/lib/x86/sse2idct.c
@@ -208,6 +208,7 @@ const unsigned short __attribute__((aligned(16),used)) OC_IDCT_CONSTS[64]={
static void oc_idct8x8_slow_sse2(ogg_int16_t _y[64],ogg_int16_t _x[64]){
OC_ALIGN16(ogg_int16_t buf[16]);
+ int i;
/*This routine accepts an 8x8 matrix pre-transposed.*/
__asm__ __volatile__(
/*Load rows 2, 3, 5, and 6 for the first stage of the iDCT.*/
@@ -230,19 +231,16 @@ static void oc_idct8x8_slow_sse2(ogg_int16_t _y[64],ogg_int16_t _x[64]){
:[x]"m"(OC_CONST_ARRAY_OPERAND(ogg_int16_t,_x,64)),
[c]"m"(OC_CONST_ARRAY_OPERAND(ogg_int16_t,OC_IDCT_CONSTS,128))
);
- if(_x!=_y){
- int i;
- __asm__ __volatile__("pxor %%xmm0,%%xmm0\n\t"::);
- /*Clear input data for next block (decoder only).*/
- for(i=0;i<2;i++){
- __asm__ __volatile__(
- "movdqa %%xmm0,"OC_MEM_OFFS(0x00,x)"\n\t"
- "movdqa %%xmm0,"OC_MEM_OFFS(0x10,x)"\n\t"
- "movdqa %%xmm0,"OC_MEM_OFFS(0x20,x)"\n\t"
- "movdqa %%xmm0,"OC_MEM_OFFS(0x30,x)"\n\t"
- :[x]"=m"(OC_ARRAY_OPERAND(ogg_int16_t,_x+i*32,32))
- );
- }
+ __asm__ __volatile__("pxor %%xmm0,%%xmm0\n\t"::);
+ /*Clear input data for next block (decoder only).*/
+ for(i=0;i<2;i++){
+ __asm__ __volatile__(
+ "movdqa %%xmm0,"OC_MEM_OFFS(0x00,x)"\n\t"
+ "movdqa %%xmm0,"OC_MEM_OFFS(0x10,x)"\n\t"
+ "movdqa %%xmm0,"OC_MEM_OFFS(0x20,x)"\n\t"
+ "movdqa %%xmm0,"OC_MEM_OFFS(0x30,x)"\n\t"
+ :[x]"=m"(OC_ARRAY_OPERAND(ogg_int16_t,_x+i*32,32))
+ );
}
}
@@ -411,17 +409,15 @@ static void oc_idct8x8_10_sse2(ogg_int16_t _y[64],ogg_int16_t _x[64]){
:[x]"m"OC_CONST_ARRAY_OPERAND(ogg_int16_t,_x,64),
[c]"m"(OC_CONST_ARRAY_OPERAND(ogg_int16_t,OC_IDCT_CONSTS,128))
);
- if(_x!=_y){
- /*Clear input data for next block (decoder only).*/
- __asm__ __volatile__(
- "pxor %%mm0,%%mm0\n\t"
- "movq %%mm0,"OC_MEM_OFFS(0x00,x)"\n\t"
- "movq %%mm0,"OC_MEM_OFFS(0x10,x)"\n\t"
- "movq %%mm0,"OC_MEM_OFFS(0x20,x)"\n\t"
- "movq %%mm0,"OC_MEM_OFFS(0x30,x)"\n\t"
- :[x]"+m"(OC_ARRAY_OPERAND(ogg_int16_t,_x,28))
- );
- }
+ /*Clear input data for next block (decoder only).*/
+ __asm__ __volatile__(
+ "pxor %%mm0,%%mm0\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x00,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x10,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x20,x)"\n\t"
+ "movq %%mm0,"OC_MEM_OFFS(0x30,x)"\n\t"
+ :[x]"+m"(OC_ARRAY_OPERAND(ogg_int16_t,_x,28))
+ );
}
/*Performs an inverse 8x8 Type-II DCT transform.
diff --git a/media/libtheora/lib/x86/x86cpu.c b/media/libtheora/lib/x86/x86cpu.c
index c3a20b319c..49fd76d0ac 100644
--- a/media/libtheora/lib/x86/x86cpu.c
+++ b/media/libtheora/lib/x86/x86cpu.c
@@ -14,7 +14,7 @@
Originally written by Rudolf Marek.
function:
- last mod: $Id: x86cpu.c 17410 2010-09-21 21:53:48Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/x86/x86cpu.h b/media/libtheora/lib/x86/x86cpu.h
index 153a48d892..e0192d52e2 100644
--- a/media/libtheora/lib/x86/x86cpu.h
+++ b/media/libtheora/lib/x86/x86cpu.h
@@ -10,7 +10,7 @@
* *
********************************************************************
function:
- last mod: $Id: x86cpu.h 17410 2010-09-21 21:53:48Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/x86/x86int.h b/media/libtheora/lib/x86/x86int.h
index 35bfb0a02b..ceb2dbb0ec 100644
--- a/media/libtheora/lib/x86/x86int.h
+++ b/media/libtheora/lib/x86/x86int.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: x86int.h 17578 2010-10-29 04:21:26Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/x86/x86state.c b/media/libtheora/lib/x86/x86state.c
index a3d37267f6..9f8bceb534 100644
--- a/media/libtheora/lib/x86/x86state.c
+++ b/media/libtheora/lib/x86/x86state.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: x86state.c 17421 2010-09-22 16:46:18Z giles $
+ last mod: $Id$
********************************************************************/
@@ -19,6 +19,7 @@
#if defined(OC_X86_ASM)
+#if defined(OC_STATE_USE_VTABLE)
/*This table has been modified from OC_FZIG_ZAG by baking a 4x4 transpose into
each quadrant of the destination.*/
static const unsigned char OC_FZIG_ZAG_MMX[128]={
@@ -39,6 +40,7 @@ static const unsigned char OC_FZIG_ZAG_MMX[128]={
64,64,64,64,64,64,64,64,
64,64,64,64,64,64,64,64
};
+#endif
/*This table has been modified from OC_FZIG_ZAG by baking an 8x8 transpose into
the destination.*/
diff --git a/media/libtheora/lib/x86_vc/mmxfrag.c b/media/libtheora/lib/x86_vc/mmxfrag.c
index c16b026ffc..248312ff90 100644
--- a/media/libtheora/lib/x86_vc/mmxfrag.c
+++ b/media/libtheora/lib/x86_vc/mmxfrag.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: mmxfrag.c 17446 2010-09-23 20:06:20Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/x86_vc/mmxidct.c b/media/libtheora/lib/x86_vc/mmxidct.c
index 53a9ac7f38..55e00aedcf 100644
--- a/media/libtheora/lib/x86_vc/mmxidct.c
+++ b/media/libtheora/lib/x86_vc/mmxidct.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: mmxidct.c 17446 2010-09-23 20:06:20Z tterribe $
+ last mod: $Id$
********************************************************************/
@@ -339,22 +339,19 @@ static void oc_idct8x8_slow(ogg_int16_t _y[64],ogg_int16_t _x[64]){
#undef Y
#undef X
}
- if(_x!=_y){
- int i;
- __asm pxor mm0,mm0;
- for(i=0;i<4;i++){
- ogg_int16_t *x;
- x=_x+16*i;
+ __asm pxor mm0,mm0;
+ for(i=0;i<4;i++){
+ ogg_int16_t *x;
+ x=_x+16*i;
#define X ecx
- __asm{
- mov X,x
- movq [X+0x00],mm0
- movq [X+0x08],mm0
- movq [X+0x10],mm0
- movq [X+0x18],mm0
- }
-#undef X
+ __asm{
+ mov X,x
+ movq [X+0x00],mm0
+ movq [X+0x08],mm0
+ movq [X+0x10],mm0
+ movq [X+0x18],mm0
}
+#undef X
}
}
@@ -547,18 +544,16 @@ static void oc_idct8x8_10(ogg_int16_t _y[64],ogg_int16_t _x[64]){
#undef Y
#undef X
}
- if(_x!=_y){
#define X ecx
- __asm{
- pxor mm0,mm0;
- mov X,_x
- movq [X+0x00],mm0
- movq [X+0x10],mm0
- movq [X+0x20],mm0
- movq [X+0x30],mm0
- }
-#undef X
+ __asm{
+ pxor mm0,mm0;
+ mov X,_x
+ movq [X+0x00],mm0
+ movq [X+0x10],mm0
+ movq [X+0x20],mm0
+ movq [X+0x30],mm0
}
+#undef X
}
/*Performs an inverse 8x8 Type-II DCT transform.
diff --git a/media/libtheora/lib/x86_vc/mmxstate.c b/media/libtheora/lib/x86_vc/mmxstate.c
index d3d468d5f2..f532ee1b6f 100644
--- a/media/libtheora/lib/x86_vc/mmxstate.c
+++ b/media/libtheora/lib/x86_vc/mmxstate.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: mmxstate.c 17563 2010-10-25 17:40:54Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/x86_vc/x86cpu.c b/media/libtheora/lib/x86_vc/x86cpu.c
index 41f4bcba9d..6a1d8d850c 100644
--- a/media/libtheora/lib/x86_vc/x86cpu.c
+++ b/media/libtheora/lib/x86_vc/x86cpu.c
@@ -14,7 +14,7 @@
Originally written by Rudolf Marek.
function:
- last mod: $Id: x86cpu.c 17410 2010-09-21 21:53:48Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/x86_vc/x86cpu.h b/media/libtheora/lib/x86_vc/x86cpu.h
index 327d932467..eea261d448 100644
--- a/media/libtheora/lib/x86_vc/x86cpu.h
+++ b/media/libtheora/lib/x86_vc/x86cpu.h
@@ -10,7 +10,7 @@
* *
********************************************************************
function:
- last mod: $Id: x86cpu.h 17410 2010-09-21 21:53:48Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/x86_vc/x86int.h b/media/libtheora/lib/x86_vc/x86int.h
index bc4c54a2f6..318a09dca0 100644
--- a/media/libtheora/lib/x86_vc/x86int.h
+++ b/media/libtheora/lib/x86_vc/x86int.h
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: x86int.h 17410 2010-09-21 21:53:48Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/lib/x86_vc/x86state.c b/media/libtheora/lib/x86_vc/x86state.c
index 7aa73deae4..fa3a0d42fc 100644
--- a/media/libtheora/lib/x86_vc/x86state.c
+++ b/media/libtheora/lib/x86_vc/x86state.c
@@ -11,7 +11,7 @@
********************************************************************
function:
- last mod: $Id: x86state.c 17410 2010-09-21 21:53:48Z tterribe $
+ last mod: $Id$
********************************************************************/
diff --git a/media/libtheora/moz.build b/media/libtheora/moz.build
index 8bcdb5fd47..aaec4c2ad8 100644
--- a/media/libtheora/moz.build
+++ b/media/libtheora/moz.build
@@ -1,4 +1,5 @@
# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
@@ -9,7 +10,6 @@ with Files('*'):
EXPORTS.theora += [
'include/theora/codec.h',
'include/theora/theoradec.h',
- 'include/theora/theoraenc.h',
]
# We allow warnings for third-party code that can be updated from upstream.
@@ -24,20 +24,20 @@ if CONFIG['GKMEDIAS_SHARED_LIBRARY']:
DEFINES['THEORA_DISABLE_ENCODE'] = True
# Suppress warnings in third-party code.
-if CONFIG['GNU_CC'] or CONFIG['CLANG_CL']:
- CFLAGS += ['-Wno-type-limits']
-if CONFIG['CLANG_CXX'] or CONFIG['CLANG_CL']:
- CFLAGS += ['-Wno-tautological-compare']
-if CONFIG['CLANG_CL']:
+CFLAGS += ['-Wno-type-limits']
+if CONFIG['CC_TYPE'] in ('clang', 'clang-cl'):
CFLAGS += [
+ '-Wno-shift-negative-value',
+ '-Wno-tautological-compare',
+ ]
+if CONFIG['CC_TYPE'] == 'clang-cl':
+ CFLAGS += [
+ '-Wno-parentheses',
'-Wno-pointer-sign',
- '-Wno-shift-op-parentheses',
]
UNIFIED_SOURCES += [
- 'lib/apiwrapper.c',
'lib/bitpack.c',
- 'lib/decapiwrapper.c',
'lib/decinfo.c',
'lib/decode.c',
'lib/dequant.c',
@@ -52,27 +52,25 @@ UNIFIED_SOURCES += [
LOCAL_INCLUDES += ['include']
-if '86' in CONFIG['OS_TEST']:
+if CONFIG['INTEL_ARCHITECTURE']:
if CONFIG['OS_ARCH'] != 'SunOS':
- if CONFIG['CLANG_CL']:
+ if CONFIG['CC_TYPE'] == 'clang-cl':
# clang-cl can't handle libtheora's inline asm.
pass
- elif CONFIG['OS_ARCH'] != 'WINNT' or CONFIG['OS_TEST'] != 'x86_64':
+ elif CONFIG['OS_ARCH'] != 'WINNT' or CONFIG['CPU_ARCH'] != 'x86_64':
DEFINES['OC_X86_ASM'] = True
- if '64' in CONFIG['OS_TEST']:
+ if CONFIG['CPU_ARCH'] == 'x86_64':
DEFINES['OC_X86_64_ASM'] = True
- if CONFIG['_MSC_VER']:
- if CONFIG['CLANG_CL']:
- # clang-cl can't handle libtheora's inline asm.
- pass
- elif '64' not in CONFIG['OS_TEST']:
- SOURCES += [
- 'lib/x86_vc/mmxfrag.c',
- 'lib/x86_vc/mmxidct.c',
- 'lib/x86_vc/mmxstate.c',
- 'lib/x86_vc/x86cpu.c',
- 'lib/x86_vc/x86state.c',
- ]
+ if CONFIG['CC_TYPE'] == 'clang-cl':
+ # clang-cl can't handle libtheora's inline asm.
+ pass
+ #SOURCES += [
+ # 'lib/x86_vc/mmxfrag.c',
+ # 'lib/x86_vc/mmxidct.c',
+ # 'lib/x86_vc/mmxstate.c',
+ # 'lib/x86_vc/x86cpu.c',
+ # 'lib/x86_vc/x86state.c',
+ #]
else:
SOURCES += [
'lib/x86/mmxfrag.c',
@@ -84,7 +82,7 @@ if '86' in CONFIG['OS_TEST']:
]
if CONFIG['GNU_AS']:
- if 'arm' in CONFIG['OS_TEST']:
+ if CONFIG['CPU_ARCH'] == 'arm':
SOURCES += [
'lib/arm/armcpu.c',
'lib/arm/armstate.c',
@@ -109,7 +107,7 @@ if CONFIG['GNU_AS']:
]
ASFLAGS += CONFIG['NEON_FLAGS']
- if CONFIG['CLANG_CXX']:
+ if CONFIG['CC_TYPE'] == 'clang':
ASFLAGS += [
'-no-integrated-as',
]
diff --git a/media/libtheora/update.sh b/media/libtheora/update.sh
index e1a95425a6..acfde68922 100644..100755
--- a/media/libtheora/update.sh
+++ b/media/libtheora/update.sh
@@ -2,6 +2,9 @@
#
# Copies the needed files from a directory containing the original
# libtheora source that we need for the Mozilla HTML5 media support.
+
+mkdir -p include/theora lib lib/arm lib/x86 lib/x86_vc
+
sed \
-e s/\#define\ OC_X86_ASM//g \
-e s/\#define\ OC_X86_64_ASM//g \
@@ -19,14 +22,11 @@ sed \
cp $1/LICENSE ./LICENSE
cp $1/CHANGES ./CHANGES
cp $1/COPYING ./COPYING
-cp $1/README ./README
+cp $1/README.md ./README.md
cp $1/AUTHORS ./AUTHORS
-cp $1/lib/apiwrapper.c ./lib/
-cp $1/lib/apiwrapper.h ./lib/
cp $1/lib/bitpack.c ./lib/
cp $1/lib/bitpack.h ./lib/
cp $1/lib/dct.h ./lib/
-cp $1/lib/decapiwrapper.c ./lib/
cp $1/lib/decinfo.c ./lib/
cp $1/lib/decint.h ./lib/
cp $1/lib/decode.c ./lib/
@@ -76,11 +76,5 @@ cp $1/lib/x86_vc/x86int.h ./lib/x86_vc/
cp $1/lib/x86_vc/x86state.c ./lib/x86_vc/
cp $1/include/theora/theora.h ./include/theora/theora.h
cp $1/include/theora/theoradec.h ./include/theora/theoradec.h
-cp $1/include/theora/theoraenc.h ./include/theora/theoraenc.h
cp $1/include/theora/codec.h ./include/theora/codec.h
-patch -p3 < ./bug625773-r17780.patch
-patch -p3 < ./bug468275-r18219.patch
-patch -p3 < ./bug752139-r18031.patch
-patch -p3 < ./bug752668-r18268.patch
patch -p3 < ./bug703135.patch
-patch -p3 < ./bug920992.patch
diff --git a/moz.configure b/moz.configure
index a4bba5bc3b..e3ee68bac3 100644
--- a/moz.configure
+++ b/moz.configure
@@ -11,33 +11,6 @@ include('build/moz.configure/init.configure')
# - Spidermonkey-specific options and rules should go in js/moz.configure.
# - etc.
-option('--enable-artifact-builds', env='MOZ_ARTIFACT_BUILDS',
- help='Download and use prebuilt binary artifacts.')
-
-@depends('--enable-artifact-builds')
-def artifact_builds(value):
- if value:
- return True
-
-set_config('MOZ_ARTIFACT_BUILDS', artifact_builds)
-
-imply_option('--enable-artifact-build-symbols',
- depends(artifact_builds)(lambda v: False if v is None else None),
- reason='--disable-artifact-builds')
-
-option('--enable-artifact-build-symbols',
- help='Download symbols when artifact builds are enabled.')
-
-set_config('MOZ_ARTIFACT_BUILD_SYMBOLS',
- depends_if('--enable-artifact-build-symbols')(lambda _: True))
-
-@depends('--enable-artifact-builds')
-def imply_disable_compile_environment(value):
- if value:
- return False
-
-imply_option('--enable-compile-environment', imply_disable_compile_environment)
-
option('--disable-compile-environment',
help='Disable compiler/library checks')
@@ -73,43 +46,8 @@ include('build/moz.configure/warnings.configure',
include(include_project_configure)
-@depends('--help')
-@imports(_from='mozbuild.backend', _import='backends')
-def build_backends_choices(_):
- return tuple(backends)
-
-
-@deprecated_option('--enable-build-backend', nargs='+',
- choices=build_backends_choices)
-def build_backend(backends):
- if backends:
- return tuple('+%s' % b for b in backends)
-
-imply_option('--build-backends', build_backend)
-
-
-@depends('--enable-artifact-builds', '--disable-compile-environment', '--help')
-@imports('sys')
-def build_backend_defaults(artifact_builds, compile_environment, _):
- if artifact_builds:
- all_backends = ['FasterMake+RecursiveMake']
- else:
- all_backends = ['RecursiveMake', 'FasterMake']
- # Normally, we'd use target.os == 'WINNT', but a dependency on target
- # would require target to depend on --help, as well as host and shell,
- # and this is not a can of worms we can open at the moment.
- if sys.platform == 'win32' and compile_environment:
- all_backends.append('VisualStudio')
- return tuple(all_backends)
-
-option('--build-backends', nargs='+', default=build_backend_defaults,
- choices=build_backends_choices, help='Build backends to generate')
-
-@depends('--build-backends')
-def build_backends(backends):
- return backends
-
-set_config('BUILD_BACKENDS', build_backends)
+# We only support one build-backend, namely RecursiveMake.
+set_config('BUILD_BACKENDS', tuple(['RecursiveMake']))
# Awk detection
@@ -186,17 +124,6 @@ def possible_makes(make, host):
check_prog('GMAKE', possible_makes)
-# tup detection
-# ==============================================================
-@depends(build_backends)
-def tup_progs(build_backends):
- for backend in build_backends:
- if 'Tup' in backend:
- return ['tup']
- return None
-
-tup = check_prog('TUP', tup_progs)
-
# Miscellaneous programs
# ==============================================================
check_prog('DOXYGEN', ('doxygen',), allow_missing=True)
diff --git a/old-configure.in b/old-configure.in
index 0ed6984ca9..6ed4fc1f3d 100644
--- a/old-configure.in
+++ b/old-configure.in
@@ -4961,7 +4961,6 @@ AC_SUBST_LIST(VPX_ASFLAGS)
AC_SUBST(VPX_AS_CONVERSION)
AC_SUBST(VPX_X86_ASM)
AC_SUBST(VPX_ARM_ASM)
-AC_SUBST(MOZ_CODE_COVERAGE)
AC_SUBST(LIBJPEG_TURBO_USE_YASM)
AC_SUBST_LIST(LIBJPEG_TURBO_ASFLAGS)
AC_SUBST(MOZ_LIBAV_FFT)
diff --git a/python/moz.build b/python/moz.build
index 108b986b55..819d1db9d8 100644
--- a/python/moz.build
+++ b/python/moz.build
@@ -27,7 +27,6 @@ PYTHON_UNIT_TESTS += [
'mozbuild/mozbuild/test/backend/test_build.py',
'mozbuild/mozbuild/test/backend/test_configenvironment.py',
'mozbuild/mozbuild/test/backend/test_recursivemake.py',
- 'mozbuild/mozbuild/test/backend/test_visualstudio.py',
'mozbuild/mozbuild/test/compilation/test_warnings.py',
'mozbuild/mozbuild/test/configure/lint.py',
'mozbuild/mozbuild/test/configure/test_checks_configure.py',
diff --git a/python/mozbuild/mozbuild/backend/__init__.py b/python/mozbuild/mozbuild/backend/__init__.py
index 7093e0c83f..fede9cf9c7 100644
--- a/python/mozbuild/mozbuild/backend/__init__.py
+++ b/python/mozbuild/mozbuild/backend/__init__.py
@@ -3,23 +3,12 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
backends = {
- 'ChromeMap': 'mozbuild.codecoverage.chrome_map',
'CompileDB': 'mozbuild.compilation.database',
- 'CppEclipse': 'mozbuild.backend.cpp_eclipse',
- 'FasterMake': 'mozbuild.backend.fastermake',
- 'FasterMake+RecursiveMake': None,
'RecursiveMake': 'mozbuild.backend.recursivemake',
- 'Tup': 'mozbuild.backend.tup',
- 'VisualStudio': 'mozbuild.backend.visualstudio',
}
def get_backend_class(name):
- if '+' in name:
- from mozbuild.backend.base import HybridBackend
- return HybridBackend(*(get_backend_class(name)
- for name in name.split('+')))
-
class_name = '%sBackend' % name
module = __import__(backends[name], globals(), locals(), [class_name])
return getattr(module, class_name)
diff --git a/python/mozbuild/mozbuild/backend/base.py b/python/mozbuild/mozbuild/backend/base.py
index f5e0c2d3c8..c46a3b1397 100644
--- a/python/mozbuild/mozbuild/backend/base.py
+++ b/python/mozbuild/mozbuild/backend/base.py
@@ -125,13 +125,11 @@ class BuildBackend(LoggingMixin):
for obj in objs:
obj_start = time.time()
- if (not self.consume_object(obj) and
- not isinstance(self, PartialBackend)):
+ if (not self.consume_object(obj)):
raise Exception('Unhandled object of type %s' % type(obj))
self._execution_time += time.time() - obj_start
- if (isinstance(obj, ContextDerived) and
- not isinstance(self, PartialBackend)):
+ if (isinstance(obj, ContextDerived)):
self.backend_input_files |= obj.context_all_paths
# Pull in all loaded Python as dependencies so any Python changes that
@@ -266,52 +264,3 @@ class BuildBackend(LoggingMixin):
with self._write_file(obj.output_path) as fh:
pp.out = fh
yield pp
-
-
-class PartialBackend(BuildBackend):
- """A PartialBackend is a BuildBackend declaring that its consume_object
- method may not handle all build configuration objects it's passed, and
- that it's fine."""
-
-
-def HybridBackend(*backends):
- """A HybridBackend is the combination of one or more PartialBackends
- with a non-partial BuildBackend.
-
- Build configuration objects are passed to each backend, stopping at the
- first of them that declares having handled them.
- """
- assert len(backends) >= 2
- assert all(issubclass(b, PartialBackend) for b in backends[:-1])
- assert not(issubclass(backends[-1], PartialBackend))
- assert all(issubclass(b, BuildBackend) for b in backends)
-
- class TheHybridBackend(BuildBackend):
- def __init__(self, environment):
- self._backends = [b(environment) for b in backends]
- super(TheHybridBackend, self).__init__(environment)
-
- def consume_object(self, obj):
- return any(b.consume_object(obj) for b in self._backends)
-
- def consume_finished(self):
- for backend in self._backends:
- backend.consume_finished()
-
- for attr in ('_execution_time', '_created_count', '_updated_count',
- '_unchanged_count', '_deleted_count'):
- setattr(self, attr,
- sum(getattr(b, attr) for b in self._backends))
-
- for b in self._backends:
- self.file_diffs.update(b.file_diffs)
- for attr in ('backend_input_files', '_backend_output_files'):
- files = getattr(self, attr)
- files |= getattr(b, attr)
-
- name = '+'.join(itertools.chain(
- (b.__name__.replace('Backend', '') for b in backends[:1]),
- (b.__name__ for b in backends[-1:])
- ))
-
- return type(str(name), (TheHybridBackend,), {})
diff --git a/python/mozbuild/mozbuild/backend/common.py b/python/mozbuild/mozbuild/backend/common.py
index 12b2a27c45..a90aa1e5d5 100644
--- a/python/mozbuild/mozbuild/backend/common.py
+++ b/python/mozbuild/mozbuild/backend/common.py
@@ -252,71 +252,35 @@ class CommonBackend(BuildBackend):
# We should consider aggregating WebIDL types in emitter.py.
elif isinstance(obj, WebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.sources.add(mozpath.join(obj.srcdir, obj.basename))
elif isinstance(obj, GeneratedEventWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.generated_events_sources.add(mozpath.join(
obj.srcdir, obj.basename))
elif isinstance(obj, TestWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.test_sources.add(mozpath.join(obj.srcdir,
obj.basename))
elif isinstance(obj, PreprocessedTestWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.preprocessed_test_sources.add(mozpath.join(
obj.srcdir, obj.basename))
elif isinstance(obj, GeneratedWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.generated_sources.add(mozpath.join(obj.srcdir,
obj.basename))
elif isinstance(obj, PreprocessedWebIDLFile):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.preprocessed_sources.add(mozpath.join(
obj.srcdir, obj.basename))
elif isinstance(obj, ExampleWebIDLInterface):
- # WebIDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._webidls.example_interfaces.add(obj.name)
elif isinstance(obj, IPDLFile):
- # IPDL isn't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
self._ipdl_sources.add(mozpath.join(obj.srcdir, obj.basename))
elif isinstance(obj, UnifiedSources):
- # Unified sources aren't relevant to artifact builds.
- if self.environment.is_artifact_build:
- return True
-
if obj.have_unified_mapping:
self._write_unified_files(obj.unified_source_mapping, obj.objdir)
if hasattr(self, '_process_unified_sources'):
diff --git a/python/mozbuild/mozbuild/backend/configenvironment.py b/python/mozbuild/mozbuild/backend/configenvironment.py
index 331309af6d..0edcf53660 100644
--- a/python/mozbuild/mozbuild/backend/configenvironment.py
+++ b/python/mozbuild/mozbuild/backend/configenvironment.py
@@ -187,10 +187,6 @@ class ConfigEnvironment(object):
self.substs_unicode = ReadOnlyDict(self.substs_unicode)
- @property
- def is_artifact_build(self):
- return self.substs.get('MOZ_ARTIFACT_BUILDS', False)
-
@staticmethod
def from_config_status(path):
config = BuildConfig.from_config_status(path)
diff --git a/python/mozbuild/mozbuild/backend/cpp_eclipse.py b/python/mozbuild/mozbuild/backend/cpp_eclipse.py
deleted file mode 100644
index ae89df5b20..0000000000
--- a/python/mozbuild/mozbuild/backend/cpp_eclipse.py
+++ /dev/null
@@ -1,685 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import
-
-import errno
-import random
-import os
-import subprocess
-import types
-import xml.etree.ElementTree as ET
-from .common import CommonBackend
-
-from ..frontend.data import (
- Defines,
-)
-from mozbuild.base import ExecutionSummary
-
-# TODO Have ./mach eclipse generate the workspace and index it:
-# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -application org.eclipse.cdt.managedbuilder.core.headlessbuild -data $PWD/workspace -importAll $PWD/eclipse
-# Open eclipse:
-# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -data $PWD/workspace
-
-class CppEclipseBackend(CommonBackend):
- """Backend that generates Cpp Eclipse project files.
- """
-
- def __init__(self, environment):
- if os.name == 'nt':
- raise Exception('Eclipse is not supported on Windows. '
- 'Consider using Visual Studio instead.')
- super(CppEclipseBackend, self).__init__(environment)
-
- def _init(self):
- CommonBackend._init(self)
-
- self._paths_to_defines = {}
- self._project_name = 'Gecko'
- self._workspace_dir = self._get_workspace_path()
- self._project_dir = os.path.join(self._workspace_dir, self._project_name)
- self._overwriting_workspace = os.path.isdir(self._workspace_dir)
-
- self._macbundle = self.environment.substs['MOZ_MACBUNDLE_NAME']
- self._appname = self.environment.substs['MOZ_APP_NAME']
- self._bin_suffix = self.environment.substs['BIN_SUFFIX']
- self._cxx = self.environment.substs['CXX']
- # Note: We need the C Pre Processor (CPP) flags, not the CXX flags
- self._cppflags = self.environment.substs.get('CPPFLAGS', '')
-
- def summary(self):
- return ExecutionSummary(
- 'CppEclipse backend executed in {execution_time:.2f}s\n'
- 'Generated Cpp Eclipse workspace in "{workspace:s}".\n'
- 'If missing, import the project using File > Import > General > Existing Project into workspace\n'
- '\n'
- 'Run with: eclipse -data {workspace:s}\n',
- execution_time=self._execution_time,
- workspace=self._workspace_dir)
-
- def _get_workspace_path(self):
- return CppEclipseBackend.get_workspace_path(self.environment.topsrcdir, self.environment.topobjdir)
-
- @staticmethod
- def get_workspace_path(topsrcdir, topobjdir):
- # Eclipse doesn't support having the workspace inside the srcdir.
- # Since most people have their objdir inside their srcdir, it's easier
- # and more consistent to just put the workspace alongside the srcdir.
- srcdir_parent = os.path.dirname(topsrcdir)
- workspace_dirname = "eclipse_" + os.path.basename(topobjdir)
- return os.path.join(srcdir_parent, workspace_dirname)
-
- def consume_object(self, obj):
- reldir = getattr(obj, 'relativedir', None)
-
- # Note that unlike VS, Eclipse's indexer seems to crawl the headers and
- # isn't picky about the local includes.
- if isinstance(obj, Defines):
- self._paths_to_defines.setdefault(reldir, {}).update(obj.defines)
-
- return True
-
- def consume_finished(self):
- settings_dir = os.path.join(self._project_dir, '.settings')
- launch_dir = os.path.join(self._project_dir, 'RunConfigurations')
- workspace_settings_dir = os.path.join(self._workspace_dir, '.metadata/.plugins/org.eclipse.core.runtime/.settings')
- workspace_language_dir = os.path.join(self._workspace_dir, '.metadata/.plugins/org.eclipse.cdt.core')
-
- for dir_name in [self._project_dir, settings_dir, launch_dir, workspace_settings_dir, workspace_language_dir]:
- try:
- os.makedirs(dir_name)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
- project_path = os.path.join(self._project_dir, '.project')
- with open(project_path, 'wb') as fh:
- self._write_project(fh)
-
- cproject_path = os.path.join(self._project_dir, '.cproject')
- with open(cproject_path, 'wb') as fh:
- self._write_cproject(fh)
-
- language_path = os.path.join(settings_dir, 'language.settings.xml')
- with open(language_path, 'wb') as fh:
- self._write_language_settings(fh)
-
- workspace_language_path = os.path.join(workspace_language_dir, 'language.settings.xml')
- with open(workspace_language_path, 'wb') as fh:
- workspace_lang_settings = WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE
- workspace_lang_settings = workspace_lang_settings.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags)
- fh.write(workspace_lang_settings)
-
- self._write_launch_files(launch_dir)
-
- # This will show up as an 'unmanaged' formatter. It can be given a name by
- # generating another file.
- formatter_prefs_path = os.path.join(settings_dir, 'org.eclipse.cdt.core.prefs')
- with open(formatter_prefs_path, 'wb') as fh:
- fh.write(FORMATTER_SETTINGS)
-
- editor_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.editors.prefs")
- with open(editor_prefs_path, 'wb') as fh:
- fh.write(EDITOR_SETTINGS)
-
- # Now import the project into the workspace
- self._import_project()
-
- def _import_project(self):
- # If the workspace already exists then don't import the project again because
- # eclipse doesn't handle this properly
- if self._overwriting_workspace:
- return
-
- # We disable the indexer, otherwise we're forced to index the whole codebase
- # when importing the project. Indexing the project can take 20 minutes.
- self._write_noindex()
-
- try:
- process = subprocess.check_call(
- ["eclipse", "-application", "-nosplash",
- "org.eclipse.cdt.managedbuilder.core.headlessbuild",
- "-data", self._workspace_dir, "-importAll", self._project_dir])
- finally:
- self._remove_noindex()
-
- def _write_noindex(self):
- noindex_path = os.path.join(self._project_dir, '.settings/org.eclipse.cdt.core.prefs')
- with open(noindex_path, 'wb') as fh:
- fh.write(NOINDEX_TEMPLATE)
-
- def _remove_noindex(self):
- noindex_path = os.path.join(self._project_dir, '.settings/org.eclipse.cdt.core.prefs')
- os.remove(noindex_path)
-
- def _define_entry(self, name, value):
- define = ET.Element('entry')
- define.set('kind', 'macro')
- define.set('name', name)
- define.set('value', value)
- return ET.tostring(define)
-
- def _write_language_settings(self, fh):
- settings = LANGUAGE_SETTINGS_TEMPLATE
-
- settings = settings.replace('@GLOBAL_INCLUDE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include'))
- settings = settings.replace('@NSPR_INCLUDE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include/nspr'))
- settings = settings.replace('@IPDL_INCLUDE_PATH@', os.path.join(self.environment.topobjdir, 'ipc/ipdl/_ipdlheaders'))
- settings = settings.replace('@PREINCLUDE_FILE_PATH@', os.path.join(self.environment.topobjdir, 'dist/include/mozilla-config.h'))
- settings = settings.replace('@DEFINE_MOZILLA_INTERNAL_API@', self._define_entry('MOZILLA_INTERNAL_API', '1'))
- settings = settings.replace("@COMPILER_FLAGS@", self._cxx + " " + self._cppflags);
-
- fh.write(settings)
-
- def _write_launch_files(self, launch_dir):
- bin_dir = os.path.join(self.environment.topobjdir, 'dist')
-
- # TODO Improve binary detection
- if self._macbundle:
- exe_path = os.path.join(bin_dir, self._macbundle, 'Contents/MacOS')
- else:
- exe_path = os.path.join(bin_dir, 'bin')
-
- exe_path = os.path.join(exe_path, self._appname + self._bin_suffix)
-
- main_gecko_launch = os.path.join(launch_dir, 'gecko.launch')
- with open(main_gecko_launch, 'wb') as fh:
- launch = GECKO_LAUNCH_CONFIG_TEMPLATE
- launch = launch.replace('@LAUNCH_PROGRAM@', exe_path)
- launch = launch.replace('@LAUNCH_ARGS@', '-P -no-remote')
- fh.write(launch)
-
- #TODO Add more launch configs (and delegate calls to mach)
-
- def _write_project(self, fh):
- project = PROJECT_TEMPLATE
-
- project = project.replace('@PROJECT_NAME@', self._project_name)
- project = project.replace('@PROJECT_TOPSRCDIR@', self.environment.topsrcdir)
- fh.write(project)
-
- def _write_cproject(self, fh):
- cproject_header = CPROJECT_TEMPLATE_HEADER
- cproject_header = cproject_header.replace('@PROJECT_TOPSRCDIR@', self.environment.topobjdir)
- cproject_header = cproject_header.replace('@MACH_COMMAND@', os.path.join(self.environment.topsrcdir, 'mach'))
- fh.write(cproject_header)
-
- for path, defines in self._paths_to_defines.items():
- folderinfo = CPROJECT_TEMPLATE_FOLDER_INFO_HEADER
- folderinfo = folderinfo.replace('@FOLDER_ID@', str(random.randint(1000000, 99999999999)))
- folderinfo = folderinfo.replace('@FOLDER_NAME@', 'tree/' + path)
- fh.write(folderinfo)
- for k, v in defines.items():
- define = ET.Element('listOptionValue')
- define.set('builtIn', 'false')
- define.set('value', str(k) + "=" + str(v))
- fh.write(ET.tostring(define))
- fh.write(CPROJECT_TEMPLATE_FOLDER_INFO_FOOTER)
-
-
- fh.write(CPROJECT_TEMPLATE_FOOTER)
-
-
-PROJECT_TEMPLATE = """<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>@PROJECT_NAME@</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.eclipse.cdt.managedbuilder.core.genmakebuilder</name>
- <triggers>clean,full,incremental,</triggers>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder</name>
- <triggers></triggers>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.eclipse.cdt.core.cnature</nature>
- <nature>org.eclipse.cdt.core.ccnature</nature>
- <nature>org.eclipse.cdt.managedbuilder.core.managedBuildNature</nature>
- <nature>org.eclipse.cdt.managedbuilder.core.ScannerConfigNature</nature>
- </natures>
- <linkedResources>
- <link>
- <name>tree</name>
- <type>2</type>
- <location>@PROJECT_TOPSRCDIR@</location>
- </link>
- </linkedResources>
- <filteredResources>
- <filter>
- <id>17111971</id>
- <name>tree</name>
- <type>30</type>
- <matcher>
- <id>org.eclipse.ui.ide.multiFilter</id>
- <arguments>1.0-name-matches-false-false-obj-*</arguments>
- </matcher>
- </filter>
- <filter>
- <id>14081994</id>
- <name>tree</name>
- <type>22</type>
- <matcher>
- <id>org.eclipse.ui.ide.multiFilter</id>
- <arguments>1.0-name-matches-false-false-*.rej</arguments>
- </matcher>
- </filter>
- <filter>
- <id>25121970</id>
- <name>tree</name>
- <type>22</type>
- <matcher>
- <id>org.eclipse.ui.ide.multiFilter</id>
- <arguments>1.0-name-matches-false-false-*.orig</arguments>
- </matcher>
- </filter>
- <filter>
- <id>10102004</id>
- <name>tree</name>
- <type>10</type>
- <matcher>
- <id>org.eclipse.ui.ide.multiFilter</id>
- <arguments>1.0-name-matches-false-false-.hg</arguments>
- </matcher>
- </filter>
- <filter>
- <id>23122002</id>
- <name>tree</name>
- <type>22</type>
- <matcher>
- <id>org.eclipse.ui.ide.multiFilter</id>
- <arguments>1.0-name-matches-false-false-*.pyc</arguments>
- </matcher>
- </filter>
- </filteredResources>
-</projectDescription>
-"""
-
-CPROJECT_TEMPLATE_HEADER = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<?fileVersion 4.0.0?>
-
-<cproject storage_type_id="org.eclipse.cdt.core.XmlProjectDescriptionStorage">
- <storageModule moduleId="org.eclipse.cdt.core.settings">
- <cconfiguration id="0.1674256904">
- <storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="0.1674256904" moduleId="org.eclipse.cdt.core.settings" name="Default">
- <externalSettings/>
- <extensions>
- <extension id="org.eclipse.cdt.core.VCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
- <extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
- <extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
- <extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
- <extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
- <extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
- </extensions>
- </storageModule>
- <storageModule moduleId="cdtBuildSystem" version="4.0.0">
- <configuration artifactName="${ProjName}" buildProperties="" description="" id="0.1674256904" name="Default" parent="org.eclipse.cdt.build.core.prefbase.cfg">
- <folderInfo id="0.1674256904." name="/" resourcePath="">
- <toolChain id="cdt.managedbuild.toolchain.gnu.cross.exe.debug.1276586933" name="Cross GCC" superClass="cdt.managedbuild.toolchain.gnu.cross.exe.debug">
- <targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.ELF" id="cdt.managedbuild.targetPlatform.gnu.cross.710759961" isAbstract="false" osList="all" superClass="cdt.managedbuild.targetPlatform.gnu.cross"/>
- <builder arguments="--log-no-times build" buildPath="@PROJECT_TOPSRCDIR@" command="@MACH_COMMAND@" enableCleanBuild="false" incrementalBuildTarget="binaries" id="org.eclipse.cdt.build.core.settings.default.builder.1437267827" keepEnvironmentInBuildfile="false" name="Gnu Make Builder" superClass="org.eclipse.cdt.build.core.settings.default.builder"/>
- </toolChain>
- </folderInfo>
-"""
-CPROJECT_TEMPLATE_FOLDER_INFO_HEADER = """
- <folderInfo id="0.1674256904.@FOLDER_ID@" name="/" resourcePath="@FOLDER_NAME@">
- <toolChain id="org.eclipse.cdt.build.core.prefbase.toolchain.1022318069" name="No ToolChain" superClass="org.eclipse.cdt.build.core.prefbase.toolchain" unusedChildren="">
- <tool id="org.eclipse.cdt.build.core.settings.holder.libs.1259030812" name="holder for library settings" superClass="org.eclipse.cdt.build.core.settings.holder.libs.1800697532"/>
- <tool id="org.eclipse.cdt.build.core.settings.holder.1407291069" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder.582514939">
- <option id="org.eclipse.cdt.build.core.settings.holder.symbols.1907658087" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" valueType="definedSymbols">
-"""
-CPROJECT_TEMPLATE_FOLDER_INFO_DEFINE = """
- <listOptionValue builtIn="false" value="@FOLDER_DEFINE@"/>
-"""
-CPROJECT_TEMPLATE_FOLDER_INFO_FOOTER = """
- </option>
- <inputType id="org.eclipse.cdt.build.core.settings.holder.inType.440601711" languageId="org.eclipse.cdt.core.g++" languageName="GNU C++" sourceContentType="org.eclipse.cdt.core.cxxSource,org.eclipse.cdt.core.cxxHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
- </tool>
- </toolChain>
- </folderInfo>
-"""
-CPROJECT_TEMPLATE_FILEINFO = """ <fileInfo id="0.1674256904.474736658" name="Layers.cpp" rcbsApplicability="disable" resourcePath="tree/gfx/layers/Layers.cpp" toolsToInvoke="org.eclipse.cdt.build.core.settings.holder.582514939.463639939">
- <tool id="org.eclipse.cdt.build.core.settings.holder.582514939.463639939" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder.582514939">
- <option id="org.eclipse.cdt.build.core.settings.holder.symbols.232300236" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" valueType="definedSymbols">
- <listOptionValue builtIn="false" value="BENWA=BENWAVAL"/>
- </option>
- <inputType id="org.eclipse.cdt.build.core.settings.holder.inType.1942876228" languageId="org.eclipse.cdt.core.g++" languageName="GNU C++" sourceContentType="org.eclipse.cdt.core.cxxSource,org.eclipse.cdt.core.cxxHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
- </tool>
- </fileInfo>
-"""
-CPROJECT_TEMPLATE_FOOTER = """ </configuration>
- </storageModule>
- <storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
- </cconfiguration>
- </storageModule>
- <storageModule moduleId="cdtBuildSystem" version="4.0.0">
- <project id="Empty.null.1281234804" name="Empty"/>
- </storageModule>
- <storageModule moduleId="scannerConfiguration">
- <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
- <scannerConfigBuildInfo instanceId="0.1674256904">
- <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
- </scannerConfigBuildInfo>
- </storageModule>
- <storageModule moduleId="refreshScope" versionNumber="2">
- <configuration configurationName="Default"/>
- </storageModule>
- <storageModule moduleId="org.eclipse.cdt.core.LanguageSettingsProviders"/>
-</cproject>
-"""
-
-WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<plugin>
- <extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
- <provider class="org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector" console="true" id="org.eclipse.cdt.managedbuilder.core.GCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD &quot;${INPUTS}&quot;">
- <language-scope id="org.eclipse.cdt.core.gcc"/>
- <language-scope id="org.eclipse.cdt.core.g++"/>
- </provider>
- </extension>
-</plugin>
-"""
-
-LANGUAGE_SETTINGS_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<project>
- <configuration id="0.1674256904" name="Default">
- <extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
- <provider class="org.eclipse.cdt.core.language.settings.providers.LanguageSettingsGenericProvider" id="org.eclipse.cdt.ui.UserLanguageSettingsProvider" name="CDT User Setting Entries" prefer-non-shared="true" store-entries-with-project="true">
- <language id="org.eclipse.cdt.core.g++">
- <resource project-relative-path="">
- <entry kind="includePath" name="@GLOBAL_INCLUDE_PATH@">
- <flag value="LOCAL"/>
- </entry>
- <entry kind="includePath" name="@NSPR_INCLUDE_PATH@">
- <flag value="LOCAL"/>
- </entry>
- <entry kind="includePath" name="@IPDL_INCLUDE_PATH@">
- <flag value="LOCAL"/>
- </entry>
- <entry kind="includeFile" name="@PREINCLUDE_FILE_PATH@">
- <flag value="LOCAL"/>
- </entry>
- <!--
- Because of https://developer.mozilla.org/en-US/docs/Eclipse_CDT#Headers_are_only_parsed_once
- we need to make sure headers are parsed with MOZILLA_INTERNAL_API so that
- the indexer gets the version that is used in most of the tree. This means that
- MOZILLA_EXTERNAL_API code will suffer.
- -->
- @DEFINE_MOZILLA_INTERNAL_API@
- </resource>
- </language>
- </provider>
- <provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-859273372804152468" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD &quot;${INPUTS}&quot; -std=c++11" prefer-non-shared="true" store-entries-with-project="true">
- <language-scope id="org.eclipse.cdt.core.gcc"/>
- <language-scope id="org.eclipse.cdt.core.g++"/>
- </provider>
- <provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
- </extension>
- </configuration>
-</project>
-"""
-
-GECKO_LAUNCH_CONFIG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<launchConfiguration type="org.eclipse.cdt.launch.applicationLaunchType">
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB" value="true"/>
-<listAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB_LIST"/>
-<stringAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_NAME" value="lldb"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_ON_FORK" value="false"/>
-<stringAttribute key="org.eclipse.cdt.dsf.gdb.GDB_INIT" value=""/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.NON_STOP" value="false"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.REVERSE" value="false"/>
-<listAttribute key="org.eclipse.cdt.dsf.gdb.SOLIB_PATH"/>
-<stringAttribute key="org.eclipse.cdt.dsf.gdb.TRACEPOINT_MODE" value="TP_NORMAL_ONLY"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.UPDATE_THREADLIST_ON_SUSPEND" value="false"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.internal.ui.launching.LocalApplicationCDebuggerTab.DEFAULTS_SET" value="true"/>
-<intAttribute key="org.eclipse.cdt.launch.ATTR_BUILD_BEFORE_LAUNCH_ATTR" value="2"/>
-<stringAttribute key="org.eclipse.cdt.launch.COREFILE_PATH" value=""/>
-<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/>
-<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/>
-<booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="false"/>
-<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value="main"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_ARGUMENTS" value="@LAUNCH_ARGS@"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="@LAUNCH_PROGRAM@"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="Gecko"/>
-<booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="true"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_ID_ATTR" value=""/>
-<booleanAttribute key="org.eclipse.cdt.launch.use_terminal" value="true"/>
-<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
-<listEntry value="/gecko"/>
-</listAttribute>
-<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
-<listEntry value="4"/>
-</listAttribute>
-<booleanAttribute key="org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND" value="false"/>
-<stringAttribute key="process_factory_id" value="org.eclipse.cdt.dsf.gdb.GdbProcessFactory"/>
-</launchConfiguration>
-"""
-
-B2GFLASH_LAUNCH_CONFIG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<launchConfiguration type="org.eclipse.cdt.launch.applicationLaunchType">
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB" value="true"/>
-<listAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB_LIST"/>
-<stringAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_NAME" value="lldb"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_ON_FORK" value="false"/>
-<stringAttribute key="org.eclipse.cdt.dsf.gdb.GDB_INIT" value=""/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.NON_STOP" value="false"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.REVERSE" value="false"/>
-<listAttribute key="org.eclipse.cdt.dsf.gdb.SOLIB_PATH"/>
-<stringAttribute key="org.eclipse.cdt.dsf.gdb.TRACEPOINT_MODE" value="TP_NORMAL_ONLY"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.UPDATE_THREADLIST_ON_SUSPEND" value="false"/>
-<booleanAttribute key="org.eclipse.cdt.dsf.gdb.internal.ui.launching.LocalApplicationCDebuggerTab.DEFAULTS_SET" value="true"/>
-<intAttribute key="org.eclipse.cdt.launch.ATTR_BUILD_BEFORE_LAUNCH_ATTR" value="2"/>
-<stringAttribute key="org.eclipse.cdt.launch.COREFILE_PATH" value=""/>
-<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/>
-<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/>
-<booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="false"/>
-<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value="main"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="@LAUNCH_PROGRAM@"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="Gecko"/>
-<booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="true"/>
-<stringAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_ID_ATTR" value=""/>
-<stringAttribute key="org.eclipse.cdt.launch.WORKING_DIRECTORY" value="@OBJDIR@"/>
-<booleanAttribute key="org.eclipse.cdt.launch.use_terminal" value="true"/>
-<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
-<listEntry value="/gecko"/>
-</listAttribute>
-<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
-<listEntry value="4"/>
-</listAttribute>
-<booleanAttribute key="org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND" value="false"/>
-<stringAttribute key="process_factory_id" value="org.eclipse.cdt.dsf.gdb.GdbProcessFactory"/>
-</launchConfiguration>
-"""
-
-
-EDITOR_SETTINGS = """eclipse.preferences.version=1
-lineNumberRuler=true
-overviewRuler_migration=migrated_3.1
-printMargin=true
-printMarginColumn=80
-showCarriageReturn=false
-showEnclosedSpaces=false
-showLeadingSpaces=false
-showLineFeed=false
-showWhitespaceCharacters=true
-spacesForTabs=true
-tabWidth=2
-undoHistorySize=200
-"""
-
-FORMATTER_SETTINGS = """eclipse.preferences.version=1
-org.eclipse.cdt.core.formatter.alignment_for_arguments_in_method_invocation=16
-org.eclipse.cdt.core.formatter.alignment_for_assignment=16
-org.eclipse.cdt.core.formatter.alignment_for_base_clause_in_type_declaration=80
-org.eclipse.cdt.core.formatter.alignment_for_binary_expression=16
-org.eclipse.cdt.core.formatter.alignment_for_compact_if=16
-org.eclipse.cdt.core.formatter.alignment_for_conditional_expression=34
-org.eclipse.cdt.core.formatter.alignment_for_conditional_expression_chain=18
-org.eclipse.cdt.core.formatter.alignment_for_constructor_initializer_list=48
-org.eclipse.cdt.core.formatter.alignment_for_declarator_list=16
-org.eclipse.cdt.core.formatter.alignment_for_enumerator_list=48
-org.eclipse.cdt.core.formatter.alignment_for_expression_list=0
-org.eclipse.cdt.core.formatter.alignment_for_expressions_in_array_initializer=16
-org.eclipse.cdt.core.formatter.alignment_for_member_access=0
-org.eclipse.cdt.core.formatter.alignment_for_overloaded_left_shift_chain=16
-org.eclipse.cdt.core.formatter.alignment_for_parameters_in_method_declaration=16
-org.eclipse.cdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
-org.eclipse.cdt.core.formatter.brace_position_for_array_initializer=end_of_line
-org.eclipse.cdt.core.formatter.brace_position_for_block=end_of_line
-org.eclipse.cdt.core.formatter.brace_position_for_block_in_case=next_line_shifted
-org.eclipse.cdt.core.formatter.brace_position_for_method_declaration=next_line
-org.eclipse.cdt.core.formatter.brace_position_for_namespace_declaration=end_of_line
-org.eclipse.cdt.core.formatter.brace_position_for_switch=end_of_line
-org.eclipse.cdt.core.formatter.brace_position_for_type_declaration=next_line
-org.eclipse.cdt.core.formatter.comment.min_distance_between_code_and_line_comment=1
-org.eclipse.cdt.core.formatter.comment.never_indent_line_comments_on_first_column=true
-org.eclipse.cdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=true
-org.eclipse.cdt.core.formatter.compact_else_if=true
-org.eclipse.cdt.core.formatter.continuation_indentation=2
-org.eclipse.cdt.core.formatter.continuation_indentation_for_array_initializer=2
-org.eclipse.cdt.core.formatter.format_guardian_clause_on_one_line=false
-org.eclipse.cdt.core.formatter.indent_access_specifier_compare_to_type_header=false
-org.eclipse.cdt.core.formatter.indent_access_specifier_extra_spaces=0
-org.eclipse.cdt.core.formatter.indent_body_declarations_compare_to_access_specifier=true
-org.eclipse.cdt.core.formatter.indent_body_declarations_compare_to_namespace_header=false
-org.eclipse.cdt.core.formatter.indent_breaks_compare_to_cases=true
-org.eclipse.cdt.core.formatter.indent_declaration_compare_to_template_header=true
-org.eclipse.cdt.core.formatter.indent_empty_lines=false
-org.eclipse.cdt.core.formatter.indent_statements_compare_to_block=true
-org.eclipse.cdt.core.formatter.indent_statements_compare_to_body=true
-org.eclipse.cdt.core.formatter.indent_switchstatements_compare_to_cases=true
-org.eclipse.cdt.core.formatter.indent_switchstatements_compare_to_switch=false
-org.eclipse.cdt.core.formatter.indentation.size=2
-org.eclipse.cdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_after_template_declaration=insert
-org.eclipse.cdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_before_colon_in_constructor_initializer_list=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_before_identifier_in_function_declaration=insert
-org.eclipse.cdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
-org.eclipse.cdt.core.formatter.insert_new_line_in_empty_block=insert
-org.eclipse.cdt.core.formatter.insert_space_after_assignment_operator=insert
-org.eclipse.cdt.core.formatter.insert_space_after_binary_operator=insert
-org.eclipse.cdt.core.formatter.insert_space_after_closing_angle_bracket_in_template_arguments=insert
-org.eclipse.cdt.core.formatter.insert_space_after_closing_angle_bracket_in_template_parameters=insert
-org.eclipse.cdt.core.formatter.insert_space_after_closing_brace_in_block=insert
-org.eclipse.cdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
-org.eclipse.cdt.core.formatter.insert_space_after_colon_in_base_clause=insert
-org.eclipse.cdt.core.formatter.insert_space_after_colon_in_case=insert
-org.eclipse.cdt.core.formatter.insert_space_after_colon_in_conditional=insert
-org.eclipse.cdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_base_types=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_declarator_list=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_expression_list=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_template_arguments=insert
-org.eclipse.cdt.core.formatter.insert_space_after_comma_in_template_parameters=insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_angle_bracket_in_template_arguments=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_angle_bracket_in_template_parameters=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_bracket=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_exception_specification=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_postfix_operator=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_prefix_operator=do not insert
-org.eclipse.cdt.core.formatter.insert_space_after_question_in_conditional=insert
-org.eclipse.cdt.core.formatter.insert_space_after_semicolon_in_for=insert
-org.eclipse.cdt.core.formatter.insert_space_after_unary_operator=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_assignment_operator=insert
-org.eclipse.cdt.core.formatter.insert_space_before_binary_operator=insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_angle_bracket_in_template_arguments=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_angle_bracket_in_template_parameters=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_bracket=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_exception_specification=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_colon_in_base_clause=insert
-org.eclipse.cdt.core.formatter.insert_space_before_colon_in_case=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_colon_in_conditional=insert
-org.eclipse.cdt.core.formatter.insert_space_before_colon_in_default=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_base_types=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_declarator_list=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_expression_list=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_template_arguments=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_comma_in_template_parameters=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_angle_bracket_in_template_arguments=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_angle_bracket_in_template_parameters=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_block=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_namespace_declaration=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_bracket=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_exception_specification=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_for=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_if=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
-org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_while=insert
-org.eclipse.cdt.core.formatter.insert_space_before_postfix_operator=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_prefix_operator=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_question_in_conditional=insert
-org.eclipse.cdt.core.formatter.insert_space_before_semicolon=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
-org.eclipse.cdt.core.formatter.insert_space_before_unary_operator=do not insert
-org.eclipse.cdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
-org.eclipse.cdt.core.formatter.insert_space_between_empty_brackets=do not insert
-org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_exception_specification=do not insert
-org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
-org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
-org.eclipse.cdt.core.formatter.join_wrapped_lines=false
-org.eclipse.cdt.core.formatter.keep_else_statement_on_same_line=false
-org.eclipse.cdt.core.formatter.keep_empty_array_initializer_on_one_line=false
-org.eclipse.cdt.core.formatter.keep_imple_if_on_one_line=false
-org.eclipse.cdt.core.formatter.keep_then_statement_on_same_line=false
-org.eclipse.cdt.core.formatter.lineSplit=80
-org.eclipse.cdt.core.formatter.number_of_empty_lines_to_preserve=1
-org.eclipse.cdt.core.formatter.put_empty_statement_on_new_line=true
-org.eclipse.cdt.core.formatter.tabulation.char=space
-org.eclipse.cdt.core.formatter.tabulation.size=2
-org.eclipse.cdt.core.formatter.use_tabs_only_for_leading_indentations=false
-"""
-
-NOINDEX_TEMPLATE = """eclipse.preferences.version=1
-indexer/indexerId=org.eclipse.cdt.core.nullIndexer
-"""
diff --git a/python/mozbuild/mozbuild/backend/fastermake.py b/python/mozbuild/mozbuild/backend/fastermake.py
deleted file mode 100644
index d55928e8c7..0000000000
--- a/python/mozbuild/mozbuild/backend/fastermake.py
+++ /dev/null
@@ -1,165 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, unicode_literals, print_function
-
-from mozbuild.backend.base import PartialBackend
-from mozbuild.backend.common import CommonBackend
-from mozbuild.frontend.context import (
- ObjDirPath,
-)
-from mozbuild.frontend.data import (
- ChromeManifestEntry,
- FinalTargetPreprocessedFiles,
- FinalTargetFiles,
- JARManifest,
- XPIDLFile,
-)
-from mozbuild.makeutil import Makefile
-from mozbuild.util import OrderedDefaultDict
-from mozpack.manifests import InstallManifest
-import mozpack.path as mozpath
-
-
-class FasterMakeBackend(CommonBackend, PartialBackend):
- def _init(self):
- super(FasterMakeBackend, self)._init()
-
- self._manifest_entries = OrderedDefaultDict(set)
-
- self._install_manifests = OrderedDefaultDict(InstallManifest)
-
- self._dependencies = OrderedDefaultDict(list)
-
- self._has_xpidl = False
-
- def _add_preprocess(self, obj, path, dest, target=None, **kwargs):
- if target is None:
- target = mozpath.basename(path)
- # This matches what PP_TARGETS do in config/rules.mk.
- if target.endswith('.in'):
- target = target[:-3]
- if target.endswith('.css'):
- kwargs['marker'] = '%'
- depfile = mozpath.join(
- self.environment.topobjdir, 'faster', '.deps',
- mozpath.join(obj.install_target, dest, target).replace('/', '_'))
- self._install_manifests[obj.install_target].add_preprocess(
- mozpath.join(obj.srcdir, path),
- mozpath.join(dest, target),
- depfile,
- **kwargs)
-
- def consume_object(self, obj):
- if isinstance(obj, JARManifest) and \
- obj.install_target.startswith('dist/bin'):
- self._consume_jar_manifest(obj)
-
- elif isinstance(obj, (FinalTargetFiles,
- FinalTargetPreprocessedFiles)) and \
- obj.install_target.startswith('dist/bin'):
- defines = obj.defines or {}
- if defines:
- defines = defines.defines
- for path, files in obj.files.walk():
- for f in files:
- if isinstance(obj, FinalTargetPreprocessedFiles):
- self._add_preprocess(obj, f.full_path, path,
- target=f.target_basename,
- defines=defines)
- elif '*' in f:
- def _prefix(s):
- for p in mozpath.split(s):
- if '*' not in p:
- yield p + '/'
- prefix = ''.join(_prefix(f.full_path))
-
- self._install_manifests[obj.install_target] \
- .add_pattern_symlink(
- prefix,
- f.full_path[len(prefix):],
- mozpath.join(path, f.target_basename))
- else:
- self._install_manifests[obj.install_target].add_symlink(
- f.full_path,
- mozpath.join(path, f.target_basename)
- )
- if isinstance(f, ObjDirPath):
- dep_target = 'install-%s' % obj.install_target
- self._dependencies[dep_target].append(
- mozpath.relpath(f.full_path,
- self.environment.topobjdir))
-
- elif isinstance(obj, ChromeManifestEntry) and \
- obj.install_target.startswith('dist/bin'):
- top_level = mozpath.join(obj.install_target, 'chrome.manifest')
- if obj.path != top_level:
- entry = 'manifest %s' % mozpath.relpath(obj.path,
- obj.install_target)
- self._manifest_entries[top_level].add(entry)
- self._manifest_entries[obj.path].add(str(obj.entry))
-
- elif isinstance(obj, XPIDLFile):
- self._has_xpidl = True
- # We're not actually handling XPIDL files.
- return False
-
- else:
- return False
-
- return True
-
- def consume_finished(self):
- mk = Makefile()
- # Add the default rule at the very beginning.
- mk.create_rule(['default'])
- mk.add_statement('TOPSRCDIR = %s' % self.environment.topsrcdir)
- mk.add_statement('TOPOBJDIR = %s' % self.environment.topobjdir)
- if not self._has_xpidl:
- mk.add_statement('NO_XPIDL = 1')
-
- # Add a few necessary variables inherited from configure
- for var in (
- 'PYTHON',
- 'ACDEFINES',
- 'MOZ_BUILD_APP',
- 'MOZ_WIDGET_TOOLKIT',
- ):
- value = self.environment.substs.get(var)
- if value is not None:
- mk.add_statement('%s = %s' % (var, value))
-
- install_manifests_bases = self._install_manifests.keys()
-
- # Add information for chrome manifest generation
- manifest_targets = []
-
- for target, entries in self._manifest_entries.iteritems():
- manifest_targets.append(target)
- install_target = mozpath.basedir(target, install_manifests_bases)
- self._install_manifests[install_target].add_content(
- ''.join('%s\n' % e for e in sorted(entries)),
- mozpath.relpath(target, install_target))
-
- # Add information for install manifests.
- mk.add_statement('INSTALL_MANIFESTS = %s'
- % ' '.join(self._install_manifests.keys()))
-
- # Add dependencies we inferred:
- for target, deps in self._dependencies.iteritems():
- mk.create_rule([target]).add_dependencies(
- '$(TOPOBJDIR)/%s' % d for d in deps)
-
- mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')
-
- for base, install_manifest in self._install_manifests.iteritems():
- with self._write_file(
- mozpath.join(self.environment.topobjdir, 'faster',
- 'install_%s' % base.replace('/', '_'))) as fh:
- install_manifest.write(fileobj=fh)
-
- with self._write_file(
- mozpath.join(self.environment.topobjdir, 'faster',
- 'Makefile')) as fh:
- mk.dump(fh, removal_guard=False)
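
For reference, the deleted FasterMakeBackend collected per-target install manifests and chrome-manifest entries and then wrote a small driver Makefile: a default rule, a few variables inherited from configure, one dependency rule per install target, and an include of config/faster/rules.mk. A rough sketch of the shape of that generated Makefile, using plain string formatting instead of mozbuild.makeutil; the paths and manifest names below are made up:

    import io
    import sys

    def write_faster_makefile(fh, topsrcdir, topobjdir, install_manifests, dependencies):
        # install_manifests: iterable of install bases, e.g. ['dist/bin']
        # dependencies: dict mapping targets such as 'install-dist/bin' to
        # objdir-relative files they depend on
        fh.write('default:\n\n')
        fh.write('TOPSRCDIR = %s\n' % topsrcdir)
        fh.write('TOPOBJDIR = %s\n' % topobjdir)
        fh.write('INSTALL_MANIFESTS = %s\n' % ' '.join(install_manifests))
        for target, deps in sorted(dependencies.items()):
            fh.write('%s: %s\n' % (target, ' '.join('$(TOPOBJDIR)/%s' % d for d in deps)))
        fh.write('include $(TOPSRCDIR)/config/faster/rules.mk\n')

    buf = io.StringIO()
    write_faster_makefile(buf, '/src', '/obj', ['dist/bin'],
                          {'install-dist/bin': ['dist/bin/chrome.manifest']})
    sys.stdout.write(buf.getvalue())
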
diff --git a/python/mozbuild/mozbuild/backend/mach_commands.py b/python/mozbuild/mozbuild/backend/mach_commands.py
deleted file mode 100644
index f2448b2f44..0000000000
--- a/python/mozbuild/mozbuild/backend/mach_commands.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import argparse
-import os
-import sys
-import subprocess
-import which
-
-from mozbuild.base import (
- MachCommandBase,
-)
-
-from mach.decorators import (
- CommandArgument,
- CommandProvider,
- Command,
-)
-
-@CommandProvider
-class MachCommands(MachCommandBase):
- @Command('ide', category='devenv',
- description='Generate a project and launch an IDE.')
- @CommandArgument('ide', choices=['eclipse', 'visualstudio', 'intellij'])
- @CommandArgument('args', nargs=argparse.REMAINDER)
- def eclipse(self, ide, args):
- if ide == 'eclipse':
- backend = 'CppEclipse'
- elif ide == 'visualstudio':
- backend = 'VisualStudio'
-
- if ide == 'eclipse':
- try:
- which.which('eclipse')
- except which.WhichError:
- print('Eclipse CDT 8.4 or later must be installed in your PATH.')
- print('Download: http://www.eclipse.org/cdt/downloads.php')
- return 1
- elif ide == 'intellij':
- studio = ['idea']
- if sys.platform != 'darwin':
- try:
- which.which(studio[0])
- except:
- self.print_ide_error(ide)
- return 1
- else:
- # In order of preference!
- for d in self.get_mac_ide_preferences(ide):
- if os.path.isdir(d):
- studio = ['open', '-a', d]
- break
- else:
- print('IntelliJ IDEA 14 is not installed in /Applications.')
- return 1
-
- # Here we refresh the whole build. 'build export' is sufficient here and is probably more
- # correct, but it's also nice having a single target to get a fully built and indexed
- # project (gives an easy target to use before going out to lunch).
- res = self._mach_context.commands.dispatch('build', self._mach_context)
- if res != 0:
- return 1
-
- if ide == 'intellij':
- res = self._mach_context.commands.dispatch('package', self._mach_context)
- if res != 0:
- return 1
- else:
- # Generate or refresh the IDE backend.
- python = self.virtualenv_manager.python_path
- config_status = os.path.join(self.topobjdir, 'config.status')
- args = [python, config_status, '--backend=%s' % backend]
- res = self._run_command_in_objdir(args=args, pass_thru=True, ensure_exit_code=False)
- if res != 0:
- return 1
-
-
- if ide == 'eclipse':
- eclipse_workspace_dir = self.get_eclipse_workspace_path()
- process = subprocess.check_call(['eclipse', '-data', eclipse_workspace_dir])
- elif ide == 'visualstudio':
- visual_studio_workspace_dir = self.get_visualstudio_workspace_path()
- process = subprocess.check_call(['explorer.exe', visual_studio_workspace_dir])
- elif ide == 'intellij':
- gradle_dir = None
- if self.is_gradle_project_already_imported():
- gradle_dir = self.get_gradle_project_path()
- else:
- gradle_dir = self.get_gradle_import_path()
- process = subprocess.check_call(studio + [gradle_dir])
-
- def get_eclipse_workspace_path(self):
- from mozbuild.backend.cpp_eclipse import CppEclipseBackend
- return CppEclipseBackend.get_workspace_path(self.topsrcdir, self.topobjdir)
-
- def get_visualstudio_workspace_path(self):
- return os.path.join(self.topobjdir, 'msvc', 'mozilla.sln')
-
- def get_gradle_project_path(self):
- return os.path.join(self.topobjdir, 'mobile', 'gradle')
-
- def get_gradle_import_path(self):
- return os.path.join(self.get_gradle_project_path(), 'build.gradle')
-
- def is_gradle_project_already_imported(self):
- gradle_project_path = os.path.join(self.get_gradle_project_path(), '.idea')
- return os.path.exists(gradle_project_path)
-
- def get_mac_ide_preferences(self, ide):
- if sys.platform == 'darwin':
- return [
- '/Applications/IntelliJ IDEA 14 EAP.app',
- '/Applications/IntelliJ IDEA 14.app',
- '/Applications/IntelliJ IDEA 14 CE EAP.app',
- '/Applications/IntelliJ IDEA 14 CE.app']
-
- def print_ide_error(self, ide):
- if ide == 'intellij':
- print('IntelliJ is not installed in your PATH.')
- print('You can generate a command-line launcher from IntelliJ IDEA->Tools->Create Command-line launcher with script name \'idea\'')
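
For reference, the deleted mach_commands.py above checked that the requested IDE binary was available by probing the PATH with the third-party which module before dispatching a build. A minimal sketch of the same check, using shutil.which from the Python 3 standard library instead; the tool name and download URL are simply the ones the removed command printed for Eclipse:

    import shutil
    import sys

    def require_on_path(tool, download_hint=None):
        # Return True if `tool` resolves on PATH; otherwise print a hint,
        # mirroring what `mach ide eclipse` did with which.which().
        if shutil.which(tool):
            return True
        print('%s must be installed and on your PATH.' % tool)
        if download_hint:
            print('Download: %s' % download_hint)
        return False

    if __name__ == '__main__':
        if not require_on_path('eclipse', 'http://www.eclipse.org/cdt/downloads.php'):
            sys.exit(1)
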
diff --git a/python/mozbuild/mozbuild/backend/visualstudio.py b/python/mozbuild/mozbuild/backend/visualstudio.py
deleted file mode 100644
index 86e97d13d3..0000000000
--- a/python/mozbuild/mozbuild/backend/visualstudio.py
+++ /dev/null
@@ -1,582 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# This file contains a build backend for generating Visual Studio project
-# files.
-
-from __future__ import absolute_import, unicode_literals
-
-import errno
-import os
-import re
-import types
-import uuid
-
-from xml.dom import getDOMImplementation
-
-from mozpack.files import FileFinder
-
-from .common import CommonBackend
-from ..frontend.data import (
- Defines,
- GeneratedSources,
- HostProgram,
- HostSources,
- Library,
- LocalInclude,
- Program,
- Sources,
- UnifiedSources,
-)
-from mozbuild.base import ExecutionSummary
-
-
-MSBUILD_NAMESPACE = 'http://schemas.microsoft.com/developer/msbuild/2003'
-
-def get_id(name):
- return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper()
-
-def visual_studio_product_to_solution_version(version):
- if version == '2015':
- return '12.00', '14'
- else:
- raise Exception('Unknown version seen: %s' % version)
-
-def visual_studio_product_to_platform_toolset_version(version):
- if version == '2015':
- return 'v140'
- else:
- raise Exception('Unknown version seen: %s' % version)
-
-class VisualStudioBackend(CommonBackend):
- """Generate Visual Studio project files.
-
- This backend is used to produce Visual Studio projects and a solution
- to foster developing Firefox with Visual Studio.
-
- This backend is currently considered experimental. There are many things
- not optimal about how it works.
- """
-
- def _init(self):
- CommonBackend._init(self)
-
- # These should eventually evolve into parameters.
- self._out_dir = os.path.join(self.environment.topobjdir, 'msvc')
- self._projsubdir = 'projects'
-
- self._version = self.environment.substs.get('MSVS_VERSION', '2015')
-
- self._paths_to_sources = {}
- self._paths_to_includes = {}
- self._paths_to_defines = {}
- self._paths_to_configs = {}
- self._libs_to_paths = {}
- self._progs_to_paths = {}
-
- def summary(self):
- return ExecutionSummary(
- 'VisualStudio backend executed in {execution_time:.2f}s\n'
- 'Generated Visual Studio solution at {path:s}',
- execution_time=self._execution_time,
- path=os.path.join(self._out_dir, 'mozilla.sln'))
-
- def consume_object(self, obj):
- reldir = getattr(obj, 'relativedir', None)
-
- if hasattr(obj, 'config') and reldir not in self._paths_to_configs:
- self._paths_to_configs[reldir] = obj.config
-
- if isinstance(obj, Sources):
- self._add_sources(reldir, obj)
-
- elif isinstance(obj, HostSources):
- self._add_sources(reldir, obj)
-
- elif isinstance(obj, GeneratedSources):
- self._add_sources(reldir, obj)
-
- elif isinstance(obj, UnifiedSources):
- # XXX we should be letting CommonBackend.consume_object call this
- # for us instead.
- self._process_unified_sources(obj)
-
- elif isinstance(obj, Library):
- self._libs_to_paths[obj.basename] = reldir
-
- elif isinstance(obj, Program) or isinstance(obj, HostProgram):
- self._progs_to_paths[obj.program] = reldir
-
- elif isinstance(obj, Defines):
- self._paths_to_defines.setdefault(reldir, {}).update(obj.defines)
-
- elif isinstance(obj, LocalInclude):
- includes = self._paths_to_includes.setdefault(reldir, [])
- includes.append(obj.path.full_path)
-
- # Just acknowledge everything.
- return True
-
- def _add_sources(self, reldir, obj):
- s = self._paths_to_sources.setdefault(reldir, set())
- s.update(obj.files)
-
- def _process_unified_sources(self, obj):
- reldir = getattr(obj, 'relativedir', None)
-
- s = self._paths_to_sources.setdefault(reldir, set())
- s.update(obj.files)
-
- def consume_finished(self):
- out_dir = self._out_dir
- out_proj_dir = os.path.join(self._out_dir, self._projsubdir)
-
- projects = self._write_projects_for_sources(self._libs_to_paths,
- "library", out_proj_dir)
- projects.update(self._write_projects_for_sources(self._progs_to_paths,
- "binary", out_proj_dir))
-
- # Generate projects that can be used to build common targets.
- for target in ('export', 'binaries', 'tools', 'full'):
- basename = 'target_%s' % target
- command = '$(SolutionDir)\\mach.bat build'
- if target != 'full':
- command += ' %s' % target
-
- project_id = self._write_vs_project(out_proj_dir, basename, target,
- build_command=command,
- clean_command='$(SolutionDir)\\mach.bat build clean')
-
- projects[basename] = (project_id, basename, target)
-
- # A project that can be used to regenerate the visual studio projects.
- basename = 'target_vs'
- project_id = self._write_vs_project(out_proj_dir, basename, 'visual-studio',
- build_command='$(SolutionDir)\\mach.bat build-backend -b VisualStudio')
- projects[basename] = (project_id, basename, 'visual-studio')
-
- # Write out a shared property file with common variables.
- props_path = os.path.join(out_proj_dir, 'mozilla.props')
- with self._write_file(props_path, mode='rb') as fh:
- self._write_props(fh)
-
- # Generate some wrapper scripts that allow us to invoke mach inside
- # a MozillaBuild-like environment. We currently only use the batch
- # script. We'd like to use the PowerShell script. However, it seems
- # to buffer output from within Visual Studio (surely this is
- # configurable) and the default execution policy of PowerShell doesn't
- # allow custom scripts to be executed.
- with self._write_file(os.path.join(out_dir, 'mach.bat'), mode='rb') as fh:
- self._write_mach_batch(fh)
-
- with self._write_file(os.path.join(out_dir, 'mach.ps1'), mode='rb') as fh:
- self._write_mach_powershell(fh)
-
- # Write out a solution file to tie it all together.
- solution_path = os.path.join(out_dir, 'mozilla.sln')
- with self._write_file(solution_path, mode='rb') as fh:
- self._write_solution(fh, projects)
-
- def _write_projects_for_sources(self, sources, prefix, out_dir):
- projects = {}
- for item, path in sorted(sources.items()):
- config = self._paths_to_configs.get(path, None)
- sources = self._paths_to_sources.get(path, set())
- sources = set(os.path.join('$(TopSrcDir)', path, s) for s in sources)
- sources = set(os.path.normpath(s) for s in sources)
-
- finder = FileFinder(os.path.join(self.environment.topsrcdir, path),
- find_executables=False)
-
- headers = [t[0] for t in finder.find('*.h')]
- headers = [os.path.normpath(os.path.join('$(TopSrcDir)',
- path, f)) for f in headers]
-
- includes = [
- os.path.join('$(TopSrcDir)', path),
- os.path.join('$(TopObjDir)', path),
- ]
- includes.extend(self._paths_to_includes.get(path, []))
- includes.append('$(TopObjDir)\\dist\\include\\nss')
- includes.append('$(TopObjDir)\\dist\\include')
-
- for v in ('NSPR_CFLAGS', 'NSS_CFLAGS', 'MOZ_JPEG_CFLAGS',
- 'MOZ_PNG_CFLAGS', 'MOZ_ZLIB_CFLAGS', 'MOZ_PIXMAN_CFLAGS'):
- if not config:
- break
-
- args = config.substs.get(v, [])
-
- for i, arg in enumerate(args):
- if arg.startswith('-I'):
- includes.append(os.path.normpath(arg[2:]))
-
- # Pull in system defaults.
- includes.append('$(DefaultIncludes)')
-
- includes = [os.path.normpath(i) for i in includes]
-
- defines = []
- for k, v in self._paths_to_defines.get(path, {}).items():
- if v is True:
- defines.append(k)
- else:
- defines.append('%s=%s' % (k, v))
-
- debugger = None
- if prefix == 'binary':
- if item.startswith(self.environment.substs['MOZ_APP_NAME']):
- debugger = ('$(TopObjDir)\\dist\\bin\\%s' % item, '-no-remote')
- else:
- debugger = ('$(TopObjDir)\\dist\\bin\\%s' % item, '')
-
- basename = '%s_%s' % (prefix, item)
-
- project_id = self._write_vs_project(out_dir, basename, item,
- includes=includes,
- forced_includes=['$(TopObjDir)\\dist\\include\\mozilla-config.h'],
- defines=defines,
- headers=headers,
- sources=sources,
- debugger=debugger)
-
- projects[basename] = (project_id, basename, item)
-
- return projects
-
- def _write_solution(self, fh, projects):
- # Visual Studio appears to write out its current version in the
- # solution file. Instead of trying to figure out what version it will
- # write, try to parse the version out of the existing file and use it
- # verbatim.
- vs_version = None
- try:
- with open(fh.name, 'rb') as sfh:
- for line in sfh:
- if line.startswith(b'VisualStudioVersion = '):
- vs_version = line.split(b' = ', 1)[1].strip()
- except IOError as e:
- if e.errno != errno.ENOENT:
- raise
-
- format_version, comment_version = visual_studio_product_to_solution_version(self._version)
- # This is a Visual C++ Project type.
- project_type = '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
-
- # Visual Studio seems to require this header.
- fh.write('Microsoft Visual Studio Solution File, Format Version %s\r\n' %
- format_version)
- fh.write('# Visual Studio %s\r\n' % comment_version)
-
- if vs_version:
- fh.write('VisualStudioVersion = %s\r\n' % vs_version)
-
- # Corresponds to VS2013.
- fh.write('MinimumVisualStudioVersion = 12.0.31101.0\r\n')
-
- binaries_id = projects['target_binaries'][0]
-
- # Write out entries for each project.
- for key in sorted(projects):
- project_id, basename, name = projects[key]
- path = os.path.join(self._projsubdir, '%s.vcxproj' % basename)
-
- fh.write('Project("{%s}") = "%s", "%s", "{%s}"\r\n' % (
- project_type, name, path, project_id))
-
- # Make all libraries depend on the binaries target.
- if key.startswith('library_'):
- fh.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
- fh.write('\t\t{%s} = {%s}\r\n' % (binaries_id, binaries_id))
- fh.write('\tEndProjectSection\r\n')
-
- fh.write('EndProject\r\n')
-
- # Write out solution folders for organizing things.
-
- # This is the UUID you use for solution folders.
- container_id = '2150E333-8FDC-42A3-9474-1A3956D46DE8'
-
- def write_container(desc):
- cid = get_id(desc.encode('utf-8'))
- fh.write('Project("{%s}") = "%s", "%s", "{%s}"\r\n' % (
- container_id, desc, desc, cid))
- fh.write('EndProject\r\n')
-
- return cid
-
- library_id = write_container('Libraries')
- target_id = write_container('Build Targets')
- binary_id = write_container('Binaries')
-
- fh.write('Global\r\n')
-
- # Make every project a member of our one configuration.
- fh.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
- fh.write('\t\tBuild|Win32 = Build|Win32\r\n')
- fh.write('\tEndGlobalSection\r\n')
-
- # Set every project's active configuration to the one configuration and
- # set up the default build project.
- fh.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
- for name, project in sorted(projects.items()):
- fh.write('\t\t{%s}.Build|Win32.ActiveCfg = Build|Win32\r\n' % project[0])
-
- # Only build the full build target by default.
- # It's important we don't write multiple entries here because they
- # conflict!
- if name == 'target_full':
- fh.write('\t\t{%s}.Build|Win32.Build.0 = Build|Win32\r\n' % project[0])
-
- fh.write('\tEndGlobalSection\r\n')
-
- fh.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
- fh.write('\t\tHideSolutionNode = FALSE\r\n')
- fh.write('\tEndGlobalSection\r\n')
-
- # Associate projects with containers.
- fh.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
- for key in sorted(projects):
- project_id = projects[key][0]
-
- if key.startswith('library_'):
- container_id = library_id
- elif key.startswith('target_'):
- container_id = target_id
- elif key.startswith('binary_'):
- container_id = binary_id
- else:
- raise Exception('Unknown project type: %s' % key)
-
- fh.write('\t\t{%s} = {%s}\r\n' % (project_id, container_id))
- fh.write('\tEndGlobalSection\r\n')
-
- fh.write('EndGlobal\r\n')
-
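- # For illustration only, the solution header written above comes out
- # roughly as follows (assuming a VS2013 product version, i.e. solution
- # format version 12.00; actual values depend on self._version and any
- # existing VisualStudioVersion line):
- #
- #   Microsoft Visual Studio Solution File, Format Version 12.00
- #   # Visual Studio 2013
- #   MinimumVisualStudioVersion = 12.0.31101.0
-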
- def _write_props(self, fh):
- impl = getDOMImplementation()
- doc = impl.createDocument(MSBUILD_NAMESPACE, 'Project', None)
-
- project = doc.documentElement
- project.setAttribute('xmlns', MSBUILD_NAMESPACE)
- project.setAttribute('ToolsVersion', '4.0')
-
- ig = project.appendChild(doc.createElement('ImportGroup'))
- ig.setAttribute('Label', 'PropertySheets')
-
- pg = project.appendChild(doc.createElement('PropertyGroup'))
- pg.setAttribute('Label', 'UserMacros')
-
- ig = project.appendChild(doc.createElement('ItemGroup'))
-
- def add_var(k, v):
- e = pg.appendChild(doc.createElement(k))
- e.appendChild(doc.createTextNode(v))
-
- e = ig.appendChild(doc.createElement('BuildMacro'))
- e.setAttribute('Include', k)
-
- e = e.appendChild(doc.createElement('Value'))
- e.appendChild(doc.createTextNode('$(%s)' % k))
-
- add_var('TopObjDir', os.path.normpath(self.environment.topobjdir))
- add_var('TopSrcDir', os.path.normpath(self.environment.topsrcdir))
- add_var('PYTHON', '$(TopObjDir)\\_virtualenv\\Scripts\\python.exe')
- add_var('MACH', '$(TopSrcDir)\\mach')
-
- # From MozillaBuild.
- add_var('DefaultIncludes', os.environ.get('INCLUDE', ''))
-
- fh.write(b'\xef\xbb\xbf')
- doc.writexml(fh, addindent=' ', newl='\r\n')
-
- def _relevant_environment_variables(self):
- # Write out the environment variables, presumably coming from
- # MozillaBuild.
- for k, v in sorted(os.environ.items()):
- if not re.match('^[a-zA-Z0-9_]+$', k):
- continue
-
- if k in ('OLDPWD', 'PS1'):
- continue
-
- if k.startswith('_'):
- continue
-
- yield k, v
-
- yield 'TOPSRCDIR', self.environment.topsrcdir
- yield 'TOPOBJDIR', self.environment.topobjdir
-
- def _write_mach_powershell(self, fh):
- for k, v in self._relevant_environment_variables():
- fh.write(b'$env:%s = "%s"\r\n' % (k, v))
-
- relpath = os.path.relpath(self.environment.topsrcdir,
- self.environment.topobjdir).replace('\\', '/')
-
- fh.write(b'$bashargs = "%s/mach", "--log-no-times"\r\n' % relpath)
- fh.write(b'$bashargs = $bashargs + $args\r\n')
-
- fh.write(b"$expanded = $bashargs -join ' '\r\n")
- fh.write(b'$procargs = "-c", $expanded\r\n')
-
- fh.write(b'Start-Process -WorkingDirectory $env:TOPOBJDIR '
- b'-FilePath $env:MOZILLABUILD\\msys\\bin\\bash '
- b'-ArgumentList $procargs '
- b'-Wait -NoNewWindow\r\n')
-
- def _write_mach_batch(self, fh):
- """Write out a batch script that builds the tree.
-
- The script "bootstraps" into the MozillaBuild environment by setting
- the environment variables that are active in the current MozillaBuild
- environment. Then, it builds the tree.
- """
- for k, v in self._relevant_environment_variables():
- fh.write(b'SET "%s=%s"\r\n' % (k, v))
-
- fh.write(b'cd %TOPOBJDIR%\r\n')
-
- # We need to convert Windows-native paths to msys paths. Easiest way is
- # relative paths, since munging c:\ to /c/ is slightly more
- # complicated.
- relpath = os.path.relpath(self.environment.topsrcdir,
- self.environment.topobjdir).replace('\\', '/')
-
- # We go through mach because it has the logic for choosing the most
- # appropriate build tool.
- fh.write(b'"%%MOZILLABUILD%%\\msys\\bin\\bash" '
- b'-c "%s/mach --log-no-times %%1 %%2 %%3 %%4 %%5 %%6 %%7"' % relpath)
-
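- # For illustration, the generated mach.bat comes out roughly like the
- # following (the environment values and relative source path are examples,
- # not taken from a real build):
- #
- #   SET "MOZILLABUILD=C:\mozilla-build\"
- #   SET "TOPOBJDIR=C:\firefox\obj-debug"
- #   cd %TOPOBJDIR%
- #   "%MOZILLABUILD%\msys\bin\bash" -c "../src/mach --log-no-times %1 %2 %3 %4 %5 %6 %7"
-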
- def _write_vs_project(self, out_dir, basename, name, **kwargs):
- root = '%s.vcxproj' % basename
- project_id = get_id(basename.encode('utf-8'))
-
- with self._write_file(os.path.join(out_dir, root), mode='rb') as fh:
- project_id, name = VisualStudioBackend.write_vs_project(fh,
- self._version, project_id, name, **kwargs)
-
- with self._write_file(os.path.join(out_dir, '%s.user' % root), mode='rb') as fh:
- fh.write('<?xml version="1.0" encoding="utf-8"?>\r\n')
- fh.write('<Project ToolsVersion="4.0" xmlns="%s">\r\n' %
- MSBUILD_NAMESPACE)
- fh.write('</Project>\r\n')
-
- return project_id
-
- @staticmethod
- def write_vs_project(fh, version, project_id, name, includes=[],
- forced_includes=[], defines=[],
- build_command=None, clean_command=None,
- debugger=None, headers=[], sources=[]):
-
- impl = getDOMImplementation()
- doc = impl.createDocument(MSBUILD_NAMESPACE, 'Project', None)
-
- project = doc.documentElement
- project.setAttribute('DefaultTargets', 'Build')
- project.setAttribute('ToolsVersion', '4.0')
- project.setAttribute('xmlns', MSBUILD_NAMESPACE)
-
- ig = project.appendChild(doc.createElement('ItemGroup'))
- ig.setAttribute('Label', 'ProjectConfigurations')
-
- pc = ig.appendChild(doc.createElement('ProjectConfiguration'))
- pc.setAttribute('Include', 'Build|Win32')
-
- c = pc.appendChild(doc.createElement('Configuration'))
- c.appendChild(doc.createTextNode('Build'))
-
- p = pc.appendChild(doc.createElement('Platform'))
- p.appendChild(doc.createTextNode('Win32'))
-
- pg = project.appendChild(doc.createElement('PropertyGroup'))
- pg.setAttribute('Label', 'Globals')
-
- n = pg.appendChild(doc.createElement('ProjectName'))
- n.appendChild(doc.createTextNode(name))
-
- k = pg.appendChild(doc.createElement('Keyword'))
- k.appendChild(doc.createTextNode('MakeFileProj'))
-
- g = pg.appendChild(doc.createElement('ProjectGuid'))
- g.appendChild(doc.createTextNode('{%s}' % project_id))
-
- rn = pg.appendChild(doc.createElement('RootNamespace'))
- rn.appendChild(doc.createTextNode('mozilla'))
-
- pts = pg.appendChild(doc.createElement('PlatformToolset'))
- pts.appendChild(doc.createTextNode(visual_studio_product_to_platform_toolset_version(version)))
-
- i = project.appendChild(doc.createElement('Import'))
- i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.Default.props')
-
- ig = project.appendChild(doc.createElement('ImportGroup'))
- ig.setAttribute('Label', 'ExtensionTargets')
-
- ig = project.appendChild(doc.createElement('ImportGroup'))
- ig.setAttribute('Label', 'ExtensionSettings')
-
- ig = project.appendChild(doc.createElement('ImportGroup'))
- ig.setAttribute('Label', 'PropertySheets')
- i = ig.appendChild(doc.createElement('Import'))
- i.setAttribute('Project', 'mozilla.props')
-
- pg = project.appendChild(doc.createElement('PropertyGroup'))
- pg.setAttribute('Label', 'Configuration')
- ct = pg.appendChild(doc.createElement('ConfigurationType'))
- ct.appendChild(doc.createTextNode('Makefile'))
-
- pg = project.appendChild(doc.createElement('PropertyGroup'))
- pg.setAttribute('Condition', "'$(Configuration)|$(Platform)'=='Build|Win32'")
-
- if build_command:
- n = pg.appendChild(doc.createElement('NMakeBuildCommandLine'))
- n.appendChild(doc.createTextNode(build_command))
-
- if clean_command:
- n = pg.appendChild(doc.createElement('NMakeCleanCommandLine'))
- n.appendChild(doc.createTextNode(clean_command))
-
- if includes:
- n = pg.appendChild(doc.createElement('NMakeIncludeSearchPath'))
- n.appendChild(doc.createTextNode(';'.join(includes)))
-
- if forced_includes:
- n = pg.appendChild(doc.createElement('NMakeForcedIncludes'))
- n.appendChild(doc.createTextNode(';'.join(forced_includes)))
-
- if defines:
- n = pg.appendChild(doc.createElement('NMakePreprocessorDefinitions'))
- n.appendChild(doc.createTextNode(';'.join(defines)))
-
- if debugger:
- n = pg.appendChild(doc.createElement('LocalDebuggerCommand'))
- n.appendChild(doc.createTextNode(debugger[0]))
-
- n = pg.appendChild(doc.createElement('LocalDebuggerCommandArguments'))
- n.appendChild(doc.createTextNode(debugger[1]))
-
- i = project.appendChild(doc.createElement('Import'))
- i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.props')
-
- i = project.appendChild(doc.createElement('Import'))
- i.setAttribute('Project', '$(VCTargetsPath)\\Microsoft.Cpp.targets')
-
- # Now add files to the project.
- ig = project.appendChild(doc.createElement('ItemGroup'))
- for header in sorted(headers or []):
- n = ig.appendChild(doc.createElement('ClInclude'))
- n.setAttribute('Include', header)
-
- ig = project.appendChild(doc.createElement('ItemGroup'))
- for source in sorted(sources or []):
- n = ig.appendChild(doc.createElement('ClCompile'))
- n.setAttribute('Include', source)
-
- fh.write(b'\xef\xbb\xbf')
- doc.writexml(fh, addindent=' ', newl='\r\n')
-
- return project_id, name
diff --git a/python/mozbuild/mozbuild/codecoverage/__init__.py b/python/mozbuild/mozbuild/codecoverage/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/python/mozbuild/mozbuild/codecoverage/__init__.py
+++ /dev/null
diff --git a/python/mozbuild/mozbuild/codecoverage/chrome_map.py b/python/mozbuild/mozbuild/codecoverage/chrome_map.py
deleted file mode 100644
index 81c3c9a071..0000000000
--- a/python/mozbuild/mozbuild/codecoverage/chrome_map.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from collections import defaultdict
-import json
-import os
-import urlparse
-
-from mach.config import ConfigSettings
-from mach.logging import LoggingManager
-from mozbuild.backend.common import CommonBackend
-from mozbuild.base import MozbuildObject
-from mozbuild.frontend.data import (
- FinalTargetFiles,
- FinalTargetPreprocessedFiles,
-)
-from mozbuild.frontend.data import JARManifest, ChromeManifestEntry
-from mozpack.chrome.manifest import (
- Manifest,
- ManifestChrome,
- ManifestOverride,
- ManifestResource,
- parse_manifest,
-)
-import mozpack.path as mozpath
-
-
-class ChromeManifestHandler(object):
- def __init__(self):
- self.overrides = {}
- self.chrome_mapping = defaultdict(set)
-
- def handle_manifest_entry(self, entry):
- format_strings = {
- "content": "chrome://%s/content/",
- "resource": "resource://%s/",
- "locale": "chrome://%s/locale/",
- "skin": "chrome://%s/skin/",
- }
-
- if isinstance(entry, (ManifestChrome, ManifestResource)):
- if isinstance(entry, ManifestResource):
- dest = entry.target
- url = urlparse.urlparse(dest)
- if not url.scheme:
- dest = mozpath.normpath(mozpath.join(entry.base, dest))
- if url.scheme == 'file':
- dest = mozpath.normpath(url.path)
- else:
- dest = mozpath.normpath(entry.path)
-
- base_uri = format_strings[entry.type] % entry.name
- self.chrome_mapping[base_uri].add(dest)
- if isinstance(entry, ManifestOverride):
- self.overrides[entry.overloaded] = entry.overload
- if isinstance(entry, Manifest):
- for e in parse_manifest(None, entry.path):
- self.handle_manifest_entry(e)
-
-class ChromeMapBackend(CommonBackend):
- def _init(self):
- CommonBackend._init(self)
-
- log_manager = LoggingManager()
- self._cmd = MozbuildObject(self.environment.topsrcdir, ConfigSettings(),
- log_manager, self.environment.topobjdir)
- self._install_mapping = {}
- self.manifest_handler = ChromeManifestHandler()
-
- def consume_object(self, obj):
- if isinstance(obj, JARManifest):
- self._consume_jar_manifest(obj)
- if isinstance(obj, ChromeManifestEntry):
- self.manifest_handler.handle_manifest_entry(obj.entry)
- if isinstance(obj, (FinalTargetFiles,
- FinalTargetPreprocessedFiles)):
- self._handle_final_target_files(obj)
- return True
-
- def _handle_final_target_files(self, obj):
- for path, files in obj.files.walk():
- for f in files:
- dest = mozpath.join(obj.install_target, path, f.target_basename)
- is_pp = isinstance(obj,
- FinalTargetPreprocessedFiles)
- self._install_mapping[dest] = f.full_path, is_pp
-
- def consume_finished(self):
- # Our result has three parts:
- # A map from url prefixes to objdir directories:
- # { "chrome://mozapps/content/": [ "dist/bin/chrome/toolkit/content/mozapps" ], ... }
- # A map of overrides.
- # A map from objdir paths to sourcedir paths, and a flag for whether the source was preprocessed:
- # { "dist/bin/browser/chrome/browser/content/browser/aboutSessionRestore.js":
- # [ "$topsrcdir/browser/components/sessionstore/content/aboutSessionRestore.js", false ], ... }
- outputfile = os.path.join(self.environment.topobjdir, 'chrome-map.json')
- with self._write_file(outputfile) as fh:
- chrome_mapping = self.manifest_handler.chrome_mapping
- overrides = self.manifest_handler.overrides
- json.dump([
- {k: list(v) for k, v in chrome_mapping.iteritems()},
- overrides,
- self._install_mapping,
- ], fh, sort_keys=True, indent=2)
diff --git a/python/mozbuild/mozbuild/codecoverage/packager.py b/python/mozbuild/mozbuild/codecoverage/packager.py
deleted file mode 100644
index 3a4f359f6a..0000000000
--- a/python/mozbuild/mozbuild/codecoverage/packager.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function
-
-import argparse
-import sys
-
-from mozpack.files import FileFinder
-from mozpack.copier import Jarrer
-
-def package_gcno_tree(root, output_file):
- # XXX JarWriter doesn't support unicode strings, see bug 1056859
- if isinstance(root, unicode):
- root = root.encode('utf-8')
-
- finder = FileFinder(root)
- jarrer = Jarrer(optimize=False)
- for p, f in finder.find("**/*.gcno"):
- jarrer.add(p, f)
- jarrer.copy(output_file)
-
-
-def cli(args=sys.argv[1:]):
- parser = argparse.ArgumentParser()
- parser.add_argument('-o', '--output-file',
- dest='output_file',
- help='Path to save packaged data to.')
- parser.add_argument('--root',
- dest='root',
- default=None,
- help='Root directory to search from.')
- args = parser.parse_args(args)
-
- if not args.root:
- from buildconfig import topobjdir
- args.root = topobjdir
-
- return package_gcno_tree(args.root, args.output_file)
-
-if __name__ == '__main__':
- sys.exit(cli())
diff --git a/python/mozbuild/mozbuild/config_status.py b/python/mozbuild/mozbuild/config_status.py
index 0b4e6e41dd..ac91a65afe 100644
--- a/python/mozbuild/mozbuild/config_status.py
+++ b/python/mozbuild/mozbuild/config_status.py
@@ -32,19 +32,6 @@ from mozbuild.backend import (
log_manager = LoggingManager()
-VISUAL_STUDIO_ADVERTISEMENT = '''
-===============================
-Visual Studio Support Available
-
-You are building Firefox on Windows. You can generate Visual Studio
-files by running:
-
- mach build-backend --backend=VisualStudio
-
-===============================
-'''.strip()
-
-
def config_status(topobjdir='.', topsrcdir='.', defines=None,
non_global_defines=None, substs=None, source=None,
mozconfig=None, args=sys.argv[1:]):
@@ -173,7 +160,3 @@ def config_status(topobjdir='.', topsrcdir='.', defines=None,
for path, diff in sorted(the_backend.file_diffs.items()):
print('\n'.join(diff))
- # Advertise Visual Studio if appropriate.
- if os.name == 'nt' and 'VisualStudio' not in options.backend:
- print(VISUAL_STUDIO_ADVERTISEMENT)
-
diff --git a/python/mozbuild/mozbuild/mach_commands.py b/python/mozbuild/mozbuild/mach_commands.py
index 6616b24931..38a98b7432 100644
--- a/python/mozbuild/mozbuild/mach_commands.py
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -509,7 +509,7 @@ class Build(MachCommandBase):
# to avoid accidentally disclosing PII.
telemetry_data['substs'] = {}
try:
- for key in ['MOZ_ARTIFACT_BUILDS', 'MOZ_USING_CCACHE']:
+ for key in ['MOZ_USING_CCACHE']:
value = self.substs.get(key, False)
telemetry_data['substs'][key] = value
except BuildEnvironmentNotFoundException:
@@ -1493,154 +1493,6 @@ class MachDebug(MachCommandBase):
return json.JSONEncoder.default(self, obj)
json.dump(self, cls=EnvironmentEncoder, sort_keys=True, fp=out)
-class ArtifactSubCommand(SubCommand):
- def __call__(self, func):
- after = SubCommand.__call__(self, func)
- jobchoices = {
- 'linux',
- 'linux64',
- 'macosx64',
- 'win32',
- 'win64'
- }
- args = [
- CommandArgument('--tree', metavar='TREE', type=str,
- help='Firefox tree.'),
- CommandArgument('--job', metavar='JOB', choices=jobchoices,
- help='Build job.'),
- CommandArgument('--verbose', '-v', action='store_true',
- help='Print verbose output.'),
- ]
- for arg in args:
- after = arg(after)
- return after
-
-
-@CommandProvider
-class PackageFrontend(MachCommandBase):
- """Fetch and install binary artifacts from Mozilla automation."""
-
- @Command('artifact', category='post-build',
- description='Use pre-built artifacts to build Firefox.')
- def artifact(self):
- '''Download, cache, and install pre-built binary artifacts to build Firefox.
-
- Use |mach build| as normal to freshen your installed binary libraries:
- artifact builds automatically download, cache, and install binary
- artifacts from Mozilla automation, replacing whatever may be in your
- object directory. Use |mach artifact last| to see what binary artifacts
- were last used.
-
- Never build libxul again!
-
- '''
- pass
-
- def _set_log_level(self, verbose):
- self.log_manager.terminal_handler.setLevel(logging.INFO if not verbose else logging.DEBUG)
-
- def _install_pip_package(self, package):
- if os.environ.get('MOZ_AUTOMATION'):
- self.virtualenv_manager._run_pip([
- 'install',
- package,
- '--no-index',
- '--find-links',
- 'http://pypi.pub.build.mozilla.org/pub',
- '--trusted-host',
- 'pypi.pub.build.mozilla.org',
- ])
- return
- self.virtualenv_manager.install_pip_package(package)
-
- def _make_artifacts(self, tree=None, job=None, skip_cache=False):
- # Undo PATH munging that will be done by activating the virtualenv,
- # so that invoked subprocesses expecting to find system python
- # (git cinnabar, in particular), will not find virtualenv python.
- original_path = os.environ.get('PATH', '')
- self._activate_virtualenv()
- os.environ['PATH'] = original_path
-
- for package in ('taskcluster==0.0.32',
- 'mozregression==1.0.2'):
- self._install_pip_package(package)
-
- state_dir = self._mach_context.state_dir
- cache_dir = os.path.join(state_dir, 'package-frontend')
-
- try:
- os.makedirs(cache_dir)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
- import which
-
- here = os.path.abspath(os.path.dirname(__file__))
- build_obj = MozbuildObject.from_environment(cwd=here)
-
- hg = None
- if conditions.is_hg(build_obj):
- if self._is_windows():
- hg = which.which('hg.exe')
- else:
- hg = which.which('hg')
-
- git = None
- if conditions.is_git(build_obj):
- if self._is_windows():
- git = which.which('git.exe')
- else:
- git = which.which('git')
-
- # Absolutely must come after the virtualenv is populated!
- from mozbuild.artifacts import Artifacts
- artifacts = Artifacts(tree, self.substs, self.defines, job,
- log=self.log, cache_dir=cache_dir,
- skip_cache=skip_cache, hg=hg, git=git,
- topsrcdir=self.topsrcdir)
- return artifacts
-
- @ArtifactSubCommand('artifact', 'install',
- 'Install a good pre-built artifact.')
- @CommandArgument('source', metavar='SRC', nargs='?', type=str,
- help='Where to fetch and install artifacts from. Can be omitted, in '
- 'which case the current hg repository is inspected; an hg revision; '
- 'a remote URL; or a local file.',
- default=None)
- @CommandArgument('--skip-cache', action='store_true',
- help='Skip all local caches to force re-fetching remote artifacts.',
- default=False)
- def artifact_install(self, source=None, skip_cache=False, tree=None, job=None, verbose=False):
- self._set_log_level(verbose)
- artifacts = self._make_artifacts(tree=tree, job=job, skip_cache=skip_cache)
-
- return artifacts.install_from(source, self.distdir)
-
- @ArtifactSubCommand('artifact', 'last',
- 'Print the last pre-built artifact installed.')
- def artifact_print_last(self, tree=None, job=None, verbose=False):
- self._set_log_level(verbose)
- artifacts = self._make_artifacts(tree=tree, job=job)
- artifacts.print_last()
- return 0
-
- @ArtifactSubCommand('artifact', 'print-cache',
- 'Print local artifact cache for debugging.')
- def artifact_print_cache(self, tree=None, job=None, verbose=False):
- self._set_log_level(verbose)
- artifacts = self._make_artifacts(tree=tree, job=job)
- artifacts.print_cache()
- return 0
-
- @ArtifactSubCommand('artifact', 'clear-cache',
- 'Delete local artifacts and reset local artifact cache.')
- def artifact_clear_cache(self, tree=None, job=None, verbose=False):
- self._set_log_level(verbose)
- artifacts = self._make_artifacts(tree=tree, job=job)
- artifacts.clear_cache()
- return 0
-
@CommandProvider
class Vendor(MachCommandBase):
"""Vendor third-party dependencies into the source repository."""
diff --git a/python/mozbuild/mozbuild/test/backend/test_visualstudio.py b/python/mozbuild/mozbuild/test/backend/test_visualstudio.py
deleted file mode 100644
index bfc95e5527..0000000000
--- a/python/mozbuild/mozbuild/test/backend/test_visualstudio.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import unicode_literals
-
-from xml.dom.minidom import parse
-import os
-import unittest
-
-from mozbuild.backend.visualstudio import VisualStudioBackend
-from mozbuild.test.backend.common import BackendTester
-
-from mozunit import main
-
-
-class TestVisualStudioBackend(BackendTester):
- @unittest.skip('Failing inconsistently in automation.')
- def test_basic(self):
- """Ensure we can consume our stub project."""
-
- env = self._consume('visual-studio', VisualStudioBackend)
-
- msvc = os.path.join(env.topobjdir, 'msvc')
- self.assertTrue(os.path.isdir(msvc))
-
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'mozilla.sln')))
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'mozilla.props')))
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'mach.bat')))
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'binary_my_app.vcxproj')))
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'target_full.vcxproj')))
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'library_dir1.vcxproj')))
- self.assertTrue(os.path.isfile(os.path.join(msvc, 'library_dir1.vcxproj.user')))
-
- d = parse(os.path.join(msvc, 'library_dir1.vcxproj'))
- self.assertEqual(d.documentElement.tagName, 'Project')
- els = d.getElementsByTagName('ClCompile')
- self.assertEqual(len(els), 2)
-
- # mozilla-config.h should be explicitly listed as an include.
- els = d.getElementsByTagName('NMakeForcedIncludes')
- self.assertEqual(len(els), 1)
- self.assertEqual(els[0].firstChild.nodeValue,
- '$(TopObjDir)\\dist\\include\\mozilla-config.h')
-
- # LOCAL_INCLUDES get added to the include search path.
- els = d.getElementsByTagName('NMakeIncludeSearchPath')
- self.assertEqual(len(els), 1)
- includes = els[0].firstChild.nodeValue.split(';')
- self.assertIn(os.path.normpath('$(TopSrcDir)/includeA/foo'), includes)
- self.assertIn(os.path.normpath('$(TopSrcDir)/dir1'), includes)
- self.assertIn(os.path.normpath('$(TopObjDir)/dir1'), includes)
- self.assertIn(os.path.normpath('$(TopObjDir)\\dist\\include'), includes)
-
- # DEFINES get added to the project.
- els = d.getElementsByTagName('NMakePreprocessorDefinitions')
- self.assertEqual(len(els), 1)
- defines = els[0].firstChild.nodeValue.split(';')
- self.assertIn('DEFINEFOO', defines)
- self.assertIn('DEFINEBAR=bar', defines)
-
-
-if __name__ == '__main__':
- main()
diff --git a/security/apps/AppSignatureVerification.cpp b/security/apps/AppSignatureVerification.cpp
deleted file mode 100644
index aed0b70c1c..0000000000
--- a/security/apps/AppSignatureVerification.cpp
+++ /dev/null
@@ -1,1559 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "nsNSSCertificateDB.h"
-
-#include "AppTrustDomain.h"
-#include "CryptoTask.h"
-#include "NSSCertDBTrustDomain.h"
-#include "ScopedNSSTypes.h"
-#include "base64.h"
-#include "certdb.h"
-#include "mozilla/Casting.h"
-#include "mozilla/Logging.h"
-#include "mozilla/RefPtr.h"
-#include "mozilla/UniquePtr.h"
-#include "nsCOMPtr.h"
-#include "nsComponentManagerUtils.h"
-#include "nsDataSignatureVerifier.h"
-#include "nsHashKeys.h"
-#include "nsIDirectoryEnumerator.h"
-#include "nsIFile.h"
-#include "nsIFileStreams.h"
-#include "nsIInputStream.h"
-#include "nsIStringEnumerator.h"
-#include "nsIZipReader.h"
-#include "nsNSSCertificate.h"
-#include "nsNetUtil.h"
-#include "nsProxyRelease.h"
-#include "nsString.h"
-#include "nsTHashtable.h"
-#include "nssb64.h"
-#include "pkix/pkix.h"
-#include "pkix/pkixnss.h"
-#include "plstr.h"
-#include "secmime.h"
-
-
-using namespace mozilla::pkix;
-using namespace mozilla;
-using namespace mozilla::psm;
-
-extern mozilla::LazyLogModule gPIPNSSLog;
-
-namespace {
-
-// Reads a maximum of 1MB from a stream into the supplied buffer.
-// The reason for the 1MB limit is that this function is used to read
-// signature-related files and we want to avoid OOM. The uncompressed length of
-// an entry can be hundreds of times larger than the compressed version,
-// especially if someone has specifically crafted the entry to cause OOM or to
-// consume massive amounts of disk space.
-//
-// @param stream The input stream to read from.
-// @param buf The buffer that we read the stream into; this function
-// allocates it to the correct size for the stream.
-nsresult
-ReadStream(const nsCOMPtr<nsIInputStream>& stream, /*out*/ SECItem& buf)
-{
- // The size returned by Available() might be inaccurate so we need
- // to check that Available() matches up with the actual length of
- // the file.
- uint64_t length;
- nsresult rv = stream->Available(&length);
- if (NS_WARN_IF(NS_FAILED(rv))) {
- return rv;
- }
-
- // Cap the maximum accepted size of signature-related files at 1MB (which is
- // still crazily huge) to avoid OOM. The uncompressed length of an entry can be
- // hundreds of times larger than the compressed version, especially if
-// someone has specifically crafted the entry to cause OOM or to consume
- // massive amounts of disk space.
- static const uint32_t MAX_LENGTH = 1024 * 1024;
- if (length > MAX_LENGTH) {
- return NS_ERROR_FILE_TOO_BIG;
- }
-
- // With bug 164695 in mind we +1 to leave room for null-terminating
- // the buffer.
- SECITEM_AllocItem(buf, static_cast<uint32_t>(length + 1));
-
- // buf.len == length + 1. We attempt to read length + 1 bytes
- // instead of length, so that we can check whether the metadata for
- // the entry is incorrect.
- uint32_t bytesRead;
- rv = stream->Read(BitwiseCast<char*, unsigned char*>(buf.data), buf.len,
- &bytesRead);
- if (NS_WARN_IF(NS_FAILED(rv))) {
- return rv;
- }
- if (bytesRead != length) {
- return NS_ERROR_FILE_CORRUPTED;
- }
-
- buf.data[buf.len - 1] = 0; // null-terminate
-
- return NS_OK;
-}
-
-// Finds exactly one (signature metadata) JAR entry that matches the given
-// search pattern, and then loads it. Fails if there are no matches or if
-// there is more than one match. If bufDigest is not null then on success
-// bufDigest will contain the SHA-1 digest of the entry.
-nsresult
-FindAndLoadOneEntry(nsIZipReader * zip,
- const nsACString & searchPattern,
- /*out*/ nsACString & filename,
- /*out*/ SECItem & buf,
- /*optional, out*/ Digest * bufDigest)
-{
- nsCOMPtr<nsIUTF8StringEnumerator> files;
- nsresult rv = zip->FindEntries(searchPattern, getter_AddRefs(files));
- if (NS_FAILED(rv) || !files) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- bool more;
- rv = files->HasMore(&more);
- NS_ENSURE_SUCCESS(rv, rv);
- if (!more) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- rv = files->GetNext(filename);
- NS_ENSURE_SUCCESS(rv, rv);
-
- // Check if there is more than one match, if so then error!
- rv = files->HasMore(&more);
- NS_ENSURE_SUCCESS(rv, rv);
- if (more) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- nsCOMPtr<nsIInputStream> stream;
- rv = zip->GetInputStream(filename, getter_AddRefs(stream));
- NS_ENSURE_SUCCESS(rv, rv);
-
- rv = ReadStream(stream, buf);
- if (NS_WARN_IF(NS_FAILED(rv))) {
- return NS_ERROR_SIGNED_JAR_ENTRY_INVALID;
- }
-
- if (bufDigest) {
- rv = bufDigest->DigestBuf(SEC_OID_SHA1, buf.data, buf.len - 1);
- NS_ENSURE_SUCCESS(rv, rv);
- }
-
- return NS_OK;
-}
-
-// Verify the digest of an entry. We avoid loading the entire entry into memory
-// at once, which would require memory in proportion to the size of the largest
-// entry. Instead, we require only a small, fixed amount of memory.
-//
-// @param stream an input stream from a JAR entry or file depending on whether
-// it is from a signed archive or unpacked into a directory
-// @param digestFromManifest The digest that we're supposed to check the file's
-// contents against, from the manifest
-// @param buf A scratch buffer that we use for doing the I/O, which must have
-// already been allocated. The size of this buffer is the unit
-// size of our I/O.
-nsresult
-VerifyStreamContentDigest(nsIInputStream* stream,
- const SECItem& digestFromManifest, SECItem& buf)
-{
- MOZ_ASSERT(buf.len > 0);
- if (digestFromManifest.len != SHA1_LENGTH)
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
-
- nsresult rv;
- uint64_t len64;
- rv = stream->Available(&len64);
- NS_ENSURE_SUCCESS(rv, rv);
- if (len64 > UINT32_MAX) {
- return NS_ERROR_SIGNED_JAR_ENTRY_TOO_LARGE;
- }
-
- UniquePK11Context digestContext(PK11_CreateDigestContext(SEC_OID_SHA1));
- if (!digestContext) {
- return mozilla::psm::GetXPCOMFromNSSError(PR_GetError());
- }
-
- rv = MapSECStatus(PK11_DigestBegin(digestContext.get()));
- NS_ENSURE_SUCCESS(rv, rv);
-
- uint64_t totalBytesRead = 0;
- for (;;) {
- uint32_t bytesRead;
- rv = stream->Read(BitwiseCast<char*, unsigned char*>(buf.data), buf.len,
- &bytesRead);
- NS_ENSURE_SUCCESS(rv, rv);
-
- if (bytesRead == 0) {
- break; // EOF
- }
-
- totalBytesRead += bytesRead;
- if (totalBytesRead >= UINT32_MAX) {
- return NS_ERROR_SIGNED_JAR_ENTRY_TOO_LARGE;
- }
-
- rv = MapSECStatus(PK11_DigestOp(digestContext.get(), buf.data, bytesRead));
- NS_ENSURE_SUCCESS(rv, rv);
- }
-
- if (totalBytesRead != len64) {
- // The metadata we used for Available() doesn't match the actual size of
- // the entry.
- return NS_ERROR_SIGNED_JAR_ENTRY_INVALID;
- }
-
- // Verify that the digests match.
- Digest digest;
- rv = digest.End(SEC_OID_SHA1, digestContext);
- NS_ENSURE_SUCCESS(rv, rv);
-
- if (SECITEM_CompareItem(&digestFromManifest, &digest.get()) != SECEqual) {
- return NS_ERROR_SIGNED_JAR_MODIFIED_ENTRY;
- }
-
- return NS_OK;
-}
-
-nsresult
-VerifyEntryContentDigest(nsIZipReader* zip, const nsACString& aFilename,
- const SECItem& digestFromManifest, SECItem& buf)
-{
- nsCOMPtr<nsIInputStream> stream;
- nsresult rv = zip->GetInputStream(aFilename, getter_AddRefs(stream));
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_ENTRY_MISSING;
- }
-
- return VerifyStreamContentDigest(stream, digestFromManifest, buf);
-}
-
-// @param aDir directory containing the unpacked signed archive
-// @param aFilename path of the target file relative to aDir
-// @param digestFromManifest The digest that we're supposed to check the file's
-// contents against, from the manifest
-// @param buf A scratch buffer that we use for doing the I/O
-nsresult
-VerifyFileContentDigest(nsIFile* aDir, const nsAString& aFilename,
- const SECItem& digestFromManifest, SECItem& buf)
-{
- // Find the file corresponding to the manifest path
- nsCOMPtr<nsIFile> file;
- nsresult rv = aDir->Clone(getter_AddRefs(file));
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // We don't know how to handle JARs with signed directory entries.
- // It's technically possible in the manifest but makes no sense on disk.
- // Inside an archive we just ignore them, but here we have to treat it
- // as an error because the signed bytes never got unpacked.
- int32_t pos = 0;
- int32_t slash;
- int32_t namelen = aFilename.Length();
- if (namelen == 0 || aFilename[namelen - 1] == '/') {
- return NS_ERROR_SIGNED_JAR_ENTRY_INVALID;
- }
-
- // Append path segments one by one
- do {
- slash = aFilename.FindChar('/', pos);
- int32_t segend = (slash == kNotFound) ? namelen : slash;
- rv = file->Append(Substring(aFilename, pos, (segend - pos)));
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_ENTRY_INVALID;
- }
- pos = slash + 1;
- } while (pos < namelen && slash != kNotFound);
-
- bool exists;
- rv = file->Exists(&exists);
- if (NS_FAILED(rv) || !exists) {
- return NS_ERROR_SIGNED_JAR_ENTRY_MISSING;
- }
-
- bool isDir;
- rv = file->IsDirectory(&isDir);
- if (NS_FAILED(rv) || isDir) {
- // We only support signed files, not directory entries
- return NS_ERROR_SIGNED_JAR_ENTRY_INVALID;
- }
-
- // Open an input stream for that file and verify it.
- nsCOMPtr<nsIInputStream> stream;
- rv = NS_NewLocalFileInputStream(getter_AddRefs(stream), file, -1, -1,
- nsIFileInputStream::CLOSE_ON_EOF);
- if (NS_FAILED(rv) || !stream) {
- return NS_ERROR_SIGNED_JAR_ENTRY_MISSING;
- }
-
- return VerifyStreamContentDigest(stream, digestFromManifest, buf);
-}
-
-// On input, nextLineStart is the start of the current line. On output,
-// nextLineStart is the start of the next line.
-nsresult
-ReadLine(/*in/out*/ const char* & nextLineStart, /*out*/ nsCString & line,
- bool allowContinuations = true)
-{
- line.Truncate();
- size_t previousLength = 0;
- size_t currentLength = 0;
- for (;;) {
- const char* eol = PL_strpbrk(nextLineStart, "\r\n");
-
- if (!eol) { // Reached end of file before newline
- eol = nextLineStart + strlen(nextLineStart);
- }
-
- previousLength = currentLength;
- line.Append(nextLineStart, eol - nextLineStart);
- currentLength = line.Length();
-
- // The spec says "No line may be longer than 72 bytes (not characters)"
- // in its UTF8-encoded form.
- static const size_t lineLimit = 72;
- if (currentLength - previousLength > lineLimit) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // The spec says: "Implementations should support 65535-byte
- // (not character) header values..."
- if (currentLength > 65535) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (*eol == '\r') {
- ++eol;
- }
- if (*eol == '\n') {
- ++eol;
- }
-
- nextLineStart = eol;
-
- if (*eol != ' ') {
- // not a continuation
- return NS_OK;
- }
-
- // continuation
- if (!allowContinuations) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- ++nextLineStart; // skip space and keep appending
- }
-}
-
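-// For illustration, ReadLine() folds spec-style continuation lines (a line
-// that begins with a single space continues the previous one), so the two
-// physical lines
-//   Name: chrome/content/some/long/
-//    path/script.js
-// come back as one logical line (example values only):
-//   Name: chrome/content/some/long/path/script.js
-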
-// The header strings are defined in the JAR specification.
-#define JAR_MF_SEARCH_STRING "(M|/M)ETA-INF/(M|m)(ANIFEST|anifest).(MF|mf)$"
-#define JAR_SF_SEARCH_STRING "(M|/M)ETA-INF/*.(SF|sf)$"
-#define JAR_RSA_SEARCH_STRING "(M|/M)ETA-INF/*.(RSA|rsa)$"
-#define JAR_META_DIR "META-INF"
-#define JAR_MF_HEADER "Manifest-Version: 1.0"
-#define JAR_SF_HEADER "Signature-Version: 1.0"
-
-nsresult
-ParseAttribute(const nsAutoCString & curLine,
- /*out*/ nsAutoCString & attrName,
- /*out*/ nsAutoCString & attrValue)
-{
- // Find the colon that separates the name from the value.
- int32_t colonPos = curLine.FindChar(':');
- if (colonPos == kNotFound) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // set attrName to the name, skipping spaces between the name and colon
- int32_t nameEnd = colonPos;
- for (;;) {
- if (nameEnd == 0) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID; // colon with no name
- }
- if (curLine[nameEnd - 1] != ' ')
- break;
- --nameEnd;
- }
- curLine.Left(attrName, nameEnd);
-
- // Set attrValue to the value, skipping spaces between the colon and the
- // value. The value may be empty.
- int32_t valueStart = colonPos + 1;
- int32_t curLineLength = curLine.Length();
- while (valueStart != curLineLength && curLine[valueStart] == ' ') {
- ++valueStart;
- }
- curLine.Right(attrValue, curLineLength - valueStart);
-
- return NS_OK;
-}
-
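-// For illustration, ParseAttribute("SHA1-Digest : dGVzdA==") yields
-// attrName "SHA1-Digest" and attrValue "dGVzdA==" (example values only).
-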
-// Parses the version line of the MF or SF header.
-nsresult
-CheckManifestVersion(const char* & nextLineStart,
- const nsACString & expectedHeader)
-{
- // The JAR spec says: "Manifest-Version and Signature-Version must be first,
- // and in exactly that order (so that they can be recognized easily as magic
- // strings)."
- nsAutoCString curLine;
- nsresult rv = ReadLine(nextLineStart, curLine, false);
- if (NS_FAILED(rv)) {
- return rv;
- }
- if (!curLine.Equals(expectedHeader)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
- return NS_OK;
-}
-
-// Parses a signature file (SF) as defined in the JDK 8 JAR Specification.
-//
-// The SF file *must* contain exactly one SHA1-Digest-Manifest attribute in
-// the main section. All other sections are ignored. This means that this will
-// NOT parse old-style signature files that have separate digests per entry.
-// The JDK8 x-Digest-Manifest variant is better because:
-//
-// (1) It allows us to follow the principle that we should minimize the
-// processing of data that we do before we verify its signature. In
-// particular, with the x-Digest-Manifest style, we can verify the digest
-// of MANIFEST.MF before we parse it, which prevents malicious JARs
-// exploiting our MANIFEST.MF parser.
-// (2) It is more time-efficient and space-efficient to have one
-// x-Digest-Manifest instead of multiple x-Digest values.
-//
-// In order to get benefit (1), we do NOT implement the fallback to the older
-// mechanism as the spec requires/suggests. Also, for simplicity's sake, we only
-// support exactly one SHA1-Digest-Manifest attribute, and no other
-// algorithms.
-//
-// filebuf must be null-terminated. On output, mfDigest will contain the
-// decoded value of SHA1-Digest-Manifest.
-nsresult
-ParseSF(const char* filebuf, /*out*/ SECItem & mfDigest)
-{
- nsresult rv;
-
- const char* nextLineStart = filebuf;
- rv = CheckManifestVersion(nextLineStart, NS_LITERAL_CSTRING(JAR_SF_HEADER));
- if (NS_FAILED(rv))
- return rv;
-
- // Find SHA1-Digest-Manifest
- for (;;) {
- nsAutoCString curLine;
- rv = ReadLine(nextLineStart, curLine);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- if (curLine.Length() == 0) {
- // End of main section (blank line or end-of-file), and no
- // SHA1-Digest-Manifest found.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- nsAutoCString attrName;
- nsAutoCString attrValue;
- rv = ParseAttribute(curLine, attrName, attrValue);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- if (attrName.LowerCaseEqualsLiteral("sha1-digest-manifest")) {
- rv = MapSECStatus(ATOB_ConvertAsciiToItem(&mfDigest, attrValue.get()));
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // There could be multiple SHA1-Digest-Manifest attributes, which
- // would be an error, but it's better to just skip any erroneous
- // duplicate entries rather than trying to detect them, because:
- //
- // (1) It's simpler, and simpler generally means more secure
- // (2) An attacker can't make us accept a JAR we would otherwise
- // reject just by adding additional SHA1-Digest-Manifest
- // attributes.
- break;
- }
-
- // ignore unrecognized attributes
- }
-
- return NS_OK;
-}
-
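-// For illustration, the main section of an SF file that ParseSF() accepts
-// looks like this (the digest value is a placeholder):
-//
-//   Signature-Version: 1.0
-//   SHA1-Digest-Manifest: <base64-encoded SHA-1 of MANIFEST.MF>
-//
-// followed by a blank line; other attributes and sections are ignored.
-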
-// Parses MANIFEST.MF. The filenames of all entries will be returned in
-// mfItems. buf must be a pre-allocated scratch buffer that is used for doing
-// I/O.
-nsresult
-ParseMF(const char* filebuf, nsIZipReader * zip,
- /*out*/ nsTHashtable<nsCStringHashKey> & mfItems,
- ScopedAutoSECItem & buf)
-{
- nsresult rv;
-
- const char* nextLineStart = filebuf;
-
- rv = CheckManifestVersion(nextLineStart, NS_LITERAL_CSTRING(JAR_MF_HEADER));
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Skip the rest of the header section, which ends with a blank line.
- {
- nsAutoCString line;
- do {
- rv = ReadLine(nextLineStart, line);
- if (NS_FAILED(rv)) {
- return rv;
- }
- } while (line.Length() > 0);
-
- // Manifest containing no file entries is OK, though useless.
- if (*nextLineStart == '\0') {
- return NS_OK;
- }
- }
-
- nsAutoCString curItemName;
- ScopedAutoSECItem digest;
-
- for (;;) {
- nsAutoCString curLine;
- rv = ReadLine(nextLineStart, curLine);
- NS_ENSURE_SUCCESS(rv, rv);
-
- if (curLine.Length() == 0) {
- // end of section (blank line or end-of-file)
-
- if (curItemName.Length() == 0) {
- // '...Each section must start with an attribute with the name as
- // "Name",...', so every section must have a Name attribute.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (digest.len == 0) {
- // We require every entry to have a digest, since we require every
- // entry to be signed and we don't allow duplicate entries.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (mfItems.Contains(curItemName)) {
- // Duplicate entry
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // Verify that the entry's content digest matches the digest from this
- // MF section.
- rv = VerifyEntryContentDigest(zip, curItemName, digest, buf);
- if (NS_FAILED(rv))
- return rv;
-
- mfItems.PutEntry(curItemName);
-
- if (*nextLineStart == '\0') // end-of-file
- break;
-
- // reset so we know we haven't encountered either of these for the next
- // item yet.
- curItemName.Truncate();
- digest.reset();
-
- continue; // skip the rest of the loop below
- }
-
- nsAutoCString attrName;
- nsAutoCString attrValue;
- rv = ParseAttribute(curLine, attrName, attrValue);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Lines to look for:
-
- // (1) Digest:
- if (attrName.LowerCaseEqualsLiteral("sha1-digest"))
- {
- if (digest.len > 0) // multiple SHA1 digests in section
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
-
- rv = MapSECStatus(ATOB_ConvertAsciiToItem(&digest, attrValue.get()));
- if (NS_FAILED(rv))
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
-
- continue;
- }
-
- // (2) Name: associates this manifest section with a file in the jar.
- if (attrName.LowerCaseEqualsLiteral("name"))
- {
- if (MOZ_UNLIKELY(curItemName.Length() > 0)) // multiple names in section
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
-
- if (MOZ_UNLIKELY(attrValue.Length() == 0))
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
-
- curItemName = attrValue;
-
- continue;
- }
-
- // (3) Magic: the only other must-understand attribute
- if (attrName.LowerCaseEqualsLiteral("magic")) {
- // We don't understand any magic, so we can't verify an entry that
- // requires magic. Since we require every entry to have a valid
- // signature, we have no choice but to reject the entry.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // unrecognized attributes must be ignored
- }
-
- return NS_OK;
-}
-
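-// For illustration, each per-entry section that ParseMF() accepts looks like
-// this (the name and digest are placeholders):
-//
-//   Name: chrome/content/browser.js
-//   SHA1-Digest: <base64-encoded SHA-1 of the entry's contents>
-//
-// Each section ends with a blank line; unrecognized attributes are skipped.
-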
-struct VerifyCertificateContext {
- AppTrustedRoot trustedRoot;
- UniqueCERTCertList& builtChain;
-};
-
-nsresult
-VerifyCertificate(CERTCertificate* signerCert, void* voidContext, void* pinArg)
-{
- // TODO: null pinArg is tolerated.
- if (NS_WARN_IF(!signerCert) || NS_WARN_IF(!voidContext)) {
- return NS_ERROR_INVALID_ARG;
- }
- const VerifyCertificateContext& context =
- *static_cast<const VerifyCertificateContext*>(voidContext);
-
- AppTrustDomain trustDomain(context.builtChain, pinArg);
- if (trustDomain.SetTrustedRoot(context.trustedRoot) != SECSuccess) {
- return MapSECStatus(SECFailure);
- }
- Input certDER;
- mozilla::pkix::Result rv = certDER.Init(signerCert->derCert.data,
- signerCert->derCert.len);
- if (rv != Success) {
- return mozilla::psm::GetXPCOMFromNSSError(MapResultToPRErrorCode(rv));
- }
-
- rv = BuildCertChain(trustDomain, certDER, Now(),
- EndEntityOrCA::MustBeEndEntity,
- KeyUsage::digitalSignature,
- KeyPurposeId::id_kp_codeSigning,
- CertPolicyId::anyPolicy,
- nullptr/*stapledOCSPResponse*/);
- if (rv == mozilla::pkix::Result::ERROR_EXPIRED_CERTIFICATE) {
- // For code-signing you normally need trusted 3rd-party timestamps to
- // handle expiration properly. The signer could always mess with their
- // system clock so you can't trust the certificate was un-expired when
- // the signing took place. The choice is either to ignore expiration
- // or to enforce expiration at time of use. The latter leads to the
- // user-hostile result that perfectly good code stops working.
- //
- // Our package format doesn't support timestamps (nor do we have a
- // trusted 3rd party timestamper), but since we sign all of our apps and
- // add-ons ourselves we can trust ourselves not to mess with the clock
- // on the signing systems. We also have a revocation mechanism if we
- // need it. It's OK to ignore cert expiration under these conditions.
- //
- // This is an invalid approach if
- // * we issue certs to let others sign their own packages
- // * mozilla::pkix returns "expired" when there are "worse" problems
- // with the certificate or chain.
- // (see bug 1267318)
- rv = Success;
- }
- if (rv != Success) {
- return mozilla::psm::GetXPCOMFromNSSError(MapResultToPRErrorCode(rv));
- }
-
- return NS_OK;
-}
-
-nsresult
-VerifySignature(AppTrustedRoot trustedRoot, const SECItem& buffer,
- const SECItem& detachedDigest,
- /*out*/ UniqueCERTCertList& builtChain)
-{
- // Currently, this function is only called within the CalculateResult() method
- // of CryptoTasks. As such, NSS should not be shut down at this point and the
- // CryptoTask implementation should already hold a nsNSSShutDownPreventionLock.
- // We acquire a nsNSSShutDownPreventionLock here solely to prove we did to
- // VerifyCMSDetachedSignatureIncludingCertificate().
- nsNSSShutDownPreventionLock locker;
- VerifyCertificateContext context = { trustedRoot, builtChain };
- // XXX: missing pinArg
- return VerifyCMSDetachedSignatureIncludingCertificate(buffer, detachedDigest,
- VerifyCertificate,
- &context, nullptr,
- locker);
-}
-
-NS_IMETHODIMP
-OpenSignedAppFile(AppTrustedRoot aTrustedRoot, nsIFile* aJarFile,
- /*out, optional */ nsIZipReader** aZipReader,
- /*out, optional */ nsIX509Cert** aSignerCert)
-{
- NS_ENSURE_ARG_POINTER(aJarFile);
-
- if (aZipReader) {
- *aZipReader = nullptr;
- }
-
- if (aSignerCert) {
- *aSignerCert = nullptr;
- }
-
- nsresult rv;
-
- static NS_DEFINE_CID(kZipReaderCID, NS_ZIPREADER_CID);
- nsCOMPtr<nsIZipReader> zip = do_CreateInstance(kZipReaderCID, &rv);
- NS_ENSURE_SUCCESS(rv, rv);
-
- rv = zip->Open(aJarFile);
- NS_ENSURE_SUCCESS(rv, rv);
-
- // Signature (RSA) file
- nsAutoCString sigFilename;
- ScopedAutoSECItem sigBuffer;
- rv = FindAndLoadOneEntry(zip, NS_LITERAL_CSTRING(JAR_RSA_SEARCH_STRING),
- sigFilename, sigBuffer, nullptr);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_NOT_SIGNED;
- }
-
- // Signature (SF) file
- nsAutoCString sfFilename;
- ScopedAutoSECItem sfBuffer;
- Digest sfCalculatedDigest;
- rv = FindAndLoadOneEntry(zip, NS_LITERAL_CSTRING(JAR_SF_SEARCH_STRING),
- sfFilename, sfBuffer, &sfCalculatedDigest);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- sigBuffer.type = siBuffer;
- UniqueCERTCertList builtChain;
- rv = VerifySignature(aTrustedRoot, sigBuffer, sfCalculatedDigest.get(),
- builtChain);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- ScopedAutoSECItem mfDigest;
- rv = ParseSF(BitwiseCast<char*, unsigned char*>(sfBuffer.data), mfDigest);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Manifest (MF) file
- nsAutoCString mfFilename;
- ScopedAutoSECItem manifestBuffer;
- Digest mfCalculatedDigest;
- rv = FindAndLoadOneEntry(zip, NS_LITERAL_CSTRING(JAR_MF_SEARCH_STRING),
- mfFilename, manifestBuffer, &mfCalculatedDigest);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- if (SECITEM_CompareItem(&mfDigest, &mfCalculatedDigest.get()) != SECEqual) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // Allocate the I/O buffer only once per JAR, instead of once per entry, in
- // order to minimize malloc/free calls and in order to avoid fragmenting
- // memory.
- ScopedAutoSECItem buf(128 * 1024);
-
- nsTHashtable<nsCStringHashKey> items;
-
- rv = ParseMF(BitwiseCast<char*, unsigned char*>(manifestBuffer.data), zip,
- items, buf);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Verify every entry in the file.
- nsCOMPtr<nsIUTF8StringEnumerator> entries;
- rv = zip->FindEntries(EmptyCString(), getter_AddRefs(entries));
- if (NS_SUCCEEDED(rv) && !entries) {
- rv = NS_ERROR_UNEXPECTED;
- }
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- for (;;) {
- bool hasMore;
- rv = entries->HasMore(&hasMore);
- NS_ENSURE_SUCCESS(rv, rv);
-
- if (!hasMore) {
- break;
- }
-
- nsAutoCString entryFilename;
- rv = entries->GetNext(entryFilename);
- NS_ENSURE_SUCCESS(rv, rv);
-
- MOZ_LOG(gPIPNSSLog, LogLevel::Debug, ("Verifying digests for %s",
- entryFilename.get()));
-
- // The files that comprise the signature mechanism are not covered by the
- // signature.
- //
- // XXX: This is OK for a single signature, but doesn't work for
- // multiple signatures, because the metadata for the other signatures
- // is not signed either.
- if (entryFilename == mfFilename ||
- entryFilename == sfFilename ||
- entryFilename == sigFilename) {
- continue;
- }
-
- if (entryFilename.Length() == 0) {
- return NS_ERROR_SIGNED_JAR_ENTRY_INVALID;
- }
-
- // Entries with names that end in "/" are directory entries, which are not
- // signed.
- //
- // XXX: As long as we don't unpack the JAR into the filesystem, the "/"
- // entries are harmless. But, it is not clear what the security
- // implications of directory entries are if/when we were to unpackage the
- // JAR into the filesystem.
- if (entryFilename[entryFilename.Length() - 1] == '/') {
- continue;
- }
-
- nsCStringHashKey * item = items.GetEntry(entryFilename);
- if (!item) {
- return NS_ERROR_SIGNED_JAR_UNSIGNED_ENTRY;
- }
-
- // Remove the item so we can check for leftover items later
- items.RemoveEntry(item);
- }
-
- // We verified that every entry that we require to be signed is signed. But,
- // were there any missing entries--that is, entries that are mentioned in the
- // manifest but missing from the archive?
- if (items.Count() != 0) {
- return NS_ERROR_SIGNED_JAR_ENTRY_MISSING;
- }
-
- // Return the reader to the caller if they want it
- if (aZipReader) {
- zip.forget(aZipReader);
- }
-
- // Return the signer's certificate to the reader if they want it.
- // XXX: We should return an nsIX509CertList with the whole validated chain.
- if (aSignerCert) {
- CERTCertListNode* signerCertNode = CERT_LIST_HEAD(builtChain);
- if (!signerCertNode || CERT_LIST_END(signerCertNode, builtChain) ||
- !signerCertNode->cert) {
- return NS_ERROR_FAILURE;
- }
- nsCOMPtr<nsIX509Cert> signerCert =
- nsNSSCertificate::Create(signerCertNode->cert);
- NS_ENSURE_TRUE(signerCert, NS_ERROR_OUT_OF_MEMORY);
- signerCert.forget(aSignerCert);
- }
-
- return NS_OK;
-}
-
-nsresult
-VerifySignedManifest(AppTrustedRoot aTrustedRoot,
- nsIInputStream* aManifestStream,
- nsIInputStream* aSignatureStream,
- /*out, optional */ nsIX509Cert** aSignerCert)
-{
- NS_ENSURE_ARG(aManifestStream);
- NS_ENSURE_ARG(aSignatureStream);
-
- if (aSignerCert) {
- *aSignerCert = nullptr;
- }
-
- // Load signature file in buffer
- ScopedAutoSECItem signatureBuffer;
- nsresult rv = ReadStream(aSignatureStream, signatureBuffer);
- if (NS_FAILED(rv)) {
- return rv;
- }
- signatureBuffer.type = siBuffer;
-
- // Load manifest file in buffer
- ScopedAutoSECItem manifestBuffer;
- rv = ReadStream(aManifestStream, manifestBuffer);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Calculate SHA1 digest of the manifest buffer
- Digest manifestCalculatedDigest;
- rv = manifestCalculatedDigest.DigestBuf(SEC_OID_SHA1,
- manifestBuffer.data,
- manifestBuffer.len - 1); // buffer is null terminated
- if (NS_WARN_IF(NS_FAILED(rv))) {
- return rv;
- }
-
- // Get base64 encoded string from manifest buffer digest
- UniquePORTString
- base64EncDigest(NSSBase64_EncodeItem(nullptr, nullptr, 0,
- const_cast<SECItem*>(&manifestCalculatedDigest.get())));
- if (NS_WARN_IF(!base64EncDigest)) {
- return NS_ERROR_OUT_OF_MEMORY;
- }
-
- // Calculate SHA1 digest of the base64 encoded string
- Digest doubleDigest;
- rv = doubleDigest.DigestBuf(SEC_OID_SHA1,
- BitwiseCast<uint8_t*, char*>(base64EncDigest.get()),
- strlen(base64EncDigest.get()));
- if (NS_WARN_IF(NS_FAILED(rv))) {
- return rv;
- }
-
- // Verify the manifest signature (signed digest of the base64 encoded string)
- UniqueCERTCertList builtChain;
- rv = VerifySignature(aTrustedRoot, signatureBuffer,
- doubleDigest.get(), builtChain);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Return the signer's certificate to the reader if they want it.
- if (aSignerCert) {
- CERTCertListNode* signerCertNode = CERT_LIST_HEAD(builtChain);
- if (!signerCertNode || CERT_LIST_END(signerCertNode, builtChain) ||
- !signerCertNode->cert) {
- return NS_ERROR_FAILURE;
- }
- nsCOMPtr<nsIX509Cert> signerCert =
- nsNSSCertificate::Create(signerCertNode->cert);
- if (NS_WARN_IF(!signerCert)) {
- return NS_ERROR_OUT_OF_MEMORY;
- }
-
- signerCert.forget(aSignerCert);
- }
-
- return NS_OK;
-}
-
-class OpenSignedAppFileTask final : public CryptoTask
-{
-public:
- OpenSignedAppFileTask(AppTrustedRoot aTrustedRoot, nsIFile* aJarFile,
- nsIOpenSignedAppFileCallback* aCallback)
- : mTrustedRoot(aTrustedRoot)
- , mJarFile(aJarFile)
- , mCallback(new nsMainThreadPtrHolder<nsIOpenSignedAppFileCallback>(aCallback))
- {
- }
-
-private:
- virtual nsresult CalculateResult() override
- {
- return OpenSignedAppFile(mTrustedRoot, mJarFile,
- getter_AddRefs(mZipReader),
- getter_AddRefs(mSignerCert));
- }
-
- // nsNSSCertificate implements nsNSSShutdownObject, so there's nothing that
- // needs to be released
- virtual void ReleaseNSSResources() override { }
-
- virtual void CallCallback(nsresult rv) override
- {
- (void) mCallback->OpenSignedAppFileFinished(rv, mZipReader, mSignerCert);
- }
-
- const AppTrustedRoot mTrustedRoot;
- const nsCOMPtr<nsIFile> mJarFile;
- nsMainThreadPtrHandle<nsIOpenSignedAppFileCallback> mCallback;
- nsCOMPtr<nsIZipReader> mZipReader; // out
- nsCOMPtr<nsIX509Cert> mSignerCert; // out
-};
-
-class VerifySignedmanifestTask final : public CryptoTask
-{
-public:
- VerifySignedmanifestTask(AppTrustedRoot aTrustedRoot,
- nsIInputStream* aManifestStream,
- nsIInputStream* aSignatureStream,
- nsIVerifySignedManifestCallback* aCallback)
- : mTrustedRoot(aTrustedRoot)
- , mManifestStream(aManifestStream)
- , mSignatureStream(aSignatureStream)
- , mCallback(
- new nsMainThreadPtrHolder<nsIVerifySignedManifestCallback>(aCallback))
- {
- }
-
-private:
- virtual nsresult CalculateResult() override
- {
- return VerifySignedManifest(mTrustedRoot, mManifestStream,
- mSignatureStream, getter_AddRefs(mSignerCert));
- }
-
- // nsNSSCertificate implements nsNSSShutdownObject, so there's nothing that
- // needs to be released
- virtual void ReleaseNSSResources() override { }
-
- virtual void CallCallback(nsresult rv) override
- {
- (void) mCallback->VerifySignedManifestFinished(rv, mSignerCert);
- }
-
- const AppTrustedRoot mTrustedRoot;
- const nsCOMPtr<nsIInputStream> mManifestStream;
- const nsCOMPtr<nsIInputStream> mSignatureStream;
- nsMainThreadPtrHandle<nsIVerifySignedManifestCallback> mCallback;
- nsCOMPtr<nsIX509Cert> mSignerCert; // out
-};
-
-} // unnamed namespace
-
-NS_IMETHODIMP
-nsNSSCertificateDB::OpenSignedAppFileAsync(
- AppTrustedRoot aTrustedRoot, nsIFile* aJarFile,
- nsIOpenSignedAppFileCallback* aCallback)
-{
- NS_ENSURE_ARG_POINTER(aJarFile);
- NS_ENSURE_ARG_POINTER(aCallback);
- RefPtr<OpenSignedAppFileTask> task(new OpenSignedAppFileTask(aTrustedRoot,
- aJarFile,
- aCallback));
- return task->Dispatch("SignedJAR");
-}
-
-NS_IMETHODIMP
-nsNSSCertificateDB::VerifySignedManifestAsync(
- AppTrustedRoot aTrustedRoot, nsIInputStream* aManifestStream,
- nsIInputStream* aSignatureStream, nsIVerifySignedManifestCallback* aCallback)
-{
- NS_ENSURE_ARG_POINTER(aManifestStream);
- NS_ENSURE_ARG_POINTER(aSignatureStream);
- NS_ENSURE_ARG_POINTER(aCallback);
-
- RefPtr<VerifySignedmanifestTask> task(
- new VerifySignedmanifestTask(aTrustedRoot, aManifestStream,
- aSignatureStream, aCallback));
- return task->Dispatch("SignedManifest");
-}
-
-
-//
-// Signature verification for archives unpacked into a file structure
-//
-
-// Finds the "*.rsa" signature file in the META-INF directory and returns
-// the name. It is an error if there is no .rsa file or if there is more than one.
-nsresult
-FindSignatureFilename(nsIFile* aMetaDir,
- /*out*/ nsAString& aFilename)
-{
- nsCOMPtr<nsISimpleEnumerator> entries;
- nsresult rv = aMetaDir->GetDirectoryEntries(getter_AddRefs(entries));
- nsCOMPtr<nsIDirectoryEnumerator> files = do_QueryInterface(entries);
- if (NS_FAILED(rv) || !files) {
- return NS_ERROR_SIGNED_JAR_NOT_SIGNED;
- }
-
- bool found = false;
- nsCOMPtr<nsIFile> file;
- rv = files->GetNextFile(getter_AddRefs(file));
-
- while (NS_SUCCEEDED(rv) && file) {
- nsAutoString leafname;
- rv = file->GetLeafName(leafname);
- if (NS_SUCCEEDED(rv)) {
- if (StringEndsWith(leafname, NS_LITERAL_STRING(".rsa"))) {
- if (!found) {
- found = true;
- aFilename = leafname;
- } else {
- // second signature file is an error
- rv = NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- break;
- }
- }
- rv = files->GetNextFile(getter_AddRefs(file));
- }
- }
-
- if (!found) {
- rv = NS_ERROR_SIGNED_JAR_NOT_SIGNED;
- }
-
- files->Close();
- return rv;
-}
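For orientation, the directory layout these checks expect is the standard signed-JAR one (file names below are illustrative; FindSignatureFilename() accepts any single *.rsa file, and the matching .sf and .mf names are derived by the code further down):

META-INF/zigbert.rsa   - PKCS#7 signature blob located by FindSignatureFilename()
META-INF/zigbert.sf    - signature (SF) file: same base name, ".sf" extension
META-INF/manifest.mf   - manifest listing a digest for every other unpacked file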
-
-// Loads the signature metadata file that matches the given filename in
-// the passed-in Meta-inf directory. If bufDigest is not null then on
-// success bufDigest will contain the SHA-1 digest of the entry.
-nsresult
-LoadOneMetafile(nsIFile* aMetaDir,
- const nsAString& aFilename,
- /*out*/ SECItem& aBuf,
- /*optional, out*/ Digest* aBufDigest)
-{
- nsCOMPtr<nsIFile> metafile;
- nsresult rv = aMetaDir->Clone(getter_AddRefs(metafile));
- NS_ENSURE_SUCCESS(rv, rv);
-
- rv = metafile->Append(aFilename);
- NS_ENSURE_SUCCESS(rv, rv);
-
- bool exists;
- rv = metafile->Exists(&exists);
- if (NS_FAILED(rv) || !exists) {
- // we can call a missing .rsa file "unsigned" but FindSignatureFilename()
- // already found one: missing other metadata files means a broken signature.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- nsCOMPtr<nsIInputStream> stream;
- rv = NS_NewLocalFileInputStream(getter_AddRefs(stream), metafile);
- NS_ENSURE_SUCCESS(rv, rv);
-
- rv = ReadStream(stream, aBuf);
- stream->Close();
- NS_ENSURE_SUCCESS(rv, rv);
-
- if (aBufDigest) {
- rv = aBufDigest->DigestBuf(SEC_OID_SHA1, aBuf.data, aBuf.len - 1);
- NS_ENSURE_SUCCESS(rv, rv);
- }
-
- return NS_OK;
-}
-
-// Parses MANIFEST.MF and verifies the contents of the unpacked files
-// listed in the manifest.
-// The filenames of all entries will be returned in aMfItems. aBuf must
-// be a pre-allocated scratch buffer that is used for doing I/O.
-nsresult
-ParseMFUnpacked(const char* aFilebuf, nsIFile* aDir,
- /*out*/ nsTHashtable<nsStringHashKey>& aMfItems,
- ScopedAutoSECItem& aBuf)
-{
- nsresult rv;
-
- const char* nextLineStart = aFilebuf;
-
- rv = CheckManifestVersion(nextLineStart, NS_LITERAL_CSTRING(JAR_MF_HEADER));
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Skip the rest of the header section, which ends with a blank line.
- {
- nsAutoCString line;
- do {
- rv = ReadLine(nextLineStart, line);
- if (NS_FAILED(rv)) {
- return rv;
- }
- } while (line.Length() > 0);
-
- // Manifest containing no file entries is OK, though useless.
- if (*nextLineStart == '\0') {
- return NS_OK;
- }
- }
-
- nsAutoString curItemName;
- ScopedAutoSECItem digest;
-
- for (;;) {
- nsAutoCString curLine;
- rv = ReadLine(nextLineStart, curLine);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- if (curLine.Length() == 0) {
- // end of section (blank line or end-of-file)
-
- if (curItemName.Length() == 0) {
- // '...Each section must start with an attribute with the name as
- // "Name",...', so every section must have a Name attribute.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (digest.len == 0) {
- // We require every entry to have a digest, since we require every
- // entry to be signed and we don't allow duplicate entries.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (aMfItems.Contains(curItemName)) {
- // Duplicate entry
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // Verify that the file's content digest matches the digest from this
- // MF section.
- rv = VerifyFileContentDigest(aDir, curItemName, digest, aBuf);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- aMfItems.PutEntry(curItemName);
-
- if (*nextLineStart == '\0') {
- // end-of-file
- break;
- }
-
- // reset so we know we haven't encountered either of these for the next
- // item yet.
- curItemName.Truncate();
- digest.reset();
-
- continue; // skip the rest of the loop below
- }
-
- nsAutoCString attrName;
- nsAutoCString attrValue;
- rv = ParseAttribute(curLine, attrName, attrValue);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // Lines to look for:
-
- // (1) Digest:
- if (attrName.LowerCaseEqualsLiteral("sha1-digest")) {
- if (digest.len > 0) {
- // multiple SHA1 digests in section
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- rv = MapSECStatus(ATOB_ConvertAsciiToItem(&digest, attrValue.get()));
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- continue;
- }
-
- // (2) Name: associates this manifest section with a file in the jar.
- if (attrName.LowerCaseEqualsLiteral("name")) {
- if (MOZ_UNLIKELY(curItemName.Length() > 0)) {
- // multiple names in section
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (MOZ_UNLIKELY(attrValue.Length() == 0)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- curItemName = NS_ConvertUTF8toUTF16(attrValue);
-
- continue;
- }
-
- // (3) Magic: the only other must-understand attribute
- if (attrName.LowerCaseEqualsLiteral("magic")) {
- // We don't understand any magic, so we can't verify an entry that
- // requires magic. Since we require every entry to have a valid
- // signature, we have no choice but to reject the entry.
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // unrecognized attributes must be ignored
- }
-
- return NS_OK;
-}
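For reference, the manifest ParseMFUnpacked() walks follows the standard JAR layout: a version header terminated by a blank line, then one section per entry, each of which must carry a Name and a SHA1-Digest attribute. A minimal illustrative section (the path and digest value are invented):

Manifest-Version: 1.0

Name: content/example.js
SHA1-Digest: 3v9GPP6N3zXy0F5hVbQ0o1F0q2E=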
-
-// recursively check a directory tree for files not in the list of
-// verified files we found in the manifest. For each file we find,
-// check it against the files found in the manifest. If the file wasn't
-// in the manifest then it's unsigned and we can stop looking. Otherwise
-// remove it from the collection so we can check leftovers later.
-//
-// @param aDir Directory to check
-// @param aPath Relative path to that directory (to check against aItems)
-// @param aItems All the files found
-// @param *Filename signature files that won't be in the manifest
-nsresult
-CheckDirForUnsignedFiles(nsIFile* aDir,
- const nsString& aPath,
- /* in/out */ nsTHashtable<nsStringHashKey>& aItems,
- const nsAString& sigFilename,
- const nsAString& sfFilename,
- const nsAString& mfFilename)
-{
- nsCOMPtr<nsISimpleEnumerator> entries;
- nsresult rv = aDir->GetDirectoryEntries(getter_AddRefs(entries));
- nsCOMPtr<nsIDirectoryEnumerator> files = do_QueryInterface(entries);
- if (NS_FAILED(rv) || !files) {
- return NS_ERROR_SIGNED_JAR_ENTRY_MISSING;
- }
-
- bool inMeta = StringBeginsWith(aPath, NS_LITERAL_STRING(JAR_META_DIR));
-
- while (NS_SUCCEEDED(rv)) {
- nsCOMPtr<nsIFile> file;
- rv = files->GetNextFile(getter_AddRefs(file));
- if (NS_FAILED(rv) || !file) {
- break;
- }
-
- nsAutoString leafname;
- rv = file->GetLeafName(leafname);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- nsAutoString curName(aPath + leafname);
-
- bool isDir;
- rv = file->IsDirectory(&isDir);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // if it's a directory we need to recurse
- if (isDir) {
- curName.Append(NS_LITERAL_STRING("/"));
- rv = CheckDirForUnsignedFiles(file, curName, aItems,
- sigFilename, sfFilename, mfFilename);
- } else {
- // The files that comprise the signature mechanism are not covered by the
- // signature.
- //
- // XXX: This is OK for a single signature, but doesn't work for
- // multiple signatures because the metadata for the other signatures
- // is not signed either.
- if (inMeta && ( leafname == sigFilename ||
- leafname == sfFilename ||
- leafname == mfFilename )) {
- continue;
- }
-
- // make sure the current file was found in the manifest
- nsStringHashKey* item = aItems.GetEntry(curName);
- if (!item) {
- return NS_ERROR_SIGNED_JAR_UNSIGNED_ENTRY;
- }
-
- // Remove the item so we can check for leftover items later
- aItems.RemoveEntry(item);
- }
- }
- files->Close();
- return rv;
-}
-
-/*
- * Verify the signature of a directory structure as if it were a
- * signed JAR file (used for unpacked JARs)
- */
-nsresult
-VerifySignedDirectory(AppTrustedRoot aTrustedRoot,
- nsIFile* aDirectory,
- /*out, optional */ nsIX509Cert** aSignerCert)
-{
- NS_ENSURE_ARG_POINTER(aDirectory);
-
- if (aSignerCert) {
- *aSignerCert = nullptr;
- }
-
- // Make sure there's a META-INF directory
-
- nsCOMPtr<nsIFile> metaDir;
- nsresult rv = aDirectory->Clone(getter_AddRefs(metaDir));
- if (NS_FAILED(rv)) {
- return rv;
- }
- rv = metaDir->Append(NS_LITERAL_STRING(JAR_META_DIR));
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- bool exists;
- rv = metaDir->Exists(&exists);
- if (NS_FAILED(rv) || !exists) {
- return NS_ERROR_SIGNED_JAR_NOT_SIGNED;
- }
- bool isDirectory;
- rv = metaDir->IsDirectory(&isDirectory);
- if (NS_FAILED(rv) || !isDirectory) {
- return NS_ERROR_SIGNED_JAR_NOT_SIGNED;
- }
-
- // Find and load the Signature (RSA) file
-
- nsAutoString sigFilename;
- rv = FindSignatureFilename(metaDir, sigFilename);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- ScopedAutoSECItem sigBuffer;
- rv = LoadOneMetafile(metaDir, sigFilename, sigBuffer, nullptr);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_NOT_SIGNED;
- }
-
- // Load the signature (SF) file and verify the signature.
- // The .sf and .rsa files must have the same name apart from the extension.
-
- nsAutoString sfFilename(Substring(sigFilename, 0, sigFilename.Length() - 3)
- + NS_LITERAL_STRING("sf"));
-
- ScopedAutoSECItem sfBuffer;
- Digest sfCalculatedDigest;
- rv = LoadOneMetafile(metaDir, sfFilename, sfBuffer, &sfCalculatedDigest);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- sigBuffer.type = siBuffer;
- UniqueCERTCertList builtChain;
- rv = VerifySignature(aTrustedRoot, sigBuffer, sfCalculatedDigest.get(),
- builtChain);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // Get the expected manifest hash from the signed .sf file
-
- ScopedAutoSECItem mfDigest;
- rv = ParseSF(BitwiseCast<char*, unsigned char*>(sfBuffer.data), mfDigest);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // Load manifest (MF) file and verify signature
-
- nsAutoString mfFilename(NS_LITERAL_STRING("manifest.mf"));
- ScopedAutoSECItem manifestBuffer;
- Digest mfCalculatedDigest;
- rv = LoadOneMetafile(metaDir, mfFilename, manifestBuffer, &mfCalculatedDigest);
- if (NS_FAILED(rv)) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- if (SECITEM_CompareItem(&mfDigest, &mfCalculatedDigest.get()) != SECEqual) {
- return NS_ERROR_SIGNED_JAR_MANIFEST_INVALID;
- }
-
- // Parse manifest and verify signed hash of all listed files
-
- // Allocate the I/O buffer only once per JAR, instead of once per entry, in
- // order to minimize malloc/free calls and in order to avoid fragmenting
- // memory.
- ScopedAutoSECItem buf(128 * 1024);
-
- nsTHashtable<nsStringHashKey> items;
- rv = ParseMFUnpacked(BitwiseCast<char*, unsigned char*>(manifestBuffer.data),
- aDirectory, items, buf);
- if (NS_FAILED(rv)){
- return rv;
- }
-
- // We've checked that everything listed in the manifest exists and is signed
- // correctly. Now check on disk for extra (unsigned) files.
- // Deletes found entries from items as it goes.
- rv = CheckDirForUnsignedFiles(aDirectory, EmptyString(), items,
- sigFilename, sfFilename, mfFilename);
- if (NS_FAILED(rv)) {
- return rv;
- }
-
- // We verified that every entry that we require to be signed is signed. But,
- // were there any missing entries--that is, entries that are mentioned in the
-// manifest but missing from the directory tree? (There shouldn't be, given
-// that ParseMFUnpacked() checks them all, but it's a cheap sanity check.)
- if (items.Count() != 0) {
- return NS_ERROR_SIGNED_JAR_ENTRY_MISSING;
- }
-
- // Return the signer's certificate to the reader if they want it.
- // XXX: We should return an nsIX509CertList with the whole validated chain.
- if (aSignerCert) {
- CERTCertListNode* signerCertNode = CERT_LIST_HEAD(builtChain);
- if (!signerCertNode || CERT_LIST_END(signerCertNode, builtChain) ||
- !signerCertNode->cert) {
- return NS_ERROR_FAILURE;
- }
- nsCOMPtr<nsIX509Cert> signerCert =
- nsNSSCertificate::Create(signerCertNode->cert);
- NS_ENSURE_TRUE(signerCert, NS_ERROR_OUT_OF_MEMORY);
- signerCert.forget(aSignerCert);
- }
-
- return NS_OK;
-}
-
-class VerifySignedDirectoryTask final : public CryptoTask
-{
-public:
- VerifySignedDirectoryTask(AppTrustedRoot aTrustedRoot, nsIFile* aUnpackedJar,
- nsIVerifySignedDirectoryCallback* aCallback)
- : mTrustedRoot(aTrustedRoot)
- , mDirectory(aUnpackedJar)
- , mCallback(new nsMainThreadPtrHolder<nsIVerifySignedDirectoryCallback>(aCallback))
- {
- }
-
-private:
- virtual nsresult CalculateResult() override
- {
- return VerifySignedDirectory(mTrustedRoot,
- mDirectory,
- getter_AddRefs(mSignerCert));
- }
-
- // This class doesn't directly hold NSS resources so there's nothing that
- // needs to be released
- virtual void ReleaseNSSResources() override { }
-
- virtual void CallCallback(nsresult rv) override
- {
- (void) mCallback->VerifySignedDirectoryFinished(rv, mSignerCert);
- }
-
- const AppTrustedRoot mTrustedRoot;
- const nsCOMPtr<nsIFile> mDirectory;
- nsMainThreadPtrHandle<nsIVerifySignedDirectoryCallback> mCallback;
- nsCOMPtr<nsIX509Cert> mSignerCert; // out
-};
-
-NS_IMETHODIMP
-nsNSSCertificateDB::VerifySignedDirectoryAsync(
- AppTrustedRoot aTrustedRoot, nsIFile* aUnpackedJar,
- nsIVerifySignedDirectoryCallback* aCallback)
-{
- NS_ENSURE_ARG_POINTER(aUnpackedJar);
- NS_ENSURE_ARG_POINTER(aCallback);
- RefPtr<VerifySignedDirectoryTask> task(new VerifySignedDirectoryTask(aTrustedRoot,
- aUnpackedJar,
- aCallback));
- return task->Dispatch("UnpackedJar");
-}
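A rough sketch (not part of the patch) of the callback a C++ consumer would hand to verifySignedDirectoryAsync; the method name and arguments follow CallCallback() above, while the surrounding XPCOM boilerplate is assumed:

class MyVerifyCallback final : public nsIVerifySignedDirectoryCallback
{
public:
  NS_DECL_ISUPPORTS

  NS_IMETHOD VerifySignedDirectoryFinished(nsresult aRv,
                                           nsIX509Cert* aSignerCert) override
  {
    if (NS_SUCCEEDED(aRv)) {
      // aSignerCert is the signer's end-entity certificate; inspect or cache
      // it here as needed.
    }
    return NS_OK;
  }

private:
  ~MyVerifyCallback() = default;
};

NS_IMPL_ISUPPORTS(MyVerifyCallback, nsIVerifySignedDirectoryCallback)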
diff --git a/security/apps/AppTrustDomain.cpp b/security/apps/AppTrustDomain.cpp
deleted file mode 100644
index 35be4ebd97..0000000000
--- a/security/apps/AppTrustDomain.cpp
+++ /dev/null
@@ -1,388 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "AppTrustDomain.h"
-#include "MainThreadUtils.h"
-#include "certdb.h"
-#include "mozilla/ArrayUtils.h"
-#include "mozilla/Casting.h"
-#include "mozilla/Preferences.h"
-#include "nsComponentManagerUtils.h"
-#include "nsIFile.h"
-#include "nsIFileStreams.h"
-#include "nsIX509CertDB.h"
-#include "nsNSSCertificate.h"
-#include "nsNetUtil.h"
-#include "pkix/pkixnss.h"
-#include "prerror.h"
-#include "secerr.h"
-
-// Generated in Makefile.in
-#include "marketplace-prod-public.inc"
-#include "marketplace-prod-reviewers.inc"
-#include "marketplace-dev-public.inc"
-#include "marketplace-dev-reviewers.inc"
-#include "marketplace-stage.inc"
-#include "xpcshell.inc"
-// Trusted Hosted Apps Certificates
-#include "manifest-signing-root.inc"
-#include "manifest-signing-test-root.inc"
-// Add-on signing Certificates
-#include "addons-public.inc"
-#include "addons-stage.inc"
-// Privileged Package Certificates
-#include "privileged-package-root.inc"
-
-using namespace mozilla::pkix;
-
-extern mozilla::LazyLogModule gPIPNSSLog;
-
-static const unsigned int DEFAULT_MIN_RSA_BITS = 2048;
-static char kDevImportedDER[] =
- "network.http.signed-packages.developer-root";
-
-namespace mozilla { namespace psm {
-
-StaticMutex AppTrustDomain::sMutex;
-UniquePtr<unsigned char[]> AppTrustDomain::sDevImportedDERData;
-unsigned int AppTrustDomain::sDevImportedDERLen = 0;
-
-AppTrustDomain::AppTrustDomain(UniqueCERTCertList& certChain, void* pinArg)
- : mCertChain(certChain)
- , mPinArg(pinArg)
- , mMinRSABits(DEFAULT_MIN_RSA_BITS)
-{
-}
-
-SECStatus
-AppTrustDomain::SetTrustedRoot(AppTrustedRoot trustedRoot)
-{
- SECItem trustedDER;
-
- // Load the trusted certificate into the in-memory NSS database so that
- // CERT_CreateSubjectCertList can find it.
-
- switch (trustedRoot)
- {
- case nsIX509CertDB::AppMarketplaceProdPublicRoot:
- trustedDER.data = const_cast<uint8_t*>(marketplaceProdPublicRoot);
- trustedDER.len = mozilla::ArrayLength(marketplaceProdPublicRoot);
- break;
-
- case nsIX509CertDB::AppMarketplaceProdReviewersRoot:
- trustedDER.data = const_cast<uint8_t*>(marketplaceProdReviewersRoot);
- trustedDER.len = mozilla::ArrayLength(marketplaceProdReviewersRoot);
- break;
-
- case nsIX509CertDB::AppMarketplaceDevPublicRoot:
- trustedDER.data = const_cast<uint8_t*>(marketplaceDevPublicRoot);
- trustedDER.len = mozilla::ArrayLength(marketplaceDevPublicRoot);
- break;
-
- case nsIX509CertDB::AppMarketplaceDevReviewersRoot:
- trustedDER.data = const_cast<uint8_t*>(marketplaceDevReviewersRoot);
- trustedDER.len = mozilla::ArrayLength(marketplaceDevReviewersRoot);
- break;
-
- case nsIX509CertDB::AppMarketplaceStageRoot:
- trustedDER.data = const_cast<uint8_t*>(marketplaceStageRoot);
- trustedDER.len = mozilla::ArrayLength(marketplaceStageRoot);
- // The staging root was generated with a 1024-bit key.
- mMinRSABits = 1024u;
- break;
-
- case nsIX509CertDB::AppXPCShellRoot:
- trustedDER.data = const_cast<uint8_t*>(xpcshellRoot);
- trustedDER.len = mozilla::ArrayLength(xpcshellRoot);
- break;
-
- case nsIX509CertDB::AddonsPublicRoot:
- trustedDER.data = const_cast<uint8_t*>(addonsPublicRoot);
- trustedDER.len = mozilla::ArrayLength(addonsPublicRoot);
- break;
-
- case nsIX509CertDB::AddonsStageRoot:
- trustedDER.data = const_cast<uint8_t*>(addonsStageRoot);
- trustedDER.len = mozilla::ArrayLength(addonsStageRoot);
- break;
-
- case nsIX509CertDB::PrivilegedPackageRoot:
- trustedDER.data = const_cast<uint8_t*>(privilegedPackageRoot);
- trustedDER.len = mozilla::ArrayLength(privilegedPackageRoot);
- break;
-
- case nsIX509CertDB::DeveloperImportedRoot: {
- StaticMutexAutoLock lock(sMutex);
- if (!sDevImportedDERData) {
- MOZ_ASSERT(!NS_IsMainThread());
- nsCOMPtr<nsIFile> file(do_CreateInstance("@mozilla.org/file/local;1"));
- if (!file) {
- PR_SetError(SEC_ERROR_IO, 0);
- return SECFailure;
- }
- nsresult rv = file->InitWithNativePath(
- Preferences::GetCString(kDevImportedDER));
- if (NS_FAILED(rv)) {
- PR_SetError(SEC_ERROR_IO, 0);
- return SECFailure;
- }
-
- nsCOMPtr<nsIInputStream> inputStream;
- NS_NewLocalFileInputStream(getter_AddRefs(inputStream), file, -1, -1,
- nsIFileInputStream::CLOSE_ON_EOF);
- if (!inputStream) {
- PR_SetError(SEC_ERROR_IO, 0);
- return SECFailure;
- }
-
- uint64_t length;
- rv = inputStream->Available(&length);
- if (NS_FAILED(rv)) {
- PR_SetError(SEC_ERROR_IO, 0);
- return SECFailure;
- }
-
- auto data = MakeUnique<char[]>(length);
- rv = inputStream->Read(data.get(), length, &sDevImportedDERLen);
- if (NS_FAILED(rv)) {
- PR_SetError(SEC_ERROR_IO, 0);
- return SECFailure;
- }
-
- MOZ_ASSERT(length == sDevImportedDERLen);
- sDevImportedDERData.reset(
- BitwiseCast<unsigned char*, char*>(data.release()));
- }
-
- trustedDER.data = sDevImportedDERData.get();
- trustedDER.len = sDevImportedDERLen;
- break;
- }
-
- default:
- PR_SetError(SEC_ERROR_INVALID_ARGS, 0);
- return SECFailure;
- }
-
- mTrustedRoot.reset(CERT_NewTempCertificate(CERT_GetDefaultCertDB(),
- &trustedDER, nullptr, false, true));
- if (!mTrustedRoot) {
- return SECFailure;
- }
-
- return SECSuccess;
-}
-
-Result
-AppTrustDomain::FindIssuer(Input encodedIssuerName, IssuerChecker& checker,
- Time)
-
-{
- MOZ_ASSERT(mTrustedRoot);
- if (!mTrustedRoot) {
- return Result::FATAL_ERROR_INVALID_STATE;
- }
-
- // TODO(bug 1035418): If/when mozilla::pkix relaxes the restriction that
- // FindIssuer must only pass certificates with a matching subject name to
- // checker.Check, we can stop using CERT_CreateSubjectCertList and instead
- // use logic like this:
- //
- // 1. First, try the trusted trust anchor.
- // 2. Secondly, iterate through the certificates that were stored in the CMS
- // message, passing each one to checker.Check.
- SECItem encodedIssuerNameSECItem =
- UnsafeMapInputToSECItem(encodedIssuerName);
- UniqueCERTCertList
- candidates(CERT_CreateSubjectCertList(nullptr, CERT_GetDefaultCertDB(),
- &encodedIssuerNameSECItem, 0,
- false));
- if (candidates) {
- for (CERTCertListNode* n = CERT_LIST_HEAD(candidates);
- !CERT_LIST_END(n, candidates); n = CERT_LIST_NEXT(n)) {
- Input certDER;
- Result rv = certDER.Init(n->cert->derCert.data, n->cert->derCert.len);
- if (rv != Success) {
- continue; // probably too big
- }
-
- bool keepGoing;
- rv = checker.Check(certDER, nullptr/*additionalNameConstraints*/,
- keepGoing);
- if (rv != Success) {
- return rv;
- }
- if (!keepGoing) {
- break;
- }
- }
- }
-
- return Success;
-}
-
-Result
-AppTrustDomain::GetCertTrust(EndEntityOrCA endEntityOrCA,
- const CertPolicyId& policy,
- Input candidateCertDER,
- /*out*/ TrustLevel& trustLevel)
-{
- MOZ_ASSERT(policy.IsAnyPolicy());
- MOZ_ASSERT(mTrustedRoot);
- if (!policy.IsAnyPolicy()) {
- return Result::FATAL_ERROR_INVALID_ARGS;
- }
- if (!mTrustedRoot) {
- return Result::FATAL_ERROR_INVALID_STATE;
- }
-
- // Handle active distrust of the certificate.
-
- // XXX: This would be cleaner and more efficient if we could get the trust
- // information without constructing a CERTCertificate here, but NSS doesn't
- // expose it in any other easy-to-use fashion.
- SECItem candidateCertDERSECItem =
- UnsafeMapInputToSECItem(candidateCertDER);
- UniqueCERTCertificate candidateCert(
- CERT_NewTempCertificate(CERT_GetDefaultCertDB(), &candidateCertDERSECItem,
- nullptr, false, true));
- if (!candidateCert) {
- return MapPRErrorCodeToResult(PR_GetError());
- }
-
- CERTCertTrust trust;
- if (CERT_GetCertTrust(candidateCert.get(), &trust) == SECSuccess) {
- uint32_t flags = SEC_GET_TRUST_FLAGS(&trust, trustObjectSigning);
-
- // For DISTRUST, we use the CERTDB_TRUSTED or CERTDB_TRUSTED_CA bit,
- // because we can have active distrust for either type of cert. Note that
- // CERTDB_TERMINAL_RECORD means "stop trying to inherit trust" so if the
- // relevant trust bit isn't set then that means the cert must be considered
- // distrusted.
- uint32_t relevantTrustBit = endEntityOrCA == EndEntityOrCA::MustBeCA
- ? CERTDB_TRUSTED_CA
- : CERTDB_TRUSTED;
- if (((flags & (relevantTrustBit | CERTDB_TERMINAL_RECORD)))
- == CERTDB_TERMINAL_RECORD) {
- trustLevel = TrustLevel::ActivelyDistrusted;
- return Success;
- }
- }
-
- // mTrustedRoot is the only trust anchor for this validation.
- if (CERT_CompareCerts(mTrustedRoot.get(), candidateCert.get())) {
- trustLevel = TrustLevel::TrustAnchor;
- return Success;
- }
-
- trustLevel = TrustLevel::InheritsTrust;
- return Success;
-}
-
-Result
-AppTrustDomain::DigestBuf(Input item,
- DigestAlgorithm digestAlg,
- /*out*/ uint8_t* digestBuf,
- size_t digestBufLen)
-{
- return DigestBufNSS(item, digestAlg, digestBuf, digestBufLen);
-}
-
-Result
-AppTrustDomain::CheckRevocation(EndEntityOrCA, const CertID&, Time, Duration,
- /*optional*/ const Input*,
- /*optional*/ const Input*,
- /*optional*/ const Input*)
-{
- // We don't currently do revocation checking. If we need to distrust an Apps
- // certificate, we will use the active distrust mechanism.
- return Success;
-}
-
-Result
-AppTrustDomain::IsChainValid(const DERArray& certChain, Time time,
- const CertPolicyId& requiredPolicy)
-{
- SECStatus srv = ConstructCERTCertListFromReversedDERArray(certChain,
- mCertChain);
- if (srv != SECSuccess) {
- return MapPRErrorCodeToResult(PR_GetError());
- }
- return Success;
-}
-
-Result
-AppTrustDomain::CheckSignatureDigestAlgorithm(DigestAlgorithm,
- EndEntityOrCA,
- Time)
-{
- // TODO: We should restrict signatures to SHA-256 or better.
- return Success;
-}
-
-Result
-AppTrustDomain::CheckRSAPublicKeyModulusSizeInBits(
- EndEntityOrCA /*endEntityOrCA*/, unsigned int modulusSizeInBits)
-{
- if (modulusSizeInBits < mMinRSABits) {
- return Result::ERROR_INADEQUATE_KEY_SIZE;
- }
- return Success;
-}
-
-Result
-AppTrustDomain::VerifyRSAPKCS1SignedDigest(const SignedDigest& signedDigest,
- Input subjectPublicKeyInfo)
-{
- // TODO: We should restrict signatures to SHA-256 or better.
- return VerifyRSAPKCS1SignedDigestNSS(signedDigest, subjectPublicKeyInfo,
- mPinArg);
-}
-
-Result
-AppTrustDomain::CheckECDSACurveIsAcceptable(EndEntityOrCA /*endEntityOrCA*/,
- NamedCurve curve)
-{
- switch (curve) {
- case NamedCurve::secp256r1: // fall through
- case NamedCurve::secp384r1: // fall through
- case NamedCurve::secp521r1:
- return Success;
- }
-
- return Result::ERROR_UNSUPPORTED_ELLIPTIC_CURVE;
-}
-
-Result
-AppTrustDomain::VerifyECDSASignedDigest(const SignedDigest& signedDigest,
- Input subjectPublicKeyInfo)
-{
- return VerifyECDSASignedDigestNSS(signedDigest, subjectPublicKeyInfo,
- mPinArg);
-}
-
-Result
-AppTrustDomain::CheckValidityIsAcceptable(Time /*notBefore*/, Time /*notAfter*/,
- EndEntityOrCA /*endEntityOrCA*/,
- KeyPurposeId /*keyPurpose*/)
-{
- return Success;
-}
-
-Result
-AppTrustDomain::NetscapeStepUpMatchesServerAuth(Time /*notBefore*/,
- /*out*/ bool& matches)
-{
- matches = false;
- return Success;
-}
-
-void
-AppTrustDomain::NoteAuxiliaryExtension(AuxiliaryExtension /*extension*/,
- Input /*extensionData*/)
-{
-}
-
-} } // namespace mozilla::psm
diff --git a/security/apps/AppTrustDomain.h b/security/apps/AppTrustDomain.h
deleted file mode 100644
index e4a8ec5e50..0000000000
--- a/security/apps/AppTrustDomain.h
+++ /dev/null
@@ -1,89 +0,0 @@
-/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef AppTrustDomain_h
-#define AppTrustDomain_h
-
-#include "pkix/pkixtypes.h"
-#include "mozilla/StaticMutex.h"
-#include "mozilla/UniquePtr.h"
-#include "nsDebug.h"
-#include "nsIX509CertDB.h"
-#include "ScopedNSSTypes.h"
-
-namespace mozilla { namespace psm {
-
-class AppTrustDomain final : public mozilla::pkix::TrustDomain
-{
-public:
- typedef mozilla::pkix::Result Result;
-
- AppTrustDomain(UniqueCERTCertList& certChain, void* pinArg);
-
- SECStatus SetTrustedRoot(AppTrustedRoot trustedRoot);
-
- virtual Result GetCertTrust(mozilla::pkix::EndEntityOrCA endEntityOrCA,
- const mozilla::pkix::CertPolicyId& policy,
- mozilla::pkix::Input candidateCertDER,
- /*out*/ mozilla::pkix::TrustLevel& trustLevel)
- override;
- virtual Result FindIssuer(mozilla::pkix::Input encodedIssuerName,
- IssuerChecker& checker,
- mozilla::pkix::Time time) override;
- virtual Result CheckRevocation(mozilla::pkix::EndEntityOrCA endEntityOrCA,
- const mozilla::pkix::CertID& certID,
- mozilla::pkix::Time time,
- mozilla::pkix::Duration validityDuration,
- /*optional*/ const mozilla::pkix::Input* stapledOCSPresponse,
- /*optional*/ const mozilla::pkix::Input* aiaExtension,
- /*optional*/ const mozilla::pkix::Input* sctExtension) override;
- virtual Result IsChainValid(const mozilla::pkix::DERArray& certChain,
- mozilla::pkix::Time time,
- const mozilla::pkix::CertPolicyId& requiredPolicy) override;
- virtual Result CheckSignatureDigestAlgorithm(
- mozilla::pkix::DigestAlgorithm digestAlg,
- mozilla::pkix::EndEntityOrCA endEntityOrCA,
- mozilla::pkix::Time notBefore) override;
- virtual Result CheckRSAPublicKeyModulusSizeInBits(
- mozilla::pkix::EndEntityOrCA endEntityOrCA,
- unsigned int modulusSizeInBits) override;
- virtual Result VerifyRSAPKCS1SignedDigest(
- const mozilla::pkix::SignedDigest& signedDigest,
- mozilla::pkix::Input subjectPublicKeyInfo) override;
- virtual Result CheckECDSACurveIsAcceptable(
- mozilla::pkix::EndEntityOrCA endEntityOrCA,
- mozilla::pkix::NamedCurve curve) override;
- virtual Result VerifyECDSASignedDigest(
- const mozilla::pkix::SignedDigest& signedDigest,
- mozilla::pkix::Input subjectPublicKeyInfo) override;
- virtual Result CheckValidityIsAcceptable(
- mozilla::pkix::Time notBefore, mozilla::pkix::Time notAfter,
- mozilla::pkix::EndEntityOrCA endEntityOrCA,
- mozilla::pkix::KeyPurposeId keyPurpose) override;
- virtual Result NetscapeStepUpMatchesServerAuth(
- mozilla::pkix::Time notBefore,
- /*out*/ bool& matches) override;
- virtual void NoteAuxiliaryExtension(
- mozilla::pkix::AuxiliaryExtension extension,
- mozilla::pkix::Input extensionData) override;
- virtual Result DigestBuf(mozilla::pkix::Input item,
- mozilla::pkix::DigestAlgorithm digestAlg,
- /*out*/ uint8_t* digestBuf,
- size_t digestBufLen) override;
-
-private:
- /*out*/ UniqueCERTCertList& mCertChain;
- void* mPinArg; // non-owning!
- UniqueCERTCertificate mTrustedRoot;
- unsigned int mMinRSABits;
-
- static StaticMutex sMutex;
- static UniquePtr<unsigned char[]> sDevImportedDERData;
- static unsigned int sDevImportedDERLen;
-};
-
-} } // namespace mozilla::psm
-
-#endif // AppTrustDomain_h
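For context, this trust domain was driven by the verification code in AppSignatureVerification.cpp removed above; the intended call pattern was roughly the following sketch (error handling and the certificate-chain-building arguments are abbreviated and assumed):

UniqueCERTCertList builtChain;
AppTrustDomain trustDomain(builtChain, /* pinArg */ nullptr);
if (trustDomain.SetTrustedRoot(trustedRoot) != SECSuccess) {
  return MapSECStatus(SECFailure);
}
// mozilla::pkix::BuildCertChain() is then run against this trust domain to
// validate the signer certificate for code signing, filling builtChain on
// success.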
diff --git a/security/apps/addons-public.crt b/security/apps/addons-public.crt
deleted file mode 100644
index 6ab711b996..0000000000
--- a/security/apps/addons-public.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/addons-stage.crt b/security/apps/addons-stage.crt
deleted file mode 100644
index 73e48cadfe..0000000000
--- a/security/apps/addons-stage.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/gen_cert_header.py b/security/apps/gen_cert_header.py
deleted file mode 100644
index 0ffe25cf4e..0000000000
--- a/security/apps/gen_cert_header.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-import binascii
-
-def _file_byte_generator(filename):
- with open(filename, "rb") as f:
- contents = f.read()
-
- # Treat empty files the same as a file containing a lone 0;
-# a single-element array will fail cert verification just as an
- # empty array would.
- if not contents:
- return ['\0']
-
- return contents
-
-def _create_header(array_name, cert_bytes):
- hexified = ["0x" + binascii.hexlify(byte) for byte in cert_bytes]
- substs = { 'array_name': array_name, 'bytes': ', '.join(hexified) }
- return "const uint8_t %(array_name)s[] = {\n%(bytes)s\n};\n" % substs
-
-# Create functions named the same as the data arrays that we're going to
-# write to the headers, so we don't have to duplicate the names like so:
-#
-# def arrayName(header, cert_filename):
-# header.write(_create_header("arrayName", cert_filename))
-array_names = [
- 'marketplaceProdPublicRoot',
- 'marketplaceProdReviewersRoot',
- 'marketplaceDevPublicRoot',
- 'marketplaceDevReviewersRoot',
- 'marketplaceStageRoot',
- 'trustedAppPublicRoot',
- 'trustedAppTestRoot',
- 'xpcshellRoot',
- 'addonsPublicRoot',
- 'addonsStageRoot',
- 'privilegedPackageRoot',
-]
-
-for n in array_names:
- # Make sure the lambda captures the right string.
- globals()[n] = lambda header, cert_filename, name=n: header.write(_create_header(name, _file_byte_generator(cert_filename)))
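Each generated .inc file consumed by AppTrustDomain.cpp is just a C byte array; _create_header() emits output of roughly this shape (byte values truncated and invented for illustration):

const uint8_t addonsPublicRoot[] = {
0x30, 0x82, 0x04, 0x8c, 0x30, 0x82, 0x02, 0x74 /* ... remaining DER bytes ... */
};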
diff --git a/security/apps/marketplace-dev-public.crt b/security/apps/marketplace-dev-public.crt
deleted file mode 100644
index 490b8682b7..0000000000
--- a/security/apps/marketplace-dev-public.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/marketplace-dev-reviewers.crt b/security/apps/marketplace-dev-reviewers.crt
deleted file mode 100644
index 5b8bde9337..0000000000
--- a/security/apps/marketplace-dev-reviewers.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/marketplace-prod-public.crt b/security/apps/marketplace-prod-public.crt
deleted file mode 100644
index 85c2fed92a..0000000000
--- a/security/apps/marketplace-prod-public.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/marketplace-prod-reviewers.crt b/security/apps/marketplace-prod-reviewers.crt
deleted file mode 100644
index 53be8c81ed..0000000000
--- a/security/apps/marketplace-prod-reviewers.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/marketplace-stage.crt b/security/apps/marketplace-stage.crt
deleted file mode 100644
index 84504f3574..0000000000
--- a/security/apps/marketplace-stage.crt
+++ /dev/null
Binary files differ
diff --git a/security/apps/moz.build b/security/apps/moz.build
deleted file mode 100644
index 365379881b..0000000000
--- a/security/apps/moz.build
+++ /dev/null
@@ -1,43 +0,0 @@
-# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-UNIFIED_SOURCES += [
- 'AppSignatureVerification.cpp',
- 'AppTrustDomain.cpp',
-]
-
-FINAL_LIBRARY = 'xul'
-
-LOCAL_INCLUDES += [
- '/security/certverifier',
- '/security/manager/ssl',
- '/security/pkix/include',
-]
-
-DEFINES['NSS_ENABLE_ECC'] = 'True'
-for var in ('DLL_PREFIX', 'DLL_SUFFIX'):
- DEFINES[var] = '"%s"' % CONFIG[var]
-
-test_ssl_path = '/security/manager/ssl/tests/unit'
-
-headers_arrays_certs = [
- ('marketplace-prod-public.inc', 'marketplaceProdPublicRoot', 'marketplace-prod-public.crt'),
- ('marketplace-prod-reviewers.inc', 'marketplaceProdReviewersRoot', 'marketplace-prod-reviewers.crt'),
- ('marketplace-dev-public.inc', 'marketplaceDevPublicRoot', 'marketplace-dev-public.crt'),
- ('marketplace-dev-reviewers.inc', 'marketplaceDevReviewersRoot', 'marketplace-dev-reviewers.crt'),
- ('marketplace-stage.inc', 'marketplaceStageRoot', 'marketplace-stage.crt'),
- ('manifest-signing-root.inc', 'trustedAppPublicRoot', 'trusted-app-public.der'),
- ('manifest-signing-test-root.inc', 'trustedAppTestRoot', test_ssl_path + '/test_signed_manifest/trusted_ca1.der'),
- ('xpcshell.inc', 'xpcshellRoot', test_ssl_path + '/test_signed_apps/trusted_ca1.der'),
- ('addons-public.inc', 'addonsPublicRoot', 'addons-public.crt'),
- ('addons-stage.inc', 'addonsStageRoot', 'addons-stage.crt'),
- ('privileged-package-root.inc', 'privilegedPackageRoot', 'privileged-package-root.der'),
-]
-
-for header, array_name, cert in headers_arrays_certs:
- GENERATED_FILES += [header]
- h = GENERATED_FILES[header]
- h.script = 'gen_cert_header.py:' + array_name
- h.inputs = [cert]
diff --git a/security/apps/privileged-package-root.der b/security/apps/privileged-package-root.der
deleted file mode 100644
index 9f77af5823..0000000000
--- a/security/apps/privileged-package-root.der
+++ /dev/null
Binary files differ
diff --git a/security/apps/trusted-app-public.der b/security/apps/trusted-app-public.der
deleted file mode 100644
index e69de29bb2..0000000000
--- a/security/apps/trusted-app-public.der
+++ /dev/null
diff --git a/security/manager/ssl/nsIX509CertDB.idl b/security/manager/ssl/nsIX509CertDB.idl
index 44d8e0588c..1dbef22fbd 100644
--- a/security/manager/ssl/nsIX509CertDB.idl
+++ b/security/manager/ssl/nsIX509CertDB.idl
@@ -265,74 +265,11 @@ interface nsIX509CertDB : nsISupports {
*/
nsIX509Cert constructX509(in string certDER, in unsigned long length);
- /**
- * Verifies the signature on the given JAR file to verify that it has a
- * valid signature. To be considered valid, there must be exactly one
- * signature on the JAR file and that signature must have signed every
- * entry. Further, the signature must come from a certificate that
- * is trusted for code signing.
- *
- * On success, NS_OK, a nsIZipReader, and the trusted certificate that
- * signed the JAR are returned.
- *
- * On failure, an error code is returned.
- *
- * This method returns a nsIZipReader, instead of taking an nsIZipReader
- * as input, to encourage users of the API to verify the signature as the
- * first step in opening the JAR.
- */
- const AppTrustedRoot AppMarketplaceProdPublicRoot = 1;
- const AppTrustedRoot AppMarketplaceProdReviewersRoot = 2;
- const AppTrustedRoot AppMarketplaceDevPublicRoot = 3;
- const AppTrustedRoot AppMarketplaceDevReviewersRoot = 4;
- const AppTrustedRoot AppMarketplaceStageRoot = 5;
- const AppTrustedRoot AppXPCShellRoot = 6;
- const AppTrustedRoot AddonsPublicRoot = 7;
- const AppTrustedRoot AddonsStageRoot = 8;
- const AppTrustedRoot PrivilegedPackageRoot = 9;
- /*
- * If DeveloperImportedRoot is set as trusted root, a CA from local file
- * system will be imported. Only used when preference
- * "network.http.packaged-apps-developer-mode" is set.
- * The path of the CA is specified by preference
- * "network.http.packaged-apps-developer-trusted-root".
- */
- const AppTrustedRoot DeveloperImportedRoot = 10;
- void openSignedAppFileAsync(in AppTrustedRoot trustedRoot,
- in nsIFile aJarFile,
- in nsIOpenSignedAppFileCallback callback);
-
- /**
- * Verifies the signature on a directory representing an unpacked signed
- * JAR file. To be considered valid, there must be exactly one signature
- * on the directory structure and that signature must have signed every
- * entry. Further, the signature must come from a certificate that
- * is trusted for code signing.
- *
- * On success NS_OK and the trusted certificate that signed the
- * unpacked JAR are returned.
- *
- * On failure, an error code is returned.
- */
- void verifySignedDirectoryAsync(in AppTrustedRoot trustedRoot,
- in nsIFile aUnpackedDir,
- in nsIVerifySignedDirectoryCallback callback);
-
- /**
- * Given streams containing a signature and a manifest file, verifies
- * that the signature is valid for the manifest. The signature must
- * come from a certificate that is trusted for code signing and that
- * was issued by the given trusted root.
- *
- * On success, NS_OK and the trusted certificate that signed the
- * Manifest are returned.
- *
- * On failure, an error code is returned.
- */
- void verifySignedManifestAsync(in AppTrustedRoot trustedRoot,
- in nsIInputStream aManifestStream,
- in nsIInputStream aSignatureStream,
- in nsIVerifySignedManifestCallback callback);
+ // Flags to indicate the type of cert root for signed extensions
+ // This can probably be removed eventually.
+ const AppTrustedRoot AddonsPublicRoot = 1;
+ const AppTrustedRoot AddonsStageRoot = 2;
+ const AppTrustedRoot PrivilegedPackageRoot = 3;
/*
* Add a cert to a cert DB from a binary string.
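For context, the removed async entry points were scriptable XPCOM methods; the JAR variant was reachable from C++ roughly as follows (a sketch assuming the caller already holds a jarFile nsIFile and a callback implementation; the contract ID is the standard certificate DB service):

nsCOMPtr<nsIX509CertDB> certDB =
  do_GetService("@mozilla.org/security/x509certdb;1");
if (certDB) {
  certDB->OpenSignedAppFileAsync(nsIX509CertDB::AddonsPublicRoot, jarFile,
                                 callback);
}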
diff --git a/settings.gradle b/settings.gradle
deleted file mode 100644
index 9d75e3242b..0000000000
--- a/settings.gradle
+++ /dev/null
@@ -1 +0,0 @@
-throw new GradleException("Building with Gradle is not supported.")
diff --git a/toolkit/components/moz.build b/toolkit/components/moz.build
index cd80ca1197..759b07e18e 100644
--- a/toolkit/components/moz.build
+++ b/toolkit/components/moz.build
@@ -33,7 +33,6 @@ DIRS += [
'lz4',
'mediasniffer',
'microformats',
- 'mozprotocol',
'osfile',
'parentalcontrols',
'passwordmgr',
diff --git a/toolkit/components/mozprotocol/moz.build b/toolkit/components/mozprotocol/moz.build
deleted file mode 100644
index b96a64ec2a..0000000000
--- a/toolkit/components/mozprotocol/moz.build
+++ /dev/null
@@ -1,9 +0,0 @@
-# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-EXTRA_COMPONENTS += [
- 'mozProtocolHandler.js',
- 'mozProtocolHandler.manifest',
-]
diff --git a/toolkit/components/mozprotocol/mozProtocolHandler.js b/toolkit/components/mozprotocol/mozProtocolHandler.js
deleted file mode 100644
index 97bfb737e4..0000000000
--- a/toolkit/components/mozprotocol/mozProtocolHandler.js
+++ /dev/null
@@ -1,48 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-"use strict";
-
-const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
-
-Cu.import("resource://gre/modules/XPCOMUtils.jsm");
-Cu.import("resource://gre/modules/Services.jsm");
-Cu.import("resource://gre/modules/NetUtil.jsm");
-
-function mozProtocolHandler() {
- XPCOMUtils.defineLazyPreferenceGetter(this, "urlToLoad", "toolkit.mozprotocol.url",
- "http://thereisonlyxul.org/");
-}
-
-mozProtocolHandler.prototype = {
- scheme: "moz",
- defaultPort: -1,
- protocolFlags: Ci.nsIProtocolHandler.URI_DANGEROUS_TO_LOAD,
-
- newURI(spec, charset, base) {
- let uri = Cc["@mozilla.org/network/simple-uri;1"].createInstance(Ci.nsIURI);
- if (base) {
- uri.spec = base.resolve(spec);
- } else {
- uri.spec = spec;
- }
- return uri;
- },
-
- newChannel2(uri, loadInfo) {
- let realURL = NetUtil.newURI(this.urlToLoad);
- let channel = Services.io.newChannelFromURIWithLoadInfo(realURL, loadInfo)
- channel.loadFlags |= Ci.nsIChannel.LOAD_REPLACE;
- return channel;
- },
-
- newChannel(uri) {
- return this.newChannel2(uri, null);
- },
-
- classID: Components.ID("{47a45e5f-691e-4799-8686-14f8d3fc0f8c}"),
-
- QueryInterface: XPCOMUtils.generateQI([Ci.nsIProtocolHandler]),
-};
-
-this.NSGetFactory = XPCOMUtils.generateNSGetFactory([mozProtocolHandler]);
diff --git a/toolkit/components/mozprotocol/mozProtocolHandler.manifest b/toolkit/components/mozprotocol/mozProtocolHandler.manifest
deleted file mode 100644
index bbfdf780af..0000000000
--- a/toolkit/components/mozprotocol/mozProtocolHandler.manifest
+++ /dev/null
@@ -1,2 +0,0 @@
-component {47a45e5f-691e-4799-8686-14f8d3fc0f8c} mozProtocolHandler.js
-contract @mozilla.org/network/protocol;1?name=moz {47a45e5f-691e-4799-8686-14f8d3fc0f8c}
diff --git a/toolkit/mozapps/installer/packager.mk b/toolkit/mozapps/installer/packager.mk
index 6cae156580..99bb0de911 100644
--- a/toolkit/mozapps/installer/packager.mk
+++ b/toolkit/mozapps/installer/packager.mk
@@ -62,19 +62,6 @@ ifdef MOZ_PACKAGE_JSSHELL
$(RM) $(PKG_JSSHELL)
$(MAKE_JSSHELL)
endif # MOZ_PACKAGE_JSSHELL
-ifdef MOZ_ARTIFACT_BUILD_SYMBOLS
- @echo 'Packaging existing crashreporter symbols from artifact build...'
- $(NSINSTALL) -D $(DIST)/$(PKG_PATH)
- cd $(DIST)/crashreporter-symbols && \
- zip -r5D '../$(PKG_PATH)$(SYMBOL_ARCHIVE_BASENAME).zip' . -i '*.sym' -i '*.txt'
-endif # MOZ_ARTIFACT_BUILD_SYMBOLS
-ifdef MOZ_CODE_COVERAGE
- # Package code coverage gcno tree
- @echo 'Packaging code coverage data...'
- $(RM) $(CODE_COVERAGE_ARCHIVE_BASENAME).zip
- $(PYTHON) -mmozbuild.codecoverage.packager \
- --output-file='$(DIST)/$(PKG_PATH)$(CODE_COVERAGE_ARCHIVE_BASENAME).zip'
-endif
ifeq (Darwin, $(OS_ARCH))
ifdef MOZ_ASAN
@echo "Rewriting ASan runtime dylib paths for all binaries in $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH) ..."
diff --git a/toolkit/mozapps/installer/upload-files.mk b/toolkit/mozapps/installer/upload-files.mk
index 1bbccecb2f..3f0d1f7061 100644
--- a/toolkit/mozapps/installer/upload-files.mk
+++ b/toolkit/mozapps/installer/upload-files.mk
@@ -446,11 +446,6 @@ UPLOAD_FILES= \
$(call QUOTED_WILDCARD,$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip) \
$(if $(UPLOAD_EXTRA_FILES), $(foreach f, $(UPLOAD_EXTRA_FILES), $(wildcard $(DIST)/$(f))))
-ifdef MOZ_CODE_COVERAGE
- UPLOAD_FILES += \
- $(call QUOTED_WILDCARD,$(DIST)/$(PKG_PATH)$(CODE_COVERAGE_ARCHIVE_BASENAME).zip)
-endif
-
ifdef UNIFY_DIST
UNIFY_ARCH := $(notdir $(patsubst %/,%,$(dir $(UNIFY_DIST))))
UPLOAD_FILES += \
diff --git a/toolkit/toolkit.mozbuild b/toolkit/toolkit.mozbuild
index b8f30d64a9..bb5cac7cba 100644
--- a/toolkit/toolkit.mozbuild
+++ b/toolkit/toolkit.mozbuild
@@ -13,8 +13,6 @@ if CONFIG['MOZ_MAILNEWS']:
DIRS += [
# Depends on NSS and NSPR
'/security/certverifier',
- # Depends on certverifier
- '/security/apps',
]
# MAR support at all times.