Radix cross Linux

The main Radix cross Linux repository contains the build scripts of packages that provide the most complete and commonly needed functionality for desktop machines.

452 Commits   2 Branches   1 Tag
Index: create.patch.sh
===================================================================
--- create.patch.sh	(nonexistent)
+++ create.patch.sh	(revision 228)
@@ -0,0 +1,15 @@
#!/bin/sh

# Create the "emitter" patch for the Firefox ESR sources:
#  1. extract the pristine files listed in file.list from the source tarball,
#  2. diff them against the locally modified copies in firefox-$VERSION-new,
#  3. store the resulting unified patch under ../patches.

# Abort on the first failing command so a broken extraction cannot lead to
# diffing (or deleting) the wrong trees.
set -e

VERSION=102.15.0

tar --files-from=file.list -xJvf "../firefox-${VERSION}esr.source.tar.xz"
mv "firefox-${VERSION}" "firefox-${VERSION}-orig"

cp -rf "./firefox-${VERSION}-new" "./firefox-${VERSION}"

# diff exits with status 1 when the trees differ, which is the expected
# outcome here -- tolerate it explicitly so `set -e` does not abort.
diff --unified -Nr "firefox-${VERSION}-orig" "firefox-${VERSION}" > "firefox-${VERSION}-emitter.patch" || [ $? -eq 1 ]

mv "firefox-${VERSION}-emitter.patch" ../patches

rm -rf "./firefox-${VERSION}"
rm -rf "./firefox-${VERSION}-orig"

Property changes on: create.patch.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: file.list
===================================================================
--- file.list	(nonexistent)
+++ file.list	(revision 228)
@@ -0,0 +1,2 @@
+firefox-102.15.0/python/mozbuild/mozbuild/frontend/emitter.py
+firefox-102.15.0/python/mozbuild/mozbuild/test/frontend/test_emitter.py
Index: firefox-102.15.0-new/python/mozbuild/mozbuild/frontend/emitter.py
===================================================================
--- firefox-102.15.0-new/python/mozbuild/mozbuild/frontend/emitter.py	(nonexistent)
+++ firefox-102.15.0-new/python/mozbuild/mozbuild/frontend/emitter.py	(revision 228)
@@ -0,0 +1,1889 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import logging
+import os
+import six
+import sys
+import time
+import traceback
+
+from collections import defaultdict, OrderedDict
+from mach.mixin.logging import LoggingMixin
+from mozbuild.util import memoize, OrderedDefaultDict
+
+import mozpack.path as mozpath
+import mozinfo
+import pytoml
+
+from .data import (
+    BaseRustProgram,
+    ChromeManifestEntry,
+    ComputedFlags,
+    ConfigFileSubstitution,
+    Defines,
+    DirectoryTraversal,
+    Exports,
+    FinalTargetFiles,
+    FinalTargetPreprocessedFiles,
+    GeneratedFile,
+    ExternalStaticLibrary,
+    ExternalSharedLibrary,
+    HostDefines,
+    HostLibrary,
+    HostProgram,
+    HostRustProgram,
+    HostSharedLibrary,
+    HostSimpleProgram,
+    HostSources,
+    InstallationTarget,
+    IPDLCollection,
+    JARManifest,
+    Library,
+    Linkable,
+    LocalInclude,
+    LocalizedFiles,
+    LocalizedPreprocessedFiles,
+    ObjdirFiles,
+    ObjdirPreprocessedFiles,
+    PerSourceFlag,
+    WebIDLCollection,
+    Program,
+    RustLibrary,
+    HostRustLibrary,
+    RustProgram,
+    RustTests,
+    SandboxedWasmLibrary,
+    SharedLibrary,
+    SimpleProgram,
+    Sources,
+    StaticLibrary,
+    TestHarnessFiles,
+    TestManifest,
+    UnifiedSources,
+    VariablePassthru,
+    WasmDefines,
+    WasmSources,
+    XPCOMComponentManifests,
+    XPIDLModule,
+)
+from mozpack.chrome.manifest import Manifest
+
+from .reader import SandboxValidationError
+
+from ..testing import TEST_MANIFESTS, REFTEST_FLAVORS, SupportFilesConverter
+
+from .context import Context, SourcePath, ObjDirPath, Path, SubContext
+
+from mozbuild.base import ExecutionSummary
+
+
+class TreeMetadataEmitter(LoggingMixin):
+    """Converts the executed mozbuild files into data structures.
+
+    This is a bridge between reader.py and data.py. It takes what was read by
+    reader.BuildReader and converts it into the classes defined in the data
+    module.
+    """
+
+    def __init__(self, config):
+        self.populate_logger()
+
+        self.config = config
+
+        mozinfo.find_and_update_from_json(config.topobjdir)
+
+        self.info = dict(mozinfo.info)
+
+        self._libs = OrderedDefaultDict(list)
+        self._binaries = OrderedDict()
+        self._compile_dirs = set()
+        self._host_compile_dirs = set()
+        self._wasm_compile_dirs = set()
+        self._asm_compile_dirs = set()
+        self._compile_flags = dict()
+        self._compile_as_flags = dict()
+        self._linkage = []
+        self._static_linking_shared = set()
+        self._crate_verified_local = set()
+        self._crate_directories = dict()
+        self._idls = defaultdict(set)
+
+        # Keep track of external paths (third party build systems), starting
+        # from what we run a subconfigure in. We'll eliminate some directories
+        # as we traverse them with moz.build (e.g. js/src).
+        subconfigures = os.path.join(self.config.topobjdir, "subconfigures")
+        paths = []
+        if os.path.exists(subconfigures):
+            paths = open(subconfigures).read().splitlines()
+        self._external_paths = set(mozpath.normsep(d) for d in paths)
+
+        self._emitter_time = 0.0
+        self._object_count = 0
+        self._test_files_converter = SupportFilesConverter()
+
+    def summary(self):
+        return ExecutionSummary(
+            "Processed into {object_count:d} build config descriptors in "
+            "{execution_time:.2f}s",
+            execution_time=self._emitter_time,
+            object_count=self._object_count,
+        )
+
    def emit(self, output, emitfn=None):
        """Convert the BuildReader output into data structures.

        The return value from BuildReader.read_topsrcdir() (a generator) is
        typically fed into this function.

        :param output: iterable yielding Context (and SubContext) instances.
        :param emitfn: optional callable converting a single Context into
            emitted objects; defaults to ``self.emit_from_context``.
        """
        contexts = {}
        emitfn = emitfn or self.emit_from_context

        # Wrapper generator counting every emitted object, so summary() can
        # report the total descriptor count.
        def emit_objs(objs):
            for o in objs:
                self._object_count += 1
                yield o

        for out in output:
            # Nothing in sub-contexts is currently of interest to us. Filter
            # them all out.
            if isinstance(out, SubContext):
                continue

            if isinstance(out, Context):
                # Keep all contexts around, we will need them later.
                contexts[os.path.normcase(out.objdir)] = out

                start = time.time()
                # We need to expand the generator for the timings to work.
                objs = list(emitfn(out))
                self._emitter_time += time.time() - start

                for o in emit_objs(objs):
                    yield o

            else:
                raise Exception("Unhandled output type: %s" % type(out))

        # Don't emit Linkable objects when COMPILE_ENVIRONMENT is not set
        if self.config.substs.get("COMPILE_ENVIRONMENT"):
            start = time.time()
            objs = list(self._emit_libs_derived(contexts))
            self._emitter_time += time.time() - start

            for o in emit_objs(objs):
                yield o
+
    def _emit_libs_derived(self, contexts):
        """Emit objects derived from all libraries, once every context has
        been processed: IDL collections, resolved library linkage, compile
        flags and binaries.

        :param contexts: dict mapping normalized objdir -> Context.
        """

        # First aggregate idl sources.
        webidl_attrs = [
            ("GENERATED_EVENTS_WEBIDL_FILES", lambda c: c.generated_events_sources),
            ("GENERATED_WEBIDL_FILES", lambda c: c.generated_sources),
            ("PREPROCESSED_TEST_WEBIDL_FILES", lambda c: c.preprocessed_test_sources),
            ("PREPROCESSED_WEBIDL_FILES", lambda c: c.preprocessed_sources),
            ("TEST_WEBIDL_FILES", lambda c: c.test_sources),
            ("WEBIDL_FILES", lambda c: c.sources),
            ("WEBIDL_EXAMPLE_INTERFACES", lambda c: c.example_interfaces),
        ]
        ipdl_attrs = [
            ("IPDL_SOURCES", lambda c: c.sources),
            ("PREPROCESSED_IPDL_SOURCES", lambda c: c.preprocessed_sources),
        ]
        xpcom_attrs = [("XPCOM_MANIFESTS", lambda c: c.manifests)]

        idl_sources = {}
        for root, cls, attrs in (
            (self.config.substs.get("WEBIDL_ROOT"), WebIDLCollection, webidl_attrs),
            (self.config.substs.get("IPDL_ROOT"), IPDLCollection, ipdl_attrs),
            (
                self.config.substs.get("XPCOM_ROOT"),
                XPCOMComponentManifests,
                xpcom_attrs,
            ),
        ):
            if root:
                # The collection lives in the context of its configured root
                # directory; feed it everything accumulated in self._idls.
                collection = cls(contexts[os.path.normcase(root)])
                for var, src_getter in attrs:
                    src_getter(collection).update(self._idls[var])

                idl_sources[root] = collection.all_source_files()
                if isinstance(collection, WebIDLCollection):
                    # Test webidl sources are added here as a somewhat special
                    # case.
                    idl_sources[mozpath.join(root, "test")] = [
                        s for s in collection.all_test_cpp_basenames()
                    ]

                yield collection

        # Next do FINAL_LIBRARY linkage.
        for lib in (l for libs in self._libs.values() for l in libs):
            if not isinstance(lib, (StaticLibrary, RustLibrary)) or not lib.link_into:
                continue
            if lib.link_into not in self._libs:
                raise SandboxValidationError(
                    'FINAL_LIBRARY ("%s") does not match any LIBRARY_NAME'
                    % lib.link_into,
                    contexts[os.path.normcase(lib.objdir)],
                )
            candidates = self._libs[lib.link_into]

            # When there are multiple candidates, but all are in the same
            # directory and have a different type, we want all of them to
            # have the library linked. The typical usecase is when building
            # both a static and a shared library in a directory, and having
            # that as a FINAL_LIBRARY.
            if (
                len(set(type(l) for l in candidates)) == len(candidates)
                and len(set(l.objdir for l in candidates)) == 1
            ):
                for c in candidates:
                    c.link_library(lib)
            else:
                raise SandboxValidationError(
                    'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in '
                    "multiple places:\n    %s"
                    % (lib.link_into, "\n    ".join(l.objdir for l in candidates)),
                    contexts[os.path.normcase(lib.objdir)],
                )

        # ...and USE_LIBS linkage.
        for context, obj, variable in self._linkage:
            self._link_libraries(context, obj, variable, idl_sources)

        def recurse_refs(lib):
            # Yield every object referencing lib, transitively through
            # static libraries.
            for o in lib.refs:
                yield o
                if isinstance(o, StaticLibrary):
                    for q in recurse_refs(o):
                        yield q

        # Check that all static libraries refering shared libraries in
        # USE_LIBS are linked into a shared library or program.
        for lib in self._static_linking_shared:
            if all(isinstance(o, StaticLibrary) for o in recurse_refs(lib)):
                shared_libs = sorted(
                    l.basename
                    for l in lib.linked_libraries
                    if isinstance(l, SharedLibrary)
                )
                raise SandboxValidationError(
                    'The static "%s" library is not used in a shared library '
                    "or a program, but USE_LIBS contains the following shared "
                    "library names:\n    %s\n\nMaybe you can remove the "
                    'static "%s" library?'
                    % (lib.basename, "\n    ".join(shared_libs), lib.basename),
                    contexts[os.path.normcase(lib.objdir)],
                )

        @memoize
        def rust_libraries(obj):
            # Collect Rust libraries linked into obj, looking through
            # intermediate static/host/wasm libraries.
            libs = []
            for o in obj.linked_libraries:
                if isinstance(o, (HostRustLibrary, RustLibrary)):
                    libs.append(o)
                elif isinstance(o, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)):
                    libs.extend(rust_libraries(o))
            return libs

        def check_rust_libraries(obj):
            # At most one Rust library may end up in any linked artifact.
            rust_libs = set(rust_libraries(obj))
            if len(rust_libs) <= 1:
                return
            if isinstance(obj, (Library, HostLibrary)):
                what = '"%s" library' % obj.basename
            else:
                what = '"%s" program' % obj.name
            raise SandboxValidationError(
                "Cannot link the following Rust libraries into the %s:\n"
                "%s\nOnly one is allowed."
                % (
                    what,
                    "\n".join(
                        "  - %s" % r.basename
                        for r in sorted(rust_libs, key=lambda r: r.basename)
                    ),
                ),
                contexts[os.path.normcase(obj.objdir)],
            )

        # Propagate LIBRARY_DEFINES to all child libraries recursively.
        def propagate_defines(outerlib, defines):
            outerlib.lib_defines.update(defines)
            for lib in outerlib.linked_libraries:
                # Propagate defines only along FINAL_LIBRARY paths, not USE_LIBS
                # paths.
                if (
                    isinstance(lib, StaticLibrary)
                    and lib.link_into == outerlib.basename
                ):
                    propagate_defines(lib, defines)

        for lib in (l for libs in self._libs.values() for l in libs):
            if isinstance(lib, Library):
                propagate_defines(lib, lib.lib_defines)
            check_rust_libraries(lib)
            yield lib

        # Defines must be resolved after propagation above, in a second pass.
        for lib in (l for libs in self._libs.values() for l in libs):
            lib_defines = list(lib.lib_defines.get_defines())
            if lib_defines:
                objdir_flags = self._compile_flags[lib.objdir]
                objdir_flags.resolve_flags("LIBRARY_DEFINES", lib_defines)

                objdir_flags = self._compile_as_flags.get(lib.objdir)
                if objdir_flags:
                    objdir_flags.resolve_flags("LIBRARY_DEFINES", lib_defines)

        for flags_obj in self._compile_flags.values():
            yield flags_obj

        for flags_obj in self._compile_as_flags.values():
            yield flags_obj

        for obj in self._binaries.values():
            if isinstance(obj, Linkable):
                check_rust_libraries(obj)
            yield obj
+
    # Context variable that names the library built for each linkable KIND.
    LIBRARY_NAME_VAR = {
        "host": "HOST_LIBRARY_NAME",
        "target": "LIBRARY_NAME",
        "wasm": "SANDBOXED_WASM_LIBRARY_NAME",
    }

    # Substs variable giving the OS/architecture for each linkable KIND.
    ARCH_VAR = {"host": "HOST_OS_ARCH", "target": "OS_TARGET"}

    # Name of the stdc++ compatibility library linked in when
    # MOZ_STDCXX_COMPAT is set (see _link_libraries).
    STDCXXCOMPAT_NAME = {"host": "host_stdc++compat", "target": "stdc++compat"}
+
    def _link_libraries(self, context, obj, variable, extra_sources):
        """Add linkage declarations to a given object.

        :param context: the moz.build Context the linkable came from.
        :param obj: the Linkable to resolve linkage for.
        :param variable: "USE_LIBS" or "HOST_USE_LIBS".
        :param extra_sources: dict mapping objdir -> extra .cpp sources
            (generated IDL sources) to prepend to obj's sources.
        """
        assert isinstance(obj, Linkable)

        if context.objdir in extra_sources:
            # All "extra sources" are .cpp for the moment, and happen to come
            # first in order.
            obj.sources[".cpp"] = extra_sources[context.objdir] + obj.sources[".cpp"]

        for path in context.get(variable, []):
            self._link_library(context, obj, variable, path)

        # Link system libraries from OS_LIBS/HOST_OS_LIBS.
        for lib in context.get(variable.replace("USE", "OS"), []):
            obj.link_system_library(lib)

        # We have to wait for all the self._link_library calls above to have
        # happened for obj.cxx_link to be final.
        # FIXME: Theoretically, HostSharedLibrary shouldn't be here (bug
        # 1474022).
        if (
            not isinstance(
                obj, (StaticLibrary, HostLibrary, HostSharedLibrary, BaseRustProgram)
            )
            and obj.cxx_link
        ):
            if (
                context.config.substs.get("MOZ_STDCXX_COMPAT")
                and context.config.substs.get(self.ARCH_VAR.get(obj.KIND)) == "Linux"
            ):
                self._link_library(
                    context, obj, variable, self.STDCXXCOMPAT_NAME[obj.KIND]
                )
            if obj.KIND == "target":
                for lib in context.config.substs.get("STLPORT_LIBS", []):
                    obj.link_system_library(lib)
+
    def _link_library(self, context, obj, variable, path):
        """Resolve one USE_LIBS entry *path* to a library and link it to
        *obj*.

        *path* may be a bare name, a name with a (topobjdir-relative or
        objdir-relative) directory, and may carry a "static:" prefix to force
        the static variant.  Raises SandboxValidationError when the entry
        matches zero or several libraries.
        """
        force_static = path.startswith("static:") and obj.KIND == "target"
        if force_static:
            # Strip the "static:" prefix (7 characters).
            path = path[7:]
        name = mozpath.basename(path)
        dir = mozpath.dirname(path)
        # Only libraries of the same kind (host/target/wasm) are eligible.
        candidates = [l for l in self._libs[name] if l.KIND == obj.KIND]
        if dir:
            if dir.startswith("/"):
                # A leading "/" anchors the directory at the topobjdir.
                dir = mozpath.normpath(mozpath.join(obj.topobjdir, dir[1:]))
            else:
                dir = mozpath.normpath(mozpath.join(obj.objdir, dir))
            dir = mozpath.relpath(dir, obj.topobjdir)
            candidates = [l for l in candidates if l.relobjdir == dir]
            if not candidates:
                # If the given directory is under one of the external
                # (third party) paths, use a fake library reference to
                # there.
                for d in self._external_paths:
                    if dir.startswith("%s/" % d):
                        candidates = [
                            self._get_external_library(dir, name, force_static)
                        ]
                        break

            if not candidates:
                raise SandboxValidationError(
                    '%s contains "%s", but there is no "%s" %s in %s.'
                    % (variable, path, name, self.LIBRARY_NAME_VAR[obj.KIND], dir),
                    context,
                )

        if len(candidates) > 1:
            # If there's more than one remaining candidate, it could be
            # that there are instances for the same library, in static and
            # shared form.
            libs = {}
            for l in candidates:
                key = mozpath.join(l.relobjdir, l.basename)
                if force_static:
                    if isinstance(l, StaticLibrary):
                        libs[key] = l
                else:
                    # Prefer the shared variant when both exist at the same
                    # location.
                    if key in libs and isinstance(l, SharedLibrary):
                        libs[key] = l
                    if key not in libs:
                        libs[key] = l
            candidates = list(libs.values())
            if force_static and not candidates:
                if dir:
                    raise SandboxValidationError(
                        '%s contains "static:%s", but there is no static '
                        '"%s" %s in %s.'
                        % (variable, path, name, self.LIBRARY_NAME_VAR[obj.KIND], dir),
                        context,
                    )
                raise SandboxValidationError(
                    '%s contains "static:%s", but there is no static "%s" '
                    "%s in the tree"
                    % (variable, name, name, self.LIBRARY_NAME_VAR[obj.KIND]),
                    context,
                )

        if not candidates:
            raise SandboxValidationError(
                '%s contains "%s", which does not match any %s in the tree.'
                % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND]),
                context,
            )

        elif len(candidates) > 1:
            # Still ambiguous after static/shared deduplication above.
            paths = (mozpath.join(l.relsrcdir, "moz.build") for l in candidates)
            raise SandboxValidationError(
                '%s contains "%s", which matches a %s defined in multiple '
                "places:\n    %s"
                % (
                    variable,
                    path,
                    self.LIBRARY_NAME_VAR[obj.KIND],
                    "\n    ".join(paths),
                ),
                context,
            )

        elif force_static and not isinstance(candidates[0], StaticLibrary):
            raise SandboxValidationError(
                '%s contains "static:%s", but there is only a shared "%s" '
                "in %s. You may want to add FORCE_STATIC_LIB=True in "
                '%s/moz.build, or remove "static:".'
                % (
                    variable,
                    path,
                    name,
                    candidates[0].relobjdir,
                    candidates[0].relobjdir,
                ),
                context,
            )

        elif isinstance(obj, StaticLibrary) and isinstance(
            candidates[0], SharedLibrary
        ):
            # Remember static-links-shared cases; validity is checked later
            # in _emit_libs_derived.
            self._static_linking_shared.add(obj)
        obj.link_library(candidates[0])
+
+    @memoize
+    def _get_external_library(self, dir, name, force_static):
+        # Create ExternalStaticLibrary or ExternalSharedLibrary object with a
+        # context more or less truthful about where the external library is.
+        context = Context(config=self.config)
+        context.add_source(mozpath.join(self.config.topsrcdir, dir, "dummy"))
+        if force_static:
+            return ExternalStaticLibrary(context, name)
+        else:
+            return ExternalSharedLibrary(context, name)
+
+    def _parse_cargo_file(self, context):
+        """Parse the Cargo.toml file in context and return a Python object
+        representation of it.  Raise a SandboxValidationError if the Cargo.toml
+        file does not exist.  Return a tuple of (config, cargo_file)."""
+        cargo_file = mozpath.join(context.srcdir, "Cargo.toml")
+        if not os.path.exists(cargo_file):
+            raise SandboxValidationError(
+                "No Cargo.toml file found in %s" % cargo_file, context
+            )
+        with open(cargo_file, "r") as f:
+            return pytoml.load(f), cargo_file
+
+    def _verify_deps(
+        self, context, crate_dir, crate_name, dependencies, description="Dependency"
+    ):
+        """Verify that a crate's dependencies all specify local paths."""
+        for dep_crate_name, values in six.iteritems(dependencies):
+            # A simple version number.
+            if isinstance(values, (six.binary_type, six.text_type)):
+                raise SandboxValidationError(
+                    "%s %s of crate %s does not list a path"
+                    % (description, dep_crate_name, crate_name),
+                    context,
+                )
+
+            dep_path = values.get("path", None)
+            if not dep_path:
+                raise SandboxValidationError(
+                    "%s %s of crate %s does not list a path"
+                    % (description, dep_crate_name, crate_name),
+                    context,
+                )
+
+            # Try to catch the case where somebody listed a
+            # local path for development.
+            if os.path.isabs(dep_path):
+                raise SandboxValidationError(
+                    "%s %s of crate %s has a non-relative path"
+                    % (description, dep_crate_name, crate_name),
+                    context,
+                )
+
+            if not os.path.exists(
+                mozpath.join(context.config.topsrcdir, crate_dir, dep_path)
+            ):
+                raise SandboxValidationError(
+                    "%s %s of crate %s refers to a non-existent path"
+                    % (description, dep_crate_name, crate_name),
+                    context,
+                )
+
+    def _rust_library(
+        self, context, libname, static_args, is_gkrust=False, cls=RustLibrary
+    ):
+        # We need to note any Rust library for linking purposes.
+        config, cargo_file = self._parse_cargo_file(context)
+        crate_name = config["package"]["name"]
+
+        if crate_name != libname:
+            raise SandboxValidationError(
+                "library %s does not match Cargo.toml-defined package %s"
+                % (libname, crate_name),
+                context,
+            )
+
+        # Check that the [lib.crate-type] field is correct
+        lib_section = config.get("lib", None)
+        if not lib_section:
+            raise SandboxValidationError(
+                "Cargo.toml for %s has no [lib] section" % libname, context
+            )
+
+        crate_type = lib_section.get("crate-type", None)
+        if not crate_type:
+            raise SandboxValidationError(
+                "Can't determine a crate-type for %s from Cargo.toml" % libname, context
+            )
+
+        crate_type = crate_type[0]
+        if crate_type != "staticlib":
+            raise SandboxValidationError(
+                "crate-type %s is not permitted for %s" % (crate_type, libname), context
+            )
+
+        dependencies = set(six.iterkeys(config.get("dependencies", {})))
+
+        features = context.get(cls.FEATURES_VAR, [])
+        unique_features = set(features)
+        if len(features) != len(unique_features):
+            raise SandboxValidationError(
+                "features for %s should not contain duplicates: %s"
+                % (libname, features),
+                context,
+            )
+
+        return cls(
+            context,
+            libname,
+            cargo_file,
+            crate_type,
+            dependencies,
+            features,
+            is_gkrust,
+            **static_args,
+        )
+
+    def _handle_linkables(self, context, passthru, generated_files):
+        linkables = []
+        host_linkables = []
+        wasm_linkables = []
+
+        def add_program(prog, var):
+            if var.startswith("HOST_"):
+                host_linkables.append(prog)
+            else:
+                linkables.append(prog)
+
+        def check_unique_binary(program, kind):
+            if program in self._binaries:
+                raise SandboxValidationError(
+                    'Cannot use "%s" as %s name, '
+                    "because it is already used in %s"
+                    % (program, kind, self._binaries[program].relsrcdir),
+                    context,
+                )
+
+        for kind, cls in [("PROGRAM", Program), ("HOST_PROGRAM", HostProgram)]:
+            program = context.get(kind)
+            if program:
+                check_unique_binary(program, kind)
+                self._binaries[program] = cls(context, program)
+                self._linkage.append(
+                    (
+                        context,
+                        self._binaries[program],
+                        kind.replace("PROGRAM", "USE_LIBS"),
+                    )
+                )
+                add_program(self._binaries[program], kind)
+
+        all_rust_programs = []
+        for kind, cls in [
+            ("RUST_PROGRAMS", RustProgram),
+            ("HOST_RUST_PROGRAMS", HostRustProgram),
+        ]:
+            programs = context[kind]
+            if not programs:
+                continue
+
+            all_rust_programs.append((programs, kind, cls))
+
+        # Verify Rust program definitions.
+        if all_rust_programs:
+            config, cargo_file = self._parse_cargo_file(context)
+            bin_section = config.get("bin", None)
+            if not bin_section:
+                raise SandboxValidationError(
+                    "Cargo.toml in %s has no [bin] section" % context.srcdir, context
+                )
+
+            defined_binaries = {b["name"] for b in bin_section}
+
+            for programs, kind, cls in all_rust_programs:
+                for program in programs:
+                    if program not in defined_binaries:
+                        raise SandboxValidationError(
+                            "Cannot find Cargo.toml definition for %s" % program,
+                            context,
+                        )
+
+                    check_unique_binary(program, kind)
+                    self._binaries[program] = cls(context, program, cargo_file)
+                    add_program(self._binaries[program], kind)
+
+        for kind, cls in [
+            ("SIMPLE_PROGRAMS", SimpleProgram),
+            ("CPP_UNIT_TESTS", SimpleProgram),
+            ("HOST_SIMPLE_PROGRAMS", HostSimpleProgram),
+        ]:
+            for program in context[kind]:
+                if program in self._binaries:
+                    raise SandboxValidationError(
+                        'Cannot use "%s" in %s, '
+                        "because it is already used in %s"
+                        % (program, kind, self._binaries[program].relsrcdir),
+                        context,
+                    )
+                self._binaries[program] = cls(
+                    context, program, is_unit_test=kind == "CPP_UNIT_TESTS"
+                )
+                self._linkage.append(
+                    (
+                        context,
+                        self._binaries[program],
+                        "HOST_USE_LIBS"
+                        if kind == "HOST_SIMPLE_PROGRAMS"
+                        else "USE_LIBS",
+                    )
+                )
+                add_program(self._binaries[program], kind)
+
+        host_libname = context.get("HOST_LIBRARY_NAME")
+        libname = context.get("LIBRARY_NAME")
+
+        if host_libname:
+            if host_libname == libname:
+                raise SandboxValidationError(
+                    "LIBRARY_NAME and HOST_LIBRARY_NAME must have a different value",
+                    context,
+                )
+
+            is_rust_library = context.get("IS_RUST_LIBRARY")
+            if is_rust_library:
+                lib = self._rust_library(context, host_libname, {}, cls=HostRustLibrary)
+            elif context.get("FORCE_SHARED_LIB"):
+                lib = HostSharedLibrary(context, host_libname)
+            else:
+                lib = HostLibrary(context, host_libname)
+            self._libs[host_libname].append(lib)
+            self._linkage.append((context, lib, "HOST_USE_LIBS"))
+            host_linkables.append(lib)
+
+        final_lib = context.get("FINAL_LIBRARY")
+        if not libname and final_lib:
+            # If no LIBRARY_NAME is given, create one.
+            libname = context.relsrcdir.replace("/", "_")
+
+        static_lib = context.get("FORCE_STATIC_LIB")
+        shared_lib = context.get("FORCE_SHARED_LIB")
+
+        static_name = context.get("STATIC_LIBRARY_NAME")
+        shared_name = context.get("SHARED_LIBRARY_NAME")
+
+        is_framework = context.get("IS_FRAMEWORK")
+
+        soname = context.get("SONAME")
+
+        lib_defines = context.get("LIBRARY_DEFINES")
+
+        wasm_lib = context.get("SANDBOXED_WASM_LIBRARY_NAME")
+
+        shared_args = {}
+        static_args = {}
+
+        if final_lib:
+            if static_lib:
+                raise SandboxValidationError(
+                    "FINAL_LIBRARY implies FORCE_STATIC_LIB. "
+                    "Please remove the latter.",
+                    context,
+                )
+            if shared_lib:
+                raise SandboxValidationError(
+                    "FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. "
+                    "Please remove one.",
+                    context,
+                )
+            if is_framework:
+                raise SandboxValidationError(
+                    "FINAL_LIBRARY conflicts with IS_FRAMEWORK. " "Please remove one.",
+                    context,
+                )
+            static_args["link_into"] = final_lib
+            static_lib = True
+
+        if libname:
+            if is_framework:
+                if soname:
+                    raise SandboxValidationError(
+                        "IS_FRAMEWORK conflicts with SONAME. " "Please remove one.",
+                        context,
+                    )
+                shared_lib = True
+                shared_args["variant"] = SharedLibrary.FRAMEWORK
+
+            if not static_lib and not shared_lib:
+                static_lib = True
+
+            if static_name:
+                if not static_lib:
+                    raise SandboxValidationError(
+                        "STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB", context
+                    )
+                static_args["real_name"] = static_name
+
+            if shared_name:
+                if not shared_lib:
+                    raise SandboxValidationError(
+                        "SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB", context
+                    )
+                shared_args["real_name"] = shared_name
+
+            if soname:
+                if not shared_lib:
+                    raise SandboxValidationError(
+                        "SONAME requires FORCE_SHARED_LIB", context
+                    )
+                shared_args["soname"] = soname
+
+            if context.get("NO_EXPAND_LIBS"):
+                if not static_lib:
+                    raise SandboxValidationError(
+                        "NO_EXPAND_LIBS can only be set for static libraries.", context
+                    )
+                static_args["no_expand_lib"] = True
+
+            if shared_lib and static_lib:
+                if not static_name and not shared_name:
+                    raise SandboxValidationError(
+                        "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
+                        "but neither STATIC_LIBRARY_NAME or "
+                        "SHARED_LIBRARY_NAME is set. At least one is required.",
+                        context,
+                    )
+                if static_name and not shared_name and static_name == libname:
+                    raise SandboxValidationError(
+                        "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
+                        "but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, "
+                        "and SHARED_LIBRARY_NAME is unset. Please either "
+                        "change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set "
+                        "SHARED_LIBRARY_NAME.",
+                        context,
+                    )
+                if shared_name and not static_name and shared_name == libname:
+                    raise SandboxValidationError(
+                        "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
+                        "but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, "
+                        "and STATIC_LIBRARY_NAME is unset. Please either "
+                        "change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set "
+                        "STATIC_LIBRARY_NAME.",
+                        context,
+                    )
+                if shared_name and static_name and shared_name == static_name:
+                    raise SandboxValidationError(
+                        "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
+                        "but SHARED_LIBRARY_NAME is the same as "
+                        "STATIC_LIBRARY_NAME. Please change one of them.",
+                        context,
+                    )
+
+            symbols_file = context.get("SYMBOLS_FILE")
+            if symbols_file:
+                if not shared_lib:
+                    raise SandboxValidationError(
+                        "SYMBOLS_FILE can only be used with a SHARED_LIBRARY.", context
+                    )
+                if context.get("DEFFILE"):
+                    raise SandboxValidationError(
+                        "SYMBOLS_FILE cannot be used along DEFFILE.", context
+                    )
+                if isinstance(symbols_file, SourcePath):
+                    if not os.path.exists(symbols_file.full_path):
+                        raise SandboxValidationError(
+                            "Path specified in SYMBOLS_FILE does not exist: %s "
+                            "(resolved to %s)" % (symbols_file, symbols_file.full_path),
+                            context,
+                        )
+                    shared_args["symbols_file"] = True
+                else:
+                    if symbols_file.target_basename not in generated_files:
+                        raise SandboxValidationError(
+                            (
+                                "Objdir file specified in SYMBOLS_FILE not in "
+                                + "GENERATED_FILES: %s"
+                            )
+                            % (symbols_file,),
+                            context,
+                        )
+                    shared_args["symbols_file"] = symbols_file.target_basename
+
+            if shared_lib:
+                lib = SharedLibrary(context, libname, **shared_args)
+                self._libs[libname].append(lib)
+                self._linkage.append((context, lib, "USE_LIBS"))
+                linkables.append(lib)
+                if not lib.installed:
+                    generated_files.add(lib.lib_name)
+                if symbols_file and isinstance(symbols_file, SourcePath):
+                    script = mozpath.join(
+                        mozpath.dirname(mozpath.dirname(__file__)),
+                        "action",
+                        "generate_symbols_file.py",
+                    )
+                    defines = ()
+                    if lib.defines:
+                        defines = lib.defines.get_defines()
+                    yield GeneratedFile(
+                        context,
+                        script,
+                        "generate_symbols_file",
+                        lib.symbols_file,
+                        [symbols_file],
+                        defines,
+                        required_during_compile=[lib.symbols_file],
+                    )
+            if static_lib:
+                is_rust_library = context.get("IS_RUST_LIBRARY")
+                if is_rust_library:
+                    lib = self._rust_library(
+                        context,
+                        libname,
+                        static_args,
+                        is_gkrust=bool(context.get("IS_GKRUST")),
+                    )
+                else:
+                    lib = StaticLibrary(context, libname, **static_args)
+                self._libs[libname].append(lib)
+                self._linkage.append((context, lib, "USE_LIBS"))
+                linkables.append(lib)
+
+            if lib_defines:
+                if not libname:
+                    raise SandboxValidationError(
+                        "LIBRARY_DEFINES needs a " "LIBRARY_NAME to take effect",
+                        context,
+                    )
+                lib.lib_defines.update(lib_defines)
+
+        if wasm_lib:
+            if wasm_lib == libname:
+                raise SandboxValidationError(
+                    "SANDBOXED_WASM_LIBRARY_NAME and LIBRARY_NAME must have a "
+                    "different value.",
+                    context,
+                )
+            if wasm_lib == host_libname:
+                raise SandboxValidationError(
+                    "SANDBOXED_WASM_LIBRARY_NAME and HOST_LIBRARY_NAME must "
+                    "have a different value.",
+                    context,
+                )
+            if wasm_lib == shared_name:
+                raise SandboxValidationError(
+                    "SANDBOXED_WASM_LIBRARY_NAME and SHARED_NAME must have a "
+                    "different value.",
+                    context,
+                )
+            if wasm_lib == static_name:
+                raise SandboxValidationError(
+                    "SANDBOXED_WASM_LIBRARY_NAME and STATIC_NAME must have a "
+                    "different value.",
+                    context,
+                )
+            lib = SandboxedWasmLibrary(context, wasm_lib)
+            self._libs[libname].append(lib)
+            wasm_linkables.append(lib)
+            self._wasm_compile_dirs.add(context.objdir)
+
+        seen = {}
+        for symbol in ("SOURCES", "UNIFIED_SOURCES"):
+            for src in context.get(symbol, []):
+                basename = os.path.splitext(os.path.basename(src))[0]
+                if basename in seen:
+                    other_src, where = seen[basename]
+                    extra = ""
+                    if "UNIFIED_SOURCES" in (symbol, where):
+                        extra = " in non-unified builds"
+                    raise SandboxValidationError(
+                        f"{src} from {symbol} would have the same object name "
+                        f"as {other_src} from {where}{extra}.",
+                        context,
+                    )
+                seen[basename] = (src, symbol)
+
+        # Only emit sources if we have linkables defined in the same context.
+        # Note the linkables are not emitted in this function, but much later,
+        # after aggregation (because of e.g. USE_LIBS processing).
+        if not (linkables or host_linkables or wasm_linkables):
+            return
+
+        self._compile_dirs.add(context.objdir)
+
+        if host_linkables and not all(
+            isinstance(l, HostRustLibrary) for l in host_linkables
+        ):
+            self._host_compile_dirs.add(context.objdir)
+            # TODO: objdirs with only host things in them shouldn't need target
+            # flags, but there's at least one Makefile.in (in
+            # build/unix/elfhack) that relies on the value of LDFLAGS being
+            # passed to one-off rules.
+            self._compile_dirs.add(context.objdir)
+
+        sources = defaultdict(list)
+        gen_sources = defaultdict(list)
+        all_flags = {}
+        for symbol in ("SOURCES", "HOST_SOURCES", "UNIFIED_SOURCES", "WASM_SOURCES"):
+            srcs = sources[symbol]
+            gen_srcs = gen_sources[symbol]
+            context_srcs = context.get(symbol, [])
+            seen_sources = set()
+            for f in context_srcs:
+                if f in seen_sources:
+                    raise SandboxValidationError(
+                        "Source file should only "
+                        "be added to %s once: %s" % (symbol, f),
+                        context,
+                    )
+                seen_sources.add(f)
+                full_path = f.full_path
+                if isinstance(f, SourcePath):
+                    srcs.append(full_path)
+                else:
+                    assert isinstance(f, Path)
+                    gen_srcs.append(full_path)
+                if symbol == "SOURCES":
+                    context_flags = context_srcs[f]
+                    if context_flags:
+                        all_flags[full_path] = context_flags
+
+                if isinstance(f, SourcePath) and not os.path.exists(full_path):
+                    raise SandboxValidationError(
+                        "File listed in %s does not "
+                        "exist: '%s'" % (symbol, full_path),
+                        context,
+                    )
+
+        # Process the .cpp files generated by IPDL as generated sources within
+        # the context which declared the IPDL_SOURCES attribute.
+        ipdl_root = self.config.substs.get("IPDL_ROOT")
+        for symbol in ("IPDL_SOURCES", "PREPROCESSED_IPDL_SOURCES"):
+            context_srcs = context.get(symbol, [])
+            for f in context_srcs:
+                root, ext = mozpath.splitext(mozpath.basename(f))
+
+                suffix_map = {
+                    ".ipdlh": [".cpp"],
+                    ".ipdl": [".cpp", "Child.cpp", "Parent.cpp"],
+                }
+                if ext not in suffix_map:
+                    raise SandboxValidationError(
+                        "Unexpected extension for IPDL source %s" % ext
+                    )
+
+                gen_sources["UNIFIED_SOURCES"].extend(
+                    mozpath.join(ipdl_root, root + suffix) for suffix in suffix_map[ext]
+                )
+
+        no_pgo = context.get("NO_PGO")
+        no_pgo_sources = [f for f, flags in six.iteritems(all_flags) if flags.no_pgo]
+        if no_pgo:
+            if no_pgo_sources:
+                raise SandboxValidationError(
+                    "NO_PGO and SOURCES[...].no_pgo " "cannot be set at the same time",
+                    context,
+                )
+            passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo
+        if no_pgo_sources:
+            passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo_sources
+
+        # A map from "canonical suffixes" for a particular source file
+        # language to the range of suffixes associated with that language.
+        #
+        # We deliberately don't list the canonical suffix in the suffix list
+        # in the definition; we'll add it in programmatically after defining
+        # things.
+        suffix_map = {
+            ".s": set([".asm"]),
+            ".c": set(),
+            ".m": set(),
+            ".mm": set(),
+            ".cpp": set([".cc", ".cxx"]),
+            ".S": set(),
+        }
+
+        # The inverse of the above, mapping suffixes to their canonical suffix.
+        canonicalized_suffix_map = {}
+        for suffix, alternatives in six.iteritems(suffix_map):
+            alternatives.add(suffix)
+            for a in alternatives:
+                canonicalized_suffix_map[a] = suffix
+
+        # A map from moz.build variables to the canonical suffixes of file
+        # kinds that can be listed therein.
+        all_suffixes = list(suffix_map.keys())
+        varmap = dict(
+            SOURCES=(Sources, all_suffixes),
+            HOST_SOURCES=(HostSources, [".c", ".mm", ".cpp"]),
+            UNIFIED_SOURCES=(UnifiedSources, [".c", ".mm", ".m", ".cpp"]),
+        )
+        # Only include a WasmSources context if there are any WASM_SOURCES.
+        # (This is going to matter later because we inject an extra .c file to
+        # compile with the wasm compiler if, and only if, there are any WASM
+        # sources.)
+        if sources["WASM_SOURCES"] or gen_sources["WASM_SOURCES"]:
+            varmap["WASM_SOURCES"] = (WasmSources, [".c", ".cpp"])
+        # Track whether there are any C++ source files.
+        # Technically this won't do the right thing for SIMPLE_PROGRAMS in
+        # a directory with mixed C and C++ source, but it's not that important.
+        cxx_sources = defaultdict(bool)
+
+        # Source files to track for linkables associated with this context.
+        ctxt_sources = defaultdict(lambda: defaultdict(list))
+
+        for variable, (klass, suffixes) in varmap.items():
+            # Group static and generated files by their canonical suffixes, and
+            # ensure we haven't been given filetypes that we don't recognize.
+            by_canonical_suffix = defaultdict(lambda: {"static": [], "generated": []})
+            for srcs, key in (
+                (sources[variable], "static"),
+                (gen_sources[variable], "generated"),
+            ):
+                for f in srcs:
+                    canonical_suffix = canonicalized_suffix_map.get(
+                        mozpath.splitext(f)[1]
+                    )
+                    if canonical_suffix not in suffixes:
+                        raise SandboxValidationError(
+                            "%s has an unknown file type." % f, context
+                        )
+                    by_canonical_suffix[canonical_suffix][key].append(f)
+
+            # Yield an object for each canonical suffix, grouping generated and
+            # static sources together to allow them to be unified together.
+            for canonical_suffix in sorted(by_canonical_suffix.keys()):
+                if canonical_suffix in (".cpp", ".mm"):
+                    cxx_sources[variable] = True
+                elif canonical_suffix in (".s", ".S"):
+                    self._asm_compile_dirs.add(context.objdir)
+                src_group = by_canonical_suffix[canonical_suffix]
+                obj = klass(
+                    context,
+                    src_group["static"],
+                    src_group["generated"],
+                    canonical_suffix,
+                )
+                srcs = list(obj.files)
+                if isinstance(obj, UnifiedSources) and obj.have_unified_mapping:
+                    srcs = sorted(dict(obj.unified_source_mapping).keys())
+                ctxt_sources[variable][canonical_suffix] += srcs
+                yield obj
+
+        if ctxt_sources:
+            for linkable in linkables:
+                for target_var in ("SOURCES", "UNIFIED_SOURCES"):
+                    for suffix, srcs in ctxt_sources[target_var].items():
+                        linkable.sources[suffix] += srcs
+            for host_linkable in host_linkables:
+                for suffix, srcs in ctxt_sources["HOST_SOURCES"].items():
+                    host_linkable.sources[suffix] += srcs
+            for wasm_linkable in wasm_linkables:
+                for suffix, srcs in ctxt_sources["WASM_SOURCES"].items():
+                    wasm_linkable.sources[suffix] += srcs
+
+        for f, flags in sorted(six.iteritems(all_flags)):
+            if flags.flags:
+                ext = mozpath.splitext(f)[1]
+                yield PerSourceFlag(context, f, flags.flags)
+
+        # If there are any C++ sources, set all the linkables defined here
+        # to require the C++ linker.
+        for vars, linkable_items in (
+            (("SOURCES", "UNIFIED_SOURCES"), linkables),
+            (("HOST_SOURCES",), host_linkables),
+        ):
+            for var in vars:
+                if cxx_sources[var]:
+                    for l in linkable_items:
+                        l.cxx_link = True
+                    break
+
+    def emit_from_context(self, context):
+        """Convert a Context to tree metadata objects.
+
+        This is a generator of mozbuild.frontend.data.ContextDerived instances.
+        """
+
+        # We only want to emit an InstallationTarget if one of the consulted
+        # variables is defined. Later on, we look up FINAL_TARGET, which has
+        # the side-effect of populating it. So, we need to do this lookup
+        # early.
+        if any(k in context for k in ("FINAL_TARGET", "XPI_NAME", "DIST_SUBDIR")):
+            yield InstallationTarget(context)
+
+        # We always emit a directory traversal descriptor. This is needed by
+        # the recursive make backend.
+        for o in self._emit_directory_traversal_from_context(context):
+            yield o
+
+        for obj in self._process_xpidl(context):
+            yield obj
+
+        computed_flags = ComputedFlags(context, context["COMPILE_FLAGS"])
+        computed_link_flags = ComputedFlags(context, context["LINK_FLAGS"])
+        computed_host_flags = ComputedFlags(context, context["HOST_COMPILE_FLAGS"])
+        computed_as_flags = ComputedFlags(context, context["ASM_FLAGS"])
+        computed_wasm_flags = ComputedFlags(context, context["WASM_FLAGS"])
+
+        # Proxy some variables as-is until we have richer classes to represent
+        # them. We should aim to keep this set small because it violates the
+        # desired abstraction of the build definition away from makefiles.
+        passthru = VariablePassthru(context)
+        varlist = [
+            "EXTRA_DSO_LDOPTS",
+            "RCFILE",
+            "RCINCLUDE",
+            "WIN32_EXE_LDFLAGS",
+            "USE_EXTENSION_MANIFEST",
+        ]
+        for v in varlist:
+            if v in context and context[v]:
+                passthru.variables[v] = context[v]
+
+        if (
+            context.config.substs.get("OS_TARGET") == "WINNT"
+            and context["DELAYLOAD_DLLS"]
+        ):
+            if context.config.substs.get("CC_TYPE") != "clang":
+                context["LDFLAGS"].extend(
+                    [("-DELAYLOAD:%s" % dll) for dll in context["DELAYLOAD_DLLS"]]
+                )
+            else:
+                context["LDFLAGS"].extend(
+                    [
+                        ("-Wl,-Xlink=-DELAYLOAD:%s" % dll)
+                        for dll in context["DELAYLOAD_DLLS"]
+                    ]
+                )
+            context["OS_LIBS"].append("delayimp")
+
+        for v in ["CMFLAGS", "CMMFLAGS"]:
+            if v in context and context[v]:
+                passthru.variables["MOZBUILD_" + v] = context[v]
+
+        for v in ["CXXFLAGS", "CFLAGS"]:
+            if v in context and context[v]:
+                computed_flags.resolve_flags("MOZBUILD_%s" % v, context[v])
+
+        for v in ["WASM_CFLAGS", "WASM_CXXFLAGS"]:
+            if v in context and context[v]:
+                computed_wasm_flags.resolve_flags("MOZBUILD_%s" % v, context[v])
+
+        for v in ["HOST_CXXFLAGS", "HOST_CFLAGS"]:
+            if v in context and context[v]:
+                computed_host_flags.resolve_flags("MOZBUILD_%s" % v, context[v])
+
+        if "LDFLAGS" in context and context["LDFLAGS"]:
+            computed_link_flags.resolve_flags("MOZBUILD", context["LDFLAGS"])
+
+        deffile = context.get("DEFFILE")
+        if deffile and context.config.substs.get("OS_TARGET") == "WINNT":
+            if isinstance(deffile, SourcePath):
+                if not os.path.exists(deffile.full_path):
+                    raise SandboxValidationError(
+                        "Path specified in DEFFILE does not exist: %s "
+                        "(resolved to %s)" % (deffile, deffile.full_path),
+                        context,
+                    )
+                path = mozpath.relpath(deffile.full_path, context.objdir)
+            else:
+                path = deffile.target_basename
+
+            if context.config.substs.get("GNU_CC"):
+                computed_link_flags.resolve_flags("DEFFILE", [path])
+            else:
+                computed_link_flags.resolve_flags("DEFFILE", ["-DEF:" + path])
+
+        dist_install = context["DIST_INSTALL"]
+        if dist_install is True:
+            passthru.variables["DIST_INSTALL"] = True
+        elif dist_install is False:
+            passthru.variables["NO_DIST_INSTALL"] = True
+
+        # Ideally, this should be done in templates, but this is difficult at
+        # the moment because USE_STATIC_LIBS can be set after a template
+        # returns. Eventually, with context-based templates, it will be
+        # possible.
+        if context.config.substs.get(
+            "OS_ARCH"
+        ) == "WINNT" and not context.config.substs.get("GNU_CC"):
+            use_static_lib = context.get(
+                "USE_STATIC_LIBS"
+            ) and not context.config.substs.get("MOZ_ASAN")
+            rtl_flag = "-MT" if use_static_lib else "-MD"
+            if context.config.substs.get("MOZ_DEBUG") and not context.config.substs.get(
+                "MOZ_NO_DEBUG_RTL"
+            ):
+                rtl_flag += "d"
+            computed_flags.resolve_flags("RTL", [rtl_flag])
+            if not context.config.substs.get("CROSS_COMPILE"):
+                computed_host_flags.resolve_flags("RTL", [rtl_flag])
+
+        generated_files = set()
+        localized_generated_files = set()
+        for obj in self._process_generated_files(context):
+            for f in obj.outputs:
+                generated_files.add(f)
+                if obj.localized:
+                    localized_generated_files.add(f)
+            yield obj
+
+        for path in context["CONFIGURE_SUBST_FILES"]:
+            sub = self._create_substitution(ConfigFileSubstitution, context, path)
+            generated_files.add(str(sub.relpath))
+            yield sub
+
+        for defines_var, cls, backend_flags in (
+            ("DEFINES", Defines, (computed_flags, computed_as_flags)),
+            ("HOST_DEFINES", HostDefines, (computed_host_flags,)),
+            ("WASM_DEFINES", WasmDefines, (computed_wasm_flags,)),
+        ):
+            defines = context.get(defines_var)
+            if defines:
+                defines_obj = cls(context, defines)
+                if isinstance(defines_obj, Defines):
+                    # DEFINES have consumers outside the compile command line,
+                    # HOST_DEFINES do not.
+                    yield defines_obj
+            else:
+                # If we don't have explicitly set defines we need to make sure
+                # initialized values if present end up in computed flags.
+                defines_obj = cls(context, context[defines_var])
+
+            defines_from_obj = list(defines_obj.get_defines())
+            if defines_from_obj:
+                for flags in backend_flags:
+                    flags.resolve_flags(defines_var, defines_from_obj)
+
+        idl_vars = (
+            "GENERATED_EVENTS_WEBIDL_FILES",
+            "GENERATED_WEBIDL_FILES",
+            "PREPROCESSED_TEST_WEBIDL_FILES",
+            "PREPROCESSED_WEBIDL_FILES",
+            "TEST_WEBIDL_FILES",
+            "WEBIDL_FILES",
+            "IPDL_SOURCES",
+            "PREPROCESSED_IPDL_SOURCES",
+            "XPCOM_MANIFESTS",
+        )
+        for context_var in idl_vars:
+            for name in context.get(context_var, []):
+                self._idls[context_var].add(mozpath.join(context.srcdir, name))
+        # WEBIDL_EXAMPLE_INTERFACES do not correspond to files.
+        for name in context.get("WEBIDL_EXAMPLE_INTERFACES", []):
+            self._idls["WEBIDL_EXAMPLE_INTERFACES"].add(name)
+
+        local_includes = []
+        for local_include in context.get("LOCAL_INCLUDES", []):
+            full_path = local_include.full_path
+            if not isinstance(local_include, ObjDirPath):
+                if not os.path.exists(full_path):
+                    raise SandboxValidationError(
+                        "Path specified in LOCAL_INCLUDES does not exist: %s (resolved to %s)"
+                        % (local_include, full_path),
+                        context,
+                    )
+                if not os.path.isdir(full_path):
+                    raise SandboxValidationError(
+                        "Path specified in LOCAL_INCLUDES "
+                        "is a filename, but a directory is required: %s "
+                        "(resolved to %s)" % (local_include, full_path),
+                        context,
+                    )
+            include_obj = LocalInclude(context, local_include)
+            local_includes.append(include_obj.path.full_path)
+            yield include_obj
+
+        computed_flags.resolve_flags(
+            "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+        )
+        computed_as_flags.resolve_flags(
+            "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+        )
+        computed_host_flags.resolve_flags(
+            "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+        )
+        computed_wasm_flags.resolve_flags(
+            "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+        )
+
+        for obj in self._handle_linkables(context, passthru, generated_files):
+            yield obj
+
+        generated_files.update(
+            [
+                "%s%s" % (k, self.config.substs.get("BIN_SUFFIX", ""))
+                for k in self._binaries.keys()
+            ]
+        )
+
+        components = []
+        for var, cls in (
+            ("EXPORTS", Exports),
+            ("FINAL_TARGET_FILES", FinalTargetFiles),
+            ("FINAL_TARGET_PP_FILES", FinalTargetPreprocessedFiles),
+            ("LOCALIZED_FILES", LocalizedFiles),
+            ("LOCALIZED_PP_FILES", LocalizedPreprocessedFiles),
+            ("OBJDIR_FILES", ObjdirFiles),
+            ("OBJDIR_PP_FILES", ObjdirPreprocessedFiles),
+            ("TEST_HARNESS_FILES", TestHarnessFiles),
+        ):
+            all_files = context.get(var)
+            if not all_files:
+                continue
+            if dist_install is False and var != "TEST_HARNESS_FILES":
+                raise SandboxValidationError(
+                    "%s cannot be used with DIST_INSTALL = False" % var, context
+                )
+            has_prefs = False
+            has_resources = False
+            for base, files in all_files.walk():
+                if var == "TEST_HARNESS_FILES" and not base:
+                    raise SandboxValidationError(
+                        "Cannot install files to the root of TEST_HARNESS_FILES",
+                        context,
+                    )
+                if base == "components":
+                    components.extend(files)
+                if base == "defaults/pref":
+                    has_prefs = True
+                if mozpath.split(base)[0] == "res":
+                    has_resources = True
+                for f in files:
+                    if (
+                        var
+                        in (
+                            "FINAL_TARGET_PP_FILES",
+                            "OBJDIR_PP_FILES",
+                            "LOCALIZED_PP_FILES",
+                        )
+                        and not isinstance(f, SourcePath)
+                    ):
+                        raise SandboxValidationError(
+                            ("Only source directory paths allowed in " + "%s: %s")
+                            % (var, f),
+                            context,
+                        )
+                    if var.startswith("LOCALIZED_"):
+                        if isinstance(f, SourcePath):
+                            if f.startswith("en-US/"):
+                                pass
+                            elif "locales/en-US/" in f:
+                                pass
+                            else:
+                                raise SandboxValidationError(
+                                    "%s paths must start with `en-US/` or "
+                                    "contain `locales/en-US/`: %s" % (var, f),
+                                    context,
+                                )
+
+                    if not isinstance(f, ObjDirPath):
+                        path = f.full_path
+                        if "*" not in path and not os.path.exists(path):
+                            raise SandboxValidationError(
+                                "File listed in %s does not exist: %s" % (var, path),
+                                context,
+                            )
+                    else:
+                        # TODO: Bug 1254682 - The '/' check is to allow
+                        # installing files generated from other directories,
+                        # which is done occasionally for tests. However, it
+                        # means we don't fail early if the file isn't actually
+                        # created by the other moz.build file.
+                        if f.target_basename not in generated_files and "/" not in f:
+                            raise SandboxValidationError(
+                                (
+                                    "Objdir file listed in %s not in "
+                                    + "GENERATED_FILES: %s"
+                                )
+                                % (var, f),
+                                context,
+                            )
+
+                        if var.startswith("LOCALIZED_"):
+                            # Further require that LOCALIZED_FILES are from
+                            # LOCALIZED_GENERATED_FILES.
+                            if f.target_basename not in localized_generated_files:
+                                raise SandboxValidationError(
+                                    (
+                                        "Objdir file listed in %s not in "
+                                        + "LOCALIZED_GENERATED_FILES: %s"
+                                    )
+                                    % (var, f),
+                                    context,
+                                )
+                        else:
+                            # Additionally, don't allow LOCALIZED_GENERATED_FILES to be used
+                            # in anything *but* LOCALIZED_FILES.
+                            if f.target_basename in localized_generated_files:
+                                raise SandboxValidationError(
+                                    (
+                                        "Outputs of LOCALIZED_GENERATED_FILES cannot "
+                                        "be used in %s: %s"
+                                    )
+                                    % (var, f),
+                                    context,
+                                )
+
+            # Addons (when XPI_NAME is defined) and Applications (when
+            # DIST_SUBDIR is defined) use a different preferences directory
+            # (default/preferences) from the one the GRE uses (defaults/pref).
+            # Hence, we move the files from the latter to the former in that
+            # case.
+            if has_prefs and (context.get("XPI_NAME") or context.get("DIST_SUBDIR")):
+                all_files.defaults.preferences += all_files.defaults.pref
+                del all_files.defaults._children["pref"]
+
+            if has_resources and (
+                context.get("DIST_SUBDIR") or context.get("XPI_NAME")
+            ):
+                raise SandboxValidationError(
+                    "RESOURCES_FILES cannot be used with DIST_SUBDIR or " "XPI_NAME.",
+                    context,
+                )
+
+            yield cls(context, all_files)
+
+        for c in components:
+            if c.endswith(".manifest"):
+                yield ChromeManifestEntry(
+                    context,
+                    "chrome.manifest",
+                    Manifest("components", mozpath.basename(c)),
+                )
+
+        rust_tests = context.get("RUST_TESTS", [])
+        if rust_tests:
+            # TODO: more sophisticated checking of the declared name vs.
+            # contents of the Cargo.toml file.
+            features = context.get("RUST_TEST_FEATURES", [])
+
+            yield RustTests(context, rust_tests, features)
+
+        for obj in self._process_test_manifests(context):
+            yield obj
+
+        for obj in self._process_jar_manifests(context):
+            yield obj
+
+        computed_as_flags.resolve_flags("MOZBUILD", context.get("ASFLAGS"))
+
+        if context.get("USE_NASM") is True:
+            nasm = context.config.substs.get("NASM")
+            if not nasm:
+                raise SandboxValidationError("nasm is not available", context)
+            passthru.variables["AS"] = nasm
+            passthru.variables["AS_DASH_C_FLAG"] = ""
+            passthru.variables["ASOUTOPTION"] = "-o "
+            computed_as_flags.resolve_flags(
+                "OS", context.config.substs.get("NASM_ASFLAGS", [])
+            )
+
+        if context.get("USE_INTEGRATED_CLANGCL_AS") is True:
+            if context.config.substs.get("CC_TYPE") != "clang-cl":
+                raise SandboxValidationError("clang-cl is not available", context)
+            passthru.variables["AS"] = context.config.substs.get("CC")
+            passthru.variables["AS_DASH_C_FLAG"] = "-c"
+            passthru.variables["ASOUTOPTION"] = "-o "
+
+        if passthru.variables:
+            yield passthru
+
+        if context.objdir in self._compile_dirs:
+            self._compile_flags[context.objdir] = computed_flags
+            yield computed_link_flags
+
+        if context.objdir in self._asm_compile_dirs:
+            self._compile_as_flags[context.objdir] = computed_as_flags
+
+        if context.objdir in self._host_compile_dirs:
+            yield computed_host_flags
+
+        if context.objdir in self._wasm_compile_dirs:
+            yield computed_wasm_flags
+
+    def _create_substitution(self, cls, context, path):
+        """Build a substitution object of type *cls* for *path*.
+
+        The input is taken to be ``<full_path>.in`` and the output is the
+        translated (objdir) counterpart of *path*; ``relpath`` keeps the
+        original Path object for later reference.
+        """
+        sub = cls(context)
+        sub.input_path = "%s.in" % path.full_path
+        sub.output_path = path.translated
+        sub.relpath = path
+
+        return sub
+
+    def _process_xpidl(self, context):
+        """Validate XPIDL variables and yield an XPIDLModule for the context.
+
+        Raises SandboxValidationError when XPIDL_MODULE and XPIDL_SOURCES
+        are not defined together, or when a listed .idl file is missing.
+        """
+        # XPIDL source files get processed and turned into .h and .xpt files.
+        # If there are multiple XPIDL files in a directory, they get linked
+        # together into a final .xpt, which has the name defined by
+        # XPIDL_MODULE.
+        xpidl_module = context["XPIDL_MODULE"]
+
+        if not xpidl_module:
+            # Sources without a module name is an error; neither defined is
+            # simply "nothing to do".
+            if context["XPIDL_SOURCES"]:
+                raise SandboxValidationError(
+                    "XPIDL_MODULE must be defined if " "XPIDL_SOURCES is defined.",
+                    context,
+                )
+            return
+
+        if not context["XPIDL_SOURCES"]:
+            raise SandboxValidationError(
+                "XPIDL_MODULE cannot be defined " "unless there are XPIDL_SOURCES",
+                context,
+            )
+
+        # XPIDL output is always installed, so DIST_INSTALL = False is only
+        # worth a warning, not an error.
+        if context["DIST_INSTALL"] is False:
+            self.log(
+                logging.WARN,
+                "mozbuild_warning",
+                dict(path=context.main_path),
+                "{path}: DIST_INSTALL = False has no effect on XPIDL_SOURCES.",
+            )
+
+        for idl in context["XPIDL_SOURCES"]:
+            if not os.path.exists(idl.full_path):
+                raise SandboxValidationError(
+                    "File %s from XPIDL_SOURCES " "does not exist" % idl.full_path,
+                    context,
+                )
+
+        yield XPIDLModule(context, xpidl_module, context["XPIDL_SOURCES"])
+
+    def _process_generated_files(self, context):
+        """Yield GeneratedFile objects for CONFIGURE_DEFINE_FILES,
+        GENERATED_FILES and LOCALIZED_GENERATED_FILES.
+
+        Validates that any generating script exists and is a .py file, and
+        that every SourcePath input exists on disk.
+        """
+        # CONFIGURE_DEFINE_FILES are handled by a fixed in-tree action script.
+        for path in context["CONFIGURE_DEFINE_FILES"]:
+            script = mozpath.join(
+                mozpath.dirname(mozpath.dirname(__file__)),
+                "action",
+                "process_define_files.py",
+            )
+            yield GeneratedFile(
+                context,
+                script,
+                "process_define_file",
+                six.text_type(path),
+                [Path(context, path + ".in")],
+            )
+
+        generated_files = context.get("GENERATED_FILES") or []
+        localized_generated_files = context.get("LOCALIZED_GENERATED_FILES") or []
+        if not (generated_files or localized_generated_files):
+            return
+
+        # Process both variables through one loop; the boolean tags each
+        # emitted GeneratedFile as localized or not.
+        for (localized, gen) in (
+            (False, generated_files),
+            (True, localized_generated_files),
+        ):
+            for f in gen:
+                flags = gen[f]
+                outputs = f
+                inputs = []
+                if flags.script:
+                    # Default entry point unless "script.py:function" is used.
+                    method = "main"
+                    script = SourcePath(context, flags.script).full_path
+
+                    # Deal with cases like "C:\\path\\to\\script.py:function".
+                    if ".py:" in script:
+                        script, method = script.rsplit(".py:", 1)
+                        script += ".py"
+
+                    if not os.path.exists(script):
+                        raise SandboxValidationError(
+                            "Script for generating %s does not exist: %s" % (f, script),
+                            context,
+                        )
+                    if os.path.splitext(script)[1] != ".py":
+                        raise SandboxValidationError(
+                            "Script for generating %s does not end in .py: %s"
+                            % (f, script),
+                            context,
+                        )
+                else:
+                    script = None
+                    method = None
+
+                for i in flags.inputs:
+                    p = Path(context, i)
+                    # Only srcdir inputs can be checked for existence here;
+                    # objdir inputs are produced during the build.
+                    if isinstance(p, SourcePath) and not os.path.exists(p.full_path):
+                        raise SandboxValidationError(
+                            "Input for generating %s does not exist: %s"
+                            % (f, p.full_path),
+                            context,
+                        )
+                    inputs.append(p)
+
+                yield GeneratedFile(
+                    context,
+                    script,
+                    method,
+                    outputs,
+                    inputs,
+                    flags.flags,
+                    localized=localized,
+                    force=flags.force,
+                )
+
<code>
+    def _process_test_manifests(self, context):
+        """Dispatch every declared test manifest in *context*.
+
+        Iterates TEST_MANIFESTS flavors (``<PREFIX>_MANIFESTS`` variables)
+        and reftest flavors (``<FLAVOR>_MANIFESTS``), yielding the objects
+        produced by the per-manifest processors.
+        """
+        for prefix, info in TEST_MANIFESTS.items():
+            for path, manifest in context.get("%s_MANIFESTS" % prefix, []):
+                for obj in self._process_test_manifest(context, info, path, manifest):
+                    yield obj
+
+        for flavor in REFTEST_FLAVORS:
+            for path, manifest in context.get("%s_MANIFESTS" % flavor.upper(), []):
+                for obj in self._process_reftest_manifest(
+                    context, flavor, path, manifest
+                ):
+                    yield obj
</code>
+
+    def _process_test_manifest(self, context, info, manifest_path, mpmanifest):
+        """Turn one manifestparser manifest into a TestManifest object.
+
+        *info* is the (flavor, install_root, install_subdir, package_tests)
+        tuple from TEST_MANIFESTS. Registers installs for tests, their
+        support files and the manifests themselves; raises
+        SandboxValidationError for empty manifests, missing tests, missing
+        support files, and bogus generated-files entries.
+        """
+        flavor, install_root, install_subdir, package_tests = info
+
+        path = manifest_path.full_path
+        manifest_dir = mozpath.dirname(path)
+        manifest_reldir = mozpath.dirname(
+            mozpath.relpath(path, context.config.topsrcdir)
+        )
+        manifest_sources = [
+            mozpath.relpath(pth, context.config.topsrcdir)
+            for pth in mpmanifest.source_files
+        ]
+        install_prefix = mozpath.join(install_root, install_subdir)
+
+        try:
+            if not mpmanifest.tests:
+                raise SandboxValidationError("Empty test manifest: %s" % path, context)
+
+            defaults = mpmanifest.manifest_defaults[os.path.normpath(path)]
+            obj = TestManifest(
+                context,
+                path,
+                mpmanifest,
+                flavor=flavor,
+                install_prefix=install_prefix,
+                relpath=mozpath.join(manifest_reldir, mozpath.basename(path)),
+                sources=manifest_sources,
+                dupe_manifest="dupe-manifest" in defaults,
+            )
+
+            filtered = mpmanifest.tests
+
+            # Every listed test file must exist on disk.
+            missing = [t["name"] for t in filtered if not os.path.exists(t["path"])]
+            if missing:
+                raise SandboxValidationError(
+                    "Test manifest (%s) lists "
+                    "test that does not exist: %s" % (path, ", ".join(missing)),
+                    context,
+                )
+
+            out_dir = mozpath.join(install_prefix, manifest_reldir)
+
+            def process_support_files(test):
+                # Record install entries for a test's support-files, and
+                # validate deferred (../-style) installs.
+                install_info = self._test_files_converter.convert_support_files(
+                    test, install_root, manifest_dir, out_dir
+                )
+
+                obj.pattern_installs.extend(install_info.pattern_installs)
+                for source, dest in install_info.installs:
+                    obj.installs[source] = (dest, False)
+                obj.external_installs |= install_info.external_installs
+                for install_path in install_info.deferred_installs:
+                    # A deferred entry is an error only when it is not a
+                    # wildcard, not present in the srcdir, and not already
+                    # satisfied by an external install.
+                    if all(
+                        [
+                            "*" not in install_path,
+                            not os.path.isfile(
+                                mozpath.join(context.config.topsrcdir, install_path[2:])
+                            ),
+                            install_path not in install_info.external_installs,
+                        ]
+                    ):
+                        raise SandboxValidationError(
+                            "Error processing test "
+                            "manifest %s: entry in support-files not present "
+                            "in the srcdir: %s" % (path, install_path),
+                            context,
+                        )
+
+                obj.deferred_installs |= install_info.deferred_installs
+
+            for test in filtered:
+                obj.tests.append(test)
+
+                # Some test files are compiled and should not be copied into the
+                # test package. They function as identifiers rather than files.
+                if package_tests:
+                    manifest_relpath = mozpath.relpath(
+                        test["path"], mozpath.dirname(test["manifest"])
+                    )
+                    obj.installs[mozpath.normpath(test["path"])] = (
+                        (mozpath.join(out_dir, manifest_relpath)),
+                        True,
+                    )
+
+                process_support_files(test)
+
+            # Manifest-level defaults can declare support-files too.
+            for path, m_defaults in mpmanifest.manifest_defaults.items():
+                process_support_files(m_defaults)
+
+            # We also copy manifests into the output directory,
+            # including manifests from [include:foo] directives.
+            for mpath in mpmanifest.manifests():
+                mpath = mozpath.normpath(mpath)
+                out_path = mozpath.join(out_dir, mozpath.basename(mpath))
+                obj.installs[mpath] = (out_path, False)
+
+            # Some manifests reference files that are auto generated as
+            # part of the build or shouldn't be installed for some
+            # reason. Here, we prune those files from the install set.
+            # FUTURE we should be able to detect autogenerated files from
+            # other build metadata. Once we do that, we can get rid of this.
+            for f in defaults.get("generated-files", "").split():
+                # We re-raise otherwise the stack trace isn't informative.
+                try:
+                    del obj.installs[mozpath.join(manifest_dir, f)]
+                except KeyError:
+                    raise SandboxValidationError(
+                        "Error processing test "
+                        "manifest %s: entry in generated-files not present "
+                        "elsewhere in manifest: %s" % (path, f),
+                        context,
+                    )
+
+            yield obj
+        # NOTE(review): AssertionError is a subclass of Exception, so this
+        # tuple is redundant; the clause also re-wraps SandboxValidationErrors
+        # raised above, embedding their traceback in a new error.
+        except (AssertionError, Exception):
+            raise SandboxValidationError(
+                "Error processing test "
+                "manifest file %s: %s"
+                % (path, "\n".join(traceback.format_exception(*sys.exc_info()))),
+                context,
+            )
+
+    def _process_reftest_manifest(self, context, flavor, manifest_path, manifest):
+        """Yield a TestManifest for a reftest-style manifest.
+
+        Tests are sorted by path for deterministic output; no install
+        entries are recorded for reftests.
+        """
+        manifest_full_path = manifest_path.full_path
+        manifest_reldir = mozpath.dirname(
+            mozpath.relpath(manifest_full_path, context.config.topsrcdir)
+        )
+
+        # reftest manifests don't come from manifest parser. But they are
+        # similar enough that we can use the same emitted objects. Note
+        # that we don't perform any installs for reftests.
+        obj = TestManifest(
+            context,
+            manifest_full_path,
+            manifest,
+            flavor=flavor,
+            install_prefix="%s/" % flavor,
+            relpath=mozpath.join(manifest_reldir, mozpath.basename(manifest_path)),
+        )
+        obj.tests = list(sorted(manifest.tests, key=lambda t: t["path"]))
+
+        yield obj
+
+    def _process_jar_manifests(self, context):
+        """Yield a JARManifest per JAR_MANIFESTS entry (at most one allowed).
+
+        Also errors out when a jar.mn file exists in the srcdir without
+        being declared in JAR_MANIFESTS.
+        """
+        jar_manifests = context.get("JAR_MANIFESTS", [])
+        if len(jar_manifests) > 1:
+            raise SandboxValidationError(
+                "While JAR_MANIFESTS is a list, "
+                "it is currently limited to one value.",
+                context,
+            )
+
+        for path in jar_manifests:
+            yield JARManifest(context, path)
+
+        # Temporary test to look for jar.mn files that creep in without using
+        # the new declaration. Before, we didn't require jar.mn files to
+        # declared anywhere (they were discovered). This will detect people
+        # relying on the old behavior.
+        if os.path.exists(os.path.join(context.srcdir, "jar.mn")):
+            if "jar.mn" not in jar_manifests:
+                raise SandboxValidationError(
+                    "A jar.mn exists but it "
+                    "is not referenced in the moz.build file. "
+                    "Please define JAR_MANIFESTS.",
+                    context,
+                )
+
+    def _emit_directory_traversal_from_context(self, context):
+        """Yield a DirectoryTraversal describing this context's DIRS."""
+        o = DirectoryTraversal(context)
+        o.dirs = context.get("DIRS", [])
+
+        # Some paths have a subconfigure, yet also have a moz.build. Those
+        # shouldn't end up in self._external_paths.
+        if o.objdir:
+            self._external_paths -= {o.relobjdir}
+
+        yield o
Index: firefox-102.15.0-new/python/mozbuild/mozbuild/test/frontend/test_emitter.py
===================================================================
--- firefox-102.15.0-new/python/mozbuild/mozbuild/test/frontend/test_emitter.py	(nonexistent)
+++ firefox-102.15.0-new/python/mozbuild/mozbuild/test/frontend/test_emitter.py	(revision 228)
@@ -0,0 +1,1850 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+import six
+import unittest
+
+from mozunit import main
+
+from mozbuild.frontend.context import ObjDirPath, Path
+from mozbuild.frontend.data import (
+    ComputedFlags,
+    ConfigFileSubstitution,
+    Defines,
+    DirectoryTraversal,
+    Exports,
+    FinalTargetPreprocessedFiles,
+    GeneratedFile,
+    HostProgram,
+    HostRustLibrary,
+    HostRustProgram,
+    HostSources,
+    IPDLCollection,
+    JARManifest,
+    LocalInclude,
+    LocalizedFiles,
+    LocalizedPreprocessedFiles,
+    Program,
+    RustLibrary,
+    RustProgram,
+    SharedLibrary,
+    SimpleProgram,
+    Sources,
+    StaticLibrary,
+    TestHarnessFiles,
+    TestManifest,
+    UnifiedSources,
+    VariablePassthru,
+    WasmSources,
+)
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import (
+    BuildReader,
+    BuildReaderError,
+    SandboxValidationError,
+)
+
+from mozbuild.test.common import MockConfig
+
+import mozpack.path as mozpath
+
+
+data_path = mozpath.abspath(mozpath.dirname(__file__))
+data_path = mozpath.join(data_path, "data")
+
+
+class TestEmitterBasic(unittest.TestCase):
+    def setUp(self):
+        # Snapshot the environment and drop MOZ_OBJDIR so tests are hermetic.
+        self._old_env = dict(os.environ)
+        os.environ.pop("MOZ_OBJDIR", None)
+
+    def tearDown(self):
+        # Restore the environment snapshot taken in setUp().
+        os.environ.clear()
+        os.environ.update(self._old_env)
+
+    def reader(self, name, enable_tests=False, extra_substs=None):
+        """Build a BuildReader over the test data directory *name*.
+
+        A fixed set of substs simulates a Windows compile environment;
+        *extra_substs* entries override/extend them.
+        """
+        substs = dict(
+            ENABLE_TESTS="1" if enable_tests else "",
+            BIN_SUFFIX=".prog",
+            HOST_BIN_SUFFIX=".hostprog",
+            OS_TARGET="WINNT",
+            COMPILE_ENVIRONMENT="1",
+            STL_FLAGS=["-I/path/to/topobjdir/dist/stl_wrappers"],
+            VISIBILITY_FLAGS=["-include", "$(topsrcdir)/config/gcc_hidden.h"],
+            OBJ_SUFFIX="obj",
+            WASM_OBJ_SUFFIX="wasm",
+            WASM_CFLAGS=["-foo"],
+        )
+        if extra_substs:
+            substs.update(extra_substs)
+        config = MockConfig(mozpath.join(data_path, name), extra_substs=substs)
+
+        return BuildReader(config)
+
+    def read_topsrcdir(self, reader, filter_common=True):
+        """Emit all objects for *reader*'s tree and return them as a list.
+
+        With *filter_common* (the default), DirectoryTraversal objects are
+        dropped so tests see only the interesting emissions.
+        """
+        emitter = TreeMetadataEmitter(reader.config)
+        objs = list(emitter.emit(reader.read_topsrcdir()))
+        self.assertGreater(len(objs), 0)
+
+        filtered = []
+        for obj in objs:
+            if filter_common and isinstance(obj, DirectoryTraversal):
+                continue
+
+            filtered.append(obj)
+
+        return filtered
+
+    def test_dirs_traversal_simple(self):
+        """DIRS yields one DirectoryTraversal per directory, in declaration order."""
+        reader = self.reader("traversal-simple")
+        objs = self.read_topsrcdir(reader, filter_common=False)
+        self.assertEqual(len(objs), 4)
+
+        for o in objs:
+            self.assertIsInstance(o, DirectoryTraversal)
+            self.assertTrue(os.path.isabs(o.context_main_path))
+            self.assertEqual(len(o.context_all_paths), 1)
+
+        reldirs = [o.relsrcdir for o in objs]
+        self.assertEqual(reldirs, ["", "foo", "foo/biz", "bar"])
+
+        dirs = [[d.full_path for d in o.dirs] for o in objs]
+        self.assertEqual(
+            dirs,
+            [
+                [
+                    mozpath.join(reader.config.topsrcdir, "foo"),
+                    mozpath.join(reader.config.topsrcdir, "bar"),
+                ],
+                [mozpath.join(reader.config.topsrcdir, "foo", "biz")],
+                [],
+                [],
+            ],
+        )
+
+    def test_traversal_all_vars(self):
+        """With tests disabled, only the regular DIRS entry is traversed."""
+        reader = self.reader("traversal-all-vars")
+        objs = self.read_topsrcdir(reader, filter_common=False)
+        self.assertEqual(len(objs), 2)
+
+        for o in objs:
+            self.assertIsInstance(o, DirectoryTraversal)
+
+        reldirs = set([o.relsrcdir for o in objs])
+        self.assertEqual(reldirs, set(["", "regular"]))
+
+        for o in objs:
+            reldir = o.relsrcdir
+
+            if reldir == "":
+                self.assertEqual(
+                    [d.full_path for d in o.dirs],
+                    [mozpath.join(reader.config.topsrcdir, "regular")],
+                )
+
+    def test_traversal_all_vars_enable_tests(self):
+        """With ENABLE_TESTS set, TEST_DIRS are traversed in addition to DIRS."""
+        reader = self.reader("traversal-all-vars", enable_tests=True)
+        objs = self.read_topsrcdir(reader, filter_common=False)
+        self.assertEqual(len(objs), 3)
+
+        for o in objs:
+            self.assertIsInstance(o, DirectoryTraversal)
+
+        reldirs = set([o.relsrcdir for o in objs])
+        self.assertEqual(reldirs, set(["", "regular", "test"]))
+
+        for o in objs:
+            reldir = o.relsrcdir
+
+            if reldir == "":
+                self.assertEqual(
+                    [d.full_path for d in o.dirs],
+                    [
+                        mozpath.join(reader.config.topsrcdir, "regular"),
+                        mozpath.join(reader.config.topsrcdir, "test"),
+                    ],
+                )
+
+    def test_config_file_substitution(self):
+        """CONFIGURE_SUBST_FILES emit ConfigFileSubstitution objects with objdir outputs."""
+        reader = self.reader("config-file-substitution")
+        objs = self.read_topsrcdir(reader)
+        self.assertEqual(len(objs), 2)
+
+        self.assertIsInstance(objs[0], ConfigFileSubstitution)
+        self.assertIsInstance(objs[1], ConfigFileSubstitution)
+
+        topobjdir = mozpath.abspath(reader.config.topobjdir)
+        self.assertEqual(objs[0].relpath, "foo")
+        self.assertEqual(
+            mozpath.normpath(objs[0].output_path),
+            mozpath.normpath(mozpath.join(topobjdir, "foo")),
+        )
+        self.assertEqual(
+            mozpath.normpath(objs[1].output_path),
+            mozpath.normpath(mozpath.join(topobjdir, "bar")),
+        )
+
+    def test_variable_passthru(self):
+        """Passthru moz.build variables land verbatim on a VariablePassthru object."""
+        reader = self.reader("variable-passthru")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], VariablePassthru)
+
+        wanted = {
+            "NO_DIST_INSTALL": True,
+            "RCFILE": "foo.rc",
+            "RCINCLUDE": "bar.rc",
+            "WIN32_EXE_LDFLAGS": ["-subsystem:console"],
+        }
+
+        variables = objs[0].variables
+        # Temporarily disable the diff limit so a mismatch prints in full.
+        maxDiff = self.maxDiff
+        self.maxDiff = None
+        self.assertEqual(wanted, variables)
+        self.maxDiff = maxDiff
+
+    def test_compile_flags(self):
+        """STL/VISIBILITY/WARNINGS_AS_ERRORS substs and moz.build C(XX)FLAGS appear in ComputedFlags."""
+        reader = self.reader(
+            "compile-flags", extra_substs={"WARNINGS_AS_ERRORS": "-Werror"}
+        )
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["STL"], reader.config.substs["STL_FLAGS"])
+        self.assertEqual(
+            flags.flags["VISIBILITY"], reader.config.substs["VISIBILITY_FLAGS"]
+        )
+        self.assertEqual(flags.flags["WARNINGS_AS_ERRORS"], ["-Werror"])
+        self.assertEqual(flags.flags["MOZBUILD_CFLAGS"], ["-Wall", "-funroll-loops"])
+        self.assertEqual(flags.flags["MOZBUILD_CXXFLAGS"], ["-funroll-loops", "-Wall"])
+
+    def test_asflags(self):
+        """ASFLAGS substs go to the OS bucket, moz.build ASFLAGS to MOZBUILD."""
+        reader = self.reader("asflags", extra_substs={"ASFLAGS": ["-safeseh"]})
+        as_sources, sources, ldflags, lib, flags, asflags = self.read_topsrcdir(reader)
+        self.assertIsInstance(asflags, ComputedFlags)
+        self.assertEqual(asflags.flags["OS"], reader.config.substs["ASFLAGS"])
+        self.assertEqual(asflags.flags["MOZBUILD"], ["-no-integrated-as"])
+
+    def test_debug_flags(self):
+        """MOZ_DEBUG_FLAGS populate DEBUG flags when MOZ_DEBUG_SYMBOLS is set."""
+        reader = self.reader(
+            "compile-flags",
+            extra_substs={"MOZ_DEBUG_FLAGS": "-g", "MOZ_DEBUG_SYMBOLS": "1"},
+        )
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["DEBUG"], ["-g"])
+
+    def test_disable_debug_flags(self):
+        """DEBUG flags are empty when MOZ_DEBUG_SYMBOLS is unset."""
+        reader = self.reader(
+            "compile-flags",
+            extra_substs={"MOZ_DEBUG_FLAGS": "-g", "MOZ_DEBUG_SYMBOLS": ""},
+        )
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["DEBUG"], [])
+
+    def test_link_flags(self):
+        """OS_LDFLAGS and moz.build LDFLAGS land in ComputedFlags; OPTIMIZE is empty when MOZ_OPTIMIZE is unset."""
+        reader = self.reader(
+            "link-flags",
+            extra_substs={
+                "OS_LDFLAGS": ["-Wl,rpath-link=/usr/lib"],
+                "MOZ_OPTIMIZE": "",
+                "MOZ_OPTIMIZE_LDFLAGS": ["-Wl,-dead_strip"],
+                "MOZ_DEBUG_LDFLAGS": ["-framework ExceptionHandling"],
+            },
+        )
+        sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertEqual(ldflags.flags["OS"], reader.config.substs["OS_LDFLAGS"])
+        self.assertEqual(
+            ldflags.flags["MOZBUILD"], ["-Wl,-U_foo", "-framework Foo", "-x"]
+        )
+        self.assertEqual(ldflags.flags["OPTIMIZE"], [])
+
+    def test_debug_ldflags(self):
+        """MOZ_DEBUG_LDFLAGS feed the OS linker flags when symbols are enabled."""
+        reader = self.reader(
+            "link-flags",
+            extra_substs={
+                "MOZ_DEBUG_SYMBOLS": "1",
+                "MOZ_DEBUG_LDFLAGS": ["-framework ExceptionHandling"],
+            },
+        )
+        sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertEqual(ldflags.flags["OS"], reader.config.substs["MOZ_DEBUG_LDFLAGS"])
+
+    def test_windows_opt_link_flags(self):
+        """MSVC optimized builds get -DEBUG and -OPT:REF,ICF linker flags."""
+        reader = self.reader(
+            "link-flags",
+            extra_substs={
+                "OS_ARCH": "WINNT",
+                "GNU_CC": "",
+                "MOZ_OPTIMIZE": "1",
+                "MOZ_DEBUG_LDFLAGS": ["-DEBUG"],
+                "MOZ_DEBUG_SYMBOLS": "1",
+                "MOZ_OPTIMIZE_FLAGS": [],
+                "MOZ_OPTIMIZE_LDFLAGS": [],
+            },
+        )
+        sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIn("-DEBUG", ldflags.flags["OS"])
+        self.assertIn("-OPT:REF,ICF", ldflags.flags["OS"])
+
+    def test_windows_dmd_link_flags(self):
+        """MSVC + MOZ_DMD builds produce exactly -DEBUG and -OPT:REF,ICF OS linker flags."""
+        reader = self.reader(
+            "link-flags",
+            extra_substs={
+                "OS_ARCH": "WINNT",
+                "GNU_CC": "",
+                "MOZ_DMD": "1",
+                "MOZ_DEBUG_LDFLAGS": ["-DEBUG"],
+                "MOZ_DEBUG_SYMBOLS": "1",
+                "MOZ_OPTIMIZE": "1",
+                "MOZ_OPTIMIZE_FLAGS": [],
+            },
+        )
+        sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertEqual(ldflags.flags["OS"], ["-DEBUG", "-OPT:REF,ICF"])
+
+    def test_host_compile_flags(self):
+        """HOST_CFLAGS/HOST_CXXFLAGS substs, host defines, and moz.build HOST_CFLAGS flow into ComputedFlags."""
+        reader = self.reader(
+            "host-compile-flags",
+            extra_substs={
+                "HOST_CXXFLAGS": ["-Wall", "-Werror"],
+                "HOST_CFLAGS": ["-Werror", "-Wall"],
+            },
+        )
+        sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(
+            flags.flags["HOST_CXXFLAGS"], reader.config.substs["HOST_CXXFLAGS"]
+        )
+        self.assertEqual(
+            flags.flags["HOST_CFLAGS"], reader.config.substs["HOST_CFLAGS"]
+        )
+        self.assertEqual(
+            set(flags.flags["HOST_DEFINES"]),
+            set(["-DFOO", '-DBAZ="abcd"', "-UQUX", "-DBAR=7", "-DVALUE=xyz"]),
+        )
+        self.assertEqual(
+            flags.flags["MOZBUILD_HOST_CFLAGS"], ["-funroll-loops", "-host-arg"]
+        )
+        self.assertEqual(flags.flags["MOZBUILD_HOST_CXXFLAGS"], [])
+
+    def test_host_no_optimize_flags(self):
+        """HOST_OPTIMIZE is empty when MOZ_OPTIMIZE is unset."""
+        reader = self.reader(
+            "host-compile-flags",
+            extra_substs={"MOZ_OPTIMIZE": "", "MOZ_OPTIMIZE_FLAGS": ["-O2"]},
+        )
+        sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["HOST_OPTIMIZE"], [])
+
+    def test_host_optimize_flags(self):
+        """Non-cross builds reuse MOZ_OPTIMIZE_FLAGS for the host."""
+        reader = self.reader(
+            "host-compile-flags",
+            extra_substs={"MOZ_OPTIMIZE": "1", "MOZ_OPTIMIZE_FLAGS": ["-O2"]},
+        )
+        sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["HOST_OPTIMIZE"], ["-O2"])
+
+    def test_cross_optimize_flags(self):
+        """Cross builds prefer HOST_OPTIMIZE_FLAGS over MOZ_OPTIMIZE_FLAGS."""
+        reader = self.reader(
+            "host-compile-flags",
+            extra_substs={
+                "MOZ_OPTIMIZE": "1",
+                "MOZ_OPTIMIZE_FLAGS": ["-O2"],
+                "HOST_OPTIMIZE_FLAGS": ["-O3"],
+                "CROSS_COMPILE": "1",
+            },
+        )
+        sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["HOST_OPTIMIZE"], ["-O3"])
+
+    def test_host_rtl_flag(self):
+        """Windows debug builds select the -MDd runtime library flag."""
+        reader = self.reader(
+            "host-compile-flags", extra_substs={"OS_ARCH": "WINNT", "MOZ_DEBUG": "1"}
+        )
+        sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["RTL"], ["-MDd"])
+
+    def test_compile_flags_validation(self):
+        """Invalid COMPILE_FLAGS fields or value types raise BuildReaderError."""
+        reader = self.reader("compile-flags-field-validation")
+
+        with six.assertRaisesRegex(self, BuildReaderError, "Invalid value."):
+            self.read_topsrcdir(reader)
+
+        reader = self.reader("compile-flags-type-validation")
+        with six.assertRaisesRegex(
+            self, BuildReaderError, "A list of strings must be provided"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_compile_flags_templates(self):
+        """Library CFLAGS substs aggregate into OS_INCLUDES; STL/VISIBILITY are cleared by the template."""
+        reader = self.reader(
+            "compile-flags-templates",
+            extra_substs={
+                "NSPR_CFLAGS": ["-I/nspr/path"],
+                "NSS_CFLAGS": ["-I/nss/path"],
+                "MOZ_JPEG_CFLAGS": ["-I/jpeg/path"],
+                "MOZ_PNG_CFLAGS": ["-I/png/path"],
+                "MOZ_ZLIB_CFLAGS": ["-I/zlib/path"],
+                "MOZ_PIXMAN_CFLAGS": ["-I/pixman/path"],
+            },
+        )
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["STL"], [])
+        self.assertEqual(flags.flags["VISIBILITY"], [])
+        self.assertEqual(
+            flags.flags["OS_INCLUDES"],
+            [
+                "-I/nspr/path",
+                "-I/nss/path",
+                "-I/jpeg/path",
+                "-I/png/path",
+                "-I/zlib/path",
+                "-I/pixman/path",
+            ],
+        )
+
+    def test_disable_stl_wrapping(self):
+        reader = self.reader("disable-stl-wrapping")
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["STL"], [])
+
+    def test_visibility_flags(self):
+        reader = self.reader("visibility-flags")
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["VISIBILITY"], [])
+
+    def test_defines_in_flags(self):
+        reader = self.reader("compile-defines")
+        defines, sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(
+            flags.flags["LIBRARY_DEFINES"], ["-DMOZ_LIBRARY_DEFINE=MOZ_TEST"]
+        )
+        self.assertEqual(flags.flags["DEFINES"], ["-DMOZ_TEST_DEFINE"])
+
+    def test_resolved_flags_error(self):
+        reader = self.reader("resolved-flags-error")
+        with six.assertRaisesRegex(
+            self,
+            BuildReaderError,
+            "`DEFINES` may not be set in COMPILE_FLAGS from moz.build",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_includes_in_flags(self):
+        reader = self.reader("compile-includes")
+        defines, sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(
+            flags.flags["BASE_INCLUDES"],
+            ["-I%s" % reader.config.topsrcdir, "-I%s" % reader.config.topobjdir],
+        )
+        self.assertEqual(
+            flags.flags["EXTRA_INCLUDES"],
+            ["-I%s/dist/include" % reader.config.topobjdir],
+        )
+        self.assertEqual(
+            flags.flags["LOCAL_INCLUDES"], ["-I%s/subdir" % reader.config.topsrcdir]
+        )
+
+    def test_allow_compiler_warnings(self):
+        reader = self.reader(
+            "allow-compiler-warnings", extra_substs={"WARNINGS_AS_ERRORS": "-Werror"}
+        )
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertEqual(flags.flags["WARNINGS_AS_ERRORS"], [])
+
+    def test_disable_compiler_warnings(self):
+        reader = self.reader(
+            "disable-compiler-warnings", extra_substs={"WARNINGS_CFLAGS": "-Wall"}
+        )
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertEqual(flags.flags["WARNINGS_CFLAGS"], [])
+
+    def test_use_nasm(self):
+        # When nasm is not available, this should raise.
+        reader = self.reader("use-nasm")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "nasm is not available"
+        ):
+            self.read_topsrcdir(reader)
+
+        # When nasm is available, this should work.
+        reader = self.reader(
+            "use-nasm", extra_substs=dict(NASM="nasm", NASM_ASFLAGS="-foo")
+        )
+
+        sources, passthru, ldflags, lib, flags, asflags = self.read_topsrcdir(reader)
+
+        self.assertIsInstance(passthru, VariablePassthru)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertIsInstance(asflags, ComputedFlags)
+
+        self.assertEqual(asflags.flags["OS"], reader.config.substs["NASM_ASFLAGS"])
+
+        maxDiff = self.maxDiff
+        self.maxDiff = None
+        self.assertEqual(
+            passthru.variables,
+            {"AS": "nasm", "AS_DASH_C_FLAG": "", "ASOUTOPTION": "-o "},
+        )
+        self.maxDiff = maxDiff
+
+    def test_generated_files(self):
+        reader = self.reader("generated-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 3)
+        for o in objs:
+            self.assertIsInstance(o, GeneratedFile)
+            self.assertFalse(o.localized)
+            self.assertFalse(o.force)
+
+        expected = ["bar.c", "foo.c", ("xpidllex.py", "xpidlyacc.py")]
+        for o, f in zip(objs, expected):
+            expected_filename = f if isinstance(f, tuple) else (f,)
+            self.assertEqual(o.outputs, expected_filename)
+            self.assertEqual(o.script, None)
+            self.assertEqual(o.method, None)
+            self.assertEqual(o.inputs, [])
+
+    def test_generated_files_force(self):
+        reader = self.reader("generated-files-force")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 3)
+        for o in objs:
+            self.assertIsInstance(o, GeneratedFile)
+            self.assertEqual(o.force, "bar.c" in o.outputs)
+
+    def test_localized_generated_files(self):
+        reader = self.reader("localized-generated-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 2)
+        for o in objs:
+            self.assertIsInstance(o, GeneratedFile)
+            self.assertTrue(o.localized)
+
+        expected = ["abc.ini", ("bar", "baz")]
+        for o, f in zip(objs, expected):
+            expected_filename = f if isinstance(f, tuple) else (f,)
+            self.assertEqual(o.outputs, expected_filename)
+            self.assertEqual(o.script, None)
+            self.assertEqual(o.method, None)
+            self.assertEqual(o.inputs, [])
+
+    def test_localized_generated_files_force(self):
+        reader = self.reader("localized-generated-files-force")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 2)
+        for o in objs:
+            self.assertIsInstance(o, GeneratedFile)
+            self.assertTrue(o.localized)
+            self.assertEqual(o.force, "abc.ini" in o.outputs)
+
+    def test_localized_files_from_generated(self):
+        """Test that using LOCALIZED_GENERATED_FILES and then putting the output in
+        LOCALIZED_FILES as an objdir path works.
+        """
+        reader = self.reader("localized-files-from-generated")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 2)
+        self.assertIsInstance(objs[0], GeneratedFile)
+        self.assertIsInstance(objs[1], LocalizedFiles)
+
+    def test_localized_files_not_localized_generated(self):
+        """Test that using GENERATED_FILES and then putting the output in
+        LOCALIZED_FILES as an objdir path produces an error.
+        """
+        reader = self.reader("localized-files-not-localized-generated")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Objdir file listed in LOCALIZED_FILES not in LOCALIZED_GENERATED_FILES:",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_localized_generated_files_final_target_files(self):
+        """Test that using LOCALIZED_GENERATED_FILES and then putting the output in
+        FINAL_TARGET_FILES as an objdir path produces an error.
+        """
+        reader = self.reader("localized-generated-files-final-target-files")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Outputs of LOCALIZED_GENERATED_FILES cannot be used in FINAL_TARGET_FILES:",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_generated_files_method_names(self):
+        reader = self.reader("generated-files-method-names")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 2)
+        for o in objs:
+            self.assertIsInstance(o, GeneratedFile)
+
+        expected = ["bar.c", "foo.c"]
+        expected_method_names = ["make_bar", "main"]
+        for o, expected_filename, expected_method in zip(
+            objs, expected, expected_method_names
+        ):
+            self.assertEqual(o.outputs, (expected_filename,))
+            self.assertEqual(o.method, expected_method)
+            self.assertEqual(o.inputs, [])
+
+    def test_generated_files_absolute_script(self):
+        reader = self.reader("generated-files-absolute-script")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+
+        o = objs[0]
+        self.assertIsInstance(o, GeneratedFile)
+        self.assertEqual(o.outputs, ("bar.c",))
+        self.assertRegex(o.script, "script.py$")
+        self.assertEqual(o.method, "make_bar")
+        self.assertEqual(o.inputs, [])
+
+    def test_generated_files_no_script(self):
+        reader = self.reader("generated-files-no-script")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Script for generating bar.c does not exist"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_generated_files_no_inputs(self):
+        reader = self.reader("generated-files-no-inputs")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Input for generating foo.c does not exist"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_generated_files_no_python_script(self):
+        reader = self.reader("generated-files-no-python-script")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Script for generating bar.c does not end in .py",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_exports(self):
+        reader = self.reader("exports")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], Exports)
+
+        expected = [
+            ("", ["foo.h", "bar.h", "baz.h"]),
+            ("mozilla", ["mozilla1.h", "mozilla2.h"]),
+            ("mozilla/dom", ["dom1.h", "dom2.h", "dom3.h"]),
+            ("mozilla/gfx", ["gfx.h"]),
+            ("nspr/private", ["pprio.h", "pprthred.h"]),
+            ("vpx", ["mem.h", "mem2.h"]),
+        ]
+        for (expect_path, expect_headers), (actual_path, actual_headers) in zip(
+            expected, [(path, list(seq)) for path, seq in objs[0].files.walk()]
+        ):
+            self.assertEqual(expect_path, actual_path)
+            self.assertEqual(expect_headers, actual_headers)
+
+    def test_exports_missing(self):
+        """
+        Missing files in EXPORTS is an error.
+        """
+        reader = self.reader("exports-missing")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "File listed in EXPORTS does not exist:"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_exports_missing_generated(self):
+        """
+        An objdir file in EXPORTS that is not in GENERATED_FILES is an error.
+        """
+        reader = self.reader("exports-missing-generated")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Objdir file listed in EXPORTS not in GENERATED_FILES:",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_exports_generated(self):
+        reader = self.reader("exports-generated")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 2)
+        self.assertIsInstance(objs[0], GeneratedFile)
+        self.assertIsInstance(objs[1], Exports)
+        exports = [(path, list(seq)) for path, seq in objs[1].files.walk()]
+        self.assertEqual(
+            exports, [("", ["foo.h"]), ("mozilla", ["mozilla1.h", "!mozilla2.h"])]
+        )
+        path, files = exports[1]
+        self.assertIsInstance(files[1], ObjDirPath)
+
+    def test_test_harness_files(self):
+        reader = self.reader("test-harness-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], TestHarnessFiles)
+
+        expected = {
+            "mochitest": ["runtests.py", "utils.py"],
+            "testing/mochitest": ["mochitest.py", "mochitest.ini"],
+        }
+
+        for path, strings in objs[0].files.walk():
+            self.assertTrue(path in expected)
+            basenames = sorted(mozpath.basename(s) for s in strings)
+            self.assertEqual(sorted(expected[path]), basenames)
+
+    def test_test_harness_files_root(self):
+        reader = self.reader("test-harness-files-root")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Cannot install files to the root of TEST_HARNESS_FILES",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_program(self):
+        reader = self.reader("program")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 6)
+        self.assertIsInstance(objs[0], Sources)
+        self.assertIsInstance(objs[1], ComputedFlags)
+        self.assertIsInstance(objs[2], ComputedFlags)
+        self.assertIsInstance(objs[3], Program)
+        self.assertIsInstance(objs[4], SimpleProgram)
+        self.assertIsInstance(objs[5], SimpleProgram)
+
+        self.assertEqual(objs[3].program, "test_program.prog")
+        self.assertEqual(objs[4].program, "test_program1.prog")
+        self.assertEqual(objs[5].program, "test_program2.prog")
+
+        self.assertEqual(objs[3].name, "test_program.prog")
+        self.assertEqual(objs[4].name, "test_program1.prog")
+        self.assertEqual(objs[5].name, "test_program2.prog")
+
+        self.assertEqual(
+            objs[4].objs,
+            [
+                mozpath.join(
+                    reader.config.topobjdir,
+                    "test_program1.%s" % reader.config.substs["OBJ_SUFFIX"],
+                )
+            ],
+        )
+        self.assertEqual(
+            objs[5].objs,
+            [
+                mozpath.join(
+                    reader.config.topobjdir,
+                    "test_program2.%s" % reader.config.substs["OBJ_SUFFIX"],
+                )
+            ],
+        )
+
+    def test_program_paths(self):
+        """Various moz.build settings that change the destination of PROGRAM should be
+        accurately reflected in Program.output_path."""
+        reader = self.reader("program-paths")
+        objs = self.read_topsrcdir(reader)
+        prog_paths = [o.output_path for o in objs if isinstance(o, Program)]
+        self.assertEqual(
+            prog_paths,
+            [
+                "!/dist/bin/dist-bin.prog",
+                "!/dist/bin/foo/dist-subdir.prog",
+                "!/final/target/final-target.prog",
+                "!not-installed.prog",
+            ],
+        )
+
+    def test_host_program_paths(self):
+        """The destination of a HOST_PROGRAM (almost always dist/host/bin)
+        should be accurately reflected in Program.output_path."""
+        reader = self.reader("host-program-paths")
+        objs = self.read_topsrcdir(reader)
+        prog_paths = [o.output_path for o in objs if isinstance(o, HostProgram)]
+        self.assertEqual(
+            prog_paths,
+            [
+                "!/dist/host/bin/final-target.hostprog",
+                "!/dist/host/bin/dist-host-bin.hostprog",
+                "!not-installed.hostprog",
+            ],
+        )
+
+    def test_test_manifest_missing_manifest(self):
+        """A missing manifest file should result in an error."""
+        reader = self.reader("test-manifest-missing-manifest")
+
+        with six.assertRaisesRegex(self, BuildReaderError, "Missing files"):
+            self.read_topsrcdir(reader)
+
+    def test_empty_test_manifest_rejected(self):
+        """A test manifest without any entries is rejected."""
+        reader = self.reader("test-manifest-empty")
+
+        with six.assertRaisesRegex(self, SandboxValidationError, "Empty test manifest"):
+            self.read_topsrcdir(reader)
+
+    def test_test_manifest_just_support_files(self):
+        """A test manifest with no tests but support-files is not supported."""
+        reader = self.reader("test-manifest-just-support")
+
+        with six.assertRaisesRegex(self, SandboxValidationError, "Empty test manifest"):
+            self.read_topsrcdir(reader)
+
+    def test_test_manifest_dupe_support_files(self):
+        """A test manifest with dupe support-files in a single test is not
+        supported.
+        """
+        reader = self.reader("test-manifest-dupes")
+
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "bar.js appears multiple times "
+            "in a test manifest under a support-files field, please omit the duplicate entry.",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_test_manifest_absolute_support_files(self):
+        """Support files starting with '/' are placed relative to the install root"""
+        reader = self.reader("test-manifest-absolute-support")
+
+        objs = self.read_topsrcdir(reader)
+        self.assertEqual(len(objs), 1)
+        o = objs[0]
+        self.assertEqual(len(o.installs), 3)
+        expected = [
+            mozpath.normpath(mozpath.join(o.install_prefix, "../.well-known/foo.txt")),
+            mozpath.join(o.install_prefix, "absolute-support.ini"),
+            mozpath.join(o.install_prefix, "test_file.js"),
+        ]
+        paths = sorted([v[0] for v in o.installs.values()])
+        self.assertEqual(paths, expected)
+
+    @unittest.skip("Bug 1304316 - Items in the second set but not the first")
+    def test_test_manifest_shared_support_files(self):
+        """Support files starting with '!' are given separate treatment, so their
+        installation can be resolved when running tests.
+        """
+        reader = self.reader("test-manifest-shared-support")
+        supported, child = self.read_topsrcdir(reader)
+
+        expected_deferred_installs = {
+            "!/child/test_sub.js",
+            "!/child/another-file.sjs",
+            "!/child/data/**",
+        }
+
+        self.assertEqual(len(supported.installs), 3)
+        self.assertEqual(set(supported.deferred_installs), expected_deferred_installs)
+        self.assertEqual(len(child.installs), 3)
+        self.assertEqual(len(child.pattern_installs), 1)
+
+    def test_test_manifest_deffered_install_missing(self):
+        """A non-existent shared support file reference produces an error."""
+        reader = self.reader("test-manifest-shared-missing")
+
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "entry in support-files not present in the srcdir",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_test_manifest_install_includes(self):
+        """Ensure that any [include:foo.ini] are copied to the objdir."""
+        reader = self.reader("test-manifest-install-includes")
+
+        objs = self.read_topsrcdir(reader)
+        self.assertEqual(len(objs), 1)
+        o = objs[0]
+        self.assertEqual(len(o.installs), 3)
+        self.assertEqual(o.manifest_relpath, "mochitest.ini")
+        self.assertEqual(o.manifest_obj_relpath, "mochitest.ini")
+        expected = [
+            mozpath.normpath(mozpath.join(o.install_prefix, "common.ini")),
+            mozpath.normpath(mozpath.join(o.install_prefix, "mochitest.ini")),
+            mozpath.normpath(mozpath.join(o.install_prefix, "test_foo.html")),
+        ]
+        paths = sorted([v[0] for v in o.installs.values()])
+        self.assertEqual(paths, expected)
+
+    def test_test_manifest_includes(self):
+        """Ensure that manifest objects from the emitter list a correct manifest."""
+        reader = self.reader("test-manifest-emitted-includes")
+        [obj] = self.read_topsrcdir(reader)
+
+        # Expected manifest leafs for our tests.
+        expected_manifests = {
+            "reftest1.html": "reftest.list",
+            "reftest1-ref.html": "reftest.list",
+            "reftest2.html": "included-reftest.list",
+            "reftest2-ref.html": "included-reftest.list",
+        }
+
+        for t in obj.tests:
+            self.assertTrue(t["manifest"].endswith(expected_manifests[t["name"]]))
+
+    def test_test_manifest_keys_extracted(self):
+        """Ensure all metadata from test manifests is extracted."""
+        reader = self.reader("test-manifest-keys-extracted")
+
+        objs = [o for o in self.read_topsrcdir(reader) if isinstance(o, TestManifest)]
+
+        self.assertEqual(len(objs), 8)
+
+        metadata = {
+            "a11y.ini": {
+                "flavor": "a11y",
+                "installs": {"a11y.ini": False, "test_a11y.js": True},
+                "pattern-installs": 1,
+            },
+            "browser.ini": {
+                "flavor": "browser-chrome",
+                "installs": {
+                    "browser.ini": False,
+                    "test_browser.js": True,
+                    "support1": False,
+                    "support2": False,
+                },
+            },
+            "mochitest.ini": {
+                "flavor": "mochitest",
+                "installs": {"mochitest.ini": False, "test_mochitest.js": True},
+                "external": {"external1", "external2"},
+            },
+            "chrome.ini": {
+                "flavor": "chrome",
+                "installs": {"chrome.ini": False, "test_chrome.js": True},
+            },
+            "xpcshell.ini": {
+                "flavor": "xpcshell",
+                "dupe": True,
+                "installs": {
+                    "xpcshell.ini": False,
+                    "test_xpcshell.js": True,
+                    "head1": False,
+                    "head2": False,
+                },
+            },
+            "reftest.list": {"flavor": "reftest", "installs": {}},
+            "crashtest.list": {"flavor": "crashtest", "installs": {}},
+            "python.ini": {"flavor": "python", "installs": {"python.ini": False}},
+        }
+
+        for o in objs:
+            m = metadata[mozpath.basename(o.manifest_relpath)]
+
+            self.assertTrue(o.path.startswith(o.directory))
+            self.assertEqual(o.flavor, m["flavor"])
+            self.assertEqual(o.dupe_manifest, m.get("dupe", False))
+
+            external_normalized = set(mozpath.basename(p) for p in o.external_installs)
+            self.assertEqual(external_normalized, m.get("external", set()))
+
+            self.assertEqual(len(o.installs), len(m["installs"]))
+            for path in o.installs.keys():
+                self.assertTrue(path.startswith(o.directory))
+                relpath = path[len(o.directory) + 1 :]
+
+                self.assertIn(relpath, m["installs"])
+                self.assertEqual(o.installs[path][1], m["installs"][relpath])
+
+            if "pattern-installs" in m:
+                self.assertEqual(len(o.pattern_installs), m["pattern-installs"])
+
+    def test_test_manifest_unmatched_generated(self):
+        reader = self.reader("test-manifest-unmatched-generated")
+
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "entry in generated-files not present elsewhere",
+        ):
+            self.read_topsrcdir(reader),
+
+    def test_test_manifest_parent_support_files_dir(self):
+        """support-files referencing a file in a parent directory works."""
+        reader = self.reader("test-manifest-parent-support-files-dir")
+
+        objs = [o for o in self.read_topsrcdir(reader) if isinstance(o, TestManifest)]
+
+        self.assertEqual(len(objs), 1)
+
+        o = objs[0]
+
+        expected = mozpath.join(o.srcdir, "support-file.txt")
+        self.assertIn(expected, o.installs)
+        self.assertEqual(
+            o.installs[expected],
+            ("testing/mochitest/tests/child/support-file.txt", False),
+        )
+
+    def test_test_manifest_missing_test_error(self):
+        """Missing test files should result in error."""
+        reader = self.reader("test-manifest-missing-test-file")
+
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "lists test that does not exist: test_missing.html",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_test_manifest_missing_test_error_unfiltered(self):
+        """Missing test files should result in error, even when the test list is not filtered."""
+        reader = self.reader("test-manifest-missing-test-file-unfiltered")
+
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "lists test that does not exist: missing.js"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_ipdl_sources(self):
+        reader = self.reader(
+            "ipdl_sources",
+            extra_substs={"IPDL_ROOT": mozpath.abspath("/path/to/topobjdir")},
+        )
+        objs = self.read_topsrcdir(reader)
+        ipdl_collection = objs[0]
+        self.assertIsInstance(ipdl_collection, IPDLCollection)
+
+        ipdls = set(
+            mozpath.relpath(p, ipdl_collection.topsrcdir)
+            for p in ipdl_collection.all_regular_sources()
+        )
+        expected = set(
+            ["bar/bar.ipdl", "bar/bar2.ipdlh", "foo/foo.ipdl", "foo/foo2.ipdlh"]
+        )
+
+        self.assertEqual(ipdls, expected)
+
+        pp_ipdls = set(
+            mozpath.relpath(p, ipdl_collection.topsrcdir)
+            for p in ipdl_collection.all_preprocessed_sources()
+        )
+        expected = set(["bar/bar1.ipdl", "foo/foo1.ipdl"])
+        self.assertEqual(pp_ipdls, expected)
+
+    def test_local_includes(self):
+        """Test that LOCAL_INCLUDES is emitted correctly."""
+        reader = self.reader("local_includes")
+        objs = self.read_topsrcdir(reader)
+
+        local_includes = [o.path for o in objs if isinstance(o, LocalInclude)]
+        expected = ["/bar/baz", "foo"]
+
+        self.assertEqual(local_includes, expected)
+
+        local_includes = [o.path.full_path for o in objs if isinstance(o, LocalInclude)]
+        expected = [
+            mozpath.join(reader.config.topsrcdir, "bar/baz"),
+            mozpath.join(reader.config.topsrcdir, "foo"),
+        ]
+
+        self.assertEqual(local_includes, expected)
+
+    def test_local_includes_file(self):
+        """Test that a filename can't be used in LOCAL_INCLUDES."""
+        reader = self.reader("local_includes-filename")
+
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Path specified in LOCAL_INCLUDES is a filename",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_generated_includes(self):
+        """Test that GENERATED_INCLUDES is emitted correctly."""
+        reader = self.reader("generated_includes")
+        objs = self.read_topsrcdir(reader)
+
+        generated_includes = [o.path for o in objs if isinstance(o, LocalInclude)]
+        expected = ["!/bar/baz", "!foo"]
+
+        self.assertEqual(generated_includes, expected)
+
+        generated_includes = [
+            o.path.full_path for o in objs if isinstance(o, LocalInclude)
+        ]
+        expected = [
+            mozpath.join(reader.config.topobjdir, "bar/baz"),
+            mozpath.join(reader.config.topobjdir, "foo"),
+        ]
+
+        self.assertEqual(generated_includes, expected)
+
+    def test_defines(self):
+        reader = self.reader("defines")
+        objs = self.read_topsrcdir(reader)
+
+        defines = {}
+        for o in objs:
+            if isinstance(o, Defines):
+                defines = o.defines
+
+        expected = {
+            "BAR": 7,
+            "BAZ": '"abcd"',
+            "FOO": True,
+            "VALUE": "xyz",
+            "QUX": False,
+        }
+
+        self.assertEqual(defines, expected)
+
+    def test_jar_manifests(self):
+        reader = self.reader("jar-manifests")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        for obj in objs:
+            self.assertIsInstance(obj, JARManifest)
+            self.assertIsInstance(obj.path, Path)
+
+    def test_jar_manifests_multiple_files(self):
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "limited to one value"
+        ):
+            reader = self.reader("jar-manifests-multiple-files")
+            self.read_topsrcdir(reader)
+
+    def test_xpidl_module_no_sources(self):
+        """XPIDL_MODULE without XPIDL_SOURCES should be rejected."""
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "XPIDL_MODULE " "cannot be defined"
+        ):
+            reader = self.reader("xpidl-module-no-sources")
+            self.read_topsrcdir(reader)
+
+    def test_xpidl_module_missing_sources(self):
+        """Missing XPIDL_SOURCES should be rejected."""
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "File .* " "from XPIDL_SOURCES does not exist"
+        ):
+            reader = self.reader("missing-xpidl")
+            self.read_topsrcdir(reader)
+
+    def test_missing_local_includes(self):
+        """LOCAL_INCLUDES containing non-existent directories should be rejected."""
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Path specified in " "LOCAL_INCLUDES does not exist",
+        ):
+            reader = self.reader("missing-local-includes")
+            self.read_topsrcdir(reader)
+
+    def test_library_defines(self):
+        """Test that LIBRARY_DEFINES is propagated properly."""
+        reader = self.reader("library-defines")
+        objs = self.read_topsrcdir(reader)
+
+        libraries = [o for o in objs if isinstance(o, StaticLibrary)]
+        library_flags = [
+            o
+            for o in objs
+            if isinstance(o, ComputedFlags) and "LIBRARY_DEFINES" in o.flags
+        ]
+        expected = {
+            "liba": "-DIN_LIBA",
+            "libb": "-DIN_LIBB -DIN_LIBA",
+            "libc": "-DIN_LIBA -DIN_LIBB",
+            "libd": "",
+        }
+        defines = {}
+        for lib in libraries:
+            defines[lib.basename] = " ".join(lib.lib_defines.get_defines())
+        self.assertEqual(expected, defines)
+        defines_in_flags = {}
+        for flags in library_flags:
+            defines_in_flags[flags.relobjdir] = " ".join(
+                flags.flags["LIBRARY_DEFINES"] or []
+            )
+        self.assertEqual(expected, defines_in_flags)
+
+    def test_sources(self):
+        """Test that SOURCES works properly."""
+        reader = self.reader("sources")
+        objs = self.read_topsrcdir(reader)
+
+        # Pop trailing objects in reverse emission order: AS flags,
+        # computed compile flags, the Linkable, then linker flags.
+        as_flags = objs.pop()
+        self.assertIsInstance(as_flags, ComputedFlags)
+        computed_flags = objs.pop()
+        self.assertIsInstance(computed_flags, ComputedFlags)
+        # The third to last object is a Linkable.
+        linkable = objs.pop()
+        self.assertTrue(linkable.cxx_link)
+        ld_flags = objs.pop()
+        self.assertIsInstance(ld_flags, ComputedFlags)
+        self.assertEqual(len(objs), 6)
+        for o in objs:
+            self.assertIsInstance(o, Sources)
+
+        # Exactly one Sources object per canonical suffix.
+        suffix_map = {obj.canonical_suffix: obj for obj in objs}
+        self.assertEqual(len(suffix_map), 6)
+
+        expected = {
+            ".cpp": ["a.cpp", "b.cc", "c.cxx"],
+            ".c": ["d.c"],
+            ".m": ["e.m"],
+            ".mm": ["f.mm"],
+            ".S": ["g.S"],
+            ".s": ["h.s", "i.asm"],
+        }
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            self.assertEqual(
+                sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+            )
+
+            # Every source must contribute a matching object file to the
+            # Linkable (basename + configured OBJ_SUFFIX, in the objdir).
+            for f in files:
+                self.assertIn(
+                    mozpath.join(
+                        reader.config.topobjdir,
+                        "%s.%s"
+                        % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]),
+                    ),
+                    linkable.objs,
+                )
+
+    def test_sources_just_c(self):
+        """Test that a linkable with no C++ sources doesn't have cxx_link set."""
+        reader = self.reader("sources-just-c")
+        objs = self.read_topsrcdir(reader)
+
+        # Trailing objects: AS flags, then computed flags, then the Linkable.
+        as_flags = objs.pop()
+        self.assertIsInstance(as_flags, ComputedFlags)
+        flags = objs.pop()
+        self.assertIsInstance(flags, ComputedFlags)
+        # The third to last object is a Linkable.
+        linkable = objs.pop()
+        self.assertFalse(linkable.cxx_link)
+
+    def test_linkables_cxx_link(self):
+        """Test that linkables transitively set cxx_link properly."""
+        reader = self.reader("test-linkables-cxx-link")
+        # Count matched libraries so a fixture change that drops one of the
+        # expected SharedLibrary objects fails the final assertion.
+        got_results = 0
+        for obj in self.read_topsrcdir(reader):
+            if isinstance(obj, SharedLibrary):
+                if obj.basename == "cxx_shared":
+                    self.assertEqual(
+                        obj.name,
+                        "%scxx_shared%s"
+                        % (reader.config.dll_prefix, reader.config.dll_suffix),
+                    )
+                    self.assertTrue(obj.cxx_link)
+                    got_results += 1
+                elif obj.basename == "just_c_shared":
+                    self.assertEqual(
+                        obj.name,
+                        "%sjust_c_shared%s"
+                        % (reader.config.dll_prefix, reader.config.dll_suffix),
+                    )
+                    self.assertFalse(obj.cxx_link)
+                    got_results += 1
+        self.assertEqual(got_results, 2)
+
+    def test_generated_sources(self):
+        """Test that GENERATED_SOURCES works properly."""
+        reader = self.reader("generated-sources")
+        objs = self.read_topsrcdir(reader)
+
+        # Pop trailing objects: AS flags, computed flags, the Linkable,
+        # then another ComputedFlags (linker flags).
+        as_flags = objs.pop()
+        self.assertIsInstance(as_flags, ComputedFlags)
+        flags = objs.pop()
+        self.assertIsInstance(flags, ComputedFlags)
+        # The third to last object is a Linkable.
+        linkable = objs.pop()
+        self.assertTrue(linkable.cxx_link)
+        flags = objs.pop()
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(len(objs), 6)
+
+        # Generated sources are Sources objects with generated_files set.
+        generated_sources = [
+            o for o in objs if isinstance(o, Sources) and o.generated_files
+        ]
+        self.assertEqual(len(generated_sources), 6)
+
+        suffix_map = {obj.canonical_suffix: obj for obj in generated_sources}
+        self.assertEqual(len(suffix_map), 6)
+
+        expected = {
+            ".cpp": ["a.cpp", "b.cc", "c.cxx"],
+            ".c": ["d.c"],
+            ".m": ["e.m"],
+            ".mm": ["f.mm"],
+            ".S": ["g.S"],
+            ".s": ["h.s", "i.asm"],
+        }
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            # Unlike plain SOURCES, generated files live in the objdir.
+            self.assertEqual(
+                sources.generated_files,
+                [mozpath.join(reader.config.topobjdir, f) for f in files],
+            )
+
+            for f in files:
+                self.assertIn(
+                    mozpath.join(
+                        reader.config.topobjdir,
+                        "%s.%s"
+                        % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]),
+                    ),
+                    linkable.objs,
+                )
+
+    def test_host_sources(self):
+        """Test that HOST_SOURCES works properly."""
+        reader = self.reader("host-sources")
+        objs = self.read_topsrcdir(reader)
+
+        # This objdir will generate target flags.
+        flags = objs.pop()
+        self.assertIsInstance(flags, ComputedFlags)
+        # The second to last object is a Linkable
+        linkable = objs.pop()
+        self.assertTrue(linkable.cxx_link)
+        # This objdir will also generate host flags.
+        host_flags = objs.pop()
+        self.assertIsInstance(host_flags, ComputedFlags)
+        # ...and ldflags.
+        ldflags = objs.pop()
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertEqual(len(objs), 3)
+        for o in objs:
+            self.assertIsInstance(o, HostSources)
+
+        # One HostSources object per canonical suffix.
+        suffix_map = {obj.canonical_suffix: obj for obj in objs}
+        self.assertEqual(len(suffix_map), 3)
+
+        expected = {
+            ".cpp": ["a.cpp", "b.cc", "c.cxx"],
+            ".c": ["d.c"],
+            ".mm": ["e.mm", "f.mm"],
+        }
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            self.assertEqual(
+                sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+            )
+
+            # Host object files carry a "host_" prefix to keep them distinct
+            # from target objects built from identically named sources.
+            for f in files:
+                self.assertIn(
+                    mozpath.join(
+                        reader.config.topobjdir,
+                        "host_%s.%s"
+                        % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]),
+                    ),
+                    linkable.objs,
+                )
+
+    def test_wasm_sources(self):
+        """Test that WASM_SOURCES works properly."""
+        # WASM_CC/WASM_CXX substs are required for the wasm toolchain paths
+        # the fixture's moz.build processing expects.
+        reader = self.reader(
+            "wasm-sources", extra_substs={"WASM_CC": "clang", "WASM_CXX": "clang++"}
+        )
+        objs = list(self.read_topsrcdir(reader))
+
+        # The second to last object is a linkable.
+        linkable = objs[-2]
+        # Other than that, we only care about the WasmSources objects.
+        objs = objs[:2]
+        for o in objs:
+            self.assertIsInstance(o, WasmSources)
+
+        suffix_map = {obj.canonical_suffix: obj for obj in objs}
+        self.assertEqual(len(suffix_map), 2)
+
+        expected = {".cpp": ["a.cpp", "b.cc", "c.cxx"], ".c": ["d.c"]}
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            self.assertEqual(
+                sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+            )
+            # Wasm objects use WASM_OBJ_SUFFIX rather than OBJ_SUFFIX.
+            for f in files:
+                self.assertIn(
+                    mozpath.join(
+                        reader.config.topobjdir,
+                        "%s.%s"
+                        % (
+                            mozpath.splitext(f)[0],
+                            reader.config.substs["WASM_OBJ_SUFFIX"],
+                        ),
+                    ),
+                    linkable.objs,
+                )
+
+    def test_unified_sources(self):
+        """Test that UNIFIED_SOURCES works properly."""
+        reader = self.reader("unified-sources")
+        objs = self.read_topsrcdir(reader)
+
+        # The last object is a ComputedFlags, the second to last a Linkable,
+        # followed by ldflags, ignore them.
+        linkable = objs[-2]
+        objs = objs[:-3]
+        self.assertEqual(len(objs), 3)
+        for o in objs:
+            self.assertIsInstance(o, UnifiedSources)
+
+        suffix_map = {obj.canonical_suffix: obj for obj in objs}
+        self.assertEqual(len(suffix_map), 3)
+
+        expected = {
+            ".cpp": ["bar.cxx", "foo.cpp", "quux.cc"],
+            ".mm": ["objc1.mm", "objc2.mm"],
+            ".c": ["c1.c", "c2.c"],
+        }
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            self.assertEqual(
+                sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+            )
+            self.assertTrue(sources.have_unified_mapping)
+
+            # The Linkable's objects come from the generated unified files
+            # (the mapping's keys), not from the individual input sources.
+            for f in dict(sources.unified_source_mapping).keys():
+                self.assertIn(
+                    mozpath.join(
+                        reader.config.topobjdir,
+                        "%s.%s"
+                        % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]),
+                    ),
+                    linkable.objs,
+                )
+
+    def test_unified_sources_non_unified(self):
+        """Test that UNIFIED_SOURCES with FILES_PER_UNIFIED_FILE=1 works properly."""
+        reader = self.reader("unified-sources-non-unified")
+        objs = self.read_topsrcdir(reader)
+
+        # The last object is a Linkable, the second to last ComputedFlags,
+        # followed by ldflags, ignore them.
+        objs = objs[:-3]
+        self.assertEqual(len(objs), 3)
+        for o in objs:
+            self.assertIsInstance(o, UnifiedSources)
+
+        suffix_map = {obj.canonical_suffix: obj for obj in objs}
+        self.assertEqual(len(suffix_map), 3)
+
+        expected = {
+            ".cpp": ["bar.cxx", "foo.cpp", "quux.cc"],
+            ".mm": ["objc1.mm", "objc2.mm"],
+            ".c": ["c1.c", "c2.c"],
+        }
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            self.assertEqual(
+                sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+            )
+            # With one file per unified file, no unified mapping is produced.
+            self.assertFalse(sources.have_unified_mapping)
+
+    def test_object_conflicts(self):
+        """Test that object name conflicts are detected."""
+        # NOTE(review): the regex literals below embed "\." in non-raw
+        # strings; raw strings (r"...") would be the conventional form for
+        # regex patterns, but the patterns behave the same here.
+        reader = self.reader("object-conflicts/1")
+        with self.assertRaisesRegex(
+            SandboxValidationError,
+            "Test.cpp from SOURCES would have the same object name as"
+            " Test.c from SOURCES\.",
+        ):
+            self.read_topsrcdir(reader)
+
+        reader = self.reader("object-conflicts/2")
+        with self.assertRaisesRegex(
+            SandboxValidationError,
+            "Test.cpp from SOURCES would have the same object name as"
+            " subdir/Test.cpp from SOURCES\.",
+        ):
+            self.read_topsrcdir(reader)
+
+        # Conflicts between UNIFIED_SOURCES and SOURCES only matter in
+        # non-unified builds, as the error message states.
+        reader = self.reader("object-conflicts/3")
+        with self.assertRaisesRegex(
+            SandboxValidationError,
+            "Test.cpp from UNIFIED_SOURCES would have the same object name as"
+            " Test.c from SOURCES in non-unified builds\.",
+        ):
+            self.read_topsrcdir(reader)
+
+        reader = self.reader("object-conflicts/4")
+        with self.assertRaisesRegex(
+            SandboxValidationError,
+            "Test.cpp from UNIFIED_SOURCES would have the same object name as"
+            " Test.c from UNIFIED_SOURCES in non-unified builds\.",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_final_target_pp_files(self):
+        """Test that FINAL_TARGET_PP_FILES works properly."""
+        reader = self.reader("dist-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], FinalTargetPreprocessedFiles)
+
+        # Ideally we'd test hierarchies, but that would just be testing
+        # the HierarchicalStringList class, which we test separately.
+        for path, files in objs[0].files.walk():
+            self.assertEqual(path, "")
+            self.assertEqual(len(files), 2)
+
+            expected = {"install.rdf", "main.js"}
+            for f in files:
+                # Entries may be path objects; compare their text form.
+                self.assertTrue(six.text_type(f) in expected)
+
+    def test_missing_final_target_pp_files(self):
+        """Test that FINAL_TARGET_PP_FILES with missing files throws errors."""
+        # six.assertRaisesRegex is the Python 2/3-compatible spelling.
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "File listed in " "FINAL_TARGET_PP_FILES does not exist",
+        ):
+            reader = self.reader("dist-files-missing")
+            self.read_topsrcdir(reader)
+
+    def test_final_target_pp_files_non_srcdir(self):
+        """Test that non-srcdir paths in FINAL_TARGET_PP_FILES throws errors."""
+        reader = self.reader("final-target-pp-files-non-srcdir")
+        # Preprocessed dist files must come from the source directory.
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Only source directory paths allowed in FINAL_TARGET_PP_FILES:",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_localized_files(self):
+        """Test that LOCALIZED_FILES works properly."""
+        reader = self.reader("localized-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], LocalizedFiles)
+
+        for path, files in objs[0].files.walk():
+            self.assertEqual(path, "foo")
+            self.assertEqual(len(files), 3)
+
+            # Wildcard patterns (en-US/code/*.js) are preserved as-is.
+            expected = {"en-US/bar.ini", "en-US/code/*.js", "en-US/foo.js"}
+            for f in files:
+                self.assertTrue(six.text_type(f) in expected)
+
+    def test_localized_files_no_en_us(self):
+        """Test that LOCALIZED_FILES errors if a path does not start with
+        `en-US/` or contain `locales/en-US/`."""
+        reader = self.reader("localized-files-no-en-us")
+        # "foo.js" in the fixture violates the en-US path requirement.
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "LOCALIZED_FILES paths must start with `en-US/` or contain `locales/en-US/`: "
+            "foo.js",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_localized_pp_files(self):
+        """Test that LOCALIZED_PP_FILES works properly."""
+        reader = self.reader("localized-pp-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], LocalizedPreprocessedFiles)
+
+        for path, files in objs[0].files.walk():
+            self.assertEqual(path, "foo")
+            self.assertEqual(len(files), 2)
+
+            expected = {"en-US/bar.ini", "en-US/foo.js"}
+            for f in files:
+                # Compare the text form of each entry.
+                self.assertTrue(six.text_type(f) in expected)
+
+    def test_rust_library_no_cargo_toml(self):
+        """Test that defining a RustLibrary without a Cargo.toml fails."""
+        # Fixture presumably declares a RustLibrary but ships no Cargo.toml.
+        reader = self.reader("rust-library-no-cargo-toml")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "No Cargo.toml file found"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_name_mismatch(self):
+        """Test that defining a RustLibrary that doesn't match Cargo.toml fails."""
+        reader = self.reader("rust-library-name-mismatch")
+        # The moz.build library name must equal the Cargo package name.
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "library.*does not match Cargo.toml-defined package",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_no_lib_section(self):
+        """Test that a RustLibrary Cargo.toml with no [lib] section fails."""
+        reader = self.reader("rust-library-no-lib-section")
+        # "\\[lib\\]" escapes the regex brackets to match a literal [lib].
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Cargo.toml for.* has no \\[lib\\] section"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_invalid_crate_type(self):
+        """Test that a RustLibrary Cargo.toml has a permitted crate-type."""
+        reader = self.reader("rust-library-invalid-crate-type")
+        # A disallowed crate-type in Cargo.toml must be rejected.
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "crate-type.* is not permitted"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_dash_folding(self):
+        """Test that on-disk names of RustLibrary objects convert dashes to underscores."""
+        reader = self.reader(
+            "rust-library-dash-folding",
+            extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 3)
+        ldflags, lib, cflags = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(lib, RustLibrary)
+        # lib_name/import_name fold "-" to "_"; basename keeps the dash.
+        self.assertRegex(lib.lib_name, "random_crate")
+        self.assertRegex(lib.import_name, "random_crate")
+        self.assertRegex(lib.basename, "random-crate")
+
+    def test_multiple_rust_libraries(self):
+        """Test that linking multiple Rust libraries throws an error"""
+        reader = self.reader(
+            "multiple-rust-libraries",
+            extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+        )
+        # Only a single Rust library may be linked into one linkable.
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Cannot link the following Rust libraries"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_features(self):
+        """Test that RustLibrary features are correctly emitted."""
+        reader = self.reader(
+            "rust-library-features",
+            extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 3)
+        ldflags, lib, cflags = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(lib, RustLibrary)
+        # Features are preserved in declaration order.
+        self.assertEqual(lib.features, ["musthave", "cantlivewithout"])
+
+    def test_rust_library_duplicate_features(self):
+        """Test that duplicate RustLibrary features are rejected."""
+        reader = self.reader("rust-library-duplicate-features")
+        # Declaring the same feature twice is a moz.build error.
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "features for .* should not contain duplicates",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_program_no_cargo_toml(self):
+        """Test that specifying RUST_PROGRAMS without a Cargo.toml fails."""
+        reader = self.reader("rust-program-no-cargo-toml")
+        # Same Cargo.toml requirement as RustLibrary, for programs.
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "No Cargo.toml file found"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_host_rust_program_no_cargo_toml(self):
+        """Test that specifying HOST_RUST_PROGRAMS without a Cargo.toml fails."""
+        reader = self.reader("host-rust-program-no-cargo-toml")
+        # Host variant of the RUST_PROGRAMS Cargo.toml requirement.
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "No Cargo.toml file found"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_program_nonexistent_name(self):
+        """Test that specifying RUST_PROGRAMS that don't exist in Cargo.toml
+        correctly throws an error."""
+        reader = self.reader("rust-program-nonexistent-name")
+        # Program names must map to a [[bin]] definition in Cargo.toml.
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Cannot find Cargo.toml definition for"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_host_rust_program_nonexistent_name(self):
+        """Test that specifying HOST_RUST_PROGRAMS that don't exist in
+        Cargo.toml correctly throws an error."""
+        reader = self.reader("host-rust-program-nonexistent-name")
+        # Host variant of the nonexistent-program-name check above.
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Cannot find Cargo.toml definition for"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_programs(self):
+        """Test RUST_PROGRAMS emission."""
+        reader = self.reader(
+            "rust-programs",
+            extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc", BIN_SUFFIX=".exe"),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        # Expect linker flags, compile flags, and the RustProgram itself.
+        self.assertEqual(len(objs), 3)
+        ldflags, cflags, prog = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(prog, RustProgram)
+        self.assertEqual(prog.name, "some")
+
+    def test_host_rust_programs(self):
+        """Test HOST_RUST_PROGRAMS emission."""
+        reader = self.reader(
+            "host-rust-programs",
+            extra_substs=dict(
+                RUST_HOST_TARGET="i686-pc-windows-msvc", HOST_BIN_SUFFIX=".exe"
+            ),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 4)
+        # NOTE(review): leftover debug print; harmless but could be removed.
+        print(objs)
+        ldflags, cflags, hostflags, prog = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(hostflags, ComputedFlags)
+        self.assertIsInstance(prog, HostRustProgram)
+        self.assertEqual(prog.name, "some")
+
+    def test_host_rust_libraries(self):
+        """Test HOST_RUST_LIBRARIES emission."""
+        reader = self.reader(
+            "host-rust-libraries",
+            extra_substs=dict(
+                RUST_HOST_TARGET="i686-pc-windows-msvc", HOST_BIN_SUFFIX=".exe"
+            ),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 3)
+        ldflags, lib, cflags = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(lib, HostRustLibrary)
+        # Host Rust libraries get the same name-folding as target ones.
+        self.assertRegex(lib.lib_name, "host_lib")
+        self.assertRegex(lib.import_name, "host_lib")
+
+    def test_crate_dependency_path_resolution(self):
+        """Test recursive dependencies resolve with the correct paths."""
+        reader = self.reader(
+            "crate-dependency-path-resolution",
+            extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        # Emission succeeding at all is the real assertion here: path
+        # dependencies in the fixture's Cargo.toml must resolve.
+        self.assertEqual(len(objs), 3)
+        ldflags, lib, cflags = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(lib, RustLibrary)
+
+    def test_install_shared_lib(self):
+        """Test that we can install a shared library with TEST_HARNESS_FILES"""
+        reader = self.reader("test-install-shared-lib")
+        objs = self.read_topsrcdir(reader)
+        self.assertIsInstance(objs[0], TestHarnessFiles)
+        self.assertIsInstance(objs[1], VariablePassthru)
+        self.assertIsInstance(objs[2], ComputedFlags)
+        self.assertIsInstance(objs[3], SharedLibrary)
+        self.assertIsInstance(objs[4], ComputedFlags)
+        for path, files in objs[0].files.walk():
+            for f in files:
+                # The "!" prefix marks an objdir (built) file.
+                self.assertEqual(str(f), "!libfoo.so")
+                self.assertEqual(path, "foo/bar")
+
+    def test_symbols_file(self):
+        """Test that SYMBOLS_FILE works"""
+        reader = self.reader("test-symbols-file")
+        genfile, ldflags, shlib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(genfile, GeneratedFile)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(shlib, SharedLibrary)
+        # This looks weird but MockConfig sets DLL_{PREFIX,SUFFIX} and
+        # the reader method in this class sets OS_TARGET=WINNT.
+        self.assertEqual(shlib.symbols_file, "libfoo.so.def")
+
+    def test_symbols_file_objdir(self):
+        """Test that a SYMBOLS_FILE in the objdir works"""
+        reader = self.reader("test-symbols-file-objdir")
+        genfile, ldflags, shlib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(genfile, GeneratedFile)
+        # The objdir symbols file must be produced by a GENERATED_FILES
+        # entry whose script lives in the srcdir.
+        self.assertEqual(
+            genfile.script, mozpath.join(reader.config.topsrcdir, "foo.py")
+        )
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(shlib, SharedLibrary)
+        self.assertEqual(shlib.symbols_file, "foo.symbols")
+
+    def test_symbols_file_objdir_missing_generated(self):
+        """Test that a SYMBOLS_FILE in the objdir that's missing
+        from GENERATED_FILES is an error.
+        """
+        reader = self.reader("test-symbols-file-objdir-missing-generated")
+        # An objdir SYMBOLS_FILE must be registered in GENERATED_FILES.
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Objdir file specified in SYMBOLS_FILE not in GENERATED_FILES:",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_wasm_compile_flags(self):
+        # Verify that wasm compile flags are computed from the substs and the
+        # fixture's moz.build, and that WASM defines are emitted.
+        reader = self.reader(
+            "wasm-compile-flags",
+            extra_substs={"WASM_CC": "clang", "WASM_CXX": "clang++"},
+        )
+        # The third emitted object carries the computed wasm flags.
+        flags = list(self.read_topsrcdir(reader))[2]
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(
+            flags.flags["WASM_CFLAGS"], reader.config.substs["WASM_CFLAGS"]
+        )
+        self.assertEqual(
+            flags.flags["MOZBUILD_WASM_CFLAGS"], ["-funroll-loops", "-wasm-arg"]
+        )
+        # Define order is not guaranteed; compare as sets.
+        self.assertEqual(
+            set(flags.flags["WASM_DEFINES"]),
+            set(["-DFOO", '-DBAZ="abcd"', "-UQUX", "-DBAR=7", "-DVALUE=xyz"]),
+        )
+
+
+# Allow running this test file directly (main is imported at module top).
+if __name__ == "__main__":
+    main()