Mirror of https://github.com/mozilla/gecko-dev.git (synced 2024-10-30 21:55:31 +00:00)
Merge mozilla-central into mozilla-inbound
commit 92dae8e55e
1  aclocal.m4  (vendored)
@@ -26,6 +26,7 @@ builtin(include, build/autoconf/arch.m4)dnl
builtin(include, build/autoconf/android.m4)dnl
builtin(include, build/autoconf/zlib.m4)dnl
builtin(include, build/autoconf/linux.m4)dnl
builtin(include, build/autoconf/python-virtualenv.m4)dnl

MOZ_PROG_CHECKMSYS()
@@ -11,6 +11,8 @@ from optparse import OptionParser
import sys, re, os, posixpath, ntpath
import errno
from StringIO import StringIO
from os.path import relpath

# Standalone js doesn't have virtualenv.
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'config'))
from Preprocessor import Preprocessor
@@ -21,29 +23,6 @@ def log(string):
    if verbose:
        print >>sys.stderr, string

# We need relpath, but it is introduced in python 2.6
# http://docs.python.org/library/os.path.html
def my_relpath(path, start):
    """
    Return a relative version of a path
    from /usr/lib/python2.6/posixpath.py
    """

    if not path:
        raise ValueError("no path specified")

    start_list = os.path.abspath(start).split(os.path.sep)
    path_list = os.path.abspath(path).split(os.path.sep)

    # Work out how much of the filepath is shared by start and path.
    i = len(os.path.commonprefix([start_list, path_list]))

    rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
    if not rel_list:
        return os.curdir
    return os.path.join(*rel_list)

relpath = getattr(os.path, "relpath", my_relpath)

def ensureParentDir(file):
    '''Ensures the directory parent to the given file exists'''
77  build/autoconf/python-virtualenv.m4  (Normal file)
@@ -0,0 +1,77 @@
dnl This Source Code Form is subject to the terms of the Mozilla Public
dnl License, v. 2.0. If a copy of the MPL was not distributed with this
dnl file, You can obtain one at http://mozilla.org/MPL/2.0/.

AC_DEFUN([MOZ_PYTHON],
[

dnl We honor the Python path defined in an environment variable. This is used
dnl to pass the virtualenv's Python from the main configure to SpiderMonkey's
dnl configure, for example.
if test -z "$PYTHON"; then
  MOZ_PATH_PROGS(PYTHON, $PYTHON python2.7 python)
  if test -z "$PYTHON"; then
    AC_MSG_ERROR([python was not found in \$PATH])
  fi
else
  AC_MSG_RESULT([Using Python from environment variable \$PYTHON])
fi

_virtualenv_topsrcdir=
_virtualenv_populate_path=

dnl If this is a mozilla-central, we'll find the virtualenv in the top
dnl source directory. If this is a SpiderMonkey build, we assume we're at
dnl js/src and try to find the virtualenv from the mozilla-central root.
for base in $MOZILLA_CENTRAL_PATH $_topsrcdir $_topsrcdir/../..; do
  possible=$base/build/virtualenv/populate_virtualenv.py

  if test -e $possible; then
    _virtualenv_topsrcdir=$base
    _virtualenv_populate_path=$possible
    break
  fi
done

if test -z $_virtualenv_populate_path; then
  AC_MSG_ERROR([Unable to find Virtualenv population script. In order
to build, you will need mozilla-central's virtualenv.

If you are building from a mozilla-central checkout, you should never see this
message. If you are building from a source archive, the source archive was
likely not created properly (it is missing the virtualenv files).

If you have a copy of mozilla-central available, define the
MOZILLA_CENTRAL_PATH environment variable to the top source directory of
mozilla-central and relaunch configure.])

fi

if test -z $DONT_POPULATE_VIRTUALENV; then
  AC_MSG_RESULT([Creating Python environment])
  dnl This verifies our Python version is sane and ensures the Python
  dnl virtualenv is present and up to date. It sanitizes the environment
  dnl for us, so we don't need to clean anything out.
  $PYTHON $_virtualenv_populate_path \
    $_virtualenv_topsrcdir $MOZ_BUILD_ROOT/_virtualenv || exit 1

  case "$host_os" in
  mingw*)
    PYTHON=`cd $MOZ_BUILD_ROOT && pwd -W`/_virtualenv/Scripts/python.exe
    ;;
  *)
    PYTHON=$MOZ_BUILD_ROOT/_virtualenv/bin/python
    ;;
  esac
fi

AC_SUBST(PYTHON)

AC_MSG_CHECKING([Python environment is Mozilla virtualenv])
$PYTHON -c "import mozbuild.base"
if test "$?" != 0; then
  AC_MSG_ERROR([Python environment does not appear to be sane.])
fi
AC_MSG_RESULT([yes])
])
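As a reading aid, here is a minimal Python sketch of what MOZ_PYTHON does: locate mozilla-central's populate_virtualenv.py under one of the candidate roots, run it against the object directory, and use the resulting virtualenv's interpreter. The candidate roots and argument order come from the macro above; the helper name and the sys.platform check are illustrative assumptions, not code from the patch:

    import os
    import subprocess
    import sys

    def moz_python(candidate_roots, build_root, python=sys.executable):
        # Candidate roots mirror the macro: $MOZILLA_CENTRAL_PATH, the top
        # source dir, and topsrcdir/../.. for a js/src (SpiderMonkey) build.
        for base in candidate_roots:
            script = os.path.join(base, 'build', 'virtualenv',
                                  'populate_virtualenv.py')
            if os.path.exists(script):
                break
        else:
            raise RuntimeError('Unable to find the virtualenv population script; '
                               'set MOZILLA_CENTRAL_PATH to a mozilla-central checkout.')

        # Populate (or refresh) the virtualenv in the object directory.
        venv = os.path.join(build_root, '_virtualenv')
        subprocess.check_call([python, script, base, venv])

        # The virtualenv's interpreter lives in Scripts/ on Windows, bin/ elsewhere.
        if sys.platform.startswith('win'):
            return os.path.join(venv, 'Scripts', 'python.exe')
        return os.path.join(venv, 'bin', 'python')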
@@ -254,6 +254,14 @@
   fun:_ZN2js15ArgumentsObject14createExpectedEP9JSContextPNS_10StackFrameE
   ...
}
{
   Bug 812423
   Memcheck:Leak
   fun:malloc
   fun:_ZN2js15ArgumentsObject6createI13CopyFrameArgsEEPS0_P9JSContextN2JS6HandleIP8JSScriptEENS7_IP10JSFunctionEEjRT_
   fun:_ZN2js15ArgumentsObject14createExpectedEP9JSContextNS_16AbstractFramePtrE
   ...
}
{
   Bug 823782
   Memcheck:Leak
27  configure.in
@@ -147,29 +147,7 @@ else
  touch $_objdir/CLOBBER
fi

MOZ_PATH_PROGS(PYTHON, $PYTHON python2.7 python)
if test -z "$PYTHON"; then
  AC_MSG_ERROR([python was not found in \$PATH])
fi

AC_MSG_RESULT([Creating Python environment])
dnl This verifies our Python version is sane and ensures the Python
dnl virtualenv is present and up to date. It sanitizes the environment
dnl for us, so we don't need to clean anything out.
$PYTHON $_topsrcdir/build/virtualenv/populate_virtualenv.py \
  $_topsrcdir $MOZ_BUILD_ROOT/_virtualenv || exit 1

dnl Create a virtualenv where we can install local Python packages
case "$host_os" in
mingw*)
  PYTHON=`cd $MOZ_BUILD_ROOT && pwd -W`/_virtualenv/Scripts/python.exe
  ;;
*)
  PYTHON=$MOZ_BUILD_ROOT/_virtualenv/bin/python
  ;;
esac

AC_SUBST(PYTHON)
MOZ_PYTHON

MOZ_DEFAULT_COMPILER

@@ -9347,6 +9325,9 @@ export MOZ_NATIVE_ZLIB
export MOZ_ZLIB_CFLAGS
export MOZ_ZLIB_LIBS
export MOZ_APP_NAME
export DONT_POPULATE_VIRTUALENV=1
export PYTHON
export MOZILLA_CENTRAL_PATH=$_topsrcdir
export STLPORT_CPPFLAGS
export STLPORT_LDFLAGS
export STLPORT_LIBS
1  js/src/aclocal.m4  (vendored)
@@ -25,5 +25,6 @@ builtin(include, build/autoconf/arch.m4)dnl
builtin(include, build/autoconf/android.m4)dnl
builtin(include, build/autoconf/zlib.m4)dnl
builtin(include, build/autoconf/linux.m4)dnl
builtin(include, build/autoconf/python-virtualenv.m4)dnl

MOZ_PROG_CHECKMSYS()
@@ -11,6 +11,8 @@ from optparse import OptionParser
import sys, re, os, posixpath, ntpath
import errno
from StringIO import StringIO
from os.path import relpath

# Standalone js doesn't have virtualenv.
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'config'))
from Preprocessor import Preprocessor
@@ -21,29 +23,6 @@ def log(string):
    if verbose:
        print >>sys.stderr, string

# We need relpath, but it is introduced in python 2.6
# http://docs.python.org/library/os.path.html
def my_relpath(path, start):
    """
    Return a relative version of a path
    from /usr/lib/python2.6/posixpath.py
    """

    if not path:
        raise ValueError("no path specified")

    start_list = os.path.abspath(start).split(os.path.sep)
    path_list = os.path.abspath(path).split(os.path.sep)

    # Work out how much of the filepath is shared by start and path.
    i = len(os.path.commonprefix([start_list, path_list]))

    rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
    if not rel_list:
        return os.curdir
    return os.path.join(*rel_list)

relpath = getattr(os.path, "relpath", my_relpath)

def ensureParentDir(file):
    '''Ensures the directory parent to the given file exists'''
77  js/src/build/autoconf/python-virtualenv.m4  (Normal file)
@@ -0,0 +1,77 @@
dnl This Source Code Form is subject to the terms of the Mozilla Public
dnl License, v. 2.0. If a copy of the MPL was not distributed with this
dnl file, You can obtain one at http://mozilla.org/MPL/2.0/.

AC_DEFUN([MOZ_PYTHON],
[

dnl We honor the Python path defined in an environment variable. This is used
dnl to pass the virtualenv's Python from the main configure to SpiderMonkey's
dnl configure, for example.
if test -z "$PYTHON"; then
  MOZ_PATH_PROGS(PYTHON, $PYTHON python2.7 python)
  if test -z "$PYTHON"; then
    AC_MSG_ERROR([python was not found in \$PATH])
  fi
else
  AC_MSG_RESULT([Using Python from environment variable \$PYTHON])
fi

_virtualenv_topsrcdir=
_virtualenv_populate_path=

dnl If this is a mozilla-central, we'll find the virtualenv in the top
dnl source directory. If this is a SpiderMonkey build, we assume we're at
dnl js/src and try to find the virtualenv from the mozilla-central root.
for base in $MOZILLA_CENTRAL_PATH $_topsrcdir $_topsrcdir/../..; do
  possible=$base/build/virtualenv/populate_virtualenv.py

  if test -e $possible; then
    _virtualenv_topsrcdir=$base
    _virtualenv_populate_path=$possible
    break
  fi
done

if test -z $_virtualenv_populate_path; then
  AC_MSG_ERROR([Unable to find Virtualenv population script. In order
to build, you will need mozilla-central's virtualenv.

If you are building from a mozilla-central checkout, you should never see this
message. If you are building from a source archive, the source archive was
likely not created properly (it is missing the virtualenv files).

If you have a copy of mozilla-central available, define the
MOZILLA_CENTRAL_PATH environment variable to the top source directory of
mozilla-central and relaunch configure.])

fi

if test -z $DONT_POPULATE_VIRTUALENV; then
  AC_MSG_RESULT([Creating Python environment])
  dnl This verifies our Python version is sane and ensures the Python
  dnl virtualenv is present and up to date. It sanitizes the environment
  dnl for us, so we don't need to clean anything out.
  $PYTHON $_virtualenv_populate_path \
    $_virtualenv_topsrcdir $MOZ_BUILD_ROOT/_virtualenv || exit 1

  case "$host_os" in
  mingw*)
    PYTHON=`cd $MOZ_BUILD_ROOT && pwd -W`/_virtualenv/Scripts/python.exe
    ;;
  *)
    PYTHON=$MOZ_BUILD_ROOT/_virtualenv/bin/python
    ;;
  esac
fi

AC_SUBST(PYTHON)

AC_MSG_CHECKING([Python environment is Mozilla virtualenv])
$PYTHON -c "import mozbuild.base"
if test "$?" != 0; then
  AC_MSG_ERROR([Python environment does not appear to be sane.])
fi
AC_MSG_RESULT([yes])
])
@@ -55,7 +55,6 @@ NSPR_VERSION=4
dnl Set the minimum version of toolkit libs used by mozilla
dnl ========================================================
PERL_VERSION=5.006
PYTHON_VERSION=2.7
WINDRES_VERSION=2.14.90
W32API_VERSION=3.14

@@ -665,10 +664,7 @@ else
  AC_MSG_RESULT([yes])
fi

MOZ_PATH_PROGS(PYTHON, $PYTHON python2.7 python)
if test -z "$PYTHON"; then
  AC_MSG_ERROR([python was not found in \$PATH])
fi
MOZ_PYTHON

if test -z "$COMPILE_ENVIRONMENT"; then
  NSINSTALL_BIN='$(PYTHON) $(topsrcdir)/config/nsinstall.py'
@@ -1463,17 +1459,6 @@ case "$host" in
  ;;
esac

dnl We require version 2.7 of Python to build.
AC_MSG_CHECKING([for Python version >= $PYTHON_VERSION but not 3.x])
changequote(,)
$PYTHON -c "import sys; sys.exit(sys.version[:3] < sys.argv[1] or sys.version[:2] != '2.')" $PYTHON_VERSION
_python_res=$?
changequote([,])
if test "$_python_res" != 0; then
  AC_MSG_ERROR([Python $PYTHON_VERSION or higher (but not Python 3.x) is required.])
fi
AC_MSG_RESULT([yes])
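The quoted one-liner compares version strings lexically, which is easy to misread. A small sketch of the same requirement (at least 2.7, but not Python 3) using sys.version_info; this is an illustration, not code from the patch:

    import sys

    def python_version_ok(minimum=(2, 7)):
        # True only for a Python 2 interpreter at or above the minimum version.
        return sys.version_info[0] == 2 and sys.version_info >= minimum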

dnl Check for using a custom <stdint.h> implementation
dnl ========================================================
AC_MSG_CHECKING(for custom <stdint.h> implementation)
@@ -4268,7 +4253,6 @@ dnl win32 options
AC_SUBST(MOZ_MAPINFO)
AC_SUBST(MOZ_BROWSE_INFO)
AC_SUBST(MOZ_TOOLS_DIR)
AC_SUBST(PYTHON)

dnl Echo the CFLAGS to remove extra whitespace.
CFLAGS=`echo \
1  mach
@@ -42,6 +42,7 @@ MACH_MODULES = [
    'python/mozboot/mozboot/mach_commands.py',
    'python/mozbuild/mozbuild/config.py',
    'python/mozbuild/mozbuild/mach_commands.py',
    'python/mozbuild/mozbuild/frontend/mach_commands.py',
    'testing/mochitest/mach_commands.py',
    'testing/xpcshell/mach_commands.py',
]
@@ -12,6 +12,7 @@ include $(DEPTH)/config/autoconf.mk
test_dirs := \
  mozbuild/mozbuild/test \
  mozbuild/mozbuild/test/compilation \
  mozbuild/mozbuild/test/frontend \
  $(NULL)

PYTHON_UNIT_TESTS := $(foreach dir,$(test_dirs),$(wildcard $(srcdir)/$(dir)/*.py))
@@ -10,4 +10,30 @@ Modules Overview

* mozbuild.compilation -- Functionality related to compiling. This
  includes managing compiler warnings.
* mozbuild.frontend -- Functionality for reading build frontend files
  (what defines the build system) and converting them to data structures
  which are fed into build backends to produce backend configurations.

Overview
========

The build system consists of frontend files that define what to do. They
say things like "compile X" and "copy Y."

The mozbuild.frontend package contains code for reading these frontend
files and converting them to static data structures. The set of produced
static data structures for the tree constitute the current build
configuration.

There exist entities called build backends. From a high level, build
backends consume the build configuration and do something with it. They
typically produce tool-specific files such as make files which can be used
to build the tree.

Builders are entities that build the tree. They typically have high
cohesion with a specific build backend.

Piecing it all together, we have frontend files that are parsed into data
structures. These data structures are fed into a build backend. The output
from build backends is used by builders to build the tree.
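A minimal sketch of how these pieces are wired together, using the BuildReader and TreeMetadataEmitter classes added in this commit; the config argument is assumed to be the object produced by configure (as described in reader.py further down):

    from mozbuild.frontend.reader import BuildReader
    from mozbuild.frontend.emitter import TreeMetadataEmitter

    def emit_tree_metadata(config):
        reader = BuildReader(config)
        emitter = TreeMetadataEmitter(config)

        # Both stages are generators, so TreeMetadata instances stream out
        # while moz.build files are still being read.
        for obj in emitter.emit(reader.read_topsrcdir()):
            yield obj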
3  python/mozbuild/TODO  (Normal file)
@@ -0,0 +1,3 @@
dom/imptests Makefile.in's are autogenerated. See
dom/imptests/writeMakefile.py and bug 782651. We will need to update
writeMakefile.py to produce mozbuild files.
137  python/mozbuild/mozbuild/frontend/README.rst  (Normal file)
@@ -0,0 +1,137 @@
=================
mozbuild.frontend
=================

The mozbuild.frontend package is of sufficient importance and complexity
to warrant its own README file. If you are looking for documentation on
how the build system gets started, you've come to the right place.

Overview
========

Tree metadata (including the build system) is defined by a collection of
files in the source tree called *mozbuild* files. These typically are files
named *moz.build*. But, the actual name can vary.

Each *mozbuild* file defines basic metadata about the part of the tree
(typically directory scope) it resides in. This includes build system
configuration, such as the list of C++ files to compile or headers to install
or libraries to link together.

*mozbuild* files are actually Python scripts. However, their execution
is governed by special rules. This will be explained later.

Once a *mozbuild* file has executed, it is converted into a set of static
data structures.

The set of all data structures from all relevant *mozbuild* files
constitute all of the metadata from the tree.

How *mozbuild* Files Work
=========================

As stated above, *mozbuild* files are actually Python scripts. However,
their behavior is very different from what you would expect if you executed
the file using the standard Python interpreter from the command line.

There are two properties that make execution of *mozbuild* files special:

1. They are evaluated in a sandbox which exposes a limited subset of Python
2. There is a special set of global variables which hold the output from
   execution.

The limited subset of Python is actually an extremely limited subset.
Only a few built-ins are exposed. These include *True*, *False*, and
*None*. Global functions like *import*, *print*, and *open* aren't defined.
Without these, *mozbuild* files can do very little. This is by design.

The side-effects of the execution of a *mozbuild* file are used to define
the build configuration. Specifically, variables set during the execution
of a *mozbuild* file are examined and their values are used to populate
data structures.

The enforced convention is that all UPPERCASE names inside a sandbox are
reserved and it is the value of these variables post-execution that is
examined. Furthermore, the set of allowed UPPERCASE variable names and
their types is statically defined. If you attempt to reference or assign
to an UPPERCASE variable name that isn't known to the build system or
attempt to assign a value of the wrong type (e.g. a string when it wants a
list), an error will be raised during execution of the *mozbuild* file.
This strictness is to ensure that assignment to all UPPERCASE variables
actually does something. If things weren't this way, *mozbuild* files
might think they were doing something but in reality wouldn't be. We don't
want to create false promises, so we validate behavior strictly.

If a variable is not UPPERCASE, you can do anything you want with it,
provided it isn't a function or other built-in. In other words, normal
Python rules apply.
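A hypothetical *moz.build* file illustrating these rules. *DIRS* and *TEST_DIRS* are among the UPPERCASE variables the build system knows about; the lowercase name is ordinary Python and is discarded after execution::

    DIRS = ['base', 'content']
    TEST_DIRS = ['tests']

    extra_dirs = ['other']     # lowercase: plain Python, thrown away afterwards
    DIRS += extra_dirs

    # DIRS = 'base'            # error: wrong type (string where a list is expected)
    # MY_UNKNOWN_VAR = True    # error: UPPERCASE name the sandbox doesn't know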

All of the logic for loading and evaluating *mozbuild* files is in the
*reader* module. Of specific interest is the *MozbuildSandbox* class. The
*BuildReader* class is also important, as it is in charge of
instantiating *MozbuildSandbox* instances and traversing a tree of linked
*mozbuild* files. Unless you are a core component of the build system,
*BuildReader* is probably the only class you care about in this module.

The set of variables and functions *exported* to the sandbox is defined by
the *sandbox_symbols* module. These data structures are actually used to
populate MozbuildSandbox instances. And, there are tests to ensure that the
sandbox doesn't add new symbols without those symbols being added to the
module. And, since the module contains documentation, this ensures the
documentation is up to date (at least in terms of symbol membership).
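For example, a consumer outside the build system would typically use *BuildReader* roughly like this (a sketch; ``config`` is assumed to be the object produced by configure)::

    from mozbuild.frontend.reader import BuildReader

    reader = BuildReader(config)
    for sandbox in reader.read_topsrcdir():
        # Each sandbox exposes the UPPERCASE variables a moz.build file set.
        print('%s: %s' % (sandbox['RELATIVEDIR'], sandbox.get('DIRS', [])))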

How Sandboxes are Converted into Data Structures
================================================

The output of a *mozbuild* file execution is essentially a dict of all
the special UPPERCASE variables populated during its execution. While these
dicts are data structures, they aren't the final data structures that
represent the build configuration.

We feed the *mozbuild* execution output (actually *reader.MozbuildSandbox*
instances) into a *TreeMetadataEmitter* class instance. This class is
defined in the *emitter* module. *TreeMetadataEmitter* converts the
*MozbuildSandbox* instances into instances of the *TreeMetadata*-derived
classes from the *data* module.

All the classes in the *data* module define a domain-specific
component of the tree metadata, including build configuration. File compilation
and IDL generation are separate classes, for example. The only thing these
classes have in common is that they inherit from *TreeMetadata*, which is
merely an abstract base class.

The set of all emitted *TreeMetadata* instances (converted from executed
*mozbuild* files) constitutes the aggregate tree metadata. This is the
authoritative definition of the build system, etc. and is what's used by
all downstream consumers, such as build backends. There is no monolithic
class or data structure. Instead, the tree metadata is modeled as a collection
of *TreeMetadata* instances.

There is no defined mapping between the number of
*MozbuildSandbox*/*moz.build* instances and *TreeMetadata* instances.
Some *mozbuild* files will emit only 1 *TreeMetadata* instance. Some
will emit 7. Some may even emit 0!

The purpose of this *emitter* layer between the raw *mozbuild* execution
result and *TreeMetadata* is to facilitate additional normalization and
verification of the output. There are multiple downstream consumers of
this data and there is common functionality shared between them. An
abstraction layer that provides high-level filtering is a useful feature.
Thus *TreeMetadataEmitter* exists.

Other Notes
===========

*reader.BuildReader* and *emitter.TreeMetadataEmitter* have a nice
stream-based API courtesy of generators. When you hook them up properly,
*TreeMetadata* instances can be consumed before all *mozbuild* files have
been read. This means that errors down the pipe can trigger before all
upstream tasks (such as executing and converting) are complete. This should
reduce the turnaround time in the event of errors. This likely translates to
a more rapid pace for implementing backends, which require lots of iterative
runs through the entire system.

Lots of code in this sub-module is applicable to other systems, not just
Mozilla's. However, some of the code is tightly coupled. If there is a will
to extract the generic bits for re-use in other projects, that can and should
be done.
0  python/mozbuild/mozbuild/frontend/__init__.py  (Normal file)
82  python/mozbuild/mozbuild/frontend/data.py  (Normal file)
@@ -0,0 +1,82 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

r"""Data structures representing Mozilla's source tree.

The frontend files are parsed into static data structures. These data
structures are defined in this module.

All data structures of interest are children of the TreeMetadata class.

Logic for populating these data structures is not defined in this class.
Instead, what we have here are dumb container classes. The emitter module
contains the code for converting executed mozbuild files into these data
structures.
"""

from __future__ import unicode_literals

from collections import OrderedDict


class TreeMetadata(object):
    """Base class for all data being captured."""


class SandboxDerived(TreeMetadata):
    """Build object derived from a single MozbuildSandbox instance.

    It holds fields common to all sandboxes. This class is likely never
    instantiated directly but is instead derived from.
    """

    __slots__ = (
        'objdir',
        'relativedir',
        'srcdir',
        'topobjdir',
        'topsrcdir',
    )

    def __init__(self, sandbox):
        self.topsrcdir = sandbox['TOPSRCDIR']
        self.topobjdir = sandbox['TOPOBJDIR']

        self.relativedir = sandbox['RELATIVEDIR']
        self.srcdir = sandbox['SRCDIR']
        self.objdir = sandbox['OBJDIR']


class DirectoryTraversal(SandboxDerived):
    """Describes how directory traversal for building should work.

    This build object is likely only of interest to the recursive make backend.
    Other build backends should (ideally) not attempt to mimic the behavior of
    the recursive make backend. The only reason this exists is to support the
    existing recursive make backend until the transition to mozbuild frontend
    files is complete and we move to a more optimal build backend.

    Fields in this class correspond to similarly named variables in the
    frontend files.
    """
    __slots__ = (
        'dirs',
        'parallel_dirs',
        'tool_dirs',
        'test_dirs',
        'test_tool_dirs',
        'tier_dirs',
        'tier_static_dirs',
    )

    def __init__(self, sandbox):
        SandboxDerived.__init__(self, sandbox)

        self.dirs = []
        self.parallel_dirs = []
        self.tool_dirs = []
        self.test_dirs = []
        self.test_tool_dirs = []
        self.tier_dirs = OrderedDict()
        self.tier_static_dirs = OrderedDict()
59  python/mozbuild/mozbuild/frontend/emitter.py  (Normal file)
@@ -0,0 +1,59 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import unicode_literals

from .data import DirectoryTraversal
from .reader import MozbuildSandbox


class TreeMetadataEmitter(object):
    """Converts the executed mozbuild files into data structures.

    This is a bridge between reader.py and data.py. It takes what was read by
    reader.BuildReader and converts it into the classes defined in the data
    module.
    """

    def __init__(self, config):
        self.config = config

    def emit(self, output):
        """Convert the BuildReader output into data structures.

        The return value from BuildReader.read_topsrcdir() (a generator) is
        typically fed into this function.
        """
        for out in output:
            if isinstance(out, MozbuildSandbox):
                for o in self.emit_from_sandbox(out):
                    yield o
            else:
                raise Exception('Unhandled output type: %s' % out)

    def emit_from_sandbox(self, sandbox):
        """Convert a MozbuildSandbox to tree metadata objects.

        This is a generator of mozbuild.frontend.data.SandboxDerived instances.
        """

        # We always emit a directory traversal descriptor. This is needed by
        # the recursive make backend.
        for o in self._emit_directory_traversal_from_sandbox(sandbox): yield o

    def _emit_directory_traversal_from_sandbox(self, sandbox):
        o = DirectoryTraversal(sandbox)
        o.dirs = sandbox.get('DIRS', [])
        o.parallel_dirs = sandbox.get('PARALLEL_DIRS', [])
        o.tool_dirs = sandbox.get('TOOL_DIRS', [])
        o.test_dirs = sandbox.get('TEST_DIRS', [])
        o.test_tool_dirs = sandbox.get('TEST_TOOL_DIRS', [])

        if 'TIERS' in sandbox:
            for tier in sandbox['TIERS']:
                o.tier_dirs[tier] = sandbox['TIERS'][tier]['regular']
                o.tier_static_dirs[tier] = sandbox['TIERS'][tier]['static']

        yield o
171  python/mozbuild/mozbuild/frontend/mach_commands.py  (Normal file)
@@ -0,0 +1,171 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import print_function, unicode_literals

import textwrap

from mach.decorators import (
    CommandArgument,
    CommandProvider,
    Command
)

from mozbuild.frontend.sandbox_symbols import (
    FUNCTIONS,
    SPECIAL_VARIABLES,
    VARIABLES,
    doc_to_paragraphs,
)


def get_doc(doc):
    """Split documentation into summary line and everything else."""
    paragraphs = doc_to_paragraphs(doc)

    summary = paragraphs[0]
    extra = paragraphs[1:]

    return summary, extra

def print_extra(extra):
    """Prints the 'everything else' part of documentation intelligently."""
    for para in extra:
        for line in textwrap.wrap(para):
            print(line)

        print('')

    if not len(extra):
        print('')


@CommandProvider
class MozbuildFileCommands(object):
    @Command('mozbuild-reference',
        help='View reference documentation on mozbuild files.')
    @CommandArgument('symbol', default=None, nargs='*',
        help='Symbol to view help on. If not specified, all will be shown.')
    @CommandArgument('--name-only', '-n', default=False, action='store_true',
        help='Print symbol names only.')
    def reference(self, symbol, name_only=False):
        if name_only:
            for s in sorted(VARIABLES.keys()):
                print(s)

            for s in sorted(FUNCTIONS.keys()):
                print(s)

            for s in sorted(SPECIAL_VARIABLES.keys()):
                print(s)

            return 0

        if len(symbol):
            for s in symbol:
                if s in VARIABLES:
                    self.variable_reference(s)
                    continue
                elif s in FUNCTIONS:
                    self.function_reference(s)
                    continue
                elif s in SPECIAL_VARIABLES:
                    self.special_reference(s)
                    continue

                print('Could not find symbol: %s' % s)
                return 1

            return 0

        print('=========')
        print('VARIABLES')
        print('=========')
        print('')
        print('This section lists all the variables that may be set ')
        print('in moz.build files.')
        print('')

        for v in sorted(VARIABLES.keys()):
            self.variable_reference(v)

        print('=========')
        print('FUNCTIONS')
        print('=========')
        print('')
        print('This section lists all the functions that may be called ')
        print('in moz.build files.')
        print('')

        for f in sorted(FUNCTIONS.keys()):
            self.function_reference(f)

        print('=================')
        print('SPECIAL VARIABLES')
        print('=================')
        print('')

        for v in sorted(SPECIAL_VARIABLES.keys()):
            self.special_reference(v)

        return 0

    def variable_reference(self, v):
        typ, default, doc = VARIABLES[v]

        print(v)
        print('=' * len(v))
        print('')

        summary, extra = get_doc(doc)

        print(summary)
        print('')
        print('Type: %s' % typ.__name__)
        print('Default Value: %s' % default)
        print('')
        print_extra(extra)

    def function_reference(self, f):
        attr, args, doc = FUNCTIONS[f]

        print(f)
        print('=' * len(f))
        print('')

        summary, extra = get_doc(doc)

        print(summary)
        print('')

        arg_types = []

        for t in args:
            if isinstance(t, list):
                inner_types = [t2.__name__ for t2 in t]
                arg_types.append(' | '.join(inner_types))
                continue

            arg_types.append(t.__name__)

        arg_s = '(%s)' % ', '.join(arg_types)

        print('Arguments: %s' % arg_s)
        print('')
        print_extra(extra)

    def special_reference(self, v):
        typ, doc = SPECIAL_VARIABLES[v]

        print(v)
        print('=' * len(v))
        print('')

        summary, extra = get_doc(doc)

        print(summary)
        print('')
        print('Type: %s' % typ.__name__)
        print('')
        print_extra(extra)
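Since this module is added to MACH_MODULES in the mach script earlier in this commit, the reference can presumably be generated with ./mach mozbuild-reference, passing one or more symbol names to look up specific symbols, or --name-only (-n) to list just the symbol names.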
575  python/mozbuild/mozbuild/frontend/reader.py  (Normal file)
@@ -0,0 +1,575 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# This file contains code for reading metadata from the build system into
# data structures.

r"""Read build frontend files into data structures.

In terms of code architecture, the main interface is BuildReader. BuildReader
starts with a root mozbuild file. It creates a new execution environment for
this file, which is represented by the Sandbox class. The Sandbox class is what
defines what is allowed to execute in an individual mozbuild file. The Sandbox
consists of a local and global namespace, which are modeled by the
LocalNamespace and GlobalNamespace classes, respectively. The global namespace
contains all of the takeaway information from the execution. The local
namespace is for throwaway local variables and its contents are discarded after
execution.

The BuildReader contains basic logic for traversing a tree of mozbuild files.
It does this by examining specific variables populated during execution.
"""

from __future__ import print_function, unicode_literals

import logging
import os
import sys
import traceback
import types

from io import StringIO

from mozbuild.util import (
    ReadOnlyDefaultDict,
    ReadOnlyDict,
)

from .sandbox import (
    SandboxExecutionError,
    SandboxLoadError,
    Sandbox,
)

from .sandbox_symbols import (
    FUNCTIONS,
    VARIABLES,
)


if sys.version_info.major == 2:
    text_type = unicode
    type_type = types.TypeType
else:
    text_type = str
    type_type = type

def log(logger, level, action, params, formatter):
    logger.log(level, formatter, extra={'action': action, 'params': params})


class MozbuildSandbox(Sandbox):
    """Implementation of a Sandbox tailored for mozbuild files.

    We expose a few useful functions and expose the set of variables defining
    Mozilla's build system.
    """
    def __init__(self, config, path):
        """Create an empty mozbuild Sandbox.

        config is a ConfigStatus instance (the output of configure). path is
        the path of the main mozbuild file that is being executed. It is used
        to compute encountered relative paths.
        """
        Sandbox.__init__(self, allowed_variables=VARIABLES)

        self.config = config

        topobjdir = os.path.abspath(config.topobjdir)

        # This may not always hold true. If we ever have autogenerated mozbuild
        # files in topobjdir, we'll need to change this.
        assert os.path.normpath(path).startswith(os.path.normpath(config.topsrcdir))
        assert not os.path.normpath(path).startswith(os.path.normpath(topobjdir))

        relpath = os.path.relpath(path, config.topsrcdir).replace(os.sep, '/')
        reldir = os.path.dirname(relpath)

        with self._globals.allow_all_writes() as d:
            d['TOPSRCDIR'] = config.topsrcdir
            d['TOPOBJDIR'] = topobjdir
            d['RELATIVEDIR'] = reldir
            d['SRCDIR'] = os.path.join(config.topsrcdir, reldir).replace(os.sep, '/').rstrip('/')
            d['OBJDIR'] = os.path.join(topobjdir, reldir).replace(os.sep, '/').rstrip('/')

            d['CONFIG'] = ReadOnlyDefaultDict(config.substs,
                global_default=None)

            # Register functions.
            for name, func in FUNCTIONS.items():
                d[name] = getattr(self, func[0])

        self._normalized_topsrcdir = os.path.normpath(config.topsrcdir)

    def exec_file(self, path, filesystem_absolute=False):
        """Override exec_file to normalize paths and restrict file loading.

        If the path is absolute, behavior is governed by filesystem_absolute.
        If filesystem_absolute is True, the path is interpreted as absolute on
        the actual filesystem. If it is false, the path is treated as absolute
        within the current topsrcdir.

        If the path is not absolute, it will be treated as relative to the
        currently executing file. If there is no currently executing file, it
        will be treated as relative to topsrcdir.

        Paths will be rejected if they do not fall under topsrcdir.
        """
        if os.path.isabs(path):
            if not filesystem_absolute:
                path = os.path.normpath(os.path.join(self.config.topsrcdir,
                    path[1:]))

        else:
            if len(self._execution_stack):
                path = os.path.normpath(os.path.join(
                    os.path.dirname(self._execution_stack[-1]),
                    path))
            else:
                path = os.path.normpath(os.path.join(
                    self.config.topsrcdir, path))

        # realpath() is needed for true security. But, this isn't for security
        # protection, so it is omitted.
        normalized_path = os.path.normpath(path)
        if not normalized_path.startswith(self._normalized_topsrcdir):
            raise SandboxLoadError(list(self._execution_stack),
                sys.exc_info()[2], illegal_path=path)

        Sandbox.exec_file(self, path)

    def _add_tier_directory(self, tier, reldir, static=False):
        """Register a tier directory with the build."""
        if isinstance(reldir, text_type):
            reldir = [reldir]

        if not tier in self['TIERS']:
            self['TIERS'][tier] = {
                'regular': [],
                'static': [],
            }

        key = 'static' if static else 'regular'

        for path in reldir:
            if path in self['TIERS'][tier][key]:
                raise Exception('Directory has already been registered with '
                    'tier: %s' % path)

            self['TIERS'][tier][key].append(path)

    def _include(self, path):
        """Include and exec another file within the context of this one."""

        # exec_file() handles normalization and verification of the path.
        self.exec_file(path)


class SandboxValidationError(Exception):
    """Represents an error encountered when validating sandbox results."""
    pass


class BuildReaderError(Exception):
    """Represents errors encountered during BuildReader execution.

    The main purpose of this class is to facilitate user-actionable error
    messages. Execution errors should say:

    - Why they failed
    - Where they failed
    - What can be done to prevent the error

    A lot of the code in this class should arguably be inside sandbox.py.
    However, extraction is somewhat difficult given the additions
    MozbuildSandbox has over Sandbox (e.g. the concept of included files -
    which affect error messages, of course).
    """
    def __init__(self, file_stack, trace, sandbox_exec_error=None,
        sandbox_load_error=None, validation_error=None, other_error=None):

        self.file_stack = file_stack
        self.trace = trace
        self.sandbox_exec = sandbox_exec_error
        self.sandbox_load = sandbox_load_error
        self.validation_error = validation_error
        self.other = other_error

    @property
    def main_file(self):
        return self.file_stack[-1]

    @property
    def actual_file(self):
        # We report the file that called out to the file that couldn't load.
        if self.sandbox_load is not None:
            if len(self.sandbox_load.file_stack) > 1:
                return self.sandbox_load.file_stack[-2]

            if len(self.file_stack) > 1:
                return self.file_stack[-2]

        if self.sandbox_error is not None and \
            len(self.sandbox_error.file_stack):
            return self.sandbox_error.file_stack[-1]

        return self.file_stack[-1]

    @property
    def sandbox_error(self):
        return self.sandbox_exec or self.sandbox_load

    def __str__(self):
        s = StringIO()

        delim = '=' * 30
        s.write('%s\nERROR PROCESSING MOZBUILD FILE\n%s\n\n' % (delim, delim))

        s.write('The error occurred while processing the following file:\n')
        s.write('\n')
        s.write(' %s\n' % self.actual_file)
        s.write('\n')

        if self.actual_file != self.main_file and not self.sandbox_load:
            s.write('This file was included as part of processing:\n')
            s.write('\n')
            s.write(' %s\n' % self.main_file)
            s.write('\n')

        if self.sandbox_error is not None:
            self._print_sandbox_error(s)
        elif self.validation_error is not None:
            s.write('The error occurred when validating the result of ')
            s.write('the execution. The reported error is:\n')
            s.write('\n')
            s.write(' %s\n' % self.validation_error.message)
            s.write('\n')
        else:
            s.write('The error appears to be part of the %s ' % __name__)
            s.write('Python module itself! It is possible you have stumbled ')
            s.write('across a legitimate bug.\n')
            s.write('\n')

            for l in traceback.format_exception(type(self.other), self.other,
                self.trace):
                s.write(unicode(l))

        return s.getvalue()

    def _print_sandbox_error(self, s):
        # Try to find the frame of the executed code.
        script_frame = None
        for frame in traceback.extract_tb(self.sandbox_error.trace):
            if frame[0] == self.actual_file:
                script_frame = frame

            # Reset if we enter a new execution context. This prevents errors
            # in this module from being attributed to a script.
            elif frame[0] == __file__ and frame[2] == 'exec_source':
                script_frame = None

        if script_frame is not None:
            s.write('The error was triggered on line %d ' % script_frame[1])
            s.write('of this file:\n')
            s.write('\n')
            s.write(' %s\n' % script_frame[3])
            s.write('\n')

        if self.sandbox_load is not None:
            self._print_sandbox_load_error(s)
            return

        self._print_sandbox_exec_error(s)

    def _print_sandbox_load_error(self, s):
        assert self.sandbox_load is not None

        if self.sandbox_load.illegal_path is not None:
            s.write('The underlying problem is an illegal file access. ')
            s.write('This is likely due to trying to access a file ')
            s.write('outside of the top source directory.\n')
            s.write('\n')
            s.write('The path whose access was denied is:\n')
            s.write('\n')
            s.write(' %s\n' % self.sandbox_load.illegal_path)
            s.write('\n')
            s.write('Modify the script to not access this file and ')
            s.write('try again.\n')
            return

        if self.sandbox_load.read_error is not None:
            if not os.path.exists(self.sandbox_load.read_error):
                s.write('The underlying problem is we referenced a path ')
                s.write('that does not exist. That path is:\n')
                s.write('\n')
                s.write(' %s\n' % self.sandbox_load.read_error)
                s.write('\n')
                s.write('Either create the file if it needs to exist or ')
                s.write('do not reference it.\n')
            else:
                s.write('The underlying problem is a referenced path could ')
                s.write('not be read. The trouble path is:\n')
                s.write('\n')
                s.write(' %s\n' % self.sandbox_load.read_error)
                s.write('\n')
                s.write('It is possible the path is not correct. Is it ')
                s.write('pointing to a directory? It could also be a file ')
                s.write('permissions issue. Ensure that the file is ')
                s.write('readable.\n')

            return

        # This module is buggy if you see this.
        raise AssertionError('SandboxLoadError with unhandled properties!')

    def _print_sandbox_exec_error(self, s):
        assert self.sandbox_exec is not None

        inner = self.sandbox_exec.exc_value

        if isinstance(inner, SyntaxError):
            s.write('The underlying problem is a Python syntax error ')
            s.write('on line %d:\n' % inner.lineno)
            s.write('\n')
            s.write(' %s\n' % inner.text)
            s.write((' ' * (inner.offset + 4)) + '^\n')
            s.write('\n')
            s.write('Fix the syntax error and try again.\n')
            return

        if isinstance(inner, KeyError):
            self._print_keyerror(inner, s)
        elif isinstance(inner, ValueError):
            self._print_valueerror(inner, s)
        else:
            self._print_exception(inner, s)

    def _print_keyerror(self, inner, s):
        if inner.args[0] not in ('global_ns', 'local_ns'):
            self._print_exception(inner, s)
            return

        if inner.args[0] == 'global_ns':
            verb = None
            if inner.args[1] == 'get_unknown':
                verb = 'read'
            elif inner.args[1] == 'set_unknown':
                verb = 'write'
            else:
                raise AssertionError('Unhandled global_ns: %s' % inner.args[1])

            s.write('The underlying problem is an attempt to %s ' % verb)
            s.write('a reserved UPPERCASE variable that does not exist.\n')
            s.write('\n')
            s.write('The variable %s causing the error is:\n' % verb)
            s.write('\n')
            s.write(' %s\n' % inner.args[2])
            s.write('\n')
            s.write('Please change the file to not use this variable.\n')
            s.write('\n')
            s.write('For reference, the set of valid variables is:\n')
            s.write('\n')
            s.write(', '.join(sorted(VARIABLES.keys())) + '\n')
            return

        s.write('The underlying problem is a reference to an undefined ')
        s.write('local variable:\n')
        s.write('\n')
        s.write(' %s\n' % inner.args[2])
        s.write('\n')
        s.write('Please change the file to not reference undefined ')
        s.write('variables and try again.\n')

    def _print_valueerror(self, inner, s):
        if inner.args[0] not in ('global_ns', 'local_ns'):
            self._print_exception(inner, s)
            return

        assert inner.args[1] == 'set_type'

        s.write('The underlying problem is an attempt to write an illegal ')
        s.write('value to a special variable.\n')
        s.write('\n')
        s.write('The variable whose value was rejected is:\n')
        s.write('\n')
        s.write(' %s' % inner.args[2])
        s.write('\n')
        s.write('The value being written to it was of the following type:\n')
        s.write('\n')
        s.write(' %s\n' % type(inner.args[3]).__name__)
        s.write('\n')
        s.write('This variable expects the following type(s):\n')
        s.write('\n')
        if type(inner.args[4]) == type_type:
            s.write(' %s\n' % inner.args[4].__name__)
        else:
            for t in inner.args[4]:
                s.write(' %s\n' % t.__name__)
        s.write('\n')
        s.write('Change the file to write a value of the appropriate type ')
        s.write('and try again.\n')

    def _print_exception(self, e, s):
        s.write('An error was encountered as part of executing the file ')
        s.write('itself. The error appears to be the fault of the script.\n')
        s.write('\n')
        s.write('The error as reported by Python is:\n')
        s.write('\n')
        s.write(' %s\n' % traceback.format_exception_only(type(e), e))


class BuildReader(object):
    """Read a tree of mozbuild files into data structures.

    This is where the build system starts. You give it a tree configuration
    (the output of configuration) and it executes the moz.build files and
    collects the data they define.
    """

    def __init__(self, config):
        self.config = config
        self.topsrcdir = config.topsrcdir

        self._log = logging.getLogger(__name__)
        self._read_files = set()
        self._normalized_topsrcdir = os.path.normpath(config.topsrcdir)
        self._execution_stack = []

    def read_topsrcdir(self):
        """Read the tree of mozbuild files into a data structure.

        This starts with the tree's top-most mozbuild file and descends into
        all linked mozbuild files until all relevant files have been evaluated.

        This is a generator of Sandbox instances. As each mozbuild file is
        read, a new Sandbox is created. Each created Sandbox is returned.
        """
        path = os.path.join(self.topsrcdir, 'moz.build')
        return self.read_mozbuild(path, read_tiers=True,
            filesystem_absolute=True)

    def read_mozbuild(self, path, read_tiers=False, filesystem_absolute=False,
        descend=True):
        """Read and process a mozbuild file, descending into children.

        This starts with a single mozbuild file, executes it, and descends into
        other referenced files per our traversal logic.

        The traversal logic is to iterate over the *DIRS variables, treating
        each element as a relative directory path. For each encountered
        directory, we will open the moz.build file located in that
        directory in a new Sandbox and process it.

        If read_tiers is True (it should only be True for the top-level
        mozbuild file in a project), the TIERS variable will be used for
        traversal as well.

        If descend is True (the default), we will descend into child
        directories and files per variable values.

        Traversal is performed depth first (for no particular reason).
        """
        self._execution_stack.append(path)
        try:
            for s in self._read_mozbuild(path, read_tiers=read_tiers,
                filesystem_absolute=filesystem_absolute, descend=descend):
                yield s

        except BuildReaderError as bre:
            raise bre

        except SandboxExecutionError as se:
            raise BuildReaderError(list(self._execution_stack),
                sys.exc_info()[2], sandbox_exec_error=se)

        except SandboxLoadError as sle:
            raise BuildReaderError(list(self._execution_stack),
                sys.exc_info()[2], sandbox_load_error=sle)

        except SandboxValidationError as ve:
            raise BuildReaderError(list(self._execution_stack),
                sys.exc_info()[2], validation_error=ve)

        except Exception as e:
            raise BuildReaderError(list(self._execution_stack),
                sys.exc_info()[2], other_error=e)

    def _read_mozbuild(self, path, read_tiers, filesystem_absolute, descend):
        path = os.path.normpath(path)
        log(self._log, logging.DEBUG, 'read_mozbuild', {'path': path},
            'Reading file: {path}')

        if path in self._read_files:
            log(self._log, logging.WARNING, 'read_already', {'path': path},
                'File already read. Skipping: {path}')
            return

        self._read_files.add(path)

        sandbox = MozbuildSandbox(self.config, path)
        sandbox.exec_file(path, filesystem_absolute=filesystem_absolute)
        yield sandbox

        # Traverse into referenced files.

        # We first collect directories populated in variables.
        dir_vars = ['DIRS', 'PARALLEL_DIRS', 'TOOL_DIRS']

        if self.config.substs.get('ENABLE_TESTS', False) == '1':
            dir_vars.extend(['TEST_DIRS', 'TEST_TOOL_DIRS'])

        # It's very tempting to use a set here. Unfortunately, the recursive
        # make backend needs order preserved. Once we autogenerate all backend
        # files, we should be able to convert this to a set.
        dirs = []
        for var in dir_vars:
            if not var in sandbox:
                continue

            for d in sandbox[var]:
                if d in dirs:
                    raise SandboxValidationError(
                        'Directory (%s) registered multiple times in %s' % (
                            d, var))

                dirs.append(d)

        # We also have tiers whose members are directories.
        if 'TIERS' in sandbox:
            if not read_tiers:
                raise SandboxValidationError(
                    'TIERS defined but it should not be')

            for tier, values in sandbox['TIERS'].items():
                for var in ('regular', 'static'):
                    for d in values[var]:
                        if d in dirs:
                            raise SandboxValidationError(
                                'Tier directory (%s) registered multiple '
                                'times in %s' % (d, tier))
                        dirs.append(d)

        curdir = os.path.dirname(path)
        for relpath in dirs:
            child_path = os.path.join(curdir, relpath, 'moz.build')

            # Ensure we don't break out of the topsrcdir. We don't do realpath
            # because it isn't necessary. If there are symlinks in the srcdir,
            # that's not our problem. We're not a hosted application: we don't
            # need to worry about security too much.
            child_path = os.path.normpath(child_path)
            if not child_path.startswith(self._normalized_topsrcdir):
                raise SandboxValidationError(
                    'Attempting to process file outside of topsrcdir: %s' %
                    child_path)

            if not descend:
                continue

            for res in self.read_mozbuild(child_path, read_tiers=False,
                filesystem_absolute=True):
                yield res

        self._execution_stack.pop()
348  python/mozbuild/mozbuild/frontend/sandbox.py  (Normal file)
@ -0,0 +1,348 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
r"""Python sandbox implementation for build files.
|
||||
|
||||
This module contains classes for Python sandboxes that execute in a
|
||||
highly-controlled environment.
|
||||
|
||||
The main class is `Sandbox`. This provides an execution environment for Python
|
||||
code.
|
||||
|
||||
The behavior inside sandboxes is mostly regulated by the `GlobalNamespace` and
|
||||
`LocalNamespace` classes. These represent the global and local namespaces in
|
||||
the sandbox, respectively.
|
||||
|
||||
Code in this module takes a different approach to exception handling compared
|
||||
to what you'd see elsewhere in Python. Arguments to built-in exceptions like
|
||||
KeyError are machine parseable. This machine-friendly data is used to present
|
||||
user-friendly error messages in the case of errors.
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import copy
|
||||
import os
|
||||
import sys
|
||||
|
||||
from contextlib import contextmanager
|
||||
|
||||
from mozbuild.util import (
|
||||
ReadOnlyDefaultDict,
|
||||
ReadOnlyDict,
|
||||
)
|
||||
|
||||
|
||||
class GlobalNamespace(dict):
    """Represents the globals namespace in a sandbox.

    This is a highly specialized dictionary employing light magic.

    At the crux we have the concept of a restricted keys set. Only very
    specific keys may be retrieved or mutated. The rules are as follows:

        - The '__builtins__' key is hardcoded and is read-only.
        - The set of variables that can be assigned or accessed during
          execution is passed into the constructor.

    When variables are assigned to, we verify assignment is allowed. Assignment
    is allowed if the variable is known (set defined at constructor time) and
    if the value being assigned is the expected type (also defined at
    constructor time).

    When variables are read, we first try to read the existing value. If a
    value is not found and it is defined in the allowed variables set, we
    return the default value for it. We don't assign default values until
    they are accessed because this makes debugging the end-result much
    simpler. Instead of a data structure with lots of empty/default values,
    you have a data structure with only the values that were read or touched.

    Instantiators of this class are given a backdoor to perform setting of
    arbitrary values. e.g.

        ns = GlobalNamespace()
        with ns.allow_all_writes():
            ns['foo'] = True

        ns['bar'] = True  # KeyError raised.
    """

    # The default set of builtins.
    BUILTINS = ReadOnlyDict({
        # Only real Python built-ins should go here.
        'None': None,
        'False': False,
        'True': True,
    })

    def __init__(self, allowed_variables=None, builtins=None):
        """Create a new global namespace having specific variables.

        allowed_variables is a dict of the variables that can be queried and
        mutated. Keys in this dict are the strings representing keys in this
        namespace which are valid. Values are tuples of type, default value,
        and a docstring describing the purpose of the variable.

        builtins is the value to use for the special __builtins__ key. If not
        defined, the BUILTINS constant attached to this class is used. The
        __builtins__ object is read-only.
        """
        builtins = builtins or self.BUILTINS

        assert isinstance(builtins, ReadOnlyDict)

        dict.__init__(self, {'__builtins__': builtins})

        self._allowed_variables = allowed_variables or {}

        # We need to record this because it gets swallowed as part of
        # evaluation.
        self.last_name_error = None

        self._allow_all_writes = False

    def __getitem__(self, name):
        try:
            return dict.__getitem__(self, name)
        except KeyError:
            pass

        # The variable isn't present yet. Fall back to VARIABLES.
        default = self._allowed_variables.get(name, None)
        if default is None:
            self.last_name_error = KeyError('global_ns', 'get_unknown', name)
            raise self.last_name_error

        dict.__setitem__(self, name, copy.deepcopy(default[1]))
        return dict.__getitem__(self, name)

    def __setitem__(self, name, value):
        if self._allow_all_writes:
            dict.__setitem__(self, name, value)
            return

        # We don't need to check for name.isupper() here because LocalNamespace
        # only sends variables our way if isupper() is True.
        default = self._allowed_variables.get(name, None)

        if default is None:
            self.last_name_error = KeyError('global_ns', 'set_unknown', name,
                value)
            raise self.last_name_error

        if not isinstance(value, default[0]):
            self.last_name_error = ValueError('global_ns', 'set_type', name,
                value, default[0])
            raise self.last_name_error

        dict.__setitem__(self, name, value)

    @contextmanager
    def allow_all_writes(self):
        """Allow any variable to be written to this instance.

        This is used as a context manager. When activated, all writes
        (__setitem__ calls) are allowed. When the context manager is exited,
        the instance goes back to its default behavior of only allowing
        whitelisted mutations.
        """
        self._allow_all_writes = True
        yield self
        self._allow_all_writes = False


class LocalNamespace(dict):
    """Represents the locals namespace in a Sandbox.

    This behaves like a dict except with some additional behavior tailored
    to our sandbox execution model.

    Under normal rules of exec(), doing things like += could have interesting
    consequences. Keep in mind that a += is really a read, followed by the
    creation of a new variable, followed by a write. If the read came from the
    global namespace, then the write would go to the local namespace, resulting
    in fragmentation. This is not desired.

    LocalNamespace proxies reads and writes for global-looking variables
    (read: UPPERCASE) to the global namespace. This means that attempting to
    read or write an unknown variable results in exceptions raised from the
    GlobalNamespace.
    """
    def __init__(self, global_ns):
        """Create a local namespace associated with a GlobalNamespace."""
        dict.__init__({})

        self._globals = global_ns
        self.last_name_error = None

    def __getitem__(self, name):
        if name.isupper():
            return self._globals[name]

        return dict.__getitem__(self, name)

    def __setitem__(self, name, value):
        if name.isupper():
            self._globals[name] = value
            return

        dict.__setitem__(self, name, value)


class SandboxError(Exception):
    def __init__(self, file_stack):
        self.file_stack = file_stack


class SandboxExecutionError(SandboxError):
    """Represents errors encountered during execution of a Sandbox.

    This is a simple container exception. Its purpose is to capture state
    so something else can report on it.
    """
    def __init__(self, file_stack, exc_type, exc_value, trace):
        SandboxError.__init__(self, file_stack)

        self.exc_type = exc_type
        self.exc_value = exc_value
        self.trace = trace


class SandboxLoadError(SandboxError):
    """Represents errors encountered when loading a file for execution.

    This exception represents errors in a Sandbox that occurred as part of
    loading a file. The error could have occurred in the course of executing
    a file. If so, the file_stack will be non-empty and the file that caused
    the load will be on top of the stack.
    """
    def __init__(self, file_stack, trace, illegal_path=None, read_error=None):
        SandboxError.__init__(self, file_stack)

        self.trace = trace
        self.illegal_path = illegal_path
        self.read_error = read_error


class Sandbox(object):
    """Represents a sandbox for executing Python code.

    This class both provides a sandbox for execution of a single mozbuild
    frontend file as well as an interface to the results of that execution.

    Sandbox is effectively a glorified wrapper around compile() + exec(). You
    point it at some Python code and it executes it. The main difference from
    executing Python code like normal is that the executed code is very limited
    in what it can do: the sandbox only exposes a very limited set of Python
    functionality. Only specific types and functions are available. This
    prevents executed code from doing things like import modules, open files,
    etc.

    Sandboxes are bound to a mozconfig instance. These objects are produced by
    the output of configure.

    Sandbox instances can be accessed like dictionaries to facilitate result
    retrieval. e.g. foo = sandbox['FOO']. Direct assignment is not allowed.

    Each sandbox has associated with it a GlobalNamespace and LocalNamespace.
    Only data stored in the GlobalNamespace is retrievable via the dict
    interface. This is because the local namespace should be irrelevant: it
    should only contain throwaway variables.
    """
    def __init__(self, allowed_variables=None, builtins=None):
        """Initialize a Sandbox ready for execution.

        The arguments are proxied to GlobalNamespace.__init__.
        """
        self._globals = GlobalNamespace(allowed_variables=allowed_variables,
            builtins=builtins)
        self._locals = LocalNamespace(self._globals)
        self._execution_stack = []

    def exec_file(self, path):
        """Execute code at a path in the sandbox.

        The path must be absolute.
        """
        assert os.path.isabs(path)

        source = None

        try:
            with open(path, 'rt') as fd:
                source = fd.read()
        except Exception as e:
            raise SandboxLoadError(list(self._execution_stack),
                sys.exc_info()[2], read_error=path)

        self.exec_source(source, path)

    def exec_source(self, source, path):
        """Execute Python code within a string.

        The passed string should contain Python code to be executed. The string
        will be compiled and executed.

        You should almost always go through exec_file() because exec_source()
        does not perform extra path normalization. This can cause relative
        paths to behave weirdly.
        """
        self._execution_stack.append(path)

        # We don't have to worry about bytecode generation here because we are
        # too low-level for that. However, we could add bytecode generation via
        # the marshal module if parsing performance were ever an issue.

        try:
            # compile() inherits the __future__ from the module by default. We
            # do want Unicode literals.
            code = compile(source, path, 'exec')
            exec(code, self._globals, self._locals)
        except SandboxError as e:
            raise e
        except NameError as e:
            # A NameError is raised when a local or global could not be found.
            # The original KeyError has been dropped by the interpreter.
            # However, we should have it cached in our namespace instances!

            # Unless a script is doing something wonky like catching NameError
            # itself (that would be silly), if there is an exception on the
            # global namespace, that's our error.
            actual = e

            if self._globals.last_name_error is not None:
                actual = self._globals.last_name_error
            elif self._locals.last_name_error is not None:
                actual = self._locals.last_name_error

            raise SandboxExecutionError(list(self._execution_stack),
                type(actual), actual, sys.exc_info()[2])

        except Exception as e:
            # Need to copy the stack otherwise we get a reference and that is
            # mutated during the finally.
            exc = sys.exc_info()
            raise SandboxExecutionError(list(self._execution_stack), exc[0],
                exc[1], exc[2])
        finally:
            self._execution_stack.pop()

    # Dict interface proxies reads to global namespace.
    def __len__(self):
        return len(self._globals)

    def __getitem__(self, name):
        return self._globals[name]

    def __iter__(self):
        return iter(self._globals)

    def iterkeys(self):
        return self.__iter__()

    def __contains__(self, key):
        return key in self._globals

    def get(self, key, default=None):
        return self._globals.get(key, default)
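A minimal usage sketch of the classes above (illustration only, not part of this patch), assuming the VARIABLES table defined in sandbox_symbols.py further down; the reader and the tests later in this commit are the real consumers.

# Hypothetical illustration only -- not part of this commit.
from mozbuild.frontend.sandbox import Sandbox
from mozbuild.frontend.sandbox_symbols import VARIABLES

sandbox = Sandbox(allowed_variables=VARIABLES)

# moz.build files are plain Python; only whitelisted UPPERCASE variables
# may be assigned, and values must match the declared types.
sandbox.exec_source('DIRS = ["foo", "bar"]', 'moz.build')

print(sandbox['DIRS'])   # ['foo', 'bar']
print('FOO' in sandbox)  # False -- unknown globals are rejected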
249
python/mozbuild/mozbuild/frontend/sandbox_symbols.py
Normal file
@ -0,0 +1,249 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
######################################################################
|
||||
# DO NOT UPDATE THIS FILE WITHOUT SIGN-OFF FROM A BUILD MODULE PEER. #
|
||||
######################################################################
|
||||
|
||||
r"""Defines the global config variables.
|
||||
|
||||
This module contains data structures defining the global symbols that have
|
||||
special meaning in the frontend files for the build system.
|
||||
|
||||
If you are looking for the absolute authority on what the global namespace in
|
||||
the Sandbox consists of, you've come to the right place.
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
def doc_to_paragraphs(doc):
|
||||
"""Take a documentation string and converts it to paragraphs.
|
||||
|
||||
This normalizes the inline strings in VARIABLES and elsewhere in this file.
|
||||
|
||||
It returns a list of paragraphs. It is up to the caller to insert newlines
|
||||
or to wrap long lines (e.g. by using textwrap.wrap()).
|
||||
"""
|
||||
lines = [line.strip() for line in doc.split('\n')]
|
||||
|
||||
paragraphs = []
|
||||
current = []
|
||||
for line in lines:
|
||||
if not len(line):
|
||||
if len(current):
|
||||
paragraphs.append(' '.join(current))
|
||||
current = []
|
||||
|
||||
continue
|
||||
|
||||
current.append(line)
|
||||
|
||||
if len(current):
|
||||
paragraphs.append(' '.join(current))
|
||||
|
||||
return paragraphs
|
||||
|
||||
|
||||
# This defines the set of mutable global variables.
|
||||
#
|
||||
# Each variable is a tuple of:
|
||||
#
|
||||
# (type, default_value, docs)
|
||||
#
|
||||
VARIABLES = {
|
||||
# Variables controlling reading of other frontend files.
|
||||
'DIRS': (list, [],
|
||||
"""Child directories to descend into looking for build frontend files.
|
||||
|
||||
This works similarly to the DIRS variable in make files. Each str value
|
||||
in the list is the name of a child directory. When this file is done
|
||||
parsing, the build reader will descend into each listed directory and
|
||||
read the frontend file there. If there is no frontend file, an error
|
||||
is raised.
|
||||
|
||||
Values are relative paths. They can be multiple directory levels
|
||||
above or below. Use ".." for parent directories and "/" for path
|
||||
delimiters.
|
||||
"""),
|
||||
|
||||
'PARALLEL_DIRS': (list, [],
|
||||
"""A parallel version of DIRS.
|
||||
|
||||
Ideally this variable does not exist. It is provided so a transition
|
||||
from recursive makefiles can be made. Once the build system has been
|
||||
converted to not use Makefile's for the build frontend, this will
|
||||
likely go away.
|
||||
"""),
|
||||
|
||||
'TOOL_DIRS': (list, [],
|
||||
"""Like DIRS but for tools.
|
||||
|
||||
Tools are for pieces of the build system that aren't required to
|
||||
produce a working binary (in theory). They provide things like test
|
||||
code and utilities.
|
||||
"""),
|
||||
|
||||
'TEST_DIRS': (list, [],
|
||||
"""Like DIRS but only for directories that contain test-only code.
|
||||
|
||||
If tests are not enabled, this variable will be ignored.
|
||||
|
||||
This variable may go away once the transition away from Makefiles is
|
||||
complete.
|
||||
"""),
|
||||
|
||||
'TEST_TOOL_DIRS': (list, [],
|
||||
"""TOOL_DIRS that is only executed if tests are enabled.
|
||||
"""),
|
||||
|
||||
|
||||
'TIERS': (OrderedDict, OrderedDict(),
|
||||
"""Defines directories constituting the tier traversal mechanism.
|
||||
|
||||
The recursive make backend iteration is organized into tiers. There are
|
||||
major tiers (keys in this dict) that correspond roughly to applications
|
||||
or libraries being built. e.g. base, nspr, js, platform, app. Within
|
||||
each tier are phases like export, libs, and tools. The recursive make
|
||||
backend iterates over each phase in the first tier then proceeds to the
|
||||
next tier until all tiers are exhausted.
|
||||
|
||||
Tiers are a way of working around deficiencies in recursive make. These
|
||||
will probably disappear once we no longer rely on recursive make for
|
||||
the build backend. They will likely be replaced by DIRS.
|
||||
|
||||
This variable is typically not populated directly. Instead, it is
|
||||
populated by calling add_tier_dir().
|
||||
"""),
|
||||
}
|
||||
|
||||
# The set of functions exposed to the sandbox.
|
||||
#
|
||||
# Each entry is a tuple of:
|
||||
#
|
||||
# (method attribute, (argument types), docs)
|
||||
#
|
||||
# The first element is an attribute on Sandbox that should be a function type.
|
||||
#
|
||||
FUNCTIONS = {
|
||||
'include': ('_include', (str,),
|
||||
"""Include another mozbuild file in the context of this one.
|
||||
|
||||
This is similar to a #include in C languages. The filename passed to
|
||||
the function will be read and its contents will be evaluated within the
|
||||
context of the calling file.
|
||||
|
||||
If a relative path is given, it is evaluated as relative to the file
|
||||
currently being processed. If there is a chain of multiple include(),
|
||||
the relative path computation is from the most recent/active file.
|
||||
|
||||
If an absolute path is given, it is evaluated from TOPSRCDIR. In other
|
||||
words, include('/foo') references the path TOPSRCDIR + '/foo'.
|
||||
|
||||
Example usage
|
||||
-------------
|
||||
|
||||
# Include "sibling.build" from the current directory.
|
||||
include('sibling.build')
|
||||
|
||||
# Include "foo.build" from a path within the top source directory.
|
||||
include('/elsewhere/foo.build')
|
||||
"""),
|
||||
|
||||
'add_tier_dir': ('_add_tier_directory', (str, [str, list], bool),
|
||||
"""Register a directory for tier traversal.
|
||||
|
||||
This is the preferred way to populate the TIERS variable.
|
||||
|
||||
Tiers are how the build system is organized. The build process is
|
||||
divided into major phases called tiers. The most important tiers are
|
||||
"platform" and "apps." The platform tier builds the Gecko platform
|
||||
(typically outputting libxul). The apps tier builds the configured
|
||||
application (browser, mobile/android, b2g, etc).
|
||||
|
||||
This function is typically only called by the main moz.build file or a
|
||||
file directly included by the main moz.build file. An error will be
|
||||
raised if it is called when it shouldn't be.
|
||||
|
||||
An error will also occur if you attempt to add the same directory to
|
||||
the same tier multiple times.
|
||||
|
||||
Example usage
|
||||
-------------
|
||||
|
||||
# Register a single directory with the 'platform' tier.
|
||||
add_tier_dir('platform', 'xul')
|
||||
|
||||
# Register multiple directories with the 'app' tier.
|
||||
add_tier_dir('app', ['components', 'base'])
|
||||
|
||||
# Register a directory as having static content (no dependencies).
|
||||
add_tier_dir('base', 'foo', static=True)
|
||||
"""),
|
||||
|
||||
}
|
||||
|
||||
# Special variables. These complement VARIABLES.
|
||||
SPECIAL_VARIABLES = {
|
||||
'TOPSRCDIR': (str,
|
||||
"""Constant defining the top source directory.
|
||||
|
||||
The top source directory is the parent directory containing the source
|
||||
code and all build files. It is typically the root directory of a
|
||||
cloned repository.
|
||||
"""),
|
||||
|
||||
'TOPOBJDIR': (str,
|
||||
"""Constant defining the top object directory.
|
||||
|
||||
The top object directory is the parent directory which will contain
|
||||
the output of the build. This is commonly referred to as "the object
|
||||
directory."
|
||||
"""),
|
||||
|
||||
'RELATIVEDIR': (str,
|
||||
"""Constant defining the relative path of this file.
|
||||
|
||||
The relative path is from TOPSRCDIR. This is defined as relative to the
|
||||
main file being executed, regardless of whether additional files have
|
||||
been included using include().
|
||||
"""),
|
||||
|
||||
'SRCDIR': (str,
|
||||
"""Constant defining the source directory of this file.
|
||||
|
||||
This is the path inside TOPSRCDIR where this file is located. It is the
|
||||
same as TOPSRCDIR + RELATIVEDIR.
|
||||
"""),
|
||||
|
||||
'OBJDIR': (str,
|
||||
"""The path to the object directory for this file.
|
||||
|
||||
It is the same as TOPOBJDIR + RELATIVEDIR.
|
||||
"""),
|
||||
|
||||
'CONFIG': (dict,
|
||||
"""Dictionary containing the current configuration variables.
|
||||
|
||||
All the variables defined by the configuration system are available
|
||||
through this object. e.g. ENABLE_TESTS, CFLAGS, etc.
|
||||
|
||||
Values in this container are read-only. Attempts at changing values
|
||||
will result in a run-time error.
|
||||
|
||||
Access to an unknown variable will return None.
|
||||
"""),
|
||||
|
||||
'__builtins__': (dict,
|
||||
"""Exposes Python built-in types.
|
||||
|
||||
The set of exposed Python built-ins is currently:
|
||||
|
||||
True
|
||||
False
|
||||
None
|
||||
"""),
|
||||
}
|
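Each entry in the VARIABLES table above is a (type, default, doc) tuple, and doc_to_paragraphs() is the helper a consumer would use to render the doc text. A minimal sketch (illustration only, not part of this patch):

# Hypothetical illustration only -- not part of this commit.
from mozbuild.frontend.sandbox_symbols import VARIABLES, doc_to_paragraphs

storage_type, default, doc = VARIABLES['DIRS']
assert storage_type is list and default == []

for paragraph in doc_to_paragraphs(doc):
    print(paragraph)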
31
python/mozbuild/mozbuild/test/common.py
Normal file
@ -0,0 +1,31 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import unicode_literals

import os

from mach.logging import LoggingManager


# By including this module, tests get structured logging.
log_manager = LoggingManager()
log_manager.add_terminal_logging()

# mozconfig is not a reusable type (it's actually a module) so we
# have to mock it.
class MockConfig(object):
    def __init__(self, topsrcdir='/path/to/topsrcdir'):
        self.topsrcdir = topsrcdir
        self.topobjdir = '/path/to/topobjdir'

        self.substs = {
            'MOZ_FOO': 'foo',
            'MOZ_BAR': 'bar',
            'MOZ_TRUE': '1',
            'MOZ_FALSE': '',
        }

    def child_path(self, p):
        return os.path.join(self.topsrcdir, p)
0
python/mozbuild/mozbuild/test/frontend/__init__.py
Normal file
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS += ['bar']
|
@ -0,0 +1,6 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['foo']
|
||||
|
||||
include('included.build')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
include('included-2.build')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
ILLEGAL = True
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
include('included-1.build')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
include('missing.build')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
include('../moz.build')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
include('../parent.build')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
include('grandchild/grandchild.build')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
include('../../parent.build')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['foo']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
include('/sibling.build')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['foo']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['foo']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
ILLEGAL = True
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
ILLEGAL = True
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
include('child.build')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
include('missing.build')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
include('../include-basic/moz.build')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
l = FOO
|
@ -0,0 +1,6 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['foo']
|
||||
|
||||
DIRS += ['foo']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
foo = True + None
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
foo =
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = 'dir'
|
@ -0,0 +1,6 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['dir1', 'dir2']
|
||||
|
||||
FOO = 'bar'
|
@ -0,0 +1,8 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS += ['regular']
|
||||
PARALLEL_DIRS = ['parallel']
|
||||
TEST_DIRS = ['test']
|
||||
TEST_TOOL_DIRS = ['test_tool']
|
||||
TOOL_DIRS = ['tool']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['../../foo']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['../bar']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['foo']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['../foo']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['../bar']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['foo', 'bar']
|
@ -0,0 +1 @@
|
||||
DIRS = ['biz']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['foo', 'bar']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
add_tier_dir('illegal', 'IRRELEVANT')
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['foo']
|
@ -0,0 +1,4 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS = ['biz']
|
@ -0,0 +1,8 @@
|
||||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
add_tier_dir('t1', 'foo')
|
||||
add_tier_dir('t1', 'foo_static', static=True)
|
||||
|
||||
add_tier_dir('t2', 'bar')
|
||||
add_tier_dir('t3', 'baz', static=True)
|
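Per the add_tier_dir() documentation and the reader/emitter tests that follow, the tier fixture directly above would leave the sandbox with a TIERS structure roughly like this (illustration only, not part of this patch):

# Hypothetical illustration only -- not part of this commit.
# sandbox['TIERS'] after executing the fixture above (an OrderedDict):
#
#   {'t1': {'regular': ['foo'], 'static': ['foo_static']},
#    't2': {'regular': ['bar'], 'static': []},
#    't3': {'regular': [], 'static': ['baz']}}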
85
python/mozbuild/mozbuild/test/frontend/test_emitter.py
Normal file
@ -0,0 +1,85 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import os
|
||||
import unittest
|
||||
|
||||
from mozbuild.frontend.data import DirectoryTraversal
|
||||
from mozbuild.frontend.emitter import TreeMetadataEmitter
|
||||
from mozbuild.frontend.reader import BuildReader
|
||||
|
||||
from mozbuild.test.common import MockConfig
|
||||
|
||||
|
||||
data_path = os.path.abspath(os.path.dirname(__file__))
|
||||
data_path = os.path.join(data_path, 'data')
|
||||
|
||||
|
||||
class TestEmitterBasic(unittest.TestCase):
|
||||
def reader(self, name):
|
||||
config = MockConfig(os.path.join(data_path, name))
|
||||
config.substs['ENABLE_TESTS'] = '1'
|
||||
|
||||
return BuildReader(config)
|
||||
|
||||
def test_dirs_traversal_simple(self):
|
||||
reader = self.reader('traversal-simple')
|
||||
emitter = TreeMetadataEmitter(reader.config)
|
||||
|
||||
objs = list(emitter.emit(reader.read_topsrcdir()))
|
||||
|
||||
self.assertEqual(len(objs), 4)
|
||||
|
||||
for o in objs:
|
||||
self.assertIsInstance(o, DirectoryTraversal)
|
||||
self.assertEqual(o.parallel_dirs, [])
|
||||
self.assertEqual(o.tool_dirs, [])
|
||||
self.assertEqual(o.test_dirs, [])
|
||||
self.assertEqual(o.test_tool_dirs, [])
|
||||
self.assertEqual(len(o.tier_dirs), 0)
|
||||
self.assertEqual(len(o.tier_static_dirs), 0)
|
||||
|
||||
reldirs = [o.relativedir for o in objs]
|
||||
self.assertEqual(reldirs, ['', 'foo', 'foo/biz', 'bar'])
|
||||
|
||||
dirs = [o.dirs for o in objs]
|
||||
self.assertEqual(dirs, [['foo', 'bar'], ['biz'], [], []])
|
||||
|
||||
def test_traversal_all_vars(self):
|
||||
reader = self.reader('traversal-all-vars')
|
||||
emitter = TreeMetadataEmitter(reader.config)
|
||||
|
||||
objs = list(emitter.emit(reader.read_topsrcdir()))
|
||||
self.assertEqual(len(objs), 6)
|
||||
|
||||
for o in objs:
|
||||
self.assertIsInstance(o, DirectoryTraversal)
|
||||
|
||||
reldirs = set([o.relativedir for o in objs])
|
||||
self.assertEqual(reldirs, set(['', 'parallel', 'regular', 'test',
|
||||
'test_tool', 'tool']))
|
||||
|
||||
for o in objs:
|
||||
reldir = o.relativedir
|
||||
|
||||
if reldir == '':
|
||||
self.assertEqual(o.dirs, ['regular'])
|
||||
self.assertEqual(o.parallel_dirs, ['parallel'])
|
||||
self.assertEqual(o.test_dirs, ['test'])
|
||||
self.assertEqual(o.test_tool_dirs, ['test_tool'])
|
||||
self.assertEqual(o.tool_dirs, ['tool'])
|
||||
|
||||
def test_tier_simple(self):
|
||||
reader = self.reader('traversal-tier-simple')
|
||||
emitter = TreeMetadataEmitter(reader.config)
|
||||
|
||||
objs = list(emitter.emit(reader.read_topsrcdir()))
|
||||
self.assertEqual(len(objs), 6)
|
||||
|
||||
reldirs = [o.relativedir for o in objs]
|
||||
self.assertEqual(reldirs, ['', 'foo', 'foo/biz', 'foo_static', 'bar',
|
||||
'baz'])
|
||||
|
132
python/mozbuild/mozbuild/test/frontend/test_namespaces.py
Normal file
@ -0,0 +1,132 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import unittest
|
||||
|
||||
from mozunit import main
|
||||
|
||||
from mozbuild.frontend.sandbox import (
|
||||
GlobalNamespace,
|
||||
LocalNamespace,
|
||||
)
|
||||
|
||||
from mozbuild.frontend.sandbox_symbols import VARIABLES
|
||||
|
||||
|
||||
class TestGlobalNamespace(unittest.TestCase):
|
||||
def test_builtins(self):
|
||||
ns = GlobalNamespace()
|
||||
|
||||
self.assertIn('__builtins__', ns)
|
||||
self.assertEqual(ns['__builtins__']['True'], True)
|
||||
|
||||
def test_key_rejection(self):
|
||||
# Lowercase keys should be rejected during normal operation.
|
||||
ns = GlobalNamespace(allowed_variables=VARIABLES)
|
||||
|
||||
with self.assertRaises(KeyError) as ke:
|
||||
ns['foo'] = True
|
||||
|
||||
e = ke.exception.args
|
||||
self.assertEqual(e[0], 'global_ns')
|
||||
self.assertEqual(e[1], 'set_unknown')
|
||||
self.assertEqual(e[2], 'foo')
|
||||
self.assertTrue(e[3])
|
||||
|
||||
# Unknown uppercase keys should be rejected.
|
||||
with self.assertRaises(KeyError) as ke:
|
||||
ns['FOO'] = True
|
||||
|
||||
e = ke.exception.args
|
||||
self.assertEqual(e[0], 'global_ns')
|
||||
self.assertEqual(e[1], 'set_unknown')
|
||||
self.assertEqual(e[2], 'FOO')
|
||||
self.assertTrue(e[3])
|
||||
|
||||
def test_allowed_set(self):
|
||||
self.assertIn('DIRS', VARIABLES)
|
||||
|
||||
ns = GlobalNamespace(allowed_variables=VARIABLES)
|
||||
|
||||
ns['DIRS'] = ['foo']
|
||||
self.assertEqual(ns['DIRS'], ['foo'])
|
||||
|
||||
def test_value_checking(self):
|
||||
ns = GlobalNamespace(allowed_variables=VARIABLES)
|
||||
|
||||
# Setting to a non-allowed type should not work.
|
||||
with self.assertRaises(ValueError) as ve:
|
||||
ns['DIRS'] = True
|
||||
|
||||
e = ve.exception.args
|
||||
self.assertEqual(e[0], 'global_ns')
|
||||
self.assertEqual(e[1], 'set_type')
|
||||
self.assertEqual(e[2], 'DIRS')
|
||||
self.assertTrue(e[3])
|
||||
self.assertEqual(e[4], list)
|
||||
|
||||
def test_allow_all_writes(self):
|
||||
ns = GlobalNamespace(allowed_variables=VARIABLES)
|
||||
|
||||
with ns.allow_all_writes() as d:
|
||||
d['foo'] = True
|
||||
self.assertTrue(d['foo'])
|
||||
|
||||
with self.assertRaises(KeyError) as ke:
|
||||
ns['foo'] = False
|
||||
|
||||
self.assertEqual(ke.exception.args[1], 'set_unknown')
|
||||
|
||||
self.assertTrue(d['foo'])
|
||||
|
||||
def test_key_checking(self):
|
||||
# Checking for existence of a key should not populate the key if it
|
||||
# doesn't exist.
|
||||
g = GlobalNamespace(allowed_variables=VARIABLES)
|
||||
|
||||
self.assertFalse('DIRS' in g)
|
||||
self.assertFalse('DIRS' in g)
|
||||
|
||||
|
||||
class TestLocalNamespace(unittest.TestCase):
|
||||
def test_locals(self):
|
||||
g = GlobalNamespace(allowed_variables=VARIABLES)
|
||||
l = LocalNamespace(g)
|
||||
|
||||
l['foo'] = ['foo']
|
||||
self.assertEqual(l['foo'], ['foo'])
|
||||
|
||||
l['foo'] += ['bar']
|
||||
self.assertEqual(l['foo'], ['foo', 'bar'])
|
||||
|
||||
def test_global_proxy_reads(self):
|
||||
g = GlobalNamespace(allowed_variables=VARIABLES)
|
||||
g['DIRS'] = ['foo']
|
||||
|
||||
l = LocalNamespace(g)
|
||||
|
||||
self.assertEqual(l['DIRS'], g['DIRS'])
|
||||
|
||||
# Reads to missing UPPERCASE vars should result in KeyError.
|
||||
with self.assertRaises(KeyError) as ke:
|
||||
v = l['FOO']
|
||||
|
||||
e = ke.exception
|
||||
self.assertEqual(e.args[0], 'global_ns')
|
||||
self.assertEqual(e.args[1], 'get_unknown')
|
||||
|
||||
def test_global_proxy_writes(self):
|
||||
g = GlobalNamespace(allowed_variables=VARIABLES)
|
||||
l = LocalNamespace(g)
|
||||
|
||||
l['DIRS'] = ['foo']
|
||||
|
||||
self.assertEqual(l['DIRS'], ['foo'])
|
||||
self.assertEqual(g['DIRS'], ['foo'])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
232
python/mozbuild/mozbuild/test/frontend/test_reader.py
Normal file
@ -0,0 +1,232 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
from mozunit import main
|
||||
|
||||
from mozbuild.frontend.reader import BuildReaderError
|
||||
from mozbuild.frontend.reader import BuildReader
|
||||
|
||||
from mozbuild.test.common import MockConfig
|
||||
|
||||
|
||||
if sys.version_info.major == 2:
|
||||
text_type = 'unicode'
|
||||
else:
|
||||
text_type = 'str'
|
||||
|
||||
data_path = os.path.abspath(os.path.dirname(__file__))
|
||||
data_path = os.path.join(data_path, 'data')
|
||||
|
||||
|
||||
class TestBuildReader(unittest.TestCase):
|
||||
def config(self, name):
|
||||
path = os.path.join(data_path, name)
|
||||
|
||||
return MockConfig(path)
|
||||
|
||||
def reader(self, name, enable_tests=False):
|
||||
config = self.config(name)
|
||||
|
||||
if enable_tests:
|
||||
config.substs['ENABLE_TESTS'] = '1'
|
||||
|
||||
return BuildReader(config)
|
||||
|
||||
def file_path(self, name, *args):
|
||||
return os.path.join(data_path, name, *args)
|
||||
|
||||
def test_dirs_traversal_simple(self):
|
||||
reader = self.reader('traversal-simple')
|
||||
|
||||
sandboxes = list(reader.read_topsrcdir())
|
||||
|
||||
self.assertEqual(len(sandboxes), 4)
|
||||
|
||||
def test_dirs_traversal_no_descend(self):
|
||||
reader = self.reader('traversal-simple')
|
||||
|
||||
path = os.path.join(reader.topsrcdir, 'moz.build')
|
||||
self.assertTrue(os.path.exists(path))
|
||||
|
||||
sandboxes = list(reader.read_mozbuild(path,
|
||||
filesystem_absolute=True, descend=False))
|
||||
|
||||
self.assertEqual(len(sandboxes), 1)
|
||||
|
||||
def test_dirs_traversal_all_variables(self):
|
||||
reader = self.reader('traversal-all-vars', enable_tests=True)
|
||||
|
||||
sandboxes = list(reader.read_topsrcdir())
|
||||
self.assertEqual(len(sandboxes), 6)
|
||||
|
||||
def test_tiers_traversal(self):
|
||||
reader = self.reader('traversal-tier-simple')
|
||||
|
||||
sandboxes = list(reader.read_topsrcdir())
|
||||
self.assertEqual(len(sandboxes), 6)
|
||||
|
||||
def test_tier_subdir(self):
|
||||
# add_tier_dir() should fail when not in the top directory.
|
||||
reader = self.reader('traversal-tier-fails-in-subdir')
|
||||
|
||||
with self.assertRaises(Exception):
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
def test_relative_dirs(self):
|
||||
# Ensure relative directories are traversed.
|
||||
reader = self.reader('traversal-relative-dirs')
|
||||
|
||||
sandboxes = list(reader.read_topsrcdir())
|
||||
self.assertEqual(len(sandboxes), 3)
|
||||
|
||||
def test_repeated_dirs_ignored(self):
|
||||
# Ensure repeated directories are ignored.
|
||||
reader = self.reader('traversal-repeated-dirs')
|
||||
|
||||
sandboxes = list(reader.read_topsrcdir())
|
||||
self.assertEqual(len(sandboxes), 3)
|
||||
|
||||
def test_outside_topsrcdir(self):
|
||||
# References to directories outside the topsrcdir should fail.
|
||||
reader = self.reader('traversal-outside-topsrcdir')
|
||||
|
||||
with self.assertRaises(Exception):
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
def test_error_basic(self):
|
||||
reader = self.reader('reader-error-basic')
|
||||
|
||||
with self.assertRaises(BuildReaderError) as bre:
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
e = bre.exception
|
||||
self.assertEqual(e.actual_file, self.file_path('reader-error-basic',
|
||||
'moz.build'))
|
||||
|
||||
self.assertIn('The error occurred while processing the', str(e))
|
||||
|
||||
def test_error_included_from(self):
|
||||
reader = self.reader('reader-error-included-from')
|
||||
|
||||
with self.assertRaises(BuildReaderError) as bre:
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
e = bre.exception
|
||||
self.assertEqual(e.actual_file,
|
||||
self.file_path('reader-error-included-from', 'child.build'))
|
||||
self.assertEqual(e.main_file,
|
||||
self.file_path('reader-error-included-from', 'moz.build'))
|
||||
|
||||
self.assertIn('This file was included as part of processing', str(e))
|
||||
|
||||
def test_error_syntax_error(self):
|
||||
reader = self.reader('reader-error-syntax')
|
||||
|
||||
with self.assertRaises(BuildReaderError) as bre:
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
e = bre.exception
|
||||
self.assertIn('Python syntax error on line 4', str(e))
|
||||
self.assertIn(' foo =', str(e))
|
||||
self.assertIn(' ^', str(e))
|
||||
|
||||
def test_error_read_unknown_global(self):
|
||||
reader = self.reader('reader-error-read-unknown-global')
|
||||
|
||||
with self.assertRaises(BuildReaderError) as bre:
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
e = bre.exception
|
||||
self.assertIn('The error was triggered on line 4', str(e))
|
||||
self.assertIn('The underlying problem is an attempt to read', str(e))
|
||||
self.assertIn(' FOO', str(e))
|
||||
|
||||
def test_error_write_unknown_global(self):
|
||||
reader = self.reader('reader-error-write-unknown-global')
|
||||
|
||||
with self.assertRaises(BuildReaderError) as bre:
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
e = bre.exception
|
||||
self.assertIn('The error was triggered on line 6', str(e))
|
||||
self.assertIn('The underlying problem is an attempt to write', str(e))
|
||||
self.assertIn(' FOO', str(e))
|
||||
|
||||
def test_error_write_bad_value(self):
|
||||
reader = self.reader('reader-error-write-bad-value')
|
||||
|
||||
with self.assertRaises(BuildReaderError) as bre:
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
e = bre.exception
|
||||
self.assertIn('The error was triggered on line 4', str(e))
|
||||
self.assertIn('is an attempt to write an illegal value to a special',
|
||||
str(e))
|
||||
|
||||
self.assertIn('variable whose value was rejected is:\n\n DIRS',
|
||||
str(e))
|
||||
|
||||
self.assertIn('written to it was of the following type:\n\n %s' % text_type,
|
||||
str(e))
|
||||
|
||||
self.assertIn('expects the following type(s):\n\n list', str(e))
|
||||
|
||||
def test_error_illegal_path(self):
|
||||
reader = self.reader('reader-error-outside-topsrcdir')
|
||||
|
||||
with self.assertRaises(BuildReaderError) as bre:
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
e = bre.exception
|
||||
self.assertIn('The underlying problem is an illegal file access',
|
||||
str(e))
|
||||
|
||||
def test_error_missing_include_path(self):
|
||||
reader = self.reader('reader-error-missing-include')
|
||||
|
||||
with self.assertRaises(BuildReaderError) as bre:
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
e = bre.exception
|
||||
self.assertIn('we referenced a path that does not exist', str(e))
|
||||
|
||||
def test_error_script_error(self):
|
||||
reader = self.reader('reader-error-script-error')
|
||||
|
||||
with self.assertRaises(BuildReaderError) as bre:
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
e = bre.exception
|
||||
self.assertIn('The error appears to be the fault of the script',
|
||||
str(e))
|
||||
self.assertIn(' ["TypeError: unsupported operand', str(e))
|
||||
|
||||
def test_error_bad_dir(self):
|
||||
reader = self.reader('reader-error-bad-dir')
|
||||
|
||||
with self.assertRaises(BuildReaderError) as bre:
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
e = bre.exception
|
||||
self.assertIn('we referenced a path that does not exist', str(e))
|
||||
|
||||
def test_error_repeated_dir(self):
|
||||
reader = self.reader('reader-error-repeated-dir')
|
||||
|
||||
with self.assertRaises(BuildReaderError) as bre:
|
||||
list(reader.read_topsrcdir())
|
||||
|
||||
e = bre.exception
|
||||
self.assertIn('Directory (foo) registered multiple times in DIRS',
|
||||
str(e))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
272
python/mozbuild/mozbuild/test/frontend/test_sandbox.py
Normal file
@ -0,0 +1,272 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
from mozunit import main
|
||||
|
||||
from mozbuild.frontend.reader import MozbuildSandbox
|
||||
|
||||
from mozbuild.frontend.sandbox import (
|
||||
SandboxExecutionError,
|
||||
SandboxLoadError,
|
||||
)
|
||||
|
||||
from mozbuild.frontend.sandbox_symbols import (
|
||||
FUNCTIONS,
|
||||
SPECIAL_VARIABLES,
|
||||
VARIABLES,
|
||||
)
|
||||
|
||||
from mozbuild.test.common import MockConfig
|
||||
|
||||
|
||||
test_data_path = os.path.abspath(os.path.dirname(__file__))
|
||||
test_data_path = os.path.join(test_data_path, 'data')
|
||||
|
||||
|
||||
class TestSandbox(unittest.TestCase):
|
||||
def sandbox(self, relpath='moz.build', data_path=None):
|
||||
config = None
|
||||
|
||||
if data_path is not None:
|
||||
config = MockConfig(os.path.join(test_data_path, data_path))
|
||||
else:
|
||||
config = MockConfig()
|
||||
|
||||
return MozbuildSandbox(config, config.child_path(relpath))
|
||||
|
||||
def test_default_state(self):
|
||||
sandbox = self.sandbox()
|
||||
config = sandbox.config
|
||||
|
||||
self.assertEqual(sandbox['TOPSRCDIR'], config.topsrcdir)
|
||||
self.assertEqual(sandbox['TOPOBJDIR'],
|
||||
os.path.abspath(config.topobjdir))
|
||||
self.assertEqual(sandbox['RELATIVEDIR'], '')
|
||||
self.assertEqual(sandbox['SRCDIR'], config.topsrcdir)
|
||||
self.assertEqual(sandbox['OBJDIR'],
|
||||
os.path.abspath(config.topobjdir).replace(os.sep, '/'))
|
||||
|
||||
def test_symbol_presence(self):
|
||||
# Ensure no discrepancies between the master symbol table and what's in
|
||||
# the sandbox.
|
||||
sandbox = self.sandbox()
|
||||
|
||||
all_symbols = set()
|
||||
all_symbols |= set(FUNCTIONS.keys())
|
||||
all_symbols |= set(SPECIAL_VARIABLES.keys())
|
||||
|
||||
for symbol in sandbox:
|
||||
self.assertIn(symbol, all_symbols)
|
||||
all_symbols.remove(symbol)
|
||||
|
||||
self.assertEqual(len(all_symbols), 0)
|
||||
|
||||
def test_path_calculation(self):
|
||||
sandbox = self.sandbox('foo/bar/moz.build')
|
||||
config = sandbox.config
|
||||
|
||||
self.assertEqual(sandbox['RELATIVEDIR'], 'foo/bar')
|
||||
self.assertEqual(sandbox['SRCDIR'], '/'.join([config.topsrcdir,
|
||||
'foo/bar']))
|
||||
self.assertEqual(sandbox['OBJDIR'],
|
||||
os.path.abspath('/'.join([config.topobjdir, 'foo/bar'])).replace(os.sep, '/'))
|
||||
|
||||
def test_config_access(self):
|
||||
sandbox = self.sandbox()
|
||||
config = sandbox.config
|
||||
|
||||
self.assertIn('CONFIG', sandbox)
|
||||
self.assertEqual(sandbox['CONFIG']['MOZ_TRUE'], '1')
|
||||
self.assertEqual(sandbox['CONFIG']['MOZ_FOO'], config.substs['MOZ_FOO'])
|
||||
|
||||
# Access to an undefined substitution should return None.
|
||||
self.assertNotIn('MISSING', sandbox['CONFIG'])
|
||||
self.assertIsNone(sandbox['CONFIG']['MISSING'])
|
||||
|
||||
# We shouldn't be allowed to assign to the config.
|
||||
with self.assertRaises(Exception):
|
||||
sandbox['CONFIG']['FOO'] = ''
|
||||
|
||||
def test_dict_interface(self):
|
||||
sandbox = self.sandbox()
|
||||
config = sandbox.config
|
||||
|
||||
self.assertFalse('foo' in sandbox)
|
||||
self.assertFalse('FOO' in sandbox)
|
||||
|
||||
self.assertTrue(sandbox.get('foo', True))
|
||||
self.assertEqual(sandbox.get('TOPSRCDIR'), config.topsrcdir)
|
||||
self.assertGreater(len(sandbox), 6)
|
||||
|
||||
for key in sandbox:
|
||||
continue
|
||||
|
||||
for key in sandbox.iterkeys():
|
||||
continue
|
||||
|
||||
def test_exec_source_success(self):
|
||||
sandbox = self.sandbox()
|
||||
|
||||
sandbox.exec_source('foo = True', 'foo.py')
|
||||
|
||||
self.assertNotIn('foo', sandbox)
|
||||
|
||||
def test_exec_compile_error(self):
|
||||
sandbox = self.sandbox()
|
||||
|
||||
with self.assertRaises(SandboxExecutionError) as se:
|
||||
sandbox.exec_source('2f23;k;asfj', 'foo.py')
|
||||
|
||||
self.assertEqual(se.exception.file_stack, ['foo.py'])
|
||||
self.assertIsInstance(se.exception.exc_value, SyntaxError)
|
||||
|
||||
def test_exec_import_denied(self):
|
||||
sandbox = self.sandbox()
|
||||
|
||||
with self.assertRaises(SandboxExecutionError) as se:
|
||||
sandbox.exec_source('import sys', 'import.py')
|
||||
|
||||
self.assertIsInstance(se.exception, SandboxExecutionError)
|
||||
self.assertEqual(se.exception.exc_type, ImportError)
|
||||
|
||||
def test_exec_source_multiple(self):
|
||||
sandbox = self.sandbox()
|
||||
|
||||
sandbox.exec_source('DIRS = ["foo"]', 'foo.py')
|
||||
sandbox.exec_source('DIRS = ["bar"]', 'foo.py')
|
||||
|
||||
self.assertEqual(sandbox['DIRS'], ['bar'])
|
||||
|
||||
def test_exec_source_illegal_key_set(self):
|
||||
sandbox = self.sandbox()
|
||||
|
||||
with self.assertRaises(SandboxExecutionError) as se:
|
||||
sandbox.exec_source('ILLEGAL = True', 'foo.py')
|
||||
|
||||
e = se.exception
|
||||
self.assertIsInstance(e.exc_value, KeyError)
|
||||
|
||||
e = se.exception.exc_value
|
||||
self.assertEqual(e.args[0], 'global_ns')
|
||||
self.assertEqual(e.args[1], 'set_unknown')
|
||||
|
||||
def test_add_tier_dir_regular_str(self):
|
||||
sandbox = self.sandbox()
|
||||
|
||||
sandbox.exec_source('add_tier_dir("t1", "foo")', 'foo.py')
|
||||
|
||||
self.assertEqual(sandbox['TIERS']['t1'],
|
||||
{'regular': ['foo'], 'static': []})
|
||||
|
||||
def test_add_tier_dir_regular_list(self):
|
||||
sandbox = self.sandbox()
|
||||
|
||||
sandbox.exec_source('add_tier_dir("t1", ["foo", "bar"])', 'foo.py')
|
||||
|
||||
self.assertEqual(sandbox['TIERS']['t1'],
|
||||
{'regular': ['foo', 'bar'], 'static': []})
|
||||
|
||||
def test_add_tier_dir_static(self):
|
||||
sandbox = self.sandbox()
|
||||
|
||||
sandbox.exec_source('add_tier_dir("t1", "foo", static=True)', 'foo.py')
|
||||
|
||||
self.assertEqual(sandbox['TIERS']['t1'],
|
||||
{'regular': [], 'static': ['foo']})
|
||||
|
||||
def test_tier_order(self):
|
||||
sandbox = self.sandbox()
|
||||
|
||||
source = '''
|
||||
add_tier_dir('t1', 'foo')
|
||||
add_tier_dir('t1', 'bar')
|
||||
add_tier_dir('t2', 'baz', static=True)
|
||||
add_tier_dir('t3', 'biz')
|
||||
add_tier_dir('t1', 'bat', static=True)
|
||||
'''
|
||||
|
||||
sandbox.exec_source(source, 'foo.py')
|
||||
|
||||
self.assertEqual([k for k in sandbox['TIERS'].keys()], ['t1', 't2', 't3'])
|
||||
|
||||
def test_tier_multiple_registration(self):
|
||||
sandbox = self.sandbox()
|
||||
|
||||
sandbox.exec_source('add_tier_dir("t1", "foo")', 'foo.py')
|
||||
|
||||
with self.assertRaises(SandboxExecutionError):
|
||||
sandbox.exec_source('add_tier_dir("t1", "foo")', 'foo.py')
|
||||
|
||||
def test_include_basic(self):
|
||||
sandbox = self.sandbox(data_path='include-basic')
|
||||
|
||||
sandbox.exec_file('moz.build')
|
||||
|
||||
self.assertEqual(sandbox['DIRS'], ['foo', 'bar'])
|
||||
|
||||
def test_include_outside_topsrcdir(self):
|
||||
sandbox = self.sandbox(data_path='include-outside-topsrcdir')
|
||||
|
||||
with self.assertRaises(SandboxLoadError) as se:
|
||||
sandbox.exec_file('relative.build')
|
||||
|
||||
expected = os.path.join(test_data_path, 'moz.build')
|
||||
self.assertEqual(se.exception.illegal_path, expected)
|
||||
|
||||
def test_include_error_stack(self):
|
||||
# Ensure the path stack is reported properly in exceptions.
|
||||
sandbox = self.sandbox(data_path='include-file-stack')
|
||||
|
||||
with self.assertRaises(SandboxExecutionError) as se:
|
||||
sandbox.exec_file('moz.build')
|
||||
|
||||
e = se.exception
|
||||
self.assertIsInstance(e.exc_value, KeyError)
|
||||
|
||||
args = e.exc_value.args
|
||||
self.assertEqual(args[0], 'global_ns')
|
||||
self.assertEqual(args[1], 'set_unknown')
|
||||
self.assertEqual(args[2], 'ILLEGAL')
|
||||
|
||||
expected_stack = [os.path.join(sandbox.config.topsrcdir, p) for p in [
|
||||
'moz.build', 'included-1.build', 'included-2.build']]
|
||||
|
||||
self.assertEqual(e.file_stack, expected_stack)
|
||||
|
||||
def test_include_missing(self):
|
||||
sandbox = self.sandbox(data_path='include-missing')
|
||||
|
||||
with self.assertRaises(SandboxLoadError) as sle:
|
||||
sandbox.exec_file('moz.build')
|
||||
|
||||
self.assertIsNotNone(sle.exception.read_error)
|
||||
|
||||
def test_include_relative_from_child_dir(self):
|
||||
# A relative path from a subdirectory should be relative from that
|
||||
# child directory.
|
||||
sandbox = self.sandbox(data_path='include-relative-from-child')
|
||||
sandbox.exec_file('child/child.build')
|
||||
self.assertEqual(sandbox['DIRS'], ['foo'])
|
||||
|
||||
sandbox = self.sandbox(data_path='include-relative-from-child')
|
||||
sandbox.exec_file('child/child2.build')
|
||||
self.assertEqual(sandbox['DIRS'], ['foo'])
|
||||
|
||||
def test_include_topsrcdir_relative(self):
|
||||
# An absolute path for include() is relative to topsrcdir.
|
||||
|
||||
sandbox = self.sandbox(data_path='include-topsrcdir-relative')
|
||||
sandbox.exec_file('moz.build')
|
||||
|
||||
self.assertEqual(sandbox['DIRS'], ['foo'])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@ -0,0 +1,50 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import unittest
|
||||
|
||||
from mozunit import main
|
||||
|
||||
from mozbuild.frontend.sandbox_symbols import (
|
||||
FUNCTIONS,
|
||||
SPECIAL_VARIABLES,
|
||||
VARIABLES,
|
||||
)
|
||||
|
||||
|
||||
class TestSymbols(unittest.TestCase):
|
||||
def _verify_doc(self, doc):
|
||||
# Documentation should be of the format:
|
||||
# """SUMMARY LINE
|
||||
#
|
||||
# EXTRA PARAGRAPHS
|
||||
# """
|
||||
|
||||
self.assertNotIn('\r', doc)
|
||||
|
||||
lines = doc.split('\n')
|
||||
|
||||
# No trailing whitespace.
|
||||
for line in lines[0:-1]:
|
||||
self.assertEqual(line, line.rstrip())
|
||||
|
||||
self.assertGreater(len(lines), 0)
|
||||
self.assertGreater(len(lines[0].strip()), 0)
|
||||
|
||||
# Last line should be empty.
|
||||
self.assertEqual(lines[-1].strip(), '')
|
||||
|
||||
def test_documentation_formatting(self):
|
||||
for typ, default, doc in VARIABLES.values():
|
||||
self._verify_doc(doc)
|
||||
|
||||
for attr, args, doc in FUNCTIONS.values():
|
||||
self._verify_doc(doc)
|
||||
|
||||
for typ, doc in SPECIAL_VARIABLES.values():
|
||||
self._verify_doc(doc)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
104
python/mozbuild/mozbuild/test/test_containers.py
Normal file
@ -0,0 +1,104 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import unittest

from mozunit import main

from mozbuild.util import (
    DefaultOnReadDict,
    ReadOnlyDefaultDict,
    ReadOnlyDict,
)

class TestReadOnlyDict(unittest.TestCase):
    def test_basic(self):
        original = {'foo': 1, 'bar': 2}

        test = ReadOnlyDict(original)

        self.assertEqual(original, test)
        self.assertEqual(test['foo'], 1)

        with self.assertRaises(KeyError):
            value = test['missing']

        with self.assertRaises(Exception):
            test['baz'] = True

class TestDefaultOnReadDict(unittest.TestCase):
    def test_no_defaults(self):
        original = {'foo': 1, 'bar': 2}

        test = DefaultOnReadDict(original)
        self.assertEqual(original, test)

        with self.assertRaises(KeyError):
            value = test['missing']

        test['foo'] = 5
        self.assertEqual(test['foo'], 5)

    def test_dict_defaults(self):
        original = {'foo': 1, 'bar': 2}

        test = DefaultOnReadDict(original, defaults={'baz': 3})

        self.assertEqual(original, test)
        self.assertEqual(test['baz'], 3)

        with self.assertRaises(KeyError):
            value = test['missing']

        test['baz'] = 4
        self.assertEqual(test['baz'], 4)

    def test_global_default(self):
        original = {'foo': 1}

        test = DefaultOnReadDict(original, defaults={'bar': 2},
            global_default=10)

        self.assertEqual(original, test)
        self.assertEqual(test['foo'], 1)

        self.assertEqual(test['bar'], 2)
        self.assertEqual(test['baz'], 10)

        test['bar'] = 3
        test['baz'] = 12
        test['other'] = 11

        self.assertEqual(test['bar'], 3)
        self.assertEqual(test['baz'], 12)
        self.assertEqual(test['other'], 11)


class TestReadOnlyDefaultDict(unittest.TestCase):
    def test_simple(self):
        original = {'foo': 1, 'bar': 2}

        test = ReadOnlyDefaultDict(original)

        self.assertEqual(original, test)

        self.assertEqual(test['foo'], 1)

        with self.assertRaises(KeyError):
            value = test['missing']

    def test_assignment(self):
        test = ReadOnlyDefaultDict({})

        with self.assertRaises(Exception):
            test['foo'] = True

    def test_defaults(self):
        test = ReadOnlyDefaultDict({}, defaults={'foo': 1})

        self.assertEqual(test['foo'], 1)


if __name__ == '__main__':
    main()
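Read together, these tests describe the container semantics added to mozbuild.util (the implementation follows in the next hunk). A short usage sketch mirroring the assertions above, assuming mozbuild.util is on the import path:

from mozbuild.util import DefaultOnReadDict, ReadOnlyDefaultDict

d = DefaultOnReadDict({'foo': 1}, defaults={'bar': 2}, global_default=10)
print(d['foo'])     # 1  - existing keys read normally
print(d['bar'])     # 2  - per-key default filled in on first read
print(d['baz'])     # 10 - any other key falls back to the global default
d['bar'] = 3        # writes are allowed on this variant

ro = ReadOnlyDefaultDict({'foo': 1}, defaults={'bar': 2})
print(ro['bar'])    # 2  - defaults still resolve on read
try:
    ro['baz'] = True
except Exception as e:
    print(e)        # assignment is rejected, as TestReadOnlyDefaultDict expects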
@ -7,6 +7,7 @@

from __future__ import unicode_literals

import copy
import hashlib


@ -27,3 +28,59 @@ def hash_file(path):
            h.update(data)

    return h.hexdigest()


class ReadOnlyDict(dict):
    """A read-only dictionary."""
    def __init__(self, d):
        dict.__init__(self, d)

    def __setitem__(self, name, value):
        raise Exception('Object does not support assignment.')


class undefined_default(object):
    """Represents an undefined argument value that isn't None."""


undefined = undefined_default()


class DefaultOnReadDict(dict):
    """A dictionary that returns default values for missing keys on read."""

    def __init__(self, d, defaults=None, global_default=undefined):
        """Create an instance from an iterable with defaults.

        The first argument is fed into the dict constructor.

        defaults is a dict mapping keys to their default values.
        global_default is the default value for *all* missing keys. If it isn't
        specified, no default value for keys not in defaults will be used and
        KeyError will be raised on access.
        """
        dict.__init__(self, d)

        self._defaults = defaults or {}
        self._global_default = global_default

    def __getitem__(self, k):
        try:
            return dict.__getitem__(self, k)
        except KeyError:
            pass

        if k in self._defaults:
            dict.__setitem__(self, k, copy.deepcopy(self._defaults[k]))
        elif self._global_default != undefined:
            dict.__setitem__(self, k, copy.deepcopy(self._global_default))

        return dict.__getitem__(self, k)


class ReadOnlyDefaultDict(DefaultOnReadDict, ReadOnlyDict):
    """A read-only dictionary that supports default values on retrieval."""
    def __init__(self, d, defaults=None, global_default=undefined):
        DefaultOnReadDict.__init__(self, d, defaults, global_default)
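Two design points worth noting in the code above: defaults are installed with copy.deepcopy on first read, so mutating a value obtained through a default cannot leak back into the shared defaults mapping, and ReadOnlyDefaultDict lists DefaultOnReadDict first among its bases so the defaulting __getitem__ wins while ReadOnlyDict still rejects external assignment (the internal dict.__setitem__ calls bypass that override on purpose). A small sketch of the deep-copy behaviour, assuming mozbuild.util is importable:

from mozbuild.util import DefaultOnReadDict

shared_defaults = {'flags': []}

d1 = DefaultOnReadDict({}, defaults=shared_defaults)
d1['flags'].append('-g')    # mutates d1's private deep copy only

d2 = DefaultOnReadDict({}, defaults=shared_defaults)
print(d2['flags'])          # [] - the shared defaults mapping is untouched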