commit f137fa0613 (parent d1320d8780)

Backed out 6 changesets (bug 1632916, bug 1599658, bug 1633037, bug 1633039, bug 1633016, bug 1632920) for SA bustages CLOSED TREE

Backed out changeset 332ce0963b4e (bug 1633039)
Backed out changeset a9904cbc40d9 (bug 1633037)
Backed out changeset d06b0ec349f8 (bug 1599658)
Backed out changeset 8fd300cad80f (bug 1633016)
Backed out changeset f8820941c703 (bug 1632916)
Backed out changeset ac9c2c8746ed (bug 1632920)

73 changed files with 334 additions and 253 deletions
Makefile.in (14 changes)
@@ -98,7 +98,7 @@ ifneq (,$(filter FasterMake+RecursiveMake,$(BUILD_BACKENDS)))
@# same directory, because that would blow up
$(if $(wildcard _build_manifests/install/$(subst /,_,$*)),$(if $(wildcard faster/install_$(subst /,_,$*)*),$(error FasterMake and RecursiveMake ends of the hybrid build system want to handle $*)))
endif
$(addprefix $(call py_action,process_install_manifest,--track install_$(subst /,_,$*).track $*) ,$(wildcard _build_manifests/install/$(subst /,_,$*)))
$(addprefix $(call py3_action,process_install_manifest,--track install_$(subst /,_,$*).track $*) ,$(wildcard _build_manifests/install/$(subst /,_,$*)))

# Dummy wrapper rule to allow the faster backend to piggy back
$(addprefix install-,$(subst /,_,$(filter dist/%,$(install_manifests)))): install-dist_%: install-dist/% ;
@@ -108,7 +108,7 @@ install-tests: install-test-files

.PHONY: install-test-files
install-test-files:
$(call py_action,process_install_manifest,--track install__test_files.track _tests _build_manifests/install/_test_files)
$(call py3_action,process_install_manifest,--track install__test_files.track _tests _build_manifests/install/_test_files)

include $(topsrcdir)/build/moz-automation.mk

@@ -140,7 +140,7 @@ endif

ifdef MOZ_ANDROID_FAT_AAR_ARCHITECTURES
recurse_android-fat-aar-artifact:
$(call py_action,fat_aar,\
$(call py3_action,fat_aar,\
$(addprefix --armeabi-v7a $(MOZ_FETCHES_DIR)/,$(MOZ_ANDROID_FAT_AAR_ARMEABI_V7A)) \
$(addprefix --arm64-v8a $(MOZ_FETCHES_DIR)/,$(MOZ_ANDROID_FAT_AAR_ARM64_V8A)) \
$(addprefix --x86 $(MOZ_FETCHES_DIR)/,$(MOZ_ANDROID_FAT_AAR_X86)) \
@@ -196,14 +196,14 @@ endif
.PHONY: symbolsfullarchive
symbolsfullarchive: prepsymbolsarchive
$(RM) '$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip'
$(call py_action,symbols_archive,'$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip' \
$(call py3_action,symbols_archive,'$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip' \
$(abspath $(DIST)/crashreporter-symbols) \
--full-archive)

.PHONY: symbolsarchive
symbolsarchive: prepsymbolsarchive
$(RM) '$(DIST)/$(PKG_PATH)$(SYMBOL_ARCHIVE_BASENAME).zip'
$(call py_action,symbols_archive,'$(DIST)/$(PKG_PATH)$(SYMBOL_ARCHIVE_BASENAME).zip' \
$(call py3_action,symbols_archive,'$(DIST)/$(PKG_PATH)$(SYMBOL_ARCHIVE_BASENAME).zip' \
$(abspath $(DIST)/crashreporter-symbols))

ifdef MOZ_CRASHREPORTER
@@ -220,7 +220,7 @@ endif

uploadsymbols:
ifdef MOZ_CRASHREPORTER
$(PYTHON3) -u $(topsrcdir)/toolkit/crashreporter/tools/upload_symbols.py '$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip'
$(PYTHON) -u $(topsrcdir)/toolkit/crashreporter/tools/upload_symbols.py '$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip'
endif

.PHONY: update-packaging
@@ -229,7 +229,7 @@ update-packaging:

.PHONY: package-generated-sources
package-generated-sources:
$(call py_action,package_generated_sources,'$(DIST)/$(PKG_PATH)$(GENERATED_SOURCE_FILE_PACKAGE)')
$(call py3_action,package_generated_sources,'$(DIST)/$(PKG_PATH)$(GENERATED_SOURCE_FILE_PACKAGE)')

ifdef JS_STANDALONE
# Delegate js-specific rules to js

@@ -56,7 +56,7 @@ endif
# steps. See bug 1431342.
libs:: $(srcdir)/profile/channel-prefs.js
$(NSINSTALL) -D $(DIST)/bin/defaults/pref
$(call py_action,preprocessor,-Fsubstitution $(PREF_PPFLAGS) $(ACDEFINES) $^ -o $(DIST)/bin/defaults/pref/channel-prefs.js)
$(call py3_action,preprocessor,-Fsubstitution $(PREF_PPFLAGS) $(ACDEFINES) $^ -o $(DIST)/bin/defaults/pref/channel-prefs.js)

ifeq (cocoa,$(MOZ_WIDGET_TOOLKIT))

@@ -78,7 +78,7 @@ LPROJ := Contents/Resources/$(LPROJ_ROOT).lproj
clean clobber repackage::
$(RM) -r $(dist_dest)

MAC_BUNDLE_VERSION = $(shell $(PYTHON3) $(srcdir)/macversion.py --version=$(MOZ_APP_VERSION) --buildid=$(DEPTH)/buildid.h)
MAC_BUNDLE_VERSION = $(shell $(PYTHON) $(srcdir)/macversion.py --version=$(MOZ_APP_VERSION) --buildid=$(DEPTH)/buildid.h)

.PHONY: repackage
tools repackage:: $(DIST)/bin/$(MOZ_APP_NAME)

@@ -3,24 +3,23 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
import io
from __future__ import absolute_import, print_function
from optparse import OptionParser
import sys
import re

o = OptionParser()
o.add_option('--buildid', dest='buildid')
o.add_option('--version', dest='version')
o.add_option("--buildid", dest="buildid")
o.add_option("--version", dest="version")

(options, args) = o.parse_args()

if not options.buildid:
print('--buildid is required', file=sys.stderr)
print >>sys.stderr, "--buildid is required"
sys.exit(1)

if not options.version:
print('--version is required', file=sys.stderr)
print >>sys.stderr, "--version is required"
sys.exit(1)

# We want to build a version number that matches the format allowed for
@@ -29,8 +28,7 @@ if not options.version:
# builds), but also so that newly-built older versions (e.g. beta build) aren't
# considered "newer" than previously-built newer versions (e.g. a trunk nightly)

define, MOZ_BUILDID, buildid = io.open(
options.buildid, 'r', encoding='utf-8').read().split()
define, MOZ_BUILDID, buildid = open(options.buildid, 'r').read().split()

# extract only the major version (i.e. "14" from "14.0b1")
majorVersion = re.match(r'^(\d+)[^\d].*', options.version).group(1)

@@ -177,7 +177,7 @@ endif

package-compare::
cd $(DIST); find $(PKGCOMP_FIND_OPTS) '$(FINDPATH)' -type f | sort > bin-list.txt
$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) $(MOZ_PKG_MANIFEST)) | grep '^$(BINPATH)' | sed -e 's/^\///' | sort > $(DIST)/pack-list.txt
$(call py3_action,preprocessor,$(DEFINES) $(ACDEFINES) $(MOZ_PKG_MANIFEST)) | grep '^$(BINPATH)' | sed -e 's/^\///' | sort > $(DIST)/pack-list.txt
-diff -u $(DIST)/pack-list.txt $(DIST)/bin-list.txt
rm -f $(DIST)/pack-list.txt $(DIST)/bin-list.txt

@@ -60,16 +60,16 @@ $(CONFIG_DIR)/setup.exe::
$(MKDIR) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(srcdir)/,$(INSTALLER_FILES)) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(topsrcdir)/$(MOZ_BRANDING_DIRECTORY)/,$(BRANDING_FILES)) $(CONFIG_DIR)
$(call py_action,preprocessor,-Fsubstitution $(DEFINES) $(ACDEFINES) \
$(call py3_action,preprocessor,-Fsubstitution $(DEFINES) $(ACDEFINES) \
$(srcdir)/nsis/defines.nsi.in -o $(CONFIG_DIR)/defines.nsi)
$(PYTHON3) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
$(PYTHON) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
--preprocess-locale $(topsrcdir) \
$(PPL_LOCALE_ARGS) $(AB_CD) $(CONFIG_DIR)
$(PYTHON3) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
$(PYTHON) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
--preprocess-single-file $(topsrcdir) \
$(PPL_LOCALE_ARGS) $(CONFIG_DIR) \
nsisstrings.properties nsisstrings.nlf
$(PYTHON3) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
$(PYTHON) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
--convert-utf8-utf16le \
$(srcdir)/nsis/extensionsLocale.nsh $(CONFIG_DIR)/extensionsLocale.nsh

@@ -168,7 +168,7 @@ task machBuildFaster(type: MachExec) {

workingDir "${topsrcdir}"

commandLine mozconfig.substs.PYTHON3
commandLine mozconfig.substs.PYTHON
args "${topsrcdir}/mach"
args 'build'
args 'faster'

@@ -146,8 +146,8 @@ if test "$GNU_CC" -a "$GCC_USE_GNU_LD" -a -z "$DEVELOPER_OPTIONS"; then
if AC_TRY_COMMAND([${CC-cc} -o conftest.${ac_objext} $CFLAGS $MOZ_DEBUG_FLAGS -c conftest.${ac_ext} 1>&2]) &&
AC_TRY_COMMAND([${CC-cc} -o conftest${ac_exeext} $LDFLAGS $MOZ_DEBUG_FLAGS -Wl,--gc-sections conftest.${ac_objext} $LIBS 1>&2]) &&
test -s conftest${ac_exeext} -a -s conftest.${ac_objext}; then
if test "`$PYTHON3 -m mozbuild.configure.check_debug_ranges conftest.${ac_objext} conftest.${ac_ext}`" = \
"`$PYTHON3 -m mozbuild.configure.check_debug_ranges conftest${ac_exeext} conftest.${ac_ext}`"; then
if test "`$PYTHON -m mozbuild.configure.check_debug_ranges conftest.${ac_objext} conftest.${ac_ext}`" = \
"`$PYTHON -m mozbuild.configure.check_debug_ranges conftest${ac_exeext} conftest.${ac_ext}`"; then
GC_SECTIONS_BREAKS_DEBUG_RANGES=no
else
GC_SECTIONS_BREAKS_DEBUG_RANGES=yes

@@ -92,7 +92,7 @@ AUTOMATION_EXTRA_CMDLINE-check = --keep-going
# However, the target automation/buildsymbols will still be executed in this
# case because it is a prerequisite of automation/upload.
define automation_commands
@+$(PYTHON3) $(topsrcdir)/config/run-and-prefix.py $1 $(MAKE) $1 $(AUTOMATION_EXTRA_CMDLINE-$1)
@+$(PYTHON) $(topsrcdir)/config/run-and-prefix.py $1 $(MAKE) $1 $(AUTOMATION_EXTRA_CMDLINE-$1)
$(call BUILDSTATUS,TIER_FINISH $1)
endef

@@ -470,6 +470,113 @@ def mozconfig_options(mozconfig, automation, help):
add(key, value)

# Python 2
# ========

option(env='PYTHON', nargs=1, help='Python 2.7 interpreter')

@depends('PYTHON', check_build_environment, 'MOZILLABUILD')
@imports(_from='__builtin__', _import='Exception')
@imports(_from='mozbuild.configure.util', _import='LineIO')
@imports(_from='mozbuild.virtualenv', _import='VirtualenvManager')
@imports(_from='mozbuild.virtualenv', _import='verify_python_version')
@imports(_from='mozbuild.pythonutil', _import='find_python2_executable')
@imports(_from='mozbuild.pythonutil', _import='python_executable_version')
@imports(_from='six', _import='ensure_text')
def virtualenv_python2(env_python, build_env, mozillabuild):
# Verify that the Python version we executed this code with is the minimum
# required version to handle all project code.
with LineIO(lambda l: log.error(l)) as out:
verify_python_version(out)

python = env_python[0] if env_python else None

log.debug("python2: executable from configuration: %r" % python)

# If this is a mozilla-central build, we'll find the virtualenv in the top
# source directory. If this is a SpiderMonkey build, we assume we're at
# js/src and try to find the virtualenv from the mozilla-central root.
# See mozilla-central changeset d2cce982a7c809815d86d5daecefe2e7a563ecca
# Bug 784841
topsrcdir, topobjdir = build_env.topsrcdir, build_env.topobjdir
if topobjdir.endswith('/js/src'):
topobjdir = topobjdir[:-7]

# If we know the Python executable the caller is asking for then verify its
# version. If the caller did not ask for a specific executable then find
# a reasonable default.
if python:
try:
version = python_executable_version(python).version
except Exception as e:
raise FatalCheckError('could not determine version of PYTHON '
'(%s): %s' % (python, e))
elif mozillabuild:
# MozillaBuild provides a Python 2.
python = normsep('%s/python/python2.exe' % mozillabuild)

try:
version = python_executable_version(python).version
except Exception as e:
raise FatalCheckError('could not determine version of '
'MozillaBuild python: %s' % e)
else:
# Fall back to the search routine.
python, version = find_python2_executable()

# The API returns a bytes whereas everything in configure is unicode.
if python:
python = ensure_text(python)

if not python:
raise FatalCheckError('Python 2.7 is required to build. '
'Ensure a `python2.7` executable is in your '
'PATH or define PYTHON to point to a Python '
'2.7 executable.')

if version < (2, 7, 0):
raise FatalCheckError('Python 2.7 required to build; '
'%s is Python %d.%d' % (python, version[0],
version[1]))

log.debug("python2: found executable: %r" % python)

virtualenvs_root = os.path.join(topobjdir, '_virtualenvs')
with LineIO(lambda l: log.info(l), 'replace') as out:
manager = VirtualenvManager(
topsrcdir, topobjdir,
os.path.join(virtualenvs_root, 'init'), out,
os.path.join(topsrcdir, 'build', 'virtualenv_packages.txt'))

log.debug("python: using venv: %r" % manager.virtualenv_root)

if not manager.up_to_date(python):
log.info('Creating Python 2 environment')
manager.build(python)
else:
log.debug("python2: venv is up to date")

python = normsep(manager.python_path)
str_version = '.'.join(str(v) for v in version)

return namespace(
path=python,
version=version,
str_version=str_version,
)

@depends(virtualenv_python2)
@checking('for Python 2', callback=lambda x: '%s (%s)' % (x.path, x.str_version))
def virtualenv_python2(venv):
return venv

set_config('PYTHON', virtualenv_python2.path)
add_old_configure_assignment('PYTHON', virtualenv_python2.path)

# Source checkout and version control integration.
# ================================================

@@ -32,7 +32,7 @@ ifeq "$(CWD)" "/"
CWD := /.
endif

PYTHON3 ?= python3
PYTHON ?= $(shell which python2.7 > /dev/null 2>&1 && echo python2.7 || echo python)

####################################
# Load mozconfig Options
@@ -46,7 +46,7 @@ endif

# Automatically add -jN to make flags if not defined. N defaults to number of cores.
ifeq (,$(findstring -j,$(MOZ_MAKE_FLAGS)))
cores=$(shell $(PYTHON3) -c 'import multiprocessing; print(multiprocessing.cpu_count())')
cores=$(shell $(PYTHON) -c 'import multiprocessing; print(multiprocessing.cpu_count())')
MOZ_MAKE_FLAGS += -j$(cores)
endif

@@ -9,7 +9,8 @@
# JSErrorFormatString.format member should be in ASCII encoding.
# ----------------------------------------------------------------------------

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import
from __future__ import print_function

import os
import sys

@@ -38,7 +38,8 @@
# mismatched alloc/free checking.
# ----------------------------------------------------------------------------

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import
from __future__ import print_function

import argparse
import re
@@ -111,7 +112,7 @@ def main():
]

# This is like alloc_fns, but regexp chars are not escaped.
alloc_fns_unescaped = [fn.replace('\\', '') for fn in alloc_fns]
alloc_fns_unescaped = [fn.translate(None, r'\\') for fn in alloc_fns]

# This regexp matches the relevant lines in the output of |nm|, which look
# like the following.

@@ -112,7 +112,7 @@ MKDIR ?= mkdir
SLEEP ?= sleep
TOUCH ?= touch

PYTHON_PATH = $(PYTHON3) $(topsrcdir)/config/pythonpath.py
PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py

#
# Build using PIC by default

@@ -27,4 +27,8 @@ core_winabspath = $(error core_winabspath is unsupported)
#
# libs::
# $(call py_action,purge_manifests,_build_manifests/purge/foo.manifest)
py_action = $(PYTHON3) -m mozbuild.action.$(1) $(2)
py_action = $(PYTHON) -m mozbuild.action.$(1) $(2)

# Same as above, but targeting Python 3. Wherever possible py3_action should be
# used rather than py_action.
py3_action = $(PYTHON3) -m mozbuild.action.$(1) $(2)

@@ -303,7 +303,7 @@ ifndef MOZ_PROFILE_GENERATE
ifeq ($(OS_ARCH), Linux)
ifeq (,$(rustflags_sancov)$(MOZ_ASAN)$(MOZ_TSAN)$(MOZ_UBSAN))
ifneq (,$(filter -Clto,$(cargo_rustc_flags)))
$(call py_action,check_binary,--target --networking $@)
$(call py3_action,check_binary,--target --networking $@)
endif
endif
endif

@@ -2,9 +2,8 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals

from __future__ import absolute_import
from __future__ import print_function
"""
Run a python script, adding extra directories to the python path.
"""
@@ -12,7 +11,7 @@ Run a python script, adding extra directories to the python path.

def main(args):
def usage():
print("pythonpath.py -I directory script.py [args...]", file=sys.stderr)
print >>sys.stderr, "pythonpath.py -I directory script.py [args...]"
sys.exit(150)

paths = []
@@ -48,14 +47,14 @@ def main(args):
frozenglobals['__name__'] = '__main__'
frozenglobals['__file__'] = script

exec(open(script, encoding='utf-8').read(), frozenglobals)
execfile(script, frozenglobals)

# Freeze scope here ... why this makes things work I have no idea ...
frozenglobals = globals()

import os
import sys
import os

if __name__ == '__main__':
main(sys.argv[1:])

@@ -2,7 +2,8 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import
from __future__ import print_function
import os
import errno

@ -28,7 +28,7 @@ USE_AUTOTARGETS_MK = 1
|
|||
include $(MOZILLA_DIR)/config/makefiles/makeutils.mk
|
||||
|
||||
ifdef REBUILD_CHECK
|
||||
REPORT_BUILD = $(info $(shell $(PYTHON3) $(MOZILLA_DIR)/config/rebuild_check.py $@ $^))
|
||||
REPORT_BUILD = $(info $(shell $(PYTHON) $(MOZILLA_DIR)/config/rebuild_check.py $@ $^))
|
||||
REPORT_BUILD_VERBOSE = $(REPORT_BUILD)
|
||||
else
|
||||
REPORT_BUILD = $(info $(relativesrcdir)/$(notdir $@))
|
||||
|
|
@ -66,10 +66,10 @@ INSTALL_TARGETS += CPP_UNIT_TESTS
|
|||
endif
|
||||
|
||||
run-cppunittests::
|
||||
@$(PYTHON3) $(MOZILLA_DIR)/testing/runcppunittests.py --xre-path=$(DIST)/bin --symbols-path=$(DIST)/crashreporter-symbols $(CPP_UNIT_TESTS)
|
||||
@$(PYTHON) $(MOZILLA_DIR)/testing/runcppunittests.py --xre-path=$(DIST)/bin --symbols-path=$(DIST)/crashreporter-symbols $(CPP_UNIT_TESTS)
|
||||
|
||||
cppunittests-remote:
|
||||
$(PYTHON3) -u $(MOZILLA_DIR)/testing/remotecppunittests.py \
|
||||
$(PYTHON) -u $(MOZILLA_DIR)/testing/remotecppunittests.py \
|
||||
--xre-path=$(DEPTH)/dist/bin \
|
||||
--localLib=$(DEPTH)/dist/$(MOZ_APP_NAME) \
|
||||
--deviceIP=${TEST_DEVICE} \
|
||||
|
|
@ -474,7 +474,7 @@ ifdef MSMANIFEST_TOOL
|
|||
endif # MSVC with manifest tool
|
||||
else # !WINNT || GNU_CC
|
||||
$(call EXPAND_CC_OR_CXX,$@) -o $@ $(COMPUTED_CXX_LDFLAGS) $(PGO_CFLAGS) $($(notdir $@)_OBJS) $(RESFILE) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(STATIC_LIBS) $(MOZ_PROGRAM_LDFLAGS) $(SHARED_LIBS) $(OS_LIBS)
|
||||
$(call py_action,check_binary,--target $@)
|
||||
$(call py3_action,check_binary,--target $@)
|
||||
endif # WINNT && !GNU_CC
|
||||
|
||||
ifdef ENABLE_STRIP
|
||||
|
|
@ -505,7 +505,7 @@ else
|
|||
endif # HOST_CPP_PROG_LINK
|
||||
endif
|
||||
ifndef CROSS_COMPILE
|
||||
$(call py_action,check_binary,--host $@)
|
||||
$(call py3_action,check_binary,--host $@)
|
||||
endif
|
||||
|
||||
#
|
||||
|
|
@ -530,7 +530,7 @@ ifdef MSMANIFEST_TOOL
|
|||
endif # MSVC with manifest tool
|
||||
else
|
||||
$(call EXPAND_CC_OR_CXX,$@) $(COMPUTED_CXX_LDFLAGS) $(PGO_CFLAGS) -o $@ $($@_OBJS) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(STATIC_LIBS) $(MOZ_PROGRAM_LDFLAGS) $(SHARED_LIBS) $(OS_LIBS)
|
||||
$(call py_action,check_binary,--target $@)
|
||||
$(call py3_action,check_binary,--target $@)
|
||||
endif # WINNT && !GNU_CC
|
||||
|
||||
ifdef ENABLE_STRIP
|
||||
|
|
@ -552,7 +552,7 @@ else
|
|||
endif
|
||||
endif
|
||||
ifndef CROSS_COMPILE
|
||||
$(call py_action,check_binary,--host $@)
|
||||
$(call py3_action,check_binary,--host $@)
|
||||
endif
|
||||
|
||||
$(LIBRARY): $(OBJS) $(STATIC_LIBS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
|
||||
|
|
@ -610,7 +610,7 @@ ifndef INCREMENTAL_LINKER
|
|||
$(RM) $@
|
||||
endif
|
||||
$(MKSHLIB) $($@_OBJS) $(RESFILE) $(LDFLAGS) $(STATIC_LIBS) $(SHARED_LIBS) $(EXTRA_DSO_LDOPTS) $(MOZ_GLUE_LDFLAGS) $(OS_LIBS)
|
||||
$(call py_action,check_binary,--target $@)
|
||||
$(call py3_action,check_binary,--target $@)
|
||||
|
||||
ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
|
||||
ifdef MSMANIFEST_TOOL
|
||||
|
|
@ -702,7 +702,7 @@ define syms_template
|
|||
syms:: $(2)
|
||||
$(2): $(1)
|
||||
ifdef MOZ_CRASHREPORTER
|
||||
$$(call py_action,dumpsymbols,$$(abspath $$<) $$(abspath $$@) $$(DUMP_SYMBOLS_FLAGS))
|
||||
$$(call py3_action,dumpsymbols,$$(abspath $$<) $$(abspath $$@) $$(DUMP_SYMBOLS_FLAGS))
|
||||
ifeq ($(OS_ARCH),WINNT)
|
||||
ifdef WINCHECKSEC
|
||||
$$(PYTHON3) $$(topsrcdir)/build/win32/autowinchecksec.py $$<
|
||||
|
|
@ -941,7 +941,7 @@ endif
|
|||
endif
|
||||
|
||||
libs realchrome:: $(FINAL_TARGET)/chrome
|
||||
$(call py_action,jar_maker,\
|
||||
$(call py3_action,jar_maker,\
|
||||
$(QUIET) -d $(FINAL_TARGET) \
|
||||
$(MAKE_JARS_FLAGS) $(DEFINES) $(ACDEFINES) \
|
||||
$(JAR_MANIFEST))
|
||||
|
|
@ -984,7 +984,7 @@ ifndef MOZ_DEBUG
|
|||
endif
|
||||
endif
|
||||
@echo 'Packaging $(XPI_PKGNAME).xpi...'
|
||||
$(call py_action,zip,-C $(FINAL_TARGET) ../$(XPI_PKGNAME).xpi '*')
|
||||
$(call py3_action,zip,-C $(FINAL_TARGET) ../$(XPI_PKGNAME).xpi '*')
|
||||
endif
|
||||
|
||||
# See comment above about moving this out of the tools tier.
|
||||
|
|
@ -1164,7 +1164,7 @@ PP_TARGETS_ALL_RESULTS := $(sort $(foreach tier,$(PP_TARGETS_TIERS),$(PP_TARGETS
|
|||
$(PP_TARGETS_ALL_RESULTS):
|
||||
$(if $(filter-out $(notdir $@),$(notdir $(<:.in=))),$(error Looks like $@ has an unexpected dependency on $< which breaks PP_TARGETS))
|
||||
$(RM) '$@'
|
||||
$(call py_action,preprocessor,--depend $(MDDEPDIR)/$(@F).pp $(PP_TARGET_FLAGS) $(DEFINES) $(ACDEFINES) '$<' -o '$@')
|
||||
$(call py3_action,preprocessor,--depend $(MDDEPDIR)/$(@F).pp $(PP_TARGET_FLAGS) $(DEFINES) $(ACDEFINES) '$<' -o '$@')
|
||||
|
||||
$(filter %.css,$(PP_TARGETS_ALL_RESULTS)): PP_TARGET_FLAGS+=--marker %
|
||||
|
||||
|
|
|
|||
|
|
@ -6,22 +6,23 @@
|
|||
# This script runs a process and prefixes its output with.
|
||||
# Usage: run-and-prefix.py prefix command arg0 argv1...
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
sys.stdout = os.fdopen(sys.stdout.fileno(), 'wb', 0)
|
||||
sys.stderr = os.fdopen(sys.stderr.fileno(), 'wb', 0)
|
||||
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
|
||||
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 0)
|
||||
|
||||
prefix = sys.argv[1].encode('utf-8')
|
||||
prefix = sys.argv[1]
|
||||
args = sys.argv[2:]
|
||||
|
||||
p = subprocess.Popen(args, bufsize=0,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
stdin=sys.stdin.fileno())
|
||||
stdin=sys.stdin.fileno(),
|
||||
universal_newlines=True)
|
||||
|
||||
while True:
|
||||
data = p.stdout.readline()
|
||||
|
|
@ -29,6 +30,6 @@ while True:
|
|||
if data == b'':
|
||||
break
|
||||
|
||||
sys.stdout.write(b'%s> %s' % (prefix, data))
|
||||
print('%s> %s' % (prefix, data), end=b'')
|
||||
|
||||
sys.exit(p.wait())
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ export:: webidl.stub
|
|||
-include codegen.pp
|
||||
|
||||
webidl.stub: $(codegen_dependencies)
|
||||
$(call py_action,webidl,$(srcdir))
|
||||
$(call py3_action,webidl,$(srcdir))
|
||||
@$(TOUCH) $@
|
||||
|
||||
.PHONY: compiletests
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ ipdl_py_deps := \
|
|||
# NB: the IPDL compiler manages .ipdl-->.h/.cpp dependencies itself,
|
||||
# which is why we don't have explicit .h/.cpp targets here
|
||||
ipdl.track: $(ALL_IPDLSRCS) $(srcdir)/sync-messages.ini $(srcdir)/message-metadata.ini $(ipdl_py_deps)
|
||||
$(PYTHON3) $(topsrcdir)/config/pythonpath.py \
|
||||
$(PYTHON) $(topsrcdir)/config/pythonpath.py \
|
||||
$(PLY_INCLUDE) \
|
||||
$(srcdir)/ipdl.py \
|
||||
--sync-msg-list=$(srcdir)/sync-messages.ini \
|
||||
|
|
|
|||
|
|
@ -3,11 +3,11 @@
|
|||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
from __future__ import print_function
|
||||
|
||||
from io import StringIO
|
||||
import optparse
|
||||
import os
|
||||
import sys
|
||||
from configparser import RawConfigParser
|
||||
from cStringIO import StringIO
|
||||
from ConfigParser import RawConfigParser
|
||||
|
||||
import ipdl
|
||||
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ __all__ = ['gencxx', 'genipdl', 'parse', 'typecheck', 'writeifmodified',
|
|||
|
||||
import os
|
||||
import sys
|
||||
from io import StringIO
|
||||
from cStringIO import StringIO
|
||||
|
||||
from ipdl.cgen import IPDLCodeGen
|
||||
from ipdl.lower import LowerToCxx, msgenums
|
||||
|
|
@ -75,7 +75,6 @@ def genmsgenum(ast):
|
|||
|
||||
|
||||
def writeifmodified(contents, file):
|
||||
contents = contents.encode('utf-8')
|
||||
dir = os.path.dirname(file)
|
||||
os.path.exists(dir) or os.makedirs(dir)
|
||||
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@
|
|||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import copy
|
||||
import functools
|
||||
|
||||
|
||||
class Visitor:
|
||||
|
|
@ -443,7 +442,6 @@ class TypeFunction(Node):
|
|||
self.ret = ret
|
||||
|
||||
|
||||
@functools.total_ordering
|
||||
class Typedef(Node):
|
||||
def __init__(self, fromtype, totypename, templateargs=[]):
|
||||
assert isinstance(totypename, str)
|
||||
|
|
@ -453,12 +451,12 @@ class Typedef(Node):
|
|||
self.totypename = totypename
|
||||
self.templateargs = templateargs
|
||||
|
||||
def __lt__(self, other):
|
||||
return self.totypename < other.totypename
|
||||
def __cmp__(self, o):
|
||||
return cmp(self.totypename, o.totypename)
|
||||
|
||||
def __eq__(self, other):
|
||||
return (self.__class__ == other.__class__
|
||||
and self.totypename == other.totypename)
|
||||
def __eq__(self, o):
|
||||
return (self.__class__ == o.__class__
|
||||
and 0 == cmp(self, o))
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.totypename)
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@
|
|||
# API are often easier to read than users of the AST APIs in these cases.
|
||||
|
||||
import re
|
||||
import sys
|
||||
import math
|
||||
import textwrap
|
||||
|
||||
|
|
@ -139,7 +140,7 @@ def _line(raw, skip_indent, lineno, context):
|
|||
values = eval(expr, context, {})
|
||||
except Exception as e:
|
||||
msg = "%s in substitution on line %d" % (repr(e), lineno)
|
||||
raise ValueError(msg) from e
|
||||
raise ValueError(msg), None, sys.exc_traceback
|
||||
|
||||
# If we aren't dealing with lists, wrap the result into a
|
||||
# single-element list.
|
||||
|
|
|
|||
|
|
@ -1450,7 +1450,7 @@ with some new IPDL/C++ nodes that are tuned for C++ codegen."""
|
|||
|
||||
# Compute a permutation of the fields for in-memory storage such
|
||||
# that the memory layout of the structure will be well-packed.
|
||||
permutation = list(range(len(newfields)))
|
||||
permutation = range(len(newfields))
|
||||
|
||||
# Note that the results of `pod_size` ensure that non-POD fields
|
||||
# sort before POD ones.
|
||||
|
|
|
|||
|
|
@ -125,10 +125,10 @@ class Type:
|
|||
return self.__class__.__name__
|
||||
|
||||
def name(self):
|
||||
raise NotImplementedError()
|
||||
raise NotImplementedError
|
||||
|
||||
def fullname(self):
|
||||
raise NotImplementedError()
|
||||
raise NotImplementedError
|
||||
|
||||
def accept(self, visitor, *args):
|
||||
visit = getattr(visitor, 'visit' + self.__class__.__name__, None)
|
||||
|
|
@ -229,18 +229,8 @@ class IPDLType(Type):
|
|||
|
||||
@classmethod
|
||||
def convertsTo(cls, lesser, greater):
|
||||
def _unwrap(nr):
|
||||
if isinstance(nr, dict):
|
||||
return _unwrap(nr['nested'])
|
||||
elif isinstance(nr, int):
|
||||
return nr
|
||||
else:
|
||||
raise ValueError('Got unexpected nestedRange value: %s' % nr)
|
||||
|
||||
lnr0, gnr0, lnr1, gnr1 = (
|
||||
_unwrap(lesser.nestedRange[0]), _unwrap(greater.nestedRange[0]),
|
||||
_unwrap(lesser.nestedRange[1]), _unwrap(greater.nestedRange[1]))
|
||||
if (lnr0 < gnr0 or lnr1 > gnr1):
|
||||
if (lesser.nestedRange[0] < greater.nestedRange[0] or
|
||||
lesser.nestedRange[1] > greater.nestedRange[1]):
|
||||
return False
|
||||
|
||||
# Protocols that use intr semantics are not allowed to use
|
||||
|
|
@ -603,7 +593,7 @@ def iteractortypes(t, visited=None):
|
|||
|
||||
def hasshmem(type):
|
||||
"""Return true iff |type| is shmem or has it buried within."""
|
||||
class found(BaseException):
|
||||
class found:
|
||||
pass
|
||||
|
||||
class findShmem(TypeVisitor):
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ include $(topsrcdir)/config/rules.mk
|
|||
IPDLUNITTEST_BIN = $(DEPTH)/dist/bin/ipdlunittest$(BIN_SUFFIX)
|
||||
|
||||
IPDLUnitTests.cpp : Makefile.in moz.build $(GENTESTER) $(TESTER_TEMPLATE) $(IPDLTESTHDRS)
|
||||
$(PYTHON3) $(GENTESTER) $(TESTER_TEMPLATE) -t $(IPDLTESTS) -e $(EXTRA_PROTOCOLS) > $@
|
||||
$(PYTHON) $(GENTESTER) $(TESTER_TEMPLATE) -t $(IPDLTESTS) -e $(EXTRA_PROTOCOLS) > $@
|
||||
|
||||
check-proc::
|
||||
@$(EXIT_ON_ERROR) \
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import print_function, unicode_literals
|
||||
from __future__ import print_function
|
||||
|
||||
import string
|
||||
import sys
|
||||
|
|
@ -124,7 +124,7 @@ def main(argv):
|
|||
}
|
||||
''' % (t, t, t, t) for t in unittests+extras])
|
||||
|
||||
templatefile = open(template, 'r', encoding='utf-8')
|
||||
templatefile = open(template, 'r')
|
||||
sys.stdout.write(
|
||||
string.Template(templatefile.read()).substitute(
|
||||
INCLUDES=includes,
|
||||
|
|
|
|||
|
|
@ -28,9 +28,9 @@ class IPDLCompile:
|
|||
self.specfilename
|
||||
])
|
||||
|
||||
proc = subprocess.Popen(
|
||||
args=self.argv, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE, universal_newlines=True)
|
||||
proc = subprocess.Popen(args=self.argv,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
self.stdout, self.stderr = proc.communicate()
|
||||
|
||||
self.returncode = proc.returncode
|
||||
|
|
|
|||
|
|
@ -8,9 +8,9 @@ OKTESTS := $(wildcard $(srcdir)/ok/*.ipdl) $(wildcard $(srcdir)/ok/*.ipdlh)
|
|||
ERRORTESTS := $(wildcard $(srcdir)/error/*.ipdl) $(wildcard $(srcdir)/error/*.ipdlh)
|
||||
|
||||
check::
|
||||
@$(PYTHON3) $(srcdir)/runtests.py \
|
||||
@$(PYTHON) $(srcdir)/runtests.py \
|
||||
$(srcdir)/ok $(srcdir)/error \
|
||||
$(PYTHON3) $(topsrcdir)/config/pythonpath.py \
|
||||
$(PYTHON) $(topsrcdir)/config/pythonpath.py \
|
||||
$(PLY_INCLUDE) \
|
||||
$(topsrcdir)/ipc/ipdl/ipdl.py \
|
||||
--sync-msg-list=$(srcdir)/sync-messages.ini \
|
||||
|
|
|
|||
ipc/ipdl/test/ipdl/error/BadNestedManagee.ipdl (new file, 9 lines)
|
|
@ -0,0 +1,9 @@
|
|||
//error: protocol `BadNestedManagee' requires more powerful send semantics than its manager `BadNestedManager' provides
|
||||
|
||||
include protocol BadNestedManager;
|
||||
|
||||
nested(upto inside_sync) async protocol BadNestedManagee {
|
||||
manager BadNestedManager;
|
||||
child:
|
||||
async __delete__();
|
||||
};
|
||||
ipc/ipdl/test/ipdl/error/BadNestedManager.ipdl (new file, 9 lines)
|
|
@ -0,0 +1,9 @@
|
|||
//error: protocol `BadNestedManagee' requires more powerful send semantics than its manager `BadNestedManager' provides
|
||||
|
||||
include protocol BadNestedManagee;
|
||||
|
||||
nested(upto not) async protocol BadNestedManager {
|
||||
manages BadNestedManagee;
|
||||
parent:
|
||||
async BadNestedManagee();
|
||||
};
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
//error: protocol `PBadNestedManagee' requires more powerful send semantics than its manager `PBadNestedManager' provides
|
||||
|
||||
include protocol PBadNestedManager;
|
||||
|
||||
nested(upto inside_sync) async protocol PBadNestedManagee {
|
||||
manager PBadNestedManager;
|
||||
child:
|
||||
async __delete__();
|
||||
};
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
//error: protocol `PBadNestedManagee' requires more powerful send semantics than its manager `PBadNestedManager' provides
|
||||
|
||||
include protocol PBadNestedManagee;
|
||||
|
||||
nested(upto not) async protocol PBadNestedManager {
|
||||
manages PBadNestedManagee;
|
||||
parent:
|
||||
async PBadNestedManagee();
|
||||
};
|
||||
|
|
@ -40,10 +40,10 @@ endif
|
|||
endif
|
||||
|
||||
check-js-msg::
|
||||
(cd $(topsrcdir) && $(PYTHON3) $(topsrcdir)/config/check_js_msg_encoding.py);
|
||||
(cd $(topsrcdir) && $(PYTHON) $(topsrcdir)/config/check_js_msg_encoding.py);
|
||||
|
||||
check-jit-test::
|
||||
$(JITTEST_SANITIZER_ENV) $(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON3) -u $(srcdir)/jit-test/jit_test.py \
|
||||
$(JITTEST_SANITIZER_ENV) $(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON) -u $(srcdir)/jit-test/jit_test.py \
|
||||
--no-slow --no-progress --format=automation --jitflags=all \
|
||||
$(JITTEST_VALGRIND_FLAG) \
|
||||
$(JITTEST_EXTRA_ARGS) \
|
||||
|
|
@ -52,7 +52,7 @@ check-jit-test::
|
|||
check:: check-js-msg
|
||||
|
||||
check-jstests:
|
||||
$(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON3) -u $(srcdir)/tests/jstests.py \
|
||||
$(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON) -u $(srcdir)/tests/jstests.py \
|
||||
--no-progress --format=automation --timeout 300 \
|
||||
$(JSTESTS_EXTRA_ARGS) \
|
||||
$(DIST)/bin/$(JS_SHELL_NAME)$(BIN_SUFFIX)
|
||||
|
|
|
|||
|
|
@ -9,12 +9,12 @@ include $(topsrcdir)/config/rules.mk
|
|||
# check_vanilla_allocations.py is tailored to Linux, so only run it there.
|
||||
# That should be enough to catch any problems.
|
||||
check-vanilla-allocations:
|
||||
$(PYTHON3) $(topsrcdir)/config/check_vanilla_allocations.py $(REAL_LIBRARY)
|
||||
$(PYTHON) $(topsrcdir)/config/check_vanilla_allocations.py $(REAL_LIBRARY)
|
||||
|
||||
# The "aggressive" variant will likely fail on some compiler/platform
|
||||
# combinations, but is worth running by hand every once in a while.
|
||||
check-vanilla-allocations-aggressive:
|
||||
$(PYTHON3) $(topsrcdir)/config/check_vanilla_allocations.py --aggressive $(REAL_LIBRARY)
|
||||
$(PYTHON) $(topsrcdir)/config/check_vanilla_allocations.py --aggressive $(REAL_LIBRARY)
|
||||
|
||||
ifeq ($(OS_ARCH),Linux)
|
||||
ifeq (,$(filter -flto,$(COMPUTED_CFLAGS) $(COMPUTED_CXXFLAGS) $(COMPUTED_LDFLAGS)))
|
||||
|
|
@ -58,7 +58,7 @@ install:: ../js-config.h
|
|||
#
|
||||
|
||||
install::
|
||||
$(call py_action,process_install_manifest,--track install_dist_include.track --no-symlinks $(DESTDIR)$(includedir)/$(JS_LIBRARY_NAME) $(DEPTH)/_build_manifests/install/dist_include)
|
||||
$(call py3_action,process_install_manifest,--track install_dist_include.track --no-symlinks $(DESTDIR)$(includedir)/$(JS_LIBRARY_NAME) $(DEPTH)/_build_manifests/install/dist_include)
|
||||
|
||||
#
|
||||
# END SpiderMonkey header installation
|
||||
|
|
|
|||
|
|
@ -580,7 +580,7 @@ if args.variant == 'msan':
|
|||
|
||||
# Generate stacks from minidumps.
|
||||
if use_minidump:
|
||||
venv_python = os.path.join(OBJDIR, "_virtualenvs", "init_py3", "bin", "python3")
|
||||
venv_python = os.path.join(OBJDIR, "_virtualenvs", "init", "bin", "python")
|
||||
run_command([
|
||||
venv_python,
|
||||
os.path.join(DIR.source, "testing/mozbase/mozcrash/mozcrash/mozcrash.py"),
|
||||
|
|
|
|||
|
|
@ -24,4 +24,4 @@ PKG_STAGE = $(DIST)/test-stage
|
|||
stage-package:
|
||||
$(NSINSTALL) -D $(PKG_STAGE)/jsreftest/tests/js/src/tests
|
||||
(cd $(srcdir) && tar $(TAR_CREATE_FLAGS) - $(TEST_FILES)) | (cd $(PKG_STAGE)/jsreftest/tests/js/src/tests && tar -xf -)
|
||||
$(PYTHON3) $(srcdir)/jstests.py --make-manifests $(PKG_STAGE)/jsreftest/tests/js/src/tests
|
||||
$(PYTHON) $(srcdir)/jstests.py --make-manifests $(PKG_STAGE)/jsreftest/tests/js/src/tests
|
||||
|
|
|
|||
|
|
@ -4,7 +4,6 @@
|
|||
|
||||
from __future__ import print_function
|
||||
|
||||
import io
|
||||
import os
|
||||
import posixpath
|
||||
import re
|
||||
|
|
@ -72,7 +71,7 @@ class XULInfo:
|
|||
# Read the values.
|
||||
val_re = re.compile(r'(TARGET_XPCOM_ABI|OS_TARGET|MOZ_DEBUG)\s*=\s*(.*)')
|
||||
kw = {'isdebug': False}
|
||||
for line in io.open(path, encoding='utf-8'):
|
||||
for line in open(path):
|
||||
m = val_re.match(line)
|
||||
if m:
|
||||
key, val = m.groups()
|
||||
|
|
@ -339,7 +338,7 @@ def _emit_manifest_at(location, relative, test_gen, depth):
|
|||
manifest = ["url-prefix {}jsreftest.html?test={}/".format(
|
||||
'../' * depth, relative)] + manifest
|
||||
|
||||
fp = io.open(filename, 'w', encoding='utf-8', newline='\n')
|
||||
fp = open(filename, 'w')
|
||||
try:
|
||||
fp.write('\n'.join(manifest) + '\n')
|
||||
finally:
|
||||
|
|
@ -379,7 +378,7 @@ def _parse_test_header(fullpath, testcase, xul_tester):
|
|||
This looks a bit weird. The reason is that it needs to be efficient, since
|
||||
it has to be done on every test
|
||||
"""
|
||||
fp = io.open(fullpath, 'r', encoding='utf-8')
|
||||
fp = open(fullpath, 'r')
|
||||
try:
|
||||
buf = fp.read(512)
|
||||
finally:
|
||||
|
|
@ -416,7 +415,7 @@ def _parse_external_manifest(filename, relpath):
|
|||
|
||||
entries = []
|
||||
|
||||
with io.open(filename, 'r', encoding='utf-8') as fp:
|
||||
with open(filename, 'r') as fp:
|
||||
manifest_re = re.compile(r'^\s*(?P<terms>.*)\s+(?P<type>include|script)\s+(?P<path>\S+)$')
|
||||
include_re = re.compile(r'^\s*include\s+(?P<path>\S+)$')
|
||||
for line in fp:
|
||||
|
|
|
|||
|
|
@ -4,10 +4,11 @@
|
|||
from __future__ import print_function, unicode_literals, division
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from threading import Thread
|
||||
from six.moves.queue import Queue, Empty
|
||||
from Queue import Queue, Empty
|
||||
|
||||
from .progressbar import ProgressBar
|
||||
from .results import NullTestOutput, TestOutput, escape_cmdline
|
||||
|
|
@ -23,7 +24,7 @@ class TaskFinishedMarker:
|
|||
|
||||
def _do_work(qTasks, qResults, qWatch, prefix, run_skipped, timeout, show_cmd):
|
||||
while True:
|
||||
test = qTasks.get()
|
||||
test = qTasks.get(block=True, timeout=sys.maxint)
|
||||
if test is EndMarker:
|
||||
qWatch.put(EndMarker)
|
||||
qResults.put(EndMarker)
|
||||
|
|
@ -38,9 +39,9 @@ def _do_work(qTasks, qResults, qWatch, prefix, run_skipped, timeout, show_cmd):
|
|||
if show_cmd:
|
||||
print(escape_cmdline(cmd))
|
||||
tStart = datetime.now()
|
||||
proc = subprocess.Popen(
|
||||
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
|
||||
universal_newlines=True)
|
||||
proc = subprocess.Popen(cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
|
||||
# Push the task to the watchdog -- it will kill the task
|
||||
# if it goes over the timeout while we keep its stdout
|
||||
|
|
@ -74,7 +75,7 @@ def _do_watch(qWatch, timeout):
|
|||
# ignore this.
|
||||
if ex.winerror != 5:
|
||||
raise
|
||||
fin = qWatch.get()
|
||||
fin = qWatch.get(block=True, timeout=sys.maxint)
|
||||
assert fin is TaskFinishedMarker, "invalid finish marker"
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -32,13 +32,13 @@ check:: $(srcdir)/replay.log expected_output.log $(srcdir)/expected_output_minim
|
|||
# command in replay.log) because starting with libstdc++ 5, a static
|
||||
# initializer in the STL allocates memory, which we obviously don't have
|
||||
# in expected_output.log.
|
||||
MALLOC_LOG=1 $(LOGALLOC) ./$(PROGRAM) < $< | sed -n '/jemalloc_stats/,$$p' | $(PYTHON3) $(srcdir)/logalloc_munge.py | diff -w - expected_output.log
|
||||
MALLOC_LOG=1 $(LOGALLOC) ./$(PROGRAM) < $< | sed -n '/jemalloc_stats/,$$p' | $(PYTHON) $(srcdir)/logalloc_munge.py | diff -w - expected_output.log
|
||||
# Test with MALLOC_LOG as a file name
|
||||
$(RM) test_output.log
|
||||
MALLOC_LOG=test_output.log $(LOGALLOC) ./$(PROGRAM) < $<
|
||||
sed -n '/jemalloc_stats/,$$p' test_output.log | $(PYTHON3) $(srcdir)/logalloc_munge.py | diff -w - expected_output.log
|
||||
sed -n '/jemalloc_stats/,$$p' test_output.log | $(PYTHON) $(srcdir)/logalloc_munge.py | diff -w - expected_output.log
|
||||
|
||||
MALLOC_LOG=1 MALLOC_LOG_MINIMAL=1 $(LOGALLOC) ./$(PROGRAM) < $< | sed -n '/jemalloc_stats/,$$p' | $(PYTHON3) $(srcdir)/logalloc_munge.py | diff -w - $(srcdir)/expected_output_minimal.log
|
||||
MALLOC_LOG=1 MALLOC_LOG_MINIMAL=1 $(LOGALLOC) ./$(PROGRAM) < $< | sed -n '/jemalloc_stats/,$$p' | $(PYTHON) $(srcdir)/logalloc_munge.py | diff -w - $(srcdir)/expected_output_minimal.log
|
||||
|
||||
endif
|
||||
endif
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ include $(topsrcdir)/config/config.mk
|
|||
|
||||
ifeq (WINNT,$(OS_TARGET))
|
||||
mozglue.def: $(srcdir)/mozglue.def.in $(GLOBAL_DEPS)
|
||||
$(call py_action,preprocessor,$(if $(MOZ_REPLACE_MALLOC),-DMOZ_REPLACE_MALLOC) $(ACDEFINES) $< -o $@)
|
||||
$(call py3_action,preprocessor,$(if $(MOZ_REPLACE_MALLOC),-DMOZ_REPLACE_MALLOC) $(ACDEFINES) $< -o $@)
|
||||
|
||||
GARBAGE += mozglue.def
|
||||
|
||||
|
|
|
|||
|
|
@ -35,7 +35,7 @@ def process(input_dirs, inc_paths, bindings_conf, cache_dir, header_dir,
|
|||
rule = mk.create_rule()
|
||||
|
||||
glbl = {}
|
||||
exec(open(bindings_conf, encoding='utf-8').read(), glbl)
|
||||
execfile(bindings_conf, glbl)
|
||||
webidlconfig = glbl['DOMInterfaces']
|
||||
|
||||
# Write out dependencies for Python modules we import. If this list isn't
|
||||
|
|
@ -46,7 +46,7 @@ def process(input_dirs, inc_paths, bindings_conf, cache_dir, header_dir,
|
|||
for path in idl_files:
|
||||
basename = os.path.basename(path)
|
||||
stem, _ = os.path.splitext(basename)
|
||||
idl_data = open(path, encoding='utf-8').read()
|
||||
idl_data = open(path).read()
|
||||
|
||||
idl = p.parse(idl_data, filename=path)
|
||||
idl.resolve(inc_paths, p, webidlconfig)
|
||||
|
|
@ -74,7 +74,7 @@ def process(input_dirs, inc_paths, bindings_conf, cache_dir, header_dir,
|
|||
# time a build is run whether or not anything changed. To fix this we
|
||||
# unconditionally write out the file.
|
||||
xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
|
||||
with open(xpt_path, 'w', encoding='utf-8', newline='\n') as fh:
|
||||
with open(xpt_path, 'w') as fh:
|
||||
jsonxpt.write(jsonxpt.link(xpts), fh)
|
||||
|
||||
rule.add_targets([six.ensure_text(xpt_path)])
|
||||
|
|
|
|||
|
|
@ -171,6 +171,7 @@ class FasterMakeBackend(MakeBackend, PartialBackend):
|
|||
|
||||
# Add a few necessary variables inherited from configure
|
||||
for var in (
|
||||
'PYTHON',
|
||||
'PYTHON3',
|
||||
'ACDEFINES',
|
||||
'MOZ_BUILD_APP',
|
||||
|
|
@ -216,7 +217,7 @@ class FasterMakeBackend(MakeBackend, PartialBackend):
|
|||
[ref_file, l10n_file] + python_deps)
|
||||
rule.add_commands(
|
||||
[
|
||||
'$(PYTHON3) -m mozbuild.action.l10n_merge '
|
||||
'$(PYTHON) -m mozbuild.action.l10n_merge '
|
||||
'--output {} --ref-file {} --l10n-file {}'.format(
|
||||
merge, ref_file, l10n_file
|
||||
)
|
||||
|
|
|
|||
|
|
@ -91,7 +91,7 @@ class MakeBackend(CommonBackend):
|
|||
ret.append((
|
||||
"""{stub}: {script}{inputs}{backend}{force}
|
||||
\t$(REPORT_BUILD)
|
||||
\t$(call py_action,file_generate,{locale}{script} """ # wrap for E501
|
||||
\t$(call py3_action,file_generate,{locale}{script} """ # wrap for E501
|
||||
"""{method} {output} {dep_file} {stub}{inputs}{flags})
|
||||
\t@$(TOUCH) $@
|
||||
""").format(
|
||||
|
|
|
|||
|
|
@ -1559,13 +1559,13 @@ class RecursiveMakeBackend(MakeBackend):
|
|||
mozpath.relpath(obj.path,
|
||||
obj.install_target))),
|
||||
]
|
||||
rule.add_commands(['$(call py_action,buildlist,%s)' %
|
||||
rule.add_commands(['$(call py3_action,buildlist,%s)' %
|
||||
' '.join(args)])
|
||||
args = [
|
||||
mozpath.join('$(DEPTH)', obj.path),
|
||||
make_quote(shell_quote(str(obj.entry))),
|
||||
]
|
||||
rule.add_commands(['$(call py_action,buildlist,%s)' % ' '.join(args)])
|
||||
rule.add_commands(['$(call py3_action,buildlist,%s)' % ' '.join(args)])
|
||||
fragment.dump(backend_file.fh, removal_guard=False)
|
||||
|
||||
self._no_skip['misc'].add(obj.relsrcdir)
|
||||
|
|
@ -1651,7 +1651,7 @@ class RecursiveMakeBackend(MakeBackend):
|
|||
rule.add_dependencies([source])
|
||||
rule.add_commands([
|
||||
'$(RM) $@',
|
||||
'$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) '
|
||||
'$(call py3_action,preprocessor,$(DEFINES) $(ACDEFINES) '
|
||||
'$< -o $@)'
|
||||
])
|
||||
|
||||
|
|
@ -1706,7 +1706,7 @@ class RecursiveMakeBackend(MakeBackend):
|
|||
# static to preprocessed don't end up writing to a symlink,
|
||||
# which would modify content in the source directory.
|
||||
'$(RM) $@',
|
||||
'$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) '
|
||||
'$(call py3_action,preprocessor,$(DEFINES) $(ACDEFINES) '
|
||||
'$< -o $@)'
|
||||
])
|
||||
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@
|
|||
# to a given compilation unit. This is used as a helper to find a bug in some
|
||||
# versions of GNU ld.
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
|
|
@ -46,8 +46,7 @@ def get_range_length(range, debug_ranges):
|
|||
|
||||
|
||||
def main(bin, compilation_unit):
|
||||
p = subprocess.Popen(['objdump', '-W', bin], stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE, universal_newlines=True)
|
||||
p = subprocess.Popen(['objdump', '-W', bin], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
(out, err) = p.communicate()
|
||||
sections = re.split('\n(Contents of the|The section) ', out)
|
||||
debug_info = [s for s in sections if s.startswith('.debug_info')]
|
||||
|
|
|
|||
|
|
@ -1094,7 +1094,7 @@ class BuildDriver(MozbuildObject):
|
|||
backend))
|
||||
for backend in all_backends])):
|
||||
print('Build configuration changed. Regenerating backend.')
|
||||
args = [config.substs['PYTHON3'],
|
||||
args = [config.substs['PYTHON'],
|
||||
mozpath.join(self.topobjdir, 'config.status')]
|
||||
self.run_process(args, cwd=self.topobjdir, pass_thru=True)
|
||||
|
||||
|
|
|
|||
|
|
@ -416,7 +416,7 @@ class TestRecursiveMakeBackend(BackendTester):
|
|||
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar.c.pp',
|
||||
'$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py' % env.topsrcdir,
|
||||
'$(REPORT_BUILD)',
|
||||
'$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir, # noqa
|
||||
'$(call py3_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir, # noqa
|
||||
'@$(TOUCH) $@',
|
||||
'',
|
||||
'export:: $(MDDEPDIR)/foo.h.stub',
|
||||
|
|
@ -426,7 +426,7 @@ class TestRecursiveMakeBackend(BackendTester):
|
|||
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.h.pp',
|
||||
'$(MDDEPDIR)/foo.h.stub: %s/generate-foo.py $(srcdir)/foo-data' % (env.topsrcdir),
|
||||
'$(REPORT_BUILD)',
|
||||
'$(call py_action,file_generate,%s/generate-foo.py main foo.h $(MDDEPDIR)/foo.h.pp $(MDDEPDIR)/foo.h.stub $(srcdir)/foo-data)' % (env.topsrcdir), # noqa
|
||||
'$(call py3_action,file_generate,%s/generate-foo.py main foo.h $(MDDEPDIR)/foo.h.pp $(MDDEPDIR)/foo.h.stub $(srcdir)/foo-data)' % (env.topsrcdir), # noqa
|
||||
'@$(TOUCH) $@',
|
||||
'',
|
||||
]
|
||||
|
|
@ -449,7 +449,7 @@ class TestRecursiveMakeBackend(BackendTester):
|
|||
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar.c.pp',
|
||||
'$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py FORCE' % env.topsrcdir,
|
||||
'$(REPORT_BUILD)',
|
||||
'$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir, # noqa
|
||||
'$(call py3_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir, # noqa
|
||||
'@$(TOUCH) $@',
|
||||
'',
|
||||
'foo.c: $(MDDEPDIR)/foo.c.stub ;',
|
||||
|
|
@ -458,7 +458,7 @@ class TestRecursiveMakeBackend(BackendTester):
|
|||
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.c.pp',
|
||||
'$(MDDEPDIR)/foo.c.stub: %s/generate-foo.py $(srcdir)/foo-data' % (env.topsrcdir),
|
||||
'$(REPORT_BUILD)',
|
||||
'$(call py_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(MDDEPDIR)/foo.c.stub $(srcdir)/foo-data)' % (env.topsrcdir), # noqa
|
||||
'$(call py3_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(MDDEPDIR)/foo.c.stub $(srcdir)/foo-data)' % (env.topsrcdir), # noqa
|
||||
'@$(TOUCH) $@',
|
||||
'',
|
||||
]
|
||||
|
|
@ -482,7 +482,7 @@ class TestRecursiveMakeBackend(BackendTester):
|
|||
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.xyz.pp',
|
||||
'$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa
|
||||
'$(REPORT_BUILD)',
|
||||
'$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
|
||||
'$(call py3_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
|
||||
'@$(TOUCH) $@',
|
||||
'',
|
||||
'LOCALIZED_FILES_0_FILES += foo.xyz',
|
||||
|
|
@ -510,7 +510,7 @@ class TestRecursiveMakeBackend(BackendTester):
|
|||
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.xyz.pp',
|
||||
'$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa
|
||||
'$(REPORT_BUILD)',
|
||||
'$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
|
||||
'$(call py3_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
|
||||
'@$(TOUCH) $@',
|
||||
'',
|
||||
'libs:: $(MDDEPDIR)/abc.xyz.stub',
|
||||
|
|
@ -520,7 +520,7 @@ class TestRecursiveMakeBackend(BackendTester):
|
|||
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/abc.xyz.pp',
|
||||
'$(MDDEPDIR)/abc.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input FORCE' % env.topsrcdir, # noqa
|
||||
'$(REPORT_BUILD)',
|
||||
'$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main abc.xyz $(MDDEPDIR)/abc.xyz.pp $(MDDEPDIR)/abc.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
|
||||
'$(call py3_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main abc.xyz $(MDDEPDIR)/abc.xyz.pp $(MDDEPDIR)/abc.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
|
||||
'@$(TOUCH) $@',
|
||||
'',
|
||||
]
|
||||
|
|
@ -545,7 +545,7 @@ class TestRecursiveMakeBackend(BackendTester):
|
|||
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo$(AB_CD).xyz.pp',
|
||||
'$(MDDEPDIR)/foo$(AB_CD).xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa
|
||||
'$(REPORT_BUILD)',
|
||||
'$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo$(AB_CD).xyz $(MDDEPDIR)/foo$(AB_CD).xyz.pp $(MDDEPDIR)/foo$(AB_CD).xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
|
||||
'$(call py3_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo$(AB_CD).xyz $(MDDEPDIR)/foo$(AB_CD).xyz.pp $(MDDEPDIR)/foo$(AB_CD).xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
|
||||
'@$(TOUCH) $@',
|
||||
'',
|
||||
'bar$(AB_rCD).xyz: $(MDDEPDIR)/bar$(AB_rCD).xyz.stub ;',
|
||||
|
|
@ -554,7 +554,7 @@ class TestRecursiveMakeBackend(BackendTester):
|
|||
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar$(AB_rCD).xyz.pp',
|
||||
'$(MDDEPDIR)/bar$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa
|
||||
'$(REPORT_BUILD)',
|
||||
'$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main bar$(AB_rCD).xyz $(MDDEPDIR)/bar$(AB_rCD).xyz.pp $(MDDEPDIR)/bar$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
|
||||
'$(call py3_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main bar$(AB_rCD).xyz $(MDDEPDIR)/bar$(AB_rCD).xyz.pp $(MDDEPDIR)/bar$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
|
||||
'@$(TOUCH) $@',
|
||||
'',
|
||||
'zot$(AB_rCD).xyz: $(MDDEPDIR)/zot$(AB_rCD).xyz.stub ;',
|
||||
|
|
@ -563,7 +563,7 @@ class TestRecursiveMakeBackend(BackendTester):
|
|||
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/zot$(AB_rCD).xyz.pp',
|
||||
'$(MDDEPDIR)/zot$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa
|
||||
'$(REPORT_BUILD)',
|
||||
'$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main zot$(AB_rCD).xyz $(MDDEPDIR)/zot$(AB_rCD).xyz.pp $(MDDEPDIR)/zot$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
|
||||
'$(call py3_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main zot$(AB_rCD).xyz $(MDDEPDIR)/zot$(AB_rCD).xyz.pp $(MDDEPDIR)/zot$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
|
||||
'@$(TOUCH) $@',
|
||||
'',
|
||||
]
|
||||
|
|
|
|||
|
|
@@ -282,12 +282,12 @@ class SimplePackager(object):
self._file_queue.append(self.formatter.add, path, file)
if mozpath.basename(path) == 'install.rdf':
addon = True
install_rdf = six.ensure_text(file.open('rt').read())
install_rdf = file.open('rt').read()
if self.UNPACK_ADDON_RE.search(install_rdf):
addon = 'unpacked'
self._add_addon(mozpath.dirname(path), addon)
elif mozpath.basename(path) == 'manifest.json':
manifest = six.ensure_text(file.open('rt').read())
manifest = file.open('rt').read()
try:
parsed = json.loads(manifest)
except ValueError:
@@ -107,7 +107,7 @@ class L10NRepackFormatterMixin(object):
root, ext = mozpath.splitext(mozpath.basename(path))
self._dictionaries[root] = path
elif path.endswith('/built_in_addons.json'):
data = json.loads(six.ensure_text(file.open().read()))
data = json.load(file.open())
data['dictionaries'] = self._dictionaries
# The GeneratedFile content is only really generated after
# all calls to formatter.add.
@@ -397,7 +397,7 @@ class HgRepository(Repository):
def get_files_in_working_directory(self):
# Can return backslashes on Windows. Normalize to forward slashes.
return list(p.replace('\\', '/') for p in
self._run(b'files', b'-0').split('\0') if p)
self._run(b'files', b'-0').split(b'\0') if p)
def working_directory_clean(self, untracked=False, ignored=False):
args = [b'status', b'--modified', b'--added', b'--removed',
@@ -520,7 +520,7 @@ class GitRepository(Repository):
self._run('reset', *paths)
def get_files_in_working_directory(self):
return self._run('ls-files', '-z').split('\0')
return self._run('ls-files', '-z').split(b'\0')
def working_directory_clean(self, untracked=False, ignored=False):
args = ['status', '--porcelain']
@@ -15,6 +15,6 @@ import subprocess
def main(output, *inputs):
env = dict(os.environ)
env['PERL'] = str(buildconfig.substs['PERL'])
output.write(subprocess.check_output([buildconfig.substs['PYTHON3'],
output.write(subprocess.check_output([buildconfig.substs['PYTHON'],
inputs[0], inputs[2]], env=env))
return None
@@ -72,7 +72,7 @@ if CONFIG['MOZ_BUILD_APP'].startswith('comm/'):
# We run shlibsign as part of packaging, not build.
gyp_vars['sign_libs'] = 0
gyp_vars['python'] = CONFIG['PYTHON3']
gyp_vars['python'] = CONFIG['PYTHON']
# The NSS gyp files do not have a default for this.
gyp_vars['nss_dist_dir'] = '$PRODUCT_DIR/dist'
# NSS wants to put public headers in $nss_dist_dir/public/nss by default,
@@ -26,12 +26,12 @@ def smooth_scroll(marionette_session, start_element, axis, direction,
scroll_back = scroll_back or False
current = 0
if axis == "x":
if direction == -1:
if direction is -1:
offset = [-increments, 0]
else:
offset = [increments, 0]
else:
if direction == -1:
if direction is -1:
offset = [0, -increments]
else:
offset = [0, increments]
@@ -9,7 +9,7 @@ import socket
import sys
import time
import six
from six import reraise
class SocketTimeout(object):
@@ -129,10 +129,7 @@ class TcpTransport(object):
# protocol 3 and above
if self.protocol >= 3:
if six.PY3:
typ = int(chr(packet[1]))
else:
typ = int(packet[1])
typ = int(packet[1])
if typ == Command.TYPE:
msg = Command.from_msg(packet)
elif typ == Response.TYPE:
@@ -148,7 +145,7 @@ class TcpTransport(object):
the raw packet.
"""
now = time.time()
data = b""
data = ""
bytes_to_recv = 10
while self.socket_timeout is None or (time.time() - now < self.socket_timeout):
@@ -161,7 +158,7 @@ class TcpTransport(object):
if not chunk:
raise socket.error("No data received over socket")
sep = data.find(b":")
sep = data.find(":")
if sep > -1:
length = data[0:sep]
remaining = data[sep + 1:]
@@ -210,7 +207,7 @@ class TcpTransport(object):
except socket.timeout:
exc_cls, exc, tb = sys.exc_info()
msg = "Connection attempt failed because no data has been received over the socket: {}"
six.reraise(exc_cls, exc_cls(msg.format(exc)), tb)
reraise(exc_cls, exc_cls(msg.format(exc)), tb)
hello = json.loads(raw)
application_type = hello.get("applicationType")
@@ -241,8 +238,7 @@ class TcpTransport(object):
self.expected_response = obj
else:
data = json.dumps(obj)
data = six.ensure_binary(data)
payload = six.ensure_binary(str(len(data))) + b":" + data
payload = "{0}:{1}".format(len(data), data)
totalsent = 0
while totalsent < len(payload):
@@ -27,11 +27,11 @@ CHECK_TEST_ERROR_RERUN = $(call check_test_error_internal,'To rerun your failure
endif
# Usage: |make [EXTRA_TEST_ARGS=...] *test|.
RUN_REFTEST = rm -f ./$@.log && $(PYTHON3) _tests/reftest/runreftest.py \
RUN_REFTEST = rm -f ./$@.log && $(PYTHON) _tests/reftest/runreftest.py \
--extra-profile-file=$(DIST)/plugins \
$(SYMBOLS_PATH) $(EXTRA_TEST_ARGS) $(1) | tee ./$@.log
REMOTE_REFTEST = rm -f ./$@.log && $(PYTHON3) _tests/reftest/remotereftest.py \
REMOTE_REFTEST = rm -f ./$@.log && $(PYTHON) _tests/reftest/remotereftest.py \
--ignore-window-size \
--app=$(TEST_PACKAGE_NAME) --deviceIP=${TEST_DEVICE} --xre-path=${MOZ_HOST_BIN} \
--httpd-path=_tests/modules --suite reftest \
@@ -81,7 +81,7 @@ jstestbrowser:
GARBAGE += $(addsuffix .log,$(MOCHITESTS) reftest crashtest jstestbrowser)
REMOTE_CPPUNITTESTS = \
$(PYTHON3) -u $(topsrcdir)/testing/remotecppunittests.py \
$(PYTHON) -u $(topsrcdir)/testing/remotecppunittests.py \
--xre-path=$(DEPTH)/dist/bin \
--localLib=$(DEPTH)/dist/fennec \
--deviceIP=${TEST_DEVICE} \
@@ -138,7 +138,7 @@ PKG_ARG = --$(1) '$(PKG_BASENAME).$(1).tests.$(2)'
test-packages-manifest:
@rm -f $(MOZ_TEST_PACKAGES_FILE)
$(NSINSTALL) -D $(dir $(MOZ_TEST_PACKAGES_FILE))
$(PYTHON3) $(topsrcdir)/build/gen_test_packages_manifest.py \
$(PYTHON) $(topsrcdir)/build/gen_test_packages_manifest.py \
--jsshell $(JSSHELL_NAME) \
--dest-file '$(MOZ_TEST_PACKAGES_FILE)' \
$(call PKG_ARG,common,zip) \
@@ -158,7 +158,7 @@ download-wpt-manifest:
define package_archive
package-tests-$(1): stage-all package-tests-prepare-dest download-wpt-manifest
$$(call py_action,test_archive, \
$$(call py3_action,test_archive, \
$(1) \
'$$(abspath $$(test_archive_dir))/$$(PKG_BASENAME).$(1).tests.$(2)')
package-tests: package-tests-$(1)
@@ -268,7 +268,7 @@ stage-extensions: make-stage-dir
check::
$(eval cores=$(shell $(PYTHON3) -c 'import multiprocessing; print(multiprocessing.cpu_count())'))
$(eval cores=$(shell $(PYTHON) -c 'import multiprocessing; print(multiprocessing.cpu_count())'))
@echo "Starting 'mach python-test' with -j$(cores)"
@$(topsrcdir)/mach --log-no-times python-test -j$(cores) --subsuite default
@echo "Finished 'mach python-test' successfully"
@@ -118,7 +118,7 @@ def taskcluster_url(logger, commits):
if not pushes:
logger.debug("Error reading response; 'pushes' key not found")
continue
[cset] = next(iter(pushes.values()))['changesets']
[cset] = pushes.values()[0]['changesets']
tc_index_url = tc_url.format(changeset=cset, name=index_name)
try:
@@ -1,18 +1,9 @@
import io
import itertools
import json
import os
from copy import deepcopy
from multiprocessing import Pool, cpu_count
from six import (
PY3,
binary_type,
ensure_text,
iteritems,
itervalues,
string_types,
text_type,
)
from six import PY3, iteritems, itervalues, string_types, binary_type, text_type
from . import vcs
from .item import (ConformanceCheckerTest, ManifestItem, ManualTest, RefTest, SupportFile,
@@ -327,7 +318,7 @@ def _load(logger, # type: Logger
else:
logger.debug("Creating new manifest at %s" % manifest)
try:
with io.open(manifest, "r", encoding="utf-8") as f:
with open(manifest, "rb") as f:
rv = Manifest.from_json(tests_root,
fast_json.load(f),
types=types,
@@ -5,7 +5,7 @@ import time
import traceback
import uuid
from six import iteritems, iterkeys
from six import iteritems
from six.moves.urllib.parse import urljoin
errors = None
@@ -602,7 +602,7 @@ class MarionetteProtocol(Protocol):
def on_environment_change(self, old_environment, new_environment):
#Unset all the old prefs
for name in iterkeys(old_environment.get("prefs", {})):
for name in old_environment.get("prefs", {}).iterkeys():
value = self.executor.original_pref_values[name]
if value is None:
self.prefs.clear(name)
@@ -78,12 +78,7 @@ class FontInstaller(object):
# Per https://github.com/web-platform-tests/results-collection/issues/218
# installing Ahem on macOS is flaky, so check if it actually installed
fonts = check_output(['/usr/sbin/system_profiler', '-xml', 'SPFontsDataType'])
try:
# if py3
readPlistFromBytes = plistlib.readPlistFromBytes
except AttributeError:
readPlistFromBytes = plistlib.readPlistFromString
fonts = readPlistFromBytes(fonts)
fonts = plistlib.readPlistFromString(fonts)
assert len(fonts) == 1
for font in fonts[0]['_items']:
if font['path'] == installed_font_path:
@@ -401,8 +401,7 @@ class TestharnessTest(Test):
testdriver = manifest_item.testdriver if hasattr(manifest_item, "testdriver") else False
jsshell = manifest_item.jsshell if hasattr(manifest_item, "jsshell") else False
script_metadata = manifest_item.script_metadata or []
scripts = [v for (k, v) in script_metadata
if k in (b"script", "script")]
scripts = [v for (k, v) in script_metadata if k == b"script"]
return cls(manifest_file.tests_root,
manifest_item.url,
inherit_metadata,
@@ -112,7 +112,7 @@ MOZDEPTH ?= $(DEPTH)
repackage-zip: UNPACKAGE='$(ZIP_IN)'
repackage-zip:
$(PYTHON3) $(MOZILLA_DIR)/toolkit/mozapps/installer/l10n-repack.py '$(STAGEDIST)' $(DIST)/xpi-stage/locale-$(AB_CD) \
$(PYTHON) $(MOZILLA_DIR)/toolkit/mozapps/installer/l10n-repack.py '$(STAGEDIST)' $(DIST)/xpi-stage/locale-$(AB_CD) \
$(MOZ_PKG_EXTRAL10N) \
$(if $(filter omni,$(MOZ_PACKAGER_FORMAT)),$(if $(NON_OMNIJAR_FILES),--non-resource $(NON_OMNIJAR_FILES)))
@@ -218,8 +218,8 @@ package-langpack-%: XPI_NAME=locale-$*
package-langpack-%: AB_CD=$*
package-langpack-%:
$(NSINSTALL) -D $(DIST)/$(PKG_LANGPACK_PATH)
$(call py_action,langpack_manifest,--locales $(AB_CD) --min-app-ver $(MOZ_APP_VERSION) --max-app-ver $(MOZ_APP_MAXVERSION) --app-name '$(MOZ_APP_DISPLAYNAME)' --l10n-basedir '$(L10NBASEDIR)' --defines $(LANGPACK_DEFINES) --langpack-eid '$(MOZ_LANGPACK_EID)' --input $(DIST)/xpi-stage/locale-$(AB_CD))
$(call py_action,zip,-C $(DIST)/xpi-stage/locale-$(AB_CD) -x **/*.manifest -x **/*.js -x **/*.ini $(LANGPACK_FILE) $(PKG_ZIP_DIRS) manifest.json)
$(call py3_action,langpack_manifest,--locales $(AB_CD) --min-app-ver $(MOZ_APP_VERSION) --max-app-ver $(MOZ_APP_MAXVERSION) --app-name '$(MOZ_APP_DISPLAYNAME)' --l10n-basedir '$(L10NBASEDIR)' --defines $(LANGPACK_DEFINES) --langpack-eid '$(MOZ_LANGPACK_EID)' --input $(DIST)/xpi-stage/locale-$(AB_CD))
$(call py3_action,zip,-C $(DIST)/xpi-stage/locale-$(AB_CD) -x **/*.manifest -x **/*.js -x **/*.ini $(LANGPACK_FILE) $(PKG_ZIP_DIRS) manifest.json)
# This variable is to allow the wget-en-US target to know which ftp server to download from
ifndef EN_US_BINARY_URL
@@ -42,7 +42,7 @@ endif # RUN_FIND_DUPES
ifndef MOZ_IS_COMM_TOPDIR
ifdef RUN_MOZHARNESS_ZIP
# Package mozharness
$(call py_action,test_archive, \
$(call py3_action,test_archive, \
mozharness \
$(ABS_DIST)/$(PKG_PATH)$(MOZHARNESS_PACKAGE))
endif # RUN_MOZHARNESS_ZIP
@@ -211,14 +211,14 @@ endif
# and places it in dist/bin/res - it should be used when packaging a build.
multilocale.txt: LOCALES?=$(MOZ_CHROME_MULTILOCALE)
multilocale.txt:
$(call py_action,file_generate,$(MOZILLA_DIR)/toolkit/locales/gen_multilocale.py main '$(MULTILOCALE_DIR)/multilocale.txt' $(MDDEPDIR)/multilocale.txt.pp '$(MULTILOCALE_DIR)/multilocale.txt' $(ALL_LOCALES))
$(call py3_action,file_generate,$(MOZILLA_DIR)/toolkit/locales/gen_multilocale.py main '$(MULTILOCALE_DIR)/multilocale.txt' $(MDDEPDIR)/multilocale.txt.pp '$(MULTILOCALE_DIR)/multilocale.txt' $(ALL_LOCALES))
# This version of the target uses AB_CD to build multilocale.txt and places it
# in the $(XPI_NAME)/res dir - it should be used when repackaging a build.
multilocale.txt-%: LOCALES?=$(AB_CD)
multilocale.txt-%: MULTILOCALE_DIR=$(DIST)/xpi-stage/$(XPI_NAME)/res
multilocale.txt-%:
$(call py_action,file_generate,$(MOZILLA_DIR)/toolkit/locales/gen_multilocale.py main '$(MULTILOCALE_DIR)/multilocale.txt' $(MDDEPDIR)/multilocale.txt.pp '$(MULTILOCALE_DIR)/multilocale.txt' $(ALL_LOCALES))
$(call py3_action,file_generate,$(MOZILLA_DIR)/toolkit/locales/gen_multilocale.py main '$(MULTILOCALE_DIR)/multilocale.txt' $(MDDEPDIR)/multilocale.txt.pp '$(MULTILOCALE_DIR)/multilocale.txt' $(ALL_LOCALES))
locale-manifest.in: LOCALES?=$(MOZ_CHROME_MULTILOCALE)
locale-manifest.in: $(GLOBAL_DEPS) FORCE
@@ -91,7 +91,7 @@ ifdef FUZZING_INTERFACES
JSSHELL_BINS += fuzz-tests$(BIN_SUFFIX)
endif
MAKE_JSSHELL = $(call py_action,zip,-C $(DIST)/bin --strip $(abspath $(PKG_JSSHELL)) $(JSSHELL_BINS))
MAKE_JSSHELL = $(call py3_action,zip,-C $(DIST)/bin --strip $(abspath $(PKG_JSSHELL)) $(JSSHELL_BINS))
ifneq (,$(PGO_JARLOG_PATH))
# The backslash subst is to work around an issue with our version of mozmake,
@@ -134,14 +134,14 @@ endif
ifeq ($(MOZ_PKG_FORMAT),ZIP)
PKG_SUFFIX = .zip
INNER_MAKE_PACKAGE = $(call py_action,make_zip,'$(MOZ_PKG_DIR)' '$(PACKAGE)')
INNER_UNMAKE_PACKAGE = $(call py_action,make_unzip,$(UNPACKAGE))
INNER_MAKE_PACKAGE = $(call py3_action,make_zip,'$(MOZ_PKG_DIR)' '$(PACKAGE)')
INNER_UNMAKE_PACKAGE = $(call py3_action,make_unzip,$(UNPACKAGE))
endif
ifeq ($(MOZ_PKG_FORMAT),SFX7Z)
PKG_SUFFIX = .exe
INNER_MAKE_PACKAGE = $(call py_action,exe_7z_archive,'$(MOZ_PKG_DIR)' '$(MOZ_INSTALLER_PATH)/app.tag' '$(MOZ_SFX_PACKAGE)' '$(PACKAGE)')
INNER_UNMAKE_PACKAGE = $(call py_action,exe_7z_extract,$(UNPACKAGE) $(MOZ_PKG_DIR))
INNER_MAKE_PACKAGE = $(call py3_action,exe_7z_archive,'$(MOZ_PKG_DIR)' '$(MOZ_INSTALLER_PATH)/app.tag' '$(MOZ_SFX_PACKAGE)' '$(PACKAGE)')
INNER_UNMAKE_PACKAGE = $(call py3_action,exe_7z_extract,$(UNPACKAGE) $(MOZ_PKG_DIR))
endif
#Create an RPM file
@@ -162,7 +162,7 @@ ifeq ($(MOZ_PKG_FORMAT),RPM)
RPM_CMD = \
echo Creating RPM && \
$(PYTHON3) -m mozbuild.action.preprocessor \
$(PYTHON) -m mozbuild.action.preprocessor \
-DMOZ_APP_NAME=$(MOZ_APP_NAME) \
-DMOZ_APP_DISPLAYNAME='$(MOZ_APP_DISPLAYNAME)' \
-DMOZ_APP_REMOTINGNAME='$(MOZ_APP_REMOTINGNAME)' \
@@ -231,7 +231,7 @@ ifeq ($(MOZ_PKG_FORMAT),DMG)
_ABS_MOZSRCDIR = $(shell cd $(MOZILLA_DIR) && pwd)
PKG_DMG_SOURCE = $(MOZ_PKG_DIR)
INNER_MAKE_PACKAGE = \
$(call py_action,make_dmg, \
$(call py3_action,make_dmg, \
$(if $(MOZ_PKG_MAC_DSSTORE),--dsstore '$(MOZ_PKG_MAC_DSSTORE)') \
$(if $(MOZ_PKG_MAC_BACKGROUND),--background '$(MOZ_PKG_MAC_BACKGROUND)') \
$(if $(MOZ_PKG_MAC_ICON),--icon '$(MOZ_PKG_MAC_ICON)') \
@@ -239,7 +239,7 @@ ifeq ($(MOZ_PKG_FORMAT),DMG)
'$(PKG_DMG_SOURCE)' '$(PACKAGE)' \
)
INNER_UNMAKE_PACKAGE = \
$(call py_action,unpack_dmg, \
$(call py3_action,unpack_dmg, \
$(if $(MOZ_PKG_MAC_DSSTORE),--dsstore '$(MOZ_PKG_MAC_DSSTORE)') \
$(if $(MOZ_PKG_MAC_BACKGROUND),--background '$(MOZ_PKG_MAC_BACKGROUND)') \
$(if $(MOZ_PKG_MAC_ICON),--icon '$(MOZ_PKG_MAC_ICON)') \
@@ -99,9 +99,9 @@ $(CONFIG_DIR)/helper.exe: $(HELPER_DEPS)
$(MKDIR) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(srcdir)/,$(INSTALLER_FILES)) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(topsrcdir)/$(MOZ_BRANDING_DIRECTORY)/,$(BRANDING_FILES)) $(CONFIG_DIR)
$(call py_action,preprocessor,-Fsubstitution $(DEFINES) $(ACDEFINES) \
$(call py3_action,preprocessor,-Fsubstitution $(DEFINES) $(ACDEFINES) \
$(srcdir)/nsis/defines.nsi.in -o $(CONFIG_DIR)/defines.nsi)
$(PYTHON3) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
$(PYTHON) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
--preprocess-locale $(topsrcdir) \
$(PPL_LOCALE_ARGS) $(AB_CD) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(MOZILLA_DIR)/toolkit/mozapps/installer/windows/nsis/,$(TOOLKIT_NSIS_FILES)) $(CONFIG_DIR)
@@ -118,9 +118,9 @@ maintenanceservice_installer::
$(MKDIR) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(srcdir)/,$(INSTALLER_FILES)) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(topsrcdir)/$(MOZ_BRANDING_DIRECTORY)/,$(BRANDING_FILES)) $(CONFIG_DIR)
$(call py_action,preprocessor,-Fsubstitution $(DEFINES) $(ACDEFINES) \
$(call py3_action,preprocessor,-Fsubstitution $(DEFINES) $(ACDEFINES) \
$(srcdir)/nsis/defines.nsi.in -o $(CONFIG_DIR)/defines.nsi)
$(PYTHON3) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
$(PYTHON) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
--preprocess-locale $(topsrcdir) \
$(PPL_LOCALE_ARGS) $(AB_CD) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(MOZILLA_DIR)/toolkit/mozapps/installer/windows/nsis/,$(TOOLKIT_NSIS_FILES)) $(CONFIG_DIR)
@@ -12,9 +12,7 @@
# --convert-utf8-utf16le.
from codecs import BOM_UTF16_LE
import io
from os.path import join, isfile
import six
import sys
from optparse import OptionParser
@@ -22,7 +20,7 @@ def open_utf16le_file(path):
"""
Returns an opened file object with a a UTF-16LE byte order mark.
"""
fp = io.open(path, "w+b")
fp = open(path, "w+b")
fp.write(BOM_UTF16_LE)
return fp
@@ -39,7 +37,7 @@ def get_locale_strings(path, prefix, middle, add_cr):
linefeeds when there isn't one already
"""
output = ""
fp = io.open(path, "r", encoding="utf-8")
fp = open(path, "r")
for line in fp:
line = line.strip()
if line == "" or line[0] == "#":
@@ -86,7 +84,7 @@ def preprocess_locale_files(config_dir, l10ndirs):
"LangString ^",
" 0 ",
False)
fp.write(locale_strings.encode("utf-16-le"))
fp.write(unicode(locale_strings, "utf-8").encode("utf-16-le"))
fp.close()
# Create the Modern User Interface language file
@@ -99,7 +97,7 @@ def preprocess_locale_files(config_dir, l10ndirs):
""").encode("utf-16-le"))
locale_strings = get_locale_strings(lookup("mui.properties", l10ndirs),
"!define ", " ", True)
fp.write(locale_strings.encode("utf-16-le"))
fp.write(unicode(locale_strings, "utf-8").encode("utf-16-le"))
fp.write(u"!insertmacro MOZ_MUI_LANGUAGEFILE_END\n".encode("utf-16-le"))
fp.close()
@@ -110,7 +108,7 @@ def preprocess_locale_files(config_dir, l10ndirs):
"LangString ",
" 0 ",
True)
fp.write(locale_strings.encode("utf-16-le"))
fp.write(unicode(locale_strings, "utf-8").encode("utf-16-le"))
fp.close()
def create_nlf_file(moz_dir, ab_cd, config_dir):
@@ -125,9 +123,9 @@ def create_nlf_file(moz_dir, ab_cd, config_dir):
rtl = "-"
# Check whether the locale is right to left from locales.nsi.
fp = io.open(join(moz_dir,
"toolkit/mozapps/installer/windows/nsis/locales.nsi"),
"r", encoding='utf-8')
fp = open(join(moz_dir,
"toolkit/mozapps/installer/windows/nsis/locales.nsi"),
"r")
for line in fp:
line = line.strip()
if line == "!define " + ab_cd + "_rtl":
@@ -177,7 +175,7 @@ def preprocess_locale_file(config_dir,
"LangString ",
" 0 ",
True)
fp.write(locale_strings.encode("utf-16-le"))
fp.write(unicode(locale_strings, "utf-8").encode("utf-16-le"))
fp.close()
@@ -189,9 +187,9 @@ def convert_utf8_utf16le(in_file_path, out_file_path):
in_file_path - the path to the UTF-8 source file to convert
out_file_path - the path to the UTF-16LE destination file to create
"""
in_fp = open(in_file_path, "r", encoding='utf-8')
in_fp = open(in_file_path, "r")
out_fp = open_utf16le_file(out_file_path)
out_fp.write(in_fp.read().encode("utf-16-le"))
out_fp.write(unicode(in_fp.read(), "utf-8").encode("utf-16-le"))
in_fp.close()
out_fp.close()
@@ -150,9 +150,12 @@ def paramlistAsNative(m, empty='void'):
l.append('uint8_t _argc')
if not m.notxpcom and m.realtype.name != 'void':
l.append(paramAsNative(xpidl.Param(
paramtype='out', type=None, name='_retval', attlist=[],
location=None, realtype=m.realtype)))
l.append(paramAsNative(xpidl.Param(paramtype='out',
type=None,
name='_retval',
attlist=[],
location=None,
realtype=m.realtype)))
# Set any optional out params to default to nullptr. Skip if we just added
# extra non-optional args to l.
@@ -474,7 +477,7 @@ def write_interface(iface, fd):
names = uuid_decoder.match(iface.attributes.uuid).groupdict()
m3str = names['m3'] + names['m4']
names['m3joined'] = ", ".join(["0x%s" % m3str[i:i+2] for i in range(0, 16, 2)])
names['m3joined'] = ", ".join(["0x%s" % m3str[i:i+2] for i in xrange(0, 16, 2)])
if iface.name[2] == 'I':
implclass = iface.name[:2] + iface.name[3:]
@@ -1,4 +1,4 @@
[DEFAULT]
skip-if = python == 2
skip-if = python == 3
[runtests.py]
@@ -486,7 +486,7 @@ def write_interface(iface, fd):
# Extract the UUID's information so that it can be written into the struct definition
names = uuid_decoder.match(iface.attributes.uuid).groupdict()
m3str = names['m3'] + names['m4']
names['m3joined'] = ", ".join(["0x%s" % m3str[i:i+2] for i in range(0, 16, 2)])
names['m3joined'] = ", ".join(["0x%s" % m3str[i:i+2] for i in xrange(0, 16, 2)])
names['name'] = iface.name
if printdoccomments:
@@ -50,7 +50,7 @@ def attlistToIDL(attlist):
return ''
attlist = list(attlist)
attlist.sort(key=lambda a: a[0])
attlist.sort(cmp=lambda a, b: cmp(a[0], b[0]))
return '[%s] ' % ','.join(["%s%s" % (name, value is not None and '(%s)' % value or '')
for name, value, aloc in attlist])
@@ -205,7 +205,7 @@ class Location(object):
def pointerline(self):
def i():
for i in range(0, self._colno):
for i in xrange(0, self._colno):
yield " "
yield "^"
@@ -310,8 +310,7 @@ class Include(object):
if not os.path.exists(file):
continue
self.IDL = parent.parser.parse(open(file, encoding='utf-8').read(),
filename=file)
self.IDL = parent.parser.parse(open(file).read(), filename=file)
self.IDL.resolve(parent.incdirs, parent.parser, parent.webidlconfig)
for type in self.IDL.getNames():
parent.setName(type)
@@ -441,10 +440,10 @@ class Forward(object):
# Hack alert: if an identifier is already present, move the doccomments
# forward.
if parent.hasName(self.name):
for i in range(0, len(parent.productions)):
for i in xrange(0, len(parent.productions)):
if parent.productions[i] is self:
break
for i in range(i + 1, len(parent.productions)):
for i in xrange(i + 1, len(parent.productions)):
if hasattr(parent.productions[i], 'doccomments'):
parent.productions[i].doccomments[0:0] = self.doccomments
break
@@ -1862,4 +1861,4 @@ if __name__ == '__main__':
p = IDLParser()
for f in sys.argv[1:]:
print("Parsing %s" % f)
p.parse(open(f, encoding='utf-8').read(), filename=f)
p.parse(open(f).read(), filename=f)
@@ -501,7 +501,7 @@ namespace detail {
# The strings array. We write out individual characters to avoid MSVC restrictions.
fd.write("const char sStrings[] = {\n")
for s, off in strings.items():
for s, off in strings.iteritems():
fd.write(" // %d = %s\n '%s','\\0',\n" % (off, s, "','".join(s)))
fd.write("};\n\n")