Backed out 6 changesets (bug 1632916, bug 1599658, bug 1633037, bug 1633039, bug 1633016, bug 1632920) for SA bustages CLOSED TREE
Backed out changeset 332ce0963b4e (bug 1633039)
Backed out changeset a9904cbc40d9 (bug 1633037)
Backed out changeset d06b0ec349f8 (bug 1599658)
Backed out changeset 8fd300cad80f (bug 1633016)
Backed out changeset f8820941c703 (bug 1632916)
Backed out changeset ac9c2c8746ed (bug 1632920)
parent d1320d8780, commit f137fa0613
73 changed files with 334 additions and 253 deletions

Makefile.in (14 lines changed)
@@ -98,7 +98,7 @@ ifneq (,$(filter FasterMake+RecursiveMake,$(BUILD_BACKENDS)))
 	@# same directory, because that would blow up
 	$(if $(wildcard _build_manifests/install/$(subst /,_,$*)),$(if $(wildcard faster/install_$(subst /,_,$*)*),$(error FasterMake and RecursiveMake ends of the hybrid build system want to handle $*)))
 endif
-	$(addprefix $(call py_action,process_install_manifest,--track install_$(subst /,_,$*).track $*) ,$(wildcard _build_manifests/install/$(subst /,_,$*)))
+	$(addprefix $(call py3_action,process_install_manifest,--track install_$(subst /,_,$*).track $*) ,$(wildcard _build_manifests/install/$(subst /,_,$*)))
 
 # Dummy wrapper rule to allow the faster backend to piggy back
 $(addprefix install-,$(subst /,_,$(filter dist/%,$(install_manifests)))): install-dist_%: install-dist/% ;

@@ -108,7 +108,7 @@ install-tests: install-test-files
 
 .PHONY: install-test-files
 install-test-files:
-	$(call py_action,process_install_manifest,--track install__test_files.track _tests _build_manifests/install/_test_files)
+	$(call py3_action,process_install_manifest,--track install__test_files.track _tests _build_manifests/install/_test_files)
 
 include $(topsrcdir)/build/moz-automation.mk
 

@@ -140,7 +140,7 @@ endif
 
 ifdef MOZ_ANDROID_FAT_AAR_ARCHITECTURES
 recurse_android-fat-aar-artifact:
-	$(call py_action,fat_aar,\
+	$(call py3_action,fat_aar,\
 	 $(addprefix --armeabi-v7a $(MOZ_FETCHES_DIR)/,$(MOZ_ANDROID_FAT_AAR_ARMEABI_V7A)) \
 	 $(addprefix --arm64-v8a $(MOZ_FETCHES_DIR)/,$(MOZ_ANDROID_FAT_AAR_ARM64_V8A)) \
 	 $(addprefix --x86 $(MOZ_FETCHES_DIR)/,$(MOZ_ANDROID_FAT_AAR_X86)) \

@@ -196,14 +196,14 @@ endif
 .PHONY: symbolsfullarchive
 symbolsfullarchive: prepsymbolsarchive
 	$(RM) '$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip'
-	$(call py_action,symbols_archive,'$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip' \
+	$(call py3_action,symbols_archive,'$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip' \
 	 $(abspath $(DIST)/crashreporter-symbols) \
 	 --full-archive)
 
 .PHONY: symbolsarchive
 symbolsarchive: prepsymbolsarchive
 	$(RM) '$(DIST)/$(PKG_PATH)$(SYMBOL_ARCHIVE_BASENAME).zip'
-	$(call py_action,symbols_archive,'$(DIST)/$(PKG_PATH)$(SYMBOL_ARCHIVE_BASENAME).zip' \
+	$(call py3_action,symbols_archive,'$(DIST)/$(PKG_PATH)$(SYMBOL_ARCHIVE_BASENAME).zip' \
 	 $(abspath $(DIST)/crashreporter-symbols))
 
 ifdef MOZ_CRASHREPORTER

@@ -220,7 +220,7 @@ endif
 
 uploadsymbols:
 ifdef MOZ_CRASHREPORTER
-	$(PYTHON3) -u $(topsrcdir)/toolkit/crashreporter/tools/upload_symbols.py '$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip'
+	$(PYTHON) -u $(topsrcdir)/toolkit/crashreporter/tools/upload_symbols.py '$(DIST)/$(PKG_PATH)$(SYMBOL_FULL_ARCHIVE_BASENAME).zip'
 endif
 
 .PHONY: update-packaging

@@ -229,7 +229,7 @@ update-packaging:
 
 .PHONY: package-generated-sources
 package-generated-sources:
-	$(call py_action,package_generated_sources,'$(DIST)/$(PKG_PATH)$(GENERATED_SOURCE_FILE_PACKAGE)')
+	$(call py3_action,package_generated_sources,'$(DIST)/$(PKG_PATH)$(GENERATED_SOURCE_FILE_PACKAGE)')
 
 ifdef JS_STANDALONE
 # Delegate js-specific rules to js
@@ -56,7 +56,7 @@ endif
 # steps. See bug 1431342.
 libs:: $(srcdir)/profile/channel-prefs.js
 	$(NSINSTALL) -D $(DIST)/bin/defaults/pref
-	$(call py_action,preprocessor,-Fsubstitution $(PREF_PPFLAGS) $(ACDEFINES) $^ -o $(DIST)/bin/defaults/pref/channel-prefs.js)
+	$(call py3_action,preprocessor,-Fsubstitution $(PREF_PPFLAGS) $(ACDEFINES) $^ -o $(DIST)/bin/defaults/pref/channel-prefs.js)
 
 ifeq (cocoa,$(MOZ_WIDGET_TOOLKIT))
 

@@ -78,7 +78,7 @@ LPROJ := Contents/Resources/$(LPROJ_ROOT).lproj
 clean clobber repackage::
 	$(RM) -r $(dist_dest)
 
-MAC_BUNDLE_VERSION = $(shell $(PYTHON3) $(srcdir)/macversion.py --version=$(MOZ_APP_VERSION) --buildid=$(DEPTH)/buildid.h)
+MAC_BUNDLE_VERSION = $(shell $(PYTHON) $(srcdir)/macversion.py --version=$(MOZ_APP_VERSION) --buildid=$(DEPTH)/buildid.h)
 
 .PHONY: repackage
 tools repackage:: $(DIST)/bin/$(MOZ_APP_NAME)
@@ -3,24 +3,23 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import absolute_import, print_function, unicode_literals
-import io
+from __future__ import absolute_import, print_function
 from optparse import OptionParser
 import sys
 import re
 
 o = OptionParser()
-o.add_option('--buildid', dest='buildid')
-o.add_option('--version', dest='version')
+o.add_option("--buildid", dest="buildid")
+o.add_option("--version", dest="version")
 
 (options, args) = o.parse_args()
 
 if not options.buildid:
-    print('--buildid is required', file=sys.stderr)
+    print >>sys.stderr, "--buildid is required"
     sys.exit(1)
 
 if not options.version:
-    print('--version is required', file=sys.stderr)
+    print >>sys.stderr, "--version is required"
     sys.exit(1)
 
 # We want to build a version number that matches the format allowed for

@@ -29,8 +28,7 @@ if not options.version:
 # builds), but also so that newly-built older versions (e.g. beta build) aren't
 # considered "newer" than previously-built newer versions (e.g. a trunk nightly)
 
-define, MOZ_BUILDID, buildid = io.open(
-    options.buildid, 'r', encoding='utf-8').read().split()
+define, MOZ_BUILDID, buildid = open(options.buildid, 'r').read().split()
 
 # extract only the major version (i.e. "14" from "14.0b1")
 majorVersion = re.match(r'^(\d+)[^\d].*', options.version).group(1)
@@ -177,7 +177,7 @@ endif
 
 package-compare::
 	cd $(DIST); find $(PKGCOMP_FIND_OPTS) '$(FINDPATH)' -type f | sort > bin-list.txt
-	$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) $(MOZ_PKG_MANIFEST)) | grep '^$(BINPATH)' | sed -e 's/^\///' | sort > $(DIST)/pack-list.txt
+	$(call py3_action,preprocessor,$(DEFINES) $(ACDEFINES) $(MOZ_PKG_MANIFEST)) | grep '^$(BINPATH)' | sed -e 's/^\///' | sort > $(DIST)/pack-list.txt
 	-diff -u $(DIST)/pack-list.txt $(DIST)/bin-list.txt
 	rm -f $(DIST)/pack-list.txt $(DIST)/bin-list.txt
 

@@ -60,16 +60,16 @@ $(CONFIG_DIR)/setup.exe::
 	$(MKDIR) $(CONFIG_DIR)
 	$(INSTALL) $(addprefix $(srcdir)/,$(INSTALLER_FILES)) $(CONFIG_DIR)
 	$(INSTALL) $(addprefix $(topsrcdir)/$(MOZ_BRANDING_DIRECTORY)/,$(BRANDING_FILES)) $(CONFIG_DIR)
-	$(call py_action,preprocessor,-Fsubstitution $(DEFINES) $(ACDEFINES) \
+	$(call py3_action,preprocessor,-Fsubstitution $(DEFINES) $(ACDEFINES) \
 	 $(srcdir)/nsis/defines.nsi.in -o $(CONFIG_DIR)/defines.nsi)
-	$(PYTHON3) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
+	$(PYTHON) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
 	 --preprocess-locale $(topsrcdir) \
 	 $(PPL_LOCALE_ARGS) $(AB_CD) $(CONFIG_DIR)
-	$(PYTHON3) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
+	$(PYTHON) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
 	 --preprocess-single-file $(topsrcdir) \
 	 $(PPL_LOCALE_ARGS) $(CONFIG_DIR) \
 	 nsisstrings.properties nsisstrings.nlf
-	$(PYTHON3) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
+	$(PYTHON) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
 	 --convert-utf8-utf16le \
 	 $(srcdir)/nsis/extensionsLocale.nsh $(CONFIG_DIR)/extensionsLocale.nsh
 
@@ -168,7 +168,7 @@ task machBuildFaster(type: MachExec) {
 
     workingDir "${topsrcdir}"
 
-    commandLine mozconfig.substs.PYTHON3
+    commandLine mozconfig.substs.PYTHON
     args "${topsrcdir}/mach"
     args 'build'
     args 'faster'
@@ -146,8 +146,8 @@ if test "$GNU_CC" -a "$GCC_USE_GNU_LD" -a -z "$DEVELOPER_OPTIONS"; then
 if AC_TRY_COMMAND([${CC-cc} -o conftest.${ac_objext} $CFLAGS $MOZ_DEBUG_FLAGS -c conftest.${ac_ext} 1>&2]) &&
 AC_TRY_COMMAND([${CC-cc} -o conftest${ac_exeext} $LDFLAGS $MOZ_DEBUG_FLAGS -Wl,--gc-sections conftest.${ac_objext} $LIBS 1>&2]) &&
 test -s conftest${ac_exeext} -a -s conftest.${ac_objext}; then
-if test "`$PYTHON3 -m mozbuild.configure.check_debug_ranges conftest.${ac_objext} conftest.${ac_ext}`" = \
-"`$PYTHON3 -m mozbuild.configure.check_debug_ranges conftest${ac_exeext} conftest.${ac_ext}`"; then
+if test "`$PYTHON -m mozbuild.configure.check_debug_ranges conftest.${ac_objext} conftest.${ac_ext}`" = \
+"`$PYTHON -m mozbuild.configure.check_debug_ranges conftest${ac_exeext} conftest.${ac_ext}`"; then
 GC_SECTIONS_BREAKS_DEBUG_RANGES=no
 else
 GC_SECTIONS_BREAKS_DEBUG_RANGES=yes
@@ -92,7 +92,7 @@ AUTOMATION_EXTRA_CMDLINE-check = --keep-going
 # However, the target automation/buildsymbols will still be executed in this
 # case because it is a prerequisite of automation/upload.
 define automation_commands
-	@+$(PYTHON3) $(topsrcdir)/config/run-and-prefix.py $1 $(MAKE) $1 $(AUTOMATION_EXTRA_CMDLINE-$1)
+	@+$(PYTHON) $(topsrcdir)/config/run-and-prefix.py $1 $(MAKE) $1 $(AUTOMATION_EXTRA_CMDLINE-$1)
 	$(call BUILDSTATUS,TIER_FINISH $1)
 endef
 
@@ -470,6 +470,113 @@ def mozconfig_options(mozconfig, automation, help):
             add(key, value)
 
 
+# Python 2
+# ========
+
+option(env='PYTHON', nargs=1, help='Python 2.7 interpreter')
+
+
+@depends('PYTHON', check_build_environment, 'MOZILLABUILD')
+@imports(_from='__builtin__', _import='Exception')
+@imports(_from='mozbuild.configure.util', _import='LineIO')
+@imports(_from='mozbuild.virtualenv', _import='VirtualenvManager')
+@imports(_from='mozbuild.virtualenv', _import='verify_python_version')
+@imports(_from='mozbuild.pythonutil', _import='find_python2_executable')
+@imports(_from='mozbuild.pythonutil', _import='python_executable_version')
+@imports(_from='six', _import='ensure_text')
+def virtualenv_python2(env_python, build_env, mozillabuild):
+    # Verify that the Python version we executed this code with is the minimum
+    # required version to handle all project code.
+    with LineIO(lambda l: log.error(l)) as out:
+        verify_python_version(out)
+
+    python = env_python[0] if env_python else None
+
+    log.debug("python2: executable from configuration: %r" % python)
+
+    # If this is a mozilla-central build, we'll find the virtualenv in the top
+    # source directory. If this is a SpiderMonkey build, we assume we're at
+    # js/src and try to find the virtualenv from the mozilla-central root.
+    # See mozilla-central changeset d2cce982a7c809815d86d5daecefe2e7a563ecca
+    # Bug 784841
+    topsrcdir, topobjdir = build_env.topsrcdir, build_env.topobjdir
+    if topobjdir.endswith('/js/src'):
+        topobjdir = topobjdir[:-7]
+
+    # If we know the Python executable the caller is asking for then verify its
+    # version. If the caller did not ask for a specific executable then find
+    # a reasonable default.
+    if python:
+        try:
+            version = python_executable_version(python).version
+        except Exception as e:
+            raise FatalCheckError('could not determine version of PYTHON '
+                                  '(%s): %s' % (python, e))
+    elif mozillabuild:
+        # MozillaBuild provides a Python 2.
+        python = normsep('%s/python/python2.exe' % mozillabuild)
+
+        try:
+            version = python_executable_version(python).version
+        except Exception as e:
+            raise FatalCheckError('could not determine version of '
+                                  'MozillaBuild python: %s' % e)
+    else:
+        # Fall back to the search routine.
+        python, version = find_python2_executable()
+
+        # The API returns a bytes whereas everything in configure is unicode.
+        if python:
+            python = ensure_text(python)
+
+    if not python:
+        raise FatalCheckError('Python 2.7 is required to build. '
+                              'Ensure a `python2.7` executable is in your '
+                              'PATH or define PYTHON to point to a Python '
+                              '2.7 executable.')
+
+    if version < (2, 7, 0):
+        raise FatalCheckError('Python 2.7 required to build; '
+                              '%s is Python %d.%d' % (python, version[0],
+                                                      version[1]))
+
+    log.debug("python2: found executable: %r" % python)
+
+    virtualenvs_root = os.path.join(topobjdir, '_virtualenvs')
+    with LineIO(lambda l: log.info(l), 'replace') as out:
+        manager = VirtualenvManager(
+            topsrcdir, topobjdir,
+            os.path.join(virtualenvs_root, 'init'), out,
+            os.path.join(topsrcdir, 'build', 'virtualenv_packages.txt'))
+
+    log.debug("python: using venv: %r" % manager.virtualenv_root)
+
+    if not manager.up_to_date(python):
+        log.info('Creating Python 2 environment')
+        manager.build(python)
+    else:
+        log.debug("python2: venv is up to date")
+
+    python = normsep(manager.python_path)
+    str_version = '.'.join(str(v) for v in version)
+
+    return namespace(
+        path=python,
+        version=version,
+        str_version=str_version,
+    )
+
+
+@depends(virtualenv_python2)
+@checking('for Python 2', callback=lambda x: '%s (%s)' % (x.path, x.str_version))
+def virtualenv_python2(venv):
+    return venv
+
+
+set_config('PYTHON', virtualenv_python2.path)
+add_old_configure_assignment('PYTHON', virtualenv_python2.path)
+
+
 # Source checkout and version control integration.
 # ================================================
 
@@ -32,7 +32,7 @@ ifeq "$(CWD)" "/"
 CWD := /.
 endif
 
-PYTHON3 ?= python3
+PYTHON ?= $(shell which python2.7 > /dev/null 2>&1 && echo python2.7 || echo python)
 
 ####################################
 # Load mozconfig Options

@@ -46,7 +46,7 @@ endif
 
 # Automatically add -jN to make flags if not defined. N defaults to number of cores.
 ifeq (,$(findstring -j,$(MOZ_MAKE_FLAGS)))
-cores=$(shell $(PYTHON3) -c 'import multiprocessing; print(multiprocessing.cpu_count())')
+cores=$(shell $(PYTHON) -c 'import multiprocessing; print(multiprocessing.cpu_count())')
 MOZ_MAKE_FLAGS += -j$(cores)
 endif
 
@@ -9,7 +9,8 @@
 # JSErrorFormatString.format member should be in ASCII encoding.
 # ----------------------------------------------------------------------------
 
-from __future__ import absolute_import, print_function, unicode_literals
+from __future__ import absolute_import
+from __future__ import print_function
 
 import os
 import sys

@@ -38,7 +38,8 @@
 # mismatched alloc/free checking.
 # ----------------------------------------------------------------------------
 
-from __future__ import absolute_import, print_function, unicode_literals
+from __future__ import absolute_import
+from __future__ import print_function
 
 import argparse
 import re

@@ -111,7 +112,7 @@ def main():
     ]
 
     # This is like alloc_fns, but regexp chars are not escaped.
-    alloc_fns_unescaped = [fn.replace('\\', '') for fn in alloc_fns]
+    alloc_fns_unescaped = [fn.translate(None, r'\\') for fn in alloc_fns]
 
     # This regexp matches the relevant lines in the output of |nm|, which look
     # like the following.
@@ -112,7 +112,7 @@ MKDIR ?= mkdir
 SLEEP ?= sleep
 TOUCH ?= touch
 
-PYTHON_PATH = $(PYTHON3) $(topsrcdir)/config/pythonpath.py
+PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py
 
 #
 # Build using PIC by default

@@ -27,4 +27,8 @@ core_winabspath = $(error core_winabspath is unsupported)
 #
 # libs::
 # $(call py_action,purge_manifests,_build_manifests/purge/foo.manifest)
-py_action = $(PYTHON3) -m mozbuild.action.$(1) $(2)
+py_action = $(PYTHON) -m mozbuild.action.$(1) $(2)
+
+# Same as above, but targeting Python 3. Wherever possible py3_action should be
+# used rather than py_action.
+py3_action = $(PYTHON3) -m mozbuild.action.$(1) $(2)
@@ -303,7 +303,7 @@ ifndef MOZ_PROFILE_GENERATE
 ifeq ($(OS_ARCH), Linux)
 ifeq (,$(rustflags_sancov)$(MOZ_ASAN)$(MOZ_TSAN)$(MOZ_UBSAN))
 ifneq (,$(filter -Clto,$(cargo_rustc_flags)))
-	$(call py_action,check_binary,--target --networking $@)
+	$(call py3_action,check_binary,--target --networking $@)
 endif
 endif
 endif
@@ -2,9 +2,8 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import absolute_import, print_function, unicode_literals
-
+from __future__ import absolute_import
+from __future__ import print_function
 """
 Run a python script, adding extra directories to the python path.
 """

@@ -12,7 +11,7 @@ Run a python script, adding extra directories to the python path.
 
 def main(args):
     def usage():
-        print("pythonpath.py -I directory script.py [args...]", file=sys.stderr)
+        print >>sys.stderr, "pythonpath.py -I directory script.py [args...]"
         sys.exit(150)
 
     paths = []

@@ -48,14 +47,14 @@ def main(args):
     frozenglobals['__name__'] = '__main__'
     frozenglobals['__file__'] = script
 
-    exec(open(script, encoding='utf-8').read(), frozenglobals)
+    execfile(script, frozenglobals)
 
 
 # Freeze scope here ... why this makes things work I have no idea ...
 frozenglobals = globals()
 
-import os
 import sys
+import os
 
 if __name__ == '__main__':
     main(sys.argv[1:])
@@ -2,7 +2,8 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import absolute_import, print_function, unicode_literals
+from __future__ import absolute_import
+from __future__ import print_function
 import os
 import errno
 
@@ -28,7 +28,7 @@ USE_AUTOTARGETS_MK = 1
 include $(MOZILLA_DIR)/config/makefiles/makeutils.mk
 
 ifdef REBUILD_CHECK
-REPORT_BUILD = $(info $(shell $(PYTHON3) $(MOZILLA_DIR)/config/rebuild_check.py $@ $^))
+REPORT_BUILD = $(info $(shell $(PYTHON) $(MOZILLA_DIR)/config/rebuild_check.py $@ $^))
 REPORT_BUILD_VERBOSE = $(REPORT_BUILD)
 else
 REPORT_BUILD = $(info $(relativesrcdir)/$(notdir $@))

@@ -66,10 +66,10 @@ INSTALL_TARGETS += CPP_UNIT_TESTS
 endif
 
 run-cppunittests::
-	@$(PYTHON3) $(MOZILLA_DIR)/testing/runcppunittests.py --xre-path=$(DIST)/bin --symbols-path=$(DIST)/crashreporter-symbols $(CPP_UNIT_TESTS)
+	@$(PYTHON) $(MOZILLA_DIR)/testing/runcppunittests.py --xre-path=$(DIST)/bin --symbols-path=$(DIST)/crashreporter-symbols $(CPP_UNIT_TESTS)
 
 cppunittests-remote:
-	$(PYTHON3) -u $(MOZILLA_DIR)/testing/remotecppunittests.py \
+	$(PYTHON) -u $(MOZILLA_DIR)/testing/remotecppunittests.py \
 	 --xre-path=$(DEPTH)/dist/bin \
 	 --localLib=$(DEPTH)/dist/$(MOZ_APP_NAME) \
 	 --deviceIP=${TEST_DEVICE} \

@@ -474,7 +474,7 @@ ifdef MSMANIFEST_TOOL
 endif # MSVC with manifest tool
 else # !WINNT || GNU_CC
 	$(call EXPAND_CC_OR_CXX,$@) -o $@ $(COMPUTED_CXX_LDFLAGS) $(PGO_CFLAGS) $($(notdir $@)_OBJS) $(RESFILE) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(STATIC_LIBS) $(MOZ_PROGRAM_LDFLAGS) $(SHARED_LIBS) $(OS_LIBS)
-	$(call py_action,check_binary,--target $@)
+	$(call py3_action,check_binary,--target $@)
 endif # WINNT && !GNU_CC
 
 ifdef ENABLE_STRIP

@@ -505,7 +505,7 @@ else
 endif # HOST_CPP_PROG_LINK
 endif
 ifndef CROSS_COMPILE
-	$(call py_action,check_binary,--host $@)
+	$(call py3_action,check_binary,--host $@)
 endif
 
 #

@@ -530,7 +530,7 @@ ifdef MSMANIFEST_TOOL
 endif # MSVC with manifest tool
 else
 	$(call EXPAND_CC_OR_CXX,$@) $(COMPUTED_CXX_LDFLAGS) $(PGO_CFLAGS) -o $@ $($@_OBJS) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(STATIC_LIBS) $(MOZ_PROGRAM_LDFLAGS) $(SHARED_LIBS) $(OS_LIBS)
-	$(call py_action,check_binary,--target $@)
+	$(call py3_action,check_binary,--target $@)
 endif # WINNT && !GNU_CC
 
 ifdef ENABLE_STRIP

@@ -552,7 +552,7 @@ else
 endif
 endif
 ifndef CROSS_COMPILE
-	$(call py_action,check_binary,--host $@)
+	$(call py3_action,check_binary,--host $@)
 endif
 
 $(LIBRARY): $(OBJS) $(STATIC_LIBS) $(EXTRA_DEPS) $(GLOBAL_DEPS)

@@ -610,7 +610,7 @@ ifndef INCREMENTAL_LINKER
 	$(RM) $@
 endif
 	$(MKSHLIB) $($@_OBJS) $(RESFILE) $(LDFLAGS) $(STATIC_LIBS) $(SHARED_LIBS) $(EXTRA_DSO_LDOPTS) $(MOZ_GLUE_LDFLAGS) $(OS_LIBS)
-	$(call py_action,check_binary,--target $@)
+	$(call py3_action,check_binary,--target $@)
 
 ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
 ifdef MSMANIFEST_TOOL

@@ -702,7 +702,7 @@ define syms_template
 syms:: $(2)
 $(2): $(1)
 ifdef MOZ_CRASHREPORTER
-	$$(call py_action,dumpsymbols,$$(abspath $$<) $$(abspath $$@) $$(DUMP_SYMBOLS_FLAGS))
+	$$(call py3_action,dumpsymbols,$$(abspath $$<) $$(abspath $$@) $$(DUMP_SYMBOLS_FLAGS))
 ifeq ($(OS_ARCH),WINNT)
 ifdef WINCHECKSEC
 	$$(PYTHON3) $$(topsrcdir)/build/win32/autowinchecksec.py $$<

@@ -941,7 +941,7 @@ endif
 endif
 
 libs realchrome:: $(FINAL_TARGET)/chrome
-	$(call py_action,jar_maker,\
+	$(call py3_action,jar_maker,\
 	 $(QUIET) -d $(FINAL_TARGET) \
 	 $(MAKE_JARS_FLAGS) $(DEFINES) $(ACDEFINES) \
 	 $(JAR_MANIFEST))

@@ -984,7 +984,7 @@ ifndef MOZ_DEBUG
 endif
 endif
 	@echo 'Packaging $(XPI_PKGNAME).xpi...'
-	$(call py_action,zip,-C $(FINAL_TARGET) ../$(XPI_PKGNAME).xpi '*')
+	$(call py3_action,zip,-C $(FINAL_TARGET) ../$(XPI_PKGNAME).xpi '*')
 endif
 
 # See comment above about moving this out of the tools tier.

@@ -1164,7 +1164,7 @@ PP_TARGETS_ALL_RESULTS := $(sort $(foreach tier,$(PP_TARGETS_TIERS),$(PP_TARGETS
 $(PP_TARGETS_ALL_RESULTS):
 	$(if $(filter-out $(notdir $@),$(notdir $(<:.in=))),$(error Looks like $@ has an unexpected dependency on $< which breaks PP_TARGETS))
 	$(RM) '$@'
-	$(call py_action,preprocessor,--depend $(MDDEPDIR)/$(@F).pp $(PP_TARGET_FLAGS) $(DEFINES) $(ACDEFINES) '$<' -o '$@')
+	$(call py3_action,preprocessor,--depend $(MDDEPDIR)/$(@F).pp $(PP_TARGET_FLAGS) $(DEFINES) $(ACDEFINES) '$<' -o '$@')
 
 $(filter %.css,$(PP_TARGETS_ALL_RESULTS)): PP_TARGET_FLAGS+=--marker %
 
@@ -6,22 +6,23 @@
 # This script runs a process and prefixes its output with.
 # Usage: run-and-prefix.py prefix command arg0 argv1...
 
-from __future__ import absolute_import, print_function, unicode_literals
+from __future__ import absolute_import, print_function
 
 import os
 import subprocess
 import sys
 
-sys.stdout = os.fdopen(sys.stdout.fileno(), 'wb', 0)
-sys.stderr = os.fdopen(sys.stderr.fileno(), 'wb', 0)
+sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 0)
 
-prefix = sys.argv[1].encode('utf-8')
+prefix = sys.argv[1]
 args = sys.argv[2:]
 
 p = subprocess.Popen(args, bufsize=0,
                      stdout=subprocess.PIPE,
                      stderr=subprocess.STDOUT,
-                     stdin=sys.stdin.fileno())
+                     stdin=sys.stdin.fileno(),
+                     universal_newlines=True)
 
 while True:
     data = p.stdout.readline()

@@ -29,6 +30,6 @@ while True:
     if data == b'':
         break
 
-    sys.stdout.write(b'%s> %s' % (prefix, data))
+    print('%s> %s' % (prefix, data), end=b'')
 
 sys.exit(p.wait())
@@ -43,7 +43,7 @@ export:: webidl.stub
 -include codegen.pp
 
 webidl.stub: $(codegen_dependencies)
-	$(call py_action,webidl,$(srcdir))
+	$(call py3_action,webidl,$(srcdir))
 	@$(TOUCH) $@
 
 .PHONY: compiletests
@@ -22,7 +22,7 @@ ipdl_py_deps := \
 # NB: the IPDL compiler manages .ipdl-->.h/.cpp dependencies itself,
 # which is why we don't have explicit .h/.cpp targets here
 ipdl.track: $(ALL_IPDLSRCS) $(srcdir)/sync-messages.ini $(srcdir)/message-metadata.ini $(ipdl_py_deps)
-	$(PYTHON3) $(topsrcdir)/config/pythonpath.py \
+	$(PYTHON) $(topsrcdir)/config/pythonpath.py \
 	 $(PLY_INCLUDE) \
 	 $(srcdir)/ipdl.py \
 	 --sync-msg-list=$(srcdir)/sync-messages.ini \
@@ -3,11 +3,11 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 from __future__ import print_function
 
-from io import StringIO
 import optparse
 import os
 import sys
-from configparser import RawConfigParser
+from cStringIO import StringIO
+from ConfigParser import RawConfigParser
 
 import ipdl
 

@@ -9,7 +9,7 @@ __all__ = ['gencxx', 'genipdl', 'parse', 'typecheck', 'writeifmodified',
 
 import os
 import sys
-from io import StringIO
+from cStringIO import StringIO
 
 from ipdl.cgen import IPDLCodeGen
 from ipdl.lower import LowerToCxx, msgenums

@@ -75,7 +75,6 @@ def genmsgenum(ast):
 
 
 def writeifmodified(contents, file):
-    contents = contents.encode('utf-8')
     dir = os.path.dirname(file)
     os.path.exists(dir) or os.makedirs(dir)
 
@@ -3,7 +3,6 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import copy
-import functools
 
 
 class Visitor:

@@ -443,7 +442,6 @@ class TypeFunction(Node):
         self.ret = ret
 
 
-@functools.total_ordering
 class Typedef(Node):
     def __init__(self, fromtype, totypename, templateargs=[]):
         assert isinstance(totypename, str)

@@ -453,12 +451,12 @@ class Typedef(Node):
         self.totypename = totypename
         self.templateargs = templateargs
 
-    def __lt__(self, other):
-        return self.totypename < other.totypename
+    def __cmp__(self, o):
+        return cmp(self.totypename, o.totypename)
 
-    def __eq__(self, other):
-        return (self.__class__ == other.__class__
-                and self.totypename == other.totypename)
+    def __eq__(self, o):
+        return (self.__class__ == o.__class__
+                and 0 == cmp(self, o))
 
     def __hash__(self):
         return hash(self.totypename)
@@ -11,6 +11,7 @@
 # API are often easier to read than users of the AST APIs in these cases.
 
 import re
+import sys
 import math
 import textwrap
 

@@ -139,7 +140,7 @@ def _line(raw, skip_indent, lineno, context):
         values = eval(expr, context, {})
     except Exception as e:
         msg = "%s in substitution on line %d" % (repr(e), lineno)
-        raise ValueError(msg) from e
+        raise ValueError(msg), None, sys.exc_traceback
 
     # If we aren't dealing with lists, wrap the result into a
     # single-element list.

@@ -1450,7 +1450,7 @@ with some new IPDL/C++ nodes that are tuned for C++ codegen."""
 
         # Compute a permutation of the fields for in-memory storage such
         # that the memory layout of the structure will be well-packed.
-        permutation = list(range(len(newfields)))
+        permutation = range(len(newfields))
 
         # Note that the results of `pod_size` ensure that non-POD fields
         # sort before POD ones.
@@ -125,10 +125,10 @@ class Type:
         return self.__class__.__name__
 
     def name(self):
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def fullname(self):
-        raise NotImplementedError()
+        raise NotImplementedError
 
     def accept(self, visitor, *args):
         visit = getattr(visitor, 'visit' + self.__class__.__name__, None)

@@ -229,18 +229,8 @@ class IPDLType(Type):
 
     @classmethod
     def convertsTo(cls, lesser, greater):
-        def _unwrap(nr):
-            if isinstance(nr, dict):
-                return _unwrap(nr['nested'])
-            elif isinstance(nr, int):
-                return nr
-            else:
-                raise ValueError('Got unexpected nestedRange value: %s' % nr)
-
-        lnr0, gnr0, lnr1, gnr1 = (
-            _unwrap(lesser.nestedRange[0]), _unwrap(greater.nestedRange[0]),
-            _unwrap(lesser.nestedRange[1]), _unwrap(greater.nestedRange[1]))
-        if (lnr0 < gnr0 or lnr1 > gnr1):
+        if (lesser.nestedRange[0] < greater.nestedRange[0] or
+                lesser.nestedRange[1] > greater.nestedRange[1]):
             return False
 
         # Protocols that use intr semantics are not allowed to use

@@ -603,7 +593,7 @@ def iteractortypes(t, visited=None):
 
 def hasshmem(type):
     """Return true iff |type| is shmem or has it buried within."""
-    class found(BaseException):
+    class found:
         pass
 
     class findShmem(TypeVisitor):
@@ -20,7 +20,7 @@ include $(topsrcdir)/config/rules.mk
 IPDLUNITTEST_BIN = $(DEPTH)/dist/bin/ipdlunittest$(BIN_SUFFIX)
 
 IPDLUnitTests.cpp : Makefile.in moz.build $(GENTESTER) $(TESTER_TEMPLATE) $(IPDLTESTHDRS)
-	$(PYTHON3) $(GENTESTER) $(TESTER_TEMPLATE) -t $(IPDLTESTS) -e $(EXTRA_PROTOCOLS) > $@
+	$(PYTHON) $(GENTESTER) $(TESTER_TEMPLATE) -t $(IPDLTESTS) -e $(EXTRA_PROTOCOLS) > $@
 
 check-proc::
 	@$(EXIT_ON_ERROR) \
@@ -2,7 +2,7 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import print_function, unicode_literals
+from __future__ import print_function
 
 import string
 import sys

@@ -124,7 +124,7 @@ def main(argv):
 }
 ''' % (t, t, t, t) for t in unittests+extras])
 
-    templatefile = open(template, 'r', encoding='utf-8')
+    templatefile = open(template, 'r')
     sys.stdout.write(
         string.Template(templatefile.read()).substitute(
             INCLUDES=includes,
@@ -28,9 +28,9 @@ class IPDLCompile:
             self.specfilename
         ])
 
-        proc = subprocess.Popen(
-            args=self.argv, stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE, universal_newlines=True)
+        proc = subprocess.Popen(args=self.argv,
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.PIPE)
         self.stdout, self.stderr = proc.communicate()
 
         self.returncode = proc.returncode
@@ -8,9 +8,9 @@ OKTESTS := $(wildcard $(srcdir)/ok/*.ipdl) $(wildcard $(srcdir)/ok/*.ipdlh)
 ERRORTESTS := $(wildcard $(srcdir)/error/*.ipdl) $(wildcard $(srcdir)/error/*.ipdlh)
 
 check::
-	@$(PYTHON3) $(srcdir)/runtests.py \
+	@$(PYTHON) $(srcdir)/runtests.py \
 	 $(srcdir)/ok $(srcdir)/error \
-	 $(PYTHON3) $(topsrcdir)/config/pythonpath.py \
+	 $(PYTHON) $(topsrcdir)/config/pythonpath.py \
 	 $(PLY_INCLUDE) \
 	 $(topsrcdir)/ipc/ipdl/ipdl.py \
 	 --sync-msg-list=$(srcdir)/sync-messages.ini \
ipc/ipdl/test/ipdl/error/BadNestedManagee.ipdl (new file, 9 lines)
@@ -0,0 +1,9 @@
+//error: protocol `BadNestedManagee' requires more powerful send semantics than its manager `BadNestedManager' provides
+
+include protocol BadNestedManager;
+
+nested(upto inside_sync) async protocol BadNestedManagee {
+    manager BadNestedManager;
+child:
+    async __delete__();
+};

ipc/ipdl/test/ipdl/error/BadNestedManager.ipdl (new file, 9 lines)
@@ -0,0 +1,9 @@
+//error: protocol `BadNestedManagee' requires more powerful send semantics than its manager `BadNestedManager' provides
+
+include protocol BadNestedManagee;
+
+nested(upto not) async protocol BadNestedManager {
+    manages BadNestedManagee;
+parent:
+    async BadNestedManagee();
+};

@@ -1,9 +0,0 @@
-//error: protocol `PBadNestedManagee' requires more powerful send semantics than its manager `PBadNestedManager' provides
-
-include protocol PBadNestedManager;
-
-nested(upto inside_sync) async protocol PBadNestedManagee {
-    manager PBadNestedManager;
-child:
-    async __delete__();
-};

@@ -1,9 +0,0 @@
-//error: protocol `PBadNestedManagee' requires more powerful send semantics than its manager `PBadNestedManager' provides
-
-include protocol PBadNestedManagee;
-
-nested(upto not) async protocol PBadNestedManager {
-    manages PBadNestedManagee;
-parent:
-    async PBadNestedManagee();
-};
@@ -40,10 +40,10 @@ endif
 endif
 
 check-js-msg::
-	(cd $(topsrcdir) && $(PYTHON3) $(topsrcdir)/config/check_js_msg_encoding.py);
+	(cd $(topsrcdir) && $(PYTHON) $(topsrcdir)/config/check_js_msg_encoding.py);
 
 check-jit-test::
-	$(JITTEST_SANITIZER_ENV) $(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON3) -u $(srcdir)/jit-test/jit_test.py \
+	$(JITTEST_SANITIZER_ENV) $(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON) -u $(srcdir)/jit-test/jit_test.py \
 	 --no-slow --no-progress --format=automation --jitflags=all \
 	 $(JITTEST_VALGRIND_FLAG) \
 	 $(JITTEST_EXTRA_ARGS) \

@@ -52,7 +52,7 @@ check-jit-test::
 check:: check-js-msg
 
 check-jstests:
-	$(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON3) -u $(srcdir)/tests/jstests.py \
+	$(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON) -u $(srcdir)/tests/jstests.py \
 	 --no-progress --format=automation --timeout 300 \
 	 $(JSTESTS_EXTRA_ARGS) \
 	 $(DIST)/bin/$(JS_SHELL_NAME)$(BIN_SUFFIX)
@@ -9,12 +9,12 @@ include $(topsrcdir)/config/rules.mk
 # check_vanilla_allocations.py is tailored to Linux, so only run it there.
 # That should be enough to catch any problems.
 check-vanilla-allocations:
-	$(PYTHON3) $(topsrcdir)/config/check_vanilla_allocations.py $(REAL_LIBRARY)
+	$(PYTHON) $(topsrcdir)/config/check_vanilla_allocations.py $(REAL_LIBRARY)
 
 # The "aggressive" variant will likely fail on some compiler/platform
 # combinations, but is worth running by hand every once in a while.
 check-vanilla-allocations-aggressive:
-	$(PYTHON3) $(topsrcdir)/config/check_vanilla_allocations.py --aggressive $(REAL_LIBRARY)
+	$(PYTHON) $(topsrcdir)/config/check_vanilla_allocations.py --aggressive $(REAL_LIBRARY)
 
 ifeq ($(OS_ARCH),Linux)
 ifeq (,$(filter -flto,$(COMPUTED_CFLAGS) $(COMPUTED_CXXFLAGS) $(COMPUTED_LDFLAGS)))

@@ -58,7 +58,7 @@ install:: ../js-config.h
 #
 
 install::
-	$(call py_action,process_install_manifest,--track install_dist_include.track --no-symlinks $(DESTDIR)$(includedir)/$(JS_LIBRARY_NAME) $(DEPTH)/_build_manifests/install/dist_include)
+	$(call py3_action,process_install_manifest,--track install_dist_include.track --no-symlinks $(DESTDIR)$(includedir)/$(JS_LIBRARY_NAME) $(DEPTH)/_build_manifests/install/dist_include)
 
 #
 # END SpiderMonkey header installation
@@ -580,7 +580,7 @@ if args.variant == 'msan':
 
 # Generate stacks from minidumps.
 if use_minidump:
-    venv_python = os.path.join(OBJDIR, "_virtualenvs", "init_py3", "bin", "python3")
+    venv_python = os.path.join(OBJDIR, "_virtualenvs", "init", "bin", "python")
     run_command([
         venv_python,
         os.path.join(DIR.source, "testing/mozbase/mozcrash/mozcrash/mozcrash.py"),

@@ -24,4 +24,4 @@ PKG_STAGE = $(DIST)/test-stage
 stage-package:
 	$(NSINSTALL) -D $(PKG_STAGE)/jsreftest/tests/js/src/tests
 	(cd $(srcdir) && tar $(TAR_CREATE_FLAGS) - $(TEST_FILES)) | (cd $(PKG_STAGE)/jsreftest/tests/js/src/tests && tar -xf -)
-	$(PYTHON3) $(srcdir)/jstests.py --make-manifests $(PKG_STAGE)/jsreftest/tests/js/src/tests
+	$(PYTHON) $(srcdir)/jstests.py --make-manifests $(PKG_STAGE)/jsreftest/tests/js/src/tests
@@ -4,7 +4,6 @@
 
 from __future__ import print_function
 
-import io
 import os
 import posixpath
 import re

@@ -72,7 +71,7 @@ class XULInfo:
         # Read the values.
         val_re = re.compile(r'(TARGET_XPCOM_ABI|OS_TARGET|MOZ_DEBUG)\s*=\s*(.*)')
         kw = {'isdebug': False}
-        for line in io.open(path, encoding='utf-8'):
+        for line in open(path):
             m = val_re.match(line)
             if m:
                 key, val = m.groups()

@@ -339,7 +338,7 @@ def _emit_manifest_at(location, relative, test_gen, depth):
         manifest = ["url-prefix {}jsreftest.html?test={}/".format(
             '../' * depth, relative)] + manifest
 
-    fp = io.open(filename, 'w', encoding='utf-8', newline='\n')
+    fp = open(filename, 'w')
     try:
         fp.write('\n'.join(manifest) + '\n')
     finally:

@@ -379,7 +378,7 @@ def _parse_test_header(fullpath, testcase, xul_tester):
     This looks a bit weird. The reason is that it needs to be efficient, since
     it has to be done on every test
     """
-    fp = io.open(fullpath, 'r', encoding='utf-8')
+    fp = open(fullpath, 'r')
     try:
         buf = fp.read(512)
     finally:

@@ -416,7 +415,7 @@ def _parse_external_manifest(filename, relpath):
 
     entries = []
 
-    with io.open(filename, 'r', encoding='utf-8') as fp:
+    with open(filename, 'r') as fp:
         manifest_re = re.compile(r'^\s*(?P<terms>.*)\s+(?P<type>include|script)\s+(?P<path>\S+)$')
         include_re = re.compile(r'^\s*include\s+(?P<path>\S+)$')
         for line in fp:
@@ -4,10 +4,11 @@
from __future__ import print_function, unicode_literals, division

import subprocess
+ import sys

from datetime import datetime, timedelta
from threading import Thread
- from six.moves.queue import Queue, Empty
+ from Queue import Queue, Empty

from .progressbar import ProgressBar
from .results import NullTestOutput, TestOutput, escape_cmdline

@@ -23,7 +24,7 @@ class TaskFinishedMarker:

def _do_work(qTasks, qResults, qWatch, prefix, run_skipped, timeout, show_cmd):
while True:
- test = qTasks.get()
+ test = qTasks.get(block=True, timeout=sys.maxint)
if test is EndMarker:
qWatch.put(EndMarker)
qResults.put(EndMarker)

@@ -38,9 +39,9 @@ def _do_work(qTasks, qResults, qWatch, prefix, run_skipped, timeout, show_cmd):
if show_cmd:
print(escape_cmdline(cmd))
tStart = datetime.now()
- proc = subprocess.Popen(
- cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
- universal_newlines=True)
+ proc = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)

# Push the task to the watchdog -- it will kill the task
# if it goes over the timeout while we keep its stdout

@@ -74,7 +75,7 @@ def _do_watch(qWatch, timeout):
# ignore this.
if ex.winerror != 5:
raise
- fin = qWatch.get()
+ fin = qWatch.get(block=True, timeout=sys.maxint)
assert fin is TaskFinishedMarker, "invalid finish marker"
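For reference, sys.maxint only exists on Python 2, and Queue.get() blocks indefinitely by default, so no explicit timeout is needed; six.moves.queue papers over the Python 2/3 rename of the module. A minimal sketch:

try:
    from Queue import Queue      # Python 2
except ImportError:
    from queue import Queue      # Python 3; six.moves.queue resolves to this

q = Queue()
q.put("task")
item = q.get()                   # blocks until an item arrives; no timeout argument required
print(item)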
@@ -32,13 +32,13 @@ check:: $(srcdir)/replay.log expected_output.log $(srcdir)/expected_output_minim
# command in replay.log) because starting with libstdc++ 5, a static
# initializer in the STL allocates memory, which we obviously don't have
# in expected_output.log.
- MALLOC_LOG=1 $(LOGALLOC) ./$(PROGRAM) < $< | sed -n '/jemalloc_stats/,$$p' | $(PYTHON3) $(srcdir)/logalloc_munge.py | diff -w - expected_output.log
+ MALLOC_LOG=1 $(LOGALLOC) ./$(PROGRAM) < $< | sed -n '/jemalloc_stats/,$$p' | $(PYTHON) $(srcdir)/logalloc_munge.py | diff -w - expected_output.log
# Test with MALLOC_LOG as a file name
$(RM) test_output.log
MALLOC_LOG=test_output.log $(LOGALLOC) ./$(PROGRAM) < $<
- sed -n '/jemalloc_stats/,$$p' test_output.log | $(PYTHON3) $(srcdir)/logalloc_munge.py | diff -w - expected_output.log
+ sed -n '/jemalloc_stats/,$$p' test_output.log | $(PYTHON) $(srcdir)/logalloc_munge.py | diff -w - expected_output.log

- MALLOC_LOG=1 MALLOC_LOG_MINIMAL=1 $(LOGALLOC) ./$(PROGRAM) < $< | sed -n '/jemalloc_stats/,$$p' | $(PYTHON3) $(srcdir)/logalloc_munge.py | diff -w - $(srcdir)/expected_output_minimal.log
+ MALLOC_LOG=1 MALLOC_LOG_MINIMAL=1 $(LOGALLOC) ./$(PROGRAM) < $< | sed -n '/jemalloc_stats/,$$p' | $(PYTHON) $(srcdir)/logalloc_munge.py | diff -w - $(srcdir)/expected_output_minimal.log

endif
endif

@@ -8,7 +8,7 @@ include $(topsrcdir)/config/config.mk

ifeq (WINNT,$(OS_TARGET))
mozglue.def: $(srcdir)/mozglue.def.in $(GLOBAL_DEPS)
- $(call py_action,preprocessor,$(if $(MOZ_REPLACE_MALLOC),-DMOZ_REPLACE_MALLOC) $(ACDEFINES) $< -o $@)
+ $(call py3_action,preprocessor,$(if $(MOZ_REPLACE_MALLOC),-DMOZ_REPLACE_MALLOC) $(ACDEFINES) $< -o $@)

GARBAGE += mozglue.def
@@ -35,7 +35,7 @@ def process(input_dirs, inc_paths, bindings_conf, cache_dir, header_dir,
rule = mk.create_rule()

glbl = {}
- exec(open(bindings_conf, encoding='utf-8').read(), glbl)
+ execfile(bindings_conf, glbl)
webidlconfig = glbl['DOMInterfaces']

# Write out dependencies for Python modules we import. If this list isn't

@@ -46,7 +46,7 @@ def process(input_dirs, inc_paths, bindings_conf, cache_dir, header_dir,
for path in idl_files:
basename = os.path.basename(path)
stem, _ = os.path.splitext(basename)
- idl_data = open(path, encoding='utf-8').read()
+ idl_data = open(path).read()

idl = p.parse(idl_data, filename=path)
idl.resolve(inc_paths, p, webidlconfig)

@@ -74,7 +74,7 @@ def process(input_dirs, inc_paths, bindings_conf, cache_dir, header_dir,
# time a build is run whether or not anything changed. To fix this we
# unconditionally write out the file.
xpt_path = os.path.join(xpt_dir, '%s.xpt' % module)
- with open(xpt_path, 'w', encoding='utf-8', newline='\n') as fh:
+ with open(xpt_path, 'w') as fh:
jsonxpt.write(jsonxpt.link(xpts), fh)

rule.add_targets([six.ensure_text(xpt_path)])
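For reference, execfile() was removed in Python 3; the equivalent is reading the file and passing its contents to exec() with an explicit globals dict. A minimal sketch with a hypothetical config file:

with open('bindings.conf', 'w') as f:          # hypothetical example config
    f.write("DOMInterfaces = {'Example': {}}\n")

glbl = {}
exec(open('bindings.conf').read(), glbl)       # Python 3 replacement for execfile('bindings.conf', glbl)
print(glbl['DOMInterfaces'])                   # {'Example': {}}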
@@ -171,6 +171,7 @@ class FasterMakeBackend(MakeBackend, PartialBackend):

# Add a few necessary variables inherited from configure
for var in (
+ 'PYTHON',
'PYTHON3',
'ACDEFINES',
'MOZ_BUILD_APP',

@@ -216,7 +217,7 @@ class FasterMakeBackend(MakeBackend, PartialBackend):
[ref_file, l10n_file] + python_deps)
rule.add_commands(
[
- '$(PYTHON3) -m mozbuild.action.l10n_merge '
+ '$(PYTHON) -m mozbuild.action.l10n_merge '
'--output {} --ref-file {} --l10n-file {}'.format(
merge, ref_file, l10n_file
)

@@ -91,7 +91,7 @@ class MakeBackend(CommonBackend):
ret.append((
"""{stub}: {script}{inputs}{backend}{force}
\t$(REPORT_BUILD)
- \t$(call py_action,file_generate,{locale}{script} """ # wrap for E501
+ \t$(call py3_action,file_generate,{locale}{script} """ # wrap for E501
"""{method} {output} {dep_file} {stub}{inputs}{flags})
\t@$(TOUCH) $@
""").format(

@@ -1559,13 +1559,13 @@ class RecursiveMakeBackend(MakeBackend):
mozpath.relpath(obj.path,
obj.install_target))),
]
- rule.add_commands(['$(call py_action,buildlist,%s)' %
+ rule.add_commands(['$(call py3_action,buildlist,%s)' %
' '.join(args)])
args = [
mozpath.join('$(DEPTH)', obj.path),
make_quote(shell_quote(str(obj.entry))),
]
- rule.add_commands(['$(call py_action,buildlist,%s)' % ' '.join(args)])
+ rule.add_commands(['$(call py3_action,buildlist,%s)' % ' '.join(args)])
fragment.dump(backend_file.fh, removal_guard=False)

self._no_skip['misc'].add(obj.relsrcdir)

@@ -1651,7 +1651,7 @@ class RecursiveMakeBackend(MakeBackend):
rule.add_dependencies([source])
rule.add_commands([
'$(RM) $@',
- '$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) '
+ '$(call py3_action,preprocessor,$(DEFINES) $(ACDEFINES) '
'$< -o $@)'
])

@@ -1706,7 +1706,7 @@ class RecursiveMakeBackend(MakeBackend):
# static to preprocessed don't end up writing to a symlink,
# which would modify content in the source directory.
'$(RM) $@',
- '$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) '
+ '$(call py3_action,preprocessor,$(DEFINES) $(ACDEFINES) '
'$< -o $@)'
])
@@ -6,7 +6,7 @@
# to a given compilation unit. This is used as a helper to find a bug in some
# versions of GNU ld.

- from __future__ import absolute_import, print_function, unicode_literals
+ from __future__ import absolute_import, print_function

import subprocess
import sys

@@ -46,8 +46,7 @@ def get_range_length(range, debug_ranges):

def main(bin, compilation_unit):
- p = subprocess.Popen(['objdump', '-W', bin], stdout=subprocess.PIPE,
- stderr=subprocess.PIPE, universal_newlines=True)
+ p = subprocess.Popen(['objdump', '-W', bin], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = p.communicate()
sections = re.split('\n(Contents of the|The section) ', out)
debug_info = [s for s in sections if s.startswith('.debug_info')]
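For reference, universal_newlines=True makes Popen pipes return text (str) instead of bytes on Python 3, which is what lets the later re.split with a str pattern keep working. A minimal sketch:

import subprocess

p = subprocess.Popen(['echo', 'hello'], stdout=subprocess.PIPE,
                     stderr=subprocess.PIPE, universal_newlines=True)
out, err = p.communicate()
print(type(out))   # <class 'str'> on Python 3; without universal_newlines it would be bytes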
@@ -1094,7 +1094,7 @@ class BuildDriver(MozbuildObject):
backend))
for backend in all_backends])):
print('Build configuration changed. Regenerating backend.')
- args = [config.substs['PYTHON3'],
+ args = [config.substs['PYTHON'],
mozpath.join(self.topobjdir, 'config.status')]
self.run_process(args, cwd=self.topobjdir, pass_thru=True)
@@ -416,7 +416,7 @@ class TestRecursiveMakeBackend(BackendTester):
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar.c.pp',
'$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py' % env.topsrcdir,
'$(REPORT_BUILD)',
- '$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir, # noqa
+ '$(call py3_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir, # noqa
'@$(TOUCH) $@',
'',
'export:: $(MDDEPDIR)/foo.h.stub',

@@ -426,7 +426,7 @@ class TestRecursiveMakeBackend(BackendTester):
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.h.pp',
'$(MDDEPDIR)/foo.h.stub: %s/generate-foo.py $(srcdir)/foo-data' % (env.topsrcdir),
'$(REPORT_BUILD)',
- '$(call py_action,file_generate,%s/generate-foo.py main foo.h $(MDDEPDIR)/foo.h.pp $(MDDEPDIR)/foo.h.stub $(srcdir)/foo-data)' % (env.topsrcdir), # noqa
+ '$(call py3_action,file_generate,%s/generate-foo.py main foo.h $(MDDEPDIR)/foo.h.pp $(MDDEPDIR)/foo.h.stub $(srcdir)/foo-data)' % (env.topsrcdir), # noqa
'@$(TOUCH) $@',
'',
]

@@ -449,7 +449,7 @@ class TestRecursiveMakeBackend(BackendTester):
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar.c.pp',
'$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py FORCE' % env.topsrcdir,
'$(REPORT_BUILD)',
- '$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir, # noqa
+ '$(call py3_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir, # noqa
'@$(TOUCH) $@',
'',
'foo.c: $(MDDEPDIR)/foo.c.stub ;',

@@ -458,7 +458,7 @@ class TestRecursiveMakeBackend(BackendTester):
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.c.pp',
'$(MDDEPDIR)/foo.c.stub: %s/generate-foo.py $(srcdir)/foo-data' % (env.topsrcdir),
'$(REPORT_BUILD)',
- '$(call py_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(MDDEPDIR)/foo.c.stub $(srcdir)/foo-data)' % (env.topsrcdir), # noqa
+ '$(call py3_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(MDDEPDIR)/foo.c.stub $(srcdir)/foo-data)' % (env.topsrcdir), # noqa
'@$(TOUCH) $@',
'',
]

@@ -482,7 +482,7 @@ class TestRecursiveMakeBackend(BackendTester):
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.xyz.pp',
'$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa
'$(REPORT_BUILD)',
- '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
+ '$(call py3_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
'@$(TOUCH) $@',
'',
'LOCALIZED_FILES_0_FILES += foo.xyz',

@@ -510,7 +510,7 @@ class TestRecursiveMakeBackend(BackendTester):
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.xyz.pp',
'$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa
'$(REPORT_BUILD)',
- '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
+ '$(call py3_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
'@$(TOUCH) $@',
'',
'libs:: $(MDDEPDIR)/abc.xyz.stub',

@@ -520,7 +520,7 @@ class TestRecursiveMakeBackend(BackendTester):
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/abc.xyz.pp',
'$(MDDEPDIR)/abc.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input FORCE' % env.topsrcdir, # noqa
'$(REPORT_BUILD)',
- '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main abc.xyz $(MDDEPDIR)/abc.xyz.pp $(MDDEPDIR)/abc.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
+ '$(call py3_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main abc.xyz $(MDDEPDIR)/abc.xyz.pp $(MDDEPDIR)/abc.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
'@$(TOUCH) $@',
'',
]

@@ -545,7 +545,7 @@ class TestRecursiveMakeBackend(BackendTester):
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo$(AB_CD).xyz.pp',
'$(MDDEPDIR)/foo$(AB_CD).xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa
'$(REPORT_BUILD)',
- '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo$(AB_CD).xyz $(MDDEPDIR)/foo$(AB_CD).xyz.pp $(MDDEPDIR)/foo$(AB_CD).xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
+ '$(call py3_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo$(AB_CD).xyz $(MDDEPDIR)/foo$(AB_CD).xyz.pp $(MDDEPDIR)/foo$(AB_CD).xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
'@$(TOUCH) $@',
'',
'bar$(AB_rCD).xyz: $(MDDEPDIR)/bar$(AB_rCD).xyz.stub ;',

@@ -554,7 +554,7 @@ class TestRecursiveMakeBackend(BackendTester):
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar$(AB_rCD).xyz.pp',
'$(MDDEPDIR)/bar$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa
'$(REPORT_BUILD)',
- '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main bar$(AB_rCD).xyz $(MDDEPDIR)/bar$(AB_rCD).xyz.pp $(MDDEPDIR)/bar$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
+ '$(call py3_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main bar$(AB_rCD).xyz $(MDDEPDIR)/bar$(AB_rCD).xyz.pp $(MDDEPDIR)/bar$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
'@$(TOUCH) $@',
'',
'zot$(AB_rCD).xyz: $(MDDEPDIR)/zot$(AB_rCD).xyz.stub ;',

@@ -563,7 +563,7 @@ class TestRecursiveMakeBackend(BackendTester):
'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/zot$(AB_rCD).xyz.pp',
'$(MDDEPDIR)/zot$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa
'$(REPORT_BUILD)',
- '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main zot$(AB_rCD).xyz $(MDDEPDIR)/zot$(AB_rCD).xyz.pp $(MDDEPDIR)/zot$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
+ '$(call py3_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main zot$(AB_rCD).xyz $(MDDEPDIR)/zot$(AB_rCD).xyz.pp $(MDDEPDIR)/zot$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa
'@$(TOUCH) $@',
'',
]
@@ -282,12 +282,12 @@ class SimplePackager(object):
self._file_queue.append(self.formatter.add, path, file)
if mozpath.basename(path) == 'install.rdf':
addon = True
- install_rdf = six.ensure_text(file.open('rt').read())
+ install_rdf = file.open('rt').read()
if self.UNPACK_ADDON_RE.search(install_rdf):
addon = 'unpacked'
self._add_addon(mozpath.dirname(path), addon)
elif mozpath.basename(path) == 'manifest.json':
- manifest = six.ensure_text(file.open('rt').read())
+ manifest = file.open('rt').read()
try:
parsed = json.loads(manifest)
except ValueError:

@@ -107,7 +107,7 @@ class L10NRepackFormatterMixin(object):
root, ext = mozpath.splitext(mozpath.basename(path))
self._dictionaries[root] = path
elif path.endswith('/built_in_addons.json'):
- data = json.loads(six.ensure_text(file.open().read()))
+ data = json.load(file.open())
data['dictionaries'] = self._dictionaries
# The GeneratedFile content is only really generated after
# all calls to formatter.add.
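For reference, six.ensure_text decodes bytes to text and leaves str untouched, so the regex search and json.loads on the file contents behave the same on Python 2 and 3 even when the underlying file object hands back bytes. A minimal sketch with hypothetical data:

import json
import six

raw = b'{"languages": {}}'          # hypothetical bytes returned by file.open('rt').read()
manifest = six.ensure_text(raw)     # text on both Python 2 and 3
print(json.loads(manifest))         # {'languages': {}}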
@@ -397,7 +397,7 @@ class HgRepository(Repository):
def get_files_in_working_directory(self):
# Can return backslashes on Windows. Normalize to forward slashes.
return list(p.replace('\\', '/') for p in
- self._run(b'files', b'-0').split('\0') if p)
+ self._run(b'files', b'-0').split(b'\0') if p)

def working_directory_clean(self, untracked=False, ignored=False):
args = [b'status', b'--modified', b'--added', b'--removed',

@@ -520,7 +520,7 @@ class GitRepository(Repository):
self._run('reset', *paths)

def get_files_in_working_directory(self):
- return self._run('ls-files', '-z').split('\0')
+ return self._run('ls-files', '-z').split(b'\0')

def working_directory_clean(self, untracked=False, ignored=False):
args = ['status', '--porcelain']
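For reference, the two sides of these hunks differ only in the type of the NUL separator; on Python 3 the argument to split must match the type of the data being split (bytes with bytes, str with str) or a TypeError is raised. A minimal sketch:

out = b"a.txt\0b/c.txt\0"                      # NUL-separated output such as `git ls-files -z`
print([p for p in out.split(b"\0") if p])      # bytes data, bytes separator: [b'a.txt', b'b/c.txt']

text = out.decode("utf-8")
print([p for p in text.split("\0") if p])      # or decode first and split on a str separator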
@@ -15,6 +15,6 @@ import subprocess
def main(output, *inputs):
env = dict(os.environ)
env['PERL'] = str(buildconfig.substs['PERL'])
- output.write(subprocess.check_output([buildconfig.substs['PYTHON3'],
+ output.write(subprocess.check_output([buildconfig.substs['PYTHON'],
inputs[0], inputs[2]], env=env))
return None

@@ -72,7 +72,7 @@ if CONFIG['MOZ_BUILD_APP'].startswith('comm/'):

# We run shlibsign as part of packaging, not build.
gyp_vars['sign_libs'] = 0
- gyp_vars['python'] = CONFIG['PYTHON3']
+ gyp_vars['python'] = CONFIG['PYTHON']
# The NSS gyp files do not have a default for this.
gyp_vars['nss_dist_dir'] = '$PRODUCT_DIR/dist'
# NSS wants to put public headers in $nss_dist_dir/public/nss by default,
@@ -26,12 +26,12 @@ def smooth_scroll(marionette_session, start_element, axis, direction,
scroll_back = scroll_back or False
current = 0
if axis == "x":
- if direction == -1:
+ if direction is -1:
offset = [-increments, 0]
else:
offset = [increments, 0]
else:
- if direction == -1:
+ if direction is -1:
offset = [0, -increments]
else:
offset = [0, increments]
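For reference, `direction is -1` tests object identity rather than value equality; it only appears to work because CPython caches small integers, and the language does not guarantee it (newer CPython even warns about `is` with a literal). `==` is the portable comparison. A minimal sketch:

direction = int("-1")          # some integer computed at runtime
print(direction == -1)         # True: value equality, always correct
print(direction is -1)         # identity check; depends on interpreter integer caching, so avoid it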
@@ -9,7 +9,7 @@ import socket
import sys
import time

- import six
+ from six import reraise


class SocketTimeout(object):

@@ -129,10 +129,7 @@ class TcpTransport(object):

# protocol 3 and above
if self.protocol >= 3:
- if six.PY3:
- typ = int(chr(packet[1]))
- else:
- typ = int(packet[1])
+ typ = int(packet[1])
if typ == Command.TYPE:
msg = Command.from_msg(packet)
elif typ == Response.TYPE:

@@ -148,7 +145,7 @@ class TcpTransport(object):
the raw packet.
"""
now = time.time()
- data = b""
+ data = ""
bytes_to_recv = 10

while self.socket_timeout is None or (time.time() - now < self.socket_timeout):

@@ -161,7 +158,7 @@ class TcpTransport(object):
if not chunk:
raise socket.error("No data received over socket")

- sep = data.find(b":")
+ sep = data.find(":")
if sep > -1:
length = data[0:sep]
remaining = data[sep + 1:]

@@ -210,7 +207,7 @@ class TcpTransport(object):
except socket.timeout:
exc_cls, exc, tb = sys.exc_info()
msg = "Connection attempt failed because no data has been received over the socket: {}"
- six.reraise(exc_cls, exc_cls(msg.format(exc)), tb)
+ reraise(exc_cls, exc_cls(msg.format(exc)), tb)

hello = json.loads(raw)
application_type = hello.get("applicationType")

@@ -241,8 +238,7 @@ class TcpTransport(object):
self.expected_response = obj
else:
data = json.dumps(obj)
- data = six.ensure_binary(data)
- payload = six.ensure_binary(str(len(data))) + b":" + data
+ payload = "{0}:{1}".format(len(data), data)

totalsent = 0
while totalsent < len(payload):
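For reference, sockets move bytes, so the six-based send path framed the payload as bytes, and because indexing a bytes object yields an int on Python 3 the receive path went through chr() before int(). A minimal sketch with a hypothetical length-prefixed frame:

import json
import six

packet = b'13:[0,1,null,{}]'                 # hypothetical "length:body" frame
body = packet[packet.find(b":") + 1:]

if six.PY3:
    typ = int(chr(body[1]))                  # body[1] is an int on Python 3
else:
    typ = int(body[1])                       # body[1] is a one-character str on Python 2
print(typ)                                   # 0

data = six.ensure_binary(json.dumps([0, 2, "WebDriver:Status", {}]))
payload = six.ensure_binary(str(len(data))) + b":" + data   # bytes framing suitable for socket.send
print(payload)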
@@ -27,11 +27,11 @@ CHECK_TEST_ERROR_RERUN = $(call check_test_error_internal,'To rerun your failure
endif

# Usage: |make [EXTRA_TEST_ARGS=...] *test|.
- RUN_REFTEST = rm -f ./$@.log && $(PYTHON3) _tests/reftest/runreftest.py \
+ RUN_REFTEST = rm -f ./$@.log && $(PYTHON) _tests/reftest/runreftest.py \
--extra-profile-file=$(DIST)/plugins \
$(SYMBOLS_PATH) $(EXTRA_TEST_ARGS) $(1) | tee ./$@.log

- REMOTE_REFTEST = rm -f ./$@.log && $(PYTHON3) _tests/reftest/remotereftest.py \
+ REMOTE_REFTEST = rm -f ./$@.log && $(PYTHON) _tests/reftest/remotereftest.py \
--ignore-window-size \
--app=$(TEST_PACKAGE_NAME) --deviceIP=${TEST_DEVICE} --xre-path=${MOZ_HOST_BIN} \
--httpd-path=_tests/modules --suite reftest \

@@ -81,7 +81,7 @@ jstestbrowser:
GARBAGE += $(addsuffix .log,$(MOCHITESTS) reftest crashtest jstestbrowser)

REMOTE_CPPUNITTESTS = \
- $(PYTHON3) -u $(topsrcdir)/testing/remotecppunittests.py \
+ $(PYTHON) -u $(topsrcdir)/testing/remotecppunittests.py \
--xre-path=$(DEPTH)/dist/bin \
--localLib=$(DEPTH)/dist/fennec \
--deviceIP=${TEST_DEVICE} \

@@ -138,7 +138,7 @@ PKG_ARG = --$(1) '$(PKG_BASENAME).$(1).tests.$(2)'
test-packages-manifest:
@rm -f $(MOZ_TEST_PACKAGES_FILE)
$(NSINSTALL) -D $(dir $(MOZ_TEST_PACKAGES_FILE))
- $(PYTHON3) $(topsrcdir)/build/gen_test_packages_manifest.py \
+ $(PYTHON) $(topsrcdir)/build/gen_test_packages_manifest.py \
--jsshell $(JSSHELL_NAME) \
--dest-file '$(MOZ_TEST_PACKAGES_FILE)' \
$(call PKG_ARG,common,zip) \

@@ -158,7 +158,7 @@ download-wpt-manifest:

define package_archive
package-tests-$(1): stage-all package-tests-prepare-dest download-wpt-manifest
- $$(call py_action,test_archive, \
+ $$(call py3_action,test_archive, \
$(1) \
'$$(abspath $$(test_archive_dir))/$$(PKG_BASENAME).$(1).tests.$(2)')
package-tests: package-tests-$(1)

@@ -268,7 +268,7 @@ stage-extensions: make-stage-dir

check::
- $(eval cores=$(shell $(PYTHON3) -c 'import multiprocessing; print(multiprocessing.cpu_count())'))
+ $(eval cores=$(shell $(PYTHON) -c 'import multiprocessing; print(multiprocessing.cpu_count())'))
@echo "Starting 'mach python-test' with -j$(cores)"
@$(topsrcdir)/mach --log-no-times python-test -j$(cores) --subsuite default
@echo "Finished 'mach python-test' successfully"
@@ -118,7 +118,7 @@ def taskcluster_url(logger, commits):
if not pushes:
logger.debug("Error reading response; 'pushes' key not found")
continue
- [cset] = next(iter(pushes.values()))['changesets']
+ [cset] = pushes.values()[0]['changesets']

tc_index_url = tc_url.format(changeset=cset, name=index_name)
try:
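For reference, dict.values() returns a list on Python 2 but a non-indexable view on Python 3, so pushes.values()[0] raises TypeError there; next(iter(...)) works on both. A minimal sketch (the dict shape is assumed from the surrounding code):

pushes = {"12345": {"changesets": ["abcdef012345"]}}

first_push = next(iter(pushes.values()))     # portable across Python 2 and 3
[cset] = first_push["changesets"]
print(cset)
# pushes.values()[0] would be equivalent on Python 2 only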
@@ -1,18 +1,9 @@
- import io
import itertools
import json
import os
from copy import deepcopy
from multiprocessing import Pool, cpu_count
- from six import (
- PY3,
- binary_type,
- ensure_text,
- iteritems,
- itervalues,
- string_types,
- text_type,
- )
+ from six import PY3, iteritems, itervalues, string_types, binary_type, text_type

from . import vcs
from .item import (ConformanceCheckerTest, ManifestItem, ManualTest, RefTest, SupportFile,

@@ -327,7 +318,7 @@ def _load(logger, # type: Logger
else:
logger.debug("Creating new manifest at %s" % manifest)
try:
- with io.open(manifest, "r", encoding="utf-8") as f:
+ with open(manifest, "rb") as f:
rv = Manifest.from_json(tests_root,
fast_json.load(f),
types=types,
@@ -5,7 +5,7 @@ import time
import traceback
import uuid

- from six import iteritems, iterkeys
+ from six import iteritems
from six.moves.urllib.parse import urljoin

errors = None

@@ -602,7 +602,7 @@ class MarionetteProtocol(Protocol):

def on_environment_change(self, old_environment, new_environment):
#Unset all the old prefs
- for name in iterkeys(old_environment.get("prefs", {})):
+ for name in old_environment.get("prefs", {}).iterkeys():
value = self.executor.original_pref_values[name]
if value is None:
self.prefs.clear(name)
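For reference, dict.iterkeys() no longer exists on Python 3; six.iterkeys(d), or simply iterating the dict, is the portable spelling. A minimal sketch with hypothetical pref names:

from six import iterkeys

old_prefs = {"dom.example.enabled": True, "network.example.limit": 4}

for name in iterkeys(old_prefs):   # same as `for name in old_prefs:` on both versions
    print(name)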
@@ -78,12 +78,7 @@ class FontInstaller(object):
# Per https://github.com/web-platform-tests/results-collection/issues/218
# installing Ahem on macOS is flaky, so check if it actually installed
fonts = check_output(['/usr/sbin/system_profiler', '-xml', 'SPFontsDataType'])
- try:
- # if py3
- readPlistFromBytes = plistlib.readPlistFromBytes
- except AttributeError:
- readPlistFromBytes = plistlib.readPlistFromString
- fonts = readPlistFromBytes(fonts)
+ fonts = plistlib.readPlistFromString(fonts)
assert len(fonts) == 1
for font in fonts[0]['_items']:
if font['path'] == installed_font_path:
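For reference, plistlib.readPlistFromString exists only on Python 2 and readPlistFromBytes only on Python 3 (and was later removed in 3.9), which is what the removed try/except negotiated; on current Python 3 the supported call is plistlib.loads. A minimal sketch with a tiny hypothetical plist:

import plistlib

xml = b"""<?xml version="1.0" encoding="UTF-8"?>
<plist version="1.0"><array><string>Ahem.ttf</string></array></plist>
"""

fonts = plistlib.loads(xml)        # Python 3.4+; readPlistFromString is the Python 2 spelling
print(fonts)                       # ['Ahem.ttf']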
@@ -401,8 +401,7 @@ class TestharnessTest(Test):
testdriver = manifest_item.testdriver if hasattr(manifest_item, "testdriver") else False
jsshell = manifest_item.jsshell if hasattr(manifest_item, "jsshell") else False
script_metadata = manifest_item.script_metadata or []
- scripts = [v for (k, v) in script_metadata
- if k in (b"script", "script")]
+ scripts = [v for (k, v) in script_metadata if k == b"script"]
return cls(manifest_file.tests_root,
manifest_item.url,
inherit_metadata,
@@ -112,7 +112,7 @@ MOZDEPTH ?= $(DEPTH)

repackage-zip: UNPACKAGE='$(ZIP_IN)'
repackage-zip:
- $(PYTHON3) $(MOZILLA_DIR)/toolkit/mozapps/installer/l10n-repack.py '$(STAGEDIST)' $(DIST)/xpi-stage/locale-$(AB_CD) \
+ $(PYTHON) $(MOZILLA_DIR)/toolkit/mozapps/installer/l10n-repack.py '$(STAGEDIST)' $(DIST)/xpi-stage/locale-$(AB_CD) \
$(MOZ_PKG_EXTRAL10N) \
$(if $(filter omni,$(MOZ_PACKAGER_FORMAT)),$(if $(NON_OMNIJAR_FILES),--non-resource $(NON_OMNIJAR_FILES)))

@@ -218,8 +218,8 @@ package-langpack-%: XPI_NAME=locale-$*
package-langpack-%: AB_CD=$*
package-langpack-%:
$(NSINSTALL) -D $(DIST)/$(PKG_LANGPACK_PATH)
- $(call py_action,langpack_manifest,--locales $(AB_CD) --min-app-ver $(MOZ_APP_VERSION) --max-app-ver $(MOZ_APP_MAXVERSION) --app-name '$(MOZ_APP_DISPLAYNAME)' --l10n-basedir '$(L10NBASEDIR)' --defines $(LANGPACK_DEFINES) --langpack-eid '$(MOZ_LANGPACK_EID)' --input $(DIST)/xpi-stage/locale-$(AB_CD))
- $(call py_action,zip,-C $(DIST)/xpi-stage/locale-$(AB_CD) -x **/*.manifest -x **/*.js -x **/*.ini $(LANGPACK_FILE) $(PKG_ZIP_DIRS) manifest.json)
+ $(call py3_action,langpack_manifest,--locales $(AB_CD) --min-app-ver $(MOZ_APP_VERSION) --max-app-ver $(MOZ_APP_MAXVERSION) --app-name '$(MOZ_APP_DISPLAYNAME)' --l10n-basedir '$(L10NBASEDIR)' --defines $(LANGPACK_DEFINES) --langpack-eid '$(MOZ_LANGPACK_EID)' --input $(DIST)/xpi-stage/locale-$(AB_CD))
+ $(call py3_action,zip,-C $(DIST)/xpi-stage/locale-$(AB_CD) -x **/*.manifest -x **/*.js -x **/*.ini $(LANGPACK_FILE) $(PKG_ZIP_DIRS) manifest.json)

# This variable is to allow the wget-en-US target to know which ftp server to download from
ifndef EN_US_BINARY_URL

@@ -42,7 +42,7 @@ endif # RUN_FIND_DUPES
ifndef MOZ_IS_COMM_TOPDIR
ifdef RUN_MOZHARNESS_ZIP
# Package mozharness
- $(call py_action,test_archive, \
+ $(call py3_action,test_archive, \
mozharness \
$(ABS_DIST)/$(PKG_PATH)$(MOZHARNESS_PACKAGE))
endif # RUN_MOZHARNESS_ZIP

@@ -211,14 +211,14 @@ endif
# and places it in dist/bin/res - it should be used when packaging a build.
multilocale.txt: LOCALES?=$(MOZ_CHROME_MULTILOCALE)
multilocale.txt:
- $(call py_action,file_generate,$(MOZILLA_DIR)/toolkit/locales/gen_multilocale.py main '$(MULTILOCALE_DIR)/multilocale.txt' $(MDDEPDIR)/multilocale.txt.pp '$(MULTILOCALE_DIR)/multilocale.txt' $(ALL_LOCALES))
+ $(call py3_action,file_generate,$(MOZILLA_DIR)/toolkit/locales/gen_multilocale.py main '$(MULTILOCALE_DIR)/multilocale.txt' $(MDDEPDIR)/multilocale.txt.pp '$(MULTILOCALE_DIR)/multilocale.txt' $(ALL_LOCALES))

# This version of the target uses AB_CD to build multilocale.txt and places it
# in the $(XPI_NAME)/res dir - it should be used when repackaging a build.
multilocale.txt-%: LOCALES?=$(AB_CD)
multilocale.txt-%: MULTILOCALE_DIR=$(DIST)/xpi-stage/$(XPI_NAME)/res
multilocale.txt-%:
- $(call py_action,file_generate,$(MOZILLA_DIR)/toolkit/locales/gen_multilocale.py main '$(MULTILOCALE_DIR)/multilocale.txt' $(MDDEPDIR)/multilocale.txt.pp '$(MULTILOCALE_DIR)/multilocale.txt' $(ALL_LOCALES))
+ $(call py3_action,file_generate,$(MOZILLA_DIR)/toolkit/locales/gen_multilocale.py main '$(MULTILOCALE_DIR)/multilocale.txt' $(MDDEPDIR)/multilocale.txt.pp '$(MULTILOCALE_DIR)/multilocale.txt' $(ALL_LOCALES))

locale-manifest.in: LOCALES?=$(MOZ_CHROME_MULTILOCALE)
locale-manifest.in: $(GLOBAL_DEPS) FORCE
@@ -91,7 +91,7 @@ ifdef FUZZING_INTERFACES
JSSHELL_BINS += fuzz-tests$(BIN_SUFFIX)
endif

- MAKE_JSSHELL = $(call py_action,zip,-C $(DIST)/bin --strip $(abspath $(PKG_JSSHELL)) $(JSSHELL_BINS))
+ MAKE_JSSHELL = $(call py3_action,zip,-C $(DIST)/bin --strip $(abspath $(PKG_JSSHELL)) $(JSSHELL_BINS))

ifneq (,$(PGO_JARLOG_PATH))
# The backslash subst is to work around an issue with our version of mozmake,

@@ -134,14 +134,14 @@ endif

ifeq ($(MOZ_PKG_FORMAT),ZIP)
PKG_SUFFIX = .zip
- INNER_MAKE_PACKAGE = $(call py_action,make_zip,'$(MOZ_PKG_DIR)' '$(PACKAGE)')
- INNER_UNMAKE_PACKAGE = $(call py_action,make_unzip,$(UNPACKAGE))
+ INNER_MAKE_PACKAGE = $(call py3_action,make_zip,'$(MOZ_PKG_DIR)' '$(PACKAGE)')
+ INNER_UNMAKE_PACKAGE = $(call py3_action,make_unzip,$(UNPACKAGE))
endif

ifeq ($(MOZ_PKG_FORMAT),SFX7Z)
PKG_SUFFIX = .exe
- INNER_MAKE_PACKAGE = $(call py_action,exe_7z_archive,'$(MOZ_PKG_DIR)' '$(MOZ_INSTALLER_PATH)/app.tag' '$(MOZ_SFX_PACKAGE)' '$(PACKAGE)')
- INNER_UNMAKE_PACKAGE = $(call py_action,exe_7z_extract,$(UNPACKAGE) $(MOZ_PKG_DIR))
+ INNER_MAKE_PACKAGE = $(call py3_action,exe_7z_archive,'$(MOZ_PKG_DIR)' '$(MOZ_INSTALLER_PATH)/app.tag' '$(MOZ_SFX_PACKAGE)' '$(PACKAGE)')
+ INNER_UNMAKE_PACKAGE = $(call py3_action,exe_7z_extract,$(UNPACKAGE) $(MOZ_PKG_DIR))
endif

#Create an RPM file

@@ -162,7 +162,7 @@ ifeq ($(MOZ_PKG_FORMAT),RPM)

RPM_CMD = \
echo Creating RPM && \
- $(PYTHON3) -m mozbuild.action.preprocessor \
+ $(PYTHON) -m mozbuild.action.preprocessor \
-DMOZ_APP_NAME=$(MOZ_APP_NAME) \
-DMOZ_APP_DISPLAYNAME='$(MOZ_APP_DISPLAYNAME)' \
-DMOZ_APP_REMOTINGNAME='$(MOZ_APP_REMOTINGNAME)' \

@@ -231,7 +231,7 @@ ifeq ($(MOZ_PKG_FORMAT),DMG)
_ABS_MOZSRCDIR = $(shell cd $(MOZILLA_DIR) && pwd)
PKG_DMG_SOURCE = $(MOZ_PKG_DIR)
INNER_MAKE_PACKAGE = \
- $(call py_action,make_dmg, \
+ $(call py3_action,make_dmg, \
$(if $(MOZ_PKG_MAC_DSSTORE),--dsstore '$(MOZ_PKG_MAC_DSSTORE)') \
$(if $(MOZ_PKG_MAC_BACKGROUND),--background '$(MOZ_PKG_MAC_BACKGROUND)') \
$(if $(MOZ_PKG_MAC_ICON),--icon '$(MOZ_PKG_MAC_ICON)') \

@@ -239,7 +239,7 @@ ifeq ($(MOZ_PKG_FORMAT),DMG)
'$(PKG_DMG_SOURCE)' '$(PACKAGE)' \
)
INNER_UNMAKE_PACKAGE = \
- $(call py_action,unpack_dmg, \
+ $(call py3_action,unpack_dmg, \
$(if $(MOZ_PKG_MAC_DSSTORE),--dsstore '$(MOZ_PKG_MAC_DSSTORE)') \
$(if $(MOZ_PKG_MAC_BACKGROUND),--background '$(MOZ_PKG_MAC_BACKGROUND)') \
$(if $(MOZ_PKG_MAC_ICON),--icon '$(MOZ_PKG_MAC_ICON)') \
@@ -99,9 +99,9 @@ $(CONFIG_DIR)/helper.exe: $(HELPER_DEPS)
$(MKDIR) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(srcdir)/,$(INSTALLER_FILES)) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(topsrcdir)/$(MOZ_BRANDING_DIRECTORY)/,$(BRANDING_FILES)) $(CONFIG_DIR)
- $(call py_action,preprocessor,-Fsubstitution $(DEFINES) $(ACDEFINES) \
+ $(call py3_action,preprocessor,-Fsubstitution $(DEFINES) $(ACDEFINES) \
$(srcdir)/nsis/defines.nsi.in -o $(CONFIG_DIR)/defines.nsi)
- $(PYTHON3) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
+ $(PYTHON) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
--preprocess-locale $(topsrcdir) \
$(PPL_LOCALE_ARGS) $(AB_CD) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(MOZILLA_DIR)/toolkit/mozapps/installer/windows/nsis/,$(TOOLKIT_NSIS_FILES)) $(CONFIG_DIR)

@@ -118,9 +118,9 @@ maintenanceservice_installer::
$(MKDIR) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(srcdir)/,$(INSTALLER_FILES)) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(topsrcdir)/$(MOZ_BRANDING_DIRECTORY)/,$(BRANDING_FILES)) $(CONFIG_DIR)
- $(call py_action,preprocessor,-Fsubstitution $(DEFINES) $(ACDEFINES) \
+ $(call py3_action,preprocessor,-Fsubstitution $(DEFINES) $(ACDEFINES) \
$(srcdir)/nsis/defines.nsi.in -o $(CONFIG_DIR)/defines.nsi)
- $(PYTHON3) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
+ $(PYTHON) $(topsrcdir)/toolkit/mozapps/installer/windows/nsis/preprocess-locale.py \
--preprocess-locale $(topsrcdir) \
$(PPL_LOCALE_ARGS) $(AB_CD) $(CONFIG_DIR)
$(INSTALL) $(addprefix $(MOZILLA_DIR)/toolkit/mozapps/installer/windows/nsis/,$(TOOLKIT_NSIS_FILES)) $(CONFIG_DIR)
@@ -12,9 +12,7 @@
# --convert-utf8-utf16le.

from codecs import BOM_UTF16_LE
- import io
from os.path import join, isfile
- import six
import sys
from optparse import OptionParser

@@ -22,7 +20,7 @@ def open_utf16le_file(path):
"""
Returns an opened file object with a a UTF-16LE byte order mark.
"""
- fp = io.open(path, "w+b")
+ fp = open(path, "w+b")
fp.write(BOM_UTF16_LE)
return fp

@@ -39,7 +37,7 @@ def get_locale_strings(path, prefix, middle, add_cr):
linefeeds when there isn't one already
"""
output = ""
- fp = io.open(path, "r", encoding="utf-8")
+ fp = open(path, "r")
for line in fp:
line = line.strip()
if line == "" or line[0] == "#":

@@ -86,7 +84,7 @@ def preprocess_locale_files(config_dir, l10ndirs):
"LangString ^",
" 0 ",
False)
- fp.write(locale_strings.encode("utf-16-le"))
+ fp.write(unicode(locale_strings, "utf-8").encode("utf-16-le"))
fp.close()

# Create the Modern User Interface language file

@@ -99,7 +97,7 @@ def preprocess_locale_files(config_dir, l10ndirs):
""").encode("utf-16-le"))
locale_strings = get_locale_strings(lookup("mui.properties", l10ndirs),
"!define ", " ", True)
- fp.write(locale_strings.encode("utf-16-le"))
+ fp.write(unicode(locale_strings, "utf-8").encode("utf-16-le"))
fp.write(u"!insertmacro MOZ_MUI_LANGUAGEFILE_END\n".encode("utf-16-le"))
fp.close()

@@ -110,7 +108,7 @@ def preprocess_locale_files(config_dir, l10ndirs):
"LangString ",
" 0 ",
True)
- fp.write(locale_strings.encode("utf-16-le"))
+ fp.write(unicode(locale_strings, "utf-8").encode("utf-16-le"))
fp.close()

def create_nlf_file(moz_dir, ab_cd, config_dir):

@@ -125,9 +123,9 @@ def create_nlf_file(moz_dir, ab_cd, config_dir):
rtl = "-"

# Check whether the locale is right to left from locales.nsi.
- fp = io.open(join(moz_dir,
+ fp = open(join(moz_dir,
"toolkit/mozapps/installer/windows/nsis/locales.nsi"),
- "r", encoding='utf-8')
+ "r")
for line in fp:
line = line.strip()
if line == "!define " + ab_cd + "_rtl":

@@ -177,7 +175,7 @@ def preprocess_locale_file(config_dir,
"LangString ",
" 0 ",
True)
- fp.write(locale_strings.encode("utf-16-le"))
+ fp.write(unicode(locale_strings, "utf-8").encode("utf-16-le"))
fp.close()

@@ -189,9 +187,9 @@ def convert_utf8_utf16le(in_file_path, out_file_path):
in_file_path - the path to the UTF-8 source file to convert
out_file_path - the path to the UTF-16LE destination file to create
"""
- in_fp = open(in_file_path, "r", encoding='utf-8')
+ in_fp = open(in_file_path, "r")
out_fp = open_utf16le_file(out_file_path)
- out_fp.write(in_fp.read().encode("utf-16-le"))
+ out_fp.write(unicode(in_fp.read(), "utf-8").encode("utf-16-le"))
in_fp.close()
out_fp.close()
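For reference, the unicode() built-in does not exist on Python 3; there, strings read from text files are already unicode, so they can be encoded to UTF-16LE directly, which is the difference between the two sides of these hunks. A minimal sketch with hypothetical NSIS locale content:

locale_strings = u'LangString ^Example 0 "Example"\n'

utf16 = locale_strings.encode("utf-16-le")   # works on Python 2 (unicode) and Python 3 (str)
# Python-2-only path for a UTF-8 byte string s:
#     unicode(s, "utf-8").encode("utf-16-le")
print(len(utf16))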
@@ -150,9 +150,12 @@ def paramlistAsNative(m, empty='void'):
l.append('uint8_t _argc')

if not m.notxpcom and m.realtype.name != 'void':
- l.append(paramAsNative(xpidl.Param(
- paramtype='out', type=None, name='_retval', attlist=[],
- location=None, realtype=m.realtype)))
+ l.append(paramAsNative(xpidl.Param(paramtype='out',
+ type=None,
+ name='_retval',
+ attlist=[],
+ location=None,
+ realtype=m.realtype)))

# Set any optional out params to default to nullptr. Skip if we just added
# extra non-optional args to l.

@@ -474,7 +477,7 @@ def write_interface(iface, fd):

names = uuid_decoder.match(iface.attributes.uuid).groupdict()
m3str = names['m3'] + names['m4']
- names['m3joined'] = ", ".join(["0x%s" % m3str[i:i+2] for i in range(0, 16, 2)])
+ names['m3joined'] = ", ".join(["0x%s" % m3str[i:i+2] for i in xrange(0, 16, 2)])

if iface.name[2] == 'I':
implclass = iface.name[:2] + iface.name[3:]

@@ -1,4 +1,4 @@
[DEFAULT]
- skip-if = python == 2
+ skip-if = python == 3

[runtests.py]
@@ -486,7 +486,7 @@ def write_interface(iface, fd):
# Extract the UUID's information so that it can be written into the struct definition
names = uuid_decoder.match(iface.attributes.uuid).groupdict()
m3str = names['m3'] + names['m4']
- names['m3joined'] = ", ".join(["0x%s" % m3str[i:i+2] for i in range(0, 16, 2)])
+ names['m3joined'] = ", ".join(["0x%s" % m3str[i:i+2] for i in xrange(0, 16, 2)])
names['name'] = iface.name

if printdoccomments:
@@ -50,7 +50,7 @@ def attlistToIDL(attlist):
return ''

attlist = list(attlist)
- attlist.sort(key=lambda a: a[0])
+ attlist.sort(cmp=lambda a, b: cmp(a[0], b[0]))

return '[%s] ' % ','.join(["%s%s" % (name, value is not None and '(%s)' % value or '')
for name, value, aloc in attlist])

@@ -205,7 +205,7 @@ class Location(object):

def pointerline(self):
def i():
- for i in range(0, self._colno):
+ for i in xrange(0, self._colno):
yield " "
yield "^"

@@ -310,8 +310,7 @@ class Include(object):
if not os.path.exists(file):
continue

- self.IDL = parent.parser.parse(open(file, encoding='utf-8').read(),
- filename=file)
+ self.IDL = parent.parser.parse(open(file).read(), filename=file)
self.IDL.resolve(parent.incdirs, parent.parser, parent.webidlconfig)
for type in self.IDL.getNames():
parent.setName(type)

@@ -441,10 +440,10 @@ class Forward(object):
# Hack alert: if an identifier is already present, move the doccomments
# forward.
if parent.hasName(self.name):
- for i in range(0, len(parent.productions)):
+ for i in xrange(0, len(parent.productions)):
if parent.productions[i] is self:
break
- for i in range(i + 1, len(parent.productions)):
+ for i in xrange(i + 1, len(parent.productions)):
if hasattr(parent.productions[i], 'doccomments'):
parent.productions[i].doccomments[0:0] = self.doccomments
break

@@ -1862,4 +1861,4 @@ if __name__ == '__main__':
p = IDLParser()
for f in sys.argv[1:]:
print("Parsing %s" % f)
- p.parse(open(f, encoding='utf-8').read(), filename=f)
+ p.parse(open(f).read(), filename=f)
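For reference, two more Python-2-only constructs appear in these hunks: list.sort(cmp=...) (the cmp keyword and built-in were removed in Python 3 in favour of key=) and xrange (renamed to range). A minimal sketch with a hypothetical attribute list:

attlist = [("uuid", "1234", None), ("scriptable", None, None), ("builtinclass", None, None)]

attlist.sort(key=lambda a: a[0])       # portable; Python 2 only: attlist.sort(cmp=lambda a, b: cmp(a[0], b[0]))
print([name for name, value, aloc in attlist])

print(list(range(0, 16, 2)))           # xrange(0, 16, 2) is the Python-2-only lazy variant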
@@ -501,7 +501,7 @@ namespace detail {

# The strings array. We write out individual characters to avoid MSVC restrictions.
fd.write("const char sStrings[] = {\n")
- for s, off in strings.items():
+ for s, off in strings.iteritems():
fd.write(" // %d = %s\n '%s','\\0',\n" % (off, s, "','".join(s)))
fd.write("};\n\n")