gyp: update gyp to v0.8.0 (#2318)

PR-URL: https://github.com/nodejs/node-gyp/pull/2318
Reviewed-By: Rod Vagg <rod@vagg.org>
Reviewed-By: Jiawen Geng <technicalcute@gmail.com>
Author: Christian Clauss, 2021-01-28 08:11:53 +01:00 (committed by GitHub)
Parent: cc1cbce056
Commit: c3c510d89e
44 changed files with 637 additions and 760 deletions

@ -1,5 +1,4 @@
# TODO: Enable os: windows-latest
# TODO: Enable python-version: 3.5
# TODO: Enable pytest --doctest-modules
name: Python_tests
@ -9,10 +8,10 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
max-parallel: 15
max-parallel: 8
matrix:
os: [macos-latest, ubuntu-latest] # , windows-latest]
python-version: [2.7, 3.6, 3.7, 3.8, 3.9]
python-version: [3.6, 3.7, 3.8, 3.9]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}

@ -1,5 +1,21 @@
# Changelog
## [0.8.0](https://www.github.com/nodejs/gyp-next/compare/v0.7.0...v0.8.0) (2021-01-15)
### ⚠ BREAKING CHANGES
* remove support for Python 2
### Bug Fixes
* revert posix build job ([#86](https://www.github.com/nodejs/gyp-next/issues/86)) ([39dc34f](https://www.github.com/nodejs/gyp-next/commit/39dc34f0799c074624005fb9bbccf6e028607f9d))
### gyp
* Remove support for Python 2 ([#88](https://www.github.com/nodejs/gyp-next/issues/88)) ([22e4654](https://www.github.com/nodejs/gyp-next/commit/22e465426fd892403c95534229af819a99c3f8dc))
## [0.7.0](https://www.github.com/nodejs/gyp-next/compare/v0.6.2...v0.7.0) (2020-12-17)
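The breaking change above shows up throughout this diff as one recurring mechanical pattern: the PY3 = bytes != str guard is deleted and subprocess output is decoded unconditionally. A minimal standalone sketch of the Python 3-only form (illustrative only, modeled on the IsCygwin helper changed below; not itself part of the commit):

    import subprocess

    def is_cygwin():
        # communicate() returns bytes on Python 3, so decode once -- no PY3 check.
        try:
            out = subprocess.Popen(
                "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
            )
            stdout = out.communicate()[0].decode("utf-8")
            return "CYGWIN" in stdout
        except Exception:
            return False

    print(is_cygwin())  # False on non-Cygwin systems (or if uname is missing)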

@ -8,8 +8,6 @@ import os
import sys
import subprocess
PY3 = bytes != str
def IsCygwin():
# Function copied from pylib/gyp/common.py
@ -17,10 +15,8 @@ def IsCygwin():
out = subprocess.Popen(
"uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
stdout, stderr = out.communicate()
if PY3:
stdout = stdout.decode("utf-8")
return "CYGWIN" in str(stdout)
stdout, _ = out.communicate()
return "CYGWIN" in stdout.decode("utf-8")
except Exception:
return False
@ -33,9 +29,7 @@ def UnixifyPath(path):
["cygpath", "-u", path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
stdout, _ = out.communicate()
if PY3:
stdout = stdout.decode("utf-8")
return str(stdout)
return stdout.decode("utf-8")
except Exception:
return path

@ -11,12 +11,9 @@ from operator import attrgetter
import gyp.common
try:
cmp
except NameError:
def cmp(x, y):
return (x > y) - (x < y)
def cmp(x, y):
return (x > y) - (x < y)
# Initialize random number generator
@ -69,7 +66,7 @@ def MakeGuid(name, seed="msvs_new"):
# ------------------------------------------------------------------------------
class MSVSSolutionEntry(object):
class MSVSSolutionEntry:
def __cmp__(self, other):
# Sort by name then guid (so things are in order on vs2008).
return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
@ -190,7 +187,7 @@ class MSVSProject(MSVSSolutionEntry):
# ------------------------------------------------------------------------------
class MSVSSolution(object):
class MSVSSolution:
"""Visual Studio solution."""
def __init__(
@ -292,14 +289,14 @@ class MSVSSolution(object):
if e.items:
f.write("\tProjectSection(SolutionItems) = preProject\r\n")
for i in e.items:
f.write("\t\t%s = %s\r\n" % (i, i))
f.write(f"\t\t{i} = {i}\r\n")
f.write("\tEndProjectSection\r\n")
if isinstance(e, MSVSProject):
if e.dependencies:
f.write("\tProjectSection(ProjectDependencies) = postProject\r\n")
for d in e.dependencies:
f.write("\t\t%s = %s\r\n" % (d.get_guid(), d.get_guid()))
f.write(f"\t\t{d.get_guid()} = {d.get_guid()}\r\n")
f.write("\tEndProjectSection\r\n")
f.write("EndProject\r\n")
@ -310,7 +307,7 @@ class MSVSSolution(object):
# Configurations (variants)
f.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n")
for v in self.variants:
f.write("\t\t%s = %s\r\n" % (v, v))
f.write(f"\t\t{v} = {v}\r\n")
f.write("\tEndGlobalSection\r\n")
# Sort config guids for easier diffing of solution changes.
@ -362,7 +359,7 @@ class MSVSSolution(object):
if not isinstance(e, MSVSFolder):
continue # Does not apply to projects, only folders
for subentry in e.entries:
f.write("\t\t%s = %s\r\n" % (subentry.get_guid(), e.get_guid()))
f.write(f"\t\t{subentry.get_guid()} = {e.get_guid()}\r\n")
f.write("\tEndGlobalSection\r\n")
f.write("EndGlobal\r\n")

@ -9,7 +9,7 @@ import gyp.easy_xml as easy_xml
# ------------------------------------------------------------------------------
class Tool(object):
class Tool:
"""Visual Studio tool."""
def __init__(self, name, attrs=None):
@ -31,7 +31,7 @@ class Tool(object):
return ["Tool", self._attrs]
class Filter(object):
class Filter:
"""Visual Studio filter - that is, a virtual folder."""
def __init__(self, name, contents=None):
@ -48,7 +48,7 @@ class Filter(object):
# ------------------------------------------------------------------------------
class Writer(object):
class Writer:
"""Visual Studio XML project writer."""
def __init__(self, project_path, version, name, guid=None, platforms=None):

@ -14,12 +14,8 @@ The MSBuild schemas were also considered. They are typically found in the
MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
"""
from __future__ import print_function
from gyp import string_types
import sys
import re
import sys
# Dictionaries of settings validators. The key is the tool name, the value is
# a dictionary mapping setting names to validation functions.
@ -36,7 +32,7 @@ _msvs_to_msbuild_converters = {}
_msbuild_name_of_tool = {}
class _Tool(object):
class _Tool:
"""Represents a tool used by MSVS or MSBuild.
Attributes:
@ -68,7 +64,7 @@ def _GetMSBuildToolSettings(msbuild_settings, tool):
return msbuild_settings.setdefault(tool.msbuild_name, {})
class _Type(object):
class _Type:
"""Type of settings (Base class)."""
def ValidateMSVS(self, value):
@ -110,11 +106,11 @@ class _String(_Type):
"""A setting that's just a string."""
def ValidateMSVS(self, value):
if not isinstance(value, string_types):
if not isinstance(value, str):
raise ValueError("expected string; got %r" % value)
def ValidateMSBuild(self, value):
if not isinstance(value, string_types):
if not isinstance(value, str):
raise ValueError("expected string; got %r" % value)
def ConvertToMSBuild(self, value):
@ -126,11 +122,11 @@ class _StringList(_Type):
"""A settings that's a list of strings."""
def ValidateMSVS(self, value):
if not isinstance(value, string_types) and not isinstance(value, list):
if not isinstance(value, (list, str)):
raise ValueError("expected string list; got %r" % value)
def ValidateMSBuild(self, value):
if not isinstance(value, string_types) and not isinstance(value, list):
if not isinstance(value, (list, str)):
raise ValueError("expected string list; got %r" % value)
def ConvertToMSBuild(self, value):
@ -195,7 +191,7 @@ class _Enumeration(_Type):
def __init__(self, label_list, new=None):
_Type.__init__(self)
self._label_list = label_list
self._msbuild_values = set(value for value in label_list if value is not None)
self._msbuild_values = {value for value in label_list if value is not None}
if new is not None:
self._msbuild_values.update(new)
@ -342,7 +338,7 @@ def _ConvertedToAdditionalOption(tool, msvs_name, flag):
if value == "true":
tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
if "AdditionalOptions" in tool_settings:
new_flags = "%s %s" % (tool_settings["AdditionalOptions"], flag)
new_flags = "{} {}".format(tool_settings["AdditionalOptions"], flag)
else:
new_flags = flag
tool_settings["AdditionalOptions"] = new_flags
@ -536,14 +532,14 @@ def _ValidateSettings(validators, settings, stderr):
tool_validators[setting](value)
except ValueError as e:
print(
"Warning: for %s/%s, %s" % (tool_name, setting, e),
f"Warning: for {tool_name}/{setting}, {e}",
file=stderr,
)
else:
_ValidateExclusionSetting(
setting,
tool_validators,
("Warning: unrecognized setting %s/%s" % (tool_name, setting)),
(f"Warning: unrecognized setting {tool_name}/{setting}"),
stderr,
)

@ -9,10 +9,7 @@
import unittest
import gyp.MSVSSettings as MSVSSettings
try:
from StringIO import StringIO # Python 2
except ImportError:
from io import StringIO # Python 3
from io import StringIO
class TestSequenceFunctions(unittest.TestCase):

@ -7,7 +7,7 @@
import gyp.easy_xml as easy_xml
class Writer(object):
class Writer:
"""Visual Studio XML tool file writer."""
def __init__(self, tool_file_path, name):

@ -53,7 +53,7 @@ def _QuoteWin32CommandLineArgs(args):
return new_args
class Writer(object):
class Writer:
"""Visual Studio XML user user file writer."""
def __init__(self, user_file_path, version, name):
@ -93,7 +93,7 @@ class Writer(object):
abs_command = _FindCommandInPath(command[0])
if environment and isinstance(environment, dict):
env_list = ['%s="%s"' % (key, val) for (key, val) in environment.items()]
env_list = [f'{key}="{val}"' for (key, val) in environment.items()]
environment = " ".join(env_list)
else:
environment = ""

@ -55,7 +55,7 @@ def _SuffixName(name, suffix):
Target name with suffix added (foo_suffix#target)
"""
parts = name.rsplit("#", 1)
parts[0] = "%s_%s" % (parts[0], suffix)
parts[0] = "{}_{}".format(parts[0], suffix)
return "#".join(parts)
@ -160,7 +160,7 @@ def _GetPdbPath(target_dict, config_name, vars):
return pdb_path
pdb_base = target_dict.get("product_name", target_dict["target_name"])
pdb_base = "%s.%s.pdb" % (pdb_base, TARGET_TYPE_EXT[target_dict["type"]])
pdb_base = "{}.{}.pdb".format(pdb_base, TARGET_TYPE_EXT[target_dict["type"]])
pdb_path = vars["PRODUCT_DIR"] + "/" + pdb_base
return pdb_path

@ -11,14 +11,12 @@ import subprocess
import sys
import glob
PY3 = bytes != str
def JoinPath(*args):
return os.path.normpath(os.path.join(*args))
class VisualStudioVersion(object):
class VisualStudioVersion:
"""Information regarding a version of Visual Studio."""
def __init__(
@ -176,9 +174,7 @@ def _RegistryQueryBase(sysdir, key, value):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
# Note that the error text may be in [1] in some cases
text = p.communicate()[0]
if PY3:
text = text.decode("utf-8")
text = p.communicate()[0].decode("utf-8")
# Check return code from reg.exe; officially 0==success and 1==error
if p.returncode:
return None
@ -221,21 +217,15 @@ def _RegistryGetValueUsingWinReg(key, value):
value: The particular registry value to read.
Return:
contents of the registry key's value, or None on failure. Throws
ImportError if _winreg is unavailable.
ImportError if winreg is unavailable.
"""
try:
# Python 2
from _winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
except ImportError:
# Python 3
from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
try:
root, subkey = key.split("\\", 1)
assert root == "HKLM" # Only need HKLM for now.
with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
return QueryValueEx(hkey, value)[0]
except WindowsError:
except OSError:
return None
@ -426,9 +416,7 @@ def _ConvertToCygpath(path):
"""Convert to cygwin path if we are using cygwin."""
if sys.platform == "cygwin":
p = subprocess.Popen(["cygpath", path], stdout=subprocess.PIPE)
path = p.communicate()[0].strip()
if PY3:
path = path.decode("utf-8")
path = p.communicate()[0].decode("utf-8").strip()
return path
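Earlier in this file the registry helper switches from the Python 2 _winreg module to a plain winreg import and catches OSError instead of WindowsError. A Windows-only sketch of the same read pattern (the example subkey and value name are my own, not from the diff):

    from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx

    def registry_get_value(subkey, value):
        # Read a value from HKLM; return None if the key or value is missing.
        try:
            with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
                return QueryValueEx(hkey, value)[0]
        except OSError:  # covers FileNotFoundError; was WindowsError on Python 2
            return None

    print(registry_get_value(r"SOFTWARE\Microsoft\Windows NT\CurrentVersion", "ProductName"))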

@ -4,7 +4,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import copy
import gyp.input
@ -16,13 +15,6 @@ import sys
import traceback
from gyp.common import GypError
try:
# Python 2
string_types = basestring
except NameError:
# Python 3
string_types = str
# Default debug modes for GYP
debug = {}
@ -193,7 +185,7 @@ def ShlexEnv(env_name):
def FormatOpt(opt, value):
if opt.startswith("--"):
return "%s=%s" % (opt, value)
return f"{opt}={value}"
return opt + value
@ -524,7 +516,7 @@ def gyp_main(args):
for option, value in sorted(options.__dict__.items()):
if option[0] == "_":
continue
if isinstance(value, string_types):
if isinstance(value, str):
DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
else:
DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)

@ -10,17 +10,12 @@ import tempfile
import sys
import subprocess
try:
from collections.abc import MutableSet
except ImportError:
from collections import MutableSet
PY3 = bytes != str
from collections.abc import MutableSet
# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
# among other "problems".
class memoize(object):
class memoize:
def __init__(self, func):
self.func = func
self.cache = {}
@ -348,7 +343,7 @@ def WriteOnDiff(filename):
the target if it differs (on close).
"""
class Writer(object):
class Writer:
"""Wrapper around file which only covers the target if it differs."""
def __init__(self):
@ -566,8 +561,8 @@ class OrderedSet(MutableSet):
def __repr__(self):
if not self:
return "%s()" % (self.__class__.__name__,)
return "%s(%r)" % (self.__class__.__name__, list(self))
return f"{self.__class__.__name__}()"
return "{}({!r})".format(self.__class__.__name__, list(self))
def __eq__(self, other):
if isinstance(other, OrderedSet):
@ -653,9 +648,7 @@ def IsCygwin():
out = subprocess.Popen(
"uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
stdout, stderr = out.communicate()
if PY3:
stdout = stdout.decode("utf-8")
stdout = out.communicate()[0].decode("utf-8")
return "CYGWIN" in str(stdout)
except Exception:
return False
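This file (gyp's common helpers) keeps its memoize decorator and OrderedSet while dropping the MutableSet import shim and the PY3 flag. A short usage sketch, assuming gyp's pylib directory is importable as gyp.common (the import path is an assumption; the class names are from the hunks above):

    from gyp.common import OrderedSet, memoize

    @memoize
    def double(n):
        # Results are cached per argument tuple by the decorator's cache dict.
        return n * 2

    print(double(21), double(21))  # second call is served from the cache

    s = OrderedSet()
    for name in ("b", "a", "b"):
        s.add(name)
    print(s)  # OrderedSet(['b', 'a']) -- duplicates dropped, insertion order kept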

@ -84,7 +84,7 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0):
rest = specification[1:]
if rest and isinstance(rest[0], dict):
for at, val in sorted(rest[0].items()):
xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
xml_parts.append(' {}="{}"'.format(at, _XmlEscape(val, attr=True)))
rest = rest[1:]
if rest:
xml_parts.append(">")
@ -101,7 +101,7 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0):
_ConstructContentList(xml_parts, child_spec, pretty, level + 1)
if multi_line and indentation:
xml_parts.append(indentation)
xml_parts.append("</%s>%s" % (name, new_line))
xml_parts.append(f"</{name}>{new_line}")
else:
xml_parts.append("/>%s" % new_line)
@ -125,9 +125,9 @@ def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False, win32=False
# Get the old content
try:
with open(path, "r") as file:
with open(path) as file:
existing = file.read()
except IOError:
except OSError:
existing = None
# It has changed, write it

@ -9,10 +9,7 @@
import gyp.easy_xml as easy_xml
import unittest
try:
from StringIO import StringIO # Python 2
except ImportError:
from io import StringIO # Python 3
from io import StringIO
class TestSequenceFunctions(unittest.TestCase):

@ -18,7 +18,7 @@ def main(args):
executor.Dispatch(args)
class FlockTool(object):
class FlockTool:
"""This class emulates the 'flock' command."""
def Dispatch(self, args):

@ -62,7 +62,6 @@ directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
then the "all" target includes "b1" and "b2".
"""
from __future__ import print_function
import gyp.common
import json
@ -216,7 +215,7 @@ def _ExtractSources(target, target_dict, toplevel_dir):
return results
class Target(object):
class Target:
"""Holds information about a particular target:
deps: set of Targets this Target depends upon. This is not recursive, only the
direct dependent Targets.
@ -252,7 +251,7 @@ class Target(object):
self.is_or_has_linked_ancestor = False
class Config(object):
class Config:
"""Details what we're looking for
files: set of files to search for
targets: see file description for details."""
@ -271,10 +270,10 @@ class Config(object):
if not config_path:
return
try:
f = open(config_path, "r")
f = open(config_path)
config = json.load(f)
f.close()
except IOError:
except OSError:
raise Exception("Unable to open file " + config_path)
except ValueError as e:
raise Exception("Unable to parse config file " + config_path + str(e))
@ -586,7 +585,7 @@ def _WriteOutput(params, **values):
f = open(output_path, "w")
f.write(json.dumps(values) + "\n")
f.close()
except IOError as e:
except OSError as e:
print("Error writing to output file", output_path, str(e))
@ -627,7 +626,7 @@ def CalculateVariables(default_variables, params):
default_variables.setdefault("OS", operating_system)
class TargetCalculator(object):
class TargetCalculator:
"""Calculates the matching test_targets and matching compile_targets."""
def __init__(

@ -14,7 +14,6 @@
# variables set potentially clash with other Android build system variables.
# Try to avoid setting global variables where possible.
from __future__ import print_function
import gyp
import gyp.common
@ -84,7 +83,7 @@ def IsCPPExtension(ext):
def Sourceify(path):
"""Convert a path to its source directory form. The Android backend does not
support options.generator_output, so this function is a noop."""
support options.generator_output, so this function is a noop."""
return path
@ -100,11 +99,11 @@ target_outputs = {}
target_link_deps = {}
class AndroidMkWriter(object):
class AndroidMkWriter:
"""AndroidMkWriter packages up the writing of one target-specific Android.mk.
Its only real entry point is Write(), and is mostly used for namespacing.
"""
Its only real entry point is Write(), and is mostly used for namespacing.
"""
def __init__(self, android_top_dir):
self.android_top_dir = android_top_dir
@ -123,18 +122,18 @@ class AndroidMkWriter(object):
):
"""The main entry point: writes a .mk file for a single target.
Arguments:
qualified_target: target we're generating
relative_target: qualified target name relative to the root
base_path: path relative to source root we're building in, used to resolve
target-relative paths
output_filename: output .mk file name to write
spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all'
write_alias_target: flag indicating whether to create short aliases for
this target
sdk_version: what to emit for LOCAL_SDK_VERSION in output
"""
Arguments:
qualified_target: target we're generating
relative_target: qualified target name relative to the root
base_path: path relative to source root we're building in, used to resolve
target-relative paths
output_filename: output .mk file name to write
spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all'
write_alias_target: flag indicating whether to create short aliases for
this target
sdk_version: what to emit for LOCAL_SDK_VERSION in output
"""
gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, "w")
@ -254,15 +253,15 @@ class AndroidMkWriter(object):
def WriteActions(self, actions, extra_sources, extra_outputs):
"""Write Makefile code for any 'actions' from the gyp input.
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
actions (used to make other pieces dependent on these
actions)
"""
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
actions (used to make other pieces dependent on these
actions)
"""
for action in actions:
name = make.StringToMakefileVariable(
"%s_%s" % (self.relative_target, action["action_name"])
"{}_{}".format(self.relative_target, action["action_name"])
)
self.WriteLn('### Rules for action "%s":' % action["action_name"])
inputs = action["inputs"]
@ -350,7 +349,7 @@ class AndroidMkWriter(object):
for output in outputs[1:]:
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
self.WriteLn("%s: %s ;" % (self.LocalPathify(output), main_output))
self.WriteLn("{}: {} ;".format(self.LocalPathify(output), main_output))
extra_outputs += outputs
self.WriteLn()
@ -360,11 +359,11 @@ class AndroidMkWriter(object):
def WriteRules(self, rules, extra_sources, extra_outputs):
"""Write Makefile code for any 'rules' from the gyp input.
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
rules (used to make other pieces dependent on these rules)
"""
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
rules (used to make other pieces dependent on these rules)
"""
if len(rules) == 0:
return
@ -372,7 +371,7 @@ class AndroidMkWriter(object):
if len(rule.get("rule_sources", [])) == 0:
continue
name = make.StringToMakefileVariable(
"%s_%s" % (self.relative_target, rule["rule_name"])
"{}_{}".format(self.relative_target, rule["rule_name"])
)
self.WriteLn('\n### Generated for rule "%s":' % name)
self.WriteLn('# "%s":' % rule)
@ -452,7 +451,7 @@ class AndroidMkWriter(object):
for output in outputs[1:]:
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
self.WriteLn("%s: %s ;" % (output, main_output))
self.WriteLn(f"{output}: {main_output} ;")
self.WriteLn()
self.WriteLn()
@ -460,9 +459,9 @@ class AndroidMkWriter(object):
def WriteCopies(self, copies, extra_outputs):
"""Write Makefile code for any 'copies' from the gyp input.
extra_outputs: a list that will be filled in with any outputs of this action
(used to make other pieces dependent on this action)
"""
extra_outputs: a list that will be filled in with any outputs of this action
(used to make other pieces dependent on this action)
"""
self.WriteLn("### Generated for copy rule.")
variable = make.StringToMakefileVariable(self.relative_target + "_copies")
@ -487,25 +486,25 @@ class AndroidMkWriter(object):
self.LocalPathify(os.path.join(copy["destination"], filename))
)
self.WriteLn(
"%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)" % (output, path)
)
self.WriteLn(f"{output}: {path} $(GYP_TARGET_DEPENDENCIES) | $(ACP)")
self.WriteLn("\t@echo Copying: $@")
self.WriteLn("\t$(hide) mkdir -p $(dir $@)")
self.WriteLn("\t$(hide) $(ACP) -rpf $< $@")
self.WriteLn()
outputs.append(output)
self.WriteLn("%s = %s" % (variable, " ".join(map(make.QuoteSpaces, outputs))))
self.WriteLn(
"{} = {}".format(variable, " ".join(map(make.QuoteSpaces, outputs)))
)
extra_outputs.append("$(%s)" % variable)
self.WriteLn()
def WriteSourceFlags(self, spec, configs):
"""Write out the flags and include paths used to compile source files for
the current target.
the current target.
Args:
spec, configs: input from gyp.
"""
Args:
spec, configs: input from gyp.
"""
for configname, config in sorted(configs.items()):
extracted_includes = []
@ -554,16 +553,16 @@ class AndroidMkWriter(object):
def WriteSources(self, spec, configs, extra_sources):
"""Write Makefile code for any 'sources' from the gyp input.
These are source files necessary to build the current target.
We need to handle shared_intermediate directory source files as
a special case by copying them to the intermediate directory and
treating them as a generated sources. Otherwise the Android build
rules won't pick them up.
These are source files necessary to build the current target.
We need to handle shared_intermediate directory source files as
a special case by copying them to the intermediate directory and
treating them as a generated sources. Otherwise the Android build
rules won't pick them up.
Args:
spec, configs: input from gyp.
extra_sources: Sources generated from Actions or Rules.
"""
Args:
spec, configs: input from gyp.
extra_sources: Sources generated from Actions or Rules.
"""
sources = filter(make.Compilable, spec.get("sources", []))
generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
extra_sources = filter(make.Compilable, extra_sources)
@ -617,7 +616,7 @@ class AndroidMkWriter(object):
if IsCPPExtension(ext) and ext != local_cpp_extension:
local_file = root + local_cpp_extension
if local_file != source:
self.WriteLn("%s: %s" % (local_file, self.LocalPathify(source)))
self.WriteLn("{}: {}".format(local_file, self.LocalPathify(source)))
self.WriteLn("\tmkdir -p $(@D); cp $< $@")
origin_src_dirs.append(os.path.dirname(source))
final_generated_sources.append(local_file)
@ -640,10 +639,10 @@ class AndroidMkWriter(object):
def ComputeAndroidModule(self, spec):
"""Return the Android module name used for a gyp spec.
We use the complete qualified target name to avoid collisions between
duplicate targets in different directories. We also add a suffix to
distinguish gyp-generated module names.
"""
We use the complete qualified target name to avoid collisions between
duplicate targets in different directories. We also add a suffix to
distinguish gyp-generated module names.
"""
if int(spec.get("android_unmangled_name", 0)):
assert self.type != "shared_library" or self.target.startswith("lib")
@ -662,7 +661,7 @@ class AndroidMkWriter(object):
suffix = "_gyp"
if self.path:
middle = make.StringToMakefileVariable("%s_%s" % (self.path, self.target))
middle = make.StringToMakefileVariable(f"{self.path}_{self.target}")
else:
middle = make.StringToMakefileVariable(self.target)
@ -671,11 +670,11 @@ class AndroidMkWriter(object):
def ComputeOutputParts(self, spec):
"""Return the 'output basename' of a gyp spec, split into filename + ext.
Android libraries must be named the same thing as their module name,
otherwise the linker can't find them, so product_name and so on must be
ignored if we are building a library, and the "lib" prepending is
not done for Android.
"""
Android libraries must be named the same thing as their module name,
otherwise the linker can't find them, so product_name and so on must be
ignored if we are building a library, and the "lib" prepending is
not done for Android.
"""
assert self.type != "loadable_module" # TODO: not supported?
target = spec["target_name"]
@ -711,17 +710,17 @@ class AndroidMkWriter(object):
def ComputeOutputBasename(self, spec):
"""Return the 'output basename' of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'libfoobar.so'
"""
E.g., the loadable module 'foobar' in directory 'baz' will produce
'libfoobar.so'
"""
return "".join(self.ComputeOutputParts(spec))
def ComputeOutput(self, spec):
"""Return the 'output' (full output path) of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'$(obj)/baz/libfoobar.so'
"""
E.g., the loadable module 'foobar' in directory 'baz' will produce
'$(obj)/baz/libfoobar.so'
"""
if self.type == "executable":
# We install host executables into shared_intermediate_dir so they can be
# run by gyp rules that refer to PRODUCT_DIR.
@ -740,7 +739,7 @@ class AndroidMkWriter(object):
% (self.android_class, self.android_module)
)
else:
path = "$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))" % (
path = "$(call intermediates-dir-for,{},{},,,$(GYP_VAR_PREFIX))".format(
self.android_class,
self.android_module,
)
@ -749,14 +748,14 @@ class AndroidMkWriter(object):
return os.path.join(path, self.ComputeOutputBasename(spec))
def NormalizeIncludePaths(self, include_paths):
""" Normalize include_paths.
Convert absolute paths to relative to the Android top directory.
"""Normalize include_paths.
Convert absolute paths to relative to the Android top directory.
Args:
include_paths: A list of unprocessed include paths.
Returns:
A list of normalized include paths.
"""
Args:
include_paths: A list of unprocessed include paths.
Returns:
A list of normalized include paths.
"""
normalized = []
for path in include_paths:
if path[0] == "/":
@ -767,11 +766,11 @@ class AndroidMkWriter(object):
def ExtractIncludesFromCFlags(self, cflags):
"""Extract includes "-I..." out from cflags
Args:
cflags: A list of compiler flags, which may be mixed with "-I.."
Returns:
A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed.
"""
Args:
cflags: A list of compiler flags, which may be mixed with "-I.."
Returns:
A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed.
"""
clean_cflags = []
include_paths = []
for flag in cflags:
@ -785,14 +784,14 @@ class AndroidMkWriter(object):
def FilterLibraries(self, libraries):
"""Filter the 'libraries' key to separate things that shouldn't be ldflags.
Library entries that look like filenames should be converted to android
module names instead of being passed to the linker as flags.
Library entries that look like filenames should be converted to android
module names instead of being passed to the linker as flags.
Args:
libraries: the value of spec.get('libraries')
Returns:
A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
"""
Args:
libraries: the value of spec.get('libraries')
Returns:
A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
"""
static_lib_modules = []
dynamic_lib_modules = []
ldflags = []
@ -823,10 +822,10 @@ class AndroidMkWriter(object):
def ComputeDeps(self, spec):
"""Compute the dependencies of a gyp spec.
Returns a tuple (deps, link_deps), where each is a list of
filenames that will need to be put in front of make for either
building (deps) or linking (link_deps).
"""
Returns a tuple (deps, link_deps), where each is a list of
filenames that will need to be put in front of make for either
building (deps) or linking (link_deps).
"""
deps = []
link_deps = []
if "dependencies" in spec:
@ -846,9 +845,9 @@ class AndroidMkWriter(object):
def WriteTargetFlags(self, spec, configs, link_deps):
"""Write Makefile code to specify the link flags and library dependencies.
spec, configs: input from gyp.
link_deps: link dependency list; see ComputeDeps()
"""
spec, configs: input from gyp.
link_deps: link dependency list; see ComputeDeps()
"""
# Libraries (i.e. -lfoo)
# These must be included even for static libraries as some of them provide
# implicit include paths through the build system.
@ -891,12 +890,12 @@ class AndroidMkWriter(object):
):
"""Write Makefile code to produce the final target of the gyp spec.
spec, configs: input from gyp.
deps, link_deps: dependency lists; see ComputeDeps()
part_of_all: flag indicating this target is part of 'all'
write_alias_target: flag indicating whether to create short aliases for this
target
"""
spec, configs: input from gyp.
deps, link_deps: dependency lists; see ComputeDeps()
part_of_all: flag indicating this target is part of 'all'
write_alias_target: flag indicating whether to create short aliases for this
target
"""
self.WriteLn("### Rules for final target.")
if self.type != "none":
@ -909,7 +908,7 @@ class AndroidMkWriter(object):
if isinstance(v, list):
self.WriteList(v, k)
else:
self.WriteLn("%s := %s" % (k, make.QuoteIfNecessary(v)))
self.WriteLn("{} := {}".format(k, make.QuoteIfNecessary(v)))
self.WriteLn("")
# Add to the set of targets which represent the gyp 'all' target. We use the
@ -928,7 +927,7 @@ class AndroidMkWriter(object):
if self.target != self.android_module and write_alias_target:
self.WriteLn("# Alias gyp target name.")
self.WriteLn(".PHONY: %s" % self.target)
self.WriteLn("%s: %s" % (self.target, self.android_module))
self.WriteLn(f"{self.target}: {self.android_module}")
self.WriteLn("")
# Add the command to trigger build of the target type depending
@ -975,25 +974,25 @@ class AndroidMkWriter(object):
):
"""Write a variable definition that is a list of values.
E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
foo = blaha blahb
but in a pretty-printed style.
"""
E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
foo = blaha blahb
but in a pretty-printed style.
"""
values = ""
if value_list:
value_list = [quoter(prefix + value) for value in value_list]
if local_pathify:
value_list = [self.LocalPathify(value) for value in value_list]
values = " \\\n\t" + " \\\n\t".join(value_list)
self.fp.write("%s :=%s\n\n" % (variable, values))
self.fp.write(f"{variable} :={values}\n\n")
def WriteLn(self, text=""):
self.fp.write(text + "\n")
def LocalPathify(self, path):
"""Convert a subdirectory-relative path into a normalized path which starts
with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
Absolute paths, or paths that contain variables, are just normalized."""
with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
Absolute paths, or paths that contain variables, are just normalized."""
if "$(" in path or os.path.isabs(path):
# path is not a file in the project tree in this case, but calling
# normpath is still important for trimming trailing slashes.
@ -1006,7 +1005,7 @@ class AndroidMkWriter(object):
# so we don't look for a slash.
assert local_path.startswith(
"$(LOCAL_PATH)"
), "Path %s attempts to escape from gyp path %s !)" % (path, self.path)
), f"Path {path} attempts to escape from gyp path {self.path} !)"
return local_path
def ExpandInputRoot(self, template, expansion, dirname):

@ -28,21 +28,15 @@ not be able to find the header file directories described in the generated
CMakeLists.txt file.
"""
from __future__ import print_function
import multiprocessing
import os
import signal
import string
import subprocess
import gyp.common
import gyp.xcode_emulation
try:
# maketrans moved to str in python3.
_maketrans = string.maketrans
except (NameError, AttributeError):
_maketrans = str.maketrans
_maketrans = str.maketrans
generator_default_variables = {
"EXECUTABLE_PREFIX": "",
@ -223,7 +217,7 @@ def WriteVariable(output, variable_name, prepend=None):
output.write("}")
class CMakeTargetType(object):
class CMakeTargetType:
def __init__(self, command, modifier, property_modifier):
self.command = command
self.modifier = modifier
@ -263,7 +257,7 @@ def WriteActions(target_name, actions, extra_sources, extra_deps, path_to_gyp, o
"""
for action in actions:
action_name = StringToCMakeTargetName(action["action_name"])
action_target_name = "%s__%s" % (target_name, action_name)
action_target_name = f"{target_name}__{action_name}"
inputs = action["inputs"]
inputs_name = action_target_name + "__input"
@ -282,7 +276,7 @@ def WriteActions(target_name, actions, extra_sources, extra_deps, path_to_gyp, o
# Build up a list of outputs.
# Collect the output dirs we'll need.
dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
dirs = {dir for dir in (os.path.dirname(o) for o in outputs) if dir}
if int(action.get("process_outputs_as_sources", False)):
extra_sources.extend(zip(cmake_outputs, outputs))
@ -377,7 +371,7 @@ def WriteRules(target_name, rules, extra_sources, extra_deps, path_to_gyp, outpu
# Build up a list of outputs.
# Collect the output dirs we'll need.
dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
dirs = {dir for dir in (os.path.dirname(o) for o in outputs) if dir}
# Create variables for the output, as 'local' variable will be unset.
these_outputs = []
@ -478,7 +472,7 @@ def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
extra_deps.append(copy_name)
return
class Copy(object):
class Copy:
def __init__(self, ext, command):
self.cmake_inputs = []
self.cmake_outputs = []
@ -587,7 +581,7 @@ def CreateCMakeTargetFullName(qualified_target):
return StringToCMakeTargetName(cmake_target_full_name)
class CMakeNamer(object):
class CMakeNamer:
"""Converts Gyp target names into CMake target names.
CMake requires that target names be globally unique. One way to ensure
@ -1285,11 +1279,11 @@ def PerformBuild(data, configurations, params):
os.path.join(generator_dir, output_dir, config_name)
)
arguments = ["cmake", "-G", "Ninja"]
print("Generating [%s]: %s" % (config_name, arguments))
print(f"Generating [{config_name}]: {arguments}")
subprocess.check_call(arguments, cwd=build_dir)
arguments = ["ninja", "-C", build_dir]
print("Building [%s]: %s" % (config_name, arguments))
print(f"Building [{config_name}]: {arguments}")
subprocess.check_call(arguments)

@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import os
import gyp

@ -26,8 +26,6 @@ import gyp.msvs_emulation
import shlex
import xml.etree.cElementTree as ET
PY3 = bytes != str
generator_wants_static_library_dependencies_adjusted = False
generator_default_variables = {}
@ -105,9 +103,7 @@ def GetAllIncludeDirectories(
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
output = proc.communicate()[1]
if PY3:
output = output.decode("utf-8")
output = proc.communicate()[1].decode("utf-8")
# Extract the list of include dirs from the output, which has this format:
# ...
# #include "..." search starts here:
@ -245,9 +241,7 @@ def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler
cpp_proc = subprocess.Popen(
args=command, cwd=".", stdin=subprocess.PIPE, stdout=subprocess.PIPE
)
cpp_output = cpp_proc.communicate()[0]
if PY3:
cpp_output = cpp_output.decode("utf-8")
cpp_output = cpp_proc.communicate()[0].decode("utf-8")
cpp_lines = cpp_output.split("\n")
for cpp_line in cpp_lines:
if not cpp_line.strip():

@ -49,7 +49,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# Use a banner that looks like the stock Python one and like what
# code.interact uses by default, but tack on something to indicate what
# locals are available, and identify gypsh.
banner = "Python %s on %s\nlocals.keys() = %s\ngypsh" % (
banner = "Python {} on {}\nlocals.keys() = {}\ngypsh".format(
sys.version,
sys.platform,
repr(sorted(locals.keys())),

@ -21,7 +21,6 @@
# toplevel Makefile. It may make sense to generate some .mk files on
# the side to keep the files readable.
from __future__ import print_function
import os
import re
@ -108,7 +107,7 @@ def CalculateVariables(default_variables, params):
def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by
gyp)."""
gyp)."""
generator_flags = params.get("generator_flags", {})
android_ndk_version = generator_flags.get("android_ndk_version", None)
# Android NDK requires a strict link order.
@ -615,15 +614,15 @@ def Target(filename):
def EscapeShellArgument(s):
"""Quotes an argument so that it will be interpreted literally by a POSIX
shell. Taken from
http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
"""
shell. Taken from
http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
"""
return "'" + s.replace("'", "'\\''") + "'"
def EscapeMakeVariableExpansion(s):
"""Make has its own variable expansion syntax using $. We must escape it for
string to be interpreted literally."""
string to be interpreted literally."""
return s.replace("$", "$$")
@ -638,7 +637,7 @@ def EscapeCppDefine(s):
def QuoteIfNecessary(string):
"""TODO: Should this ideally be replaced with one or more of the above
functions?"""
functions?"""
if '"' in string:
string = '"' + string.replace('"', '\\"') + '"'
return string
@ -679,11 +678,11 @@ target_outputs = {}
target_link_deps = {}
class MakefileWriter(object):
class MakefileWriter:
"""MakefileWriter packages up the writing of one target-specific foobar.mk.
Its only real entry point is Write(), and is mostly used for namespacing.
"""
Its only real entry point is Write(), and is mostly used for namespacing.
"""
def __init__(self, generator_flags, flavor):
self.generator_flags = generator_flags
@ -737,14 +736,14 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
):
"""The main entry point: writes a .mk file for a single target.
Arguments:
qualified_target: target we're generating
base_path: path relative to source root we're building in, used to resolve
target-relative paths
output_filename: output .mk file name to write
spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all'
"""
Arguments:
qualified_target: target we're generating
base_path: path relative to source root we're building in, used to resolve
target-relative paths
output_filename: output .mk file name to write
spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all'
"""
gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, "w")
@ -844,7 +843,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
sources = [x for x in all_sources if Compilable(x)]
if sources:
self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
extensions = set([os.path.splitext(s)[1] for s in sources])
extensions = {os.path.splitext(s)[1] for s in sources}
for ext in extensions:
if ext in self.suffix_rules_srcdir:
self.WriteLn(self.suffix_rules_srcdir[ext])
@ -888,15 +887,15 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
"""Write a "sub-project" Makefile.
This is a small, wrapper Makefile that calls the top-level Makefile to build
the targets from a single gyp file (i.e. a sub-project).
This is a small, wrapper Makefile that calls the top-level Makefile to build
the targets from a single gyp file (i.e. a sub-project).
Arguments:
output_filename: sub-project Makefile name to write
makefile_path: path to the top-level Makefile
targets: list of "all" targets for this sub-project
build_dir: build output directory, relative to the sub-project
"""
Arguments:
output_filename: sub-project Makefile name to write
makefile_path: path to the top-level Makefile
targets: list of "all" targets for this sub-project
build_dir: build output directory, relative to the sub-project
"""
gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, "w")
self.fp.write(header)
@ -910,7 +909,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.WriteLn("all:")
if makefile_path:
makefile_path = " -C " + makefile_path
self.WriteLn("\t$(MAKE)%s %s" % (makefile_path, " ".join(targets)))
self.WriteLn("\t$(MAKE){} {}".format(makefile_path, " ".join(targets)))
self.fp.close()
def WriteActions(
@ -923,17 +922,17 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
):
"""Write Makefile code for any 'actions' from the gyp input.
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
actions (used to make other pieces dependent on these
actions)
part_of_all: flag indicating this target is part of 'all'
"""
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
actions (used to make other pieces dependent on these
actions)
part_of_all: flag indicating this target is part of 'all'
"""
env = self.GetSortedXcodeEnv()
for action in actions:
name = StringToMakefileVariable(
"%s_%s" % (self.qualified_target, action["action_name"])
"{}_{}".format(self.qualified_target, action["action_name"])
)
self.WriteLn('### Rules for action "%s":' % action["action_name"])
inputs = action["inputs"]
@ -960,9 +959,11 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
]
command = gyp.common.EncodePOSIXShellList(action_commands)
if "message" in action:
self.WriteLn("quiet_cmd_%s = ACTION %s $@" % (name, action["message"]))
self.WriteLn(
"quiet_cmd_{} = ACTION {} $@".format(name, action["message"])
)
else:
self.WriteLn("quiet_cmd_%s = ACTION %s $@" % (name, name))
self.WriteLn(f"quiet_cmd_{name} = ACTION {name} $@")
if len(dirs) > 0:
command = "mkdir -p %s" % " ".join(dirs) + "; " + command
@ -1022,7 +1023,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# Stuff the outputs in a variable so we can refer to them later.
outputs_variable = "action_%s_outputs" % name
self.WriteLn("%s := %s" % (outputs_variable, " ".join(outputs)))
self.WriteLn("{} := {}".format(outputs_variable, " ".join(outputs)))
extra_outputs.append("$(%s)" % outputs_variable)
self.WriteLn()
@ -1038,16 +1039,16 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
):
"""Write Makefile code for any 'rules' from the gyp input.
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
rules (used to make other pieces dependent on these rules)
part_of_all: flag indicating this target is part of 'all'
"""
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
rules (used to make other pieces dependent on these rules)
part_of_all: flag indicating this target is part of 'all'
"""
env = self.GetSortedXcodeEnv()
for rule in rules:
name = StringToMakefileVariable(
"%s_%s" % (self.qualified_target, rule["rule_name"])
"{}_{}".format(self.qualified_target, rule["rule_name"])
)
count = 0
self.WriteLn("### Generated for rule %s:" % name)
@ -1175,10 +1176,10 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def WriteCopies(self, copies, extra_outputs, part_of_all):
"""Write Makefile code for any 'copies' from the gyp input.
extra_outputs: a list that will be filled in with any outputs of this action
(used to make other pieces dependent on this action)
part_of_all: flag indicating this target is part of 'all'
"""
extra_outputs: a list that will be filled in with any outputs of this action
(used to make other pieces dependent on this action)
part_of_all: flag indicating this target is part of 'all'
"""
self.WriteLn("### Generated for copy rule.")
variable = StringToMakefileVariable(self.qualified_target + "_copies")
@ -1206,7 +1207,9 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
path = gyp.xcode_emulation.ExpandEnvVars(path, env)
self.WriteDoCmd([output], [path], "copy", part_of_all)
outputs.append(output)
self.WriteLn("%s = %s" % (variable, " ".join(QuoteSpaces(o) for o in outputs)))
self.WriteLn(
"{} = {}".format(variable, " ".join(QuoteSpaces(o) for o in outputs))
)
extra_outputs.append("$(%s)" % variable)
self.WriteLn()
@ -1278,15 +1281,15 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
precompiled_header,
):
"""Write Makefile code for any 'sources' from the gyp input.
These are source files necessary to build the current target.
These are source files necessary to build the current target.
configs, deps, sources: input from gyp.
extra_outputs: a list of extra outputs this action should be dependent on;
used to serialize action/rules before compilation
extra_link_deps: a list that will be filled in with any outputs of
compilation (to be used in link lines)
part_of_all: flag indicating this target is part of 'all'
"""
configs, deps, sources: input from gyp.
extra_outputs: a list of extra outputs this action should be dependent on;
used to serialize action/rules before compilation
extra_link_deps: a list that will be filled in with any outputs of
compilation (to be used in link lines)
part_of_all: flag indicating this target is part of 'all'
"""
# Write configuration-specific variables for CFLAGS, etc.
for configname in sorted(configs.keys()):
@ -1300,8 +1303,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
if self.flavor == "mac":
cflags = self.xcode_settings.GetCflags(
configname,
arch=config.get('xcode_configuration_platform')
configname, arch=config.get("xcode_configuration_platform")
)
cflags_c = self.xcode_settings.GetCflagsC(configname)
cflags_cc = self.xcode_settings.GetCflagsCC(configname)
@ -1364,7 +1366,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
if pchdeps:
self.WriteLn("# Dependencies from obj files to their precompiled headers")
for source, obj, gch in pchdeps:
self.WriteLn("%s: %s" % (obj, gch))
self.WriteLn(f"{obj}: {gch}")
self.WriteLn("# End precompiled header dependencies")
if objs:
@ -1436,12 +1438,12 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
"mm": "GYP_PCH_OBJCXXFLAGS",
}[lang]
self.WriteLn(
"%s: %s := %s " % (gch, var_name, lang_flag) + "$(DEFS_$(BUILDTYPE)) "
f"{gch}: {var_name} := {lang_flag} " + "$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
"$(CFLAGS_$(BUILDTYPE)) " + extra_flags
)
self.WriteLn("%s: %s FORCE_DO_CMD" % (gch, input))
self.WriteLn(f"{gch}: {input} FORCE_DO_CMD")
self.WriteLn("\t@$(call do_cmd,pch_%s,1)" % lang)
self.WriteLn("")
assert " " not in gch, "Spaces in gch filenames not supported (%s)" % gch
@ -1451,9 +1453,9 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def ComputeOutputBasename(self, spec):
"""Return the 'output basename' of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'libfoobar.so'
"""
E.g., the loadable module 'foobar' in directory 'baz' will produce
'libfoobar.so'
"""
assert not self.is_mac_bundle
if self.flavor == "mac" and self.type in (
@ -1510,9 +1512,9 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def ComputeOutput(self, spec):
"""Return the 'output' (full output path) of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'$(obj)/baz/libfoobar.so'
"""
E.g., the loadable module 'foobar' in directory 'baz' will produce
'$(obj)/baz/libfoobar.so'
"""
assert not self.is_mac_bundle
path = os.path.join("$(obj)." + self.toolset, self.path)
@ -1535,10 +1537,10 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def ComputeDeps(self, spec):
"""Compute the dependencies of a gyp spec.
Returns a tuple (deps, link_deps), where each is a list of
filenames that will need to be put in front of make for either
building (deps) or linking (link_deps).
"""
Returns a tuple (deps, link_deps), where each is a list of
filenames that will need to be put in front of make for either
building (deps) or linking (link_deps).
"""
deps = []
link_deps = []
if "dependencies" in spec:
@ -1571,11 +1573,11 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
):
"""Write Makefile code to produce the final target of the gyp spec.
spec, configs: input from gyp.
deps, link_deps: dependency lists; see ComputeDeps()
extra_outputs: any extra outputs that our target should depend on
part_of_all: flag indicating this target is part of 'all'
"""
spec, configs: input from gyp.
deps, link_deps: dependency lists; see ComputeDeps()
extra_outputs: any extra outputs that our target should depend on
part_of_all: flag indicating this target is part of 'all'
"""
self.WriteLn("### Rules for final target.")
@ -1597,7 +1599,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
configname,
generator_default_variables["PRODUCT_DIR"],
lambda p: Sourceify(self.Absolutify(p)),
arch=config.get('xcode_configuration_platform')
arch=config.get("xcode_configuration_platform"),
)
# TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
@ -1860,7 +1862,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
and self.toolset == "target"
):
# On mac, products are created in install_path immediately.
assert install_path == self.output, "%s != %s" % (
assert install_path == self.output, "{} != {}".format(
install_path,
self.output,
)
@ -1897,24 +1899,24 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def WriteList(self, value_list, variable=None, prefix="", quoter=QuoteIfNecessary):
"""Write a variable definition that is a list of values.
E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
foo = blaha blahb
but in a pretty-printed style.
"""
E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
foo = blaha blahb
but in a pretty-printed style.
"""
values = ""
if value_list:
value_list = [quoter(prefix + value) for value in value_list]
values = " \\\n\t" + " \\\n\t".join(value_list)
self.fp.write("%s :=%s\n\n" % (variable, values))
self.fp.write(f"{variable} :={values}\n\n")
def WriteDoCmd(
self, outputs, inputs, command, part_of_all, comment=None, postbuilds=False
):
"""Write a Makefile rule that uses do_cmd.
This makes the outputs dependent on the command line that was run,
as well as support the V= make command line flag.
"""
This makes the outputs dependent on the command line that was run,
as well as support the V= make command line flag.
"""
suffix = ""
if postbuilds:
assert "," not in command
@ -1922,7 +1924,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.WriteMakeRule(
outputs,
inputs,
actions=["$(call do_cmd,%s%s)" % (command, suffix)],
actions=[f"$(call do_cmd,{command}{suffix})"],
comment=comment,
command=command,
force=True,
@ -1947,18 +1949,18 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
):
"""Write a Makefile rule, with some extra tricks.
outputs: a list of outputs for the rule (note: this is not directly
supported by make; see comments below)
inputs: a list of inputs for the rule
actions: a list of shell commands to run for the rule
comment: a comment to put in the Makefile above the rule (also useful
for making this Python script's code self-documenting)
order_only: if true, makes the dependency order-only
force: if true, include FORCE_DO_CMD as an order-only dep
phony: if true, the rule does not actually generate the named output, the
output is just a name to run the rule
command: (optional) command name to generate unambiguous labels
"""
outputs: a list of outputs for the rule (note: this is not directly
supported by make; see comments below)
inputs: a list of inputs for the rule
actions: a list of shell commands to run for the rule
comment: a comment to put in the Makefile above the rule (also useful
for making this Python script's code self-documenting)
order_only: if true, makes the dependency order-only
force: if true, include FORCE_DO_CMD as an order-only dep
phony: if true, the rule does not actually generate the named output, the
output is just a name to run the rule
command: (optional) command name to generate unambiguous labels
"""
outputs = [QuoteSpaces(o) for o in outputs]
inputs = [QuoteSpaces(i) for i in inputs]
@ -1974,11 +1976,11 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# Order only rule: Just write a simple rule.
# TODO(evanm): just make order_only a list of deps instead of this hack.
self.WriteLn(
"%s: | %s%s" % (" ".join(outputs), " ".join(inputs), force_append)
"{}: | {}{}".format(" ".join(outputs), " ".join(inputs), force_append)
)
elif len(outputs) == 1:
# Regular rule, one output: Just write a simple rule.
self.WriteLn("%s: %s%s" % (outputs[0], " ".join(inputs), force_append))
self.WriteLn("{}: {}{}".format(outputs[0], " ".join(inputs), force_append))
else:
# Regular rule, more than one output: Multiple outputs are tricky in
# make. We will write three rules:
@ -1994,10 +1996,12 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
(command or self.target).encode("utf-8")
).hexdigest()
intermediate = "%s.intermediate" % cmddigest
self.WriteLn("%s: %s" % (" ".join(outputs), intermediate))
self.WriteLn("{}: {}".format(" ".join(outputs), intermediate))
self.WriteLn("\t%s" % "@:")
self.WriteLn("%s: %s" % (".INTERMEDIATE", intermediate))
self.WriteLn("%s: %s%s" % (intermediate, " ".join(inputs), force_append))
self.WriteLn("{}: {}".format(".INTERMEDIATE", intermediate))
self.WriteLn(
"{}: {}{}".format(intermediate, " ".join(inputs), force_append)
)
actions.insert(0, "$(call do_cmd,touch)")
if actions:
@ -2008,16 +2012,16 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
"""Write a set of LOCAL_XXX definitions for Android NDK.
These variable definitions will be used by Android NDK but do nothing for
non-Android applications.
These variable definitions will be used by Android NDK but do nothing for
non-Android applications.
Arguments:
module_name: Android NDK module name, which must be unique among all
module names.
all_sources: A list of source files (will be filtered by Compilable).
link_deps: A list of link dependencies, which must be sorted in
the order from dependencies to dependents.
"""
Arguments:
module_name: Android NDK module name, which must be unique among all
module names.
all_sources: A list of source files (will be filtered by Compilable).
link_deps: A list of link dependencies, which must be sorted in
the order from dependencies to dependents.
"""
if self.type not in ("executable", "shared_library", "static_library"):
return
@ -2129,14 +2133,14 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# export foo := a\ b
# it does not -- the backslash is written to the env as literal character.
# So don't escape spaces in |env[k]|.
self.WriteLn("%s: export %s := %s" % (QuoteSpaces(target), k, v))
self.WriteLn("{}: export {} := {}".format(QuoteSpaces(target), k, v))
def Objectify(self, path):
"""Convert a path to its output directory form."""
if "$(" in path:
path = path.replace("$(obj)/", "$(obj).%s/$(TARGET)/" % self.toolset)
if "$(obj)" not in path:
path = "$(obj).%s/$(TARGET)/%s" % (self.toolset, path)
path = f"$(obj).{self.toolset}/$(TARGET)/{path}"
return path
def Pchify(self, path, lang):
@ -2144,14 +2148,14 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
path = self.Absolutify(path)
if "$(" in path:
path = path.replace(
"$(obj)/", "$(obj).%s/$(TARGET)/pch-%s" % (self.toolset, lang)
"$(obj)/", f"$(obj).{self.toolset}/$(TARGET)/pch-{lang}"
)
return path
return "$(obj).%s/$(TARGET)/pch-%s/%s" % (self.toolset, lang, path)
return f"$(obj).{self.toolset}/$(TARGET)/pch-{lang}/{path}"
def Absolutify(self, path):
"""Convert a subdirectory-relative path into a base-relative path.
Skips over paths that contain variables."""
Skips over paths that contain variables."""
if "$(" in path:
# Don't call normpath in this case, as it might collapse the
# path too aggressively if it features '..'. However it's still
@ -2219,7 +2223,7 @@ def PerformBuild(data, configurations, params):
if options.toplevel_dir and options.toplevel_dir != ".":
arguments += "-C", options.toplevel_dir
arguments.append("BUILDTYPE=" + config)
print("Building [%s]: %s" % (config, arguments))
print(f"Building [{config}]: {arguments}")
subprocess.check_call(arguments)
@ -2253,7 +2257,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# away when we add verification that all targets have the
# necessary configurations.
default_configuration = None
toolsets = set([target_dicts[target]["toolset"] for target in target_list])
toolsets = {target_dicts[target]["toolset"] for target in target_list}
for target in target_list:
spec = target_dicts[target]
if spec["default_configuration"] != "Default":
@ -2328,7 +2332,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
{
"copy_archive_args": copy_archive_arguments,
"flock": "./gyp-flock-tool flock",
"flock_index": 2
"flock_index": 2,
}
)
elif flavor == "freebsd":
@ -2362,7 +2366,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
value = "$(abspath %s)" % value
wrapper = wrappers.get(key)
if wrapper:
value = "%s %s" % (wrapper, value)
value = f"{wrapper} {value}"
del wrappers[key]
if key in ("CC", "CC.host", "CXX", "CXX.host"):
make_global_settings += (
@ -2372,10 +2376,10 @@ def GenerateOutput(target_list, target_dicts, data, params):
env_key = key.replace(".", "_") # CC.host -> CC_host
if env_key in os.environ:
value = os.environ[env_key]
make_global_settings += " %s = %s\n" % (key, value)
make_global_settings += f" {key} = {value}\n"
make_global_settings += "endif\n"
else:
make_global_settings += "%s ?= %s\n" % (key, value)
make_global_settings += f"{key} ?= {value}\n"
# TODO(ukai): define cmd when only wrapper is specified in
# make_global_settings.
@ -2413,8 +2417,8 @@ def GenerateOutput(target_list, target_dicts, data, params):
this_make_global_settings = data[build_file].get("make_global_settings", [])
assert make_global_settings_array == this_make_global_settings, (
"make_global_settings needs to be the same for all targets. %s vs. %s"
% (this_make_global_settings, make_global_settings)
"make_global_settings needs to be the same for all targets "
f"{this_make_global_settings} vs. {make_global_settings}"
)
build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
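
The WriteMakeRule hunk above works around make's weak support for rules with several outputs by funnelling them through a hashed `.intermediate` target. A minimal sketch of that three-rule shape, with made-up output and input names (an illustration of the pattern, not the generator's code):

```python
import hashlib

def write_multi_output_rule(outputs, inputs, command_name):
    """Sketch of the make-generator trick for rules with several outputs:
    route everything through a single .INTERMEDIATE target so the command
    runs once, then have every real output depend on that intermediate."""
    digest = hashlib.md5(command_name.encode("utf-8")).hexdigest()
    intermediate = f"{digest}.intermediate"
    lines = [
        # All real outputs depend on the intermediate and do nothing themselves.
        "{}: {}".format(" ".join(outputs), intermediate),
        "\t@:",
        # Mark the intermediate so make deletes it and does not rebuild needlessly.
        f".INTERMEDIATE: {intermediate}",
        # The intermediate is what actually depends on the inputs and runs the action.
        "{}: {}".format(intermediate, " ".join(inputs)),
        "\t$(call do_cmd,touch)",
    ]
    return "\n".join(lines)

print(write_multi_output_rule(["gen/a.h", "gen/a.cc"], ["a.idl"], "gen_a"))
```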

View file

@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import ntpath
import os
@ -26,8 +25,6 @@ import gyp.MSVSVersion as MSVSVersion
from gyp.common import GypError
from gyp.common import OrderedSet
PY3 = bytes != str
# Regular expression for validating Visual Studio GUIDs. If the GUID
# contains lowercase hex letters, MSVS will be fine. However,
@ -120,9 +117,7 @@ def _GetDomainAndUserName():
call = subprocess.Popen(
["net", "config", "Workstation"], stdout=subprocess.PIPE
)
config = call.communicate()[0]
if PY3:
config = config.decode("utf-8")
config = call.communicate()[0].decode("utf-8")
username_re = re.compile(r"^User name\s+(\S+)", re.MULTILINE)
username_match = username_re.search(config)
if username_match:
@ -319,7 +314,7 @@ def _ConfigBaseName(config_name, platform_name):
def _ConfigFullName(config_name, config_data):
platform_name = _ConfigPlatform(config_data)
return "%s|%s" % (_ConfigBaseName(config_name, platform_name), platform_name)
return "{}|{}".format(_ConfigBaseName(config_name, platform_name), platform_name)
def _ConfigWindowsTargetPlatformVersion(config_data, version):
@ -614,7 +609,7 @@ def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
spec: the project dict
options: global generator options
"""
rules_filename = "%s%s.rules" % (spec["target_name"], options.suffix)
rules_filename = "{}{}.rules".format(spec["target_name"], options.suffix)
rules_file = MSVSToolFile.Writer(
os.path.join(output_dir, rules_filename), spec["target_name"]
)
@ -660,7 +655,7 @@ def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to
options: global generator options
actions_to_add: The list of actions we will add to.
"""
filename = "%s_rules%s.mk" % (spec["target_name"], options.suffix)
filename = "{}_rules{}.mk".format(spec["target_name"], options.suffix)
mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
# Find cygwin style versions of some paths.
mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
@ -703,7 +698,7 @@ def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to
cmd = ['"%s"' % i for i in cmd]
cmd = " ".join(cmd)
# Add it to the makefile.
mk_file.write("%s: %s\n" % (" ".join(outputs), " ".join(inputs)))
mk_file.write("{}: {}\n".format(" ".join(outputs), " ".join(inputs)))
mk_file.write("\t%s\n\n" % cmd)
# Close up the file.
mk_file.close()
@ -1570,7 +1565,7 @@ def _AdjustSourcesAndConvertToFilterHierarchy(
if version.UsesVcxproj():
while (
all([isinstance(s, MSVSProject.Filter) for s in sources])
and len(set([s.name for s in sources])) == 1
and len({s.name for s in sources}) == 1
):
assert all([len(s.contents) == 1 for s in sources])
sources = [s.contents[0] for s in sources]
@ -1776,8 +1771,8 @@ def _GetCopies(spec):
base_dir = posixpath.split(src_bare)[0]
outer_dir = posixpath.split(src_bare)[1]
fixed_dst = _FixPath(dst)
full_dst = '"%s\\%s\\"' % (fixed_dst, outer_dir)
cmd = 'mkdir %s 2>nul & cd "%s" && xcopy /e /f /y "%s" %s' % (
full_dst = f'"{fixed_dst}\\{outer_dir}\\"'
cmd = 'mkdir {} 2>nul & cd "{}" && xcopy /e /f /y "{}" {}'.format(
full_dst,
_FixPath(base_dir),
outer_dir,
@ -1788,17 +1783,17 @@ def _GetCopies(spec):
[src],
["dummy_copies", dst],
cmd,
"Copying %s to %s" % (src, fixed_dst),
f"Copying {src} to {fixed_dst}",
)
)
else:
fix_dst = _FixPath(cpy["destination"])
cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
cmd = 'mkdir "{}" 2>nul & set ERRORLEVEL=0 & copy /Y "{}" "{}"'.format(
fix_dst,
_FixPath(src),
_FixPath(dst),
)
copies.append(([src], [dst], cmd, "Copying %s to %s" % (src, fix_dst)))
copies.append(([src], [dst], cmd, f"Copying {src} to {fix_dst}"))
return copies
@ -1898,12 +1893,12 @@ def _GetPlatformOverridesOfProject(spec):
for config_name, c in spec["configurations"].items():
config_fullname = _ConfigFullName(config_name, c)
platform = c.get("msvs_target_platform", _ConfigPlatform(c))
fixed_config_fullname = "%s|%s" % (
fixed_config_fullname = "{}|{}".format(
_ConfigBaseName(config_name, _ConfigPlatform(c)),
platform,
)
if spec["toolset"] == "host" and generator_supports_multiple_toolsets:
fixed_config_fullname = "%s|x64" % (config_name,)
fixed_config_fullname = f"{config_name}|x64"
config_platform_overrides[config_fullname] = fixed_config_fullname
return config_platform_overrides
@ -2056,7 +2051,7 @@ def PerformBuild(data, configurations, params):
for config in configurations:
arguments = [devenv, sln_path, "/Build", config]
print("Building [%s]: %s" % (config, arguments))
print(f"Building [{config}]: {arguments}")
subprocess.check_call(arguments)
@ -2242,7 +2237,7 @@ def _AppendFiltersForMSBuild(
if not parent_filter_name:
filter_name = source.name
else:
filter_name = "%s\\%s" % (parent_filter_name, source.name)
filter_name = f"{parent_filter_name}\\{source.name}"
# Add the filter to the group.
filter_group.append(
[
@ -2370,7 +2365,7 @@ def _GenerateRulesForMSBuild(
_AdjustSourcesForRules(rules, sources, excluded_sources, True)
class MSBuildRule(object):
class MSBuildRule:
"""Used to store information used to generate an MSBuild rule.
Attributes:
@ -2569,7 +2564,7 @@ def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
"Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
"'true'" % (rule.tlog, rule.tlog),
"File": "$(IntDir)$(ProjectName).read.1.tlog",
"Lines": "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog),
"Lines": f"^%({rule.tlog}.Source);%({rule.tlog}.Inputs)",
},
]
command_and_input_section = [
@ -2915,7 +2910,7 @@ def _GetMSBuildProjectConfigurations(configurations, spec):
group = ["ItemGroup", {"Label": "ProjectConfigurations"}]
for (name, settings) in sorted(configurations.items()):
configuration, platform = _GetConfigurationAndPlatform(name, settings, spec)
designation = "%s|%s" % (configuration, platform)
designation = f"{configuration}|{platform}"
group.append(
[
"ProjectConfiguration",
@ -3280,13 +3275,11 @@ def _GetMSBuildPropertyGroup(spec, label, properties):
# Self references are ignored. Self reference is used in a few places to
# append to the default value. I.e. PATH=$(PATH);other_path
edges.update(
set(
[
{
v
for v in MSVS_VARIABLE_REFERENCE.findall(value)
if v in properties and v != node
]
)
}
)
return edges
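
The last hunk above turns `set([...])` into a set comprehension when collecting the `$(Var)` references of an MSBuild property. A small self-contained sketch of the same filtering; the regex and the property values are assumptions for the example (the real `MSVS_VARIABLE_REFERENCE` pattern is defined elsewhere in msvs.py):

```python
import re

# Hypothetical stand-in for gyp's MSVS_VARIABLE_REFERENCE.
VARIABLE_REFERENCE = re.compile(r"\$\(([A-Za-z_][A-Za-z0-9_]*)\)")

def dependent_properties(value, properties, node):
    """Collect the properties that |value| refers to via $(Name), ignoring
    self references such as PATH=$(PATH);other_path -- the same filtering the
    diff above rewrites as a set comprehension."""
    return {
        v
        for v in VARIABLE_REFERENCE.findall(value)
        if v in properties and v != node
    }

props = {"PATH": "$(PATH);C:\\tools", "OutDir": "$(SolutionDir)out"}
print(dependent_properties(props["PATH"], props, "PATH"))     # set(): self reference ignored
print(dependent_properties("$(PATH)\\bin", props, "OutDir"))  # {'PATH'}
```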

View file

@ -8,10 +8,7 @@
import gyp.generator.msvs as msvs
import unittest
try:
from StringIO import StringIO # Python 2
except ImportError:
from io import StringIO # Python 3
from io import StringIO
class TestSequenceFunctions(unittest.TestCase):
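
With Python 2 gone, the test module imports `StringIO` from `io` directly instead of trying `StringIO.StringIO` first. A generic (not gyp-specific) smoke test showing that import in use:

```python
import unittest
from io import StringIO  # Python 3 only; the Python 2 fallback import is gone.


class StringIOSmokeTest(unittest.TestCase):
    def test_write_and_read_back(self):
        buf = StringIO()
        buf.write("hello gyp")
        self.assertEqual(buf.getvalue(), "hello gyp")


if __name__ == "__main__":
    unittest.main()
```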

View file

@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import collections
import copy
@ -20,10 +19,7 @@ import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
from io import StringIO
from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax
@ -76,7 +72,7 @@ def StripPrefix(arg, prefix):
def QuoteShellArgument(arg, flavor):
"""Quote a string such that it will be interpreted as a single argument
by the shell."""
by the shell."""
# Rather than attempting to enumerate the bad shell characters, just
# allow common OK ones and quote anything else.
if re.match(r"^[a-zA-Z0-9_=.\\/-]+$", arg):
@ -88,7 +84,7 @@ def QuoteShellArgument(arg, flavor):
def Define(d, flavor):
"""Takes a preprocessor define and returns a -D parameter that's ninja- and
shell-escaped."""
shell-escaped."""
if flavor == "win":
# cl.exe replaces literal # characters with = in preprocessor definitions for
# some reason. Octal-encode to work around that.
@ -99,32 +95,32 @@ def Define(d, flavor):
def AddArch(output, arch):
"""Adds an arch string to an output path."""
output, extension = os.path.splitext(output)
return "%s.%s%s" % (output, arch, extension)
return f"{output}.{arch}{extension}"
class Target(object):
class Target:
"""Target represents the paths used within a single gyp target.
Conceptually, building a single target A is a series of steps:
Conceptually, building a single target A is a series of steps:
1) actions/rules/copies generates source/resources/etc.
2) compiles generates .o files
3) link generates a binary (library/executable)
4) bundle merges the above in a mac bundle
1) actions/rules/copies generates source/resources/etc.
2) compiles generates .o files
3) link generates a binary (library/executable)
4) bundle merges the above in a mac bundle
(Any of these steps can be optional.)
(Any of these steps can be optional.)
From a build ordering perspective, a dependent target B could just
depend on the last output of this series of steps.
From a build ordering perspective, a dependent target B could just
depend on the last output of this series of steps.
But some dependent commands sometimes need to reach inside the box.
For example, when linking B it needs to get the path to the static
library generated by A.
But some dependent commands sometimes need to reach inside the box.
For example, when linking B it needs to get the path to the static
library generated by A.
This object stores those paths. To keep things simple, member
variables only store concrete paths to single files, while methods
compute derived values like "the last output of the target".
"""
This object stores those paths. To keep things simple, member
variables only store concrete paths to single files, while methods
compute derived values like "the last output of the target".
"""
def __init__(self, type):
# Gyp type ("static_library", etc.) of this target.
@ -163,7 +159,7 @@ class Target(object):
def UsesToc(self, flavor):
"""Return true if the target should produce a restat rule based on a TOC
file."""
file."""
# For bundles, the .TOC should be produced for the binary, not for
# FinalOutput(). But the naive approach would put the TOC file into the
# bundle, so don't do this for bundles for now.
@ -173,19 +169,19 @@ class Target(object):
def PreActionInput(self, flavor):
"""Return the path, if any, that should be used as a dependency of
any dependent action step."""
any dependent action step."""
if self.UsesToc(flavor):
return self.FinalOutput() + ".TOC"
return self.FinalOutput() or self.preaction_stamp
def PreCompileInput(self):
"""Return the path, if any, that should be used as a dependency of
any dependent compile step."""
any dependent compile step."""
return self.actions_stamp or self.precompile_stamp
def FinalOutput(self):
"""Return the last output of the target, which depends on all prior
steps."""
steps."""
return self.bundle or self.binary or self.actions_stamp
@ -214,7 +210,7 @@ class Target(object):
# to the input file name as well as the output target name.
class NinjaWriter(object):
class NinjaWriter:
def __init__(
self,
hash_for_rules,
@ -228,11 +224,11 @@ class NinjaWriter(object):
toplevel_dir=None,
):
"""
base_dir: path from source root to directory containing this gyp file,
by gyp semantics, all input paths are relative to this
build_dir: path from source root to build output
toplevel_dir: path to the toplevel directory
"""
base_dir: path from source root to directory containing this gyp file,
by gyp semantics, all input paths are relative to this
build_dir: path from source root to build output
toplevel_dir: path to the toplevel directory
"""
self.hash_for_rules = hash_for_rules
self.target_outputs = target_outputs
@ -263,10 +259,10 @@ class NinjaWriter(object):
def ExpandSpecial(self, path, product_dir=None):
"""Expand specials like $!PRODUCT_DIR in |path|.
If |product_dir| is None, assumes the cwd is already the product
dir. Otherwise, |product_dir| is the relative path to the product
dir.
"""
If |product_dir| is None, assumes the cwd is already the product
dir. Otherwise, |product_dir| is the relative path to the product
dir.
"""
PRODUCT_DIR = "$!PRODUCT_DIR"
if PRODUCT_DIR in path:
@ -303,9 +299,9 @@ class NinjaWriter(object):
def GypPathToNinja(self, path, env=None):
"""Translate a gyp path to a ninja path, optionally expanding environment
variable references in |path| with |env|.
variable references in |path| with |env|.
See the above discourse on path conversions."""
See the above discourse on path conversions."""
if env:
if self.flavor == "mac":
path = gyp.xcode_emulation.ExpandEnvVars(path, env)
@ -324,11 +320,11 @@ class NinjaWriter(object):
def GypPathToUniqueOutput(self, path, qualified=True):
"""Translate a gyp path to a ninja path for writing output.
If qualified is True, qualify the resulting filename with the name
of the target. This is necessary when e.g. compiling the same
path twice for two separate output targets.
If qualified is True, qualify the resulting filename with the name
of the target. This is necessary when e.g. compiling the same
path twice for two separate output targets.
See the above discourse on path conversions."""
See the above discourse on path conversions."""
path = self.ExpandSpecial(path)
assert not path.startswith("$"), path
@ -361,9 +357,9 @@ class NinjaWriter(object):
def WriteCollapsedDependencies(self, name, targets, order_only=None):
"""Given a list of targets, return a path for a single file
representing the result of building all the targets or None.
representing the result of building all the targets or None.
Uses a stamp file if necessary."""
Uses a stamp file if necessary."""
assert targets == [item for item in targets if item], targets
if len(targets) == 0:
@ -377,14 +373,14 @@ class NinjaWriter(object):
def _SubninjaNameForArch(self, arch):
output_file_base = os.path.splitext(self.output_file_name)[0]
return "%s.%s.ninja" % (output_file_base, arch)
return f"{output_file_base}.{arch}.ninja"
def WriteSpec(self, spec, config_name, generator_flags):
"""The main entry point for NinjaWriter: write the build rules for a spec.
Returns a Target object, which represents the output paths for this spec.
Returns None if there are no outputs (e.g. a settings-only 'none' type
target)."""
Returns a Target object, which represents the output paths for this spec.
Returns None if there are no outputs (e.g. a settings-only 'none' type
target)."""
self.config_name = config_name
self.name = spec["target_name"]
@ -418,20 +414,17 @@ class NinjaWriter(object):
if self.flavor == "mac":
self.archs = self.xcode_settings.GetActiveArchs(config_name)
if len(self.archs) > 1:
self.arch_subninjas = dict(
(
arch,
ninja_syntax.Writer(
OpenOutput(
os.path.join(
self.toplevel_build, self._SubninjaNameForArch(arch)
),
"w",
)
),
self.arch_subninjas = {
arch: ninja_syntax.Writer(
OpenOutput(
os.path.join(
self.toplevel_build, self._SubninjaNameForArch(arch)
),
"w",
)
)
for arch in self.archs
)
}
# Compute predepends for all rules.
# actions_depends is the dependencies this target depends on before running
@ -558,7 +551,7 @@ class NinjaWriter(object):
def _WinIdlRule(self, source, prebuild, outputs):
"""Handle the implicit VS .idl rule for one source file. Fills |outputs|
with files that are generated."""
with files that are generated."""
outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
source, self.config_name
)
@ -595,7 +588,7 @@ class NinjaWriter(object):
self, spec, extra_sources, prebuild, mac_bundle_depends
):
"""Write out the Actions, Rules, and Copies steps. Return a path
representing the outputs of these steps."""
representing the outputs of these steps."""
outputs = []
if self.is_mac_bundle:
mac_bundle_resources = spec.get("mac_bundle_resources", [])[:]
@ -638,16 +631,16 @@ class NinjaWriter(object):
def GenerateDescription(self, verb, message, fallback):
"""Generate and return a description of a build step.
|verb| is the short summary, e.g. ACTION or RULE.
|message| is a hand-written description, or None if not available.
|fallback| is the gyp-level name of the step, usable as a fallback.
"""
|verb| is the short summary, e.g. ACTION or RULE.
|message| is a hand-written description, or None if not available.
|fallback| is the gyp-level name of the step, usable as a fallback.
"""
if self.toolset != "target":
verb += "(%s)" % self.toolset
if message:
return "%s %s" % (verb, self.ExpandSpecial(message))
return "{} {}".format(verb, self.ExpandSpecial(message))
else:
return "%s %s: %s" % (verb, self.name, fallback)
return f"{verb} {self.name}: {fallback}"
def WriteActions(
self, actions, extra_sources, prebuild, extra_mac_bundle_resources
@ -657,7 +650,7 @@ class NinjaWriter(object):
all_outputs = []
for action in actions:
# First write out a rule for the action.
name = "%s_%s" % (action["action_name"], self.hash_for_rules)
name = "{}_{}".format(action["action_name"], self.hash_for_rules)
description = self.GenerateDescription(
"ACTION", action.get("message", None), name
)
@ -706,7 +699,7 @@ class NinjaWriter(object):
continue
# First write out a rule for the rule action.
name = "%s_%s" % (rule["rule_name"], self.hash_for_rules)
name = "{}_{}".format(rule["rule_name"], self.hash_for_rules)
args = rule["action"]
description = self.GenerateDescription(
@ -731,7 +724,7 @@ class NinjaWriter(object):
# must vary per source file.
# Compute the list of variables we'll need to provide.
special_locals = ("source", "root", "dirname", "ext", "name")
needed_variables = set(["source"])
needed_variables = {"source"}
for argument in args:
for var in special_locals:
if "${%s}" % var in argument:
@ -875,7 +868,7 @@ class NinjaWriter(object):
output = self.GypPathToUniqueOutput("headers.hmap")
self.xcode_settings.header_map_path = output
all_headers = map(
self.GypPathToNinja, filter(lambda x: x.endswith((".h")), all_sources)
self.GypPathToNinja, filter(lambda x: x.endswith(".h"), all_sources)
)
variables = [
("framework", framework),
@ -925,11 +918,11 @@ class NinjaWriter(object):
def WriteMacXCassets(self, xcassets, bundle_depends):
"""Writes ninja edges for 'mac_bundle_resources' .xcassets files.
This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
It assumes that the assets catalogs define at least one imageset and
thus an Assets.car file will be generated in the application resources
directory. If this is not the case, then the build will probably be done
at each invocation of ninja."""
This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
It assumes that the assets catalogs define at least one imageset and
thus an Assets.car file will be generated in the application resources
directory. If this is not the case, then the build will probably be done
at each invocation of ninja."""
if not xcassets:
return
@ -1047,22 +1040,19 @@ class NinjaWriter(object):
spec,
)
else:
return dict(
(
arch,
self.WriteSourcesForArch(
self.arch_subninjas[arch],
config_name,
config,
sources,
predepends,
precompiled_header,
spec,
arch=arch,
),
return {
arch: self.WriteSourcesForArch(
self.arch_subninjas[arch],
config_name,
config,
sources,
predepends,
precompiled_header,
spec,
arch=arch,
)
for arch in self.archs
)
}
def WriteSourcesForArch(
self,
@ -1729,8 +1719,8 @@ class NinjaWriter(object):
def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
"""Returns a shell command that runs all the postbuilds, and removes
|output| if any of them fails. If |is_command_start| is False, then the
returned string will start with ' && '."""
|output| if any of them fails. If |is_command_start| is False, then the
returned string will start with ' && '."""
if not self.xcode_settings or spec["type"] == "none" or not output:
return ""
output = QuoteShellArgument(output, self.flavor)
@ -1776,8 +1766,8 @@ class NinjaWriter(object):
def ComputeExportEnvString(self, env):
"""Given an environment, returns a string looking like
'export FOO=foo; export BAR="${FOO} bar;'
that exports |env| to the shell."""
'export FOO=foo; export BAR="${FOO} bar;'
that exports |env| to the shell."""
export_str = []
for k, v in env:
export_str.append(
@ -1842,7 +1832,7 @@ class NinjaWriter(object):
"shared_library",
"executable",
):
return "%s%s%s" % (prefix, target, extension)
return f"{prefix}{target}{extension}"
elif type == "none":
return "%s.stamp" % target
else:
@ -1909,8 +1899,8 @@ class NinjaWriter(object):
):
"""Write out a new ninja "rule" statement for a given command.
Returns the name of the new rule, and a copy of |args| with variables
expanded."""
Returns the name of the new rule, and a copy of |args| with variables
expanded."""
if self.flavor == "win":
args = [
@ -2147,7 +2137,7 @@ def GetDefaultConcurrentLinks():
def _GetWinLinkRuleNameSuffix(embed_manifest):
"""Returns the suffix used to select an appropriate linking rule depending on
whether the manifest embedding is enabled."""
whether the manifest embedding is enabled."""
return "_embed" if embed_manifest else ""
@ -2538,10 +2528,12 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
"solink",
description="SOLINK $lib",
restat=True,
command=mtime_preserving_solink_base % {"suffix": "@$link_file_list"}, # noqa: E501
command=mtime_preserving_solink_base
% {"suffix": "@$link_file_list"}, # noqa: E501
rspfile="$link_file_list",
rspfile_content=("-Wl,--whole-archive $in $solibs -Wl,"
"--no-whole-archive $libs"),
rspfile_content=(
"-Wl,--whole-archive $in $solibs -Wl," "--no-whole-archive $libs"
),
pool="link_pool",
)
master_ninja.rule(
@ -2798,8 +2790,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
this_make_global_settings = data[build_file].get("make_global_settings", [])
assert make_global_settings == this_make_global_settings, (
"make_global_settings needs to be the same for all targets. %s vs. %s"
% (this_make_global_settings, make_global_settings)
"make_global_settings needs to be the same for all targets. "
f"{this_make_global_settings} vs. {make_global_settings}"
)
spec = target_dicts[qualified_target]
@ -2891,7 +2883,7 @@ def PerformBuild(data, configurations, params):
for config in configurations:
builddir = os.path.join(options.toplevel_dir, "out", config)
arguments = ["ninja", "-C", builddir]
print("Building [%s]: %s" % (config, arguments))
print(f"Building [{config}]: {arguments}")
subprocess.check_call(arguments)
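
Several hunks in this generator (the per-arch `arch_subninjas` writers and `WriteSources`) replace `dict((key, value) for ...)` with a dict comprehension. A runnable sketch of that rewrite with placeholder types — `Writer` and `open_output` here are stand-ins, not the ninja_syntax or gyp APIs:

```python
from io import StringIO


class Writer:
    """Placeholder for ninja_syntax.Writer: just remembers its output stream."""
    def __init__(self, output):
        self.output = output


def open_output(path):
    # The real code opens "<target>.<arch>.ninja" on disk; use StringIO here
    # so the sketch runs without touching the filesystem.
    return StringIO()


archs = ["x86_64", "arm64"]

# Old style: dict((arch, Writer(open_output(...))) for arch in archs)
# New style, as in the diff:
arch_subninjas = {arch: Writer(open_output(f"target.{arch}.ninja")) for arch in archs}

print(sorted(arch_subninjas))  # ['arm64', 'x86_64']
```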

View file

@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import filecmp
import gyp.common
@ -110,7 +109,7 @@ def CreateXCConfigurationList(configuration_names):
return xccl
class XcodeProject(object):
class XcodeProject:
def __init__(self, gyp_path, path, build_file_dict):
self.gyp_path = gyp_path
self.path = path
@ -613,7 +612,7 @@ def PerformBuild(data, configurations, params):
for config in configurations:
arguments = ["xcodebuild", "-project", xcodeproj_path]
arguments += ["-configuration", config]
print("Building [%s]: %s" % (config, arguments))
print(f"Building [{config}]: {arguments}")
subprocess.check_call(arguments)
@ -1072,7 +1071,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# TODO(mark): There's a possibility for collision here. Consider
# target "t" rule "A_r" and target "t_A" rule "r".
makefile_name = "%s.make" % re.sub(
"[^a-zA-Z0-9_]", "_", "%s_%s" % (target_name, rule["rule_name"])
"[^a-zA-Z0-9_]", "_", "{}_{}".format(target_name, rule["rule_name"])
)
makefile_path = os.path.join(
xcode_projects[build_file].path, makefile_name
@ -1102,7 +1101,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
eol = ""
else:
eol = " \\"
makefile.write(" %s%s\n" % (concrete_output, eol))
makefile.write(f" {concrete_output}{eol}\n")
for (rule_source, concrete_outputs, message, action) in zip(
rule["rule_sources"],
@ -1123,7 +1122,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
bol = ""
else:
bol = " "
makefile.write("%s%s \\\n" % (bol, concrete_output))
makefile.write(f"{bol}{concrete_output} \\\n")
concrete_output_dir = posixpath.dirname(concrete_output)
if (
@ -1143,7 +1142,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
eol = ""
else:
eol = " \\"
makefile.write(" %s%s\n" % (prerequisite, eol))
makefile.write(f" {prerequisite}{eol}\n")
# Make sure that output directories exist before executing the rule
# action.
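
The makefile naming shown above squashes every character outside `[a-zA-Z0-9_]` so a target/rule pair becomes a safe file name. A tiny sketch of that sanitisation; the `.make` suffix default is assumed for the example:

```python
import re


def makefile_name(target_name, rule_name, suffix=".make"):
    """Mirror of the naming shown above: replace anything outside
    [a-zA-Z0-9_] with '_' so "target rule" pairs become safe file names."""
    return re.sub("[^a-zA-Z0-9_]", "_", f"{target_name}_{rule_name}") + suffix


print(makefile_name("My App", "compile.idl"))  # My_App_compile_idl.make
```

As the original TODO notes, this scheme can collide (target "t" rule "A_r" vs. target "t_A" rule "r"); the sketch inherits that limitation.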

View file

@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import ast
@ -21,8 +20,6 @@ from distutils.version import StrictVersion
from gyp.common import GypError
from gyp.common import OrderedSet
PY3 = bytes != str
# A list of types that are treated as linkable.
linkable_types = [
"executable",
@ -228,17 +225,9 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, is_target, check
return data[build_file_path]
if os.path.exists(build_file_path):
# Open the build file for read ('r') with universal-newlines mode ('U')
# to make sure platform specific newlines ('\r\n' or '\r') are converted to '\n'
# which otherwise will fail eval()
if PY3 or sys.platform == "zos":
# On z/OS, universal-newlines mode treats the file as an ascii file.
# But since node-gyp produces ebcdic files, do not use that mode.
build_file_contents = open(build_file_path, "r").read()
else:
build_file_contents = open(build_file_path, "rU").read()
build_file_contents = open(build_file_path).read()
else:
raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
raise GypError(f"{build_file_path} not found (cwd: {os.getcwd()})")
build_file_data = None
try:
@ -567,7 +556,7 @@ class ParallelProcessingError(Exception):
pass
class ParallelState(object):
class ParallelState:
"""Class to keep track of state when processing input files in parallel.
If build files are loaded in parallel, use this to keep track of
@ -987,9 +976,8 @@ def ExpandVariables(input, phase, variables, build_file):
)
p_stdout, p_stderr = p.communicate("")
if PY3:
p_stdout = p_stdout.decode("utf-8")
p_stderr = p_stderr.decode("utf-8")
p_stdout = p_stdout.decode("utf-8")
p_stderr = p_stderr.decode("utf-8")
if p.wait() != 0 or p_stderr:
sys.stderr.write(p_stderr)
@ -1219,7 +1207,7 @@ def EvalSingleCondition(cond_expr, true_dict, false_dict, phase, variables, buil
except NameError as e:
gyp.common.ExceptionAppend(
e,
"while evaluating condition '%s' in %s" % (cond_expr_expanded, build_file),
f"while evaluating condition '{cond_expr_expanded}' in {build_file}",
)
raise GypError(e)
@ -1675,7 +1663,7 @@ def RemoveLinkDependenciesFromNoneTargets(targets):
)
class DependencyGraphNode(object):
class DependencyGraphNode:
"""
Attributes:
@ -2252,7 +2240,7 @@ def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
# Make membership testing of hashables in |to| (in particular, strings)
# faster.
hashable_to_set = set(x for x in to if is_hashable(x))
hashable_to_set = {x for x in to if is_hashable(x)}
for item in fro:
singleton = False
if type(item) in (str, int):
@ -2772,7 +2760,7 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
rule_name = rule["rule_name"]
if rule_name in rule_names:
raise GypError(
"rule %s exists in duplicate, target %s" % (rule_name, target)
f"rule {rule_name} exists in duplicate, target {target}"
)
rule_names[rule_name] = rule
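
The `LoadOneBuildFile` hunk above drops the `"rU"`/PY3 branching because Python 3's text-mode `open()` already applies universal newlines. A quick self-contained check of that behaviour (a scratch temp file, not a real .gyp file):

```python
import os
import tempfile

fd, path = tempfile.mkstemp(suffix=".gyp")
try:
    with os.fdopen(fd, "wb") as f:
        f.write(b"{\r\n  'targets': []\r\n}\r\n")  # Windows-style line endings
    # The diff keeps the terser open(path).read(); a with-block is used here
    # only to close the handle before the temp file is removed.
    with open(path) as f:
        contents = f.read()
    assert "\r" not in contents  # '\r\n' already folded to '\n' by text mode
    print(repr(contents))
finally:
    os.remove(path)
```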

View file

@ -8,7 +8,6 @@
These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
from __future__ import print_function
import fcntl
import fnmatch
@ -23,8 +22,6 @@ import subprocess
import sys
import tempfile
PY3 = bytes != str
def main(args):
executor = MacTool()
@ -33,7 +30,7 @@ def main(args):
sys.exit(exit_code)
class MacTool(object):
class MacTool:
"""This class performs all the Mac tooling steps. The methods can either be
executed directly, or dispatched from an argument list."""
@ -179,7 +176,7 @@ class MacTool(object):
def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
"""Copies the |source| Info.plist to the destination directory |dest|."""
# Read the source Info.plist into memory.
with open(source, "r") as fd:
with open(source) as fd:
lines = fd.read()
# Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
@ -251,7 +248,7 @@ class MacTool(object):
dest = os.path.join(os.path.dirname(info_plist), "PkgInfo")
with open(dest, "w") as fp:
fp.write("%s%s" % (package_type, signature_code))
fp.write(f"{package_type}{signature_code}")
def ExecFlock(self, lockfile, *cmd_list):
"""Emulates the most basic behavior of Linux's flock(1)."""
@ -278,9 +275,7 @@ class MacTool(object):
# epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
env["ZERO_AR_DATE"] = "1"
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
_, err = libtoolout.communicate()
if PY3:
err = err.decode("utf-8")
err = libtoolout.communicate()[1].decode("utf-8")
for line in err.splitlines():
if not libtool_re.match(line) and not libtool_re5.match(line):
print(line, file=sys.stderr)
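
The libtool hunk above decodes subprocess output unconditionally instead of guarding on `PY3`. A generic sketch of that decode-and-filter shape; the child command and the noise pattern are placeholders, not libtool's:

```python
import re
import subprocess
import sys

noise_re = re.compile(r"^warning: ignore me")

proc = subprocess.Popen(
    [sys.executable, "-c",
     "import sys; print('warning: ignore me', file=sys.stderr); "
     "print('real error', file=sys.stderr)"],
    stderr=subprocess.PIPE,
)
err = proc.communicate()[1].decode("utf-8")  # bytes -> str, no version check
for line in err.splitlines():
    if not noise_re.match(line):
        print(line, file=sys.stderr)  # only 'real error' survives the filter
```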
@ -540,7 +535,7 @@ class MacTool(object):
"application-identifier", ""
)
for team_identifier in profile_data.get("TeamIdentifier", []):
app_id = "%s.%s" % (team_identifier, bundle_identifier)
app_id = f"{team_identifier}.{bundle_identifier}"
if fnmatch.fnmatch(app_id, app_id_pattern):
valid_provisioning_profiles[app_id_pattern] = (
profile_path,

View file

@ -16,15 +16,13 @@ from gyp.common import OrderedSet
import gyp.MSVSUtil
import gyp.MSVSVersion
PY3 = bytes != str
windows_quoter_regex = re.compile(r'(\\*)"')
def QuoteForRspFile(arg):
"""Quote a command line argument so that it appears as one argument when
processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
Windows programs)."""
processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
Windows programs)."""
# See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
# threads. This is actually the quoting rules for CommandLineToArgvW, not
# for the shell, because the shell doesn't do anything in Windows. This
@ -74,7 +72,7 @@ def EncodeRspFileList(args):
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
value at path, or return |default| if any of the path doesn't exist."""
value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
@ -95,7 +93,7 @@ def _AddPrefix(element, prefix):
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
@ -108,8 +106,8 @@ def _DoRemapping(element, map):
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
@ -121,8 +119,8 @@ def _AppendOrReturn(append, element):
def _FindDirectXInstallation():
"""Try to find an installation location for the DirectX SDK. Check for the
standard environment variable, and if that doesn't exist, try to find
via the registry. May return None if not found in either location."""
standard environment variable, and if that doesn't exist, try to find
via the registry. May return None if not found in either location."""
# Return previously calculated value, if there is one
if hasattr(_FindDirectXInstallation, "dxsdk_dir"):
return _FindDirectXInstallation.dxsdk_dir
@ -132,9 +130,7 @@ def _FindDirectXInstallation():
# Setup params to pass to and attempt to launch reg.exe.
cmd = ["reg.exe", "query", r"HKLM\Software\Microsoft\DirectX", "/s"]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout = p.communicate()[0]
if PY3:
stdout = stdout.decode("utf-8")
stdout = p.communicate()[0].decode("utf-8")
for line in stdout.splitlines():
if "InstallPath" in line:
dxsdk_dir = line.split(" ")[3] + "\\"
@ -146,7 +142,7 @@ def _FindDirectXInstallation():
def GetGlobalVSMacroEnv(vs_version):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents. Returns all variables that are independent of the target."""
equivalents. Returns all variables that are independent of the target."""
env = {}
# '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
# Visual Studio is actually installed.
@ -167,7 +163,7 @@ def GetGlobalVSMacroEnv(vs_version):
def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
"""Finds msvs_system_include_dirs that are common to all targets, removes
them from all targets, and returns an OrderedSet containing them."""
them from all targets, and returns an OrderedSet containing them."""
all_system_includes = OrderedSet(configs[0].get("msvs_system_include_dirs", []))
for config in configs[1:]:
system_includes = config.get("msvs_system_include_dirs", [])
@ -193,10 +189,10 @@ def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
return expanded_system_includes
class MsvsSettings(object):
class MsvsSettings:
"""A class that understands the gyp 'msvs_...' values (especially the
msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
class helps map those settings to command line options."""
msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
class helps map those settings to command line options."""
def __init__(self, spec, generator_flags):
self.spec = spec
@ -229,7 +225,9 @@ class MsvsSettings(object):
for config in configs.values():
if field in config:
unsupported += [
"%s not supported (target %s)." % (field, spec["target_name"])
"{} not supported (target {}).".format(
field, spec["target_name"]
)
]
if unsupported:
raise Exception("\n".join(unsupported))
@ -237,9 +235,9 @@ class MsvsSettings(object):
def GetExtension(self):
"""Returns the extension for the target, with no leading dot.
Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
the target type.
"""
Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
the target type.
"""
ext = self.spec.get("product_extension", None)
if ext:
return ext
@ -247,7 +245,7 @@ class MsvsSettings(object):
def GetVSMacroEnv(self, base_to_build=None, config=None):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents."""
equivalents."""
target_arch = self.GetArch(config)
if target_arch == "x86":
target_platform = "Win32"
@ -294,15 +292,15 @@ class MsvsSettings(object):
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
class _GetWrapper(object):
class _GetWrapper:
def __init__(self, parent, field, base_path, append=None):
self.parent = parent
self.field = field
@ -321,7 +319,7 @@ class MsvsSettings(object):
def GetArch(self, config):
"""Get architecture based on msvs_configuration_platform and
msvs_target_platform. Returns either 'x86' or 'x64'."""
msvs_target_platform. Returns either 'x86' or 'x64'."""
configuration_platform = self.msvs_configuration_platform.get(config, "")
platform = self.msvs_target_platform.get(config, "")
if not platform: # If no specific override, use the configuration's.
@ -368,7 +366,7 @@ class MsvsSettings(object):
def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system
include dirs used for platform SDK and similar."""
include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(
@ -380,7 +378,7 @@ class MsvsSettings(object):
def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
"""Updates midl_include_dirs to expand VS specific paths, and adds the
system include dirs used for platform SDK and similar."""
system include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = midl_include_dirs + self.msvs_system_include_dirs[config]
includes.extend(
@ -392,7 +390,7 @@ class MsvsSettings(object):
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
on other VS settings."""
on other VS settings."""
config = self._TargetConfig(config)
defines = []
if self._ConfigAttrib(["CharacterSet"], config) == "1":
@ -408,7 +406,7 @@ class MsvsSettings(object):
def GetCompilerPdbName(self, config, expand_special):
"""Get the pdb file name that should be used for compiler invocations, or
None if there's no explicit name specified."""
None if there's no explicit name specified."""
config = self._TargetConfig(config)
pdbname = self._Setting(("VCCLCompilerTool", "ProgramDataBaseFileName"), config)
if pdbname:
@ -417,7 +415,7 @@ class MsvsSettings(object):
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overridden map file name for a target or returns None
if it's not set."""
if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(("VCLinkerTool", "MapFileName"), config)
if map_file:
@ -426,7 +424,7 @@ class MsvsSettings(object):
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec["type"]
root = "VCLibrarianTool" if type == "static_library" else "VCLinkerTool"
@ -440,7 +438,7 @@ class MsvsSettings(object):
def GetPDBName(self, config, expand_special, default):
"""Gets the explicitly overridden pdb name for a target or returns
default if it's not overridden, or if no pdb will be generated."""
default if it's not overridden, or if no pdb will be generated."""
config = self._TargetConfig(config)
output_file = self._Setting(("VCLinkerTool", "ProgramDatabaseFile"), config)
generate_debug_info = self._Setting(
@ -456,7 +454,7 @@ class MsvsSettings(object):
def GetNoImportLibrary(self, config):
"""If NoImportLibrary: true, ninja will not expect the output to include
an import library."""
an import library."""
config = self._TargetConfig(config)
noimplib = self._Setting(("NoImportLibrary",), config)
return noimplib == "true"
@ -549,8 +547,7 @@ class MsvsSettings(object):
return cflags
def _GetPchFlags(self, config, extension):
"""Get the flags to be added to the cflags for precompiled header support.
"""
"""Get the flags to be added to the cflags for precompiled header support."""
config = self._TargetConfig(config)
# The PCH is only built once by a particular source file. Usage of PCH must
# only be for the same language (i.e. C vs. C++), so only include the pch
@ -575,7 +572,7 @@ class MsvsSettings(object):
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
"""Get and normalize the list of paths in AdditionalLibraryDirectories
setting."""
setting."""
config = self._TargetConfig(config)
libpaths = self._Setting(
(root, "AdditionalLibraryDirectories"), config, default=[]
@ -622,14 +619,14 @@ class MsvsSettings(object):
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
linker in the VS generator. Emulate that behaviour here."""
def_file = self.GetDefFile(gyp_to_build_path)
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
"""Gets the explicitly overridden pgd name for a target or returns None
if it's not overridden."""
if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(("VCLinkerTool", "ProfileGuidedDatabase"), config)
if output_file:
@ -649,7 +646,7 @@ class MsvsSettings(object):
build_dir,
):
"""Returns the flags that need to be added to link commands, and the
manifest files."""
manifest files."""
config = self._TargetConfig(config)
ldflags = []
ld = self._GetWrapper(
@ -709,7 +706,7 @@ class MsvsSettings(object):
)
if stack_commit_size:
stack_commit_size = "," + stack_commit_size
ldflags.append("/STACK:%s%s" % (stack_reserve_size, stack_commit_size))
ldflags.append(f"/STACK:{stack_reserve_size}{stack_commit_size}")
ld("TerminalServerAware", map={"1": ":NO", "2": ""}, prefix="/TSAWARE")
ld("LinkIncremental", map={"1": ":NO", "2": ""}, prefix="/INCREMENTAL")
@ -775,12 +772,12 @@ class MsvsSettings(object):
self, config, name, gyp_to_build_path, allow_isolation, build_dir
):
"""Returns a 3-tuple:
- the set of flags that need to be added to the link to generate
a default manifest
- the intermediate manifest that the linker will generate that should be
used to assert it doesn't add anything to the merged one.
- the list of all the manifest files to be merged by the manifest tool and
included into the link."""
- the set of flags that need to be added to the link to generate
a default manifest
- the intermediate manifest that the linker will generate that should be
used to assert it doesn't add anything to the merged one.
- the list of all the manifest files to be merged by the manifest tool and
included into the link."""
generate_manifest = self._Setting(
("VCLinkerTool", "GenerateManifest"), config, default="true"
)
@ -835,10 +832,10 @@ class MsvsSettings(object):
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level='%s' uiAccess='%s' />
<requestedExecutionLevel level='{}' uiAccess='{}' />
</requestedPrivileges>
</security>
</trustInfo>""" % (
</trustInfo>""".format(
execution_level_map[execution_level],
ui_access,
)
@ -867,7 +864,7 @@ class MsvsSettings(object):
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one
generated by the linker."""
generated by the linker."""
files = self._Setting(
("VCManifestTool", "AdditionalManifestFiles"), config, default=[]
)
@ -880,7 +877,7 @@ class MsvsSettings(object):
def IsUseLibraryDependencyInputs(self, config):
"""Returns whether the target should be linked via Use Library Dependency
Inputs (using component .objs of a given .lib)."""
Inputs (using component .objs of a given .lib)."""
config = self._TargetConfig(config)
uldi = self._Setting(("VCLinkerTool", "UseLibraryDependencyInputs"), config)
return uldi == "true"
@ -901,7 +898,7 @@ class MsvsSettings(object):
def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource
compiler."""
compiler."""
config = self._TargetConfig(config)
rcflags = []
rc = self._GetWrapper(
@ -916,13 +913,13 @@ class MsvsSettings(object):
def BuildCygwinBashCommandLine(self, args, path_to_base):
"""Build a command line that runs args via cygwin bash. We assume that all
incoming paths are in Windows normpath'd form, so they need to be
converted to posix style for the part of the command line that's passed to
bash. We also have to do some Visual Studio macro emulation here because
various rules use magic VS names for things. Also note that rules that
contain ninja variables cannot be fixed here (for example ${source}), so
the outer generator needs to make sure that the paths that are written out
are in posix style, if the command line will be used here."""
incoming paths are in Windows normpath'd form, so they need to be
converted to posix style for the part of the command line that's passed to
bash. We also have to do some Visual Studio macro emulation here because
various rules use magic VS names for things. Also note that rules that
contain ninja variables cannot be fixed here (for example ${source}), so
the outer generator needs to make sure that the paths that are written out
are in posix style, if the command line will be used here."""
cygwin_dir = os.path.normpath(
os.path.join(path_to_base, self.msvs_cygwin_dirs[0])
)
@ -932,13 +929,13 @@ class MsvsSettings(object):
bash_cmd = " ".join(args)
cmd = (
'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir
+ 'bash -c "%s ; %s"' % (cd, bash_cmd)
+ f'bash -c "{cd} ; {bash_cmd}"'
)
return cmd
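
`BuildCygwinBashCommandLine` above keeps the `%`-style prefix and switches the bash part to an f-string. A simplified sketch of the command it assembles — path conversion is reduced to `posixpath.normpath`, the argument list is illustrative, and nothing here needs cygwin installed:

```python
import posixpath


def cygwin_bash_command(args, cygwin_dir, cwd="."):
    """Sketch of the command shape built above: set up the cygwin environment,
    then run the action through bash. Purely string construction."""
    cd = f"cd {posixpath.normpath(cwd)}"
    bash_cmd = " ".join(args)
    return (
        'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir
        + f'bash -c "{cd} ; {bash_cmd}"'
    )


print(cygwin_bash_command(["python", "gen.py", "out.h"], r"third_party\cygwin"))
```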
def IsRuleRunUnderCygwin(self, rule):
"""Determine if an action should be run under cygwin. If the variable is
unset, or set to 1 we use cygwin."""
unset, or set to 1 we use cygwin."""
return (
int(rule.get("msvs_cygwin_shell", self.spec.get("msvs_cygwin_shell", 1)))
!= 0
@ -959,19 +956,19 @@ class MsvsSettings(object):
def HasExplicitIdlRulesOrActions(self, spec):
"""Determine if there's an explicit rule or action for idl files. When
there isn't we need to generate implicit rules to build MIDL .idl files."""
there isn't we need to generate implicit rules to build MIDL .idl files."""
return self._HasExplicitRuleForExtension(
spec, "idl"
) or self._HasExplicitIdlActions(spec)
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
need to generate implicit rules to assemble .asm files."""
need to generate implicit rules to assemble .asm files."""
return self._HasExplicitRuleForExtension(spec, "asm")
def GetIdlBuildData(self, source, config):
"""Determine the implicit outputs for an idl file. Returns output
directory, outputs, and variables and flags that are required."""
directory, outputs, and variables and flags that are required."""
config = self._TargetConfig(config)
midl_get = self._GetWrapper(self, self.msvs_settings[config], "VCMIDLTool")
@ -1010,10 +1007,10 @@ def _LanguageMatchesForPch(source_ext, pch_source_ext):
)
class PrecompiledHeader(object):
class PrecompiledHeader:
"""Helper to generate dependencies and build rules to handle generation of
precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
"""
precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
"""
def __init__(
self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext
@ -1027,14 +1024,14 @@ class PrecompiledHeader(object):
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
files."""
return self.settings.msvs_precompiled_header[self.config]
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
additional wrapping in the return value is for interface compatibility
with make.py on Mac, and xcode_emulation.py."""
returns a list of the pch files that should be depended upon. The
additional wrapping in the return value is for interface compatibility
with make.py on Mac, and xcode_emulation.py."""
assert arch is None
if not self._PchHeader():
return []
@ -1046,14 +1043,14 @@ class PrecompiledHeader(object):
def GetPchBuildCommands(self, arch):
"""Not used on Windows as there are no additional build steps required
(instead, existing steps are modified in GetFlagsModifications below)."""
(instead, existing steps are modified in GetFlagsModifications below)."""
return []
def GetFlagsModifications(
self, input, output, implicit, command, cflags_c, cflags_cc, expand_special
):
"""Get the modified cflags and implicit dependencies that should be used
for the pch compilation step."""
for the pch compilation step."""
if input == self.pch_source:
pch_output = ["/Yc" + self._PchHeader()]
if command == "cxx":
@ -1090,7 +1087,7 @@ def _GetVsvarsSetupArgs(generator_flags, arch):
def ExpandMacros(string, expansions):
"""Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
for the canonical way to retrieve a suitable dict."""
for the canonical way to retrieve a suitable dict."""
if "$" in string:
for old, new in expansions.items():
assert "$(" not in new, new
@ -1100,7 +1097,7 @@ def ExpandMacros(string, expansions):
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
a textual dump output by the cmd.exe 'set' command."""
a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
"goma_.*", # TODO(scottmg): This is ugly, but needed for goma.
"include",
@ -1140,8 +1137,8 @@ def _ExtractImportantEnvironment(output_of_set):
def _FormatAsEnvironmentBlock(envvar_dict):
"""Format as an 'environment block' directly suitable for CreateProcess.
Briefly this is a list of key=value\0, terminated by an additional \0. See
CreateProcess documentation for more details."""
Briefly this is a list of key=value\0, terminated by an additional \0. See
CreateProcess documentation for more details."""
block = ""
nul = "\0"
for key, value in envvar_dict.items():
@ -1152,7 +1149,7 @@ def _FormatAsEnvironmentBlock(envvar_dict):
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
setup batch file, followed by the equivalent of `where`."""
setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith("LOC:"):
@ -1163,19 +1160,19 @@ def GenerateEnvironmentFiles(
toplevel_build_dir, generator_flags, system_includes, open_out
):
"""It's not sufficient to have the absolute path to the compiler, linker,
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
need to support both x86 and x64 compilers within the same build (to support
msvs_target_platform hackery). Different architectures require a different
compiler binary, and different supporting environment variables (INCLUDE,
LIB, LIBPATH). So, we extract the environment here, wrap all invocations
of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
sets up the environment, and then we do not prefix the compiler with
an absolute path, instead preferring something like "cl.exe" in the rule
which will then run whichever the environment setup has put in the path.
When the following procedure to generate environment files does not
meet your requirement (e.g. for custom toolchains), you can pass
"-G ninja_use_custom_environment_files" to the gyp to suppress file
generation and use custom environment files prepared by yourself."""
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
need to support both x86 and x64 compilers within the same build (to support
msvs_target_platform hackery). Different architectures require a different
compiler binary, and different supporting environment variables (INCLUDE,
LIB, LIBPATH). So, we extract the environment here, wrap all invocations
of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
sets up the environment, and then we do not prefix the compiler with
an absolute path, instead preferring something like "cl.exe" in the rule
which will then run whichever the environment setup has put in the path.
When the following procedure to generate environment files does not
meet your requirement (e.g. for custom toolchains), you can pass
"-G ninja_use_custom_environment_files" to the gyp to suppress file
generation and use custom environment files prepared by yourself."""
archs = ("x86", "x64")
if generator_flags.get("ninja_use_custom_environment_files", 0):
cl_paths = {}
@ -1191,9 +1188,7 @@ def GenerateEnvironmentFiles(
popen = subprocess.Popen(
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
variables, _ = popen.communicate()
if PY3:
variables = variables.decode("utf-8")
variables = popen.communicate()[0].decode("utf-8")
if popen.returncode != 0:
raise Exception('"%s" failed with error %d' % (args, popen.returncode))
env = _ExtractImportantEnvironment(variables)
@ -1216,19 +1211,17 @@ def GenerateEnvironmentFiles(
("&&", "for", "%i", "in", "(cl.exe)", "do", "@echo", "LOC:%~$PATH:i")
)
popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
output, _ = popen.communicate()
if PY3:
output = output.decode("utf-8")
output = popen.communicate()[0].decode("utf-8")
cl_paths[arch] = _ExtractCLPath(output)
return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
"""Emulate behavior of msvs_error_on_missing_sources present in the msvs
generator: Check that all regular source files, i.e. not created at run time,
exist on disk. Missing files cause needless recompilation when building via
VS, and we want this check to match for people/bots that build using ninja,
so they're not surprised when the VS build fails."""
generator: Check that all regular source files, i.e. not created at run time,
exist on disk. Missing files cause needless recompilation when building via
VS, and we want this check to match for people/bots that build using ninja,
so they're not surprised when the VS build fails."""
if int(generator_flags.get("msvs_error_on_missing_sources", 0)):
no_specials = filter(lambda x: "$" not in x, sources)
relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
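
`_FormatAsEnvironmentBlock`, whose docstring is reflowed above, produces the `key=value\0 ... \0` block that CreateProcess expects. A minimal sketch of that format, under the stated assumption that a plain `str` result is what the caller wants:

```python
def format_as_environment_block(envvar_dict):
    """Sketch of the helper described above: CreateProcess wants a block of
    'key=value' entries, each NUL-terminated, with one extra NUL at the end."""
    nul = "\0"
    block = ""
    for key, value in envvar_dict.items():
        block += f"{key}={value}{nul}"
    return block + nul


blk = format_as_environment_block({"INCLUDE": r"C:\sdk\include", "LIB": r"C:\sdk\lib"})
print(blk.count("\0"))  # 3: one per variable plus the terminating NUL
```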

View file

@ -16,7 +16,7 @@ def escape_path(word):
return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")
class Writer(object):
class Writer:
def __init__(self, output, width=78):
self.output = output
self.width = width
@ -33,7 +33,7 @@ class Writer(object):
return
if isinstance(value, list):
value = " ".join(filter(None, value)) # Filter out empty strings.
self._line("%s = %s" % (key, value), indent)
self._line(f"{key} = {value}", indent)
def pool(self, name, depth):
self._line("pool %s" % name)
@ -89,7 +89,7 @@ class Writer(object):
all_inputs.extend(order_only)
self._line(
"build %s: %s" % (" ".join(out_outputs), " ".join([rule] + all_inputs))
"build {}: {}".format(" ".join(out_outputs), " ".join([rule] + all_inputs))
)
if variables:

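This file is gyp's bundled copy of ninja_syntax, now a plain Python 3 class emitting f-string output. A short usage sketch; `rule()` belongs to the same module but is not visible in this hunk, so treat its exact signature (and the import path) as assumptions:

```python
import io

from gyp import ninja_syntax  # bundled in pylib/; exact import path may vary

buf = io.StringIO()
n = ninja_syntax.Writer(buf)
n.variable("cflags", ["-O2", "-Wall"])  # list values are space-joined
n.rule("cc", command="gcc $cflags -c $in -o $out")
n.build(["foo.o"], "cc", ["foo.c"])
print(buf.getvalue())
# Expected (roughly):
#   cflags = -O2 -Wall
#   rule cc
#     command = gcc $cflags -c $in -o $out
#   build foo.o: cc foo.c
```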
View file

@ -36,10 +36,7 @@ def _deepcopy_atomic(x):
return x
try:
types = bool, float, int, str, type, type(None), long, unicode
except NameError: # Python 3
types = bool, float, int, str, type, type(None)
types = bool, float, int, str, type, type(None)
for x in types:
d[x] = _deepcopy_atomic

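simple_copy registers per-type copier functions in a dispatch table; the hunk above now registers only the Python 3 atomic types. A self-contained sketch of that table-driven pattern (the uniform `(value, copier)` signature is a simplification for this example, not the module's own):

```python
def _copy_atomic(x, copier):
    # Immutable built-ins can be returned as-is.
    return x


def _copy_list(x, copier):
    return [copier(item) for item in x]


def _copy_dict(x, copier):
    return {copier(k): copier(v) for k, v in x.items()}


_dispatch = {t: _copy_atomic for t in (bool, float, int, str, type, type(None))}
_dispatch[list] = _copy_list
_dispatch[dict] = _copy_dict


def deepcopy(x):
    # Unregistered types raise KeyError, keeping the copier deliberately "simple".
    return _dispatch[type(x)](x, deepcopy)


print(deepcopy({"defines": ["DEBUG", None], "depth": 2}))
```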
View file

@ -9,7 +9,6 @@
These functions are executed via gyp-win-tool when using the ninja generator.
"""
from __future__ import print_function
import os
import re
@ -20,7 +19,6 @@ import string
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
PY3 = bytes != str
# A regex matching an argument corresponding to the output filename passed to
# link.exe.
@ -34,7 +32,7 @@ def main(args):
sys.exit(exit_code)
class WinTool(object):
class WinTool:
"""This class performs all the Windows tooling steps. The methods can either
be executed directly, or dispatched from an argument list."""
@ -141,9 +139,7 @@ class WinTool(object):
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
out, _ = link.communicate()
if PY3:
out = out.decode("utf-8")
out = link.communicate()[0].decode("utf-8")
for line in out.splitlines():
if (
not line.startswith(" Creating library ")
@ -223,8 +219,8 @@ class WinTool(object):
our_manifest = "%(out)s.manifest" % variables
# Load and normalize the manifests. mt.exe sometimes removes whitespace,
# and sometimes doesn't unfortunately.
with open(our_manifest, "r") as our_f:
with open(assert_manifest, "r") as assert_f:
with open(our_manifest) as our_f:
with open(assert_manifest) as assert_f:
our_data = our_f.read().translate(None, string.whitespace)
assert_data = assert_f.read().translate(None, string.whitespace)
if our_data != assert_data:
@ -233,7 +229,7 @@ class WinTool(object):
def dump(filename):
print(filename, file=sys.stderr)
print("-----", file=sys.stderr)
with open(filename, "r") as f:
with open(filename) as f:
print(f.read(), file=sys.stderr)
print("-----", file=sys.stderr)
@ -256,9 +252,7 @@ class WinTool(object):
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
out, _ = popen.communicate()
if PY3:
out = out.decode("utf-8")
out = popen.communicate()[0].decode("utf-8")
for line in out.splitlines():
if line and "manifest authoring warning 81010002" not in line:
print(line)
@ -302,16 +296,14 @@ class WinTool(object):
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
out, _ = popen.communicate()
if PY3:
out = out.decode("utf-8")
out = popen.communicate()[0].decode("utf-8")
# Filter junk out of stdout, and write filtered versions. Output we want
# to filter is pairs of lines that look like this:
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
# objidl.idl
lines = out.splitlines()
prefixes = ("Processing ", "64 bit Processing ")
processing = set(os.path.basename(x) for x in lines if x.startswith(prefixes))
processing = {os.path.basename(x) for x in lines if x.startswith(prefixes)}
for line in lines:
if not line.startswith(prefixes) and line not in processing:
print(line)
@ -323,9 +315,7 @@ class WinTool(object):
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
out, _ = popen.communicate()
if PY3:
out = out.decode("utf-8")
out = popen.communicate()[0].decode("utf-8")
for line in out.splitlines():
if (
not line.startswith("Copyright (C) Microsoft Corporation")
@ -343,9 +333,7 @@ class WinTool(object):
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
out, _ = popen.communicate()
if PY3:
out = out.decode("utf-8")
out = popen.communicate()[0].decode("utf-8")
for line in out.splitlines():
if (
not line.startswith("Microsoft (R) Windows (R) Resource Compiler")

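Every hunk in gyp-win-tool follows the same shape: run a Windows tool, decode its combined output unconditionally (the old `if PY3:` guard is gone), and echo only the lines that are not known banner noise. The recurring pattern looks roughly like this, with an illustrative prefix filter rather than gyp-win-tool's own lists:

```python
import subprocess


def run_and_filter(args, env=None, drop_prefixes=()):
    # Capture stdout+stderr together, decode once, and reprint only the
    # lines that do not start with a known-noisy banner prefix.
    popen = subprocess.Popen(
        args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
    )
    out = popen.communicate()[0].decode("utf-8")
    for line in out.splitlines():
        if line and not line.startswith(tuple(drop_prefixes)):
            print(line)
    return popen.returncode


# e.g. run_and_filter("rc.exe /nologo foo.rc", drop_prefixes=("Copyright (C)",))
```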
View file

@ -7,7 +7,6 @@ This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
from __future__ import print_function
import copy
import gyp.common
@ -19,8 +18,6 @@ import subprocess
import sys
from gyp.common import GypError
PY3 = bytes != str
# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
# "xcodebuild" is called too quickly (it has been found to return incorrect
# version number).
@ -40,7 +37,7 @@ def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
return mapping
class XcodeArchsDefault(object):
class XcodeArchsDefault:
"""A class to resolve ARCHS variable from xcode_settings, resolving Xcode
macros and implementing filtering by VALID_ARCHS. The expansion of macros
depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
@ -148,7 +145,7 @@ def GetXcodeArchsDefault():
return XCODE_ARCHS_DEFAULT_CACHE
class XcodeSettings(object):
class XcodeSettings:
"""A class that understands the gyp 'xcode_settings' object."""
# Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
@ -281,7 +278,7 @@ class XcodeSettings(object):
else:
return "." + self.spec.get("product_extension", "app")
else:
assert False, "Don't know extension for '%s', target '%s'" % (
assert False, "Don't know extension for '{}', target '{}'".format(
self.spec["type"],
self.spec["target_name"],
)
@ -1088,7 +1085,7 @@ class XcodeSettings(object):
if not quiet:
result.append("echo STRIP\\(%s\\)" % self.spec["target_name"])
result.append("strip %s %s" % (strip_flags, output_binary))
result.append(f"strip {strip_flags} {output_binary}")
self.configname = None
return result
@ -1110,7 +1107,7 @@ class XcodeSettings(object):
):
if not quiet:
result.append("echo DSYMUTIL\\(%s\\)" % self.spec["target_name"])
result.append("dsymutil %s -o %s" % (output_binary, output + ".dSYM"))
result.append("dsymutil {} -o {}".format(output_binary, output + ".dSYM"))
self.configname = None
return result
@ -1143,7 +1140,7 @@ class XcodeSettings(object):
source = os.path.join("${BUILT_PRODUCTS_DIR}", product_name)
test_host = os.path.dirname(settings.get("TEST_HOST"))
xctest_destination = os.path.join(test_host, "PlugIns", product_name)
postbuilds.extend(["ditto %s %s" % (source, xctest_destination)])
postbuilds.extend([f"ditto {source} {xctest_destination}"])
key = self._GetIOSCodeSignIdentityKey(settings)
if not key:
@ -1170,7 +1167,7 @@ class XcodeSettings(object):
for framework in frameworks:
source = os.path.join(platform_root, framework)
destination = os.path.join(frameworks_dir, os.path.basename(framework))
postbuilds.extend(["ditto %s %s" % (source, destination)])
postbuilds.extend([f"ditto {source} {destination}"])
# Then re-sign everything with 'preserve=True'
postbuilds.extend(
@ -1371,7 +1368,7 @@ class XcodeSettings(object):
return ""
class MacPrefixHeader(object):
class MacPrefixHeader:
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
This feature consists of several pieces:
@ -1561,9 +1558,7 @@ def GetStdoutQuiet(cmdlist):
Ignores the stderr.
Raises |GypError| if the command exits with a non-zero return code."""
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out = job.communicate()[0]
if PY3:
out = out.decode("utf-8")
out = job.communicate()[0].decode("utf-8")
if job.returncode != 0:
raise GypError("Error %d running %s" % (job.returncode, cmdlist[0]))
return out.rstrip("\n")
@ -1573,9 +1568,7 @@ def GetStdout(cmdlist):
"""Returns the content of standard output returned by invoking |cmdlist|.
Raises |GypError| if the command exits with a non-zero return code."""
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
out = job.communicate()[0]
if PY3:
out = out.decode("utf-8")
out = job.communicate()[0].decode("utf-8")
if job.returncode != 0:
sys.stderr.write(out + "\n")
raise GypError("Error %d running %s" % (job.returncode, cmdlist[0]))
@ -1871,7 +1864,7 @@ def _TopologicallySortedEnvVarKeys(env):
# definition contains all variables it references in a single string.
# We can then reverse the result of the topological sort at the end.
# Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
matches = set([v for v in regex.findall(env[node]) if v in env])
matches = {v for v in regex.findall(env[node]) if v in env}
for dependee in matches:
assert "${" not in dependee, "Nested variables not supported: " + dependee
return matches

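The final hunk in this file builds the dependency edges used to topologically sort environment variables: for each variable it collects the other variables referenced in its value. A standalone illustration of that edge-building step, with an assumed `${NAME}` regex (the module's actual pattern is defined outside this hunk):

```python
import re

_REF = re.compile(r"\$\{([A-Za-z_][A-Za-z0-9_]*)\}")  # assumed reference syntax

env = {
    "SRCROOT": "/tmp/project",
    "BUILT_PRODUCTS_DIR": "${SRCROOT}/build",
    "INFOPLIST_PATH": "${BUILT_PRODUCTS_DIR}/Info.plist",
}


def references(node):
    # Edges point from a variable to the variables its value mentions.
    return {v for v in _REF.findall(env[node]) if v in env}


print(references("INFOPLIST_PATH"))  # {'BUILT_PRODUCTS_DIR'}
print(references("SRCROOT"))         # set()
```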
View file

@ -43,11 +43,11 @@ def _WriteWorkspace(main_gyp, sources_gyp, params):
workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
try:
with open(workspace_file, "r") as input_file:
with open(workspace_file) as input_file:
input_string = input_file.read()
if input_string == output_string:
return
except IOError:
except OSError:
# Ignore errors if the file doesn't exist.
pass
@ -214,7 +214,7 @@ def CreateWrapper(target_list, target_dicts, data, params):
if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
# Add to new_target_list.
target_name = spec.get("target_name")
new_target_name = "%s:%s#target" % (main_gyp, target_name)
new_target_name = f"{main_gyp}:{target_name}#target"
new_target_list.append(new_target_name)
# Add to new_target_dicts.
@ -282,7 +282,7 @@ def CreateWrapper(target_list, target_dicts, data, params):
# Put sources_to_index in its own gyp.
sources_gyp = os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
fully_qualified_target_name = "%s:%s#target" % (sources_gyp, sources_target_name)
fully_qualified_target_name = f"{sources_gyp}:{sources_target_name}#target"
# Add to new_target_list, new_target_dicts and new_data.
new_target_list.append(fully_qualified_target_name)

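Both hunks in this file build gyp's fully qualified target names, now as f-strings. The format is `<gyp file>:<target name>#<toolset>`; a quick illustration with made-up paths:

```python
main_gyp = "out/wrapper.gyp"
target_name = "base_unittests"

qualified = f"{main_gyp}:{target_name}#target"
print(qualified)  # out/wrapper.gyp:base_unittests#target

# Splitting such a name back apart when reading it:
build_file, rest = qualified.split(":", 1)
name, toolset = rest.split("#", 1)
```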
View file

@ -144,13 +144,9 @@ import re
import struct
import sys
try:
basestring, cmp, unicode
except NameError: # Python 3
basestring = unicode = str
def cmp(x, y):
return (x > y) - (x < y)
def cmp(x, y):
return (x > y) - (x < y)
# See XCObject._EncodeString. This pattern is used to determine when a string
@ -199,7 +195,7 @@ def ConvertVariablesToShellSyntax(input_string):
return re.sub(r"\$\((.*?)\)", "${\\1}", input_string)
class XCObject(object):
class XCObject:
"""The abstract base of all class types used in Xcode project files.
Class variables:
@ -301,8 +297,8 @@ class XCObject(object):
try:
name = self.Name()
except NotImplementedError:
return "<%s at 0x%x>" % (self.__class__.__name__, id(self))
return "<%s %r at 0x%x>" % (self.__class__.__name__, name, id(self))
return "<{} at 0x{:x}>".format(self.__class__.__name__, id(self))
return "<{} {!r} at 0x{:x}>".format(self.__class__.__name__, name, id(self))
def Copy(self):
"""Make a copy of this object.
@ -325,7 +321,7 @@ class XCObject(object):
that._properties[key] = new_value
else:
that._properties[key] = value
elif isinstance(value, (basestring, int)):
elif isinstance(value, (str, int)):
that._properties[key] = value
elif isinstance(value, list):
if is_strong:
@ -616,7 +612,7 @@ class XCObject(object):
comment = value.Comment()
elif isinstance(value, str):
printable += self._EncodeString(value)
elif isinstance(value, basestring):
elif isinstance(value, str):
printable += self._EncodeString(value.encode("utf-8"))
elif isinstance(value, int):
printable += str(value)
@ -791,7 +787,7 @@ class XCObject(object):
)
for item in value:
if not isinstance(item, property_type) and not (
isinstance(item, basestring) and property_type == str
isinstance(item, str) and property_type == str
):
# Accept unicode where str is specified. str is treated as
# UTF-8-encoded.
@ -806,7 +802,7 @@ class XCObject(object):
+ item.__class__.__name__
)
elif not isinstance(value, property_type) and not (
isinstance(value, basestring) and property_type == str
isinstance(value, str) and property_type == str
):
# Accept unicode where str is specified. str is treated as
# UTF-8-encoded.
@ -827,7 +823,7 @@ class XCObject(object):
self._properties[property] = value.Copy()
else:
self._properties[property] = value
elif isinstance(value, (basestring, int)):
elif isinstance(value, (str, int)):
self._properties[property] = value
elif isinstance(value, list):
if is_strong:
@ -2185,7 +2181,7 @@ class PBXCopyFilesBuildPhase(XCBuildPhase):
relative_path = path[1:]
else:
raise ValueError(
"Can't use path %s in a %s" % (path, self.__class__.__name__)
f"Can't use path {path} in a {self.__class__.__name__}"
)
self._properties["dstPath"] = relative_path
@ -2250,8 +2246,8 @@ class PBXContainerItemProxy(XCObject):
def __repr__(self):
props = self._properties
name = "%s.gyp:%s" % (props["containerPortal"].Name(), props["remoteInfo"])
return "<%s %r at 0x%x>" % (self.__class__.__name__, name, id(self))
name = "{}.gyp:{}".format(props["containerPortal"].Name(), props["remoteInfo"])
return "<{} {!r} at 0x{:x}>".format(self.__class__.__name__, name, id(self))
def Name(self):
# Admittedly not the best name, but it's what Xcode uses.
@ -2288,7 +2284,7 @@ class PBXTargetDependency(XCObject):
def __repr__(self):
name = self._properties.get("name") or self._properties["target"].Name()
return "<%s %r at 0x%x>" % (self.__class__.__name__, name, id(self))
return "<{} {!r} at 0x{:x}>".format(self.__class__.__name__, name, id(self))
def Name(self):
# Admittedly not the best name, but it's what Xcode uses.

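Several `__repr__` methods in xcodeproj_file switch from `%`-formatting to `str.format` with the `!r` and `:x` conversions; the output is unchanged. A minimal demonstration with a stand-in class (not an xcodeproj_file type):

```python
class Thing:
    def __init__(self, name):
        self.name = name

    def __repr__(self):
        # Equivalent to the old "<%s %r at 0x%x>" % (...) spelling.
        return "<{} {!r} at 0x{:x}>".format(
            self.__class__.__name__, self.name, id(self)
        )


print(repr(Thing("libbase.a")))  # e.g. <Thing 'libbase.a' at 0x7f3c2a1b9d30>
```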
View file

@ -39,12 +39,12 @@ def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
writer.write(">%s" % newl)
for node in self.childNodes:
node.writexml(writer, indent + addindent, addindent, newl)
writer.write("%s</%s>%s" % (indent, self.tagName, newl))
writer.write(f"{indent}</{self.tagName}>{newl}")
else:
writer.write("/>%s" % newl)
class XmlFix(object):
class XmlFix:
"""Object to manage temporary patching of xml.dom.minidom."""
def __init__(self):

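XmlFix temporarily swaps `_Replacement_writexml` into xml.dom.minidom and restores the original afterwards. The general save/patch/restore shape of such a fix looks like the sketch below; the class and method names other than `writexml` are assumptions, not XmlFix's actual API:

```python
import xml.dom.minidom


class TempWritexmlPatch:
    """Temporarily replace minidom.Element.writexml, then restore it."""

    def __init__(self, replacement):
        self._orig = xml.dom.minidom.Element.writexml
        xml.dom.minidom.Element.writexml = replacement
        self._active = True

    def cleanup(self):
        if self._active:
            xml.dom.minidom.Element.writexml = self._orig
            self._active = False

    def __del__(self):
        # Best-effort restore if the caller forgets to call cleanup().
        self.cleanup()
```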
View file

@ -15,7 +15,7 @@ with open(path.join(here, "README.md")) as in_file:
setup(
name="gyp-next",
version="0.7.0",
version="0.8.0",
description="A fork of the GYP build system for use in the Node.js projects",
long_description=long_description,
long_description_content_type="text/markdown",
@ -25,7 +25,7 @@ setup(
package_dir={"": "pylib"},
packages=["gyp", "gyp.generator"],
entry_points={"console_scripts": ["gyp=gyp:script_main"]},
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
python_requires=">=3.6",
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
@ -33,12 +33,10 @@ setup(
"License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
)

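The `python_requires=">=3.6"` line is what makes pip refuse to install gyp-next 0.8.0 on older interpreters; the removed `2.7`/`3.5` classifiers are informational only. A quick way to evaluate such a specifier yourself, assuming the third-party `packaging` library is installed:

```python
import platform

from packaging.specifiers import SpecifierSet  # pip install packaging

spec = SpecifierSet(">=3.6")
print(spec.contains(platform.python_version()))  # True on a 3.6+ release
print(spec.contains("2.7.18"))                   # False
```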
View file

@ -5,7 +5,6 @@
"""gyptest.py -- test runner for GYP tests."""
from __future__ import print_function
import argparse
import os
@ -153,7 +152,7 @@ def print_configuration_info():
sys.path.append(os.path.abspath("test/lib"))
import TestMac
print(" Mac %s %s" % (platform.mac_ver()[0], platform.mac_ver()[2]))
print(" Mac {} {}".format(platform.mac_ver()[0], platform.mac_ver()[2]))
print(" Xcode %s" % TestMac.Xcode.Version())
elif sys.platform == "win32":
sys.path.append(os.path.abspath("pylib"))
@ -168,7 +167,7 @@ def print_configuration_info():
print()
class Runner(object):
class Runner:
def __init__(self, formats, tests, gyp_options, verbose):
self.formats = formats
self.tests = tests
@ -217,10 +216,10 @@ class Runner(object):
res = "skipped"
elif proc.returncode:
res = "failed"
self.failures.append("(%s) %s" % (test, fmt))
self.failures.append(f"({test}) {fmt}")
else:
res = "passed"
res_msg = " %s %.3fs" % (res, took)
res_msg = f" {res} {took:.3f}s"
self.print_(res_msg)
if (

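The test runner's result line moves from `%`-formatting to an f-string with an inline format spec; the two spellings produce identical output:

```python
res, took = "passed", 0.25

old = " %s %.3fs" % (res, took)
new = f" {res} {took:.3f}s"

assert old == new == " passed 0.250s"
```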
View file

@ -5,7 +5,7 @@ pretty_vcproj:
For example, if I want to diff the base.vcproj project:
pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > orignal.txt
pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > original.txt
pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
And you can use your favorite diff tool to see the changes.

View file

@ -8,7 +8,6 @@
generate input suitable for graphviz to render a dependency graph of
targets."""
from __future__ import print_function
import collections
import json
@ -66,7 +65,7 @@ def WriteGraph(edges):
target = targets[0]
build_file, target_name, toolset = ParseTarget(target)
print(
' "%s" [shape=box, label="%s\\n%s"]' % (target, filename, target_name)
f' "{target}" [shape=box, label="{filename}\\n{target_name}"]'
)
else:
# Group multiple nodes together in a subgraph.
@ -74,14 +73,14 @@ def WriteGraph(edges):
print(' label = "%s"' % filename)
for target in targets:
build_file, target_name, toolset = ParseTarget(target)
print(' "%s" [label="%s"]' % (target, target_name))
print(f' "{target}" [label="{target_name}"]')
print(" }")
# Now that we've placed all the nodes within subgraphs, output all
# the edges between nodes.
for src, dsts in edges.items():
for dst in dsts:
print(' "%s" -> "%s"' % (src, dst))
print(f' "{src}" -> "{dst}"')
print("}")

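WriteGraph prints Graphviz `dot` text: one boxed node (or a per-file subgraph) per target, then one edge line per dependency. For a toy edges dict, the edge-printing loop above yields output along these lines (target names made up; subgraph wrapping omitted):

```python
edges = {
    "app.gyp:app#target": ["base.gyp:base#target", "net.gyp:net#target"],
    "net.gyp:net#target": ["base.gyp:base#target"],
}

print("digraph D {")
for src, dsts in edges.items():
    for dst in dsts:
        print(f'  "{src}" -> "{dst}"')
print("}")
# digraph D {
#   "app.gyp:app#target" -> "base.gyp:base#target"
#   "app.gyp:app#target" -> "net.gyp:net#target"
#   "net.gyp:net#target" -> "base.gyp:base#target"
# }
```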
View file

@ -6,7 +6,6 @@
"""Pretty-prints the contents of a GYP file."""
from __future__ import print_function
import sys
import re
@ -34,7 +33,7 @@ def mask_comments(input):
def quote_replace(matchobj):
return "%s%s%s%s" % (
return "{}{}{}{}".format(
matchobj.group(1),
matchobj.group(2),
"x" * len(matchobj.group(3)),

View file

@ -12,7 +12,6 @@
Then it outputs a possible build order.
"""
from __future__ import print_function
import os
import re

View file

@ -12,7 +12,6 @@
It outputs the resulting xml to stdout.
"""
from __future__ import print_function
import os
import sys
@ -21,27 +20,22 @@ from xml.dom.minidom import parse
from xml.dom.minidom import Node
__author__ = "nsylvain (Nicolas Sylvain)"
try:
cmp
except NameError:
def cmp(x, y):
return (x > y) - (x < y)
REPLACEMENTS = dict()
ARGUMENTS = None
REPLACEMENTS = dict()
class CmpTuple(object):
def cmp(x, y):
return (x > y) - (x < y)
class CmpTuple:
"""Compare function between 2 tuple."""
def __call__(self, x, y):
return cmp(x[0], y[0])
class CmpNode(object):
class CmpNode:
"""Compare function between 2 xml nodes."""
def __call__(self, x, y):
@ -72,7 +66,7 @@ class CmpNode(object):
def PrettyPrintNode(node, indent=0):
if node.nodeType == Node.TEXT_NODE:
if node.data.strip():
print("%s%s" % (" " * indent, node.data.strip()))
print("{}{}".format(" " * indent, node.data.strip()))
return
if node.childNodes:
@ -84,23 +78,23 @@ def PrettyPrintNode(node, indent=0):
# Print the main tag
if attr_count == 0:
print("%s<%s>" % (" " * indent, node.nodeName))
print("{}<{}>".format(" " * indent, node.nodeName))
else:
print("%s<%s" % (" " * indent, node.nodeName))
print("{}<{}".format(" " * indent, node.nodeName))
all_attributes = []
for (name, value) in node.attributes.items():
all_attributes.append((name, value))
all_attributes.sort(CmpTuple())
for (name, value) in all_attributes:
print('%s %s="%s"' % (" " * indent, name, value))
print('{} {}="{}"'.format(" " * indent, name, value))
print("%s>" % (" " * indent))
if node.nodeValue:
print("%s %s" % (" " * indent, node.nodeValue))
print("{} {}".format(" " * indent, node.nodeValue))
for sub_node in node.childNodes:
PrettyPrintNode(sub_node, indent=indent + 2)
print("%s</%s>" % (" " * indent, node.nodeName))
print("{}</{}>".format(" " * indent, node.nodeName))
def FlattenFilter(node):