tools: update gyp-next to 0.20.2
PR-URL: https://github.com/nodejs/node/pull/58788
Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
Reviewed-By: Chengzhong Wu <legendecas@gmail.com>

This commit is contained in:
parent e7f6cbb83d
commit e91aa7965d
23 changed files with 112 additions and 101 deletions
@@ -1,5 +1,20 @@ CHANGELOG.md
 # Changelog
 
+## [0.20.2](https://github.com/nodejs/gyp-next/compare/v0.20.1...v0.20.2) (2025-06-22)
+
+
+### Bug Fixes
+
+* Python lint import-outside-top-level ruff rule PLC0415 ([#298](https://github.com/nodejs/gyp-next/issues/298)) ([34f4df6](https://github.com/nodejs/gyp-next/commit/34f4df614936ee6a056e47406ebbe7e3c1cb6540))
+
+## [0.20.1](https://github.com/nodejs/gyp-next/compare/v0.20.0...v0.20.1) (2025-06-06)
+
+
+### Bug Fixes
+
+* Ensure Consistent Order of build_files in WriteAutoRegenerationRule ([#293](https://github.com/nodejs/gyp-next/issues/293)) ([59b5903](https://github.com/nodejs/gyp-next/commit/59b59035f4ae63419343ffdafe0f0ff511ada17d))
+* ignore failure of `GetCompilerPredefines` ([#295](https://github.com/nodejs/gyp-next/issues/295)) ([0eaea29](https://github.com/nodejs/gyp-next/commit/0eaea297f0fbb0869597aa162f66f78eb2468fad))
+
 ## [0.20.0](https://github.com/nodejs/gyp-next/compare/v0.19.1...v0.20.0) (2025-03-27)
 
 
@@ -396,8 +396,7 @@ def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
     # This may be unrecognized because it's an exclusion list. If the
     # setting name has the _excluded suffix, then check the root name.
     unrecognized = True
-    m = re.match(_EXCLUDED_SUFFIX_RE, setting)
-    if m:
+    if m := re.match(_EXCLUDED_SUFFIX_RE, setting):
         root_setting = m.group(1)
         unrecognized = root_setting not in settings
 
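Most of the hunks below apply the same mechanical cleanup: a temporary assignment followed by an `if` on the result is folded into an assignment expression (the "walrus" operator, available since Python 3.8). A minimal sketch of the before/after shape, using illustrative names rather than gyp's own:

```python
import re

# Illustrative pattern; gyp's real _EXCLUDED_SUFFIX_RE may differ.
_EXCLUDED_SUFFIX_RE = re.compile(r"(.*)_excluded$")

def root_name_before(setting):
    # Two statements: bind, then test.
    m = _EXCLUDED_SUFFIX_RE.match(setting)
    if m:
        return m.group(1)
    return None

def root_name_after(setting):
    # One statement: the walrus operator binds `m` and tests it at once.
    if m := _EXCLUDED_SUFFIX_RE.match(setting):
        return m.group(1)
    return None

print(root_name_after("warnings_excluded"))  # -> warnings
```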
@@ -219,7 +219,7 @@ def _RegistryGetValueUsingWinReg(key, value):
       contents of the registry key's value, or None on failure. Throws
       ImportError if winreg is unavailable.
     """
-    from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
+    from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx  # noqa: PLC0415
     try:
         root, subkey = key.split("\\", 1)
         assert root == "HKLM"  # Only need HKLM for now.
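The `# noqa: PLC0415` comments added throughout the diff silence ruff's import-outside-top-level rule for imports that are deferred on purpose; `winreg` above only exists on Windows, so hoisting it to module scope would break every other platform. A hedged sketch of the pattern (this helper is illustrative, not gyp's API):

```python
def registry_value_or_none(key_path, value_name):
    """Read an HKLM registry value, or return None where winreg is unavailable."""
    try:
        # Deferred on purpose: the module only exists on Windows.
        from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx  # noqa: PLC0415
    except ImportError:
        return None
    with OpenKey(HKEY_LOCAL_MACHINE, key_path) as handle:
        return QueryValueEx(handle, value_name)[0]
```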
@@ -552,8 +552,7 @@ def SelectVisualStudioVersion(version="auto", allow_fallback=True):
         "2019": ("16.0",),
         "2022": ("17.0",),
     }
-    override_path = os.environ.get("GYP_MSVS_OVERRIDE_PATH")
-    if override_path:
+    if override_path := os.environ.get("GYP_MSVS_OVERRIDE_PATH"):
         msvs_version = os.environ.get("GYP_MSVS_VERSION")
         if not msvs_version:
             raise ValueError(
@@ -489,7 +489,7 @@ def gyp_main(args):
 
     options, build_files_arg = parser.parse_args(args)
     if options.version:
-        import pkg_resources
+        import pkg_resources  # noqa: PLC0415
         print(f"v{pkg_resources.get_distribution('gyp-next').version}")
         return 0
     build_files = build_files_arg
@@ -421,8 +421,9 @@ def EnsureDirExists(path):
     except OSError:
         pass
 
 
-def GetCrossCompilerPredefines():  # -> dict
+def GetCompilerPredefines():  # -> dict
     cmd = []
+    defines = {}
 
     # shlex.split() will eat '\' in posix mode, but
     # setting posix=False will preserve extra '"' cause CreateProcess fail on Windows
@@ -439,7 +440,7 @@ def GetCrossCompilerPredefines():  # -> dict
     if CXXFLAGS := os.environ.get("CXXFLAGS"):
         cmd += shlex.split(replace_sep(CXXFLAGS))
     else:
-        return {}
+        return defines
 
     if sys.platform == "win32":
         fd, input = tempfile.mkstemp(suffix=".c")
@@ -450,17 +451,33 @@ def GetCrossCompilerPredefines():  # -> dict
                 real_cmd, shell=True,
                 capture_output=True, check=True
             ).stdout
+        except subprocess.CalledProcessError as e:
+            print(
+                "Warning: failed to get compiler predefines\n"
+                "cmd: %s\n"
+                "status: %d" % (e.cmd, e.returncode),
+                file=sys.stderr
+            )
+            return defines
         finally:
             os.unlink(input)
     else:
         input = "/dev/null"
         real_cmd = [*cmd, "-dM", "-E", "-x", "c", input]
-        stdout = subprocess.run(
-            real_cmd, shell=False,
-            capture_output=True, check=True
-        ).stdout
+        try:
+            stdout = subprocess.run(
+                real_cmd, shell=False,
+                capture_output=True, check=True
+            ).stdout
+        except subprocess.CalledProcessError as e:
+            print(
+                "Warning: failed to get compiler predefines\n"
+                "cmd: %s\n"
+                "status: %d" % (e.cmd, e.returncode),
+                file=sys.stderr
+            )
+            return defines
 
-    defines = {}
     lines = stdout.decode("utf-8").replace("\r\n", "\n").split("\n")
     for line in lines:
         if (line or "").startswith("#define "):
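These new `except subprocess.CalledProcessError` blocks are the "ignore failure of GetCompilerPredefines" fix from 0.20.1: with `check=True`, a failing compiler invocation now produces a warning and an empty result instead of an unhandled exception. A self-contained sketch of the same behaviour (the command and the parsing here are simplified):

```python
import subprocess
import sys

def predefines_or_empty(cmd):
    """Return the compiler's #define lines as a dict, or {} if the command fails."""
    defines = {}
    try:
        stdout = subprocess.run(
            cmd, shell=False, capture_output=True, check=True
        ).stdout
    except subprocess.CalledProcessError as e:
        print(
            "Warning: failed to get compiler predefines\n"
            "cmd: %s\n"
            "status: %d" % (e.cmd, e.returncode),
            file=sys.stderr,
        )
        return defines
    for line in stdout.decode("utf-8").splitlines():
        if line.startswith("#define "):
            _, name, *rest = line.split(" ", 2)
            defines[name] = rest[0] if rest else ""
    return defines

# Example: predefines_or_empty(["cc", "-dM", "-E", "-x", "c", "/dev/null"])
```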
@@ -499,7 +516,7 @@ def GetFlavor(params):
     if "flavor" in params:
         return params["flavor"]
 
-    defines = GetCrossCompilerPredefines()
+    defines = GetCompilerPredefines()
     if "__EMSCRIPTEN__" in defines:
         return "emscripten"
     if "__wasm__" in defines:
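`GetFlavor` only looks keys up in the returned dictionary, so the empty dict now returned on failure simply means no predefine-based flavor is detected and the normal platform mapping applies. A reduced sketch of that decision (the fallback value is illustrative):

```python
def flavor_from_defines(defines, fallback="linux"):
    # Compiler predefines win over the host platform; an empty dict falls through.
    if "__EMSCRIPTEN__" in defines:
        return "emscripten"
    if "__wasm__" in defines:
        return "wasm"
    return fallback

print(flavor_from_defines({"__EMSCRIPTEN__": "1"}))  # emscripten
print(flavor_from_defines({}))                       # linux (fallback)
```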
@@ -566,7 +583,8 @@ def uniquer(seq, idfun=lambda x: x):
 
 
 # Based on http://code.activestate.com/recipes/576694/.
-class OrderedSet(MutableSet):
+class OrderedSet(MutableSet):  # noqa: PLW1641
+    # TODO (cclauss): Fix eq-without-hash ruff rule PLW1641
     def __init__(self, iterable=None):
         self.end = end = []
         end += [None, end, end]  # sentinel node for doubly linked list
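PLW1641 is ruff's eq-without-hash check: a class that defines `__eq__` but not `__hash__` gets `__hash__` set to `None`, so its instances can no longer live in sets or serve as dict keys. The `# noqa` keeps the lint quiet while the TODO records the real fix. A small illustration of the hazard (not gyp code):

```python
class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

    def __eq__(self, other):
        return (self.x, self.y) == (other.x, other.y)
    # No __hash__ defined, so Python sets __hash__ = None for this class.

p = Point(1, 2)
print(p == Point(1, 2))      # True
try:
    {p}                      # needs hashing
except TypeError as err:
    print(err)               # unhashable type: 'Point'
```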
@@ -7,6 +7,7 @@
 """Unit tests for the common.py file."""
 
 import os
+import subprocess
 import sys
 import unittest
 from unittest.mock import MagicMock, patch
@@ -85,22 +86,34 @@ class TestGetFlavor(unittest.TestCase):
     @patch("os.close")
     @patch("os.unlink")
     @patch("tempfile.mkstemp")
-    def test_GetCrossCompilerPredefines(self, mock_mkstemp, mock_unlink, mock_close):
+    def test_GetCompilerPredefines(self, mock_mkstemp, mock_unlink, mock_close):
         mock_close.return_value = None
         mock_unlink.return_value = None
         mock_mkstemp.return_value = (0, "temp.c")
 
-        def mock_run(env, defines_stdout, expected_cmd):
+        def mock_run(env, defines_stdout, expected_cmd, throws=False):
             with patch("subprocess.run") as mock_run:
-                mock_process = MagicMock()
-                mock_process.returncode = 0
-                mock_process.stdout = TestGetFlavor.MockCommunicate(defines_stdout)
-                mock_run.return_value = mock_process
-                expected_input = "temp.c" if sys.platform == "win32" else "/dev/null"
+                expected_input = "temp.c" if sys.platform == "win32" else "/dev/null"
+                if throws:
+                    mock_run.side_effect = subprocess.CalledProcessError(
+                        returncode=1,
+                        cmd=[
+                            *expected_cmd,
+                            "-dM", "-E", "-x", "c", expected_input
+                        ]
+                    )
+                else:
+                    mock_process = MagicMock()
+                    mock_process.returncode = 0
+                    mock_process.stdout = TestGetFlavor.MockCommunicate(defines_stdout)
+                    mock_run.return_value = mock_process
                 with patch.dict(os.environ, env):
-                    defines = gyp.common.GetCrossCompilerPredefines()
+                    try:
+                        defines = gyp.common.GetCompilerPredefines()
+                    except Exception as e:
+                        self.fail(f"GetCompilerPredefines raised an exception: {e}")
                     flavor = gyp.common.GetFlavor({})
-                if env.get("CC_target"):
+                if env.get("CC_target") or env.get("CC"):
                     mock_run.assert_called_with(
                         [
                             *expected_cmd,
@@ -110,6 +123,9 @@ class TestGetFlavor(unittest.TestCase):
                         capture_output=True, check=True)
             return [defines, flavor]
 
+        [defines0, _] = mock_run({ "CC": "cl.exe" }, "", ["cl.exe"], True)
+        assert defines0 == {}
+
         [defines1, _] = mock_run({}, "", [])
         assert defines1 == {}
 
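The new `throws=True` branch works by giving the patched `subprocess.run` a `side_effect`, which makes the mock raise instead of returning, and the test then asserts that the failure is swallowed and `{}` comes back. A standalone sketch of that technique, reusing the illustrative helper from the earlier example:

```python
import subprocess
import unittest
from unittest.mock import patch

def predefines_or_empty(cmd):
    try:
        subprocess.run(cmd, capture_output=True, check=True)
    except subprocess.CalledProcessError:
        return {}
    return {"OK": "1"}

class CompilerFailureTest(unittest.TestCase):
    def test_failure_yields_empty_dict(self):
        with patch("subprocess.run") as mock_run:
            # side_effect: the mocked call raises, simulating a failing compiler.
            mock_run.side_effect = subprocess.CalledProcessError(returncode=1, cmd=["cl.exe"])
            self.assertEqual(predefines_or_empty(["cl.exe", "/nologo"]), {})
            mock_run.assert_called_once()

if __name__ == "__main__":
    unittest.main()
```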
@@ -900,8 +900,7 @@ class AndroidMkWriter:
         if self.type != "none":
             self.WriteTargetFlags(spec, configs, link_deps)
 
-        settings = spec.get("aosp_build_settings", {})
-        if settings:
+        if settings := spec.get("aosp_build_settings", {}):
             self.WriteLn("### Set directly by aosp_build_settings.")
             for k, v in settings.items():
                 if isinstance(v, list):
@@ -810,8 +810,7 @@ def WriteTarget(
     # link directories to targets defined after it is called.
     # As a result, link_directories must come before the target definition.
     # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
-    library_dirs = config.get("library_dirs")
-    if library_dirs is not None:
+    if (library_dirs := config.get("library_dirs")) is not None:
         output.write("link_directories(")
         for library_dir in library_dirs:
             output.write(" ")
@@ -1295,8 +1294,7 @@ def CallGenerateOutputForConfig(arglist):
 
 
 def GenerateOutput(target_list, target_dicts, data, params):
-    user_config = params.get("generator_flags", {}).get("config", None)
-    if user_config:
+    if user_config := params.get("generator_flags", {}).get("config", None):
         GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
     else:
         config_names = target_dicts[target_list[0]]["configurations"]
@@ -451,8 +451,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
     if params["options"].generator_output:
         raise NotImplementedError("--generator_output not implemented for eclipse")
 
-    user_config = params.get("generator_flags", {}).get("config", None)
-    if user_config:
+    if user_config := params.get("generator_flags", {}).get("config", None):
         GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
     else:
         config_names = target_dicts[target_list[0]]["configurations"]
@@ -78,7 +78,7 @@ def CalculateVariables(default_variables, params):
 
         # Copy additional generator configuration data from Xcode, which is shared
         # by the Mac Make generator.
-        import gyp.generator.xcode as xcode_generator
+        import gyp.generator.xcode as xcode_generator  # noqa: PLC0415
 
         global generator_additional_non_configuration_keys
         generator_additional_non_configuration_keys = getattr(
@@ -1465,8 +1465,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
                 order_only=True,
             )
 
-        pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
-        if pchdeps:
+        if pchdeps := precompiled_header.GetObjDependencies(compilable, objs):
             self.WriteLn("# Dependencies from obj files to their precompiled headers")
             for source, obj, gch in pchdeps:
                 self.WriteLn(f"{obj}: {gch}")
@@ -1600,8 +1599,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
 
         target_prefix = spec.get("product_prefix", target_prefix)
         target = spec.get("product_name", target)
-        product_ext = spec.get("product_extension")
-        if product_ext:
+        if product_ext := spec.get("product_extension"):
             target_ext = "." + product_ext
 
         return target_prefix + target + target_ext
@@ -2383,7 +2381,7 @@ def WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
         % {
             "makefile_name": makefile_name,
             "deps": replace_sep(
-                " ".join(SourceifyAndQuoteSpaces(bf) for bf in build_files)
+                " ".join(sorted(SourceifyAndQuoteSpaces(bf) for bf in build_files))
             ),
             "cmd": replace_sep(gyp.common.EncodePOSIXShellList(
                 [gyp_binary, "-fmake"] + gyp.RegenerateFlags(options) + build_files_args
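Wrapping the dependency list in `sorted()` is the 0.20.1 "Ensure Consistent Order of build_files" fix: the incoming `build_files` collection is not guaranteed to iterate in a stable order, so the regeneration rule written into the Makefile could differ between otherwise identical runs. Sorting makes the output reproducible. A tiny illustration (the quoting helper is made up):

```python
def quote_spaces(path):
    return path.replace(" ", r"\ ")

build_files = {"b/b.gyp", "a/a.gyp", "dir with space/c.gyp"}  # set order is not stable

deps_unstable = " ".join(quote_spaces(bf) for bf in build_files)
deps_stable = " ".join(sorted(quote_spaces(bf) for bf in build_files))

print(deps_stable)  # a/a.gyp b/b.gyp dir\ with\ space/c.gyp  (same every run)
```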
@@ -1364,8 +1364,7 @@ def _GetOutputTargetExt(spec):
     Returns:
       A string with the extension, or None
     """
-    target_extension = spec.get("product_extension")
-    if target_extension:
+    if target_extension := spec.get("product_extension"):
         return "." + target_extension
     return None
 
@@ -3166,8 +3165,7 @@ def _GetMSBuildAttributes(spec, config, build_file):
         "windows_driver": "Link",
         "static_library": "Lib",
     }
-    msbuild_tool = msbuild_tool_map.get(spec["type"])
-    if msbuild_tool:
+    if msbuild_tool := msbuild_tool_map.get(spec["type"]):
         msbuild_settings = config["finalized_msbuild_settings"]
         out_file = msbuild_settings[msbuild_tool].get("OutputFile")
         if out_file:
@@ -3184,8 +3182,7 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
     # there are actions.
     # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
     new_paths = []
-    cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."])[0]
-    if cygwin_dirs:
+    if cygwin_dirs := spec.get("msvs_cygwin_dirs", ["."])[0]:
         cyg_path = "$(MSBuildProjectDirectory)\\%s\\bin\\" % _FixPath(cygwin_dirs)
         new_paths.append(cyg_path)
         # TODO(jeanluc) Change the convention to have both a cygwin_dir and a
@@ -3370,7 +3367,6 @@ def _FinalizeMSBuildSettings(spec, configuration):
     prebuild = configuration.get("msvs_prebuild")
     postbuild = configuration.get("msvs_postbuild")
     def_file = _GetModuleDefinition(spec)
-    precompiled_header = configuration.get("msvs_precompiled_header")
 
     # Add the information to the appropriate tool
     # TODO(jeanluc) We could optimize and generate these settings only if
@@ -3408,7 +3404,7 @@ def _FinalizeMSBuildSettings(spec, configuration):
         msbuild_settings, "ClCompile", "DisableSpecificWarnings", disabled_warnings
     )
     # Turn on precompiled headers if appropriate.
-    if precompiled_header:
+    if precompiled_header := configuration.get("msvs_precompiled_header"):
         # While MSVC works with just file name eg. "v8_pch.h", ClangCL requires
         # the full path eg. "tools/msvs/pch/v8_pch.h" to find the file.
         # P.S. Only ClangCL defines msbuild_toolset, for MSVC it is None.
@@ -5,6 +5,7 @@
 
 import collections
 import copy
+import ctypes
 import hashlib
 import json
 import multiprocessing
@@ -263,8 +264,7 @@ class NinjaWriter:
         dir.
         """
 
-        PRODUCT_DIR = "$!PRODUCT_DIR"
-        if PRODUCT_DIR in path:
+        if (PRODUCT_DIR := "$!PRODUCT_DIR") in path:
             if product_dir:
                 path = path.replace(PRODUCT_DIR, product_dir)
             else:
@@ -272,8 +272,7 @@ class NinjaWriter:
                 path = path.replace(PRODUCT_DIR + "\\", "")
                 path = path.replace(PRODUCT_DIR, ".")
 
-        INTERMEDIATE_DIR = "$!INTERMEDIATE_DIR"
-        if INTERMEDIATE_DIR in path:
+        if (INTERMEDIATE_DIR := "$!INTERMEDIATE_DIR") in path:
             int_dir = self.GypPathToUniqueOutput("gen")
             # GypPathToUniqueOutput generates a path relative to the product dir,
             # so insert product_dir in front if it is provided.
@@ -1995,7 +1994,7 @@ def CalculateVariables(default_variables, params):
 
         # Copy additional generator configuration data from Xcode, which is shared
         # by the Mac Ninja generator.
-        import gyp.generator.xcode as xcode_generator
+        import gyp.generator.xcode as xcode_generator  # noqa: PLC0415
 
         generator_additional_non_configuration_keys = getattr(
             xcode_generator, "generator_additional_non_configuration_keys", []
@@ -2018,7 +2017,7 @@ def CalculateVariables(default_variables, params):
 
         # Copy additional generator configuration data from VS, which is shared
         # by the Windows Ninja generator.
-        import gyp.generator.msvs as msvs_generator
+        import gyp.generator.msvs as msvs_generator  # noqa: PLC0415
 
         generator_additional_non_configuration_keys = getattr(
             msvs_generator, "generator_additional_non_configuration_keys", []
@@ -2075,21 +2074,17 @@ def OpenOutput(path, mode="w"):
 
 
 def CommandWithWrapper(cmd, wrappers, prog):
-    wrapper = wrappers.get(cmd, "")
-    if wrapper:
+    if wrapper := wrappers.get(cmd, ""):
         return wrapper + " " + prog
     return prog
 
 
 def GetDefaultConcurrentLinks():
     """Returns a best-guess for a number of concurrent links."""
-    pool_size = int(os.environ.get("GYP_LINK_CONCURRENCY") or 0)
-    if pool_size:
+    if pool_size := int(os.environ.get("GYP_LINK_CONCURRENCY") or 0):
         return pool_size
 
     if sys.platform in ("win32", "cygwin"):
-        import ctypes
-
         class MEMORYSTATUSEX(ctypes.Structure):
             _fields_ = [
                 ("dwLength", ctypes.c_ulong),
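Hoisting `import ctypes` to the top of the module is the other way to satisfy PLC0415 (move the import rather than annotate it); it is safe because `ctypes` is part of the standard library everywhere, while the Windows-only `ctypes.windll` attribute is still only touched inside the `win32` branch. A rough sketch of such a guarded memory probe, assuming the standard MEMORYSTATUSEX layout (not copied from gyp):

```python
import ctypes
import sys

def total_physical_memory_bytes():
    """Best-effort physical-memory probe; returns 0 when it cannot tell."""
    if sys.platform in ("win32", "cygwin"):
        class MEMORYSTATUSEX(ctypes.Structure):
            _fields_ = [
                ("dwLength", ctypes.c_ulong),
                ("dwMemoryLoad", ctypes.c_ulong),
                ("ullTotalPhys", ctypes.c_ulonglong),
                ("ullAvailPhys", ctypes.c_ulonglong),
                ("ullTotalPageFile", ctypes.c_ulonglong),
                ("ullAvailPageFile", ctypes.c_ulonglong),
                ("ullTotalVirtual", ctypes.c_ulonglong),
                ("ullAvailVirtual", ctypes.c_ulonglong),
                ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
            ]

        stat = MEMORYSTATUSEX(dwLength=ctypes.sizeof(MEMORYSTATUSEX))
        if ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)):
            return stat.ullTotalPhys
    return 0
```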
@@ -2305,8 +2300,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
             key_prefix = re.sub(r"\.HOST$", ".host", key_prefix)
             wrappers[key_prefix] = os.path.join(build_to_root, value)
 
-    mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
-    if mac_toolchain_dir:
+    if mac_toolchain_dir := generator_flags.get("mac_toolchain_dir", None):
         wrappers["LINK"] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir
 
     if flavor == "win":
@@ -25,8 +25,7 @@ import tempfile
 
 def main(args):
     executor = MacTool()
-    exit_code = executor.Dispatch(args)
-    if exit_code is not None:
+    if (exit_code := executor.Dispatch(args)) is not None:
         sys.exit(exit_code)
 
 
@@ -142,7 +141,7 @@ class MacTool:
         # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
         # semicolon in dictionary.
         # on invalid files. Do the same kind of validation.
-        import CoreFoundation
+        import CoreFoundation  # noqa: PLC0415
 
         with open(source, "rb") as in_file:
             s = in_file.read()
@@ -247,9 +247,7 @@ class MsvsSettings:
         the target type.
         """
         ext = self.spec.get("product_extension", None)
-        if ext:
-            return ext
-        return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec["type"], "")
+        return ext or gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec["type"], "")
 
     def GetVSMacroEnv(self, base_to_build=None, config=None):
         """Get a dict of variables mapping internal VS macro names to their gyp
|
||||||
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
|
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
|
||||||
""".def files get implicitly converted to a ModuleDefinitionFile for the
|
""".def files get implicitly converted to a ModuleDefinitionFile for the
|
||||||
linker in the VS generator. Emulate that behaviour here."""
|
linker in the VS generator. Emulate that behaviour here."""
|
||||||
def_file = self.GetDefFile(gyp_to_build_path)
|
if def_file := self.GetDefFile(gyp_to_build_path):
|
||||||
if def_file:
|
|
||||||
ldflags.append('/DEF:"%s"' % def_file)
|
ldflags.append('/DEF:"%s"' % def_file)
|
||||||
|
|
||||||
def GetPGDName(self, config, expand_special):
|
def GetPGDName(self, config, expand_special):
|
||||||
|
@ -674,14 +671,11 @@ class MsvsSettings:
|
||||||
)
|
)
|
||||||
ld("DelayLoadDLLs", prefix="/DELAYLOAD:")
|
ld("DelayLoadDLLs", prefix="/DELAYLOAD:")
|
||||||
ld("TreatLinkerWarningAsErrors", prefix="/WX", map={"true": "", "false": ":NO"})
|
ld("TreatLinkerWarningAsErrors", prefix="/WX", map={"true": "", "false": ":NO"})
|
||||||
out = self.GetOutputName(config, expand_special)
|
if out := self.GetOutputName(config, expand_special):
|
||||||
if out:
|
|
||||||
ldflags.append("/OUT:" + out)
|
ldflags.append("/OUT:" + out)
|
||||||
pdb = self.GetPDBName(config, expand_special, output_name + ".pdb")
|
if pdb := self.GetPDBName(config, expand_special, output_name + ".pdb"):
|
||||||
if pdb:
|
|
||||||
ldflags.append("/PDB:" + pdb)
|
ldflags.append("/PDB:" + pdb)
|
||||||
pgd = self.GetPGDName(config, expand_special)
|
if pgd := self.GetPGDName(config, expand_special):
|
||||||
if pgd:
|
|
||||||
ldflags.append("/PGD:" + pgd)
|
ldflags.append("/PGD:" + pgd)
|
||||||
map_file = self.GetMapFileName(config, expand_special)
|
map_file = self.GetMapFileName(config, expand_special)
|
||||||
ld("GenerateMapFile", map={"true": "/MAP:" + map_file if map_file else "/MAP"})
|
ld("GenerateMapFile", map={"true": "/MAP:" + map_file if map_file else "/MAP"})
|
||||||
|
|
|
@@ -27,8 +27,7 @@ _LINK_EXE_OUT_ARG = re.compile("/OUT:(?P<out>.+)$", re.IGNORECASE)
 
 def main(args):
     executor = WinTool()
-    exit_code = executor.Dispatch(args)
-    if exit_code is not None:
+    if (exit_code := executor.Dispatch(args)) is not None:
         sys.exit(exit_code)
 
 
@@ -1350,8 +1350,7 @@ class XcodeSettings:
         if xcode_version < "0500":
             return ""
         default_sdk_path = self._XcodeSdkPath("")
-        default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
-        if default_sdk_root:
+        if default_sdk_root := XcodeSettings._sdk_root_cache.get(default_sdk_path):
             return default_sdk_root
         try:
             all_sdks = GetStdout(["xcodebuild", "-showsdks"])
@@ -1787,11 +1786,9 @@ def _GetXcodeEnv(
         env["INFOPLIST_PATH"] = xcode_settings.GetBundlePlistPath()
         env["WRAPPER_NAME"] = xcode_settings.GetWrapperName()
 
-    install_name = xcode_settings.GetInstallName()
-    if install_name:
+    if install_name := xcode_settings.GetInstallName():
         env["LD_DYLIB_INSTALL_NAME"] = install_name
-    install_name_base = xcode_settings.GetInstallNameBase()
-    if install_name_base:
+    if install_name_base := xcode_settings.GetInstallNameBase():
         env["DYLIB_INSTALL_NAME_BASE"] = install_name_base
     xcode_version, _ = XcodeVersion()
     if xcode_version >= "0500" and not env.get("SDKROOT"):
@@ -70,12 +70,11 @@ def _TargetFromSpec(old_spec, params):
 
     target_name = old_spec.get("target_name")
     product_name = old_spec.get("product_name", target_name)
-    product_extension = old_spec.get("product_extension")
 
     ninja_target = {}
     ninja_target["target_name"] = target_name
     ninja_target["product_name"] = product_name
-    if product_extension:
+    if product_extension := old_spec.get("product_extension"):
         ninja_target["product_extension"] = product_extension
     ninja_target["toolset"] = old_spec.get("toolset")
     ninja_target["default_configuration"] = old_spec.get("default_configuration")
@@ -183,8 +183,7 @@ def SourceTreeAndPathFromPath(input_path):
               'path'            (None, 'path')
     """
 
-    source_group_match = _path_leading_variable.match(input_path)
-    if source_group_match:
+    if source_group_match := _path_leading_variable.match(input_path):
         source_tree = source_group_match.group(1)
         output_path = source_group_match.group(3)  # This may be None.
     else:
@@ -390,8 +389,7 @@ class XCObject:
     def Hashables(self):
         hashables = [self.__class__.__name__]
 
-        name = self.Name()
-        if name is not None:
+        if (name := self.Name()) is not None:
             hashables.append(name)
 
         hashables.extend(self._hashables)
@@ -1051,8 +1049,7 @@ class XCHierarchicalElement(XCObject):
         # including paths with a sourceTree, they'll still inherit their parents'
         # hashables, even though the paths aren't relative to their parents. This
        # is not expected to be much of a problem in practice.
-        path = self.PathFromSourceTreeAndPath()
-        if path is not None:
+        if (path := self.PathFromSourceTreeAndPath()) is not None:
             components = path.split(posixpath.sep)
             for component in components:
                 hashables.append(self.__class__.__name__ + ".path")
@@ -2109,8 +2106,7 @@ class PBXCopyFilesBuildPhase(XCBuildPhase):
         specifically, "$(DIR)/path".
         """
 
-        path_tree_match = self.path_tree_re.search(path)
-        if path_tree_match:
+        if path_tree_match := self.path_tree_re.search(path):
             path_tree = path_tree_match.group(1)
             if path_tree in self.path_tree_first_to_subfolder:
                 subfolder = self.path_tree_first_to_subfolder[path_tree]
@@ -48,8 +48,7 @@ class ELFFile:
             ident = self._read("16B")
         except struct.error:
             raise ELFInvalid("unable to parse identification")
-        magic = bytes(ident[:4])
-        if magic != b"\x7fELF":
+        if (magic := bytes(ident[:4])) != b"\x7fELF":
             raise ELFInvalid(f"invalid magic: {magic!r}")
 
         self.capacity = ident[4]  # Format for program header (bitness).
@@ -166,8 +166,7 @@ def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
 
 def format_full_version(info: "sys._version_info") -> str:
     version = "{0.major}.{0.minor}.{0.micro}".format(info)
-    kind = info.releaselevel
-    if kind != "final":
+    if (kind := info.releaselevel) != "final":
         version += kind[0] + str(info.serial)
     return version
 
@@ -591,8 +591,7 @@ class _Validator(Generic[T]):
                 f"{{field}} must be one of {list(content_types)}, not {value!r}"
             )
 
-        charset = parameters.get("charset", "UTF-8")
-        if charset != "UTF-8":
+        if (charset := parameters.get("charset", "UTF-8")) != "UTF-8":
             raise self._invalid_metadata(
                 f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
             )
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "gyp-next"
-version = "0.20.0"
+version = "0.20.2"
 authors = [
   { name="Node.js contributors", email="ryzokuken@disroot.org" },
 ]
@@ -39,7 +39,6 @@ gyp = "gyp:script_main"
 [tool.ruff]
 extend-exclude = ["pylib/packaging"]
 line-length = 88
-target-version = "py37"
 
 [tool.ruff.lint]
 select = [
@@ -148,13 +148,13 @@ def print_configuration_info():
     print("Test configuration:")
     if sys.platform == "darwin":
         sys.path.append(os.path.abspath("test/lib"))
-        import TestMac
+        import TestMac  # noqa: PLC0415
 
         print(f" Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}")
         print(f" Xcode {TestMac.Xcode.Version()}")
     elif sys.platform == "win32":
         sys.path.append(os.path.abspath("pylib"))
-        import gyp.MSVSVersion
+        import gyp.MSVSVersion  # noqa: PLC0415
 
         print(" Win %s %s\n" % platform.win32_ver()[0:2])
         print(" MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description())