mirror of
https://github.com/electron/node-gyp.git
synced 2025-08-15 12:58:19 +02:00
build: more Python 3 compat, replace compile with ast
Make Python 3 compatibility changes so the code works in both Python 2 and Python 3. Especially, make changes required because the compiler module was removed in Python 3 in favor of the ast module that exists in both Python 2 and Python 3. PR-URL: https://github.com/nodejs/node-gyp/pull/1820 Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl> Reviewed-By: Rod Vagg <r@va.gg> Reviewed-By: Richard Lau <riclau@uk.ibm.com>
This commit is contained in:
parent
573607981e
commit
4ef83eddd0
7 changed files with 50 additions and 56 deletions
|
@ -671,7 +671,7 @@ class TargetCalculator(object):
|
|||
assert self.is_build_impacted();
|
||||
# Compile targets are found by searching up from changed targets.
|
||||
# Reset the visited status for _GetBuildTargets.
|
||||
for target in self._name_to_target.itervalues():
|
||||
for target in self._name_to_target.values():
|
||||
target.visited = False
|
||||
|
||||
supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
|
||||
|
|
|
@ -272,7 +272,7 @@ def WriteMacros(out, eclipse_langs, defines):
|
|||
out.write(' <language name="holder for library settings"></language>\n')
|
||||
for lang in eclipse_langs:
|
||||
out.write(' <language name="%s">\n' % lang)
|
||||
for key in sorted(defines.iterkeys()):
|
||||
for key in sorted(defines):
|
||||
out.write(' <macro><name>%s</name><value>%s</value></macro>\n' %
|
||||
(escape(key), escape(defines[key])))
|
||||
out.write(' </language>\n')
|
||||
|
|
|
@ -821,7 +821,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
gyp.xcode_emulation.MacPrefixHeader(
|
||||
self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)),
|
||||
self.Pchify))
|
||||
sources = filter(Compilable, all_sources)
|
||||
sources = list(filter(Compilable, all_sources))
|
||||
if sources:
|
||||
self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
|
||||
extensions = set([os.path.splitext(s)[1] for s in sources])
|
||||
|
@ -950,7 +950,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
'%s%s'
|
||||
% (name, cd_action, command))
|
||||
self.WriteLn()
|
||||
outputs = map(self.Absolutify, outputs)
|
||||
outputs = [self.Absolutify(output) for output in outputs]
|
||||
# The makefile rules are all relative to the top dir, but the gyp actions
|
||||
# are defined relative to their containing dir. This replaces the obj
|
||||
# variable for the action rule with an absolute version so that the output
|
||||
|
@ -974,7 +974,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
|
||||
inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
|
||||
|
||||
self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)),
|
||||
self.WriteDoCmd(outputs, [Sourceify(self.Absolutify(i)) for i in inputs],
|
||||
part_of_all=part_of_all, command=name)
|
||||
|
||||
# Stuff the outputs in a variable so we can refer to them later.
|
||||
|
@ -1023,8 +1023,8 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
extra_sources += outputs
|
||||
if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
|
||||
extra_mac_bundle_resources += outputs
|
||||
inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
|
||||
rule.get('inputs', [])))
|
||||
inputs = [Sourceify(self.Absolutify(i)) for i
|
||||
in [rule_source] + rule.get('inputs', [])]
|
||||
actions = ['$(call do_cmd,%s_%d)' % (name, count)]
|
||||
|
||||
if name == 'resources_grit':
|
||||
|
@ -1040,7 +1040,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
|
||||
inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
|
||||
|
||||
outputs = map(self.Absolutify, outputs)
|
||||
outputs = [self.Absolutify(output) for output in outputs]
|
||||
all_outputs += outputs
|
||||
# Only write the 'obj' and 'builddir' rules for the "primary" output
|
||||
# (:1); it's superfluous for the "extra outputs", and this avoids
|
||||
|
@ -1147,7 +1147,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
path = gyp.xcode_emulation.ExpandEnvVars(path, env)
|
||||
self.WriteDoCmd([output], [path], 'copy', part_of_all)
|
||||
outputs.append(output)
|
||||
self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs))))
|
||||
self.WriteLn('%s = %s' % (variable, ' '.join(QuoteSpaces(o) for o in outputs)))
|
||||
extra_outputs.append('$(%s)' % variable)
|
||||
self.WriteLn()
|
||||
|
||||
|
@ -1158,7 +1158,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
|
||||
for output, res in gyp.xcode_emulation.GetMacBundleResources(
|
||||
generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
|
||||
map(Sourceify, map(self.Absolutify, resources))):
|
||||
[Sourceify(self.Absolutify(r)) for r in resources]):
|
||||
_, ext = os.path.splitext(output)
|
||||
if ext != '.xcassets':
|
||||
# Make does not supports '.xcassets' emulation.
|
||||
|
@ -1238,11 +1238,11 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname)
|
||||
includes = config.get('include_dirs')
|
||||
if includes:
|
||||
includes = map(Sourceify, map(self.Absolutify, includes))
|
||||
includes = [Sourceify(self.Absolutify(i)) for i in includes]
|
||||
self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
|
||||
|
||||
compilable = filter(Compilable, sources)
|
||||
objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable)))
|
||||
compilable = list(filter(Compilable, sources))
|
||||
objs = [self.Objectify(self.Absolutify(Target(c))) for c in compilable]
|
||||
self.WriteList(objs, 'OBJS')
|
||||
|
||||
for obj in objs:
|
||||
|
@ -1314,7 +1314,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
|
||||
# If there are any object files in our input file list, link them into our
|
||||
# output.
|
||||
extra_link_deps += filter(Linkable, sources)
|
||||
extra_link_deps += list(filter(Linkable, sources))
|
||||
|
||||
self.WriteLn()
|
||||
|
||||
|
@ -1564,7 +1564,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
|
||||
# Bundle dependencies. Note that the code below adds actions to this
|
||||
# target, so if you move these two lines, move the lines below as well.
|
||||
self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS')
|
||||
self.WriteList([QuoteSpaces(dep) for dep in bundle_deps], 'BUNDLE_DEPS')
|
||||
self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output))
|
||||
|
||||
# After the framework is built, package it. Needs to happen before
|
||||
|
@ -1598,7 +1598,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
if self.type == 'executable':
|
||||
self.WriteLn('%s: LD_INPUTS := %s' % (
|
||||
QuoteSpaces(self.output_binary),
|
||||
' '.join(map(QuoteSpaces, link_deps))))
|
||||
' '.join(QuoteSpaces(dep) for dep in link_deps)))
|
||||
if self.toolset == 'host' and self.flavor == 'android':
|
||||
self.WriteDoCmd([self.output_binary], link_deps, 'link_host',
|
||||
part_of_all, postbuilds=postbuilds)
|
||||
|
@ -1620,7 +1620,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
elif self.type == 'shared_library':
|
||||
self.WriteLn('%s: LD_INPUTS := %s' % (
|
||||
QuoteSpaces(self.output_binary),
|
||||
' '.join(map(QuoteSpaces, link_deps))))
|
||||
' '.join(QuoteSpaces(dep) for dep in link_deps)))
|
||||
self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all,
|
||||
postbuilds=postbuilds)
|
||||
elif self.type == 'loadable_module':
|
||||
|
@ -1746,8 +1746,8 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
output is just a name to run the rule
|
||||
command: (optional) command name to generate unambiguous labels
|
||||
"""
|
||||
outputs = map(QuoteSpaces, outputs)
|
||||
inputs = map(QuoteSpaces, inputs)
|
||||
outputs = [QuoteSpaces(o) for o in outputs]
|
||||
inputs = [QuoteSpaces(i) for i in inputs]
|
||||
|
||||
if comment:
|
||||
self.WriteLn('# ' + comment)
|
||||
|
@ -1836,7 +1836,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
|||
default_cpp_ext = ext
|
||||
self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext)
|
||||
|
||||
self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)),
|
||||
self.WriteList(list(map(self.Absolutify, filter(Compilable, all_sources))),
|
||||
'LOCAL_SRC_FILES')
|
||||
|
||||
# Filter out those which do not match prefix and suffix and produce
|
||||
|
@ -1979,7 +1979,7 @@ def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
|
|||
"%(makefile_name)s: %(deps)s\n"
|
||||
"\t$(call do_cmd,regen_makefile)\n\n" % {
|
||||
'makefile_name': makefile_name,
|
||||
'deps': ' '.join(map(SourceifyAndQuoteSpaces, build_files)),
|
||||
'deps': ' '.join(SourceifyAndQuoteSpaces(bf) for bf in build_files),
|
||||
'cmd': gyp.common.EncodePOSIXShellList(
|
||||
[gyp_binary, '-fmake'] +
|
||||
gyp.RegenerateFlags(options) +
|
||||
|
|
|
@ -2691,7 +2691,7 @@ def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
|
|||
|
||||
platform_name = None
|
||||
msvs_windows_target_platform_version = None
|
||||
for configuration in spec['configurations'].itervalues():
|
||||
for configuration in spec['configurations'].values():
|
||||
platform_name = platform_name or _ConfigPlatform(configuration)
|
||||
msvs_windows_target_platform_version = \
|
||||
msvs_windows_target_platform_version or \
|
||||
|
@ -3252,7 +3252,7 @@ def _GetMSBuildProjectReferences(project):
|
|||
['Project', guid],
|
||||
['ReferenceOutputAssembly', 'false']
|
||||
]
|
||||
for config in dependency.spec.get('configurations', {}).itervalues():
|
||||
for config in dependency.spec.get('configurations', {}).values():
|
||||
if config.get('msvs_use_library_dependency_inputs', 0):
|
||||
project_ref.append(['UseLibraryDependencyInputs', 'true'])
|
||||
break
|
||||
|
@ -3321,7 +3321,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
|
|||
extension_to_rule_name, _GetUniquePlatforms(spec))
|
||||
missing_sources = _VerifySourcesExist(sources, project_dir)
|
||||
|
||||
for configuration in configurations.itervalues():
|
||||
for configuration in configurations.values():
|
||||
_FinalizeMSBuildSettings(spec, configuration)
|
||||
|
||||
# Add attributes to root element
|
||||
|
|
|
@ -4,14 +4,8 @@
|
|||
|
||||
from __future__ import print_function
|
||||
|
||||
from compiler.ast import Const
|
||||
from compiler.ast import Dict
|
||||
from compiler.ast import Discard
|
||||
from compiler.ast import List
|
||||
from compiler.ast import Module
|
||||
from compiler.ast import Node
|
||||
from compiler.ast import Stmt
|
||||
import compiler
|
||||
import ast
|
||||
|
||||
import gyp.common
|
||||
import gyp.simple_copy
|
||||
import multiprocessing
|
||||
|
@ -184,43 +178,39 @@ def CheckedEval(file_contents):
|
|||
Note that this is slower than eval() is.
|
||||
"""
|
||||
|
||||
ast = compiler.parse(file_contents)
|
||||
assert isinstance(ast, Module)
|
||||
c1 = ast.getChildren()
|
||||
assert c1[0] is None
|
||||
assert isinstance(c1[1], Stmt)
|
||||
c2 = c1[1].getChildren()
|
||||
assert isinstance(c2[0], Discard)
|
||||
c3 = c2[0].getChildren()
|
||||
assert len(c3) == 1
|
||||
return CheckNode(c3[0], [])
|
||||
syntax_tree = ast.parse(file_contents)
|
||||
assert isinstance(syntax_tree, ast.Module)
|
||||
c1 = syntax_tree.body
|
||||
assert len(c1) == 1
|
||||
c2 = c1[0]
|
||||
assert isinstance(c2, ast.Expr)
|
||||
return CheckNode(c2.value, [])
|
||||
|
||||
|
||||
def CheckNode(node, keypath):
|
||||
if isinstance(node, Dict):
|
||||
if isinstance(node, ast.Dict):
|
||||
c = node.getChildren()
|
||||
dict = {}
|
||||
for n in range(0, len(c), 2):
|
||||
assert isinstance(c[n], Const)
|
||||
key = c[n].getChildren()[0]
|
||||
for key, value in zip(node.keys, node.values):
|
||||
assert isinstance(key, ast.Str)
|
||||
key = key.s
|
||||
if key in dict:
|
||||
raise GypError("Key '" + key + "' repeated at level " +
|
||||
repr(len(keypath) + 1) + " with key path '" +
|
||||
'.'.join(keypath) + "'")
|
||||
kp = list(keypath) # Make a copy of the list for descending this node.
|
||||
kp.append(key)
|
||||
dict[key] = CheckNode(c[n + 1], kp)
|
||||
dict[key] = CheckNode(value, kp)
|
||||
return dict
|
||||
elif isinstance(node, List):
|
||||
c = node.getChildren()
|
||||
elif isinstance(node, ast.List):
|
||||
children = []
|
||||
for index, child in enumerate(c):
|
||||
for index, child in enumerate(node.elts):
|
||||
kp = list(keypath) # Copy list.
|
||||
kp.append(repr(index))
|
||||
children.append(CheckNode(child, kp))
|
||||
return children
|
||||
elif isinstance(node, Const):
|
||||
return node.getChildren()[0]
|
||||
elif isinstance(node, ast.Str):
|
||||
return node.s
|
||||
else:
|
||||
raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
|
||||
"': " + repr(node))
|
||||
|
@ -954,8 +944,12 @@ def ExpandVariables(input, phase, variables, build_file):
|
|||
else:
|
||||
replacement = variables[contents]
|
||||
|
||||
if isinstance(replacement, bytes) and not isinstance(replacement, str):
|
||||
replacement = replacement.decode("utf-8") # done on Python 3 only
|
||||
if type(replacement) is list:
|
||||
for item in replacement:
|
||||
if isinstance(item, bytes) and not isinstance(item, str):
|
||||
item = item.decode("utf-8") # done on Python 3 only
|
||||
if not contents[-1] == '/' and type(item) not in (str, int):
|
||||
raise GypError('Variable ' + contents +
|
||||
' must expand to a string or list of strings; ' +
|
||||
|
@ -1847,7 +1841,7 @@ def VerifyNoGYPFileCircularDependencies(targets):
|
|||
# Create a DependencyGraphNode for each gyp file containing a target. Put
|
||||
# it into a dict for easy access.
|
||||
dependency_nodes = {}
|
||||
for target in targets.iterkeys():
|
||||
for target in targets:
|
||||
build_file = gyp.common.BuildFile(target)
|
||||
if not build_file in dependency_nodes:
|
||||
dependency_nodes[build_file] = DependencyGraphNode(build_file)
|
||||
|
@ -1878,7 +1872,7 @@ def VerifyNoGYPFileCircularDependencies(targets):
|
|||
|
||||
# Files that have no dependencies are treated as dependent on root_node.
|
||||
root_node = DependencyGraphNode(None)
|
||||
for build_file_node in dependency_nodes.itervalues():
|
||||
for build_file_node in dependency_nodes.values():
|
||||
if len(build_file_node.dependencies) == 0:
|
||||
build_file_node.dependencies.append(root_node)
|
||||
root_node.dependents.append(build_file_node)
|
||||
|
|
|
@ -1636,7 +1636,7 @@ def _HasIOSTarget(targets):
|
|||
def _AddIOSDeviceConfigurations(targets):
|
||||
"""Clone all targets and append -iphoneos to the name. Configure these targets
|
||||
to build for iOS devices and use correct architectures for those builds."""
|
||||
for target_dict in targets.itervalues():
|
||||
for target_dict in targets.values():
|
||||
toolset = target_dict['toolset']
|
||||
configs = target_dict['configurations']
|
||||
for config_name, config_dict in dict(configs).items():
|
||||
|
|
|
@ -85,7 +85,7 @@ def _TargetFromSpec(old_spec, params):
|
|||
"%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
|
||||
|
||||
if 'configurations' in old_spec:
|
||||
for config in old_spec['configurations'].iterkeys():
|
||||
for config in old_spec['configurations']:
|
||||
old_xcode_settings = \
|
||||
old_spec['configurations'][config].get('xcode_settings', {})
|
||||
if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue