mirror of
https://github.com/electron/node-gyp.git
synced 2025-08-15 12:58:19 +02:00
gyp: update gyp to e1c8fcf7
Copied the "tools/gyp" dir over from io.js commit cb381fe3e08e8a249738f791a984f452ff70c68d. Includes io.js commits: * nodejs/io.js@21f4fb6215 * nodejs/io.js@15f058f609 * nodejs/io.js@eb459c8151 * nodejs/io.js@644ece1f67
This commit is contained in:
parent
7ea00c914c
commit
b768d20f38
40 changed files with 3805 additions and 1438 deletions
|
@ -16,8 +16,6 @@ PYLINT_BLACKLIST = [
|
||||||
'test/lib/TestCmd.py',
|
'test/lib/TestCmd.py',
|
||||||
'test/lib/TestCommon.py',
|
'test/lib/TestCommon.py',
|
||||||
'test/lib/TestGyp.py',
|
'test/lib/TestGyp.py',
|
||||||
# Needs style fix.
|
|
||||||
'pylib/gyp/generator/xcode.py',
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@ -25,6 +23,10 @@ PYLINT_DISABLED_WARNINGS = [
|
||||||
# TODO: fix me.
|
# TODO: fix me.
|
||||||
# Many tests include modules they don't use.
|
# Many tests include modules they don't use.
|
||||||
'W0611',
|
'W0611',
|
||||||
|
# Possible unbalanced tuple unpacking with sequence.
|
||||||
|
'W0632',
|
||||||
|
# Attempting to unpack a non-sequence.
|
||||||
|
'W0633',
|
||||||
# Include order doesn't properly include local files?
|
# Include order doesn't properly include local files?
|
||||||
'F0401',
|
'F0401',
|
||||||
# Some use of built-in names.
|
# Some use of built-in names.
|
||||||
|
@ -40,6 +42,10 @@ PYLINT_DISABLED_WARNINGS = [
|
||||||
'W0613',
|
'W0613',
|
||||||
# String has no effect (docstring in wrong place).
|
# String has no effect (docstring in wrong place).
|
||||||
'W0105',
|
'W0105',
|
||||||
|
# map/filter on lambda could be replaced by comprehension.
|
||||||
|
'W0110',
|
||||||
|
# Use of eval.
|
||||||
|
'W0123',
|
||||||
# Comma not followed by space.
|
# Comma not followed by space.
|
||||||
'C0324',
|
'C0324',
|
||||||
# Access to a protected member.
|
# Access to a protected member.
|
||||||
|
@ -56,6 +62,8 @@ PYLINT_DISABLED_WARNINGS = [
|
||||||
'E1101',
|
'E1101',
|
||||||
# Dangerous default {}.
|
# Dangerous default {}.
|
||||||
'W0102',
|
'W0102',
|
||||||
|
# Cyclic import.
|
||||||
|
'R0401',
|
||||||
# Others, too many to sort.
|
# Others, too many to sort.
|
||||||
'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
|
'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
|
||||||
'R0201', 'E0101', 'C0321',
|
'R0201', 'E0101', 'C0321',
|
||||||
|
@ -116,5 +124,16 @@ def CheckChangeOnCommit(input_api, output_api):
|
||||||
return report
|
return report
|
||||||
|
|
||||||
|
|
||||||
def GetPreferredTrySlaves():
|
TRYBOTS = [
|
||||||
return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac', 'gyp-android']
|
'gyp-win32',
|
||||||
|
'gyp-win64',
|
||||||
|
'gyp-linux',
|
||||||
|
'gyp-mac',
|
||||||
|
'gyp-android'
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def GetPreferredTryMasters(_, change):
|
||||||
|
return {
|
||||||
|
'tryserver.nacl': { t: set(['defaulttests']) for t in TRYBOTS },
|
||||||
|
}
|
||||||
|
|
466
gyp/buildbot/aosp_manifest.xml
Normal file
466
gyp/buildbot/aosp_manifest.xml
Normal file
|
@ -0,0 +1,466 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<manifest>
|
||||||
|
<remote name="aosp"
|
||||||
|
fetch=".."
|
||||||
|
review="https://android-review.googlesource.com/" />
|
||||||
|
<default revision="master"
|
||||||
|
remote="aosp"
|
||||||
|
sync-j="4" />
|
||||||
|
|
||||||
|
<project groups="device,flo" name="device/asus/deb" revision="0ce3a783d549d023ddc553a04fed717ffb2ff533" />
|
||||||
|
<project groups="device,flo" name="device/asus/flo" revision="55ea79b11f9f82b2aa03f44a3429112fc5c06d07" />
|
||||||
|
<project groups="device,flo" name="device/asus/flo-kernel" revision="6d74123947016999ae62d9c3067ae97782fdba21" />
|
||||||
|
<project groups="device,grouper" name="device/asus/grouper" revision="78fe48f44e90ef3a7eceab5465dbad63cd16ce88" />
|
||||||
|
<project groups="device,grouper" name="device/asus/tilapia" revision="e5033bc80764067cbb1c9dc3970f0718e35ae8c7" />
|
||||||
|
<project name="device/common" revision="6a2995683de147791e516aae2ccb31fdfbe2ad30" />
|
||||||
|
<project groups="pdk" name="device/generic/armv7-a-neon" revision="8bcf4b7a6380b26c2b42dae00dd8443de2a8e12c" />
|
||||||
|
<project groups="pdk" name="device/generic/common" revision="11c092a6cbfcf6207f07a9a8e3398e747e7f5461" />
|
||||||
|
<project groups="pdk" name="device/generic/goldfish" revision="638ee524f83053613c47ddea22c4bf98a0175c2f" />
|
||||||
|
<project groups="pdk" name="device/generic/mini-emulator-armv7-a-neon" revision="2a7ade61377b7906187ab46b5859c896baa0ab0e" />
|
||||||
|
<project groups="pdk" name="device/generic/mini-emulator-mips" revision="2ff06dda649ba43507a911057f7854a3373ef7d6" />
|
||||||
|
<project groups="pdk" name="device/generic/mini-emulator-x86" revision="a2f05b8c5259c232be5b029b2d5e721ba3f70917" />
|
||||||
|
<project groups="pdk" name="device/generic/mips" revision="dd06e7883227cc68bb1206584c8e3a768e49d02d" />
|
||||||
|
<project name="device/generic/qemu" revision="bd2543e810f3fa56e9dcfe301b893832534c85db" />
|
||||||
|
<project groups="pdk" name="device/generic/x86" revision="f111878fb41e2bdf4eb092d1edf0eb53cc5d0153" />
|
||||||
|
<project groups="device" name="device/google/accessory/arduino" revision="abc5159a3ca9dbb5c7e364a1eab99901a4440ac5" />
|
||||||
|
<project groups="device" name="device/google/accessory/demokit" revision="7dfe7f89a3b174709c773fe319531006e46440d9" />
|
||||||
|
<project groups="device,hammerhead" name="device/lge/hammerhead" revision="ec229bf178b891cc18552833f2de743acf390a7c" />
|
||||||
|
<project groups="device,hammerhead" name="device/lge/hammerhead-kernel" revision="a1dc58be96e7a71496e3e89079ac704930f982f2" />
|
||||||
|
<project groups="device,mako" name="device/lge/mako" revision="7e5f0f313819ffa3b45cd4208ab552f446c33936" />
|
||||||
|
<project groups="device,mako" name="device/lge/mako-kernel" revision="b7de901b8cb86036e9b92b3b6f188b45a524b125" />
|
||||||
|
<project groups="pdk" name="device/sample" revision="096f9eb5763fd2766fcbbe4f6b9da51c87f61797" />
|
||||||
|
<project groups="device,manta" name="device/samsung/manta" revision="78fe248ddb214aca2215df46be83882dc50c9283" />
|
||||||
|
<project groups="pdk" name="platform/abi/cpp" path="abi/cpp" revision="a0f99286d0909f7a30b0bee742bec2a0b62c4dd0" />
|
||||||
|
<project name="platform/art" path="art" revision="36b111c7d3d635e262114dabde4c26952c7dcbe6" />
|
||||||
|
<project groups="pdk" name="platform/bionic" path="bionic" revision="36bacd237de931c48714d1a8aa4aa9522283e407" />
|
||||||
|
<project name="platform/bootable/bootloader/legacy" path="bootable/bootloader/legacy" revision="3c491d6efb8ff2534a6934702760a6273f197918" />
|
||||||
|
<project name="platform/bootable/diskinstaller" path="bootable/diskinstaller" revision="ca40959a8caafa0df6a5c3d845e2afe6b252093f" />
|
||||||
|
<project groups="pdk" name="platform/bootable/recovery" path="bootable/recovery" revision="974fe112ae6df95ca6d49688d6e3e459d87e16de" />
|
||||||
|
<project groups="pdk" name="platform/build" path="build" revision="d23798bfdc9bb34909177c3c5f06f0c97cc9897e" >
|
||||||
|
<copyfile dest="Makefile" src="core/root.mk"/>
|
||||||
|
</project>
|
||||||
|
<project groups="cts" name="platform/cts" path="cts" revision="e15e8f846e19816e18ee3293c5b99f78463be28e" />
|
||||||
|
<project name="platform/dalvik" path="dalvik" revision="fb5b0d5bc46bce9c8ed6b1150498d6e145811a7d" />
|
||||||
|
<project name="platform/developers/build" path="developers/build" revision="75c5c41b06f045c3304b1b19d8250f04a8da8f10" />
|
||||||
|
<project name="platform/developers/demos" path="developers/demos" revision="64526120cd8da89bcb9a48acf95307d2c172a6e8" />
|
||||||
|
<project name="platform/developers/docs" path="developers/docs" revision="c0b835ddd9acc27176dc9a0f7d1aa2faf5d51806" />
|
||||||
|
<project name="platform/developers/samples/android" path="developers/samples/android" revision="dea82fa23f038d66bd9cfdff2afb8ef22add1c4f" />
|
||||||
|
<project name="platform/development" path="development" revision="0efeb2c66bff9b36feecd9315d14d2afb46e4669" />
|
||||||
|
<project name="platform/docs/source.android.com" path="docs/source.android.com" revision="c4795fa0df2c5fb4832ae65482944e8e5400e4f6" />
|
||||||
|
<project groups="pdk" name="platform/external/aac" path="external/aac" revision="35f30c5ab8089f38681d2fdd416c00aebef5a7ff" />
|
||||||
|
<project name="platform/external/android-clat" path="external/android-clat" revision="18921713780edb45ceef327d5fcf3387818300f3" />
|
||||||
|
<project name="platform/external/android-mock" path="external/android-mock" revision="4fe497660c2e939300dc5b743d662aef458b1726" />
|
||||||
|
<project name="platform/external/ant-glob" path="external/ant-glob" revision="0f189400fd2a36bf11bfb058e7f3917eb7ed163a" />
|
||||||
|
<project name="platform/external/antlr" path="external/antlr" revision="47997265eeb7d954a32ece693bbe6dab740872dd" />
|
||||||
|
<project name="platform/external/apache-harmony" path="external/apache-harmony" revision="6942e08fdbbd8402c9deabb0f60c8c871194b244" />
|
||||||
|
<project name="platform/external/apache-http" path="external/apache-http" revision="85ed0e10781c3c57343300a02556dd5131c450aa" />
|
||||||
|
<project name="platform/external/apache-qp" path="external/apache-qp" revision="64ea622b23e6612eb8e7dcae6bfd4314beb022a8" />
|
||||||
|
<project name="platform/external/apache-xml" path="external/apache-xml" revision="00ee83ff1bd827a852065986ed0da7a3ded57a55" />
|
||||||
|
<project name="platform/external/arduino" path="external/arduino" revision="d06daf9bbc46838400461eb8e15842974e38d82a" />
|
||||||
|
<project groups="pdk" name="platform/external/bison" path="external/bison" revision="c2418b886165add7f5a31fc5609f0ce2d004a90e" />
|
||||||
|
<project name="platform/external/blktrace" path="external/blktrace" revision="d345431f16b8f76f30a58193ff2b26d5853e1109" />
|
||||||
|
<project groups="pdk" name="platform/external/bluetooth/bluedroid" path="external/bluetooth/bluedroid" revision="3b4040093ddf0e0025d0dd034aa65078bb695514" />
|
||||||
|
<project name="platform/external/bouncycastle" path="external/bouncycastle" revision="234720ebe66540a53cff98b2448dddbc884bd09f" />
|
||||||
|
<project groups="pdk" name="platform/external/bsdiff" path="external/bsdiff" revision="6f503758fad2cbcf8359e8f0af32e4d79a2a48ae" />
|
||||||
|
<project groups="pdk" name="platform/external/bzip2" path="external/bzip2" revision="1cb636bd8e9e5cdfd5d5b2909a122f6e80db62de" />
|
||||||
|
<project name="platform/external/ceres-solver" path="external/ceres-solver" revision="399f7d09e0c45af54b77b4ab9508d6f23759b927" />
|
||||||
|
<project groups="pdk" name="platform/external/checkpolicy" path="external/checkpolicy" revision="c66ac590eebc731f6021f267ebea208e87d8f04f" />
|
||||||
|
<project name="platform/external/chromium" path="external/chromium" revision="f294081d501ad98b7d7f50bc73f291063caf2c5f" />
|
||||||
|
<project name="platform/external/chromium-libpac" path="external/chromium-libpac" revision="09cf45bf5a650fe1abd50b9d61c2670a62f62767" />
|
||||||
|
<project groups="pdk" name="platform/external/chromium-trace" path="external/chromium-trace" revision="8252ae6b83ea65cf871e7981e981da07379f5a0f" />
|
||||||
|
<project name="platform/external/chromium_org" path="external/chromium_org" revision="43165a58c6167882aabb62f470c4e4d21f807d79" />
|
||||||
|
<project name="platform/external/chromium_org/sdch/open-vcdiff" path="external/chromium_org/sdch/open-vcdiff" revision="6d634da5463d9bc5fc88f86aec1d2ac4fe6f612e" />
|
||||||
|
<project name="platform/external/chromium_org/testing/gtest" path="external/chromium_org/testing/gtest" revision="65df883d09205766c521f2e6c126f4070a423141" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/WebKit" path="external/chromium_org/third_party/WebKit" revision="a25b4978c2c50d573391a6d56a0e8ad35f52ffc8" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/angle" path="external/chromium_org/third_party/angle" revision="8b77c2b2231f7d895979f6341e1ad1964a654ce4" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/boringssl/src" path="external/chromium_org/third_party/boringssl/src" revision="85fb7432d3c851200342dd982b211f8dac860687" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/brotli/src" path="external/chromium_org/third_party/brotli/src" revision="96f298ac43a9216b251d6c3264d8f5ada89e107f" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/eyesfree/src/android/java/src/com/googlecode/eyesfree/braille" path="external/chromium_org/third_party/eyesfree/src/android/java/src/com/googlecode/eyesfree/braille" revision="bb4c72f1deb0b8b2b0468b0bf1050462ebcf6135" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/freetype" path="external/chromium_org/third_party/freetype" revision="dc263f2ee2786739da036911ed8b29c07a639ab9" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/icu" path="external/chromium_org/third_party/icu" revision="85e5871666cade1bb4b53f0cebfae53bc7d8d1f2" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/leveldatabase/src" path="external/chromium_org/third_party/leveldatabase/src" revision="d4e10f2a91f5de7bd17adcdbd80c54b19ab336fe" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/libaddressinput/src" path="external/chromium_org/third_party/libaddressinput/src" revision="7127f6844fac19d7610e34f4f7e03398fcd95531" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/libjingle/source/talk" path="external/chromium_org/third_party/libjingle/source/talk" revision="8fd7b6a4d9e6757c5e1ff50147e6089979bf6701" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/libphonenumber/src/phonenumbers" path="external/chromium_org/third_party/libphonenumber/src/phonenumbers" revision="de6af28b9f9f34a31ffb7772b7510fd215a0814e" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/libphonenumber/src/resources" path="external/chromium_org/third_party/libphonenumber/src/resources" revision="8f194ead1ebd76ebb28b7e2dfc0a7baddc62bb22" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/libsrtp" path="external/chromium_org/third_party/libsrtp" revision="5eddd5b3436aa8b2c7eb1f3c6db154281c6b91c5" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/libvpx" path="external/chromium_org/third_party/libvpx" revision="c20d6540c47e427470c5a56b35fea3c5e9098748" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/libyuv" path="external/chromium_org/third_party/libyuv" revision="6e77b766a9eb7889c1a10cab978705ffe03ff3e7" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/mesa/src" path="external/chromium_org/third_party/mesa/src" revision="e70a8ff30d20e1bf6bb5c06b5cd7bd4ea9ae20e1" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/openmax_dl" path="external/chromium_org/third_party/openmax_dl" revision="83d0254a412b93e81b06a354b90fb627408b4ec8" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/openssl" path="external/chromium_org/third_party/openssl" revision="c2a9402712e13e15fcae2b17ec0cbecb816ef52e" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/opus/src" path="external/chromium_org/third_party/opus/src" revision="e383b38591b010ab08ebddf1fd8d821796bd961a" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/ots" path="external/chromium_org/third_party/ots" revision="4d6e4ddc4b0db2023b1380236c33aa04a7e9e927" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/sfntly/cpp/src" path="external/chromium_org/third_party/sfntly/cpp/src" revision="2bac2ec7167835b214bfe42e762cd2ce6cf8cf1a" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/skia" path="external/chromium_org/third_party/skia" revision="2d75d0865c7bac54bf5e234855609d0f628388b7" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/smhasher/src" path="external/chromium_org/third_party/smhasher/src" revision="09e3094b8ab52bb1ad9ab8c8351d99df50327b67" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/usrsctp/usrsctplib" path="external/chromium_org/third_party/usrsctp/usrsctplib" revision="ed9a6fb519aa7606cab965b2c4218756e849ddb6" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/webrtc" path="external/chromium_org/third_party/webrtc" revision="8b45a80ec9c21b148a5674d3a23ca5fa70981f71" />
|
||||||
|
<project name="platform/external/chromium_org/third_party/yasm/source/patched-yasm" path="external/chromium_org/third_party/yasm/source/patched-yasm" revision="0f308c9bc9aa3258a0e90285b9d4e69bbb5b0a73" />
|
||||||
|
<project name="platform/external/chromium_org/tools/grit" path="external/chromium_org/tools/grit" revision="4ad93ed16c8ae7742fd7c34c83036b8d03c21fb9" />
|
||||||
|
<project name="platform/external/chromium_org/tools/gyp" path="external/chromium_org/tools/gyp" revision="9c42a79388ce87185ad04cb02047c1e56ac5e066" />
|
||||||
|
<project name="platform/external/chromium_org/v8" path="external/chromium_org/v8" revision="db865e6839e98cc9d07609bf81bb6610117ba6ff" />
|
||||||
|
<project groups="pdk" name="platform/external/clang" path="external/clang" revision="070ed154a0a8bc2c0fd4fb9b8a86a0f1218e6dfa" />
|
||||||
|
<project groups="pdk" name="platform/external/compiler-rt" path="external/compiler-rt" revision="c185902e393cd71823258016ead1b315ed062b24" />
|
||||||
|
<project name="platform/external/conscrypt" path="external/conscrypt" revision="26163c268a6d2625384b87e907afad8ef19f9a47" />
|
||||||
|
<project name="platform/external/dexmaker" path="external/dexmaker" revision="2b528c4b156f2de5c641875b98e59e0b09ebaccd" />
|
||||||
|
<project name="platform/external/dhcpcd" path="external/dhcpcd" revision="03baf5eab896198b5060d287af3fd60d360bf48f" />
|
||||||
|
<project groups="pdk" name="platform/external/dnsmasq" path="external/dnsmasq" revision="7674911bc9d10adf57c2c2d15d0c641b48e4afe6" />
|
||||||
|
<project name="platform/external/doclava" path="external/doclava" revision="b9d279d8f9c29a3044d13482846efb21f27b5df4" />
|
||||||
|
<project groups="pdk" name="platform/external/e2fsprogs" path="external/e2fsprogs" revision="721f3bc56989b5f4101e646a02d598ddb4a7ff6e" />
|
||||||
|
<project name="platform/external/easymock" path="external/easymock" revision="c9a234086537e5fd820b110bbd99e3cdc695004c" />
|
||||||
|
<project name="platform/external/eclipse-basebuilder" path="external/eclipse-basebuilder" revision="6134da6347cc997e0cf2921aaadfb46f21c05d85" />
|
||||||
|
<project name="platform/external/eclipse-windowbuilder" path="external/eclipse-windowbuilder" revision="a5f3ee137e94737538ec3bdf9b3716765d178c17" />
|
||||||
|
<project name="platform/external/eigen" path="external/eigen" revision="b015e75e8c7ba1ab4ddb91e9372a57e76f3fd159" />
|
||||||
|
<project name="platform/external/elfutils" path="external/elfutils" revision="38ecac0276825a9463803485440646582e477e78" />
|
||||||
|
<project name="platform/external/embunit" path="external/embunit" revision="336b7c65098af0d1be69f2db55f4e75342d73b3f" />
|
||||||
|
<project name="platform/external/emma" path="external/emma" revision="daacd02a6b9f7a3e82bdf1cc5b84db85ed59edb1" />
|
||||||
|
<project name="platform/external/esd" path="external/esd" revision="224a67f2683a7ee997179fc5dd16115e39987b0f" />
|
||||||
|
<project groups="pdk" name="platform/external/expat" path="external/expat" revision="907ec055718996baf36961e7f47f8447e49b3865" />
|
||||||
|
<project name="platform/external/eyes-free" path="external/eyes-free" revision="16bd4c7a4d1bfe229068b637614dad7c48dd2ceb" />
|
||||||
|
<project name="platform/external/f2fs-tools" path="external/f2fs-tools" revision="00dc8a1c6c87acf687e64e66cfc2fd7ca28e646e" />
|
||||||
|
<project name="platform/external/fdlibm" path="external/fdlibm" revision="c831c726067e0d8a05362e710e2405f0eff81e07" />
|
||||||
|
<project name="platform/external/fio" path="external/fio" revision="6f4e805b805f1ab3025482e471147bb51efa99bd" />
|
||||||
|
<project groups="pdk" name="platform/external/flac" path="external/flac" revision="7f32dd57579bdff88e46e1e403154be0b99165da" />
|
||||||
|
<project groups="pdk" name="platform/external/freetype" path="external/freetype" revision="899c67b6cfcd2010784fbf08c5415af16c526e0c" />
|
||||||
|
<project name="platform/external/fsck_msdos" path="external/fsck_msdos" revision="17a1471db8c528cd9d44ec4385d2eb3614138856" />
|
||||||
|
<project name="platform/external/ganymed-ssh2" path="external/ganymed-ssh2" revision="d3724dabc1cfbacd105fe6c422b4dcba80e4fb2d" />
|
||||||
|
<project groups="pdk" name="platform/external/gcc-demangle" path="external/gcc-demangle" revision="9241386b62c353302c2f9eccda0672685b252b4d" />
|
||||||
|
<project name="platform/external/genext2fs" path="external/genext2fs" revision="e11a9c7fe6f1cef99aad2f25afaea37b72fe9f93" />
|
||||||
|
<project name="platform/external/giflib" path="external/giflib" revision="621696a283c0ce34956417f760f1005fadcd12ae" />
|
||||||
|
<project name="platform/external/google-diff-match-patch" path="external/google-diff-match-patch" revision="cecbe12841337860291c2d6a5728b681ec5fca2a" />
|
||||||
|
<project name="platform/external/google-fonts/carrois-gothic-sc" path="external/google-fonts/carrois-gothic-sc" revision="0062a10458d4c357f3082d66bcb129d11913aaae" />
|
||||||
|
<project name="platform/external/google-fonts/coming-soon" path="external/google-fonts/coming-soon" revision="2c5cb418c690815545bbb0316eae5fd33b9fc859" />
|
||||||
|
<project name="platform/external/google-fonts/dancing-script" path="external/google-fonts/dancing-script" revision="7b6623bd54cee3e48ae8a4f477f616366643cc78" />
|
||||||
|
<project name="platform/external/grub" path="external/grub" revision="33a4e7e4cfa81dc21d37091515891859ef3ab934" />
|
||||||
|
<project groups="pdk" name="platform/external/gtest" path="external/gtest" revision="fa3c26b862ca17c0d2db67606226b49d1648b4bf" />
|
||||||
|
<project name="platform/external/guava" path="external/guava" revision="5e6db342fc75b1945298142530f2d1d1861bce73" />
|
||||||
|
<project name="platform/external/hamcrest" path="external/hamcrest" revision="ba28ac1e0386f26d9a45be5ed16fc9c598b27e70" />
|
||||||
|
<project name="platform/external/harfbuzz" path="external/harfbuzz" revision="7a08026033b424da3b7022ebcce35f033949df8b" />
|
||||||
|
<project name="platform/external/harfbuzz_ng" path="external/harfbuzz_ng" revision="3e537b48a7b56c742ecf3c2ed24ff15fcb73f575" />
|
||||||
|
<project name="platform/external/hyphenation" path="external/hyphenation" revision="bfa84834dfeb7fe8d058c2e7e07b5981451ddf82" />
|
||||||
|
<project name="platform/external/icu" path="external/icu" revision="3c09e2ebbdae6000f3bd471c34d055bc1913f7e4" />
|
||||||
|
<project groups="pdk" name="platform/external/icu4c" path="external/icu4c" revision="e5311394ca22b280da41cd17059288dab3fb1ea6" />
|
||||||
|
<project groups="pdk" name="platform/external/iproute2" path="external/iproute2" revision="5d4c86892885ae1bc12e0e157b35ef44e8ba81bd" />
|
||||||
|
<project name="platform/external/ipsec-tools" path="external/ipsec-tools" revision="f4cb1ee4b00abbfb6f968dc25818c23b4b47e584" />
|
||||||
|
<project name="platform/external/iptables" path="external/iptables" revision="e3928b77f18db0fdc615693017c6c15eb71bf4e0" />
|
||||||
|
<project name="platform/external/iputils" path="external/iputils" revision="1c7c426ab377c3a005a36d612ebbb16de86fb7d4" />
|
||||||
|
<project name="platform/external/jack" path="external/jack" revision="5ceb2025ac5d25ed48183ac2d3dac4691fe761fb" />
|
||||||
|
<project name="platform/external/javasqlite" path="external/javasqlite" revision="b8501bdeb0b7e39a0d82f2a96ad382c05a763b22" />
|
||||||
|
<project name="platform/external/javassist" path="external/javassist" revision="9566207cff5871c672fac1f0d4332d93292036d7" />
|
||||||
|
<project name="platform/external/jdiff" path="external/jdiff" revision="e4694302d6a3786c64d954e0b3cf42786283bd3c" />
|
||||||
|
<project name="platform/external/jemalloc" path="external/jemalloc" revision="615fe54259e545c33275753a316c2bfd1198b4f0" />
|
||||||
|
<project groups="pdk" name="platform/external/jhead" path="external/jhead" revision="871af5c305ce1d3087e58fae091c60c359f5fa45" />
|
||||||
|
<project name="platform/external/jmdns" path="external/jmdns" revision="f4eb7466d5c09098f9dc54137ed3235e3c43fc9f" />
|
||||||
|
<project name="platform/external/jmonkeyengine" path="external/jmonkeyengine" revision="a6b44658eb1c55295f132a36233a11aa2bd8f9cf" />
|
||||||
|
<project groups="pdk" name="platform/external/jpeg" path="external/jpeg" revision="213197252c8c4825f6572c651126c22067025fe9" />
|
||||||
|
<project name="platform/external/jsilver" path="external/jsilver" revision="739060b01245f1dc5f1800949b3c30c291253cff" />
|
||||||
|
<project name="platform/external/jsr305" path="external/jsr305" revision="a82868820d6350811b9ddfde4bf8ed5016084269" />
|
||||||
|
<project name="platform/external/junit" path="external/junit" revision="8f312e0c3d6dff30d015d2c85fdaae0a39220fd6" />
|
||||||
|
<project name="platform/external/kernel-headers" path="external/kernel-headers" revision="8b663ef01dcaadfe1dec7ba826e5cd1cf0bb2c91" />
|
||||||
|
<project name="platform/external/libcap-ng" path="external/libcap-ng" revision="1d1011a3c5049a7f9eef99d22f3704e4367579cc" />
|
||||||
|
<project name="platform/external/libcxx" path="external/libcxx" revision="a9aa30b5d18422fce29a42ce1a704bc5f28febde" />
|
||||||
|
<project name="platform/external/libcxxabi" path="external/libcxxabi" revision="87a9be28aceed80250cd1d1a47eb8afa0ee67b51" />
|
||||||
|
<project name="platform/external/libcxxrt" path="external/libcxxrt" revision="d1ee2b2a4946a073596514462d7629373d22fb27" />
|
||||||
|
<project name="platform/external/libexif" path="external/libexif" revision="25d371312cee1452a2adcf8b7f6cad6267bda32d" />
|
||||||
|
<project name="platform/external/libffi" path="external/libffi" revision="385ba8b006b9995456d3c9283fd20dded90809cc" />
|
||||||
|
<project groups="pdk" name="platform/external/libgsm" path="external/libgsm" revision="50761abed8f4734970874165b386cfd4d9599db4" />
|
||||||
|
<project groups="pdk" name="platform/external/liblzf" path="external/liblzf" revision="6946aa575b0949d045722794850896099d937cbb" />
|
||||||
|
<project name="platform/external/libmtp" path="external/libmtp" revision="7075348937f6a8c9d9211942fcb6c376f4227776" />
|
||||||
|
<project groups="pdk" name="platform/external/libnfc-nci" path="external/libnfc-nci" revision="46abb3dcf960058e48d1444b6a11cc7e84912339" />
|
||||||
|
<project groups="pdk" name="platform/external/libnfc-nxp" path="external/libnfc-nxp" revision="15d81f71a668b3092549c6b7f83694bf680d9c49" />
|
||||||
|
<project name="platform/external/libnl" path="external/libnl" revision="99debfa4c01b49c9b470884cc56f81fcdee0fa1f" />
|
||||||
|
<project groups="pdk" name="platform/external/libnl-headers" path="external/libnl-headers" revision="52c926a9de955fa2d987bf8c5d4a1304b5a2a611" />
|
||||||
|
<project name="platform/external/libogg" path="external/libogg" revision="ec0b24fb1468abe37be4164a6feb16568e036bde" />
|
||||||
|
<project name="platform/external/libpcap" path="external/libpcap" revision="9dab0cd7430a4d23e0a7752fb13b941692171c3d" />
|
||||||
|
<project name="platform/external/libphonenumber" path="external/libphonenumber" revision="485e6d5c6e48a1fc43cc0a090e687c723dac056c" />
|
||||||
|
<project groups="pdk" name="platform/external/libpng" path="external/libpng" revision="48b7ba25a15a9eae83d366c02475539725d035d0" />
|
||||||
|
<project name="platform/external/libppp" path="external/libppp" revision="706e567fc5ff6b79738a5f470e5aa7b2cae76459" />
|
||||||
|
<project name="platform/external/libseccomp-helper" path="external/libseccomp-helper" revision="e87019943a8b5a7cd0880910f671c37b240d5754" />
|
||||||
|
<project groups="pdk" name="platform/external/libselinux" path="external/libselinux" revision="da4208c8808e6a62fcfe848343abd3e2f3b339cc" />
|
||||||
|
<project groups="pdk" name="platform/external/libsepol" path="external/libsepol" revision="d26204e7d0a3be178a97d4920b82007e05a2a632" />
|
||||||
|
<project name="platform/external/libssh2" path="external/libssh2" revision="2bb40f2445cab3ba588efb29e1835cdba2b27248" />
|
||||||
|
<project name="platform/external/libunwind" path="external/libunwind" revision="b3436a3feed4dcb22dafc8f7818b742cacaddd1d" />
|
||||||
|
<project name="platform/external/libusb" path="external/libusb" revision="2801917fe150393d4f4a354165fe89550ae22613" />
|
||||||
|
<project name="platform/external/libusb-compat" path="external/libusb-compat" revision="94867ba54eb7faa8efca81cf2214d00bb9143d27" />
|
||||||
|
<project name="platform/external/libvorbis" path="external/libvorbis" revision="de559619fd4dd0d2d9608436696fd44bdf74eba8" />
|
||||||
|
<project groups="pdk" name="platform/external/libvpx" path="external/libvpx" revision="d64f247f64fbb814c9ecf06a56bcb0948bfca21f" />
|
||||||
|
<project name="platform/external/libxml2" path="external/libxml2" revision="399e808f940777d18efe377bd34f738dc84729e0" />
|
||||||
|
<project name="platform/external/libxslt" path="external/libxslt" revision="98f5140c33273d3bd67ca03566f8417406001016" />
|
||||||
|
<project groups="libyuv" name="platform/external/libyuv" path="external/libyuv" revision="482a582884351288fb701532359652970b1ba7c0" />
|
||||||
|
<project name="platform/external/linux-tools-perf" path="external/linux-tools-perf" revision="3e1937964f6c183eb6a0000e2dca27fc3a419ca2" />
|
||||||
|
<project name="platform/external/littlemock" path="external/littlemock" revision="328b01eada8965cd38feea884d4080c31e3763b0" />
|
||||||
|
<project groups="pdk" name="platform/external/llvm" path="external/llvm" revision="c4c1f81ae1b07138df50e0459631abf3082bda9c" />
|
||||||
|
<project name="platform/external/ltrace" path="external/ltrace" revision="82ae18484c7b6a8af05354caf6de3a7f1ac5fcf9" />
|
||||||
|
<project name="platform/external/lzma" path="external/lzma" revision="19cf4f773361c09e47a2ffe1613d66cbf632227f" />
|
||||||
|
<project name="platform/external/marisa-trie" path="external/marisa-trie" revision="629ed059b1e85cd8e4de363d8b3dc53c15c3e08a" />
|
||||||
|
<project name="platform/external/markdown" path="external/markdown" revision="6f2e3554ae38cc90518d32e02cb57d05988270a6" />
|
||||||
|
<project groups="pdk" name="platform/external/mdnsresponder" path="external/mdnsresponder" revision="b25c2507ecc3f674e3b4f0a770acf9ad8fd874d0" />
|
||||||
|
<project name="platform/external/mesa3d" path="external/mesa3d" revision="97d3f36a59ea448fa77e47a90bf04f1254670542" />
|
||||||
|
<project name="platform/external/messageformat" path="external/messageformat" revision="180a28770171075aa484729a69d14c7cf0c93fcf" />
|
||||||
|
<project groups="pdk" name="platform/external/mksh" path="external/mksh" revision="2a54bce0ae98f53f0b867e949b26d081691e1493" />
|
||||||
|
<project name="platform/external/mockito" path="external/mockito" revision="4d0dcd53b27a243baf72ee0b127b188a058b318d" />
|
||||||
|
<project name="platform/external/mockwebserver" path="external/mockwebserver" revision="2f7659c426de53122ee7922b0981058a900124a7" />
|
||||||
|
<project name="platform/external/mp4parser" path="external/mp4parser" revision="16051e950485c6b62127c0446a760111de1a0cb9" />
|
||||||
|
<project name="platform/external/mtpd" path="external/mtpd" revision="5ea8006691664b7e6d46d6a6dc889eac91b7fe37" />
|
||||||
|
<project name="platform/external/naver-fonts" path="external/naver-fonts" revision="3bba7d2430bc3ec8105678a27f03fb080f0f8384" />
|
||||||
|
<project name="platform/external/netcat" path="external/netcat" revision="444644cfa9a2f3002863caa168fb2d6b34dfd1e8" />
|
||||||
|
<project name="platform/external/netperf" path="external/netperf" revision="38e47cd883738cb84bdb47a7d263f14f14062d7b" />
|
||||||
|
<project name="platform/external/neven" path="external/neven" revision="504ee5ccaabd8bce4da3430b0f4e9714ac2a8e6c" />
|
||||||
|
<project name="platform/external/nfacct" path="external/nfacct" revision="6f7aae0264821b44e9fe80fb5596c525d3e2f475" />
|
||||||
|
<project name="platform/external/nist-pkits" path="external/nist-pkits" revision="b7a53ad5a587926cb880d9bb6f3d51657596474c" />
|
||||||
|
<project name="platform/external/nist-sip" path="external/nist-sip" revision="b23dbfce7ea84c39cea75b612868a5832cb9af2b" />
|
||||||
|
<project name="platform/external/noto-fonts" path="external/noto-fonts" revision="90372d894b5d9c9f2a111315d2eb3b8de1979ee4" />
|
||||||
|
<project name="platform/external/oauth" path="external/oauth" revision="bc170f58de82000ed6460f111686a850a1890c07" />
|
||||||
|
<project name="platform/external/objenesis" path="external/objenesis" revision="2a7655c0d503fcf5989098f65bf89eae78c32e5a" />
|
||||||
|
<project name="platform/external/okhttp" path="external/okhttp" revision="4909663c795d974d0d4b0e2d1ebd6e179486c897" />
|
||||||
|
<project name="platform/external/open-vcdiff" path="external/open-vcdiff" revision="6d29f2f083baf8250db94ed0b4807e513a84163d" />
|
||||||
|
<project name="platform/external/opencv" path="external/opencv" revision="4a99e243b42afcb885d036bb451eb3c2739275b6" />
|
||||||
|
<project name="platform/external/openfst" path="external/openfst" revision="b7434caa51427a0f5ab5c807e1a92d6ca2af8884" />
|
||||||
|
<project name="platform/external/openssh" path="external/openssh" revision="3c335c9fb9c12375ad62748fa1d1e5ebe4710c94" />
|
||||||
|
<project groups="pdk" name="platform/external/openssl" path="external/openssl" revision="cfe73257599ae4baae3ffb50c2c841d9249d2d16" />
|
||||||
|
<project name="platform/external/oprofile" path="external/oprofile" revision="3722f1053f4cab90c4daf61451713a2d61d79c71" />
|
||||||
|
<project name="platform/external/owasp/sanitizer" path="external/owasp/sanitizer" revision="6a304233f9f2010821a5a1dd40e2832b68353a3c" />
|
||||||
|
<project name="platform/external/pcre" path="external/pcre" revision="993a14b71c8e7af03eb929d44a444137393a5324" />
|
||||||
|
<project name="platform/external/pixman" path="external/pixman" revision="afd5bbd8074cedec8544d07920fa06786d5a4f08" />
|
||||||
|
<project name="platform/external/ppp" path="external/ppp" revision="8b58d9bd02e2c55f547fafbe9ba55b1160665761" />
|
||||||
|
<project groups="pdk-java" name="platform/external/proguard" path="external/proguard" revision="3fd19dba2bdc0c4b64afda4d75836e1dcf7abf97" />
|
||||||
|
<project groups="pdk" name="platform/external/protobuf" path="external/protobuf" revision="95d99df4574c28debcf9646056a0350ff44bc7c9" />
|
||||||
|
<project name="platform/external/qemu" path="external/qemu" revision="539e1f25ecbfe80814dba2ea77feb22087b9d53b" />
|
||||||
|
<project name="platform/external/qemu-pc-bios" path="external/qemu-pc-bios" revision="20349dae98d7de09a7e390d4a706c64f1db6edc2" />
|
||||||
|
<project name="platform/external/regex-re2" path="external/regex-re2" revision="0d4c52358a1af421705c54bd8a9fdd8a30558a2e" />
|
||||||
|
<project name="platform/external/replicaisland" path="external/replicaisland" revision="99e2e54c5d036048caf09bb05eea0969de093104" />
|
||||||
|
<project name="platform/external/robolectric" path="external/robolectric" revision="6bf395c984ed3f69711663b006aeffbb0f7e8a90" />
|
||||||
|
<project groups="pdk" name="platform/external/safe-iop" path="external/safe-iop" revision="aa0725fb1da35e47676b6da30009322eb5ed59be" />
|
||||||
|
<project groups="pdk" name="platform/external/scrypt" path="external/scrypt" revision="dde037b82e5cd6215244e3240dbaad417928eafa" />
|
||||||
|
<project groups="pdk" name="platform/external/sepolicy" path="external/sepolicy" revision="21ada26daea538397029396099dce865267bae2f" />
|
||||||
|
<project name="platform/external/sfntly" path="external/sfntly" revision="6723e5241a45c6de224c96384a595a1bf5bc5449" />
|
||||||
|
<project name="platform/external/sil-fonts" path="external/sil-fonts" revision="795a2f4339f8a82d6cff187e2a77bb01d5911aac" />
|
||||||
|
<project name="platform/external/skia" path="external/skia" revision="d6f2c76fdb9b0469261fa2db0b29ed48c7ac38b5" />
|
||||||
|
<project name="platform/external/smack" path="external/smack" revision="d7955ce24d294fb2014c59d11fca184471056f44" />
|
||||||
|
<project name="platform/external/smali" path="external/smali" revision="5fd395796e215a80c722815bf180728948868f18" />
|
||||||
|
<project groups="pdk" name="platform/external/sonivox" path="external/sonivox" revision="c0723d864b10fbd6c5cbbfa65e886c5e9eb3aafd" />
|
||||||
|
<project groups="pdk" name="platform/external/speex" path="external/speex" revision="eaa4765b8cc6a6dd5ee0d26dc1b61a1044817f32" />
|
||||||
|
<project groups="pdk" name="platform/external/sqlite" path="external/sqlite" revision="50af37d784661b2d54c8e043de52ffc4f02a1a50" />
|
||||||
|
<project name="platform/external/srec" path="external/srec" revision="540e7ee8dbf1d7ee72ef45c92efbebcb89bf6d1a" />
|
||||||
|
<project name="platform/external/srtp" path="external/srtp" revision="98bd63b48a31b4633cdfdc8138577dfa6d8dd2a6" />
|
||||||
|
<project groups="pdk" name="platform/external/stlport" path="external/stlport" revision="dc05ca5be2319f74b41cb429ea50f30fceff4ace" />
|
||||||
|
<project name="platform/external/strace" path="external/strace" revision="a2adbed6e2d3ce85ebb167e16ae370681a8b5188" />
|
||||||
|
<project name="platform/external/stressapptest" path="external/stressapptest" revision="0956427aa995561acb4471764158ae057a36dad5" />
|
||||||
|
<project name="platform/external/svox" path="external/svox" revision="ad0a55bd0e13a27ed11034346eee9c47e3684ef2" />
|
||||||
|
<project name="platform/external/syspatch" path="external/syspatch" revision="358a4f86b8c2cb3d3f879a37f6773dd09d4b77b0" />
|
||||||
|
<project name="platform/external/tagsoup" path="external/tagsoup" revision="a97828cb3f8f3a1af8470e55d3c5cd62d6a7cb4c" />
|
||||||
|
<project name="platform/external/tcpdump" path="external/tcpdump" revision="de49cdcfddf36f2b41ef3278e98a8a550a189952" />
|
||||||
|
<project name="platform/external/timezonepicker-support" path="external/timezonepicker-support" revision="99e91a76fd74bad10266623d67cdb98d011f709e" />
|
||||||
|
<project groups="pdk" name="platform/external/tinyalsa" path="external/tinyalsa" revision="653e7a4015341c87b4d55ec9a94ec7bdee044f6f" />
|
||||||
|
<project groups="pdk" name="platform/external/tinycompress" path="external/tinycompress" revision="aeee2c6a19b9d3765f72bc79555005786a424233" />
|
||||||
|
<project groups="pdk" name="platform/external/tinyxml" path="external/tinyxml" revision="f065a8058659c0e6c5a5ccddcdb4faf0fe645cd0" />
|
||||||
|
<project groups="pdk" name="platform/external/tinyxml2" path="external/tinyxml2" revision="c74b546f5af36968ffa56d7fd4529f4273b96f48" />
|
||||||
|
<project groups="pdk" name="platform/external/tremolo" path="external/tremolo" revision="0fec2aefa8143c83df43752bb0218dfa371cc57e" />
|
||||||
|
<project groups="pdk" name="platform/external/valgrind" path="external/valgrind" revision="893257d6c86a18cc5cf6c92528b7027f327dca70" />
|
||||||
|
<project name="platform/external/vixl" path="external/vixl" revision="e1ab25cde167109efb28fa6a86d5c2c80b762d58" />
|
||||||
|
<project name="platform/external/webp" path="external/webp" revision="0db01fc3411621bec473d50db0071fd2a225962e" />
|
||||||
|
<project groups="pdk" name="platform/external/webrtc" path="external/webrtc" revision="d62aeac391d16d4953a12120c0ff614ccde02a30" />
|
||||||
|
<project groups="pdk" name="platform/external/wpa_supplicant_8" path="external/wpa_supplicant_8" revision="88ef20ce4facae68a3e6b05429bb9f3f73a93996" />
|
||||||
|
<project name="platform/external/xdelta3" path="external/xdelta3" revision="52d9c642e6a307c43881f20a4ed1c10e947234ba" />
|
||||||
|
<project name="platform/external/xmlwriter" path="external/xmlwriter" revision="e95d92246ee35273dde2bee8b00485cc14c12be5" />
|
||||||
|
<project name="platform/external/xmp_toolkit" path="external/xmp_toolkit" revision="42ea4dc6d1fc2206a7778029070ed9213e3b0fbf" />
|
||||||
|
<project groups="pdk" name="platform/external/yaffs2" path="external/yaffs2" revision="a2cff2275e1b501ff478b03757d6e4f05fddc2db" />
|
||||||
|
<project groups="pdk" name="platform/external/zlib" path="external/zlib" revision="8d977782c1cfe9d75cc9a464439c2ff1e27e1665" />
|
||||||
|
<project name="platform/external/zxing" path="external/zxing" revision="7620644768ffc235607b3a94671e49518c18686f" />
|
||||||
|
<project groups="pdk" name="platform/frameworks/av" path="frameworks/av" revision="a018cd4926460f8f5ab30a9a11df9775572d8620" />
|
||||||
|
<project name="platform/frameworks/base" path="frameworks/base" revision="6a58309e734086a21580dd8d9175ac1817ca3ab2" />
|
||||||
|
<project groups="pdk" name="platform/frameworks/compile/libbcc" path="frameworks/compile/libbcc" revision="3fc91521640692f844aece8b1743c4df702d1c66" />
|
||||||
|
<project groups="pdk" name="platform/frameworks/compile/mclinker" path="frameworks/compile/mclinker" revision="e673be8f0526f9cbc83093fb579c0f76de9e4e3c" />
|
||||||
|
<project groups="pdk" name="platform/frameworks/compile/slang" path="frameworks/compile/slang" revision="c957dd47b0a0705a686896b26cd1859d25824552" />
|
||||||
|
<project name="platform/frameworks/ex" path="frameworks/ex" revision="3696df848aa7c574f913c97c3bf415b634934048" />
|
||||||
|
<project name="platform/frameworks/mff" path="frameworks/mff" revision="b9669b8540a1e5c953374d53b115514335e23c27" />
|
||||||
|
<project name="platform/frameworks/ml" path="frameworks/ml" revision="b020ad88ca28ada76a596b5dcc7e6c2854fcc132" />
|
||||||
|
<project name="platform/frameworks/multidex" path="frameworks/multidex" revision="590a07e63868f0a1da311ff22b4a9f35eb48a865" />
|
||||||
|
<project groups="pdk" name="platform/frameworks/native" path="frameworks/native" revision="e8878921db4a51ff5d4e75d9c8958d889a048603" />
|
||||||
|
<project name="platform/frameworks/opt/calendar" path="frameworks/opt/calendar" revision="03b18577f8f8f799e87a62b8e03889ddacf6daa2" />
|
||||||
|
<project name="platform/frameworks/opt/carddav" path="frameworks/opt/carddav" revision="f08aa2df132dd8dc32a0013d3750137d9dd9280a" />
|
||||||
|
<project name="platform/frameworks/opt/colorpicker" path="frameworks/opt/colorpicker" revision="720a40ae24d526268b3c0f2dd8497b5df2cc6f23" />
|
||||||
|
<project name="platform/frameworks/opt/datetimepicker" path="frameworks/opt/datetimepicker" revision="8a1c55baaf5ced7a98b196c689ccdd59238f6e58" />
|
||||||
|
<project name="platform/frameworks/opt/emoji" path="frameworks/opt/emoji" revision="709f713ebcd62c61defc270d945810efca179621" />
|
||||||
|
<project name="platform/frameworks/opt/inputmethodcommon" path="frameworks/opt/inputmethodcommon" revision="df9dd39c2047992a43b64e13bb0fc348a1630f3b" />
|
||||||
|
<project name="platform/frameworks/opt/mailcommon" path="frameworks/opt/mailcommon" revision="1537812900e59f875cfea0483f0ae261b16d3e4b" />
|
||||||
|
<project name="platform/frameworks/opt/mms" path="frameworks/opt/mms" revision="64817e848552fd0a429a3e026b7b1562103c56bb" />
|
||||||
|
<project name="platform/frameworks/opt/net/voip" path="frameworks/opt/net/voip" revision="0f722c7f09ce67e058eb1cfaabf1d85f1abdf797" />
|
||||||
|
<project name="platform/frameworks/opt/photoviewer" path="frameworks/opt/photoviewer" revision="8c32972911bf73babdb01d30267f57255e242d78" />
|
||||||
|
<project groups="pdk" name="platform/frameworks/opt/telephony" path="frameworks/opt/telephony" revision="93faaed9056491c551ef7046e9e1de7d6397e95c" />
|
||||||
|
<project name="platform/frameworks/opt/timezonepicker" path="frameworks/opt/timezonepicker" revision="3820b87bfbc86d066e9093e78254e1f3728ad77d" />
|
||||||
|
<project name="platform/frameworks/opt/vcard" path="frameworks/opt/vcard" revision="5907243e6cf0603adf266ebfa7ee5ee465b9c596" />
|
||||||
|
<project name="platform/frameworks/opt/widget" path="frameworks/opt/widget" revision="466e0e0307b3f6aa4f4be3d9419b5996bd389da5" />
|
||||||
|
<project groups="pdk" name="platform/frameworks/rs" path="frameworks/rs" revision="ad0544fdf918e64cec05d1c98588880f10b09220" />
|
||||||
|
<project name="platform/frameworks/support" path="frameworks/support" revision="f05c07d3528765076adc16337a1f68f1700955dc" />
|
||||||
|
<project name="platform/frameworks/testing" path="frameworks/testing" revision="5c8e0271db889518f5969b142a37faa01a4ee54d" />
|
||||||
|
<project name="platform/frameworks/volley" path="frameworks/volley" revision="0e406003b5d434d8f16d7d6ad97d446060b788e6" />
|
||||||
|
<project name="platform/frameworks/webview" path="frameworks/webview" revision="6ed700e171cb2ee3303c08a1db2abc0e56fd307a" />
|
||||||
|
<project name="platform/frameworks/wilhelm" path="frameworks/wilhelm" revision="a62c3572e60ae0446632de15418a65089cccf551" />
|
||||||
|
<project name="platform/hardware/akm" path="hardware/akm" revision="32838ef838d1341aa8b77022869b801fb0bbb26c" />
|
||||||
|
<project groups="pdk" name="platform/hardware/broadcom/libbt" path="hardware/broadcom/libbt" revision="55ddd0cce019e88829f92b2fe4e17d5869daa9b9" />
|
||||||
|
<project groups="broadcom_wlan" name="platform/hardware/broadcom/wlan" path="hardware/broadcom/wlan" revision="47a3b8f496e6d2a836ac6b7268e5626c969542ec" />
|
||||||
|
<project groups="invensense" name="platform/hardware/invensense" path="hardware/invensense" revision="0f5bc7cd710fac85377621a8b9a4c364af80605f" />
|
||||||
|
<project groups="pdk" name="platform/hardware/libhardware" path="hardware/libhardware" revision="3e618a6aa10c783d1536f20edfc3347939cfa18e" />
|
||||||
|
<project groups="pdk" name="platform/hardware/libhardware_legacy" path="hardware/libhardware_legacy" revision="4c20a09e8684657448f0bc97a2da4e56c94d484e" />
|
||||||
|
<project groups="qcom" name="platform/hardware/qcom/audio" path="hardware/qcom/audio" revision="d47ff224c7b24933c701acae8d5e4c98a1bc80af" />
|
||||||
|
<project groups="qcom" name="platform/hardware/qcom/bt" path="hardware/qcom/bt" revision="cf314a462ba06f4bd3352d5d4630edcf6edbbe97" />
|
||||||
|
<project groups="qcom" name="platform/hardware/qcom/camera" path="hardware/qcom/camera" revision="fbf72e519ec5fe2f2720b1a3d119e2d69e172e34" />
|
||||||
|
<project groups="qcom" name="platform/hardware/qcom/display" path="hardware/qcom/display" revision="0a611c6ae11b65fec5ada5ecaa0893541db34156" />
|
||||||
|
<project groups="qcom" name="platform/hardware/qcom/keymaster" path="hardware/qcom/keymaster" revision="70d36107318e1d3f7abf62a56279b3f9da3ff000" />
|
||||||
|
<project groups="qcom" name="platform/hardware/qcom/media" path="hardware/qcom/media" revision="1208a868bcb0ffaa650a7e68b51031254c775d39" />
|
||||||
|
<project groups="qcom_msm8960" name="platform/hardware/qcom/msm8960" path="hardware/qcom/msm8960" revision="ca38ed098b05a79d20e852348f27d7c40a53f801" />
|
||||||
|
<project groups="qcom_msm8x74" name="platform/hardware/qcom/msm8x74" path="hardware/qcom/msm8x74" revision="0c6844ea9ee14fd7bbfd6af0bcc6b6b682f46d1c" />
|
||||||
|
<project groups="qcom" name="platform/hardware/qcom/power" path="hardware/qcom/power" revision="ff9f4538c09399030fa73e3e65a167852cb91e8f" />
|
||||||
|
<project groups="qcom" name="platform/hardware/qcom/sensors" path="hardware/qcom/sensors" revision="07c5bcdb36158e22d33bac02eecd83d4ff1fb2f8" />
|
||||||
|
<project groups="qcom_wlan" name="platform/hardware/qcom/wlan" path="hardware/qcom/wlan" revision="daa321b0ad8c10b454dc28d7e6dadc72196a8c7a" />
|
||||||
|
<project groups="pdk" name="platform/hardware/ril" path="hardware/ril" revision="eb2a93458204a928edfe36f043ddb48cf5575143" />
|
||||||
|
<project groups="exynos5" name="platform/hardware/samsung_slsi/exynos5" path="hardware/samsung_slsi/exynos5" revision="d7bd354358ecfb1e52afb3da4fc586c0822c696a" />
|
||||||
|
<project name="platform/hardware/ti/omap3" path="hardware/ti/omap3" revision="949aad363a9cc794f9ac8fd42338ae1678e50bc1" />
|
||||||
|
<project groups="omap4" name="platform/hardware/ti/omap4xxx" path="hardware/ti/omap4xxx" revision="c32caab84ff9edc1489ed6c8079c7d252caafc4d" />
|
||||||
|
<project name="platform/libcore" path="libcore" revision="d343e35535a99bad32eea0defc8a3e9c23c9967f" />
|
||||||
|
<project groups="pdk-java" name="platform/libnativehelper" path="libnativehelper" revision="b37e11d07dec2d49b576709ae8e0568a9daabd07" />
|
||||||
|
<project name="platform/ndk" path="ndk" revision="f584f76882baf374166cf12b99cd5f3dbdf3b6b9" />
|
||||||
|
<project name="platform/packages/apps/BasicSmsReceiver" path="packages/apps/BasicSmsReceiver" revision="80327793c4b4ebf4a6a53b72e46c477afe18f135" />
|
||||||
|
<project name="platform/packages/apps/Bluetooth" path="packages/apps/Bluetooth" revision="7efa9db2129c99475684a2e44c4fb89cce3134bc" />
|
||||||
|
<project name="platform/packages/apps/Browser" path="packages/apps/Browser" revision="fe4083510dc773911651456f150bf5432f81a6c0" />
|
||||||
|
<project name="platform/packages/apps/Calculator" path="packages/apps/Calculator" revision="6c7521bb685c9b7b7c36f2077612d4b1a0e808d4" />
|
||||||
|
<project name="platform/packages/apps/Calendar" path="packages/apps/Calendar" revision="2d72f6bed6a0eeaddbda08393063fe873c1c7922" />
|
||||||
|
<project name="platform/packages/apps/Camera" path="packages/apps/Camera" revision="b0e357d548fb8d10896200add2b932199a96a2ea" />
|
||||||
|
<project name="platform/packages/apps/Camera2" path="packages/apps/Camera2" revision="ece4866dc575b956801f6dab2d6c4923e272c5fa" />
|
||||||
|
<project name="platform/packages/apps/CellBroadcastReceiver" path="packages/apps/CellBroadcastReceiver" revision="21d8baf492007cc01545905de33ecefe5d947843" />
|
||||||
|
<project name="platform/packages/apps/CertInstaller" path="packages/apps/CertInstaller" revision="483a188feda6e9d311aef437d28f30e1fb6afeb0" />
|
||||||
|
<project name="platform/packages/apps/Contacts" path="packages/apps/Contacts" revision="24a4f48dc5c768188143648e267889477e4185e8" />
|
||||||
|
<project name="platform/packages/apps/ContactsCommon" path="packages/apps/ContactsCommon" revision="6ce4a3bc083a7dbcc7ffa2bebff242638d7f8e61" />
|
||||||
|
<project name="platform/packages/apps/DeskClock" path="packages/apps/DeskClock" revision="d3bfe9223f3e70271813f48b8ef5500c3a90c0b3" />
|
||||||
|
<project name="platform/packages/apps/Dialer" path="packages/apps/Dialer" revision="5cb300ef50e9942eef746319dd1b1b6e7c2c05e2" />
|
||||||
|
<project name="platform/packages/apps/Email" path="packages/apps/Email" revision="22766dcf6a44416b2972c053739472317017257d" />
|
||||||
|
<project name="platform/packages/apps/Exchange" path="packages/apps/Exchange" revision="ab03a7f9b197b6ffcc390dd5fb589067a5161148" />
|
||||||
|
<project name="platform/packages/apps/Gallery" path="packages/apps/Gallery" revision="9595006a3347c08e6b8e31d679903bb8f77a343d" />
|
||||||
|
<project name="platform/packages/apps/Gallery2" path="packages/apps/Gallery2" revision="9cde04ed08f3a5201a007d78b3c89f43fb3003e0" />
|
||||||
|
<project name="platform/packages/apps/HTMLViewer" path="packages/apps/HTMLViewer" revision="7498890092c388dc59ca932e09ec79dd568b1a19" />
|
||||||
|
<project name="platform/packages/apps/InCallUI" path="packages/apps/InCallUI" revision="d968d1a28dae45229b1be9f05bef8df13821e94d" />
|
||||||
|
<project name="platform/packages/apps/KeyChain" path="packages/apps/KeyChain" revision="e6243f79f3ce6daeb2d8d879e6e1a684ffc1b2fc" />
|
||||||
|
<project name="platform/packages/apps/Launcher2" path="packages/apps/Launcher2" revision="31569f6dbd44d443ff54c460b733e62fc37d2319" />
|
||||||
|
<project name="platform/packages/apps/Launcher3" path="packages/apps/Launcher3" revision="3a9f3a7806a0153865415d6207c6812915d3f6b1" />
|
||||||
|
<project name="platform/packages/apps/LegacyCamera" path="packages/apps/LegacyCamera" revision="d9b5d8941d1ec47ff391da2b8cc8ec90f902062f" />
|
||||||
|
<project name="platform/packages/apps/Mms" path="packages/apps/Mms" revision="e770738ea4389afddb0b4e6c69749f9456ed0f48" />
|
||||||
|
<project name="platform/packages/apps/Music" path="packages/apps/Music" revision="bfca689bb6605cfcd1e0c1781c707735efb7444e" />
|
||||||
|
<project name="platform/packages/apps/MusicFX" path="packages/apps/MusicFX" revision="aaa2f99caac6f088b23de55fe2eb1e8ee305b1fb" />
|
||||||
|
<project name="platform/packages/apps/Nfc" path="packages/apps/Nfc" revision="f62a9a00a13ba333e88cb9e8ce2553d6acf708ad" />
|
||||||
|
<project name="platform/packages/apps/OneTimeInitializer" path="packages/apps/OneTimeInitializer" revision="01e429c08e51291315890de9677151a7e0b6ad35" />
|
||||||
|
<project name="platform/packages/apps/PackageInstaller" path="packages/apps/PackageInstaller" revision="212398024b4491276ef00cf7fcd829c89200b6ba" />
|
||||||
|
<project name="platform/packages/apps/Phone" path="packages/apps/Phone" revision="bf4ec5b1258628bfa6a82aa0d80f348a77bbf194" />
|
||||||
|
<project name="platform/packages/apps/PhoneCommon" path="packages/apps/PhoneCommon" revision="16f62c5ab5c21981e63d678187ad4b44e686332b" />
|
||||||
|
<project name="platform/packages/apps/Protips" path="packages/apps/Protips" revision="325232e344b257a3c236ead2adc60c7378f226c0" />
|
||||||
|
<project name="platform/packages/apps/Provision" path="packages/apps/Provision" revision="78ca0db658fe6253d506916e36319e620476f809" />
|
||||||
|
<project name="platform/packages/apps/QuickSearchBox" path="packages/apps/QuickSearchBox" revision="cfb7af6652a7fbcc0f86341edfe14dc316c9ff37" />
|
||||||
|
<project name="platform/packages/apps/Settings" path="packages/apps/Settings" revision="2abbacb7d46657e5863eb2ef0035521ffc41a0a8" />
|
||||||
|
<project name="platform/packages/apps/SmartCardService" path="packages/apps/SmartCardService" revision="29eae320a4bd222b5ff1c092f84f1aebba88d0b7" />
|
||||||
|
<project name="platform/packages/apps/SoundRecorder" path="packages/apps/SoundRecorder" revision="b0e671faf142fa0b933b4f1cd7d186b1f37ebe46" />
|
||||||
|
<project name="platform/packages/apps/SpareParts" path="packages/apps/SpareParts" revision="4db997871e3f4c3f84660815096e5276b47c5c91" />
|
||||||
|
<project name="platform/packages/apps/SpeechRecorder" path="packages/apps/SpeechRecorder" revision="536aa74ff3a77186bef29dc9333a34688fa59d13" />
|
||||||
|
<project name="platform/packages/apps/Stk" path="packages/apps/Stk" revision="115b75461f8e1fb432fe1a892549ca1c96cef497" />
|
||||||
|
<project name="platform/packages/apps/Tag" path="packages/apps/Tag" revision="f830b07335bd2dd794b84507b5390f7d893fe428" />
|
||||||
|
<project name="platform/packages/apps/TvSettings" path="packages/apps/TvSettings" revision="24e45eaf3b4badaf02e449e7f6d07c72e743f521" />
|
||||||
|
<project name="platform/packages/apps/UnifiedEmail" path="packages/apps/UnifiedEmail" revision="d4537c907920f4470b70e91c187ef7a0b31632db" />
|
||||||
|
<project name="platform/packages/apps/VideoEditor" path="packages/apps/VideoEditor" revision="a49ea28e1628f507ae3a564215664c29c5fa1215" />
|
||||||
|
<project name="platform/packages/apps/VoiceDialer" path="packages/apps/VoiceDialer" revision="72df4532dfca9a82e8aef55fcdfce3026d3d3312" />
|
||||||
|
<project name="platform/packages/experimental" path="packages/experimental" revision="588c7cda9c62fb77d23bc089a63cba8a96bc9ffb" />
|
||||||
|
<project name="platform/packages/inputmethods/LatinIME" path="packages/inputmethods/LatinIME" revision="159474f2ae5d13308ca1b92b8a5ccd809ec6a450" />
|
||||||
|
<project name="platform/packages/inputmethods/OpenWnn" path="packages/inputmethods/OpenWnn" revision="59aefa242169b7a51c2381daee58ff22fd1834ce" />
|
||||||
|
<project name="platform/packages/inputmethods/PinyinIME" path="packages/inputmethods/PinyinIME" revision="49aebad1c1cfbbcaa9288ffed5161e79e57c3679" />
|
||||||
|
<project name="platform/packages/providers/ApplicationsProvider" path="packages/providers/ApplicationsProvider" revision="3347f31bd268ca3153abe5def9361f625bd73efd" />
|
||||||
|
<project name="platform/packages/providers/CalendarProvider" path="packages/providers/CalendarProvider" revision="20360f2fdd7ad2de1234b7ed61e3ea120f0dc635" />
|
||||||
|
<project name="platform/packages/providers/ContactsProvider" path="packages/providers/ContactsProvider" revision="6ac2395324c0e7539434b7c68ec738f867d7ed37" />
|
||||||
|
<project name="platform/packages/providers/DownloadProvider" path="packages/providers/DownloadProvider" revision="90e7485d68095b5fc5044dd1bc6cd4dfc485eaa3" />
|
||||||
|
<project name="platform/packages/providers/MediaProvider" path="packages/providers/MediaProvider" revision="501b93fb00db86fe4fb53dc000f6f11587afe4b0" />
|
||||||
|
<project name="platform/packages/providers/PartnerBookmarksProvider" path="packages/providers/PartnerBookmarksProvider" revision="96d0a80af45923767baf449fc8c735c2f71d64ae" />
|
||||||
|
<project name="platform/packages/providers/TelephonyProvider" path="packages/providers/TelephonyProvider" revision="91e705bc7662192ea33f2bac6b0a6c79fc9bc7ab" />
|
||||||
|
<project name="platform/packages/providers/UserDictionaryProvider" path="packages/providers/UserDictionaryProvider" revision="361f35b7b1fe758d93e0952536a298b2ed045a89" />
|
||||||
|
<project name="platform/packages/screensavers/Basic" path="packages/screensavers/Basic" revision="4b5d9d8bea733c4e5876541831f27bf40588b516" />
|
||||||
|
<project name="platform/packages/screensavers/PhotoTable" path="packages/screensavers/PhotoTable" revision="a5e0fee8e923cfc8682eb4431bc3997ed15f649a" />
|
||||||
|
<project name="platform/packages/screensavers/WebView" path="packages/screensavers/WebView" revision="6e0a80f6faed6191acc8ce1b6c79eada09e9e042" />
|
||||||
|
<project name="platform/packages/services/Telephony" path="packages/services/Telephony" revision="aa156251eb0414b8c6546c98769789dc28b38140" />
|
||||||
|
<project name="platform/packages/wallpapers/Basic" path="packages/wallpapers/Basic" revision="2e1d8404b87caf13cde644959f28213f2db09843" />
|
||||||
|
<project name="platform/packages/wallpapers/Galaxy4" path="packages/wallpapers/Galaxy4" revision="34b31b45e75b2e73a770fef1a2f9a862b10f1a57" />
|
||||||
|
<project name="platform/packages/wallpapers/HoloSpiral" path="packages/wallpapers/HoloSpiral" revision="63b75996a7cfb713a6a6feb5c774ba4b46c7d6eb" />
|
||||||
|
<project name="platform/packages/wallpapers/LivePicker" path="packages/wallpapers/LivePicker" revision="8082f92e76774607d62412e8e1191dd940f055ba" />
|
||||||
|
<project name="platform/packages/wallpapers/MagicSmoke" path="packages/wallpapers/MagicSmoke" revision="f01ea4c07914010d52a42130acb7e67d4306fbda" />
|
||||||
|
<project name="platform/packages/wallpapers/MusicVisualization" path="packages/wallpapers/MusicVisualization" revision="72fbcf3a8e4ebee42c36a5887432ca823ef0e4e5" />
|
||||||
|
<project name="platform/packages/wallpapers/NoiseField" path="packages/wallpapers/NoiseField" revision="7d3e52a18a1255baffd7c0675a465f1b85b99f56" />
|
||||||
|
<project name="platform/packages/wallpapers/PhaseBeam" path="packages/wallpapers/PhaseBeam" revision="0da76f35378677f1102e0be218ce1993c0e528b6" />
|
||||||
|
<project groups="pdk" name="platform/pdk" path="pdk" revision="d440d4219412981df7ef90bed65acf29b2e7ea6a" />
|
||||||
|
<project name="platform/prebuilts/android-emulator" path="prebuilts/android-emulator" revision="d6a246c24accff42eb433f5e39d14cb24faf1e58" />
|
||||||
|
<project groups="pdk,darwin" name="platform/prebuilts/clang/darwin-x86/3.1" path="prebuilts/clang/darwin-x86/3.1" revision="426233405bef3c7c825095ab14256c3773894b9b" />
|
||||||
|
<project groups="pdk,darwin" name="platform/prebuilts/clang/darwin-x86/3.2" path="prebuilts/clang/darwin-x86/3.2" revision="af856d77b3cbb1f6afccdc531bee991403c28907" />
|
||||||
|
<project groups="darwin,arm" name="platform/prebuilts/clang/darwin-x86/arm/3.3" path="prebuilts/clang/darwin-x86/arm/3.3" revision="54acc51e28850485e380b55916868a4e1ff17998" />
|
||||||
|
<project groups="pdk,darwin" name="platform/prebuilts/clang/darwin-x86/host/3.4" path="prebuilts/clang/darwin-x86/host/3.4" revision="a798fe00dbd92ad4e5f7123a2e2bc1d805db04f6" />
|
||||||
|
<project groups="pdk,darwin" name="platform/prebuilts/clang/darwin-x86/host/3.5" path="prebuilts/clang/darwin-x86/host/3.5" revision="ce812d27fb78972e71482e93241b9770ca54845d" />
|
||||||
|
<project groups="darwin,mips" name="platform/prebuilts/clang/darwin-x86/mips/3.3" path="prebuilts/clang/darwin-x86/mips/3.3" revision="da3dad928542362835082b2eda44e4dc315d65bb" />
|
||||||
|
<project groups="darwin,x86" name="platform/prebuilts/clang/darwin-x86/x86/3.3" path="prebuilts/clang/darwin-x86/x86/3.3" revision="f67a83f35e30f92b312fbee852184c3f6dc38f34" />
|
||||||
|
<project groups="pdk,linux" name="platform/prebuilts/clang/linux-x86/3.1" path="prebuilts/clang/linux-x86/3.1" revision="e95b4ce22c825da44d14299e1190ea39a5260bde" />
|
||||||
|
<project groups="pdk,linux" name="platform/prebuilts/clang/linux-x86/3.2" path="prebuilts/clang/linux-x86/3.2" revision="471afab478649078ad7c75ec6b252481a59e19b8" />
|
||||||
|
<project groups="linux,arm" name="platform/prebuilts/clang/linux-x86/arm/3.3" path="prebuilts/clang/linux-x86/arm/3.3" revision="2f6d2db9e2af3507d132cf5d286a42fe1d47f7bc" />
|
||||||
|
<project groups="pdk,linux" name="platform/prebuilts/clang/linux-x86/host/3.4" path="prebuilts/clang/linux-x86/host/3.4" revision="fae26a039f79d780ddedcad07f164d9e6c05fc87" />
|
||||||
|
<project groups="pdk,linux" name="platform/prebuilts/clang/linux-x86/host/3.5" path="prebuilts/clang/linux-x86/host/3.5" revision="485ffdc99707f81f4201e85bbbb937f23e1e04aa" />
|
||||||
|
<project groups="linux,mips" name="platform/prebuilts/clang/linux-x86/mips/3.3" path="prebuilts/clang/linux-x86/mips/3.3" revision="51f8e2760628588fe268438d612d942c30d13fb2" />
|
||||||
|
<project groups="linux,x86" name="platform/prebuilts/clang/linux-x86/x86/3.3" path="prebuilts/clang/linux-x86/x86/3.3" revision="017a8a67f92a66b29ab17772e50642a7b9d0f8e6" />
|
||||||
|
<project name="platform/prebuilts/devtools" path="prebuilts/devtools" revision="be724be535ea50585d8c625b768ccb63aacd2926" />
|
||||||
|
<project groups="pdk" name="platform/prebuilts/eclipse" path="prebuilts/eclipse" revision="cf9f78f8cf41b16edf9f712598a42743d5cea4af" />
|
||||||
|
<project groups="notdefault,eclipse" name="platform/prebuilts/eclipse-build-deps" path="prebuilts/eclipse-build-deps" revision="ceb739d6a7c10f5fb5a6cf6e1f702453b1361ad3" />
|
||||||
|
<project groups="notdefault,eclipse" name="platform/prebuilts/eclipse-build-deps-sources" path="prebuilts/eclipse-build-deps-sources" revision="8b7d8f6033ffe2d22905d10cf6d57d5bdcbe519b" />
|
||||||
|
<project groups="pdk,darwin,arm" name="platform/prebuilts/gcc/darwin-x86/aarch64/aarch64-linux-android-4.8" path="prebuilts/gcc/darwin-x86/aarch64/aarch64-linux-android-4.8" revision="a261d38eaebb7ff406a6bb60237b36fc61714d46" />
|
||||||
|
<project groups="pdk,darwin,arm" name="platform/prebuilts/gcc/darwin-x86/aarch64/aarch64-linux-android-4.9" path="prebuilts/gcc/darwin-x86/aarch64/aarch64-linux-android-4.9" revision="32d722d66d7a935a8b6f8e6ab2d5d8bf0e9e0986" />
|
||||||
|
<project groups="pdk,darwin,arm" name="platform/prebuilts/gcc/darwin-x86/arm/arm-eabi-4.8" path="prebuilts/gcc/darwin-x86/arm/arm-eabi-4.8" revision="6d08ca9f45ff685648fd13c75bf5cac4b11c19bb" />
|
||||||
|
<project groups="pdk,darwin,arm" name="platform/prebuilts/gcc/darwin-x86/arm/arm-linux-androideabi-4.8" path="prebuilts/gcc/darwin-x86/arm/arm-linux-androideabi-4.8" revision="264394c23b2686ce52cd4ffb116ced127aa7f8fc" />
|
||||||
|
<project groups="pdk,darwin" name="platform/prebuilts/gcc/darwin-x86/host/headers" path="prebuilts/gcc/darwin-x86/host/headers" revision="4ac4f7cc41cf3c9e36fc3d6cf37fd1cfa9587a68" />
|
||||||
|
<project groups="pdk,darwin" name="platform/prebuilts/gcc/darwin-x86/host/i686-apple-darwin-4.2.1" path="prebuilts/gcc/darwin-x86/host/i686-apple-darwin-4.2.1" revision="8834958755acc291d126ba7ee38ac731d04f9c9e" />
|
||||||
|
<project groups="pdk,darwin,mips" name="platform/prebuilts/gcc/darwin-x86/mips/mips64el-linux-android-4.8" path="prebuilts/gcc/darwin-x86/mips/mips64el-linux-android-4.8" revision="3b5bef47de8017ff39ef5bfbe801e3fa6b272fab" />
|
||||||
|
<project name="platform/prebuilts/gcc/darwin-x86/mips/mips64el-linux-android-4.9" path="prebuilts/gcc/darwin-x86/mips/mips64el-linux-android-4.9" revision="367a6529b0cc9f5ac5ca69226f583420563fd473" />
|
||||||
|
<project groups="pdk,darwin,mips" name="platform/prebuilts/gcc/darwin-x86/mips/mipsel-linux-android-4.8" path="prebuilts/gcc/darwin-x86/mips/mipsel-linux-android-4.8" revision="ba97180acd4251d3acf08530faa4a724af74abd3" />
|
||||||
|
<project groups="pdk,darwin,x86" name="platform/prebuilts/gcc/darwin-x86/x86/x86_64-linux-android-4.8" path="prebuilts/gcc/darwin-x86/x86/x86_64-linux-android-4.8" revision="c3c37a54f07d51a50e17d63dbf1d92da343f45ce" />
|
||||||
|
<project name="platform/prebuilts/gcc/darwin-x86/x86/x86_64-linux-android-4.9" path="prebuilts/gcc/darwin-x86/x86/x86_64-linux-android-4.9" revision="a7c5a1df753fd3a24494d5e1fe00211048be5c1d" />
|
||||||
|
<project groups="pdk,linux,arm" name="platform/prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.8" path="prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.8" revision="7334f0a7a872700d0aaf00bea75917c077c45530" />
|
||||||
|
<project groups="pdk,linux,arm" name="platform/prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9" path="prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9" revision="a3f0180676c6b6cd9c664704f86855d3404ae4dd" />
|
||||||
|
<project groups="pdk,linux,arm" name="platform/prebuilts/gcc/linux-x86/arm/arm-eabi-4.8" path="prebuilts/gcc/linux-x86/arm/arm-eabi-4.8" revision="26e93f6af47f7bd3a9beb5c102a5f45e19bfa38a" />
|
||||||
|
<project groups="pdk,linux,arm" name="platform/prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.8" path="prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.8" revision="d9735fc81434f2af2c44d86ca57740c673c8d9bc" />
|
||||||
|
<project groups="pdk,linux" name="platform/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.6" path="prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.6" revision="eb5c9f0ae36bf964f6855bde54e1b387e2c26bb6" />
|
||||||
|
<project groups="pdk,linux" name="platform/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.8" path="prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.8" revision="1b0544da652fda90a41a1f69889d6b137ce20fb9" />
|
||||||
|
<project name="platform/prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8" path="prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8" revision="2725a175a32032fb9a63e247c176ecc3d448ea27" />
|
||||||
|
<project groups="pdk,linux,mips" name="platform/prebuilts/gcc/linux-x86/mips/mips64el-linux-android-4.8" path="prebuilts/gcc/linux-x86/mips/mips64el-linux-android-4.8" revision="38586de6b44714b4adcf21119fe6b267e33f3ca6" />
|
||||||
|
<project name="platform/prebuilts/gcc/linux-x86/mips/mips64el-linux-android-4.9" path="prebuilts/gcc/linux-x86/mips/mips64el-linux-android-4.9" revision="eabc7ae8ed527ee3d4517196732fa3f3e8939a28" />
|
||||||
|
<project groups="pdk,linux,mips" name="platform/prebuilts/gcc/linux-x86/mips/mipsel-linux-android-4.8" path="prebuilts/gcc/linux-x86/mips/mipsel-linux-android-4.8" revision="c06b9b305c365163c99d4ffba49ac37ce2716024" />
|
||||||
|
<project groups="pdk,linux,x86" name="platform/prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.8" path="prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.8" revision="e08fa7e57a573a9baa5ccd8d4b8d73cc871f9b48" />
|
||||||
|
<project name="platform/prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9" path="prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9" revision="e99278016e6285363bc20d1b35d4b9b5c4e8b0a0" />
|
||||||
|
<project name="platform/prebuilts/gradle-plugin" path="prebuilts/gradle-plugin" revision="e7814a3cbb96742ff74505a1fc152cb534fbf2f9" />
|
||||||
|
<project name="platform/prebuilts/maven_repo/android" path="prebuilts/maven_repo/android" revision="0dbe3df0f057de9e83e599b9be2ca866c673130d" />
|
||||||
|
<project groups="pdk" name="platform/prebuilts/misc" path="prebuilts/misc" revision="3cc2e316acf9da501479bbfd85159407239994d2" />
|
||||||
|
<project groups="pdk" name="platform/prebuilts/ndk" path="prebuilts/ndk" revision="7a8bc5c36d519c41de61765ff94245f56c4bed7a" />
|
||||||
|
<project groups="darwin" name="platform/prebuilts/python/darwin-x86/2.7.5" path="prebuilts/python/darwin-x86/2.7.5" revision="2bdd4fd418614c7c0101147d02199d0e47c4980e" />
|
||||||
|
<project groups="linux" name="platform/prebuilts/python/linux-x86/2.7.5" path="prebuilts/python/linux-x86/2.7.5" revision="6fbc8802b3b68d24a4ee83f164b22490cf702ff2" />
|
||||||
|
<project groups="pdk" name="platform/prebuilts/qemu-kernel" path="prebuilts/qemu-kernel" revision="5f91f38eac40a8465f3a7e4aa298a75afcf2936a" />
|
||||||
|
<project name="platform/prebuilts/runtime" path="prebuilts/runtime" revision="56e663b8ec9cd0df9ce5afdc7b7d56460faf44c8" />
|
||||||
|
<project groups="pdk" name="platform/prebuilts/sdk" path="prebuilts/sdk" revision="52043ca65e06bc84779dd8d3e55e72ad04bcef59" />
|
||||||
|
<project groups="pdk,tools" name="platform/prebuilts/tools" path="prebuilts/tools" revision="130c3d0a1a484d617531d75ddd50714f68213cbb" />
|
||||||
|
<project name="platform/sdk" path="sdk" revision="1af9ef83f5f6c6fd9202d5bdd8d4248a4eb855aa" />
|
||||||
|
<project groups="pdk" name="platform/system/core" path="system/core" revision="cddc97cb3a927d179a42e0fec77f0d267fcd74d1" />
|
||||||
|
<project groups="pdk" name="platform/system/extras" path="system/extras" revision="97ed949ec7bef088ca3d06fb7b5f3bdad9a5103c" />
|
||||||
|
<project name="platform/system/keymaster" path="system/keymaster" revision="7a70abbf29293b30bb1e7ed3a58deb40f8774a53" />
|
||||||
|
<project groups="pdk" name="platform/system/media" path="system/media" revision="77f0f32b32adc5ba1134e7a68e4d907c4f695eb6" />
|
||||||
|
<project groups="pdk" name="platform/system/netd" path="system/netd" revision="f5d949ef0991737af9daa7ba702cc2ec638e435b" />
|
||||||
|
<project groups="pdk" name="platform/system/security" path="system/security" revision="0387a7fd23021b904612101b727a2060847f6169" />
|
||||||
|
<project groups="pdk" name="platform/system/vold" path="system/vold" revision="c0c2867518eed4539444434c95fad8185a6ac08e" />
|
||||||
|
<project groups="notdefault,tools" name="platform/tools/adt/eclipse" path="tools/adt/eclipse" revision="ede2ed86419bb4c78428f1ac09825b1a247d8e24" />
|
||||||
|
<project groups="notdefault,tools" name="platform/tools/adt/idea" path="tools/adt/idea" revision="50a5da1af3e851df7aff37c291541000685bcad4" />
|
||||||
|
<project groups="notdefault,tools" name="platform/tools/base" path="tools/base" revision="4dc06057ba77596807e2d28c715719f240f71549" />
|
||||||
|
<project groups="notdefault,tools" name="platform/tools/build" path="tools/build" revision="69c4b95102b4b9862bfba68b3eaf5b7537a705ee" />
|
||||||
|
<project groups="notdefault,tools" name="platform/tools/emulator" path="tools/emulator" revision="c427e5d5227ba9413307670a5d758d9ced394a7e" />
|
||||||
|
<project groups="tools" name="platform/tools/external/fat32lib" path="tools/external/fat32lib" revision="3880776e41ff7def06e351720f2d162f88b58a03" />
|
||||||
|
<project groups="tools" name="platform/tools/external/gradle" path="tools/external/gradle" revision="842b7a27df8606faa29b0875a13270701eb78dd8" />
|
||||||
|
<project groups="notdefault,tools" name="platform/tools/idea" path="tools/idea" revision="12356153d01fcde14dd3a06948cfded92c20d068" />
|
||||||
|
<project groups="notdefault,motodev" name="platform/tools/motodev" path="tools/motodev" revision="69989786cefbde82527960a1e100ec9afba46a98" />
|
||||||
|
<project groups="notdefault,tools" name="platform/tools/studio/cloud" path="tools/studio/cloud" revision="839eb097c5fc73f91a722f1457a2e8e11eb4e1a5" />
|
||||||
|
<project groups="notdefault,tools" name="platform/tools/swt" path="tools/swt" revision="aaf3131b0e4b15d39156a6e94e5da06b0183d61d" />
|
||||||
|
</manifest>
|
|
@ -7,6 +7,7 @@
|
||||||
"""Argument-less script to select what to run on the buildbots."""
|
"""Argument-less script to select what to run on the buildbots."""
|
||||||
|
|
||||||
|
|
||||||
|
import filecmp
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import subprocess
|
import subprocess
|
||||||
|
@ -30,7 +31,8 @@ OUT_DIR = os.path.join(TRUNK_DIR, 'out')
|
||||||
|
|
||||||
def CallSubProcess(*args, **kwargs):
|
def CallSubProcess(*args, **kwargs):
|
||||||
"""Wrapper around subprocess.call which treats errors as build exceptions."""
|
"""Wrapper around subprocess.call which treats errors as build exceptions."""
|
||||||
retcode = subprocess.call(*args, **kwargs)
|
with open(os.devnull) as devnull_fd:
|
||||||
|
retcode = subprocess.call(stdin=devnull_fd, *args, **kwargs)
|
||||||
if retcode != 0:
|
if retcode != 0:
|
||||||
print '@@@STEP_EXCEPTION@@@'
|
print '@@@STEP_EXCEPTION@@@'
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
@ -49,10 +51,6 @@ def PrepareCmake():
|
||||||
|
|
||||||
print '@@@BUILD_STEP Initialize CMake checkout@@@'
|
print '@@@BUILD_STEP Initialize CMake checkout@@@'
|
||||||
os.mkdir(CMAKE_DIR)
|
os.mkdir(CMAKE_DIR)
|
||||||
CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot'])
|
|
||||||
CallSubProcess(['git', 'config', '--global',
|
|
||||||
'user.email', 'chrome-bot@google.com'])
|
|
||||||
CallSubProcess(['git', 'config', '--global', 'color.ui', 'false'])
|
|
||||||
|
|
||||||
print '@@@BUILD_STEP Sync CMake@@@'
|
print '@@@BUILD_STEP Sync CMake@@@'
|
||||||
CallSubProcess(
|
CallSubProcess(
|
||||||
|
@ -73,41 +71,96 @@ def PrepareCmake():
|
||||||
CallSubProcess( ['make', 'cmake'], cwd=CMAKE_DIR)
|
CallSubProcess( ['make', 'cmake'], cwd=CMAKE_DIR)
|
||||||
|
|
||||||
|
|
||||||
|
_ANDROID_SETUP = 'source build/envsetup.sh && lunch full-eng'
|
||||||
|
|
||||||
|
|
||||||
def PrepareAndroidTree():
|
def PrepareAndroidTree():
|
||||||
"""Prepare an Android tree to run 'android' format tests."""
|
"""Prepare an Android tree to run 'android' format tests."""
|
||||||
if os.environ['BUILDBOT_CLOBBER'] == '1':
|
if os.environ['BUILDBOT_CLOBBER'] == '1':
|
||||||
print '@@@BUILD_STEP Clobber Android checkout@@@'
|
print '@@@BUILD_STEP Clobber Android checkout@@@'
|
||||||
shutil.rmtree(ANDROID_DIR)
|
shutil.rmtree(ANDROID_DIR)
|
||||||
|
|
||||||
# The release of Android we use is static, so there's no need to do anything
|
# (Re)create the directory so that the following steps will succeed.
|
||||||
# if the directory already exists.
|
if not os.path.isdir(ANDROID_DIR):
|
||||||
if os.path.isdir(ANDROID_DIR):
|
os.mkdir(ANDROID_DIR)
|
||||||
|
|
||||||
|
# We use a manifest from the gyp project listing pinned revisions of AOSP to
|
||||||
|
# use, to ensure that we test against a stable target. This needs to be
|
||||||
|
# updated to pick up new build system changes sometimes, so we must test if
|
||||||
|
# it has changed.
|
||||||
|
manifest_filename = 'aosp_manifest.xml'
|
||||||
|
gyp_manifest = os.path.join(BUILDBOT_DIR, manifest_filename)
|
||||||
|
android_manifest = os.path.join(ANDROID_DIR, '.repo', 'manifests',
|
||||||
|
manifest_filename)
|
||||||
|
manifest_is_current = (os.path.isfile(android_manifest) and
|
||||||
|
filecmp.cmp(gyp_manifest, android_manifest))
|
||||||
|
if not manifest_is_current:
|
||||||
|
# It's safe to repeat these steps, so just do them again to make sure we are
|
||||||
|
# in a good state.
|
||||||
|
print '@@@BUILD_STEP Initialize Android checkout@@@'
|
||||||
|
CallSubProcess(
|
||||||
|
['repo', 'init',
|
||||||
|
'-u', 'https://android.googlesource.com/platform/manifest',
|
||||||
|
'-b', 'master',
|
||||||
|
'-g', 'all,-notdefault,-device,-darwin,-mips,-x86'],
|
||||||
|
cwd=ANDROID_DIR)
|
||||||
|
shutil.copy(gyp_manifest, android_manifest)
|
||||||
|
|
||||||
|
print '@@@BUILD_STEP Sync Android@@@'
|
||||||
|
CallSubProcess(['repo', 'sync', '-j4', '-m', manifest_filename],
|
||||||
|
cwd=ANDROID_DIR)
|
||||||
|
|
||||||
|
# If we already built the system image successfully and didn't sync to a new
|
||||||
|
# version of the source, skip running the build again as it's expensive even
|
||||||
|
# when there's nothing to do.
|
||||||
|
system_img = os.path.join(ANDROID_DIR, 'out', 'target', 'product', 'generic',
|
||||||
|
'system.img')
|
||||||
|
if manifest_is_current and os.path.isfile(system_img):
|
||||||
return
|
return
|
||||||
|
|
||||||
print '@@@BUILD_STEP Initialize Android checkout@@@'
|
|
||||||
os.mkdir(ANDROID_DIR)
|
|
||||||
CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot'])
|
|
||||||
CallSubProcess(['git', 'config', '--global',
|
|
||||||
'user.email', 'chrome-bot@google.com'])
|
|
||||||
CallSubProcess(['git', 'config', '--global', 'color.ui', 'false'])
|
|
||||||
CallSubProcess(
|
|
||||||
['repo', 'init',
|
|
||||||
'-u', 'https://android.googlesource.com/platform/manifest',
|
|
||||||
'-b', 'android-4.2.1_r1',
|
|
||||||
'-g', 'all,-notdefault,-device,-darwin,-mips,-x86'],
|
|
||||||
cwd=ANDROID_DIR)
|
|
||||||
|
|
||||||
print '@@@BUILD_STEP Sync Android@@@'
|
|
||||||
CallSubProcess(['repo', 'sync', '-j4'], cwd=ANDROID_DIR)
|
|
||||||
|
|
||||||
print '@@@BUILD_STEP Build Android@@@'
|
print '@@@BUILD_STEP Build Android@@@'
|
||||||
CallSubProcess(
|
CallSubProcess(
|
||||||
['/bin/bash',
|
['/bin/bash',
|
||||||
'-c', 'source build/envsetup.sh && lunch full-eng && make -j4'],
|
'-c', '%s && make -j4' % _ANDROID_SETUP],
|
||||||
cwd=ANDROID_DIR)
|
cwd=ANDROID_DIR)
|
||||||
|
|
||||||
|
|
||||||
def GypTestFormat(title, format=None, msvs_version=None):
|
def StartAndroidEmulator():
|
||||||
|
"""Start an android emulator from the built android tree."""
|
||||||
|
print '@@@BUILD_STEP Start Android emulator@@@'
|
||||||
|
|
||||||
|
CallSubProcess(['/bin/bash', '-c',
|
||||||
|
'%s && adb kill-server ' % _ANDROID_SETUP],
|
||||||
|
cwd=ANDROID_DIR)
|
||||||
|
|
||||||
|
# If taskset is available, use it to force adbd to run only on one core, as,
|
||||||
|
# sadly, it improves its reliability (see crbug.com/268450).
|
||||||
|
adbd_wrapper = ''
|
||||||
|
with open(os.devnull, 'w') as devnull_fd:
|
||||||
|
if subprocess.call(['which', 'taskset'], stdout=devnull_fd) == 0:
|
||||||
|
adbd_wrapper = 'taskset -c 0'
|
||||||
|
CallSubProcess(['/bin/bash', '-c',
|
||||||
|
'%s && %s adb start-server ' % (_ANDROID_SETUP, adbd_wrapper)],
|
||||||
|
cwd=ANDROID_DIR)
|
||||||
|
|
||||||
|
subprocess.Popen(
|
||||||
|
['/bin/bash', '-c',
|
||||||
|
'%s && emulator -no-window' % _ANDROID_SETUP],
|
||||||
|
cwd=ANDROID_DIR)
|
||||||
|
CallSubProcess(
|
||||||
|
['/bin/bash', '-c',
|
||||||
|
'%s && adb wait-for-device' % _ANDROID_SETUP],
|
||||||
|
cwd=ANDROID_DIR)
|
||||||
|
|
||||||
|
|
||||||
|
def StopAndroidEmulator():
|
||||||
|
"""Stop all android emulators."""
|
||||||
|
print '@@@BUILD_STEP Stop Android emulator@@@'
|
||||||
|
# If this fails, it's because there is no emulator running.
|
||||||
|
subprocess.call(['pkill', 'emulator.*'])
|
||||||
|
|
||||||
|
|
||||||
|
def GypTestFormat(title, format=None, msvs_version=None, tests=[]):
|
||||||
"""Run the gyp tests for a given format, emitting annotator tags.
|
"""Run the gyp tests for a given format, emitting annotator tags.
|
||||||
|
|
||||||
See annotator docs at:
|
See annotator docs at:
|
||||||
|
@ -126,19 +179,18 @@ def GypTestFormat(title, format=None, msvs_version=None):
|
||||||
if msvs_version:
|
if msvs_version:
|
||||||
env['GYP_MSVS_VERSION'] = msvs_version
|
env['GYP_MSVS_VERSION'] = msvs_version
|
||||||
command = ' '.join(
|
command = ' '.join(
|
||||||
[sys.executable, 'trunk/gyptest.py',
|
[sys.executable, 'gyp/gyptest.py',
|
||||||
'--all',
|
'--all',
|
||||||
'--passed',
|
'--passed',
|
||||||
'--format', format,
|
'--format', format,
|
||||||
'--path', CMAKE_BIN_DIR,
|
'--path', CMAKE_BIN_DIR,
|
||||||
'--chdir', 'trunk'])
|
'--chdir', 'gyp'] + tests)
|
||||||
if format == 'android':
|
if format == 'android':
|
||||||
# gyptest needs the environment setup from envsetup/lunch in order to build
|
# gyptest needs the environment setup from envsetup/lunch in order to build
|
||||||
# using the 'android' backend, so this is done in a single shell.
|
# using the 'android' backend, so this is done in a single shell.
|
||||||
retcode = subprocess.call(
|
retcode = subprocess.call(
|
||||||
['/bin/bash',
|
['/bin/bash',
|
||||||
'-c', 'source build/envsetup.sh && lunch full-eng && cd %s && %s'
|
'-c', '%s && cd %s && %s' % (_ANDROID_SETUP, ROOT_DIR, command)],
|
||||||
% (ROOT_DIR, command)],
|
|
||||||
cwd=ANDROID_DIR, env=env)
|
cwd=ANDROID_DIR, env=env)
|
||||||
else:
|
else:
|
||||||
retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True)
|
retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True)
|
||||||
|
@ -160,7 +212,11 @@ def GypBuild():
|
||||||
# The Android gyp bot runs on linux so this must be tested first.
|
# The Android gyp bot runs on linux so this must be tested first.
|
||||||
if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-android':
|
if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-android':
|
||||||
PrepareAndroidTree()
|
PrepareAndroidTree()
|
||||||
retcode += GypTestFormat('android')
|
StartAndroidEmulator()
|
||||||
|
try:
|
||||||
|
retcode += GypTestFormat('android')
|
||||||
|
finally:
|
||||||
|
StopAndroidEmulator()
|
||||||
elif sys.platform.startswith('linux'):
|
elif sys.platform.startswith('linux'):
|
||||||
retcode += GypTestFormat('ninja')
|
retcode += GypTestFormat('ninja')
|
||||||
retcode += GypTestFormat('make')
|
retcode += GypTestFormat('make')
|
||||||
|
@ -173,8 +229,13 @@ def GypBuild():
|
||||||
elif sys.platform == 'win32':
|
elif sys.platform == 'win32':
|
||||||
retcode += GypTestFormat('ninja')
|
retcode += GypTestFormat('ninja')
|
||||||
if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
|
if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
|
||||||
retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010')
|
retcode += GypTestFormat('msvs-ninja-2013', format='msvs-ninja',
|
||||||
retcode += GypTestFormat('msvs-2012', format='msvs', msvs_version='2012')
|
msvs_version='2013',
|
||||||
|
tests=[
|
||||||
|
r'test\generator-output\gyptest-actions.py',
|
||||||
|
r'test\generator-output\gyptest-relocate.py',
|
||||||
|
r'test\generator-output\gyptest-rules.py'])
|
||||||
|
retcode += GypTestFormat('msvs-2013', format='msvs', msvs_version='2013')
|
||||||
else:
|
else:
|
||||||
raise Exception('Unknown platform')
|
raise Exception('Unknown platform')
|
||||||
if retcode:
|
if retcode:
|
||||||
|
|
6
gyp/buildbot/commit_queue/OWNERS
Normal file
6
gyp/buildbot/commit_queue/OWNERS
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
set noparent
|
||||||
|
bradnelson@chromium.org
|
||||||
|
bradnelson@google.com
|
||||||
|
iannucci@chromium.org
|
||||||
|
scottmg@chromium.org
|
||||||
|
thakis@chromium.org
|
3
gyp/buildbot/commit_queue/README
Normal file
3
gyp/buildbot/commit_queue/README
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
cq_config.json describes the trybots that must pass in order
|
||||||
|
to land a change through the commit queue.
|
||||||
|
Comments are here as the file is strictly JSON.
|
16
gyp/buildbot/commit_queue/cq_config.json
Normal file
16
gyp/buildbot/commit_queue/cq_config.json
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
{
|
||||||
|
"trybots": {
|
||||||
|
"launched": {
|
||||||
|
"tryserver.nacl": {
|
||||||
|
"gyp-presubmit": ["defaulttests"],
|
||||||
|
"gyp-android": ["defaulttests"],
|
||||||
|
"gyp-linux": ["defaulttests"],
|
||||||
|
"gyp-mac": ["defaulttests"],
|
||||||
|
"gyp-win32": ["defaulttests"],
|
||||||
|
"gyp-win64": ["defaulttests"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"triggered": {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,10 +1,10 @@
|
||||||
# This file is used by gcl to get repository specific information.
|
# This file is used by gcl to get repository specific information.
|
||||||
CODE_REVIEW_SERVER: codereview.chromium.org
|
CODE_REVIEW_SERVER: codereview.chromium.org
|
||||||
CC_LIST: gyp-developer@googlegroups.com
|
CC_LIST: gyp-developer@googlegroups.com
|
||||||
VIEW_VC: http://code.google.com/p/gyp/source/detail?r=
|
VIEW_VC: https://chromium.googlesource.com/external/gyp/+/
|
||||||
TRY_ON_UPLOAD: True
|
TRY_ON_UPLOAD: False
|
||||||
TRYSERVER_PROJECT: gyp
|
TRYSERVER_PROJECT: gyp
|
||||||
TRYSERVER_PATCHLEVEL: 0
|
TRYSERVER_PATCHLEVEL: 1
|
||||||
TRYSERVER_ROOT: trunk
|
TRYSERVER_ROOT: gyp
|
||||||
TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl
|
TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl
|
||||||
|
PROJECT: gyp
|
||||||
|
|
2
gyp/gyp
2
gyp/gyp
|
@ -1,4 +1,4 @@
|
||||||
#!/bin/bash
|
#!/bin/sh
|
||||||
# Copyright 2013 The Chromium Authors. All rights reserved.
|
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||||
# Use of this source code is governed by a BSD-style license that can be
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
# found in the LICENSE file.
|
# found in the LICENSE file.
|
||||||
|
|
|
@ -1,7 +0,0 @@
|
||||||
/* Copyright (c) 2009 Google Inc. All rights reserved.
|
|
||||||
* Use of this source code is governed by a BSD-style license that can be
|
|
||||||
* found in the LICENSE file. */
|
|
||||||
|
|
||||||
int main() {
|
|
||||||
return 0;
|
|
||||||
}
|
|
|
@ -13,7 +13,7 @@ import optparse
|
||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
class CommandRunner:
|
class CommandRunner(object):
|
||||||
"""
|
"""
|
||||||
Executor class for commands, including "commands" implemented by
|
Executor class for commands, including "commands" implemented by
|
||||||
Python functions.
|
Python functions.
|
||||||
|
@ -117,7 +117,7 @@ class CommandRunner:
|
||||||
return self.execute(command, stdout, stderr)
|
return self.execute(command, stdout, stderr)
|
||||||
|
|
||||||
|
|
||||||
class Unbuffered:
|
class Unbuffered(object):
|
||||||
def __init__(self, fp):
|
def __init__(self, fp):
|
||||||
self.fp = fp
|
self.fp = fp
|
||||||
def write(self, arg):
|
def write(self, arg):
|
||||||
|
@ -224,7 +224,7 @@ def main(argv=None):
|
||||||
'win32': ['msvs', 'ninja'],
|
'win32': ['msvs', 'ninja'],
|
||||||
'linux2': ['make', 'ninja'],
|
'linux2': ['make', 'ninja'],
|
||||||
'linux3': ['make', 'ninja'],
|
'linux3': ['make', 'ninja'],
|
||||||
'darwin': ['make', 'ninja', 'xcode'],
|
'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'],
|
||||||
}[sys.platform]
|
}[sys.platform]
|
||||||
|
|
||||||
for format in format_list:
|
for format in format_list:
|
||||||
|
|
|
@ -172,7 +172,7 @@ class MSVSProject(MSVSSolutionEntry):
|
||||||
#------------------------------------------------------------------------------
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
class MSVSSolution:
|
class MSVSSolution(object):
|
||||||
"""Visual Studio solution."""
|
"""Visual Studio solution."""
|
||||||
|
|
||||||
def __init__(self, path, version, entries=None, variants=None,
|
def __init__(self, path, version, entries=None, variants=None,
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
# Use of this source code is governed by a BSD-style license that can be
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
# found in the LICENSE file.
|
# found in the LICENSE file.
|
||||||
|
|
||||||
"""Code to validate and convert settings of the Microsoft build tools.
|
r"""Code to validate and convert settings of the Microsoft build tools.
|
||||||
|
|
||||||
This file contains code to validate and convert settings of the Microsoft
|
This file contains code to validate and convert settings of the Microsoft
|
||||||
build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(),
|
build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(),
|
||||||
|
@ -314,7 +314,14 @@ def _MSBuildOnly(tool, name, setting_type):
|
||||||
name: the name of the setting.
|
name: the name of the setting.
|
||||||
setting_type: the type of this setting.
|
setting_type: the type of this setting.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
def _Translate(value, msbuild_settings):
|
||||||
|
# Let msbuild-only properties get translated as-is from msvs_settings.
|
||||||
|
tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {})
|
||||||
|
tool_settings[name] = value
|
||||||
|
|
||||||
_msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
|
_msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
|
||||||
|
_msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
|
||||||
|
|
||||||
|
|
||||||
def _ConvertedToAdditionalOption(tool, msvs_name, flag):
|
def _ConvertedToAdditionalOption(tool, msvs_name, flag):
|
||||||
|
@ -367,6 +374,35 @@ fix_vc_macro_slashes_regex = re.compile(
|
||||||
r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
|
r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Regular expression to detect keys that were generated by exclusion lists
|
||||||
|
_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
|
||||||
|
|
||||||
|
|
||||||
|
def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
|
||||||
|
"""Verify that 'setting' is valid if it is generated from an exclusion list.
|
||||||
|
|
||||||
|
If the setting appears to be generated from an exclusion list, the root name
|
||||||
|
is checked.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
setting: A string that is the setting name to validate
|
||||||
|
settings: A dictionary where the keys are valid settings
|
||||||
|
error_msg: The message to emit in the event of error
|
||||||
|
stderr: The stream receiving the error messages.
|
||||||
|
"""
|
||||||
|
# This may be unrecognized because it's an exclusion list. If the
|
||||||
|
# setting name has the _excluded suffix, then check the root name.
|
||||||
|
unrecognized = True
|
||||||
|
m = re.match(_EXCLUDED_SUFFIX_RE, setting)
|
||||||
|
if m:
|
||||||
|
root_setting = m.group(1)
|
||||||
|
unrecognized = root_setting not in settings
|
||||||
|
|
||||||
|
if unrecognized:
|
||||||
|
# We don't know this setting. Give a warning.
|
||||||
|
print >> stderr, error_msg
|
||||||
|
|
||||||
|
|
||||||
def FixVCMacroSlashes(s):
|
def FixVCMacroSlashes(s):
|
||||||
"""Replace macros which have excessive following slashes.
|
"""Replace macros which have excessive following slashes.
|
||||||
|
|
||||||
|
@ -388,11 +424,11 @@ def ConvertVCMacrosToMSBuild(s):
|
||||||
if '$' in s:
|
if '$' in s:
|
||||||
replace_map = {
|
replace_map = {
|
||||||
'$(ConfigurationName)': '$(Configuration)',
|
'$(ConfigurationName)': '$(Configuration)',
|
||||||
'$(InputDir)': '%(RootDir)%(Directory)',
|
'$(InputDir)': '%(RelativeDir)',
|
||||||
'$(InputExt)': '%(Extension)',
|
'$(InputExt)': '%(Extension)',
|
||||||
'$(InputFileName)': '%(Filename)%(Extension)',
|
'$(InputFileName)': '%(Filename)%(Extension)',
|
||||||
'$(InputName)': '%(Filename)',
|
'$(InputName)': '%(Filename)',
|
||||||
'$(InputPath)': '%(FullPath)',
|
'$(InputPath)': '%(Identity)',
|
||||||
'$(ParentName)': '$(ProjectFileName)',
|
'$(ParentName)': '$(ProjectFileName)',
|
||||||
'$(PlatformName)': '$(Platform)',
|
'$(PlatformName)': '$(Platform)',
|
||||||
'$(SafeInputName)': '%(Filename)',
|
'$(SafeInputName)': '%(Filename)',
|
||||||
|
@ -429,10 +465,12 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
|
||||||
print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
|
print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
|
||||||
'%s' % (msvs_tool_name, msvs_setting, e))
|
'%s' % (msvs_tool_name, msvs_setting, e))
|
||||||
else:
|
else:
|
||||||
# We don't know this setting. Give a warning.
|
_ValidateExclusionSetting(msvs_setting,
|
||||||
print >> stderr, ('Warning: unrecognized setting %s/%s '
|
msvs_tool,
|
||||||
'while converting to MSBuild.' %
|
('Warning: unrecognized setting %s/%s '
|
||||||
(msvs_tool_name, msvs_setting))
|
'while converting to MSBuild.' %
|
||||||
|
(msvs_tool_name, msvs_setting)),
|
||||||
|
stderr)
|
||||||
else:
|
else:
|
||||||
print >> stderr, ('Warning: unrecognized tool %s while converting to '
|
print >> stderr, ('Warning: unrecognized tool %s while converting to '
|
||||||
'MSBuild.' % msvs_tool_name)
|
'MSBuild.' % msvs_tool_name)
|
||||||
|
@ -483,8 +521,12 @@ def _ValidateSettings(validators, settings, stderr):
|
||||||
print >> stderr, ('Warning: for %s/%s, %s' %
|
print >> stderr, ('Warning: for %s/%s, %s' %
|
||||||
(tool_name, setting, e))
|
(tool_name, setting, e))
|
||||||
else:
|
else:
|
||||||
print >> stderr, ('Warning: unrecognized setting %s/%s' %
|
_ValidateExclusionSetting(setting,
|
||||||
(tool_name, setting))
|
tool_validators,
|
||||||
|
('Warning: unrecognized setting %s/%s' %
|
||||||
|
(tool_name, setting)),
|
||||||
|
stderr)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
|
print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
|
||||||
|
|
||||||
|
@ -496,6 +538,7 @@ _midl = _Tool('VCMIDLTool', 'Midl')
|
||||||
_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
|
_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
|
||||||
_lib = _Tool('VCLibrarianTool', 'Lib')
|
_lib = _Tool('VCLibrarianTool', 'Lib')
|
||||||
_manifest = _Tool('VCManifestTool', 'Manifest')
|
_manifest = _Tool('VCManifestTool', 'Manifest')
|
||||||
|
_masm = _Tool('MASM', 'MASM')
|
||||||
|
|
||||||
|
|
||||||
_AddTool(_compile)
|
_AddTool(_compile)
|
||||||
|
@ -504,6 +547,7 @@ _AddTool(_midl)
|
||||||
_AddTool(_rc)
|
_AddTool(_rc)
|
||||||
_AddTool(_lib)
|
_AddTool(_lib)
|
||||||
_AddTool(_manifest)
|
_AddTool(_manifest)
|
||||||
|
_AddTool(_masm)
|
||||||
# Add sections only found in the MSBuild settings.
|
# Add sections only found in the MSBuild settings.
|
||||||
_msbuild_validators[''] = {}
|
_msbuild_validators[''] = {}
|
||||||
_msbuild_validators['ProjectReference'] = {}
|
_msbuild_validators['ProjectReference'] = {}
|
||||||
|
@ -567,7 +611,8 @@ _Same(_compile, 'BrowseInformation',
|
||||||
_Same(_compile, 'CallingConvention',
|
_Same(_compile, 'CallingConvention',
|
||||||
_Enumeration(['Cdecl', # /Gd
|
_Enumeration(['Cdecl', # /Gd
|
||||||
'FastCall', # /Gr
|
'FastCall', # /Gr
|
||||||
'StdCall'])) # /Gz
|
'StdCall', # /Gz
|
||||||
|
'VectorCall'])) # /Gv
|
||||||
_Same(_compile, 'CompileAs',
|
_Same(_compile, 'CompileAs',
|
||||||
_Enumeration(['Default',
|
_Enumeration(['Default',
|
||||||
'CompileAsC', # /TC
|
'CompileAsC', # /TC
|
||||||
|
@ -581,7 +626,12 @@ _Same(_compile, 'DebugInformationFormat',
|
||||||
_Same(_compile, 'EnableEnhancedInstructionSet',
|
_Same(_compile, 'EnableEnhancedInstructionSet',
|
||||||
_Enumeration(['NotSet',
|
_Enumeration(['NotSet',
|
||||||
'StreamingSIMDExtensions', # /arch:SSE
|
'StreamingSIMDExtensions', # /arch:SSE
|
||||||
'StreamingSIMDExtensions2'])) # /arch:SSE2
|
'StreamingSIMDExtensions2', # /arch:SSE2
|
||||||
|
'AdvancedVectorExtensions', # /arch:AVX (vs2012+)
|
||||||
|
'NoExtensions', # /arch:IA32 (vs2012+)
|
||||||
|
# This one only exists in the new msbuild format.
|
||||||
|
'AdvancedVectorExtensions2', # /arch:AVX2 (vs2013r2+)
|
||||||
|
]))
|
||||||
_Same(_compile, 'ErrorReporting',
|
_Same(_compile, 'ErrorReporting',
|
||||||
_Enumeration(['None', # /errorReport:none
|
_Enumeration(['None', # /errorReport:none
|
||||||
'Prompt', # /errorReport:prompt
|
'Prompt', # /errorReport:prompt
|
||||||
|
@ -836,13 +886,6 @@ _Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)
|
||||||
# MSVS options not found in MSBuild.
|
# MSVS options not found in MSBuild.
|
||||||
_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
|
_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
|
||||||
_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
|
_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
|
||||||
# These settings generate correctly in the MSVS output files when using
|
|
||||||
# e.g. DelayLoadDLLs! or AdditionalDependencies! to exclude files from
|
|
||||||
# configuration entries, but result in spurious artifacts which can be
|
|
||||||
# safely ignored here. See crbug.com/246570
|
|
||||||
_MSVSOnly(_link, 'AdditionalLibraryDirectories_excluded', _folder_list)
|
|
||||||
_MSVSOnly(_link, 'DelayLoadDLLs_excluded', _file_list)
|
|
||||||
_MSVSOnly(_link, 'AdditionalDependencies_excluded', _file_list)
|
|
||||||
|
|
||||||
# MSBuild options not found in MSVS.
|
# MSBuild options not found in MSVS.
|
||||||
_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
|
_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
|
||||||
|
@ -991,9 +1034,6 @@ _Same(_lib, 'TargetMachine', _target_machine_enumeration)
|
||||||
# ProjectReference. We may want to validate that they are consistent.
|
# ProjectReference. We may want to validate that they are consistent.
|
||||||
_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
|
_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
|
||||||
|
|
||||||
# TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp.
|
|
||||||
_MSVSOnly(_lib, 'AdditionalLibraryDirectories_excluded', _folder_list)
|
|
||||||
|
|
||||||
_MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false'
|
_MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false'
|
||||||
_MSBuildOnly(_lib, 'ErrorReporting',
|
_MSBuildOnly(_lib, 'ErrorReporting',
|
||||||
_Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT
|
_Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT
|
||||||
|
@ -1049,3 +1089,11 @@ _MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
|
||||||
_MSBuildOnly(_manifest, 'OutputResourceManifests', _string) # /outputresource
|
_MSBuildOnly(_manifest, 'OutputResourceManifests', _string) # /outputresource
|
||||||
_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean) # /nodependency
|
_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean) # /nodependency
|
||||||
_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
|
_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
|
||||||
|
|
||||||
|
|
||||||
|
# Directives for MASM.
|
||||||
|
# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
|
||||||
|
# MSBuild MASM settings.
|
||||||
|
|
||||||
|
# Options that have the same name in MSVS and MSBuild.
|
||||||
|
_Same(_masm, 'UseSafeExceptionHandlers', _boolean) # /safeseh
|
||||||
|
|
|
@ -109,6 +109,7 @@ class TestSequenceFunctions(unittest.TestCase):
|
||||||
'ZZXYZ': 'bogus'},
|
'ZZXYZ': 'bogus'},
|
||||||
'VCLinkerTool': {
|
'VCLinkerTool': {
|
||||||
'AdditionalDependencies': 'file1;file2',
|
'AdditionalDependencies': 'file1;file2',
|
||||||
|
'AdditionalDependencies_excluded': 'file3',
|
||||||
'AdditionalLibraryDirectories': 'folder1;folder2',
|
'AdditionalLibraryDirectories': 'folder1;folder2',
|
||||||
'AdditionalManifestDependencies': 'file1;file2',
|
'AdditionalManifestDependencies': 'file1;file2',
|
||||||
'AdditionalOptions': 'a string1',
|
'AdditionalOptions': 'a string1',
|
||||||
|
@ -266,7 +267,7 @@ class TestSequenceFunctions(unittest.TestCase):
|
||||||
'Warning: for VCCLCompilerTool/BrowseInformation, '
|
'Warning: for VCCLCompilerTool/BrowseInformation, '
|
||||||
"invalid literal for int() with base 10: 'fdkslj'",
|
"invalid literal for int() with base 10: 'fdkslj'",
|
||||||
'Warning: for VCCLCompilerTool/CallingConvention, '
|
'Warning: for VCCLCompilerTool/CallingConvention, '
|
||||||
'index value (-1) not in expected range [0, 3)',
|
'index value (-1) not in expected range [0, 4)',
|
||||||
'Warning: for VCCLCompilerTool/DebugInformationFormat, '
|
'Warning: for VCCLCompilerTool/DebugInformationFormat, '
|
||||||
'converted value for 2 not specified.',
|
'converted value for 2 not specified.',
|
||||||
'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
|
'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
|
||||||
|
|
|
@ -8,10 +8,12 @@ import copy
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
|
||||||
_TARGET_TYPE_EXT = {
|
# A dictionary mapping supported target types to extensions.
|
||||||
'executable': '.exe',
|
TARGET_TYPE_EXT = {
|
||||||
'loadable_module': '.dll',
|
'executable': 'exe',
|
||||||
'shared_library': '.dll',
|
'loadable_module': 'dll',
|
||||||
|
'shared_library': 'dll',
|
||||||
|
'static_library': 'lib',
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -109,15 +111,16 @@ def ShardTargets(target_list, target_dicts):
|
||||||
new_target_dicts[t] = target_dicts[t]
|
new_target_dicts[t] = target_dicts[t]
|
||||||
# Shard dependencies.
|
# Shard dependencies.
|
||||||
for t in new_target_dicts:
|
for t in new_target_dicts:
|
||||||
dependencies = copy.copy(new_target_dicts[t].get('dependencies', []))
|
for deptype in ('dependencies', 'dependencies_original'):
|
||||||
new_dependencies = []
|
dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
|
||||||
for d in dependencies:
|
new_dependencies = []
|
||||||
if d in targets_to_shard:
|
for d in dependencies:
|
||||||
for i in range(targets_to_shard[d]):
|
if d in targets_to_shard:
|
||||||
new_dependencies.append(_ShardName(d, i))
|
for i in range(targets_to_shard[d]):
|
||||||
else:
|
new_dependencies.append(_ShardName(d, i))
|
||||||
new_dependencies.append(d)
|
else:
|
||||||
new_target_dicts[t]['dependencies'] = new_dependencies
|
new_dependencies.append(d)
|
||||||
|
new_target_dicts[t][deptype] = new_dependencies
|
||||||
|
|
||||||
return (new_target_list, new_target_dicts)
|
return (new_target_list, new_target_dicts)
|
||||||
|
|
||||||
|
@ -156,7 +159,7 @@ def _GetPdbPath(target_dict, config_name, vars):
|
||||||
|
|
||||||
|
|
||||||
pdb_base = target_dict.get('product_name', target_dict['target_name'])
|
pdb_base = target_dict.get('product_name', target_dict['target_name'])
|
||||||
pdb_base = '%s%s.pdb' % (pdb_base, _TARGET_TYPE_EXT[target_dict['type']])
|
pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']])
|
||||||
pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
|
pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
|
||||||
|
|
||||||
return pdb_path
|
return pdb_path
|
||||||
|
@ -264,4 +267,4 @@ def InsertLargePdbShims(target_list, target_dicts, vars):
|
||||||
# Update the original target to depend on the shim target.
|
# Update the original target to depend on the shim target.
|
||||||
target_dict.setdefault('dependencies', []).append(full_shim_target_name)
|
target_dict.setdefault('dependencies', []).append(full_shim_target_name)
|
||||||
|
|
||||||
return (target_list, target_dicts)
|
return (target_list, target_dicts)
|
||||||
|
|
|
@ -138,7 +138,7 @@ def _RegistryQueryBase(sysdir, key, value):
|
||||||
|
|
||||||
|
|
||||||
def _RegistryQuery(key, value=None):
|
def _RegistryQuery(key, value=None):
|
||||||
"""Use reg.exe to read a particular key through _RegistryQueryBase.
|
r"""Use reg.exe to read a particular key through _RegistryQueryBase.
|
||||||
|
|
||||||
First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
|
First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
|
||||||
that fails, it falls back to System32. Sysnative is available on Vista and
|
that fails, it falls back to System32. Sysnative is available on Vista and
|
||||||
|
@ -165,8 +165,33 @@ def _RegistryQuery(key, value=None):
|
||||||
return text
|
return text
|
||||||
|
|
||||||
|
|
||||||
|
def _RegistryGetValueUsingWinReg(key, value):
|
||||||
|
"""Use the _winreg module to obtain the value of a registry key.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: The registry key.
|
||||||
|
value: The particular registry value to read.
|
||||||
|
Return:
|
||||||
|
contents of the registry key's value, or None on failure. Throws
|
||||||
|
ImportError if _winreg is unavailable.
|
||||||
|
"""
|
||||||
|
import _winreg
|
||||||
|
try:
|
||||||
|
root, subkey = key.split('\\', 1)
|
||||||
|
assert root == 'HKLM' # Only need HKLM for now.
|
||||||
|
with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
|
||||||
|
return _winreg.QueryValueEx(hkey, value)[0]
|
||||||
|
except WindowsError:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
def _RegistryGetValue(key, value):
|
def _RegistryGetValue(key, value):
|
||||||
"""Use reg.exe to obtain the value of a registry key.
|
"""Use _winreg or reg.exe to obtain the value of a registry key.
|
||||||
|
|
||||||
|
Using _winreg is preferable because it solves an issue on some corporate
|
||||||
|
environments where access to reg.exe is locked down. However, we still need
|
||||||
|
to fallback to reg.exe for the case where the _winreg module is not available
|
||||||
|
(for example in cygwin python).
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
key: The registry key.
|
key: The registry key.
|
||||||
|
@ -174,6 +199,12 @@ def _RegistryGetValue(key, value):
|
||||||
Return:
|
Return:
|
||||||
contents of the registry key's value, or None on failure.
|
contents of the registry key's value, or None on failure.
|
||||||
"""
|
"""
|
||||||
|
try:
|
||||||
|
return _RegistryGetValueUsingWinReg(key, value)
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Fallback to reg.exe if we fail to import _winreg.
|
||||||
text = _RegistryQuery(key, value)
|
text = _RegistryQuery(key, value)
|
||||||
if not text:
|
if not text:
|
||||||
return None
|
return None
|
||||||
|
@ -184,19 +215,6 @@ def _RegistryGetValue(key, value):
|
||||||
return match.group(1)
|
return match.group(1)
|
||||||
|
|
||||||
|
|
||||||
def _RegistryKeyExists(key):
|
|
||||||
"""Use reg.exe to see if a key exists.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
key: The registry key to check.
|
|
||||||
Return:
|
|
||||||
True if the key exists
|
|
||||||
"""
|
|
||||||
if not _RegistryQuery(key):
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def _CreateVersion(name, path, sdk_based=False):
|
def _CreateVersion(name, path, sdk_based=False):
|
||||||
"""Sets up MSVS project generation.
|
"""Sets up MSVS project generation.
|
||||||
|
|
||||||
|
@ -207,6 +225,15 @@ def _CreateVersion(name, path, sdk_based=False):
|
||||||
if path:
|
if path:
|
||||||
path = os.path.normpath(path)
|
path = os.path.normpath(path)
|
||||||
versions = {
|
versions = {
|
||||||
|
'2015': VisualStudioVersion('2015',
|
||||||
|
'Visual Studio 2015',
|
||||||
|
solution_version='12.00',
|
||||||
|
project_version='14.0',
|
||||||
|
flat_sln=False,
|
||||||
|
uses_vcxproj=True,
|
||||||
|
path=path,
|
||||||
|
sdk_based=sdk_based,
|
||||||
|
default_toolset='v140'),
|
||||||
'2013': VisualStudioVersion('2013',
|
'2013': VisualStudioVersion('2013',
|
||||||
'Visual Studio 2013',
|
'Visual Studio 2013',
|
||||||
solution_version='13.00',
|
solution_version='13.00',
|
||||||
|
@ -316,7 +343,8 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
|
||||||
2008(e) - Visual Studio 2008 (9)
|
2008(e) - Visual Studio 2008 (9)
|
||||||
2010(e) - Visual Studio 2010 (10)
|
2010(e) - Visual Studio 2010 (10)
|
||||||
2012(e) - Visual Studio 2012 (11)
|
2012(e) - Visual Studio 2012 (11)
|
||||||
2013(e) - Visual Studio 2013 (11)
|
2013(e) - Visual Studio 2013 (12)
|
||||||
|
2015 - Visual Studio 2015 (14)
|
||||||
Where (e) is e for express editions of MSVS and blank otherwise.
|
Where (e) is e for express editions of MSVS and blank otherwise.
|
||||||
"""
|
"""
|
||||||
version_to_year = {
|
version_to_year = {
|
||||||
|
@ -325,6 +353,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
|
||||||
'10.0': '2010',
|
'10.0': '2010',
|
||||||
'11.0': '2012',
|
'11.0': '2012',
|
||||||
'12.0': '2013',
|
'12.0': '2013',
|
||||||
|
'14.0': '2015',
|
||||||
}
|
}
|
||||||
versions = []
|
versions = []
|
||||||
for version in versions_to_check:
|
for version in versions_to_check:
|
||||||
|
@ -361,13 +390,14 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
|
||||||
if not path:
|
if not path:
|
||||||
continue
|
continue
|
||||||
path = _ConvertToCygpath(path)
|
path = _ConvertToCygpath(path)
|
||||||
versions.append(_CreateVersion(version_to_year[version] + 'e',
|
if version != '14.0': # There is no Express edition for 2015.
|
||||||
os.path.join(path, '..'), sdk_based=True))
|
versions.append(_CreateVersion(version_to_year[version] + 'e',
|
||||||
|
os.path.join(path, '..'), sdk_based=True))
|
||||||
|
|
||||||
return versions
|
return versions
|
||||||
|
|
||||||
|
|
||||||
def SelectVisualStudioVersion(version='auto'):
|
def SelectVisualStudioVersion(version='auto', allow_fallback=True):
|
||||||
"""Select which version of Visual Studio projects to generate.
|
"""Select which version of Visual Studio projects to generate.
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
|
@ -379,7 +409,7 @@ def SelectVisualStudioVersion(version='auto'):
|
||||||
if version == 'auto':
|
if version == 'auto':
|
||||||
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
|
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
|
||||||
version_map = {
|
version_map = {
|
||||||
'auto': ('10.0', '12.0', '9.0', '8.0', '11.0'),
|
'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
|
||||||
'2005': ('8.0',),
|
'2005': ('8.0',),
|
||||||
'2005e': ('8.0',),
|
'2005e': ('8.0',),
|
||||||
'2008': ('9.0',),
|
'2008': ('9.0',),
|
||||||
|
@ -390,6 +420,7 @@ def SelectVisualStudioVersion(version='auto'):
|
||||||
'2012e': ('11.0',),
|
'2012e': ('11.0',),
|
||||||
'2013': ('12.0',),
|
'2013': ('12.0',),
|
||||||
'2013e': ('12.0',),
|
'2013e': ('12.0',),
|
||||||
|
'2015': ('14.0',),
|
||||||
}
|
}
|
||||||
override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
|
override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
|
||||||
if override_path:
|
if override_path:
|
||||||
|
@ -401,6 +432,8 @@ def SelectVisualStudioVersion(version='auto'):
|
||||||
version = str(version)
|
version = str(version)
|
||||||
versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
|
versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
|
||||||
if not versions:
|
if not versions:
|
||||||
|
if not allow_fallback:
|
||||||
|
raise ValueError('Could not locate Visual Studio installation.')
|
||||||
if version == 'auto':
|
if version == 'auto':
|
||||||
# Default to 2005 if we couldn't find anything
|
# Default to 2005 if we couldn't find anything
|
||||||
return _CreateVersion('2005', None)
|
return _CreateVersion('2005', None)
|
||||||
|
|
|
@ -59,7 +59,6 @@ def Load(build_files, format, default_variables={},
|
||||||
if params is None:
|
if params is None:
|
||||||
params = {}
|
params = {}
|
||||||
|
|
||||||
flavor = None
|
|
||||||
if '-' in format:
|
if '-' in format:
|
||||||
format, params['flavor'] = format.split('-', 1)
|
format, params['flavor'] = format.split('-', 1)
|
||||||
|
|
||||||
|
@ -69,6 +68,7 @@ def Load(build_files, format, default_variables={},
|
||||||
# named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
|
# named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
|
||||||
# avoiding collisions with user and automatic variables.
|
# avoiding collisions with user and automatic variables.
|
||||||
default_variables['GENERATOR'] = format
|
default_variables['GENERATOR'] = format
|
||||||
|
default_variables['GENERATOR_FLAVOR'] = params.get('flavor', '')
|
||||||
|
|
||||||
# Format can be a custom python file, or by default the name of a module
|
# Format can be a custom python file, or by default the name of a module
|
||||||
# within gyp.generator.
|
# within gyp.generator.
|
||||||
|
@ -371,7 +371,7 @@ def gyp_main(args):
|
||||||
if options.use_environment:
|
if options.use_environment:
|
||||||
generate_formats = os.environ.get('GYP_GENERATORS', [])
|
generate_formats = os.environ.get('GYP_GENERATORS', [])
|
||||||
if generate_formats:
|
if generate_formats:
|
||||||
generate_formats = re.split('[\s,]', generate_formats)
|
generate_formats = re.split(r'[\s,]', generate_formats)
|
||||||
if generate_formats:
|
if generate_formats:
|
||||||
options.formats = generate_formats
|
options.formats = generate_formats
|
||||||
else:
|
else:
|
||||||
|
@ -493,14 +493,13 @@ def gyp_main(args):
|
||||||
'gyp_binary': sys.argv[0],
|
'gyp_binary': sys.argv[0],
|
||||||
'home_dot_gyp': home_dot_gyp,
|
'home_dot_gyp': home_dot_gyp,
|
||||||
'parallel': options.parallel,
|
'parallel': options.parallel,
|
||||||
'root_targets': options.root_targets}
|
'root_targets': options.root_targets,
|
||||||
|
'target_arch': cmdline_default_variables.get('target_arch', '')}
|
||||||
|
|
||||||
# Start with the default variables from the command line.
|
# Start with the default variables from the command line.
|
||||||
[generator, flat_list, targets, data] = Load(build_files, format,
|
[generator, flat_list, targets, data] = Load(
|
||||||
cmdline_default_variables,
|
build_files, format, cmdline_default_variables, includes, options.depth,
|
||||||
includes, options.depth,
|
params, options.check, options.circular_check)
|
||||||
params, options.check,
|
|
||||||
options.circular_check)
|
|
||||||
|
|
||||||
# TODO(mark): Pass |data| for now because the generator needs a list of
|
# TODO(mark): Pass |data| for now because the generator needs a list of
|
||||||
# build files that came in. In the future, maybe it should just accept
|
# build files that came in. In the future, maybe it should just accept
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
|
|
||||||
from __future__ import with_statement
|
from __future__ import with_statement
|
||||||
|
|
||||||
|
import collections
|
||||||
import errno
|
import errno
|
||||||
import filecmp
|
import filecmp
|
||||||
import os.path
|
import os.path
|
||||||
|
@ -328,7 +329,7 @@ def WriteOnDiff(filename):
|
||||||
the target if it differs (on close).
|
the target if it differs (on close).
|
||||||
"""
|
"""
|
||||||
|
|
||||||
class Writer:
|
class Writer(object):
|
||||||
"""Wrapper around file which only covers the target if it differs."""
|
"""Wrapper around file which only covers the target if it differs."""
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
# Pick temporary file.
|
# Pick temporary file.
|
||||||
|
@ -472,6 +473,72 @@ def uniquer(seq, idfun=None):
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
# Based on http://code.activestate.com/recipes/576694/.
|
||||||
|
class OrderedSet(collections.MutableSet):
|
||||||
|
def __init__(self, iterable=None):
|
||||||
|
self.end = end = []
|
||||||
|
end += [None, end, end] # sentinel node for doubly linked list
|
||||||
|
self.map = {} # key --> [key, prev, next]
|
||||||
|
if iterable is not None:
|
||||||
|
self |= iterable
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(self.map)
|
||||||
|
|
||||||
|
def __contains__(self, key):
|
||||||
|
return key in self.map
|
||||||
|
|
||||||
|
def add(self, key):
|
||||||
|
if key not in self.map:
|
||||||
|
end = self.end
|
||||||
|
curr = end[1]
|
||||||
|
curr[2] = end[1] = self.map[key] = [key, curr, end]
|
||||||
|
|
||||||
|
def discard(self, key):
|
||||||
|
if key in self.map:
|
||||||
|
key, prev_item, next_item = self.map.pop(key)
|
||||||
|
prev_item[2] = next_item
|
||||||
|
next_item[1] = prev_item
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
end = self.end
|
||||||
|
curr = end[2]
|
||||||
|
while curr is not end:
|
||||||
|
yield curr[0]
|
||||||
|
curr = curr[2]
|
||||||
|
|
||||||
|
def __reversed__(self):
|
||||||
|
end = self.end
|
||||||
|
curr = end[1]
|
||||||
|
while curr is not end:
|
||||||
|
yield curr[0]
|
||||||
|
curr = curr[1]
|
||||||
|
|
||||||
|
# The second argument is an addition that causes a pylint warning.
|
||||||
|
def pop(self, last=True): # pylint: disable=W0221
|
||||||
|
if not self:
|
||||||
|
raise KeyError('set is empty')
|
||||||
|
key = self.end[1][0] if last else self.end[2][0]
|
||||||
|
self.discard(key)
|
||||||
|
return key
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
if not self:
|
||||||
|
return '%s()' % (self.__class__.__name__,)
|
||||||
|
return '%s(%r)' % (self.__class__.__name__, list(self))
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if isinstance(other, OrderedSet):
|
||||||
|
return len(self) == len(other) and list(self) == list(other)
|
||||||
|
return set(self) == set(other)
|
||||||
|
|
||||||
|
# Extensions to the recipe.
|
||||||
|
def update(self, iterable):
|
||||||
|
for i in iterable:
|
||||||
|
if i not in self:
|
||||||
|
self.add(i)
|
||||||
|
|
||||||
|
|
||||||
class CycleError(Exception):
|
class CycleError(Exception):
|
||||||
"""An exception raised when an unexpected cycle is detected."""
|
"""An exception raised when an unexpected cycle is detected."""
|
||||||
def __init__(self, nodes):
|
def __init__(self, nodes):
|
||||||
|
@ -481,7 +548,7 @@ class CycleError(Exception):
|
||||||
|
|
||||||
|
|
||||||
def TopologicallySorted(graph, get_edges):
|
def TopologicallySorted(graph, get_edges):
|
||||||
"""Topologically sort based on a user provided edge definition.
|
r"""Topologically sort based on a user provided edge definition.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
graph: A list of node names.
|
graph: A list of node names.
|
||||||
|
@ -519,3 +586,14 @@ def TopologicallySorted(graph, get_edges):
|
||||||
for node in sorted(graph):
|
for node in sorted(graph):
|
||||||
Visit(node)
|
Visit(node)
|
||||||
return ordered_nodes
|
return ordered_nodes
|
||||||
|
|
||||||
|
def CrossCompileRequested():
|
||||||
|
# TODO: figure out how to not build extra host objects in the
|
||||||
|
# non-cross-compile case when this is enabled, and enable unconditionally.
|
||||||
|
return (os.environ.get('GYP_CROSSCOMPILE') or
|
||||||
|
os.environ.get('AR_host') or
|
||||||
|
os.environ.get('CC_host') or
|
||||||
|
os.environ.get('CXX_host') or
|
||||||
|
os.environ.get('AR_target') or
|
||||||
|
os.environ.get('CC_target') or
|
||||||
|
os.environ.get('CXX_target'))
|
||||||
|
|
|
@ -40,7 +40,12 @@ class FlockTool(object):
|
||||||
# with EBADF, that's why we use this F_SETLK
|
# with EBADF, that's why we use this F_SETLK
|
||||||
# hack instead.
|
# hack instead.
|
||||||
fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
|
fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
|
||||||
op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
|
if sys.platform.startswith('aix'):
|
||||||
|
# Python on AIX is compiled with LARGEFILE support, which changes the
|
||||||
|
# struct size.
|
||||||
|
op = struct.pack('hhIllqq', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
|
||||||
|
else:
|
||||||
|
op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
|
||||||
fcntl.fcntl(fd, fcntl.F_SETLK, op)
|
fcntl.fcntl(fd, fcntl.F_SETLK, op)
|
||||||
return subprocess.call(cmd_list)
|
return subprocess.call(cmd_list)
|
||||||
|
|
||||||
|
|
569
gyp/pylib/gyp/generator/analyzer.py
Normal file
569
gyp/pylib/gyp/generator/analyzer.py
Normal file
|
@ -0,0 +1,569 @@
|
||||||
|
# Copyright (c) 2014 Google Inc. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
"""
|
||||||
|
This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
|
||||||
|
the generator flag config_path) the path of a json file that dictates the files
|
||||||
|
and targets to search for. The following keys are supported:
|
||||||
|
files: list of paths (relative) of the files to search for.
|
||||||
|
targets: list of targets to search for. The target names are unqualified.
|
||||||
|
|
||||||
|
The following is output:
|
||||||
|
error: only supplied if there is an error.
|
||||||
|
targets: the set of targets passed in via targets that either directly or
|
||||||
|
indirectly depend upon the set of paths supplied in files.
|
||||||
|
build_targets: minimal set of targets that directly depend on the changed
|
||||||
|
files and need to be built. The expectation is this set of targets is passed
|
||||||
|
into a build step.
|
||||||
|
status: outputs one of three values: none of the supplied files were found,
|
||||||
|
one of the include files changed so that it should be assumed everything
|
||||||
|
changed (in this case targets and build_targets are not output) or at
|
||||||
|
least one file was found.
|
||||||
|
invalid_targets: list of supplied targets thare were not found.
|
||||||
|
|
||||||
|
If the generator flag analyzer_output_path is specified, output is written
|
||||||
|
there. Otherwise output is written to stdout.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import gyp.common
|
||||||
|
import gyp.ninja_syntax as ninja_syntax
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import posixpath
|
||||||
|
import sys
|
||||||
|
|
||||||
|
debug = False
|
||||||
|
|
||||||
|
found_dependency_string = 'Found dependency'
|
||||||
|
no_dependency_string = 'No dependencies'
|
||||||
|
# Status when it should be assumed that everything has changed.
|
||||||
|
all_changed_string = 'Found dependency (all)'
|
||||||
|
|
||||||
|
# MatchStatus is used indicate if and how a target depends upon the supplied
|
||||||
|
# sources.
|
||||||
|
# The target's sources contain one of the supplied paths.
|
||||||
|
MATCH_STATUS_MATCHES = 1
|
||||||
|
# The target has a dependency on another target that contains one of the
|
||||||
|
# supplied paths.
|
||||||
|
MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
|
||||||
|
# The target's sources weren't in the supplied paths and none of the target's
|
||||||
|
# dependencies depend upon a target that matched.
|
||||||
|
MATCH_STATUS_DOESNT_MATCH = 3
|
||||||
|
# The target doesn't contain the source, but the dependent targets have not yet
|
||||||
|
# been visited to determine a more specific status yet.
|
||||||
|
MATCH_STATUS_TBD = 4
|
||||||
|
|
||||||
|
generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
|
||||||
|
|
||||||
|
generator_wants_static_library_dependencies_adjusted = False
|
||||||
|
|
||||||
|
generator_default_variables = {
|
||||||
|
}
|
||||||
|
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
|
||||||
|
'LIB_DIR', 'SHARED_LIB_DIR']:
|
||||||
|
generator_default_variables[dirname] = '!!!'
|
||||||
|
|
||||||
|
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
|
||||||
|
'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
|
||||||
|
'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
|
||||||
|
'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
|
||||||
|
'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
|
||||||
|
'CONFIGURATION_NAME']:
|
||||||
|
generator_default_variables[unused] = ''
|
||||||
|
|
||||||
|
|
||||||
|
def _ToGypPath(path):
|
||||||
|
"""Converts a path to the format used by gyp."""
|
||||||
|
if os.sep == '\\' and os.altsep == '/':
|
||||||
|
return path.replace('\\', '/')
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
def _ResolveParent(path, base_path_components):
|
||||||
|
"""Resolves |path|, which starts with at least one '../'. Returns an empty
|
||||||
|
string if the path shouldn't be considered. See _AddSources() for a
|
||||||
|
description of |base_path_components|."""
|
||||||
|
depth = 0
|
||||||
|
while path.startswith('../'):
|
||||||
|
depth += 1
|
||||||
|
path = path[3:]
|
||||||
|
# Relative includes may go outside the source tree. For example, an action may
|
||||||
|
# have inputs in /usr/include, which are not in the source tree.
|
||||||
|
if depth > len(base_path_components):
|
||||||
|
return ''
|
||||||
|
if depth == len(base_path_components):
|
||||||
|
return path
|
||||||
|
return '/'.join(base_path_components[0:len(base_path_components) - depth]) + \
|
||||||
|
'/' + path
|
||||||
|
|
||||||
|
|
||||||
|
def _AddSources(sources, base_path, base_path_components, result):
|
||||||
|
"""Extracts valid sources from |sources| and adds them to |result|. Each
|
||||||
|
source file is relative to |base_path|, but may contain '..'. To make
|
||||||
|
resolving '..' easier |base_path_components| contains each of the
|
||||||
|
directories in |base_path|. Additionally each source may contain variables.
|
||||||
|
Such sources are ignored as it is assumed dependencies on them are expressed
|
||||||
|
and tracked in some other means."""
|
||||||
|
# NOTE: gyp paths are always posix style.
|
||||||
|
for source in sources:
|
||||||
|
if not len(source) or source.startswith('!!!') or source.startswith('$'):
|
||||||
|
continue
|
||||||
|
# variable expansion may lead to //.
|
||||||
|
org_source = source
|
||||||
|
source = source[0] + source[1:].replace('//', '/')
|
||||||
|
if source.startswith('../'):
|
||||||
|
source = _ResolveParent(source, base_path_components)
|
||||||
|
if len(source):
|
||||||
|
result.append(source)
|
||||||
|
continue
|
||||||
|
result.append(base_path + source)
|
||||||
|
if debug:
|
||||||
|
print 'AddSource', org_source, result[len(result) - 1]
|
||||||
|
|
||||||
|
|
||||||
|
def _ExtractSourcesFromAction(action, base_path, base_path_components,
|
||||||
|
results):
|
||||||
|
if 'inputs' in action:
|
||||||
|
_AddSources(action['inputs'], base_path, base_path_components, results)
|
||||||
|
|
||||||
|
|
||||||
|
def _ToLocalPath(toplevel_dir, path):
|
||||||
|
"""Converts |path| to a path relative to |toplevel_dir|."""
|
||||||
|
if path == toplevel_dir:
|
||||||
|
return ''
|
||||||
|
if path.startswith(toplevel_dir + '/'):
|
||||||
|
return path[len(toplevel_dir) + len('/'):]
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
def _ExtractSources(target, target_dict, toplevel_dir):
|
||||||
|
# |target| is either absolute or relative and in the format of the OS. Gyp
|
||||||
|
# source paths are always posix. Convert |target| to a posix path relative to
|
||||||
|
# |toplevel_dir_|. This is done to make it easy to build source paths.
|
||||||
|
base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
|
||||||
|
base_path_components = base_path.split('/')
|
||||||
|
|
||||||
|
# Add a trailing '/' so that _AddSources() can easily build paths.
|
||||||
|
if len(base_path):
|
||||||
|
base_path += '/'
|
||||||
|
|
||||||
|
if debug:
|
||||||
|
print 'ExtractSources', target, base_path
|
||||||
|
|
||||||
|
results = []
|
||||||
|
if 'sources' in target_dict:
|
||||||
|
_AddSources(target_dict['sources'], base_path, base_path_components,
|
||||||
|
results)
|
||||||
|
# Include the inputs from any actions. Any changes to these affect the
|
||||||
|
# resulting output.
|
||||||
|
if 'actions' in target_dict:
|
||||||
|
for action in target_dict['actions']:
|
||||||
|
_ExtractSourcesFromAction(action, base_path, base_path_components,
|
||||||
|
results)
|
||||||
|
if 'rules' in target_dict:
|
||||||
|
for rule in target_dict['rules']:
|
||||||
|
_ExtractSourcesFromAction(rule, base_path, base_path_components, results)
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
class Target(object):
|
||||||
|
"""Holds information about a particular target:
|
||||||
|
deps: set of Targets this Target depends upon. This is not recursive, only the
|
||||||
|
direct dependent Targets.
|
||||||
|
match_status: one of the MatchStatus values.
|
||||||
|
back_deps: set of Targets that have a dependency on this Target.
|
||||||
|
visited: used during iteration to indicate whether we've visited this target.
|
||||||
|
This is used for two iterations, once in building the set of Targets and
|
||||||
|
again in _GetBuildTargets().
|
||||||
|
name: fully qualified name of the target.
|
||||||
|
requires_build: True if the target type is such that it needs to be built.
|
||||||
|
See _DoesTargetTypeRequireBuild for details.
|
||||||
|
added_to_compile_targets: used when determining if the target was added to the
|
||||||
|
set of targets that needs to be built.
|
||||||
|
in_roots: true if this target is a descendant of one of the root nodes.
|
||||||
|
is_executable: true if the type of target is executable."""
|
||||||
|
def __init__(self, name):
|
||||||
|
self.deps = set()
|
||||||
|
self.match_status = MATCH_STATUS_TBD
|
||||||
|
self.back_deps = set()
|
||||||
|
self.name = name
|
||||||
|
# TODO(sky): I don't like hanging this off Target. This state is specific
|
||||||
|
# to certain functions and should be isolated there.
|
||||||
|
self.visited = False
|
||||||
|
self.requires_build = False
|
||||||
|
self.added_to_compile_targets = False
|
||||||
|
self.in_roots = False
|
||||||
|
self.is_executable = False
|
||||||
|
|
||||||
|
|
||||||
|
class Config(object):
  """Details what we're looking for.

  files: list of files to search for.
  targets: set of target names; see file description for details."""

  def __init__(self):
    # File paths (as supplied in the config) whose dependents we want found.
    self.files = []
    # Unqualified target names supplied in the config.
    self.targets = set()

  def Init(self, params):
    """Initializes Config. This is a separate method as it raises an exception
    if there is a parse error.

    params: generator params dict; reads generator_flags.config_path, the
        path of a JSON file containing a dictionary with optional 'files'
        and 'targets' keys. If config_path is absent this is a no-op.
    Raises Exception on an unreadable or malformed config file."""
    generator_flags = params.get('generator_flags', {})
    config_path = generator_flags.get('config_path', None)
    if not config_path:
      return
    try:
      # 'with' guarantees the handle is closed even when json.load raises
      # (the previous open/load/close sequence leaked it on a parse error).
      with open(config_path, 'r') as f:
        config = json.load(f)
    except IOError:
      raise Exception('Unable to open file ' + config_path)
    except ValueError as e:
      raise Exception('Unable to parse config file ' + config_path + str(e))
    if not isinstance(config, dict):
      raise Exception('config_path must be a JSON file containing a dictionary')
    self.files = config.get('files', [])
    self.targets = set(config.get('targets', []))
||||||
|
|
||||||
|
|
||||||
|
def _WasBuildFileModified(build_file, data, files, toplevel_dir):
  """Returns true if the build file |build_file| is either in |files| or
  one of the files included by |build_file| is in |files|. |toplevel_dir| is
  the root of the source tree.

  build_file: path of the gyp file to check.
  data: gyp's per-build-file dict; 'included_files' is read from it.
  files: set of modified paths, relative to |toplevel_dir|.
  """
  # Direct hit: the build file itself was modified.
  if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
    if debug:
      print 'gyp file modified', build_file
    return True

  # First element of included_files is the file itself.
  if len(data[build_file]['included_files']) <= 1:
    return False

  for include_file in data[build_file]['included_files'][1:]:
    # |included_files| are relative to the directory of the |build_file|.
    rel_include_file = \
        _ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
    if _ToLocalPath(toplevel_dir, rel_include_file) in files:
      if debug:
        print 'included gyp file modified, gyp_file=', build_file, \
            'included file=', rel_include_file
      return True
  return False
|
||||||
|
|
||||||
|
def _GetOrCreateTargetByName(targets, target_name):
  """Creates or returns the Target at targets[target_name]. If there is no
  Target for |target_name| one is created. Returns a tuple of whether a new
  Target was created and the Target."""
  existing = targets.get(target_name)
  if existing is not None:
    return False, existing
  created = Target(target_name)
  targets[target_name] = created
  return True, created
|
||||||
|
|
||||||
|
def _DoesTargetTypeRequireBuild(target_dict):
|
||||||
|
"""Returns true if the target type is such that it needs to be built."""
|
||||||
|
# If a 'none' target has rules or actions we assume it requires a build.
|
||||||
|
return target_dict['type'] != 'none' or \
|
||||||
|
target_dict.get('actions') or target_dict.get('rules')
|
||||||
|
|
||||||
|
|
||||||
|
def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
                     build_files):
  """Returns a tuple of the following:
  . A dictionary mapping from fully qualified name to Target.
  . A list of the targets that have a source file in |files|.
  . Set of root Targets reachable from the the files |build_files|.
  This sets the |match_status| of the targets that contain any of the source
  files in |files| to MATCH_STATUS_MATCHES.
  |toplevel_dir| is the root of the source tree."""
  # Maps from target name to Target.
  targets = {}

  # Targets that matched.
  matching_targets = []

  # Queue of targets to visit.
  targets_to_visit = target_list[:]

  # Maps from build file to a boolean indicating whether the build file is in
  # |files|.
  build_file_in_files = {}

  # Root targets across all files.
  roots = set()

  # Set of Targets in |build_files|.
  build_file_targets = set()

  # Worklist traversal: each target is fully processed at most once
  # (guarded by |visited|); dependencies are pushed as they are seen.
  while len(targets_to_visit) > 0:
    target_name = targets_to_visit.pop()
    created_target, target = _GetOrCreateTargetByName(targets, target_name)
    if created_target:
      # Newly seen targets are provisionally roots; they are removed from
      # |roots| below if something turns out to depend on them.
      roots.add(target)
    elif target.visited:
      continue

    target.visited = True
    target.requires_build = _DoesTargetTypeRequireBuild(
        target_dicts[target_name])
    target.is_executable = target_dicts[target_name]['type'] == 'executable'

    # Cache the modified-check per build file; many targets share one file.
    build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
    if not build_file in build_file_in_files:
      build_file_in_files[build_file] = \
          _WasBuildFileModified(build_file, data, files, toplevel_dir)

    if build_file in build_files:
      build_file_targets.add(target)

    # If a build file (or any of its included files) is modified we assume all
    # targets in the file are modified.
    if build_file_in_files[build_file]:
      print 'matching target from modified build file', target_name
      target.match_status = MATCH_STATUS_MATCHES
      matching_targets.append(target)
    else:
      # Otherwise match on a per-source-file basis.
      sources = _ExtractSources(target_name, target_dicts[target_name],
                                toplevel_dir)
      for source in sources:
        if source in files:
          print 'target', target_name, 'matches', source
          target.match_status = MATCH_STATUS_MATCHES
          matching_targets.append(target)
          break

    # Add dependencies to visit as well as updating back pointers for deps.
    for dep in target_dicts[target_name].get('dependencies', []):
      targets_to_visit.append(dep)

      created_dep_target, dep_target = _GetOrCreateTargetByName(targets, dep)
      if not created_dep_target:
        # Something depends on |dep_target|, so it cannot be a root.
        roots.discard(dep_target)

      target.deps.add(dep_target)
      dep_target.back_deps.add(target)

  # Only roots that live in |build_files| are of interest to the caller.
  return targets, matching_targets, roots & build_file_targets
|
||||||
|
|
||||||
|
def _GetUnqualifiedToTargetMapping(all_targets, to_find):
  """Returns a mapping (dictionary) from unqualified name to Target for all the
  Targets in |to_find|."""
  result = {}
  if not to_find:
    return result
  remaining = set(to_find)
  for target_name, target in all_targets.items():
    extracted = gyp.common.ParseQualifiedTarget(target_name)
    if len(extracted) > 1 and extracted[1] in remaining:
      remaining.remove(extracted[1])
      result[extracted[1]] = target
      # Stop early once every requested name has been resolved.
      if not remaining:
        break
  return result
|
||||||
|
|
||||||
|
def _DoesTargetDependOn(target):
  """Returns true if |target| or any of its dependencies matches the supplied
  set of paths. This updates |match_status| of the Targets as it recurses.
  target: the Target to look for."""
  status = target.match_status
  if status == MATCH_STATUS_DOESNT_MATCH:
    return False
  if status in (MATCH_STATUS_MATCHES, MATCH_STATUS_MATCHES_BY_DEPENDENCY):
    return True
  for dep in target.deps:
    if _DoesTargetDependOn(dep):
      # Memoize the positive answer so later queries short-circuit.
      target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
      return True
  # Memoize the negative answer as well.
  target.match_status = MATCH_STATUS_DOESNT_MATCH
  return False
|
||||||
|
|
||||||
|
def _GetTargetsDependingOn(possible_targets):
  """Returns the list of Targets in |possible_targets| that depend (either
  directly or indirectly) on the matched targets.
  possible_targets: targets to search from."""
  return [target for target in possible_targets
          if _DoesTargetDependOn(target)]
|
||||||
|
|
||||||
|
def _AddBuildTargets(target, roots, add_if_no_ancestor, result):
  """Recurses through all targets that depend on |target|, adding all targets
  that need to be built (and are in |roots|) to |result|.
  roots: set of root targets.
  add_if_no_ancestor: If true and there are no ancestors of |target| then add
  |target| to |result|. |target| must still be in |roots|.
  result: targets that need to be built are added here."""
  # |visited| guards against reprocessing in diamond-shaped dep graphs.
  if target.visited:
    return

  target.visited = True
  target.in_roots = not target.back_deps and target in roots

  # Propagate state down from everything that depends on this target: if any
  # ancestor was compiled or is in roots, this target inherits that fact.
  for back_dep_target in target.back_deps:
    _AddBuildTargets(back_dep_target, roots, False, result)
    target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
    target.in_roots |= back_dep_target.in_roots

  # Always add 'executable' targets. Even though they may be built by other
  # targets that depend upon them it makes detection of what is going to be
  # built easier.
  if target.in_roots and \
        (target.is_executable or
         (not target.added_to_compile_targets and
          (add_if_no_ancestor or target.requires_build))):
    result.add(target)
    target.added_to_compile_targets = True
|
||||||
|
|
||||||
|
def _GetBuildTargets(matching_targets, roots):
  """Returns the set of Targets that require a build.
  matching_targets: targets that changed and need to be built.
  roots: set of root targets in the build files to search from."""
  build_set = set()
  for matched in matching_targets:
    _AddBuildTargets(matched, roots, True, build_set)
  return build_set
|
||||||
|
|
||||||
|
def _WriteOutput(params, **values):
|
||||||
|
"""Writes the output, either to stdout or a file is specified."""
|
||||||
|
if 'error' in values:
|
||||||
|
print 'Error:', values['error']
|
||||||
|
if 'status' in values:
|
||||||
|
print values['status']
|
||||||
|
if 'targets' in values:
|
||||||
|
values['targets'].sort()
|
||||||
|
print 'Supplied targets that depend on changed files:'
|
||||||
|
for target in values['targets']:
|
||||||
|
print '\t', target
|
||||||
|
if 'invalid_targets' in values:
|
||||||
|
values['invalid_targets'].sort()
|
||||||
|
print 'The following targets were not found:'
|
||||||
|
for target in values['invalid_targets']:
|
||||||
|
print '\t', target
|
||||||
|
if 'build_targets' in values:
|
||||||
|
values['build_targets'].sort()
|
||||||
|
print 'Targets that require a build:'
|
||||||
|
for target in values['build_targets']:
|
||||||
|
print '\t', target
|
||||||
|
|
||||||
|
output_path = params.get('generator_flags', {}).get(
|
||||||
|
'analyzer_output_path', None)
|
||||||
|
if not output_path:
|
||||||
|
print json.dumps(values)
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
f = open(output_path, 'w')
|
||||||
|
f.write(json.dumps(values) + '\n')
|
||||||
|
f.close()
|
||||||
|
except IOError as e:
|
||||||
|
print 'Error writing to output file', output_path, str(e)
|
||||||
|
|
||||||
|
|
||||||
|
def _WasGypIncludeFileModified(params, files):
  """Returns true if one of the files in |files| is in the set of included
  files.

  params: generator params; options.includes holds the -I include files.
  files: set of modified gyp-style paths to check against."""
  if params['options'].includes:
    for include in params['options'].includes:
      # A modified -I include affects every target, so one hit is enough.
      if _ToGypPath(include) in files:
        print 'Include file modified, assuming all changed', include
        return True
  return False
|
||||||
|
|
||||||
|
def _NamesNotIn(names, mapping):
|
||||||
|
"""Returns a list of the values in |names| that are not in |mapping|."""
|
||||||
|
return [name for name in names if name not in mapping]
|
||||||
|
|
||||||
|
|
||||||
|
def _LookupTargets(names, mapping):
|
||||||
|
"""Returns a list of the mapping[name] for each value in |names| that is in
|
||||||
|
|mapping|."""
|
||||||
|
return [mapping[name] for name in names if name in mapping]
|
||||||
|
|
||||||
|
|
||||||
|
def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp)."""
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'mac':
    default_variables.setdefault('OS', 'mac')
  elif flavor == 'win':
    default_variables.setdefault('OS', 'win')
    # Copy additional generator configuration data from VS, which is shared
    # by the Windows Ninja generator.
    import gyp.generator.msvs as msvs_generator
    # NOTE(review): these two locals appear unused within this function;
    # presumably they mirror the pattern used by other generators -- confirm
    # before removing.
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
  else:
    operating_system = flavor
    if flavor == 'android':
      operating_system = 'linux'  # Keep this legacy behavior for now.
    default_variables.setdefault('OS', operating_system)
|
||||||
|
|
||||||
|
def GenerateOutput(target_list, target_dicts, data, params):
  """Called by gyp as the final stage. Outputs results."""
  config = Config()
  # Any failure below is reported through _WriteOutput rather than raised,
  # so callers always get a structured result.
  try:
    config.Init(params)
    if not config.files:
      raise Exception('Must specify files to analyze via config_path generator '
                      'flag')

    toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
    if debug:
      print 'toplevel_dir', toplevel_dir

    # A modified -I include file invalidates everything; short-circuit.
    if _WasGypIncludeFileModified(params, config.files):
      result_dict = { 'status': all_changed_string,
                      'targets': list(config.targets) }
      _WriteOutput(params, **result_dict)
      return

    all_targets, matching_targets, roots = _GenerateTargets(
        data, target_list, target_dicts, toplevel_dir, frozenset(config.files),
        params['build_files'])

    unqualified_mapping = _GetUnqualifiedToTargetMapping(all_targets,
                                                         config.targets)
    invalid_targets = None
    # Any requested target that did not resolve is reported back as invalid.
    if len(unqualified_mapping) != len(config.targets):
      invalid_targets = _NamesNotIn(config.targets, unqualified_mapping)

    if matching_targets:
      search_targets = _LookupTargets(config.targets, unqualified_mapping)
      matched_search_targets = _GetTargetsDependingOn(search_targets)
      # Reset the visited status for _GetBuildTargets.
      for target in all_targets.itervalues():
        target.visited = False
      build_targets = _GetBuildTargets(matching_targets, roots)
      # Output uses unqualified names.
      matched_search_targets = [gyp.common.ParseQualifiedTarget(target.name)[1]
                                for target in matched_search_targets]
      build_targets = [gyp.common.ParseQualifiedTarget(target.name)[1]
                       for target in build_targets]
    else:
      matched_search_targets = []
      build_targets = []

    result_dict = { 'targets': matched_search_targets,
                    'status': found_dependency_string if matching_targets else
                              no_dependency_string,
                    'build_targets': build_targets}
    if invalid_targets:
      result_dict['invalid_targets'] = invalid_targets
    _WriteOutput(params, **result_dict)

  except Exception as e:
    _WriteOutput(params, error=str(e))
|
@ -50,12 +50,14 @@ generator_supports_multiple_toolsets = True
|
||||||
generator_additional_non_configuration_keys = [
|
generator_additional_non_configuration_keys = [
|
||||||
# Boolean to declare that this target does not want its name mangled.
|
# Boolean to declare that this target does not want its name mangled.
|
||||||
'android_unmangled_name',
|
'android_unmangled_name',
|
||||||
|
# Map of android build system variables to set.
|
||||||
|
'aosp_build_settings',
|
||||||
]
|
]
|
||||||
generator_additional_path_sections = []
|
generator_additional_path_sections = []
|
||||||
generator_extra_sources_for_rules = []
|
generator_extra_sources_for_rules = []
|
||||||
|
|
||||||
|
|
||||||
SHARED_FOOTER = """\
|
ALL_MODULES_FOOTER = """\
|
||||||
# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
|
# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
|
||||||
# all the included sub-makefiles. This is just here to clarify.
|
# all the included sub-makefiles. This is just here to clarify.
|
||||||
gyp_all_modules:
|
gyp_all_modules:
|
||||||
|
@ -66,33 +68,6 @@ header = """\
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
android_standard_include_paths = set([
|
|
||||||
# JNI_H_INCLUDE in build/core/binary.mk
|
|
||||||
'dalvik/libnativehelper/include/nativehelper',
|
|
||||||
# from SRC_HEADERS in build/core/config.mk
|
|
||||||
'system/core/include',
|
|
||||||
'hardware/libhardware/include',
|
|
||||||
'hardware/libhardware_legacy/include',
|
|
||||||
'hardware/ril/include',
|
|
||||||
'dalvik/libnativehelper/include',
|
|
||||||
'frameworks/native/include',
|
|
||||||
'frameworks/native/opengl/include',
|
|
||||||
'frameworks/base/include',
|
|
||||||
'frameworks/base/opengl/include',
|
|
||||||
'frameworks/base/native/include',
|
|
||||||
'external/skia/include',
|
|
||||||
# TARGET_C_INCLUDES in build/core/combo/TARGET_linux-arm.mk
|
|
||||||
'bionic/libc/arch-arm/include',
|
|
||||||
'bionic/libc/include',
|
|
||||||
'bionic/libstdc++/include',
|
|
||||||
'bionic/libc/kernel/common',
|
|
||||||
'bionic/libc/kernel/arch-arm',
|
|
||||||
'bionic/libm/include',
|
|
||||||
'bionic/libm/include/arm',
|
|
||||||
'bionic/libthread_db/include',
|
|
||||||
])
|
|
||||||
|
|
||||||
|
|
||||||
# Map gyp target types to Android module classes.
|
# Map gyp target types to Android module classes.
|
||||||
MODULE_CLASSES = {
|
MODULE_CLASSES = {
|
||||||
'static_library': 'STATIC_LIBRARIES',
|
'static_library': 'STATIC_LIBRARIES',
|
||||||
|
@ -133,7 +108,7 @@ class AndroidMkWriter(object):
|
||||||
self.android_top_dir = android_top_dir
|
self.android_top_dir = android_top_dir
|
||||||
|
|
||||||
def Write(self, qualified_target, relative_target, base_path, output_filename,
|
def Write(self, qualified_target, relative_target, base_path, output_filename,
|
||||||
spec, configs, part_of_all):
|
spec, configs, part_of_all, write_alias_target, sdk_version):
|
||||||
"""The main entry point: writes a .mk file for a single target.
|
"""The main entry point: writes a .mk file for a single target.
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
|
@ -144,6 +119,9 @@ class AndroidMkWriter(object):
|
||||||
output_filename: output .mk file name to write
|
output_filename: output .mk file name to write
|
||||||
spec, configs: gyp info
|
spec, configs: gyp info
|
||||||
part_of_all: flag indicating this target is part of 'all'
|
part_of_all: flag indicating this target is part of 'all'
|
||||||
|
write_alias_target: flag indicating whether to create short aliases for
|
||||||
|
this target
|
||||||
|
sdk_version: what to emit for LOCAL_SDK_VERSION in output
|
||||||
"""
|
"""
|
||||||
gyp.common.EnsureDirExists(output_filename)
|
gyp.common.EnsureDirExists(output_filename)
|
||||||
|
|
||||||
|
@ -183,14 +161,23 @@ class AndroidMkWriter(object):
|
||||||
if self.android_stem != self.android_module:
|
if self.android_stem != self.android_module:
|
||||||
self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem)
|
self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem)
|
||||||
self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix)
|
self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix)
|
||||||
self.WriteLn('LOCAL_MODULE_TAGS := optional')
|
|
||||||
if self.toolset == 'host':
|
if self.toolset == 'host':
|
||||||
self.WriteLn('LOCAL_IS_HOST_MODULE := true')
|
self.WriteLn('LOCAL_IS_HOST_MODULE := true')
|
||||||
|
self.WriteLn('LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)')
|
||||||
|
else:
|
||||||
|
self.WriteLn('LOCAL_MODULE_TARGET_ARCH := '
|
||||||
|
'$(TARGET_$(GYP_VAR_PREFIX)ARCH)')
|
||||||
|
self.WriteLn('LOCAL_SDK_VERSION := %s' % sdk_version)
|
||||||
|
|
||||||
# Grab output directories; needed for Actions and Rules.
|
# Grab output directories; needed for Actions and Rules.
|
||||||
self.WriteLn('gyp_intermediate_dir := $(call local-intermediates-dir)')
|
if self.toolset == 'host':
|
||||||
|
self.WriteLn('gyp_intermediate_dir := '
|
||||||
|
'$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))')
|
||||||
|
else:
|
||||||
|
self.WriteLn('gyp_intermediate_dir := '
|
||||||
|
'$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))')
|
||||||
self.WriteLn('gyp_shared_intermediate_dir := '
|
self.WriteLn('gyp_shared_intermediate_dir := '
|
||||||
'$(call intermediates-dir-for,GYP,shared)')
|
'$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))')
|
||||||
self.WriteLn()
|
self.WriteLn()
|
||||||
|
|
||||||
# List files this target depends on so that actions/rules/copies/sources
|
# List files this target depends on so that actions/rules/copies/sources
|
||||||
|
@ -226,7 +213,8 @@ class AndroidMkWriter(object):
|
||||||
if spec.get('sources', []) or extra_sources:
|
if spec.get('sources', []) or extra_sources:
|
||||||
self.WriteSources(spec, configs, extra_sources)
|
self.WriteSources(spec, configs, extra_sources)
|
||||||
|
|
||||||
self.WriteTarget(spec, configs, deps, link_deps, part_of_all)
|
self.WriteTarget(spec, configs, deps, link_deps, part_of_all,
|
||||||
|
write_alias_target)
|
||||||
|
|
||||||
# Update global list of target outputs, used in dependency tracking.
|
# Update global list of target outputs, used in dependency tracking.
|
||||||
target_outputs[qualified_target] = ('path', self.output_binary)
|
target_outputs[qualified_target] = ('path', self.output_binary)
|
||||||
|
@ -291,6 +279,7 @@ class AndroidMkWriter(object):
|
||||||
# writing duplicate dummy rules for those outputs.
|
# writing duplicate dummy rules for those outputs.
|
||||||
main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
|
main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
|
||||||
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
|
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
|
||||||
|
self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
|
||||||
self.WriteLn('%s: gyp_intermediate_dir := '
|
self.WriteLn('%s: gyp_intermediate_dir := '
|
||||||
'$(abspath $(gyp_intermediate_dir))' % main_output)
|
'$(abspath $(gyp_intermediate_dir))' % main_output)
|
||||||
self.WriteLn('%s: gyp_shared_intermediate_dir := '
|
self.WriteLn('%s: gyp_shared_intermediate_dir := '
|
||||||
|
@ -305,12 +294,19 @@ class AndroidMkWriter(object):
|
||||||
self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))'
|
self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))'
|
||||||
% main_output)
|
% main_output)
|
||||||
|
|
||||||
|
# Don't allow spaces in input/output filenames, but make an exception for
|
||||||
|
# filenames which start with '$(' since it's okay for there to be spaces
|
||||||
|
# inside of make function/macro invocations.
|
||||||
for input in inputs:
|
for input in inputs:
|
||||||
assert ' ' not in input, (
|
if not input.startswith('$(') and ' ' in input:
|
||||||
"Spaces in action input filenames not supported (%s)" % input)
|
raise gyp.common.GypError(
|
||||||
|
'Action input filename "%s" in target %s contains a space' %
|
||||||
|
(input, self.target))
|
||||||
for output in outputs:
|
for output in outputs:
|
||||||
assert ' ' not in output, (
|
if not output.startswith('$(') and ' ' in output:
|
||||||
"Spaces in action output filenames not supported (%s)" % output)
|
raise gyp.common.GypError(
|
||||||
|
'Action output filename "%s" in target %s contains a space' %
|
||||||
|
(output, self.target))
|
||||||
|
|
||||||
self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
|
self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
|
||||||
(main_output, ' '.join(map(self.LocalPathify, inputs))))
|
(main_output, ' '.join(map(self.LocalPathify, inputs))))
|
||||||
|
@ -337,13 +333,10 @@ class AndroidMkWriter(object):
|
||||||
"""
|
"""
|
||||||
if len(rules) == 0:
|
if len(rules) == 0:
|
||||||
return
|
return
|
||||||
rule_trigger = '%s_rule_trigger' % self.android_module
|
|
||||||
|
|
||||||
did_write_rule = False
|
|
||||||
for rule in rules:
|
for rule in rules:
|
||||||
if len(rule.get('rule_sources', [])) == 0:
|
if len(rule.get('rule_sources', [])) == 0:
|
||||||
continue
|
continue
|
||||||
did_write_rule = True
|
|
||||||
name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
|
name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
|
||||||
rule['rule_name']))
|
rule['rule_name']))
|
||||||
self.WriteLn('\n### Generated for rule "%s":' % name)
|
self.WriteLn('\n### Generated for rule "%s":' % name)
|
||||||
|
@ -391,6 +384,7 @@ class AndroidMkWriter(object):
|
||||||
outputs = map(self.LocalPathify, outputs)
|
outputs = map(self.LocalPathify, outputs)
|
||||||
main_output = outputs[0]
|
main_output = outputs[0]
|
||||||
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
|
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
|
||||||
|
self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
|
||||||
self.WriteLn('%s: gyp_intermediate_dir := '
|
self.WriteLn('%s: gyp_intermediate_dir := '
|
||||||
'$(abspath $(gyp_intermediate_dir))' % main_output)
|
'$(abspath $(gyp_intermediate_dir))' % main_output)
|
||||||
self.WriteLn('%s: gyp_shared_intermediate_dir := '
|
self.WriteLn('%s: gyp_shared_intermediate_dir := '
|
||||||
|
@ -412,13 +406,9 @@ class AndroidMkWriter(object):
|
||||||
# Make each output depend on the main output, with an empty command
|
# Make each output depend on the main output, with an empty command
|
||||||
# to force make to notice that the mtime has changed.
|
# to force make to notice that the mtime has changed.
|
||||||
self.WriteLn('%s: %s ;' % (output, main_output))
|
self.WriteLn('%s: %s ;' % (output, main_output))
|
||||||
self.WriteLn('.PHONY: %s' % (rule_trigger))
|
self.WriteLn()
|
||||||
self.WriteLn('%s: %s' % (rule_trigger, main_output))
|
|
||||||
self.WriteLn('')
|
self.WriteLn()
|
||||||
if did_write_rule:
|
|
||||||
extra_sources.append(rule_trigger) # Force all rules to run.
|
|
||||||
self.WriteLn('### Finished generating for all rules')
|
|
||||||
self.WriteLn('')
|
|
||||||
|
|
||||||
|
|
||||||
def WriteCopies(self, copies, extra_outputs):
|
def WriteCopies(self, copies, extra_outputs):
|
||||||
|
@ -501,6 +491,9 @@ class AndroidMkWriter(object):
|
||||||
self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
|
self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
|
||||||
'$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
|
'$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
|
||||||
self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
|
self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
|
||||||
|
# Android uses separate flags for assembly file invocations, but gyp expects
|
||||||
|
# the same CFLAGS to be applied:
|
||||||
|
self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
|
||||||
|
|
||||||
|
|
||||||
def WriteSources(self, spec, configs, extra_sources):
|
def WriteSources(self, spec, configs, extra_sources):
|
||||||
|
@ -609,16 +602,16 @@ class AndroidMkWriter(object):
|
||||||
prefix = ''
|
prefix = ''
|
||||||
|
|
||||||
if spec['toolset'] == 'host':
|
if spec['toolset'] == 'host':
|
||||||
suffix = '_host_gyp'
|
suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'
|
||||||
else:
|
else:
|
||||||
suffix = '_gyp'
|
suffix = '_gyp'
|
||||||
|
|
||||||
if self.path:
|
if self.path:
|
||||||
name = '%s%s_%s%s' % (prefix, self.path, self.target, suffix)
|
middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target))
|
||||||
else:
|
else:
|
||||||
name = '%s%s%s' % (prefix, self.target, suffix)
|
middle = make.StringToMakefileVariable(self.target)
|
||||||
|
|
||||||
return make.StringToMakefileVariable(name)
|
return ''.join([prefix, middle, suffix])
|
||||||
|
|
||||||
|
|
||||||
def ComputeOutputParts(self, spec):
|
def ComputeOutputParts(self, spec):
|
||||||
|
@ -672,32 +665,31 @@ class AndroidMkWriter(object):
|
||||||
E.g., the loadable module 'foobar' in directory 'baz' will produce
|
E.g., the loadable module 'foobar' in directory 'baz' will produce
|
||||||
'$(obj)/baz/libfoobar.so'
|
'$(obj)/baz/libfoobar.so'
|
||||||
"""
|
"""
|
||||||
if self.type == 'executable' and self.toolset == 'host':
|
if self.type == 'executable':
|
||||||
# We install host executables into shared_intermediate_dir so they can be
|
# We install host executables into shared_intermediate_dir so they can be
|
||||||
# run by gyp rules that refer to PRODUCT_DIR.
|
# run by gyp rules that refer to PRODUCT_DIR.
|
||||||
path = '$(gyp_shared_intermediate_dir)'
|
path = '$(gyp_shared_intermediate_dir)'
|
||||||
elif self.type == 'shared_library':
|
elif self.type == 'shared_library':
|
||||||
if self.toolset == 'host':
|
if self.toolset == 'host':
|
||||||
path = '$(HOST_OUT_INTERMEDIATE_LIBRARIES)'
|
path = '$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)'
|
||||||
else:
|
else:
|
||||||
path = '$(TARGET_OUT_INTERMEDIATE_LIBRARIES)'
|
path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)'
|
||||||
else:
|
else:
|
||||||
# Other targets just get built into their intermediate dir.
|
# Other targets just get built into their intermediate dir.
|
||||||
if self.toolset == 'host':
|
if self.toolset == 'host':
|
||||||
path = '$(call intermediates-dir-for,%s,%s,true)' % (self.android_class,
|
path = ('$(call intermediates-dir-for,%s,%s,true,,'
|
||||||
self.android_module)
|
'$(GYP_HOST_VAR_PREFIX))' % (self.android_class,
|
||||||
|
self.android_module))
|
||||||
else:
|
else:
|
||||||
path = '$(call intermediates-dir-for,%s,%s)' % (self.android_class,
|
path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))'
|
||||||
self.android_module)
|
% (self.android_class, self.android_module))
|
||||||
|
|
||||||
assert spec.get('product_dir') is None # TODO: not supported?
|
assert spec.get('product_dir') is None # TODO: not supported?
|
||||||
return os.path.join(path, self.ComputeOutputBasename(spec))
|
return os.path.join(path, self.ComputeOutputBasename(spec))
|
||||||
|
|
||||||
def NormalizeIncludePaths(self, include_paths):
|
def NormalizeIncludePaths(self, include_paths):
|
||||||
""" Normalize include_paths.
|
""" Normalize include_paths.
|
||||||
Convert absolute paths to relative to the Android top directory;
|
Convert absolute paths to relative to the Android top directory.
|
||||||
filter out include paths that are already brought in by the Android build
|
|
||||||
system.
|
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
include_paths: A list of unprocessed include paths.
|
include_paths: A list of unprocessed include paths.
|
||||||
|
@ -708,10 +700,7 @@ class AndroidMkWriter(object):
|
||||||
for path in include_paths:
|
for path in include_paths:
|
||||||
if path[0] == '/':
|
if path[0] == '/':
|
||||||
path = gyp.common.RelativePath(path, self.android_top_dir)
|
path = gyp.common.RelativePath(path, self.android_top_dir)
|
||||||
|
normalized.append(path)
|
||||||
# Filter out the Android standard search path.
|
|
||||||
if path not in android_standard_include_paths:
|
|
||||||
normalized.append(path)
|
|
||||||
return normalized
|
return normalized
|
||||||
|
|
||||||
def ExtractIncludesFromCFlags(self, cflags):
|
def ExtractIncludesFromCFlags(self, cflags):
|
||||||
|
@ -732,16 +721,20 @@ class AndroidMkWriter(object):
|
||||||
|
|
||||||
return (clean_cflags, include_paths)
|
return (clean_cflags, include_paths)
|
||||||
|
|
||||||
def ComputeAndroidLibraryModuleNames(self, libraries):
|
def FilterLibraries(self, libraries):
|
||||||
"""Compute the Android module names from libraries, ie spec.get('libraries')
|
"""Filter the 'libraries' key to separate things that shouldn't be ldflags.
|
||||||
|
|
||||||
|
Library entries that look like filenames should be converted to android
|
||||||
|
module names instead of being passed to the linker as flags.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
libraries: the value of spec.get('libraries')
|
libraries: the value of spec.get('libraries')
|
||||||
Returns:
|
Returns:
|
||||||
A tuple (static_lib_modules, dynamic_lib_modules)
|
A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
|
||||||
"""
|
"""
|
||||||
static_lib_modules = []
|
static_lib_modules = []
|
||||||
dynamic_lib_modules = []
|
dynamic_lib_modules = []
|
||||||
|
ldflags = []
|
||||||
for libs in libraries:
|
for libs in libraries:
|
||||||
# Libs can have multiple words.
|
# Libs can have multiple words.
|
||||||
for lib in libs.split():
|
for lib in libs.split():
|
||||||
|
@ -758,13 +751,9 @@ class AndroidMkWriter(object):
|
||||||
if match:
|
if match:
|
||||||
dynamic_lib_modules.append(match.group(1))
|
dynamic_lib_modules.append(match.group(1))
|
||||||
continue
|
continue
|
||||||
# "-lstlport" -> libstlport
|
|
||||||
if lib.startswith('-l'):
|
if lib.startswith('-l'):
|
||||||
if lib.endswith('_static'):
|
ldflags.append(lib)
|
||||||
static_lib_modules.append('lib' + lib[2:])
|
return (static_lib_modules, dynamic_lib_modules, ldflags)
|
||||||
else:
|
|
||||||
dynamic_lib_modules.append('lib' + lib[2:])
|
|
||||||
return (static_lib_modules, dynamic_lib_modules)
|
|
||||||
|
|
||||||
|
|
||||||
def ComputeDeps(self, spec):
|
def ComputeDeps(self, spec):
|
||||||
|
@ -792,47 +781,74 @@ class AndroidMkWriter(object):
|
||||||
spec, configs: input from gyp.
|
spec, configs: input from gyp.
|
||||||
link_deps: link dependency list; see ComputeDeps()
|
link_deps: link dependency list; see ComputeDeps()
|
||||||
"""
|
"""
|
||||||
for configname, config in sorted(configs.iteritems()):
|
|
||||||
ldflags = list(config.get('ldflags', []))
|
|
||||||
self.WriteLn('')
|
|
||||||
self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
|
|
||||||
self.WriteLn('\nLOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION))')
|
|
||||||
|
|
||||||
# Libraries (i.e. -lfoo)
|
# Libraries (i.e. -lfoo)
|
||||||
|
# These must be included even for static libraries as some of them provide
|
||||||
|
# implicit include paths through the build system.
|
||||||
libraries = gyp.common.uniquer(spec.get('libraries', []))
|
libraries = gyp.common.uniquer(spec.get('libraries', []))
|
||||||
static_libs, dynamic_libs = self.ComputeAndroidLibraryModuleNames(
|
static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries)
|
||||||
libraries)
|
|
||||||
|
|
||||||
# Link dependencies (i.e. libfoo.a, libfoo.so)
|
if self.type != 'static_library':
|
||||||
static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
|
for configname, config in sorted(configs.iteritems()):
|
||||||
shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']
|
ldflags = list(config.get('ldflags', []))
|
||||||
self.WriteLn('')
|
self.WriteLn('')
|
||||||
self.WriteList(static_libs + static_link_deps,
|
self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
|
||||||
'LOCAL_STATIC_LIBRARIES')
|
self.WriteList(ldflags_libs, 'LOCAL_GYP_LIBS')
|
||||||
self.WriteLn('# Enable grouping to fix circular references')
|
self.WriteLn('LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) '
|
||||||
self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
|
'$(LOCAL_GYP_LIBS)')
|
||||||
self.WriteLn('')
|
|
||||||
self.WriteList(dynamic_libs + shared_link_deps,
|
# Link dependencies (i.e. other gyp targets this target depends on)
|
||||||
'LOCAL_SHARED_LIBRARIES')
|
# These need not be included for static libraries as within the gyp build
|
||||||
|
# we do not use the implicit include path mechanism.
|
||||||
|
if self.type != 'static_library':
|
||||||
|
static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
|
||||||
|
shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']
|
||||||
|
else:
|
||||||
|
static_link_deps = []
|
||||||
|
shared_link_deps = []
|
||||||
|
|
||||||
|
# Only write the lists if they are non-empty.
|
||||||
|
if static_libs or static_link_deps:
|
||||||
|
self.WriteLn('')
|
||||||
|
self.WriteList(static_libs + static_link_deps,
|
||||||
|
'LOCAL_STATIC_LIBRARIES')
|
||||||
|
self.WriteLn('# Enable grouping to fix circular references')
|
||||||
|
self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
|
||||||
|
if dynamic_libs or shared_link_deps:
|
||||||
|
self.WriteLn('')
|
||||||
|
self.WriteList(dynamic_libs + shared_link_deps,
|
||||||
|
'LOCAL_SHARED_LIBRARIES')
|
||||||
|
|
||||||
|
|
||||||
def WriteTarget(self, spec, configs, deps, link_deps, part_of_all):
|
def WriteTarget(self, spec, configs, deps, link_deps, part_of_all,
|
||||||
|
write_alias_target):
|
||||||
"""Write Makefile code to produce the final target of the gyp spec.
|
"""Write Makefile code to produce the final target of the gyp spec.
|
||||||
|
|
||||||
spec, configs: input from gyp.
|
spec, configs: input from gyp.
|
||||||
deps, link_deps: dependency lists; see ComputeDeps()
|
deps, link_deps: dependency lists; see ComputeDeps()
|
||||||
part_of_all: flag indicating this target is part of 'all'
|
part_of_all: flag indicating this target is part of 'all'
|
||||||
|
write_alias_target: flag indicating whether to create short aliases for this
|
||||||
|
target
|
||||||
"""
|
"""
|
||||||
self.WriteLn('### Rules for final target.')
|
self.WriteLn('### Rules for final target.')
|
||||||
|
|
||||||
if self.type != 'none':
|
if self.type != 'none':
|
||||||
self.WriteTargetFlags(spec, configs, link_deps)
|
self.WriteTargetFlags(spec, configs, link_deps)
|
||||||
|
|
||||||
|
settings = spec.get('aosp_build_settings', {})
|
||||||
|
if settings:
|
||||||
|
self.WriteLn('### Set directly by aosp_build_settings.')
|
||||||
|
for k, v in settings.iteritems():
|
||||||
|
if isinstance(v, list):
|
||||||
|
self.WriteList(v, k)
|
||||||
|
else:
|
||||||
|
self.WriteLn('%s := %s' % (k, make.QuoteIfNecessary(v)))
|
||||||
|
self.WriteLn('')
|
||||||
|
|
||||||
# Add to the set of targets which represent the gyp 'all' target. We use the
|
# Add to the set of targets which represent the gyp 'all' target. We use the
|
||||||
# name 'gyp_all_modules' as the Android build system doesn't allow the use
|
# name 'gyp_all_modules' as the Android build system doesn't allow the use
|
||||||
# of the Make target 'all' and because 'all_modules' is the equivalent of
|
# of the Make target 'all' and because 'all_modules' is the equivalent of
|
||||||
# the Make target 'all' on Android.
|
# the Make target 'all' on Android.
|
||||||
if part_of_all:
|
if part_of_all and write_alias_target:
|
||||||
self.WriteLn('# Add target alias to "gyp_all_modules" target.')
|
self.WriteLn('# Add target alias to "gyp_all_modules" target.')
|
||||||
self.WriteLn('.PHONY: gyp_all_modules')
|
self.WriteLn('.PHONY: gyp_all_modules')
|
||||||
self.WriteLn('gyp_all_modules: %s' % self.android_module)
|
self.WriteLn('gyp_all_modules: %s' % self.android_module)
|
||||||
|
@ -841,7 +857,7 @@ class AndroidMkWriter(object):
|
||||||
# Add an alias from the gyp target name to the Android module name. This
|
# Add an alias from the gyp target name to the Android module name. This
|
||||||
# simplifies manual builds of the target, and is required by the test
|
# simplifies manual builds of the target, and is required by the test
|
||||||
# framework.
|
# framework.
|
||||||
if self.target != self.android_module:
|
if self.target != self.android_module and write_alias_target:
|
||||||
self.WriteLn('# Alias gyp target name.')
|
self.WriteLn('# Alias gyp target name.')
|
||||||
self.WriteLn('.PHONY: %s' % self.target)
|
self.WriteLn('.PHONY: %s' % self.target)
|
||||||
self.WriteLn('%s: %s' % (self.target, self.android_module))
|
self.WriteLn('%s: %s' % (self.target, self.android_module))
|
||||||
|
@ -859,17 +875,17 @@ class AndroidMkWriter(object):
|
||||||
self.WriteLn('LOCAL_PRELINK_MODULE := false')
|
self.WriteLn('LOCAL_PRELINK_MODULE := false')
|
||||||
self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier)
|
self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier)
|
||||||
elif self.type == 'executable':
|
elif self.type == 'executable':
|
||||||
if self.toolset == 'host':
|
# Executables are for build and test purposes only, so they're installed
|
||||||
self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)')
|
# to a directory that doesn't get included in the system image.
|
||||||
else:
|
self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)')
|
||||||
# Don't install target executables for now, as it results in them being
|
|
||||||
# included in ROM. This can be revisited if there's a reason to install
|
|
||||||
# them later.
|
|
||||||
self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
|
|
||||||
self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier)
|
self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier)
|
||||||
else:
|
else:
|
||||||
self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
|
self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
|
||||||
self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
|
self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
|
||||||
|
if self.toolset == 'target':
|
||||||
|
self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)')
|
||||||
|
else:
|
||||||
|
self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)')
|
||||||
self.WriteLn()
|
self.WriteLn()
|
||||||
self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
|
self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
|
||||||
self.WriteLn()
|
self.WriteLn()
|
||||||
|
@ -877,6 +893,8 @@ class AndroidMkWriter(object):
|
||||||
self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
|
self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
|
||||||
self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
|
self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
|
||||||
self.WriteLn('\t$(hide) touch $@')
|
self.WriteLn('\t$(hide) touch $@')
|
||||||
|
self.WriteLn()
|
||||||
|
self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=')
|
||||||
|
|
||||||
|
|
||||||
def WriteList(self, value_list, variable=None, prefix='',
|
def WriteList(self, value_list, variable=None, prefix='',
|
||||||
|
@ -926,7 +944,7 @@ class AndroidMkWriter(object):
|
||||||
'INPUT_ROOT': expansion,
|
'INPUT_ROOT': expansion,
|
||||||
'INPUT_DIRNAME': dirname,
|
'INPUT_DIRNAME': dirname,
|
||||||
}
|
}
|
||||||
return path
|
return os.path.normpath(path)
|
||||||
|
|
||||||
|
|
||||||
def PerformBuild(data, configurations, params):
|
def PerformBuild(data, configurations, params):
|
||||||
|
@ -946,6 +964,8 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
generator_flags = params.get('generator_flags', {})
|
generator_flags = params.get('generator_flags', {})
|
||||||
builddir_name = generator_flags.get('output_dir', 'out')
|
builddir_name = generator_flags.get('output_dir', 'out')
|
||||||
limit_to_target_all = generator_flags.get('limit_to_target_all', False)
|
limit_to_target_all = generator_flags.get('limit_to_target_all', False)
|
||||||
|
write_alias_targets = generator_flags.get('write_alias_targets', True)
|
||||||
|
sdk_version = generator_flags.get('aosp_sdk_version', 19)
|
||||||
android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
|
android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
|
||||||
assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'
|
assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'
|
||||||
|
|
||||||
|
@ -1031,8 +1051,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
spec = target_dicts[qualified_target]
|
spec = target_dicts[qualified_target]
|
||||||
configs = spec['configurations']
|
configs = spec['configurations']
|
||||||
|
|
||||||
part_of_all = (qualified_target in needed_targets and
|
part_of_all = qualified_target in needed_targets
|
||||||
not int(spec.get('suppress_wildcard', False)))
|
|
||||||
if limit_to_target_all and not part_of_all:
|
if limit_to_target_all and not part_of_all:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
@ -1041,7 +1060,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
writer = AndroidMkWriter(android_top_dir)
|
writer = AndroidMkWriter(android_top_dir)
|
||||||
android_module = writer.Write(qualified_target, relative_target, base_path,
|
android_module = writer.Write(qualified_target, relative_target, base_path,
|
||||||
output_file, spec, configs,
|
output_file, spec, configs,
|
||||||
part_of_all=part_of_all)
|
part_of_all=part_of_all,
|
||||||
|
write_alias_target=write_alias_targets,
|
||||||
|
sdk_version=sdk_version)
|
||||||
if android_module in android_modules:
|
if android_module in android_modules:
|
||||||
print ('ERROR: Android module names must be unique. The following '
|
print ('ERROR: Android module names must be unique. The following '
|
||||||
'targets both generate Android module name %s.\n %s\n %s' %
|
'targets both generate Android module name %s.\n %s\n %s' %
|
||||||
|
@ -1057,6 +1078,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
include_list.add(mkfile_rel_path)
|
include_list.add(mkfile_rel_path)
|
||||||
|
|
||||||
root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
|
root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
|
||||||
|
root_makefile.write('GYP_VAR_PREFIX ?=\n')
|
||||||
|
root_makefile.write('GYP_HOST_VAR_PREFIX ?=\n')
|
||||||
|
root_makefile.write('GYP_HOST_MULTILIB ?=\n')
|
||||||
|
|
||||||
# Write out the sorted list of includes.
|
# Write out the sorted list of includes.
|
||||||
root_makefile.write('\n')
|
root_makefile.write('\n')
|
||||||
|
@ -1064,6 +1088,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
|
root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
|
||||||
root_makefile.write('\n')
|
root_makefile.write('\n')
|
||||||
|
|
||||||
root_makefile.write(SHARED_FOOTER)
|
if write_alias_targets:
|
||||||
|
root_makefile.write(ALL_MODULES_FOOTER)
|
||||||
|
|
||||||
root_makefile.close()
|
root_makefile.close()
|
||||||
|
|
|
@ -216,7 +216,7 @@ def WriteVariable(output, variable_name, prepend=None):
|
||||||
output.write('}')
|
output.write('}')
|
||||||
|
|
||||||
|
|
||||||
class CMakeTargetType:
|
class CMakeTargetType(object):
|
||||||
def __init__(self, command, modifier, property_modifier):
|
def __init__(self, command, modifier, property_modifier):
|
||||||
self.command = command
|
self.command = command
|
||||||
self.modifier = modifier
|
self.modifier = modifier
|
||||||
|
@ -464,7 +464,7 @@ def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
|
||||||
extra_deps.append(copy_name)
|
extra_deps.append(copy_name)
|
||||||
return
|
return
|
||||||
|
|
||||||
class Copy:
|
class Copy(object):
|
||||||
def __init__(self, ext, command):
|
def __init__(self, ext, command):
|
||||||
self.cmake_inputs = []
|
self.cmake_inputs = []
|
||||||
self.cmake_outputs = []
|
self.cmake_outputs = []
|
||||||
|
@ -743,7 +743,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
|
||||||
if target_output_directory is None:
|
if target_output_directory is None:
|
||||||
if target_type in ('executable', 'loadable_module'):
|
if target_type in ('executable', 'loadable_module'):
|
||||||
target_output_directory = generator_default_variables['PRODUCT_DIR']
|
target_output_directory = generator_default_variables['PRODUCT_DIR']
|
||||||
elif target_type in ('shared_library'):
|
elif target_type == 'shared_library':
|
||||||
target_output_directory = '${builddir}/lib.${TOOLSET}'
|
target_output_directory = '${builddir}/lib.${TOOLSET}'
|
||||||
elif spec.get('standalone_static_library', False):
|
elif spec.get('standalone_static_library', False):
|
||||||
target_output_directory = generator_default_variables['PRODUCT_DIR']
|
target_output_directory = generator_default_variables['PRODUCT_DIR']
|
||||||
|
|
|
@ -24,6 +24,7 @@ import gyp
|
||||||
import gyp.common
|
import gyp.common
|
||||||
import gyp.msvs_emulation
|
import gyp.msvs_emulation
|
||||||
import shlex
|
import shlex
|
||||||
|
import xml.etree.cElementTree as ET
|
||||||
|
|
||||||
generator_wants_static_library_dependencies_adjusted = False
|
generator_wants_static_library_dependencies_adjusted = False
|
||||||
|
|
||||||
|
@ -31,8 +32,8 @@ generator_default_variables = {
|
||||||
}
|
}
|
||||||
|
|
||||||
for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
|
for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
|
||||||
# Some gyp steps fail if these are empty(!).
|
# Some gyp steps fail if these are empty(!), so we convert them to variables
|
||||||
generator_default_variables[dirname] = 'dir'
|
generator_default_variables[dirname] = '$' + dirname
|
||||||
|
|
||||||
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
|
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
|
||||||
'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
|
'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
|
||||||
|
@ -77,7 +78,8 @@ def CalculateGeneratorInputInfo(params):
|
||||||
|
|
||||||
|
|
||||||
def GetAllIncludeDirectories(target_list, target_dicts,
|
def GetAllIncludeDirectories(target_list, target_dicts,
|
||||||
shared_intermediate_dirs, config_name, params):
|
shared_intermediate_dirs, config_name, params,
|
||||||
|
compiler_path):
|
||||||
"""Calculate the set of include directories to be used.
|
"""Calculate the set of include directories to be used.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
|
@ -88,6 +90,33 @@ def GetAllIncludeDirectories(target_list, target_dicts,
|
||||||
gyp_includes_set = set()
|
gyp_includes_set = set()
|
||||||
compiler_includes_list = []
|
compiler_includes_list = []
|
||||||
|
|
||||||
|
# Find compiler's default include dirs.
|
||||||
|
if compiler_path:
|
||||||
|
command = shlex.split(compiler_path)
|
||||||
|
command.extend(['-E', '-xc++', '-v', '-'])
|
||||||
|
proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
|
||||||
|
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
|
output = proc.communicate()[1]
|
||||||
|
# Extract the list of include dirs from the output, which has this format:
|
||||||
|
# ...
|
||||||
|
# #include "..." search starts here:
|
||||||
|
# #include <...> search starts here:
|
||||||
|
# /usr/include/c++/4.6
|
||||||
|
# /usr/local/include
|
||||||
|
# End of search list.
|
||||||
|
# ...
|
||||||
|
in_include_list = False
|
||||||
|
for line in output.splitlines():
|
||||||
|
if line.startswith('#include'):
|
||||||
|
in_include_list = True
|
||||||
|
continue
|
||||||
|
if line.startswith('End of search list.'):
|
||||||
|
break
|
||||||
|
if in_include_list:
|
||||||
|
include_dir = line.strip()
|
||||||
|
if include_dir not in compiler_includes_list:
|
||||||
|
compiler_includes_list.append(include_dir)
|
||||||
|
|
||||||
flavor = gyp.common.GetFlavor(params)
|
flavor = gyp.common.GetFlavor(params)
|
||||||
if flavor == 'win':
|
if flavor == 'win':
|
||||||
generator_flags = params.get('generator_flags', {})
|
generator_flags = params.get('generator_flags', {})
|
||||||
|
@ -106,11 +135,10 @@ def GetAllIncludeDirectories(target_list, target_dicts,
|
||||||
else:
|
else:
|
||||||
cflags = config['cflags']
|
cflags = config['cflags']
|
||||||
for cflag in cflags:
|
for cflag in cflags:
|
||||||
include_dir = ''
|
|
||||||
if cflag.startswith('-I'):
|
if cflag.startswith('-I'):
|
||||||
include_dir = cflag[2:]
|
include_dir = cflag[2:]
|
||||||
if include_dir and not include_dir in compiler_includes_list:
|
if include_dir not in compiler_includes_list:
|
||||||
compiler_includes_list.append(include_dir)
|
compiler_includes_list.append(include_dir)
|
||||||
|
|
||||||
# Find standard gyp include dirs.
|
# Find standard gyp include dirs.
|
||||||
if config.has_key('include_dirs'):
|
if config.has_key('include_dirs'):
|
||||||
|
@ -125,9 +153,7 @@ def GetAllIncludeDirectories(target_list, target_dicts,
|
||||||
include_dir = base_dir + '/' + include_dir
|
include_dir = base_dir + '/' + include_dir
|
||||||
include_dir = os.path.abspath(include_dir)
|
include_dir = os.path.abspath(include_dir)
|
||||||
|
|
||||||
if not include_dir in gyp_includes_set:
|
gyp_includes_set.add(include_dir)
|
||||||
gyp_includes_set.add(include_dir)
|
|
||||||
|
|
||||||
|
|
||||||
# Generate a list that has all the include dirs.
|
# Generate a list that has all the include dirs.
|
||||||
all_includes_list = list(gyp_includes_set)
|
all_includes_list = list(gyp_includes_set)
|
||||||
|
@ -140,7 +166,7 @@ def GetAllIncludeDirectories(target_list, target_dicts,
|
||||||
return all_includes_list
|
return all_includes_list
|
||||||
|
|
||||||
|
|
||||||
def GetCompilerPath(target_list, target_dicts, data):
|
def GetCompilerPath(target_list, data, options):
|
||||||
"""Determine a command that can be used to invoke the compiler.
|
"""Determine a command that can be used to invoke the compiler.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
|
@ -148,13 +174,12 @@ def GetCompilerPath(target_list, target_dicts, data):
|
||||||
the compiler from that. Otherwise, see if a compiler was specified via the
|
the compiler from that. Otherwise, see if a compiler was specified via the
|
||||||
CC_target environment variable.
|
CC_target environment variable.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# First, see if the compiler is configured in make's settings.
|
# First, see if the compiler is configured in make's settings.
|
||||||
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
|
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
|
||||||
make_global_settings_dict = data[build_file].get('make_global_settings', {})
|
make_global_settings_dict = data[build_file].get('make_global_settings', {})
|
||||||
for key, value in make_global_settings_dict:
|
for key, value in make_global_settings_dict:
|
||||||
if key in ['CC', 'CXX']:
|
if key in ['CC', 'CXX']:
|
||||||
return value
|
return os.path.join(options.toplevel_dir, value)
|
||||||
|
|
||||||
# Check to see if the compiler was specified as an environment variable.
|
# Check to see if the compiler was specified as an environment variable.
|
||||||
for key in ['CC_target', 'CC', 'CXX']:
|
for key in ['CC_target', 'CC', 'CXX']:
|
||||||
|
@ -165,7 +190,8 @@ def GetCompilerPath(target_list, target_dicts, data):
|
||||||
return 'gcc'
|
return 'gcc'
|
||||||
|
|
||||||
|
|
||||||
def GetAllDefines(target_list, target_dicts, data, config_name, params):
|
def GetAllDefines(target_list, target_dicts, data, config_name, params,
|
||||||
|
compiler_path):
|
||||||
"""Calculate the defines for a project.
|
"""Calculate the defines for a project.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
|
@ -202,9 +228,8 @@ def GetAllDefines(target_list, target_dicts, data, config_name, params):
|
||||||
# Get default compiler defines (if possible).
|
# Get default compiler defines (if possible).
|
||||||
if flavor == 'win':
|
if flavor == 'win':
|
||||||
return all_defines # Default defines already processed in the loop above.
|
return all_defines # Default defines already processed in the loop above.
|
||||||
cc_target = GetCompilerPath(target_list, target_dicts, data)
|
if compiler_path:
|
||||||
if cc_target:
|
command = shlex.split(compiler_path)
|
||||||
command = shlex.split(cc_target)
|
|
||||||
command.extend(['-E', '-dM', '-'])
|
command.extend(['-E', '-dM', '-'])
|
||||||
cpp_proc = subprocess.Popen(args=command, cwd='.',
|
cpp_proc = subprocess.Popen(args=command, cwd='.',
|
||||||
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
|
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
|
||||||
|
@ -270,31 +295,123 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
|
shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
|
||||||
os.path.join(toplevel_build, 'gen')]
|
os.path.join(toplevel_build, 'gen')]
|
||||||
|
|
||||||
out_name = os.path.join(toplevel_build, 'eclipse-cdt-settings.xml')
|
GenerateCdtSettingsFile(target_list,
|
||||||
|
target_dicts,
|
||||||
|
data,
|
||||||
|
params,
|
||||||
|
config_name,
|
||||||
|
os.path.join(toplevel_build,
|
||||||
|
'eclipse-cdt-settings.xml'),
|
||||||
|
options,
|
||||||
|
shared_intermediate_dirs)
|
||||||
|
GenerateClasspathFile(target_list,
|
||||||
|
target_dicts,
|
||||||
|
options.toplevel_dir,
|
||||||
|
toplevel_build,
|
||||||
|
os.path.join(toplevel_build,
|
||||||
|
'eclipse-classpath.xml'))
|
||||||
|
|
||||||
|
|
||||||
|
def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
|
||||||
|
config_name, out_name, options,
|
||||||
|
shared_intermediate_dirs):
|
||||||
gyp.common.EnsureDirExists(out_name)
|
gyp.common.EnsureDirExists(out_name)
|
||||||
out = open(out_name, 'w')
|
with open(out_name, 'w') as out:
|
||||||
|
out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||||
|
out.write('<cdtprojectproperties>\n')
|
||||||
|
|
||||||
out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
|
eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
|
||||||
out.write('<cdtprojectproperties>\n')
|
'GNU C++', 'GNU C', 'Assembly']
|
||||||
|
compiler_path = GetCompilerPath(target_list, data, options)
|
||||||
|
include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
|
||||||
|
shared_intermediate_dirs,
|
||||||
|
config_name, params, compiler_path)
|
||||||
|
WriteIncludePaths(out, eclipse_langs, include_dirs)
|
||||||
|
defines = GetAllDefines(target_list, target_dicts, data, config_name,
|
||||||
|
params, compiler_path)
|
||||||
|
WriteMacros(out, eclipse_langs, defines)
|
||||||
|
|
||||||
eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
|
out.write('</cdtprojectproperties>\n')
|
||||||
'GNU C++', 'GNU C', 'Assembly']
|
|
||||||
include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
|
|
||||||
shared_intermediate_dirs, config_name,
|
|
||||||
params)
|
|
||||||
WriteIncludePaths(out, eclipse_langs, include_dirs)
|
|
||||||
defines = GetAllDefines(target_list, target_dicts, data, config_name, params)
|
|
||||||
WriteMacros(out, eclipse_langs, defines)
|
|
||||||
|
|
||||||
out.write('</cdtprojectproperties>\n')
|
|
||||||
out.close()
|
def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
|
||||||
|
toplevel_build, out_name):
|
||||||
|
'''Generates a classpath file suitable for symbol navigation and code
|
||||||
|
completion of Java code (such as in Android projects) by finding all
|
||||||
|
.java and .jar files used as action inputs.'''
|
||||||
|
gyp.common.EnsureDirExists(out_name)
|
||||||
|
result = ET.Element('classpath')
|
||||||
|
|
||||||
|
def AddElements(kind, paths):
|
||||||
|
# First, we need to normalize the paths so they are all relative to the
|
||||||
|
# toplevel dir.
|
||||||
|
rel_paths = set()
|
||||||
|
for path in paths:
|
||||||
|
if os.path.isabs(path):
|
||||||
|
rel_paths.add(os.path.relpath(path, toplevel_dir))
|
||||||
|
else:
|
||||||
|
rel_paths.add(path)
|
||||||
|
|
||||||
|
for path in sorted(rel_paths):
|
||||||
|
entry_element = ET.SubElement(result, 'classpathentry')
|
||||||
|
entry_element.set('kind', kind)
|
||||||
|
entry_element.set('path', path)
|
||||||
|
|
||||||
|
AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
|
||||||
|
AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
|
||||||
|
# Include the standard JRE container and a dummy out folder
|
||||||
|
AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
|
||||||
|
# Include a dummy out folder so that Eclipse doesn't use the default /bin
|
||||||
|
# folder in the root of the project.
|
||||||
|
AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])
|
||||||
|
|
||||||
|
ET.ElementTree(result).write(out_name)
|
||||||
|
|
||||||
|
|
||||||
|
def GetJavaJars(target_list, target_dicts, toplevel_dir):
|
||||||
|
'''Generates a sequence of all .jars used as inputs.'''
|
||||||
|
for target_name in target_list:
|
||||||
|
target = target_dicts[target_name]
|
||||||
|
for action in target.get('actions', []):
|
||||||
|
for input_ in action['inputs']:
|
||||||
|
if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'):
|
||||||
|
if os.path.isabs(input_):
|
||||||
|
yield input_
|
||||||
|
else:
|
||||||
|
yield os.path.join(os.path.dirname(target_name), input_)
|
||||||
|
|
||||||
|
|
||||||
|
def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
|
||||||
|
'''Generates a sequence of all likely java package root directories.'''
|
||||||
|
for target_name in target_list:
|
||||||
|
target = target_dicts[target_name]
|
||||||
|
for action in target.get('actions', []):
|
||||||
|
for input_ in action['inputs']:
|
||||||
|
if (os.path.splitext(input_)[1] == '.java' and
|
||||||
|
not input_.startswith('$')):
|
||||||
|
dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
|
||||||
|
input_))
|
||||||
|
# If there is a parent 'src' or 'java' folder, navigate up to it -
|
||||||
|
# these are canonical package root names in Chromium. This will
|
||||||
|
# break if 'src' or 'java' exists in the package structure. This
|
||||||
|
# could be further improved by inspecting the java file for the
|
||||||
|
# package name if this proves to be too fragile in practice.
|
||||||
|
parent_search = dir_
|
||||||
|
while os.path.basename(parent_search) not in ['src', 'java']:
|
||||||
|
parent_search, _ = os.path.split(parent_search)
|
||||||
|
if not parent_search or parent_search == toplevel_dir:
|
||||||
|
# Didn't find a known root, just return the original path
|
||||||
|
yield dir_
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
yield parent_search
|
||||||
|
|
||||||
|
|
||||||
def GenerateOutput(target_list, target_dicts, data, params):
|
def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
"""Generate an XML settings file that can be imported into a CDT project."""
|
"""Generate an XML settings file that can be imported into a CDT project."""
|
||||||
|
|
||||||
if params['options'].generator_output:
|
if params['options'].generator_output:
|
||||||
raise NotImplementedError, "--generator_output not implemented for eclipse"
|
raise NotImplementedError("--generator_output not implemented for eclipse")
|
||||||
|
|
||||||
user_config = params.get('generator_flags', {}).get('config', None)
|
user_config = params.get('generator_flags', {}).get('config', None)
|
||||||
if user_config:
|
if user_config:
|
||||||
|
|
|
@ -39,9 +39,11 @@ import pprint
|
||||||
|
|
||||||
# These variables should just be spit back out as variable references.
|
# These variables should just be spit back out as variable references.
|
||||||
_generator_identity_variables = [
|
_generator_identity_variables = [
|
||||||
|
'CONFIGURATION_NAME',
|
||||||
'EXECUTABLE_PREFIX',
|
'EXECUTABLE_PREFIX',
|
||||||
'EXECUTABLE_SUFFIX',
|
'EXECUTABLE_SUFFIX',
|
||||||
'INTERMEDIATE_DIR',
|
'INTERMEDIATE_DIR',
|
||||||
|
'LIB_DIR',
|
||||||
'PRODUCT_DIR',
|
'PRODUCT_DIR',
|
||||||
'RULE_INPUT_ROOT',
|
'RULE_INPUT_ROOT',
|
||||||
'RULE_INPUT_DIRNAME',
|
'RULE_INPUT_DIRNAME',
|
||||||
|
@ -49,6 +51,11 @@ _generator_identity_variables = [
|
||||||
'RULE_INPUT_NAME',
|
'RULE_INPUT_NAME',
|
||||||
'RULE_INPUT_PATH',
|
'RULE_INPUT_PATH',
|
||||||
'SHARED_INTERMEDIATE_DIR',
|
'SHARED_INTERMEDIATE_DIR',
|
||||||
|
'SHARED_LIB_DIR',
|
||||||
|
'SHARED_LIB_PREFIX',
|
||||||
|
'SHARED_LIB_SUFFIX',
|
||||||
|
'STATIC_LIB_PREFIX',
|
||||||
|
'STATIC_LIB_SUFFIX',
|
||||||
]
|
]
|
||||||
|
|
||||||
# gypd doesn't define a default value for OS like many other generator
|
# gypd doesn't define a default value for OS like many other generator
|
||||||
|
|
|
@ -29,6 +29,7 @@ import gyp
|
||||||
import gyp.common
|
import gyp.common
|
||||||
import gyp.xcode_emulation
|
import gyp.xcode_emulation
|
||||||
from gyp.common import GetEnvironFallback
|
from gyp.common import GetEnvironFallback
|
||||||
|
from gyp.common import GypError
|
||||||
|
|
||||||
generator_default_variables = {
|
generator_default_variables = {
|
||||||
'EXECUTABLE_PREFIX': '',
|
'EXECUTABLE_PREFIX': '',
|
||||||
|
@ -280,15 +281,7 @@ LDFLAGS.target ?= $(LDFLAGS)
|
||||||
AR.target ?= $(AR)
|
AR.target ?= $(AR)
|
||||||
|
|
||||||
# C++ apps need to be linked with g++.
|
# C++ apps need to be linked with g++.
|
||||||
#
|
LINK ?= $(CXX.target)
|
||||||
# Note: flock is used to seralize linking. Linking is a memory-intensive
|
|
||||||
# process so running parallel links can often lead to thrashing. To disable
|
|
||||||
# the serialization, override LINK via an envrionment variable as follows:
|
|
||||||
#
|
|
||||||
# export LINK=g++
|
|
||||||
#
|
|
||||||
# This will allow make to invoke N linker processes as specified in -jN.
|
|
||||||
LINK ?= %(flock)s $(builddir)/linker.lock $(CXX.target)
|
|
||||||
|
|
||||||
# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
|
# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
|
||||||
# to replicate this environment fallback in make as well.
|
# to replicate this environment fallback in make as well.
|
||||||
|
@ -372,7 +365,7 @@ cmd_touch = touch $@
|
||||||
|
|
||||||
quiet_cmd_copy = COPY $@
|
quiet_cmd_copy = COPY $@
|
||||||
# send stderr to /dev/null to ignore messages when linking directories.
|
# send stderr to /dev/null to ignore messages when linking directories.
|
||||||
cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
|
cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp -af "$<" "$@")
|
||||||
|
|
||||||
%(link_commands)s
|
%(link_commands)s
|
||||||
"""
|
"""
|
||||||
|
@ -631,6 +624,38 @@ def QuoteSpaces(s, quote=r'\ '):
|
||||||
return s.replace(' ', quote)
|
return s.replace(' ', quote)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: Avoid code duplication with _ValidateSourcesForMSVSProject in msvs.py.
|
||||||
|
def _ValidateSourcesForOSX(spec, all_sources):
|
||||||
|
"""Makes sure if duplicate basenames are not specified in the source list.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
spec: The target dictionary containing the properties of the target.
|
||||||
|
"""
|
||||||
|
if spec.get('type', None) != 'static_library':
|
||||||
|
return
|
||||||
|
|
||||||
|
basenames = {}
|
||||||
|
for source in all_sources:
|
||||||
|
name, ext = os.path.splitext(source)
|
||||||
|
is_compiled_file = ext in [
|
||||||
|
'.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
|
||||||
|
if not is_compiled_file:
|
||||||
|
continue
|
||||||
|
basename = os.path.basename(name) # Don't include extension.
|
||||||
|
basenames.setdefault(basename, []).append(source)
|
||||||
|
|
||||||
|
error = ''
|
||||||
|
for basename, files in basenames.iteritems():
|
||||||
|
if len(files) > 1:
|
||||||
|
error += ' %s: %s\n' % (basename, ' '.join(files))
|
||||||
|
|
||||||
|
if error:
|
||||||
|
print('static library %s has several files with the same basename:\n' %
|
||||||
|
spec['target_name'] + error + 'libtool on OS X will generate' +
|
||||||
|
' warnings for them.')
|
||||||
|
raise GypError('Duplicate basenames in sources section, see list above')
|
||||||
|
|
||||||
|
|
||||||
# Map from qualified target to path to output.
|
# Map from qualified target to path to output.
|
||||||
target_outputs = {}
|
target_outputs = {}
|
||||||
# Map from qualified target to any linkable output. A subset
|
# Map from qualified target to any linkable output. A subset
|
||||||
|
@ -640,7 +665,7 @@ target_outputs = {}
|
||||||
target_link_deps = {}
|
target_link_deps = {}
|
||||||
|
|
||||||
|
|
||||||
class MakefileWriter:
|
class MakefileWriter(object):
|
||||||
"""MakefileWriter packages up the writing of one target-specific foobar.mk.
|
"""MakefileWriter packages up the writing of one target-specific foobar.mk.
|
||||||
|
|
||||||
Its only real entry point is Write(), and is mostly used for namespacing.
|
Its only real entry point is Write(), and is mostly used for namespacing.
|
||||||
|
@ -758,6 +783,10 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
||||||
# Sources.
|
# Sources.
|
||||||
all_sources = spec.get('sources', []) + extra_sources
|
all_sources = spec.get('sources', []) + extra_sources
|
||||||
if all_sources:
|
if all_sources:
|
||||||
|
if self.flavor == 'mac':
|
||||||
|
# libtool on OS X generates warnings for duplicate basenames in the same
|
||||||
|
# target.
|
||||||
|
_ValidateSourcesForOSX(spec, all_sources)
|
||||||
self.WriteSources(
|
self.WriteSources(
|
||||||
configs, deps, all_sources, extra_outputs,
|
configs, deps, all_sources, extra_outputs,
|
||||||
extra_link_deps, part_of_all,
|
extra_link_deps, part_of_all,
|
||||||
|
@ -1101,9 +1130,12 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
||||||
for output, res in gyp.xcode_emulation.GetMacBundleResources(
|
for output, res in gyp.xcode_emulation.GetMacBundleResources(
|
||||||
generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
|
generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
|
||||||
map(Sourceify, map(self.Absolutify, resources))):
|
map(Sourceify, map(self.Absolutify, resources))):
|
||||||
self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
|
_, ext = os.path.splitext(output)
|
||||||
part_of_all=True)
|
if ext != '.xcassets':
|
||||||
bundle_deps.append(output)
|
# Make does not supports '.xcassets' emulation.
|
||||||
|
self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
|
||||||
|
part_of_all=True)
|
||||||
|
bundle_deps.append(output)
|
||||||
|
|
||||||
|
|
||||||
def WriteMacInfoPlist(self, bundle_deps):
|
def WriteMacInfoPlist(self, bundle_deps):
|
||||||
|
@ -1888,13 +1920,11 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
|
||||||
"""Returns the location of the final output for an installable target."""
|
"""Returns the location of the final output for an installable target."""
|
||||||
# Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
|
# Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
|
||||||
# rely on this. Emulate this behavior for mac.
|
# rely on this. Emulate this behavior for mac.
|
||||||
|
if (self.type == 'shared_library' and
|
||||||
# XXX(TooTallNate): disabling this code since we don't want this behavior...
|
(self.flavor != 'mac' or self.toolset != 'target')):
|
||||||
#if (self.type == 'shared_library' and
|
# Install all shared libs into a common directory (per toolset) for
|
||||||
# (self.flavor != 'mac' or self.toolset != 'target')):
|
# convenient access with LD_LIBRARY_PATH.
|
||||||
# # Install all shared libs into a common directory (per toolset) for
|
return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
|
||||||
# # convenient access with LD_LIBRARY_PATH.
|
|
||||||
# return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
|
|
||||||
return '$(builddir)/' + self.alias
|
return '$(builddir)/' + self.alias
|
||||||
|
|
||||||
|
|
||||||
|
@ -2036,7 +2066,6 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
|
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
|
||||||
make_global_settings_array = data[build_file].get('make_global_settings', [])
|
make_global_settings_array = data[build_file].get('make_global_settings', [])
|
||||||
wrappers = {}
|
wrappers = {}
|
||||||
wrappers['LINK'] = '%s $(builddir)/linker.lock' % flock_command
|
|
||||||
for key, value in make_global_settings_array:
|
for key, value in make_global_settings_array:
|
||||||
if key.endswith('_wrapper'):
|
if key.endswith('_wrapper'):
|
||||||
wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value
|
wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value
|
||||||
|
|
|
@ -2,7 +2,6 @@
|
||||||
# Use of this source code is governed by a BSD-style license that can be
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
# found in the LICENSE file.
|
# found in the LICENSE file.
|
||||||
|
|
||||||
import collections
|
|
||||||
import copy
|
import copy
|
||||||
import ntpath
|
import ntpath
|
||||||
import os
|
import os
|
||||||
|
@ -13,6 +12,7 @@ import sys
|
||||||
|
|
||||||
import gyp.common
|
import gyp.common
|
||||||
import gyp.easy_xml as easy_xml
|
import gyp.easy_xml as easy_xml
|
||||||
|
import gyp.generator.ninja as ninja_generator
|
||||||
import gyp.MSVSNew as MSVSNew
|
import gyp.MSVSNew as MSVSNew
|
||||||
import gyp.MSVSProject as MSVSProject
|
import gyp.MSVSProject as MSVSProject
|
||||||
import gyp.MSVSSettings as MSVSSettings
|
import gyp.MSVSSettings as MSVSSettings
|
||||||
|
@ -21,6 +21,7 @@ import gyp.MSVSUserFile as MSVSUserFile
|
||||||
import gyp.MSVSUtil as MSVSUtil
|
import gyp.MSVSUtil as MSVSUtil
|
||||||
import gyp.MSVSVersion as MSVSVersion
|
import gyp.MSVSVersion as MSVSVersion
|
||||||
from gyp.common import GypError
|
from gyp.common import GypError
|
||||||
|
from gyp.common import OrderedSet
|
||||||
|
|
||||||
# TODO: Remove once bots are on 2.7, http://crbug.com/241769
|
# TODO: Remove once bots are on 2.7, http://crbug.com/241769
|
||||||
def _import_OrderedDict():
|
def _import_OrderedDict():
|
||||||
|
@ -41,7 +42,7 @@ OrderedDict = _import_OrderedDict()
|
||||||
# if IncrediBuild is executed from inside Visual Studio. This regex
|
# if IncrediBuild is executed from inside Visual Studio. This regex
|
||||||
# validates that the string looks like a GUID with all uppercase hex
|
# validates that the string looks like a GUID with all uppercase hex
|
||||||
# letters.
|
# letters.
|
||||||
VALID_MSVS_GUID_CHARS = re.compile('^[A-F0-9\-]+$')
|
VALID_MSVS_GUID_CHARS = re.compile(r'^[A-F0-9\-]+$')
|
||||||
|
|
||||||
|
|
||||||
generator_default_variables = {
|
generator_default_variables = {
|
||||||
|
@ -81,6 +82,10 @@ generator_additional_non_configuration_keys = [
|
||||||
'msvs_external_builder_out_dir',
|
'msvs_external_builder_out_dir',
|
||||||
'msvs_external_builder_build_cmd',
|
'msvs_external_builder_build_cmd',
|
||||||
'msvs_external_builder_clean_cmd',
|
'msvs_external_builder_clean_cmd',
|
||||||
|
'msvs_external_builder_clcompile_cmd',
|
||||||
|
'msvs_enable_winrt',
|
||||||
|
'msvs_requires_importlibrary',
|
||||||
|
'msvs_enable_winphone',
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@ -97,46 +102,6 @@ cached_username = None
|
||||||
cached_domain = None
|
cached_domain = None
|
||||||
|
|
||||||
|
|
||||||
# Based on http://code.activestate.com/recipes/576694/.
|
|
||||||
class OrderedSet(collections.MutableSet):
|
|
||||||
def __init__(self, iterable=None):
|
|
||||||
self.end = end = []
|
|
||||||
end += [None, end, end] # sentinel node for doubly linked list
|
|
||||||
self.map = {} # key --> [key, prev, next]
|
|
||||||
if iterable is not None:
|
|
||||||
self |= iterable
|
|
||||||
|
|
||||||
def __len__(self):
|
|
||||||
return len(self.map)
|
|
||||||
|
|
||||||
def discard(self, key):
|
|
||||||
if key in self.map:
|
|
||||||
key, prev, next = self.map.pop(key)
|
|
||||||
prev[2] = next
|
|
||||||
next[1] = prev
|
|
||||||
|
|
||||||
def __contains__(self, key):
|
|
||||||
return key in self.map
|
|
||||||
|
|
||||||
def add(self, key):
|
|
||||||
if key not in self.map:
|
|
||||||
end = self.end
|
|
||||||
curr = end[1]
|
|
||||||
curr[2] = end[1] = self.map[key] = [key, curr, end]
|
|
||||||
|
|
||||||
def update(self, iterable):
|
|
||||||
for i in iterable:
|
|
||||||
if i not in self:
|
|
||||||
self.add(i)
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
end = self.end
|
|
||||||
curr = end[2]
|
|
||||||
while curr is not end:
|
|
||||||
yield curr[0]
|
|
||||||
curr = curr[2]
|
|
||||||
|
|
||||||
|
|
||||||
# TODO(gspencer): Switch the os.environ calls to be
|
# TODO(gspencer): Switch the os.environ calls to be
|
||||||
# win32api.GetDomainName() and win32api.GetUserName() once the
|
# win32api.GetDomainName() and win32api.GetUserName() once the
|
||||||
# python version in depot_tools has been updated to work on Vista
|
# python version in depot_tools has been updated to work on Vista
|
||||||
|
@ -153,11 +118,11 @@ def _GetDomainAndUserName():
|
||||||
call = subprocess.Popen(['net', 'config', 'Workstation'],
|
call = subprocess.Popen(['net', 'config', 'Workstation'],
|
||||||
stdout=subprocess.PIPE)
|
stdout=subprocess.PIPE)
|
||||||
config = call.communicate()[0]
|
config = call.communicate()[0]
|
||||||
username_re = re.compile('^User name\s+(\S+)', re.MULTILINE)
|
username_re = re.compile(r'^User name\s+(\S+)', re.MULTILINE)
|
||||||
username_match = username_re.search(config)
|
username_match = username_re.search(config)
|
||||||
if username_match:
|
if username_match:
|
||||||
username = username_match.group(1)
|
username = username_match.group(1)
|
||||||
domain_re = re.compile('^Logon domain\s+(\S+)', re.MULTILINE)
|
domain_re = re.compile(r'^Logon domain\s+(\S+)', re.MULTILINE)
|
||||||
domain_match = domain_re.search(config)
|
domain_match = domain_re.search(config)
|
||||||
if domain_match:
|
if domain_match:
|
||||||
domain = domain_match.group(1)
|
domain = domain_match.group(1)
|
||||||
|
@ -266,7 +231,8 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
|
||||||
for f in folders:
|
for f in folders:
|
||||||
contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
|
contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
|
||||||
excluded=excluded,
|
excluded=excluded,
|
||||||
list_excluded=list_excluded)
|
list_excluded=list_excluded,
|
||||||
|
msvs_version=msvs_version)
|
||||||
contents = MSVSProject.Filter(f, contents=contents)
|
contents = MSVSProject.Filter(f, contents=contents)
|
||||||
result.append(contents)
|
result.append(contents)
|
||||||
return result
|
return result
|
||||||
|
@ -322,7 +288,7 @@ def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
|
||||||
if [x for x in cmd if '$(InputDir)' in x]:
|
if [x for x in cmd if '$(InputDir)' in x]:
|
||||||
input_dir_preamble = (
|
input_dir_preamble = (
|
||||||
'set INPUTDIR=$(InputDir)\n'
|
'set INPUTDIR=$(InputDir)\n'
|
||||||
'set INPUTDIR=%INPUTDIR:$(ProjectDir)=%\n'
|
'if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n'
|
||||||
'set INPUTDIR=%INPUTDIR:~0,-1%\n'
|
'set INPUTDIR=%INPUTDIR:~0,-1%\n'
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
|
@ -851,23 +817,27 @@ def _GenerateRulesForMSVS(p, output_dir, options, spec,
|
||||||
if rules_external:
|
if rules_external:
|
||||||
_GenerateExternalRules(rules_external, output_dir, spec,
|
_GenerateExternalRules(rules_external, output_dir, spec,
|
||||||
sources, options, actions_to_add)
|
sources, options, actions_to_add)
|
||||||
_AdjustSourcesForRules(spec, rules, sources, excluded_sources)
|
_AdjustSourcesForRules(rules, sources, excluded_sources, False)
|
||||||
|
|
||||||
|
|
||||||
def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
|
def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
|
||||||
# Add outputs generated by each rule (if applicable).
|
# Add outputs generated by each rule (if applicable).
|
||||||
for rule in rules:
|
for rule in rules:
|
||||||
# Done if not processing outputs as sources.
|
# Add in the outputs from this rule.
|
||||||
if int(rule.get('process_outputs_as_sources', False)):
|
trigger_files = _FindRuleTriggerFiles(rule, sources)
|
||||||
# Add in the outputs from this rule.
|
for trigger_file in trigger_files:
|
||||||
trigger_files = _FindRuleTriggerFiles(rule, sources)
|
# Remove trigger_file from excluded_sources to let the rule be triggered
|
||||||
for trigger_file in trigger_files:
|
# (e.g. rule trigger ax_enums.idl is added to excluded_sources
|
||||||
|
# because it's also in an action's inputs in the same project)
|
||||||
|
excluded_sources.discard(_FixPath(trigger_file))
|
||||||
|
# Done if not processing outputs as sources.
|
||||||
|
if int(rule.get('process_outputs_as_sources', False)):
|
||||||
inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
|
inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
|
||||||
inputs = OrderedSet(_FixPaths(inputs))
|
inputs = OrderedSet(_FixPaths(inputs))
|
||||||
outputs = OrderedSet(_FixPaths(outputs))
|
outputs = OrderedSet(_FixPaths(outputs))
|
||||||
inputs.remove(_FixPath(trigger_file))
|
inputs.remove(_FixPath(trigger_file))
|
||||||
sources.update(inputs)
|
sources.update(inputs)
|
||||||
if not spec.get('msvs_external_builder'):
|
if not is_msbuild:
|
||||||
excluded_sources.update(inputs)
|
excluded_sources.update(inputs)
|
||||||
sources.update(outputs)
|
sources.update(outputs)
|
||||||
|
|
||||||
|
@ -954,6 +924,42 @@ def _GenerateProject(project, options, version, generator_flags):
|
||||||
return _GenerateMSVSProject(project, options, version, generator_flags)
|
return _GenerateMSVSProject(project, options, version, generator_flags)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: Avoid code duplication with _ValidateSourcesForOSX in make.py.
|
||||||
|
def _ValidateSourcesForMSVSProject(spec, version):
|
||||||
|
"""Makes sure if duplicate basenames are not specified in the source list.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
spec: The target dictionary containing the properties of the target.
|
||||||
|
version: The VisualStudioVersion object.
|
||||||
|
"""
|
||||||
|
# This validation should not be applied to MSVC2010 and later.
|
||||||
|
assert not version.UsesVcxproj()
|
||||||
|
|
||||||
|
# TODO: Check if MSVC allows this for loadable_module targets.
|
||||||
|
if spec.get('type', None) not in ('static_library', 'shared_library'):
|
||||||
|
return
|
||||||
|
sources = spec.get('sources', [])
|
||||||
|
basenames = {}
|
||||||
|
for source in sources:
|
||||||
|
name, ext = os.path.splitext(source)
|
||||||
|
is_compiled_file = ext in [
|
||||||
|
'.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
|
||||||
|
if not is_compiled_file:
|
||||||
|
continue
|
||||||
|
basename = os.path.basename(name) # Don't include extension.
|
||||||
|
basenames.setdefault(basename, []).append(source)
|
||||||
|
|
||||||
|
error = ''
|
||||||
|
for basename, files in basenames.iteritems():
|
||||||
|
if len(files) > 1:
|
||||||
|
error += ' %s: %s\n' % (basename, ' '.join(files))
|
||||||
|
|
||||||
|
if error:
|
||||||
|
print('static library %s has several files with the same basename:\n' %
|
||||||
|
spec['target_name'] + error + 'MSVC08 cannot handle that.')
|
||||||
|
raise GypError('Duplicate basenames in sources section, see list above')
|
||||||
|
|
||||||
|
|
||||||
def _GenerateMSVSProject(project, options, version, generator_flags):
|
def _GenerateMSVSProject(project, options, version, generator_flags):
|
||||||
"""Generates a .vcproj file. It may create .rules and .user files too.
|
"""Generates a .vcproj file. It may create .rules and .user files too.
|
||||||
|
|
||||||
|
@ -979,6 +985,11 @@ def _GenerateMSVSProject(project, options, version, generator_flags):
|
||||||
for config_name, config in spec['configurations'].iteritems():
|
for config_name, config in spec['configurations'].iteritems():
|
||||||
_AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
|
_AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
|
||||||
|
|
||||||
|
# MSVC08 and prior version cannot handle duplicate basenames in the same
|
||||||
|
# target.
|
||||||
|
# TODO: Take excluded sources into consideration if possible.
|
||||||
|
_ValidateSourcesForMSVSProject(spec, version)
|
||||||
|
|
||||||
# Prepare list of sources and excluded sources.
|
# Prepare list of sources and excluded sources.
|
||||||
gyp_file = os.path.split(project.build_file)[1]
|
gyp_file = os.path.split(project.build_file)[1]
|
||||||
sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
|
sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
|
||||||
|
@ -1098,7 +1109,8 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
|
||||||
for this configuration.
|
for this configuration.
|
||||||
"""
|
"""
|
||||||
# Get the information for this configuration
|
# Get the information for this configuration
|
||||||
include_dirs, resource_include_dirs = _GetIncludeDirs(config)
|
include_dirs, midl_include_dirs, resource_include_dirs = \
|
||||||
|
_GetIncludeDirs(config)
|
||||||
libraries = _GetLibraries(spec)
|
libraries = _GetLibraries(spec)
|
||||||
library_dirs = _GetLibraryDirs(config)
|
library_dirs = _GetLibraryDirs(config)
|
||||||
out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
|
out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
|
||||||
|
@ -1126,6 +1138,8 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
|
||||||
# Add the information to the appropriate tool
|
# Add the information to the appropriate tool
|
||||||
_ToolAppend(tools, 'VCCLCompilerTool',
|
_ToolAppend(tools, 'VCCLCompilerTool',
|
||||||
'AdditionalIncludeDirectories', include_dirs)
|
'AdditionalIncludeDirectories', include_dirs)
|
||||||
|
_ToolAppend(tools, 'VCMIDLTool',
|
||||||
|
'AdditionalIncludeDirectories', midl_include_dirs)
|
||||||
_ToolAppend(tools, 'VCResourceCompilerTool',
|
_ToolAppend(tools, 'VCResourceCompilerTool',
|
||||||
'AdditionalIncludeDirectories', resource_include_dirs)
|
'AdditionalIncludeDirectories', resource_include_dirs)
|
||||||
# Add in libraries.
|
# Add in libraries.
|
||||||
|
@ -1181,10 +1195,14 @@ def _GetIncludeDirs(config):
|
||||||
include_dirs = (
|
include_dirs = (
|
||||||
config.get('include_dirs', []) +
|
config.get('include_dirs', []) +
|
||||||
config.get('msvs_system_include_dirs', []))
|
config.get('msvs_system_include_dirs', []))
|
||||||
|
midl_include_dirs = (
|
||||||
|
config.get('midl_include_dirs', []) +
|
||||||
|
config.get('msvs_system_include_dirs', []))
|
||||||
resource_include_dirs = config.get('resource_include_dirs', include_dirs)
|
resource_include_dirs = config.get('resource_include_dirs', include_dirs)
|
||||||
include_dirs = _FixPaths(include_dirs)
|
include_dirs = _FixPaths(include_dirs)
|
||||||
|
midl_include_dirs = _FixPaths(midl_include_dirs)
|
||||||
resource_include_dirs = _FixPaths(resource_include_dirs)
|
resource_include_dirs = _FixPaths(resource_include_dirs)
|
||||||
return include_dirs, resource_include_dirs
|
return include_dirs, midl_include_dirs, resource_include_dirs
|
||||||
|
|
||||||
|
|
||||||
def _GetLibraryDirs(config):
|
def _GetLibraryDirs(config):
|
||||||
|
@ -1218,7 +1236,7 @@ def _GetLibraries(spec):
|
||||||
found = OrderedSet()
|
found = OrderedSet()
|
||||||
unique_libraries_list = []
|
unique_libraries_list = []
|
||||||
for entry in reversed(libraries):
|
for entry in reversed(libraries):
|
||||||
library = re.sub('^\-l', '', entry)
|
library = re.sub(r'^\-l', '', entry)
|
||||||
if not os.path.splitext(library)[1]:
|
if not os.path.splitext(library)[1]:
|
||||||
library += '.lib'
|
library += '.lib'
|
||||||
if library not in found:
|
if library not in found:
|
||||||
|
@ -1478,8 +1496,14 @@ def _AdjustSourcesAndConvertToFilterHierarchy(
|
||||||
|
|
||||||
# Prune filters with a single child to flatten ugly directory structures
|
# Prune filters with a single child to flatten ugly directory structures
|
||||||
# such as ../../src/modules/module1 etc.
|
# such as ../../src/modules/module1 etc.
|
||||||
while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
|
if version.UsesVcxproj():
|
||||||
sources = sources[0].contents
|
while all([isinstance(s, MSVSProject.Filter) for s in sources]) \
|
||||||
|
and len(set([s.name for s in sources])) == 1:
|
||||||
|
assert all([len(s.contents) == 1 for s in sources])
|
||||||
|
sources = [s.contents[0] for s in sources]
|
||||||
|
else:
|
||||||
|
while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
|
||||||
|
sources = sources[0].contents
|
||||||
|
|
||||||
return sources, excluded_sources, excluded_idl
|
return sources, excluded_sources, excluded_idl
|
||||||
|
|
||||||
|
@ -1815,7 +1839,7 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
|
||||||
return projects
|
return projects
|
||||||
|
|
||||||
|
|
||||||
def _InitNinjaFlavor(options, target_list, target_dicts):
|
def _InitNinjaFlavor(params, target_list, target_dicts):
|
||||||
"""Initialize targets for the ninja flavor.
|
"""Initialize targets for the ninja flavor.
|
||||||
|
|
||||||
This sets up the necessary variables in the targets to generate msvs projects
|
This sets up the necessary variables in the targets to generate msvs projects
|
||||||
|
@ -1823,7 +1847,7 @@ def _InitNinjaFlavor(options, target_list, target_dicts):
|
||||||
if they have not been set. This allows individual specs to override the
|
if they have not been set. This allows individual specs to override the
|
||||||
default values initialized here.
|
default values initialized here.
|
||||||
Arguments:
|
Arguments:
|
||||||
options: Options provided to the generator.
|
params: Params provided to the generator.
|
||||||
target_list: List of target pairs: 'base/base.gyp:base'.
|
target_list: List of target pairs: 'base/base.gyp:base'.
|
||||||
target_dicts: Dict of target properties keyed on target pair.
|
target_dicts: Dict of target properties keyed on target pair.
|
||||||
"""
|
"""
|
||||||
|
@ -1837,8 +1861,15 @@ def _InitNinjaFlavor(options, target_list, target_dicts):
|
||||||
|
|
||||||
spec['msvs_external_builder'] = 'ninja'
|
spec['msvs_external_builder'] = 'ninja'
|
||||||
if not spec.get('msvs_external_builder_out_dir'):
|
if not spec.get('msvs_external_builder_out_dir'):
|
||||||
spec['msvs_external_builder_out_dir'] = \
|
gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
|
||||||
options.depth + '/out/$(Configuration)'
|
gyp_dir = os.path.dirname(gyp_file)
|
||||||
|
configuration = '$(Configuration)'
|
||||||
|
if params.get('target_arch') == 'x64':
|
||||||
|
configuration += '_x64'
|
||||||
|
spec['msvs_external_builder_out_dir'] = os.path.join(
|
||||||
|
gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir),
|
||||||
|
ninja_generator.ComputeOutputDir(params),
|
||||||
|
configuration)
|
||||||
if not spec.get('msvs_external_builder_build_cmd'):
|
if not spec.get('msvs_external_builder_build_cmd'):
|
||||||
spec['msvs_external_builder_build_cmd'] = [
|
spec['msvs_external_builder_build_cmd'] = [
|
||||||
path_to_ninja,
|
path_to_ninja,
|
||||||
|
@ -1851,8 +1882,7 @@ def _InitNinjaFlavor(options, target_list, target_dicts):
|
||||||
path_to_ninja,
|
path_to_ninja,
|
||||||
'-C',
|
'-C',
|
||||||
'$(OutDir)',
|
'$(OutDir)',
|
||||||
'-t',
|
'-tclean',
|
||||||
'clean',
|
|
||||||
'$(ProjectName)',
|
'$(ProjectName)',
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@ -1933,7 +1963,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
|
|
||||||
# Optionally configure each spec to use ninja as the external builder.
|
# Optionally configure each spec to use ninja as the external builder.
|
||||||
if params.get('flavor') == 'ninja':
|
if params.get('flavor') == 'ninja':
|
||||||
_InitNinjaFlavor(options, target_list, target_dicts)
|
_InitNinjaFlavor(params, target_list, target_dicts)
|
||||||
|
|
||||||
# Prepare the set of configurations.
|
# Prepare the set of configurations.
|
||||||
configs = set()
|
configs = set()
|
||||||
|
@ -1986,7 +2016,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
|
|
||||||
|
|
||||||
def _GenerateMSBuildFiltersFile(filters_path, source_files,
|
def _GenerateMSBuildFiltersFile(filters_path, source_files,
|
||||||
extension_to_rule_name):
|
rule_dependencies, extension_to_rule_name):
|
||||||
"""Generate the filters file.
|
"""Generate the filters file.
|
||||||
|
|
||||||
This file is used by Visual Studio to organize the presentation of source
|
This file is used by Visual Studio to organize the presentation of source
|
||||||
|
@ -1999,8 +2029,8 @@ def _GenerateMSBuildFiltersFile(filters_path, source_files,
|
||||||
"""
|
"""
|
||||||
filter_group = []
|
filter_group = []
|
||||||
source_group = []
|
source_group = []
|
||||||
_AppendFiltersForMSBuild('', source_files, extension_to_rule_name,
|
_AppendFiltersForMSBuild('', source_files, rule_dependencies,
|
||||||
filter_group, source_group)
|
extension_to_rule_name, filter_group, source_group)
|
||||||
if filter_group:
|
if filter_group:
|
||||||
content = ['Project',
|
content = ['Project',
|
||||||
{'ToolsVersion': '4.0',
|
{'ToolsVersion': '4.0',
|
||||||
|
@ -2015,7 +2045,7 @@ def _GenerateMSBuildFiltersFile(filters_path, source_files,
|
||||||
os.unlink(filters_path)
|
os.unlink(filters_path)
|
||||||
|
|
||||||
|
|
||||||
def _AppendFiltersForMSBuild(parent_filter_name, sources,
|
def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
|
||||||
extension_to_rule_name,
|
extension_to_rule_name,
|
||||||
filter_group, source_group):
|
filter_group, source_group):
|
||||||
"""Creates the list of filters and sources to be added in the filter file.
|
"""Creates the list of filters and sources to be added in the filter file.
|
||||||
|
@ -2041,11 +2071,12 @@ def _AppendFiltersForMSBuild(parent_filter_name, sources,
|
||||||
['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
|
['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
|
||||||
# Recurse and add its dependents.
|
# Recurse and add its dependents.
|
||||||
_AppendFiltersForMSBuild(filter_name, source.contents,
|
_AppendFiltersForMSBuild(filter_name, source.contents,
|
||||||
extension_to_rule_name,
|
rule_dependencies, extension_to_rule_name,
|
||||||
filter_group, source_group)
|
filter_group, source_group)
|
||||||
else:
|
else:
|
||||||
# It's a source. Create a source entry.
|
# It's a source. Create a source entry.
|
||||||
_, element = _MapFileToMsBuildSourceType(source, extension_to_rule_name)
|
_, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
|
||||||
|
extension_to_rule_name)
|
||||||
source_entry = [element, {'Include': source}]
|
source_entry = [element, {'Include': source}]
|
||||||
# Specify the filter it is part of, if any.
|
# Specify the filter it is part of, if any.
|
||||||
if parent_filter_name:
|
if parent_filter_name:
|
||||||
|
@ -2053,7 +2084,8 @@ def _AppendFiltersForMSBuild(parent_filter_name, sources,
|
||||||
source_group.append(source_entry)
|
source_group.append(source_entry)
|
||||||
|
|
||||||
|
|
||||||
def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
|
def _MapFileToMsBuildSourceType(source, rule_dependencies,
|
||||||
|
extension_to_rule_name):
|
||||||
"""Returns the group and element type of the source file.
|
"""Returns the group and element type of the source file.
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
|
@ -2076,9 +2108,15 @@ def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
|
||||||
elif ext == '.rc':
|
elif ext == '.rc':
|
||||||
group = 'resource'
|
group = 'resource'
|
||||||
element = 'ResourceCompile'
|
element = 'ResourceCompile'
|
||||||
|
elif ext == '.asm':
|
||||||
|
group = 'masm'
|
||||||
|
element = 'MASM'
|
||||||
elif ext == '.idl':
|
elif ext == '.idl':
|
||||||
group = 'midl'
|
group = 'midl'
|
||||||
element = 'Midl'
|
element = 'Midl'
|
||||||
|
elif source in rule_dependencies:
|
||||||
|
group = 'rule_dependency'
|
||||||
|
element = 'CustomBuild'
|
||||||
else:
|
else:
|
||||||
group = 'none'
|
group = 'none'
|
||||||
element = 'None'
|
element = 'None'
|
||||||
|
@ -2088,7 +2126,8 @@ def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
|
||||||
def _GenerateRulesForMSBuild(output_dir, options, spec,
|
def _GenerateRulesForMSBuild(output_dir, options, spec,
|
||||||
sources, excluded_sources,
|
sources, excluded_sources,
|
||||||
props_files_of_rules, targets_files_of_rules,
|
props_files_of_rules, targets_files_of_rules,
|
||||||
actions_to_add, extension_to_rule_name):
|
actions_to_add, rule_dependencies,
|
||||||
|
extension_to_rule_name):
|
||||||
# MSBuild rules are implemented using three files: an XML file, a .targets
|
# MSBuild rules are implemented using three files: an XML file, a .targets
|
||||||
# file and a .props file.
|
# file and a .props file.
|
||||||
# See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
|
# See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
|
||||||
|
@ -2104,6 +2143,7 @@ def _GenerateRulesForMSBuild(output_dir, options, spec,
|
||||||
continue
|
continue
|
||||||
msbuild_rule = MSBuildRule(rule, spec)
|
msbuild_rule = MSBuildRule(rule, spec)
|
||||||
msbuild_rules.append(msbuild_rule)
|
msbuild_rules.append(msbuild_rule)
|
||||||
|
rule_dependencies.update(msbuild_rule.additional_dependencies.split(';'))
|
||||||
extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
|
extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
|
||||||
if msbuild_rules:
|
if msbuild_rules:
|
||||||
base = spec['target_name'] + options.suffix
|
base = spec['target_name'] + options.suffix
|
||||||
|
@ -2125,7 +2165,7 @@ def _GenerateRulesForMSBuild(output_dir, options, spec,
|
||||||
if rules_external:
|
if rules_external:
|
||||||
_GenerateExternalRules(rules_external, output_dir, spec,
|
_GenerateExternalRules(rules_external, output_dir, spec,
|
||||||
sources, options, actions_to_add)
|
sources, options, actions_to_add)
|
||||||
_AdjustSourcesForRules(spec, rules, sources, excluded_sources)
|
_AdjustSourcesForRules(rules, sources, excluded_sources, True)
|
||||||
|
|
||||||
|
|
||||||
class MSBuildRule(object):
|
class MSBuildRule(object):
|
||||||
|
@ -2578,14 +2618,30 @@ def _GetMSBuildProjectConfigurations(configurations):
|
||||||
|
|
||||||
def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
|
def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
|
||||||
namespace = os.path.splitext(gyp_file_name)[0]
|
namespace = os.path.splitext(gyp_file_name)[0]
|
||||||
return [
|
properties = [
|
||||||
['PropertyGroup', {'Label': 'Globals'},
|
['PropertyGroup', {'Label': 'Globals'},
|
||||||
['ProjectGuid', guid],
|
['ProjectGuid', guid],
|
||||||
['Keyword', 'Win32Proj'],
|
['Keyword', 'Win32Proj'],
|
||||||
['RootNamespace', namespace],
|
['RootNamespace', namespace],
|
||||||
|
['IgnoreWarnCompileDuplicatedFilename', 'true'],
|
||||||
]
|
]
|
||||||
]
|
]
|
||||||
|
|
||||||
|
if os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or \
|
||||||
|
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64':
|
||||||
|
properties[0].append(['PreferredToolArchitecture', 'x64'])
|
||||||
|
|
||||||
|
if spec.get('msvs_enable_winrt'):
|
||||||
|
properties[0].append(['DefaultLanguage', 'en-US'])
|
||||||
|
properties[0].append(['AppContainerApplication', 'true'])
|
||||||
|
properties[0].append(['ApplicationTypeRevision', '8.1'])
|
||||||
|
|
||||||
|
if spec.get('msvs_enable_winphone'):
|
||||||
|
properties[0].append(['ApplicationType', 'Windows Phone'])
|
||||||
|
else:
|
||||||
|
properties[0].append(['ApplicationType', 'Windows Store'])
|
||||||
|
|
||||||
|
return properties
|
||||||
|
|
||||||
def _GetMSBuildConfigurationDetails(spec, build_file):
|
def _GetMSBuildConfigurationDetails(spec, build_file):
|
||||||
properties = {}
|
properties = {}
|
||||||
|
@ -2596,8 +2652,9 @@ def _GetMSBuildConfigurationDetails(spec, build_file):
|
||||||
_AddConditionalProperty(properties, condition, 'ConfigurationType',
|
_AddConditionalProperty(properties, condition, 'ConfigurationType',
|
||||||
msbuild_attributes['ConfigurationType'])
|
msbuild_attributes['ConfigurationType'])
|
||||||
if character_set:
|
if character_set:
|
||||||
_AddConditionalProperty(properties, condition, 'CharacterSet',
|
if 'msvs_enable_winrt' not in spec :
|
||||||
character_set)
|
_AddConditionalProperty(properties, condition, 'CharacterSet',
|
||||||
|
character_set)
|
||||||
return _GetMSBuildPropertyGroup(spec, 'Configuration', properties)
|
return _GetMSBuildPropertyGroup(spec, 'Configuration', properties)
|
||||||
|
|
||||||
|
|
||||||
|
@ -2720,9 +2777,6 @@ def _GetMSBuildAttributes(spec, config, build_file):
|
||||||
product_name = spec.get('product_name', '$(ProjectName)')
|
product_name = spec.get('product_name', '$(ProjectName)')
|
||||||
target_name = prefix + product_name
|
target_name = prefix + product_name
|
||||||
msbuild_attributes['TargetName'] = target_name
|
msbuild_attributes['TargetName'] = target_name
|
||||||
if 'TargetExt' not in msbuild_attributes and 'product_extension' in spec:
|
|
||||||
ext = spec.get('product_extension')
|
|
||||||
msbuild_attributes['TargetExt'] = '.' + ext
|
|
||||||
|
|
||||||
if spec.get('msvs_external_builder'):
|
if spec.get('msvs_external_builder'):
|
||||||
external_out_dir = spec.get('msvs_external_builder_out_dir', '.')
|
external_out_dir = spec.get('msvs_external_builder_out_dir', '.')
|
||||||
|
@ -2776,9 +2830,6 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
|
||||||
attributes['OutputDirectory'])
|
attributes['OutputDirectory'])
|
||||||
_AddConditionalProperty(properties, condition, 'TargetName',
|
_AddConditionalProperty(properties, condition, 'TargetName',
|
||||||
attributes['TargetName'])
|
attributes['TargetName'])
|
||||||
if 'TargetExt' in attributes:
|
|
||||||
_AddConditionalProperty(properties, condition, 'TargetExt',
|
|
||||||
attributes['TargetExt'])
|
|
||||||
|
|
||||||
if attributes.get('TargetPath'):
|
if attributes.get('TargetPath'):
|
||||||
_AddConditionalProperty(properties, condition, 'TargetPath',
|
_AddConditionalProperty(properties, condition, 'TargetPath',
|
||||||
|
@ -2818,7 +2869,7 @@ def _AddConditionalProperty(properties, condition, name, value):
|
||||||
|
|
||||||
|
|
||||||
# Regex for msvs variable references ( i.e. $(FOO) ).
|
# Regex for msvs variable references ( i.e. $(FOO) ).
|
||||||
MSVS_VARIABLE_REFERENCE = re.compile('\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
|
MSVS_VARIABLE_REFERENCE = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
|
||||||
|
|
||||||
|
|
||||||
def _GetMSBuildPropertyGroup(spec, label, properties):
|
def _GetMSBuildPropertyGroup(spec, label, properties):
|
||||||
|
@ -2902,7 +2953,8 @@ def _FinalizeMSBuildSettings(spec, configuration):
|
||||||
converted = True
|
converted = True
|
||||||
msvs_settings = configuration.get('msvs_settings', {})
|
msvs_settings = configuration.get('msvs_settings', {})
|
||||||
msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
|
msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
|
||||||
include_dirs, resource_include_dirs = _GetIncludeDirs(configuration)
|
include_dirs, midl_include_dirs, resource_include_dirs = \
|
||||||
|
_GetIncludeDirs(configuration)
|
||||||
libraries = _GetLibraries(spec)
|
libraries = _GetLibraries(spec)
|
||||||
library_dirs = _GetLibraryDirs(configuration)
|
library_dirs = _GetLibraryDirs(configuration)
|
||||||
out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
|
out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
|
||||||
|
@ -2912,7 +2964,7 @@ def _FinalizeMSBuildSettings(spec, configuration):
|
||||||
# Visual Studio 2010 has TR1
|
# Visual Studio 2010 has TR1
|
||||||
defines = [d for d in defines if d != '_HAS_TR1=0']
|
defines = [d for d in defines if d != '_HAS_TR1=0']
|
||||||
# Warn of ignored settings
|
# Warn of ignored settings
|
||||||
ignored_settings = ['msvs_prebuild', 'msvs_postbuild', 'msvs_tool_files']
|
ignored_settings = ['msvs_tool_files']
|
||||||
for ignored_setting in ignored_settings:
|
for ignored_setting in ignored_settings:
|
||||||
value = configuration.get(ignored_setting)
|
value = configuration.get(ignored_setting)
|
||||||
if value:
|
if value:
|
||||||
|
@ -2921,9 +2973,8 @@ def _FinalizeMSBuildSettings(spec, configuration):
|
||||||
|
|
||||||
defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
|
defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
|
||||||
disabled_warnings = _GetDisabledWarnings(configuration)
|
disabled_warnings = _GetDisabledWarnings(configuration)
|
||||||
# TODO(jeanluc) Validate & warn that we don't translate
|
prebuild = configuration.get('msvs_prebuild')
|
||||||
# prebuild = configuration.get('msvs_prebuild')
|
postbuild = configuration.get('msvs_postbuild')
|
||||||
# postbuild = configuration.get('msvs_postbuild')
|
|
||||||
def_file = _GetModuleDefinition(spec)
|
def_file = _GetModuleDefinition(spec)
|
||||||
precompiled_header = configuration.get('msvs_precompiled_header')
|
precompiled_header = configuration.get('msvs_precompiled_header')
|
||||||
|
|
||||||
|
@ -2933,6 +2984,8 @@ def _FinalizeMSBuildSettings(spec, configuration):
|
||||||
# if you don't have any resources.
|
# if you don't have any resources.
|
||||||
_ToolAppend(msbuild_settings, 'ClCompile',
|
_ToolAppend(msbuild_settings, 'ClCompile',
|
||||||
'AdditionalIncludeDirectories', include_dirs)
|
'AdditionalIncludeDirectories', include_dirs)
|
||||||
|
_ToolAppend(msbuild_settings, 'Midl',
|
||||||
|
'AdditionalIncludeDirectories', midl_include_dirs)
|
||||||
_ToolAppend(msbuild_settings, 'ResourceCompile',
|
_ToolAppend(msbuild_settings, 'ResourceCompile',
|
||||||
'AdditionalIncludeDirectories', resource_include_dirs)
|
'AdditionalIncludeDirectories', resource_include_dirs)
|
||||||
# Add in libraries, note that even for empty libraries, we want this
|
# Add in libraries, note that even for empty libraries, we want this
|
||||||
|
@ -2963,6 +3016,13 @@ def _FinalizeMSBuildSettings(spec, configuration):
|
||||||
'PrecompiledHeaderFile', precompiled_header)
|
'PrecompiledHeaderFile', precompiled_header)
|
||||||
_ToolAppend(msbuild_settings, 'ClCompile',
|
_ToolAppend(msbuild_settings, 'ClCompile',
|
||||||
'ForcedIncludeFiles', [precompiled_header])
|
'ForcedIncludeFiles', [precompiled_header])
|
||||||
|
else:
|
||||||
|
_ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'NotUsing')
|
||||||
|
# Turn off WinRT compilation
|
||||||
|
_ToolAppend(msbuild_settings, 'ClCompile', 'CompileAsWinRT', 'false')
|
||||||
|
# Turn on import libraries if appropriate
|
||||||
|
if spec.get('msvs_requires_importlibrary'):
|
||||||
|
_ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'false')
|
||||||
# Loadable modules don't generate import libraries;
|
# Loadable modules don't generate import libraries;
|
||||||
# tell dependent projects to not expect one.
|
# tell dependent projects to not expect one.
|
||||||
if spec['type'] == 'loadable_module':
|
if spec['type'] == 'loadable_module':
|
||||||
|
@ -2971,6 +3031,10 @@ def _FinalizeMSBuildSettings(spec, configuration):
|
||||||
if def_file:
|
if def_file:
|
||||||
_ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file)
|
_ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file)
|
||||||
configuration['finalized_msbuild_settings'] = msbuild_settings
|
configuration['finalized_msbuild_settings'] = msbuild_settings
|
||||||
|
if prebuild:
|
||||||
|
_ToolAppend(msbuild_settings, 'PreBuildEvent', 'Command', prebuild)
|
||||||
|
if postbuild:
|
||||||
|
_ToolAppend(msbuild_settings, 'PostBuildEvent', 'Command', postbuild)
|
||||||
|
|
||||||
|
|
||||||
def _GetValueFormattedForMSBuild(tool_name, name, value):
|
def _GetValueFormattedForMSBuild(tool_name, name, value):
|
||||||
|
@ -3026,15 +3090,18 @@ def _VerifySourcesExist(sources, root_dir):
|
||||||
return missing_sources
|
return missing_sources
|
||||||
|
|
||||||
|
|
||||||
def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name,
|
def _GetMSBuildSources(spec, sources, exclusions, rule_dependencies,
|
||||||
actions_spec, sources_handled_by_action, list_excluded):
|
extension_to_rule_name, actions_spec,
|
||||||
groups = ['none', 'midl', 'include', 'compile', 'resource', 'rule']
|
sources_handled_by_action, list_excluded):
|
||||||
|
groups = ['none', 'masm', 'midl', 'include', 'compile', 'resource', 'rule',
|
||||||
|
'rule_dependency']
|
||||||
grouped_sources = {}
|
grouped_sources = {}
|
||||||
for g in groups:
|
for g in groups:
|
||||||
grouped_sources[g] = []
|
grouped_sources[g] = []
|
||||||
|
|
||||||
_AddSources2(spec, sources, exclusions, grouped_sources,
|
_AddSources2(spec, sources, exclusions, grouped_sources,
|
||||||
extension_to_rule_name, sources_handled_by_action, list_excluded)
|
rule_dependencies, extension_to_rule_name,
|
||||||
|
sources_handled_by_action, list_excluded)
|
||||||
sources = []
|
sources = []
|
||||||
for g in groups:
|
for g in groups:
|
||||||
if grouped_sources[g]:
|
if grouped_sources[g]:
|
||||||
|
@ -3045,13 +3112,15 @@ def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name,
|
||||||
|
|
||||||
|
|
||||||
def _AddSources2(spec, sources, exclusions, grouped_sources,
|
def _AddSources2(spec, sources, exclusions, grouped_sources,
|
||||||
extension_to_rule_name, sources_handled_by_action,
|
rule_dependencies, extension_to_rule_name,
|
||||||
|
sources_handled_by_action,
|
||||||
list_excluded):
|
list_excluded):
|
||||||
extensions_excluded_from_precompile = []
|
extensions_excluded_from_precompile = []
|
||||||
for source in sources:
|
for source in sources:
|
||||||
if isinstance(source, MSVSProject.Filter):
|
if isinstance(source, MSVSProject.Filter):
|
||||||
_AddSources2(spec, source.contents, exclusions, grouped_sources,
|
_AddSources2(spec, source.contents, exclusions, grouped_sources,
|
||||||
extension_to_rule_name, sources_handled_by_action,
|
rule_dependencies, extension_to_rule_name,
|
||||||
|
sources_handled_by_action,
|
||||||
list_excluded)
|
list_excluded)
|
||||||
else:
|
else:
|
||||||
if not source in sources_handled_by_action:
|
if not source in sources_handled_by_action:
|
||||||
|
@ -3094,7 +3163,7 @@ def _AddSources2(spec, sources, exclusions, grouped_sources,
|
||||||
detail.append(['PrecompiledHeader', ''])
|
detail.append(['PrecompiledHeader', ''])
|
||||||
detail.append(['ForcedIncludeFiles', ''])
|
detail.append(['ForcedIncludeFiles', ''])
|
||||||
|
|
||||||
group, element = _MapFileToMsBuildSourceType(source,
|
group, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
|
||||||
extension_to_rule_name)
|
extension_to_rule_name)
|
||||||
grouped_sources[group].append([element, {'Include': source}] + detail)
|
grouped_sources[group].append([element, {'Include': source}] + detail)
|
||||||
|
|
||||||
|
@ -3138,6 +3207,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
|
||||||
actions_to_add = {}
|
actions_to_add = {}
|
||||||
props_files_of_rules = set()
|
props_files_of_rules = set()
|
||||||
targets_files_of_rules = set()
|
targets_files_of_rules = set()
|
||||||
|
rule_dependencies = set()
|
||||||
extension_to_rule_name = {}
|
extension_to_rule_name = {}
|
||||||
list_excluded = generator_flags.get('msvs_list_excluded_files', True)
|
list_excluded = generator_flags.get('msvs_list_excluded_files', True)
|
||||||
|
|
||||||
|
@ -3146,10 +3216,11 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
|
||||||
_GenerateRulesForMSBuild(project_dir, options, spec,
|
_GenerateRulesForMSBuild(project_dir, options, spec,
|
||||||
sources, excluded_sources,
|
sources, excluded_sources,
|
||||||
props_files_of_rules, targets_files_of_rules,
|
props_files_of_rules, targets_files_of_rules,
|
||||||
actions_to_add, extension_to_rule_name)
|
actions_to_add, rule_dependencies,
|
||||||
|
extension_to_rule_name)
|
||||||
else:
|
else:
|
||||||
rules = spec.get('rules', [])
|
rules = spec.get('rules', [])
|
||||||
_AdjustSourcesForRules(spec, rules, sources, excluded_sources)
|
_AdjustSourcesForRules(rules, sources, excluded_sources, True)
|
||||||
|
|
||||||
sources, excluded_sources, excluded_idl = (
|
sources, excluded_sources, excluded_idl = (
|
||||||
_AdjustSourcesAndConvertToFilterHierarchy(spec, options,
|
_AdjustSourcesAndConvertToFilterHierarchy(spec, options,
|
||||||
|
@ -3172,6 +3243,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
|
||||||
spec, actions_to_add)
|
spec, actions_to_add)
|
||||||
|
|
||||||
_GenerateMSBuildFiltersFile(project.path + '.filters', sources,
|
_GenerateMSBuildFiltersFile(project.path + '.filters', sources,
|
||||||
|
rule_dependencies,
|
||||||
extension_to_rule_name)
|
extension_to_rule_name)
|
||||||
missing_sources = _VerifySourcesExist(sources, project_dir)
|
missing_sources = _VerifySourcesExist(sources, project_dir)
|
||||||
|
|
||||||
|
@ -3186,6 +3258,12 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
|
||||||
['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]]
|
['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]]
|
||||||
import_cpp_targets_section = [
|
import_cpp_targets_section = [
|
||||||
['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
|
['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
|
||||||
|
import_masm_props_section = [
|
||||||
|
['Import',
|
||||||
|
{'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.props'}]]
|
||||||
|
import_masm_targets_section = [
|
||||||
|
['Import',
|
||||||
|
{'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]]
|
||||||
macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]
|
macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]
|
||||||
|
|
||||||
content = [
|
content = [
|
||||||
|
@ -3199,8 +3277,12 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
|
||||||
content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
|
content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
|
||||||
content += import_default_section
|
content += import_default_section
|
||||||
content += _GetMSBuildConfigurationDetails(spec, project.build_file)
|
content += _GetMSBuildConfigurationDetails(spec, project.build_file)
|
||||||
content += _GetMSBuildLocalProperties(project.msbuild_toolset)
|
if spec.get('msvs_enable_winphone'):
|
||||||
|
content += _GetMSBuildLocalProperties('v120_wp81')
|
||||||
|
else:
|
||||||
|
content += _GetMSBuildLocalProperties(project.msbuild_toolset)
|
||||||
content += import_cpp_props_section
|
content += import_cpp_props_section
|
||||||
|
content += import_masm_props_section
|
||||||
content += _GetMSBuildExtensions(props_files_of_rules)
|
content += _GetMSBuildExtensions(props_files_of_rules)
|
||||||
content += _GetMSBuildPropertySheets(configurations)
|
content += _GetMSBuildPropertySheets(configurations)
|
||||||
content += macro_section
|
content += macro_section
|
||||||
|
@ -3208,10 +3290,11 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
|
||||||
project.build_file)
|
project.build_file)
|
||||||
content += _GetMSBuildToolSettingsSections(spec, configurations)
|
content += _GetMSBuildToolSettingsSections(spec, configurations)
|
||||||
content += _GetMSBuildSources(
|
content += _GetMSBuildSources(
|
||||||
spec, sources, exclusions, extension_to_rule_name, actions_spec,
|
spec, sources, exclusions, rule_dependencies, extension_to_rule_name,
|
||||||
sources_handled_by_action, list_excluded)
|
actions_spec, sources_handled_by_action, list_excluded)
|
||||||
content += _GetMSBuildProjectReferences(project)
|
content += _GetMSBuildProjectReferences(project)
|
||||||
content += import_cpp_targets_section
|
content += import_cpp_targets_section
|
||||||
|
content += import_masm_targets_section
|
||||||
content += _GetMSBuildExtensionTargets(targets_files_of_rules)
|
content += _GetMSBuildExtensionTargets(targets_files_of_rules)
|
||||||
|
|
||||||
if spec.get('msvs_external_builder'):
|
if spec.get('msvs_external_builder'):
|
||||||
|
@ -3228,7 +3311,9 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
|
||||||
def _GetMSBuildExternalBuilderTargets(spec):
|
def _GetMSBuildExternalBuilderTargets(spec):
|
||||||
"""Return a list of MSBuild targets for external builders.
|
"""Return a list of MSBuild targets for external builders.
|
||||||
|
|
||||||
Right now, only "Build" and "Clean" targets are generated.
|
The "Build" and "Clean" targets are always generated. If the spec contains
|
||||||
|
'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
|
||||||
|
be generated, to support building selected C/C++ files.
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
spec: The gyp target spec.
|
spec: The gyp target spec.
|
||||||
|
@ -3247,7 +3332,17 @@ def _GetMSBuildExternalBuilderTargets(spec):
|
||||||
clean_target = ['Target', {'Name': 'Clean'}]
|
clean_target = ['Target', {'Name': 'Clean'}]
|
||||||
clean_target.append(['Exec', {'Command': clean_cmd}])
|
clean_target.append(['Exec', {'Command': clean_cmd}])
|
||||||
|
|
||||||
return [build_target, clean_target]
|
targets = [build_target, clean_target]
|
||||||
|
|
||||||
|
if spec.get('msvs_external_builder_clcompile_cmd'):
|
||||||
|
clcompile_cmd = _BuildCommandLineForRuleRaw(
|
||||||
|
spec, spec['msvs_external_builder_clcompile_cmd'],
|
||||||
|
False, False, False, False)
|
||||||
|
clcompile_target = ['Target', {'Name': 'ClCompile'}]
|
||||||
|
clcompile_target.append(['Exec', {'Command': clcompile_cmd}])
|
||||||
|
targets.append(clcompile_target)
|
||||||
|
|
||||||
|
return targets
|
||||||
|
|
||||||
|
|
||||||
def _GetMSBuildExtensions(props_files_of_rules):
|
def _GetMSBuildExtensions(props_files_of_rules):
|
||||||
|
@ -3301,8 +3396,8 @@ def _GenerateActionsForMSBuild(spec, actions_to_add):
|
||||||
# get too long. See also _AddActions: cygwin's setup_env mustn't be called
|
# get too long. See also _AddActions: cygwin's setup_env mustn't be called
|
||||||
# for every invocation or the command that sets the PATH will grow too
|
# for every invocation or the command that sets the PATH will grow too
|
||||||
# long.
|
# long.
|
||||||
command = (
|
command = '\r\n'.join([c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%'
|
||||||
'\r\nif %errorlevel% neq 0 exit /b %errorlevel%\r\n'.join(commands))
|
for c in commands])
|
||||||
_AddMSBuildAction(spec,
|
_AddMSBuildAction(spec,
|
||||||
primary_input,
|
primary_input,
|
||||||
inputs,
|
inputs,
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
# Use of this source code is governed by a BSD-style license that can be
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
# found in the LICENSE file.
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import collections
|
||||||
import copy
|
import copy
|
||||||
import hashlib
|
import hashlib
|
||||||
import json
|
import json
|
||||||
|
@ -13,6 +14,7 @@ import subprocess
|
||||||
import sys
|
import sys
|
||||||
import gyp
|
import gyp
|
||||||
import gyp.common
|
import gyp.common
|
||||||
|
from gyp.common import OrderedSet
|
||||||
import gyp.msvs_emulation
|
import gyp.msvs_emulation
|
||||||
import gyp.MSVSUtil as MSVSUtil
|
import gyp.MSVSUtil as MSVSUtil
|
||||||
import gyp.xcode_emulation
|
import gyp.xcode_emulation
|
||||||
|
@ -60,17 +62,7 @@ generator_additional_path_sections = []
|
||||||
generator_extra_sources_for_rules = []
|
generator_extra_sources_for_rules = []
|
||||||
generator_filelist_paths = None
|
generator_filelist_paths = None
|
||||||
|
|
||||||
# TODO: figure out how to not build extra host objects in the non-cross-compile
|
generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
|
||||||
# case when this is enabled, and enable unconditionally.
|
|
||||||
generator_supports_multiple_toolsets = (
|
|
||||||
os.environ.get('GYP_CROSSCOMPILE') or
|
|
||||||
os.environ.get('AR_host') or
|
|
||||||
os.environ.get('CC_host') or
|
|
||||||
os.environ.get('CXX_host') or
|
|
||||||
os.environ.get('AR_target') or
|
|
||||||
os.environ.get('CC_target') or
|
|
||||||
os.environ.get('CXX_target'))
|
|
||||||
|
|
||||||
|
|
||||||
def StripPrefix(arg, prefix):
|
def StripPrefix(arg, prefix):
|
||||||
if arg.startswith(prefix):
|
if arg.startswith(prefix):
|
||||||
|
@ -106,7 +98,7 @@ def AddArch(output, arch):
|
||||||
return '%s.%s%s' % (output, arch, extension)
|
return '%s.%s%s' % (output, arch, extension)
|
||||||
|
|
||||||
|
|
||||||
class Target:
|
class Target(object):
|
||||||
"""Target represents the paths used within a single gyp target.
|
"""Target represents the paths used within a single gyp target.
|
||||||
|
|
||||||
Conceptually, building a single target A is a series of steps:
|
Conceptually, building a single target A is a series of steps:
|
||||||
|
@ -210,8 +202,8 @@ class Target:
|
||||||
# an output file; the result can be namespaced such that it is unique
|
# an output file; the result can be namespaced such that it is unique
|
||||||
# to the input file name as well as the output target name.
|
# to the input file name as well as the output target name.
|
||||||
|
|
||||||
class NinjaWriter:
|
class NinjaWriter(object):
|
||||||
def __init__(self, qualified_target, target_outputs, base_dir, build_dir,
|
def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir,
|
||||||
output_file, toplevel_build, output_file_name, flavor,
|
output_file, toplevel_build, output_file_name, flavor,
|
||||||
toplevel_dir=None):
|
toplevel_dir=None):
|
||||||
"""
|
"""
|
||||||
|
@ -221,7 +213,7 @@ class NinjaWriter:
|
||||||
toplevel_dir: path to the toplevel directory
|
toplevel_dir: path to the toplevel directory
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self.qualified_target = qualified_target
|
self.hash_for_rules = hash_for_rules
|
||||||
self.target_outputs = target_outputs
|
self.target_outputs = target_outputs
|
||||||
self.base_dir = base_dir
|
self.base_dir = base_dir
|
||||||
self.build_dir = build_dir
|
self.build_dir = build_dir
|
||||||
|
@ -338,12 +330,15 @@ class NinjaWriter:
|
||||||
obj += '.' + self.toolset
|
obj += '.' + self.toolset
|
||||||
|
|
||||||
path_dir, path_basename = os.path.split(path)
|
path_dir, path_basename = os.path.split(path)
|
||||||
|
assert not os.path.isabs(path_dir), (
|
||||||
|
"'%s' can not be absolute path (see crbug.com/462153)." % path_dir)
|
||||||
|
|
||||||
if qualified:
|
if qualified:
|
||||||
path_basename = self.name + '.' + path_basename
|
path_basename = self.name + '.' + path_basename
|
||||||
return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
|
return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
|
||||||
path_basename))
|
path_basename))
|
||||||
|
|
||||||
def WriteCollapsedDependencies(self, name, targets):
|
def WriteCollapsedDependencies(self, name, targets, order_only=None):
|
||||||
"""Given a list of targets, return a path for a single file
|
"""Given a list of targets, return a path for a single file
|
||||||
representing the result of building all the targets or None.
|
representing the result of building all the targets or None.
|
||||||
|
|
||||||
|
@ -351,10 +346,11 @@ class NinjaWriter:
|
||||||
|
|
||||||
assert targets == filter(None, targets), targets
|
assert targets == filter(None, targets), targets
|
||||||
if len(targets) == 0:
|
if len(targets) == 0:
|
||||||
|
assert not order_only
|
||||||
return None
|
return None
|
||||||
if len(targets) > 1:
|
if len(targets) > 1 or order_only:
|
||||||
stamp = self.GypPathToUniqueOutput(name + '.stamp')
|
stamp = self.GypPathToUniqueOutput(name + '.stamp')
|
||||||
targets = self.ninja.build(stamp, 'stamp', targets)
|
targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
|
||||||
self.ninja.newline()
|
self.ninja.newline()
|
||||||
return targets[0]
|
return targets[0]
|
||||||
|
|
||||||
|
@ -391,6 +387,9 @@ class NinjaWriter:
|
||||||
self.ninja.variable('arch', self.win_env[arch])
|
self.ninja.variable('arch', self.win_env[arch])
|
||||||
self.ninja.variable('cc', '$cl_' + arch)
|
self.ninja.variable('cc', '$cl_' + arch)
|
||||||
self.ninja.variable('cxx', '$cl_' + arch)
|
self.ninja.variable('cxx', '$cl_' + arch)
|
||||||
|
self.ninja.variable('cc_host', '$cl_' + arch)
|
||||||
|
self.ninja.variable('cxx_host', '$cl_' + arch)
|
||||||
|
self.ninja.variable('asm', '$ml_' + arch)
|
||||||
|
|
||||||
if self.flavor == 'mac':
|
if self.flavor == 'mac':
|
||||||
self.archs = self.xcode_settings.GetActiveArchs(config_name)
|
self.archs = self.xcode_settings.GetActiveArchs(config_name)
|
||||||
|
@ -472,6 +471,8 @@ class NinjaWriter:
|
||||||
else:
|
else:
|
||||||
print "Warning: Actions/rules writing object files don't work with " \
|
print "Warning: Actions/rules writing object files don't work with " \
|
||||||
"multiarch targets, dropping. (target %s)" % spec['target_name']
|
"multiarch targets, dropping. (target %s)" % spec['target_name']
|
||||||
|
elif self.flavor == 'mac' and len(self.archs) > 1:
|
||||||
|
link_deps = collections.defaultdict(list)
|
||||||
|
|
||||||
|
|
||||||
if self.flavor == 'win' and self.target.type == 'static_library':
|
if self.flavor == 'win' and self.target.type == 'static_library':
|
||||||
|
@ -523,7 +524,7 @@ class NinjaWriter:
|
||||||
def WriteWinIdlFiles(self, spec, prebuild):
|
def WriteWinIdlFiles(self, spec, prebuild):
|
||||||
"""Writes rules to match MSVS's implicit idl handling."""
|
"""Writes rules to match MSVS's implicit idl handling."""
|
||||||
assert self.flavor == 'win'
|
assert self.flavor == 'win'
|
||||||
if self.msvs_settings.HasExplicitIdlRules(spec):
|
if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
|
||||||
return []
|
return []
|
||||||
outputs = []
|
outputs = []
|
||||||
for source in filter(lambda x: x.endswith('.idl'), spec['sources']):
|
for source in filter(lambda x: x.endswith('.idl'), spec['sources']):
|
||||||
|
@ -557,9 +558,10 @@ class NinjaWriter:
|
||||||
stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
|
stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
|
||||||
|
|
||||||
if self.is_mac_bundle:
|
if self.is_mac_bundle:
|
||||||
self.WriteMacBundleResources(
|
xcassets = self.WriteMacBundleResources(
|
||||||
extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
|
extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
|
||||||
self.WriteMacInfoPlist(mac_bundle_depends)
|
partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
|
||||||
|
self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)
|
||||||
|
|
||||||
return stamp
|
return stamp
|
||||||
|
|
||||||
|
@ -580,23 +582,24 @@ class NinjaWriter:
|
||||||
def WriteActions(self, actions, extra_sources, prebuild,
|
def WriteActions(self, actions, extra_sources, prebuild,
|
||||||
extra_mac_bundle_resources):
|
extra_mac_bundle_resources):
|
||||||
# Actions cd into the base directory.
|
# Actions cd into the base directory.
|
||||||
env = self.GetSortedXcodeEnv()
|
env = self.GetToolchainEnv()
|
||||||
if self.flavor == 'win':
|
|
||||||
env = self.msvs_settings.GetVSMacroEnv(
|
|
||||||
'$!PRODUCT_DIR', config=self.config_name)
|
|
||||||
all_outputs = []
|
all_outputs = []
|
||||||
for action in actions:
|
for action in actions:
|
||||||
# First write out a rule for the action.
|
# First write out a rule for the action.
|
||||||
name = '%s_%s' % (action['action_name'],
|
name = '%s_%s' % (action['action_name'], self.hash_for_rules)
|
||||||
hashlib.md5(self.qualified_target).hexdigest())
|
|
||||||
description = self.GenerateDescription('ACTION',
|
description = self.GenerateDescription('ACTION',
|
||||||
action.get('message', None),
|
action.get('message', None),
|
||||||
name)
|
name)
|
||||||
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
|
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
|
||||||
if self.flavor == 'win' else False)
|
if self.flavor == 'win' else False)
|
||||||
args = action['action']
|
args = action['action']
|
||||||
|
depfile = action.get('depfile', None)
|
||||||
|
if depfile:
|
||||||
|
depfile = self.ExpandSpecial(depfile, self.base_to_build)
|
||||||
|
pool = 'console' if int(action.get('ninja_use_console', 0)) else None
|
||||||
rule_name, _ = self.WriteNewNinjaRule(name, args, description,
|
rule_name, _ = self.WriteNewNinjaRule(name, args, description,
|
||||||
is_cygwin, env=env)
|
is_cygwin, env, pool,
|
||||||
|
depfile=depfile)
|
||||||
|
|
||||||
inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
|
inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
|
||||||
if int(action.get('process_outputs_as_sources', False)):
|
if int(action.get('process_outputs_as_sources', False)):
|
||||||
|
@ -616,15 +619,16 @@ class NinjaWriter:
|
||||||
|
|
||||||
def WriteRules(self, rules, extra_sources, prebuild,
|
def WriteRules(self, rules, extra_sources, prebuild,
|
||||||
mac_bundle_resources, extra_mac_bundle_resources):
|
mac_bundle_resources, extra_mac_bundle_resources):
|
||||||
env = self.GetSortedXcodeEnv()
|
env = self.GetToolchainEnv()
|
||||||
all_outputs = []
|
all_outputs = []
|
||||||
for rule in rules:
|
for rule in rules:
|
||||||
# First write out a rule for the rule action.
|
|
||||||
name = '%s_%s' % (rule['rule_name'],
|
|
||||||
hashlib.md5(self.qualified_target).hexdigest())
|
|
||||||
# Skip a rule with no action and no inputs.
|
# Skip a rule with no action and no inputs.
|
||||||
if 'action' not in rule and not rule.get('rule_sources', []):
|
if 'action' not in rule and not rule.get('rule_sources', []):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
# First write out a rule for the rule action.
|
||||||
|
name = '%s_%s' % (rule['rule_name'], self.hash_for_rules)
|
||||||
|
|
||||||
args = rule['action']
|
args = rule['action']
|
||||||
description = self.GenerateDescription(
|
description = self.GenerateDescription(
|
||||||
'RULE',
|
'RULE',
|
||||||
|
@ -632,8 +636,9 @@ class NinjaWriter:
|
||||||
('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
|
('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
|
||||||
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
|
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
|
||||||
if self.flavor == 'win' else False)
|
if self.flavor == 'win' else False)
|
||||||
|
pool = 'console' if int(rule.get('ninja_use_console', 0)) else None
|
||||||
rule_name, args = self.WriteNewNinjaRule(
|
rule_name, args = self.WriteNewNinjaRule(
|
||||||
name, args, description, is_cygwin, env=env)
|
name, args, description, is_cygwin, env, pool)
|
||||||
|
|
||||||
# TODO: if the command references the outputs directly, we should
|
# TODO: if the command references the outputs directly, we should
|
||||||
# simplify it to just use $out.
|
# simplify it to just use $out.
|
||||||
|
@ -645,16 +650,31 @@ class NinjaWriter:
|
||||||
needed_variables = set(['source'])
|
needed_variables = set(['source'])
|
||||||
for argument in args:
|
for argument in args:
|
||||||
for var in special_locals:
|
for var in special_locals:
|
||||||
if ('${%s}' % var) in argument:
|
if '${%s}' % var in argument:
|
||||||
needed_variables.add(var)
|
needed_variables.add(var)
|
||||||
|
|
||||||
def cygwin_munge(path):
|
def cygwin_munge(path):
|
||||||
|
# pylint: disable=cell-var-from-loop
|
||||||
if is_cygwin:
|
if is_cygwin:
|
||||||
return path.replace('\\', '/')
|
return path.replace('\\', '/')
|
||||||
return path
|
return path
|
||||||
|
|
||||||
|
inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]
|
||||||
|
|
||||||
|
# If there are n source files matching the rule, and m additional rule
|
||||||
|
# inputs, then adding 'inputs' to each build edge written below will
|
||||||
|
# write m * n inputs. Collapsing reduces this to m + n.
|
||||||
|
sources = rule.get('rule_sources', [])
|
||||||
|
num_inputs = len(inputs)
|
||||||
|
if prebuild:
|
||||||
|
num_inputs += 1
|
||||||
|
if num_inputs > 2 and len(sources) > 2:
|
||||||
|
inputs = [self.WriteCollapsedDependencies(
|
||||||
|
rule['rule_name'], inputs, order_only=prebuild)]
|
||||||
|
prebuild = []
|
||||||
|
|
||||||
# For each source file, write an edge that generates all the outputs.
|
# For each source file, write an edge that generates all the outputs.
|
||||||
for source in rule.get('rule_sources', []):
|
for source in sources:
|
||||||
source = os.path.normpath(source)
|
source = os.path.normpath(source)
|
||||||
dirname, basename = os.path.split(source)
|
dirname, basename = os.path.split(source)
|
||||||
root, ext = os.path.splitext(basename)
|
root, ext = os.path.splitext(basename)
|
||||||
|
@ -663,9 +683,6 @@ class NinjaWriter:
|
||||||
outputs = [self.ExpandRuleVariables(o, root, dirname,
|
outputs = [self.ExpandRuleVariables(o, root, dirname,
|
||||||
source, ext, basename)
|
source, ext, basename)
|
||||||
for o in rule['outputs']]
|
for o in rule['outputs']]
|
||||||
inputs = [self.ExpandRuleVariables(i, root, dirname,
|
|
||||||
source, ext, basename)
|
|
||||||
for i in rule.get('inputs', [])]
|
|
||||||
|
|
||||||
if int(rule.get('process_outputs_as_sources', False)):
|
if int(rule.get('process_outputs_as_sources', False)):
|
||||||
extra_sources += outputs
|
extra_sources += outputs
|
||||||
|
@ -703,10 +720,11 @@ class NinjaWriter:
|
||||||
else:
|
else:
|
||||||
assert var == None, repr(var)
|
assert var == None, repr(var)
|
||||||
|
|
||||||
inputs = [self.GypPathToNinja(i, env) for i in inputs]
|
|
||||||
outputs = [self.GypPathToNinja(o, env) for o in outputs]
|
outputs = [self.GypPathToNinja(o, env) for o in outputs]
|
||||||
extra_bindings.append(('unique_name',
|
if self.flavor == 'win':
|
||||||
hashlib.md5(outputs[0]).hexdigest()))
|
# WriteNewNinjaRule uses unique_name for creating an rsp file on win.
|
||||||
|
extra_bindings.append(('unique_name',
|
||||||
|
hashlib.md5(outputs[0]).hexdigest()))
|
||||||
self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
|
self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
|
||||||
implicit=inputs,
|
implicit=inputs,
|
||||||
order_only=prebuild,
|
order_only=prebuild,
|
||||||
|
@ -718,7 +736,7 @@ class NinjaWriter:
|
||||||
|
|
||||||
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
|
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
|
||||||
outputs = []
|
outputs = []
|
||||||
env = self.GetSortedXcodeEnv()
|
env = self.GetToolchainEnv()
|
||||||
for copy in copies:
|
for copy in copies:
|
||||||
for path in copy['files']:
|
for path in copy['files']:
|
||||||
# Normalize the path so trailing slashes don't confuse us.
|
# Normalize the path so trailing slashes don't confuse us.
|
||||||
|
@ -742,15 +760,68 @@ class NinjaWriter:
|
||||||
|
|
||||||
def WriteMacBundleResources(self, resources, bundle_depends):
|
def WriteMacBundleResources(self, resources, bundle_depends):
|
||||||
"""Writes ninja edges for 'mac_bundle_resources'."""
|
"""Writes ninja edges for 'mac_bundle_resources'."""
|
||||||
|
xcassets = []
|
||||||
for output, res in gyp.xcode_emulation.GetMacBundleResources(
|
for output, res in gyp.xcode_emulation.GetMacBundleResources(
|
||||||
generator_default_variables['PRODUCT_DIR'],
|
generator_default_variables['PRODUCT_DIR'],
|
||||||
self.xcode_settings, map(self.GypPathToNinja, resources)):
|
self.xcode_settings, map(self.GypPathToNinja, resources)):
|
||||||
output = self.ExpandSpecial(output)
|
output = self.ExpandSpecial(output)
|
||||||
self.ninja.build(output, 'mac_tool', res,
|
if os.path.splitext(output)[-1] != '.xcassets':
|
||||||
variables=[('mactool_cmd', 'copy-bundle-resource')])
|
isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
|
||||||
bundle_depends.append(output)
|
self.ninja.build(output, 'mac_tool', res,
|
||||||
|
variables=[('mactool_cmd', 'copy-bundle-resource'), \
|
||||||
|
('binary', isBinary)])
|
||||||
|
bundle_depends.append(output)
|
||||||
|
else:
|
||||||
|
xcassets.append(res)
|
||||||
|
return xcassets
|
||||||
|
|
||||||
def WriteMacInfoPlist(self, bundle_depends):
|
def WriteMacXCassets(self, xcassets, bundle_depends):
|
||||||
|
"""Writes ninja edges for 'mac_bundle_resources' .xcassets files.
|
||||||
|
|
||||||
|
This add an invocation of 'actool' via the 'mac_tool.py' helper script.
|
||||||
|
It assumes that the assets catalogs define at least one imageset and
|
||||||
|
thus an Assets.car file will be generated in the application resources
|
||||||
|
directory. If this is not the case, then the build will probably be done
|
||||||
|
at each invocation of ninja."""
|
||||||
|
if not xcassets:
|
||||||
|
return
|
||||||
|
|
||||||
|
extra_arguments = {}
|
||||||
|
settings_to_arg = {
|
||||||
|
'XCASSETS_APP_ICON': 'app-icon',
|
||||||
|
'XCASSETS_LAUNCH_IMAGE': 'launch-image',
|
||||||
|
}
|
||||||
|
settings = self.xcode_settings.xcode_settings[self.config_name]
|
||||||
|
for settings_key, arg_name in settings_to_arg.iteritems():
|
||||||
|
value = settings.get(settings_key)
|
||||||
|
if value:
|
||||||
|
extra_arguments[arg_name] = value
|
||||||
|
|
||||||
|
partial_info_plist = None
|
||||||
|
if extra_arguments:
|
||||||
|
partial_info_plist = self.GypPathToUniqueOutput(
|
||||||
|
'assetcatalog_generated_info.plist')
|
||||||
|
extra_arguments['output-partial-info-plist'] = partial_info_plist
|
||||||
|
|
||||||
|
outputs = []
|
||||||
|
outputs.append(
|
||||||
|
os.path.join(
|
||||||
|
self.xcode_settings.GetBundleResourceFolder(),
|
||||||
|
'Assets.car'))
|
||||||
|
if partial_info_plist:
|
||||||
|
outputs.append(partial_info_plist)
|
||||||
|
|
||||||
|
keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
|
||||||
|
extra_env = self.xcode_settings.GetPerTargetSettings()
|
||||||
|
env = self.GetSortedXcodeEnv(additional_settings=extra_env)
|
||||||
|
env = self.ComputeExportEnvString(env)
|
||||||
|
|
||||||
|
bundle_depends.extend(self.ninja.build(
|
||||||
|
outputs, 'compile_xcassets', xcassets,
|
||||||
|
variables=[('env', env), ('keys', keys)]))
|
||||||
|
return partial_info_plist
|
||||||
|
|
||||||
|
def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
|
||||||
"""Write build rules for bundle Info.plist files."""
|
"""Write build rules for bundle Info.plist files."""
|
||||||
info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
|
info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
|
||||||
generator_default_variables['PRODUCT_DIR'],
|
generator_default_variables['PRODUCT_DIR'],
|
||||||
|
@ -770,10 +841,18 @@ class NinjaWriter:
|
||||||
env = self.GetSortedXcodeEnv(additional_settings=extra_env)
|
env = self.GetSortedXcodeEnv(additional_settings=extra_env)
|
||||||
env = self.ComputeExportEnvString(env)
|
env = self.ComputeExportEnvString(env)
|
||||||
|
|
||||||
|
if partial_info_plist:
|
||||||
|
intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist')
|
||||||
|
info_plist = self.ninja.build(
|
||||||
|
intermediate_plist, 'merge_infoplist',
|
||||||
|
[partial_info_plist, info_plist])
|
||||||
|
|
||||||
keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
|
keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
|
||||||
keys = QuoteShellArgument(json.dumps(keys), self.flavor)
|
keys = QuoteShellArgument(json.dumps(keys), self.flavor)
|
||||||
|
isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
|
||||||
self.ninja.build(out, 'copy_infoplist', info_plist,
|
self.ninja.build(out, 'copy_infoplist', info_plist,
|
||||||
variables=[('env', env), ('keys', keys)])
|
variables=[('env', env), ('keys', keys),
|
||||||
|
('binary', isBinary)])
|
||||||
bundle_depends.append(out)
|
bundle_depends.append(out)
|
||||||
|
|
||||||
def WriteSources(self, ninja_file, config_name, config, sources, predepends,
|
def WriteSources(self, ninja_file, config_name, config, sources, predepends,
|
||||||
|
@ -785,6 +864,8 @@ class NinjaWriter:
|
||||||
self.ninja.variable('cxx', '$cxx_host')
|
self.ninja.variable('cxx', '$cxx_host')
|
||||||
self.ninja.variable('ld', '$ld_host')
|
self.ninja.variable('ld', '$ld_host')
|
||||||
self.ninja.variable('ldxx', '$ldxx_host')
|
self.ninja.variable('ldxx', '$ldxx_host')
|
||||||
|
self.ninja.variable('nm', '$nm_host')
|
||||||
|
self.ninja.variable('readelf', '$readelf_host')
|
||||||
|
|
||||||
if self.flavor != 'mac' or len(self.archs) == 1:
|
if self.flavor != 'mac' or len(self.archs) == 1:
|
||||||
return self.WriteSourcesForArch(
|
return self.WriteSourcesForArch(
|
||||||
|
@ -810,6 +891,7 @@ class NinjaWriter:
|
||||||
cflags_objcc = ['$cflags_cc'] + \
|
cflags_objcc = ['$cflags_cc'] + \
|
||||||
self.xcode_settings.GetCflagsObjCC(config_name)
|
self.xcode_settings.GetCflagsObjCC(config_name)
|
||||||
elif self.flavor == 'win':
|
elif self.flavor == 'win':
|
||||||
|
asmflags = self.msvs_settings.GetAsmflags(config_name)
|
||||||
cflags = self.msvs_settings.GetCflags(config_name)
|
cflags = self.msvs_settings.GetCflags(config_name)
|
||||||
cflags_c = self.msvs_settings.GetCflagsC(config_name)
|
cflags_c = self.msvs_settings.GetCflagsC(config_name)
|
||||||
cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
|
cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
|
||||||
|
@ -844,22 +926,31 @@ class NinjaWriter:
|
||||||
self.WriteVariableList(ninja_file, 'defines',
|
self.WriteVariableList(ninja_file, 'defines',
|
||||||
[Define(d, self.flavor) for d in defines])
|
[Define(d, self.flavor) for d in defines])
|
||||||
if self.flavor == 'win':
|
if self.flavor == 'win':
|
||||||
|
self.WriteVariableList(ninja_file, 'asmflags',
|
||||||
|
map(self.ExpandSpecial, asmflags))
|
||||||
self.WriteVariableList(ninja_file, 'rcflags',
|
self.WriteVariableList(ninja_file, 'rcflags',
|
||||||
[QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
|
[QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
|
||||||
for f in self.msvs_settings.GetRcflags(config_name,
|
for f in self.msvs_settings.GetRcflags(config_name,
|
||||||
self.GypPathToNinja)])
|
self.GypPathToNinja)])
|
||||||
|
|
||||||
include_dirs = config.get('include_dirs', [])
|
include_dirs = config.get('include_dirs', [])
|
||||||
env = self.GetSortedXcodeEnv()
|
|
||||||
|
env = self.GetToolchainEnv()
|
||||||
if self.flavor == 'win':
|
if self.flavor == 'win':
|
||||||
env = self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
|
|
||||||
config=config_name)
|
|
||||||
include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
|
include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
|
||||||
config_name)
|
config_name)
|
||||||
self.WriteVariableList(ninja_file, 'includes',
|
self.WriteVariableList(ninja_file, 'includes',
|
||||||
[QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
|
[QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
|
||||||
for i in include_dirs])
|
for i in include_dirs])
|
||||||
|
|
||||||
|
if self.flavor == 'win':
|
||||||
|
midl_include_dirs = config.get('midl_include_dirs', [])
|
||||||
|
midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
|
||||||
|
midl_include_dirs, config_name)
|
||||||
|
self.WriteVariableList(ninja_file, 'midl_includes',
|
||||||
|
[QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
|
||||||
|
for i in midl_include_dirs])
|
||||||
|
|
||||||
pch_commands = precompiled_header.GetPchBuildCommands(arch)
|
pch_commands = precompiled_header.GetPchBuildCommands(arch)
|
||||||
if self.flavor == 'mac':
|
if self.flavor == 'mac':
|
||||||
# Most targets use no precompiled headers, so only write these if needed.
|
# Most targets use no precompiled headers, so only write these if needed.
|
||||||
|
@ -868,6 +959,8 @@ class NinjaWriter:
|
||||||
include = precompiled_header.GetInclude(ext, arch)
|
include = precompiled_header.GetInclude(ext, arch)
|
||||||
if include: ninja_file.variable(var, include)
|
if include: ninja_file.variable(var, include)
|
||||||
|
|
||||||
|
arflags = config.get('arflags', [])
|
||||||
|
|
||||||
self.WriteVariableList(ninja_file, 'cflags',
|
self.WriteVariableList(ninja_file, 'cflags',
|
||||||
map(self.ExpandSpecial, cflags))
|
map(self.ExpandSpecial, cflags))
|
||||||
self.WriteVariableList(ninja_file, 'cflags_c',
|
self.WriteVariableList(ninja_file, 'cflags_c',
|
||||||
|
@ -879,6 +972,8 @@ class NinjaWriter:
|
||||||
map(self.ExpandSpecial, cflags_objc))
|
map(self.ExpandSpecial, cflags_objc))
|
||||||
self.WriteVariableList(ninja_file, 'cflags_objcc',
|
self.WriteVariableList(ninja_file, 'cflags_objcc',
|
||||||
map(self.ExpandSpecial, cflags_objcc))
|
map(self.ExpandSpecial, cflags_objcc))
|
||||||
|
self.WriteVariableList(ninja_file, 'arflags',
|
||||||
|
map(self.ExpandSpecial, arflags))
|
||||||
ninja_file.newline()
|
ninja_file.newline()
|
||||||
outputs = []
|
outputs = []
|
||||||
has_rc_source = False
|
has_rc_source = False
|
||||||
|
@ -894,9 +989,7 @@ class NinjaWriter:
|
||||||
elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
|
elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
|
||||||
command = 'cc_s'
|
command = 'cc_s'
|
||||||
elif (self.flavor == 'win' and ext == 'asm' and
|
elif (self.flavor == 'win' and ext == 'asm' and
|
||||||
self.msvs_settings.GetArch(config_name) == 'x86' and
|
|
||||||
not self.msvs_settings.HasExplicitAsmRules(spec)):
|
not self.msvs_settings.HasExplicitAsmRules(spec)):
|
||||||
# Asm files only get auto assembled for x86 (not x64).
|
|
||||||
command = 'asm'
|
command = 'asm'
|
||||||
# Add the _asm suffix as msvs is capable of handling .cc and
|
# Add the _asm suffix as msvs is capable of handling .cc and
|
||||||
# .asm files of the same name without collision.
|
# .asm files of the same name without collision.
|
||||||
|
@ -968,9 +1061,19 @@ class NinjaWriter:
|
||||||
arch=arch)
|
arch=arch)
|
||||||
for arch in self.archs]
|
for arch in self.archs]
|
||||||
extra_bindings = []
|
extra_bindings = []
|
||||||
|
build_output = output
|
||||||
if not self.is_mac_bundle:
|
if not self.is_mac_bundle:
|
||||||
self.AppendPostbuildVariable(extra_bindings, spec, output, output)
|
self.AppendPostbuildVariable(extra_bindings, spec, output, output)
|
||||||
self.ninja.build(output, 'lipo', inputs, variables=extra_bindings)
|
|
||||||
|
# TODO(yyanagisawa): more work needed to fix:
|
||||||
|
# https://code.google.com/p/gyp/issues/detail?id=411
|
||||||
|
if (spec['type'] in ('shared_library', 'loadable_module') and
|
||||||
|
not self.is_mac_bundle):
|
||||||
|
extra_bindings.append(('lib', output))
|
||||||
|
self.ninja.build([output, output + '.TOC'], 'solipo', inputs,
|
||||||
|
variables=extra_bindings)
|
||||||
|
else:
|
||||||
|
self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings)
|
||||||
return output
|
return output
|
||||||
|
|
||||||
def WriteLinkForArch(self, ninja_file, spec, config_name, config,
|
def WriteLinkForArch(self, ninja_file, spec, config_name, config,
|
||||||
|
@ -1063,7 +1166,7 @@ class NinjaWriter:
|
||||||
rpath = 'lib/'
|
rpath = 'lib/'
|
||||||
if self.toolset != 'target':
|
if self.toolset != 'target':
|
||||||
rpath += self.toolset
|
rpath += self.toolset
|
||||||
ldflags.append('-Wl,-rpath=\$$ORIGIN/%s' % rpath)
|
ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
|
||||||
ldflags.append('-Wl,-rpath-link=%s' % rpath)
|
ldflags.append('-Wl,-rpath-link=%s' % rpath)
|
||||||
self.WriteVariableList(ninja_file, 'ldflags',
|
self.WriteVariableList(ninja_file, 'ldflags',
|
||||||
gyp.common.uniquer(map(self.ExpandSpecial, ldflags)))
|
gyp.common.uniquer(map(self.ExpandSpecial, ldflags)))
|
||||||
|
@ -1095,9 +1198,27 @@ class NinjaWriter:
|
||||||
extra_bindings.append(('soname', os.path.split(output)[1]))
|
extra_bindings.append(('soname', os.path.split(output)[1]))
|
||||||
extra_bindings.append(('lib',
|
extra_bindings.append(('lib',
|
||||||
gyp.common.EncodePOSIXShellArgument(output)))
|
gyp.common.EncodePOSIXShellArgument(output)))
|
||||||
|
if self.flavor != 'win':
|
||||||
|
link_file_list = output
|
||||||
|
if self.is_mac_bundle:
|
||||||
|
# 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
|
||||||
|
# 'Dependency Framework.framework.rsp'
|
||||||
|
link_file_list = self.xcode_settings.GetWrapperName()
|
||||||
|
if arch:
|
||||||
|
link_file_list += '.' + arch
|
||||||
|
link_file_list += '.rsp'
|
||||||
|
# If an rspfile contains spaces, ninja surrounds the filename with
|
||||||
|
# quotes around it and then passes it to open(), creating a file with
|
||||||
|
# quotes in its name (and when looking for the rsp file, the name
|
||||||
|
# makes it through bash which strips the quotes) :-/
|
||||||
|
link_file_list = link_file_list.replace(' ', '_')
|
||||||
|
extra_bindings.append(
|
||||||
|
('link_file_list',
|
||||||
|
gyp.common.EncodePOSIXShellArgument(link_file_list)))
|
||||||
if self.flavor == 'win':
|
if self.flavor == 'win':
|
||||||
extra_bindings.append(('binary', output))
|
extra_bindings.append(('binary', output))
|
||||||
if '/NOENTRY' not in ldflags:
|
if ('/NOENTRY' not in ldflags and
|
||||||
|
not self.msvs_settings.GetNoImportLibrary(config_name)):
|
||||||
self.target.import_lib = output + '.lib'
|
self.target.import_lib = output + '.lib'
|
||||||
extra_bindings.append(('implibflag',
|
extra_bindings.append(('implibflag',
|
||||||
'/IMPLIB:%s' % self.target.import_lib))
|
'/IMPLIB:%s' % self.target.import_lib))
|
||||||
|
@ -1196,6 +1317,19 @@ class NinjaWriter:
|
||||||
self.target.bundle = output
|
self.target.bundle = output
|
||||||
return output
|
return output
|
||||||
|
|
||||||
|
def GetToolchainEnv(self, additional_settings=None):
|
||||||
|
"""Returns the variables toolchain would set for build steps."""
|
||||||
|
env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
|
||||||
|
if self.flavor == 'win':
|
||||||
|
env = self.GetMsvsToolchainEnv(
|
||||||
|
additional_settings=additional_settings)
|
||||||
|
return env
|
||||||
|
|
||||||
|
def GetMsvsToolchainEnv(self, additional_settings=None):
|
||||||
|
"""Returns the variables Visual Studio would set for build steps."""
|
||||||
|
return self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
|
||||||
|
config=self.config_name)
|
||||||
|
|
||||||
def GetSortedXcodeEnv(self, additional_settings=None):
|
def GetSortedXcodeEnv(self, additional_settings=None):
|
||||||
"""Returns the variables Xcode would set for build steps."""
|
"""Returns the variables Xcode would set for build steps."""
|
||||||
assert self.abs_build_dir
|
assert self.abs_build_dir
|
||||||
|
@ -1377,7 +1511,8 @@ class NinjaWriter:
|
||||||
values = []
|
values = []
|
||||||
ninja_file.variable(var, ' '.join(values))
|
ninja_file.variable(var, ' '.join(values))
|
||||||
|
|
||||||
def WriteNewNinjaRule(self, name, args, description, is_cygwin, env):
|
def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool,
|
||||||
|
depfile=None):
|
||||||
"""Write out a new ninja "rule" statement for a given command.
|
"""Write out a new ninja "rule" statement for a given command.
|
||||||
|
|
||||||
Returns the name of the new rule, and a copy of |args| with variables
|
Returns the name of the new rule, and a copy of |args| with variables
|
||||||
|
@ -1435,7 +1570,8 @@ class NinjaWriter:
|
||||||
# GYP rules/actions express being no-ops by not touching their outputs.
|
# GYP rules/actions express being no-ops by not touching their outputs.
|
||||||
# Avoid executing downstream dependencies in this case by specifying
|
# Avoid executing downstream dependencies in this case by specifying
|
||||||
# restat=1 to ninja.
|
# restat=1 to ninja.
|
||||||
self.ninja.rule(rule_name, command, description, restat=True,
|
self.ninja.rule(rule_name, command, description, depfile=depfile,
|
||||||
|
restat=True, pool=pool,
|
||||||
rspfile=rspfile, rspfile_content=rspfile_content)
|
rspfile=rspfile, rspfile_content=rspfile_content)
|
||||||
self.ninja.newline()
|
self.ninja.newline()
|
||||||
|
|
||||||
|
@ -1466,12 +1602,13 @@ def CalculateVariables(default_variables, params):
|
||||||
generator_extra_sources_for_rules = getattr(xcode_generator,
|
generator_extra_sources_for_rules = getattr(xcode_generator,
|
||||||
'generator_extra_sources_for_rules', [])
|
'generator_extra_sources_for_rules', [])
|
||||||
elif flavor == 'win':
|
elif flavor == 'win':
|
||||||
|
exts = gyp.MSVSUtil.TARGET_TYPE_EXT
|
||||||
default_variables.setdefault('OS', 'win')
|
default_variables.setdefault('OS', 'win')
|
||||||
default_variables['EXECUTABLE_SUFFIX'] = '.exe'
|
default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable']
|
||||||
default_variables['STATIC_LIB_PREFIX'] = ''
|
default_variables['STATIC_LIB_PREFIX'] = ''
|
||||||
default_variables['STATIC_LIB_SUFFIX'] = '.lib'
|
default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library']
|
||||||
default_variables['SHARED_LIB_PREFIX'] = ''
|
default_variables['SHARED_LIB_PREFIX'] = ''
|
||||||
default_variables['SHARED_LIB_SUFFIX'] = '.dll'
|
default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library']
|
||||||
|
|
||||||
# Copy additional generator configuration data from VS, which is shared
|
# Copy additional generator configuration data from VS, which is shared
|
||||||
# by the Windows Ninja generator.
|
# by the Windows Ninja generator.
|
||||||
|
@ -1535,6 +1672,10 @@ def CommandWithWrapper(cmd, wrappers, prog):
|
||||||
|
|
||||||
def GetDefaultConcurrentLinks():
|
def GetDefaultConcurrentLinks():
|
||||||
"""Returns a best-guess for a number of concurrent links."""
|
"""Returns a best-guess for a number of concurrent links."""
|
||||||
|
pool_size = int(os.getenv('GYP_LINK_CONCURRENCY', 0))
|
||||||
|
if pool_size:
|
||||||
|
return pool_size
|
||||||
|
|
||||||
if sys.platform in ('win32', 'cygwin'):
|
if sys.platform in ('win32', 'cygwin'):
|
||||||
import ctypes
|
import ctypes
|
||||||
|
|
||||||
|
@ -1557,19 +1698,17 @@ def GetDefaultConcurrentLinks():
|
||||||
|
|
||||||
mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30))) # total / 4GB
|
mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30))) # total / 4GB
|
||||||
hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
|
hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
|
||||||
# return min(mem_limit, hard_cap)
|
return min(mem_limit, hard_cap)
|
||||||
# TODO(scottmg): Temporary speculative fix for OOM on builders
|
|
||||||
# See http://crbug.com/333000.
|
|
||||||
return 2
|
|
||||||
elif sys.platform.startswith('linux'):
|
elif sys.platform.startswith('linux'):
|
||||||
with open("/proc/meminfo") as meminfo:
|
if os.path.exists("/proc/meminfo"):
|
||||||
memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
|
with open("/proc/meminfo") as meminfo:
|
||||||
for line in meminfo:
|
memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
|
||||||
match = memtotal_re.match(line)
|
for line in meminfo:
|
||||||
if not match:
|
match = memtotal_re.match(line)
|
||||||
continue
|
if not match:
|
||||||
# Allow 8Gb per link on Linux because Gold is quite memory hungry
|
continue
|
||||||
return max(1, int(match.group(1)) / (8 * (2 ** 20)))
|
# Allow 8Gb per link on Linux because Gold is quite memory hungry
|
||||||
|
return max(1, int(match.group(1)) / (8 * (2 ** 20)))
|
||||||
return 1
|
return 1
|
||||||
elif sys.platform == 'darwin':
|
elif sys.platform == 'darwin':
|
||||||
try:
|
try:
|
||||||
|
@ -1666,14 +1805,15 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
# 'CC_host'/'CXX_host' enviroment variable, cc_host/cxx_host should be set
|
# 'CC_host'/'CXX_host' enviroment variable, cc_host/cxx_host should be set
|
||||||
# to cc/cxx.
|
# to cc/cxx.
|
||||||
if flavor == 'win':
|
if flavor == 'win':
|
||||||
# Overridden by local arch choice in the use_deps case.
|
ar = 'lib.exe'
|
||||||
# Chromium's ffmpeg c99conv.py currently looks for a 'cc =' line in
|
# cc and cxx must be set to the correct architecture by overriding with one
|
||||||
# build.ninja so needs something valid here. http://crbug.com/233985
|
# of cl_x86 or cl_x64 below.
|
||||||
cc = 'cl.exe'
|
cc = 'UNSET'
|
||||||
cxx = 'cl.exe'
|
cxx = 'UNSET'
|
||||||
ld = 'link.exe'
|
ld = 'link.exe'
|
||||||
ld_host = '$ld'
|
ld_host = '$ld'
|
||||||
else:
|
else:
|
||||||
|
ar = 'ar'
|
||||||
cc = 'cc'
|
cc = 'cc'
|
||||||
cxx = 'c++'
|
cxx = 'c++'
|
||||||
ld = '$cc'
|
ld = '$cc'
|
||||||
|
@ -1681,10 +1821,16 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
ld_host = '$cc_host'
|
ld_host = '$cc_host'
|
||||||
ldxx_host = '$cxx_host'
|
ldxx_host = '$cxx_host'
|
||||||
|
|
||||||
|
ar_host = 'ar'
|
||||||
cc_host = None
|
cc_host = None
|
||||||
cxx_host = None
|
cxx_host = None
|
||||||
cc_host_global_setting = None
|
cc_host_global_setting = None
|
||||||
cxx_host_global_setting = None
|
cxx_host_global_setting = None
|
||||||
|
clang_cl = None
|
||||||
|
nm = 'nm'
|
||||||
|
nm_host = 'nm'
|
||||||
|
readelf = 'readelf'
|
||||||
|
readelf_host = 'readelf'
|
||||||
|
|
||||||
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
|
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
|
||||||
make_global_settings = data[build_file].get('make_global_settings', [])
|
make_global_settings = data[build_file].get('make_global_settings', [])
|
||||||
|
@ -1692,8 +1838,14 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
options.toplevel_dir)
|
options.toplevel_dir)
|
||||||
wrappers = {}
|
wrappers = {}
|
||||||
for key, value in make_global_settings:
|
for key, value in make_global_settings:
|
||||||
|
if key == 'AR':
|
||||||
|
ar = os.path.join(build_to_root, value)
|
||||||
|
if key == 'AR.host':
|
||||||
|
ar_host = os.path.join(build_to_root, value)
|
||||||
if key == 'CC':
|
if key == 'CC':
|
||||||
cc = os.path.join(build_to_root, value)
|
cc = os.path.join(build_to_root, value)
|
||||||
|
if cc.endswith('clang-cl'):
|
||||||
|
clang_cl = cc
|
||||||
if key == 'CXX':
|
if key == 'CXX':
|
||||||
cxx = os.path.join(build_to_root, value)
|
cxx = os.path.join(build_to_root, value)
|
||||||
if key == 'CC.host':
|
if key == 'CC.host':
|
||||||
|
@ -1702,6 +1854,18 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
if key == 'CXX.host':
|
if key == 'CXX.host':
|
||||||
cxx_host = os.path.join(build_to_root, value)
|
cxx_host = os.path.join(build_to_root, value)
|
||||||
cxx_host_global_setting = value
|
cxx_host_global_setting = value
|
||||||
|
if key == 'LD':
|
||||||
|
ld = os.path.join(build_to_root, value)
|
||||||
|
if key == 'LD.host':
|
||||||
|
ld_host = os.path.join(build_to_root, value)
|
||||||
|
if key == 'NM':
|
||||||
|
nm = os.path.join(build_to_root, value)
|
||||||
|
if key == 'NM.host':
|
||||||
|
nm_host = os.path.join(build_to_root, value)
|
||||||
|
if key == 'READELF':
|
||||||
|
readelf = os.path.join(build_to_root, value)
|
||||||
|
if key == 'READELF.host':
|
||||||
|
readelf_host = os.path.join(build_to_root, value)
|
||||||
if key.endswith('_wrapper'):
|
if key.endswith('_wrapper'):
|
||||||
wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
|
wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
|
||||||
|
|
||||||
|
@ -1713,12 +1877,25 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
wrappers[key_prefix] = os.path.join(build_to_root, value)
|
wrappers[key_prefix] = os.path.join(build_to_root, value)
|
||||||
|
|
||||||
if flavor == 'win':
|
if flavor == 'win':
|
||||||
|
configs = [target_dicts[qualified_target]['configurations'][config_name]
|
||||||
|
for qualified_target in target_list]
|
||||||
|
shared_system_includes = None
|
||||||
|
if not generator_flags.get('ninja_use_custom_environment_files', 0):
|
||||||
|
shared_system_includes = \
|
||||||
|
gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
|
||||||
|
configs, generator_flags)
|
||||||
cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
|
cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
|
||||||
toplevel_build, generator_flags, OpenOutput)
|
toplevel_build, generator_flags, shared_system_includes, OpenOutput)
|
||||||
for arch, path in cl_paths.iteritems():
|
for arch, path in cl_paths.iteritems():
|
||||||
master_ninja.variable(
|
if clang_cl:
|
||||||
'cl_' + arch, CommandWithWrapper('CC', wrappers,
|
# If we have selected clang-cl, use that instead.
|
||||||
QuoteShellArgument(path, flavor)))
|
path = clang_cl
|
||||||
|
command = CommandWithWrapper('CC', wrappers,
|
||||||
|
QuoteShellArgument(path, 'win'))
|
||||||
|
if clang_cl:
|
||||||
|
# Use clang-cl to cross-compile for x86 or x86_64.
|
||||||
|
command += (' -m32' if arch == 'x86' else ' -m64')
|
||||||
|
master_ninja.variable('cl_' + arch, command)
|
||||||
|
|
||||||
cc = GetEnvironFallback(['CC_target', 'CC'], cc)
|
cc = GetEnvironFallback(['CC_target', 'CC'], cc)
|
||||||
master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
|
master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
|
||||||
|
@ -1728,14 +1905,22 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
if flavor == 'win':
|
if flavor == 'win':
|
||||||
master_ninja.variable('ld', ld)
|
master_ninja.variable('ld', ld)
|
||||||
master_ninja.variable('idl', 'midl.exe')
|
master_ninja.variable('idl', 'midl.exe')
|
||||||
master_ninja.variable('ar', 'lib.exe')
|
master_ninja.variable('ar', ar)
|
||||||
master_ninja.variable('rc', 'rc.exe')
|
master_ninja.variable('rc', 'rc.exe')
|
||||||
master_ninja.variable('asm', 'ml.exe')
|
master_ninja.variable('ml_x86', 'ml.exe')
|
||||||
|
master_ninja.variable('ml_x64', 'ml64.exe')
|
||||||
master_ninja.variable('mt', 'mt.exe')
|
master_ninja.variable('mt', 'mt.exe')
|
||||||
else:
|
else:
|
||||||
master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
|
master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
|
||||||
master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
|
master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
|
||||||
master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], 'ar'))
|
master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar))
|
||||||
|
if flavor != 'mac':
|
||||||
|
# Mac does not use readelf/nm for .TOC generation, so avoiding polluting
|
||||||
|
# the master ninja with extra unused variables.
|
||||||
|
master_ninja.variable(
|
||||||
|
'nm', GetEnvironFallback(['NM_target', 'NM'], nm))
|
||||||
|
master_ninja.variable(
|
||||||
|
'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf))
|
||||||
|
|
||||||
if generator_supports_multiple_toolsets:
|
if generator_supports_multiple_toolsets:
|
||||||
if not cc_host:
|
if not cc_host:
|
||||||
|
@ -1743,7 +1928,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
if not cxx_host:
|
if not cxx_host:
|
||||||
cxx_host = cxx
|
cxx_host = cxx
|
||||||
|
|
||||||
master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar'))
|
master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host))
|
||||||
|
master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host))
|
||||||
|
master_ninja.variable('readelf_host',
|
||||||
|
GetEnvironFallback(['READELF_host'], readelf_host))
|
||||||
cc_host = GetEnvironFallback(['CC_host'], cc_host)
|
cc_host = GetEnvironFallback(['CC_host'], cc_host)
|
||||||
cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
|
cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
|
||||||
|
|
||||||
|
@ -1826,7 +2014,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
description='IDL $in',
|
description='IDL $in',
|
||||||
command=('%s gyp-win-tool midl-wrapper $arch $outdir '
|
command=('%s gyp-win-tool midl-wrapper $arch $outdir '
|
||||||
'$tlb $h $dlldata $iid $proxy $in '
|
'$tlb $h $dlldata $iid $proxy $in '
|
||||||
'$idlflags' % sys.executable))
|
'$midl_includes $idlflags' % sys.executable))
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'rc',
|
'rc',
|
||||||
description='RC $in',
|
description='RC $in',
|
||||||
|
@ -1836,20 +2024,20 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
sys.executable))
|
sys.executable))
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'asm',
|
'asm',
|
||||||
description='ASM $in',
|
description='ASM $out',
|
||||||
command=('%s gyp-win-tool asm-wrapper '
|
command=('%s gyp-win-tool asm-wrapper '
|
||||||
'$arch $asm $defines $includes /c /Fo $out $in' %
|
'$arch $asm $defines $includes $asmflags /c /Fo $out $in' %
|
||||||
sys.executable))
|
sys.executable))
|
||||||
|
|
||||||
if flavor != 'mac' and flavor != 'win':
|
if flavor != 'mac' and flavor != 'win':
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'alink',
|
'alink',
|
||||||
description='AR $out',
|
description='AR $out',
|
||||||
command='rm -f $out && $ar rcs $out $in')
|
command='rm -f $out && $ar rcs $arflags $out $in')
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'alink_thin',
|
'alink_thin',
|
||||||
description='AR $out',
|
description='AR $out',
|
||||||
command='rm -f $out && $ar rcsT $out $in')
|
command='rm -f $out && $ar rcsT $arflags $out $in')
|
||||||
|
|
||||||
# This allows targets that only need to depend on $lib's API to declare an
|
# This allows targets that only need to depend on $lib's API to declare an
|
||||||
# order-only dependency on $lib.TOC and avoid relinking such downstream
|
# order-only dependency on $lib.TOC and avoid relinking such downstream
|
||||||
|
@ -1857,38 +2045,39 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
# The resulting string leaves an uninterpolated %{suffix} which
|
# The resulting string leaves an uninterpolated %{suffix} which
|
||||||
# is used in the final substitution below.
|
# is used in the final substitution below.
|
||||||
mtime_preserving_solink_base = (
|
mtime_preserving_solink_base = (
|
||||||
'if [ ! -e $lib -o ! -e ${lib}.TOC ]; then '
|
'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
|
||||||
'%(solink)s && %(extract_toc)s > ${lib}.TOC; else '
|
'%(solink)s && %(extract_toc)s > $lib.TOC; else '
|
||||||
'%(solink)s && %(extract_toc)s > ${lib}.tmp && '
|
'%(solink)s && %(extract_toc)s > $lib.tmp && '
|
||||||
'if ! cmp -s ${lib}.tmp ${lib}.TOC; then mv ${lib}.tmp ${lib}.TOC ; '
|
'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
|
||||||
'fi; fi'
|
'fi; fi'
|
||||||
% { 'solink':
|
% { 'solink':
|
||||||
'$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
|
'$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
|
||||||
'extract_toc':
|
'extract_toc':
|
||||||
('{ readelf -d ${lib} | grep SONAME ; '
|
('{ $readelf -d $lib | grep SONAME ; '
|
||||||
'nm -gD -f p ${lib} | cut -f1-2 -d\' \'; }')})
|
'$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})
|
||||||
|
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'solink',
|
'solink',
|
||||||
description='SOLINK $lib',
|
description='SOLINK $lib',
|
||||||
restat=True,
|
restat=True,
|
||||||
command=(mtime_preserving_solink_base % {
|
command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
|
||||||
'suffix': '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive '
|
rspfile='$link_file_list',
|
||||||
'$libs'}),
|
rspfile_content=
|
||||||
|
'-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
|
||||||
pool='link_pool')
|
pool='link_pool')
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'solink_module',
|
'solink_module',
|
||||||
description='SOLINK(module) $lib',
|
description='SOLINK(module) $lib',
|
||||||
restat=True,
|
restat=True,
|
||||||
command=(mtime_preserving_solink_base % {
|
command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
|
||||||
'suffix': '-Wl,--start-group $in $solibs -Wl,--end-group '
|
rspfile='$link_file_list',
|
||||||
'$libs'}),
|
rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs',
|
||||||
pool='link_pool')
|
pool='link_pool')
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'link',
|
'link',
|
||||||
description='LINK $out',
|
description='LINK $out',
|
||||||
command=('$ld $ldflags -o $out '
|
command=('$ld $ldflags -o $out '
|
||||||
'-Wl,--start-group $in $solibs -Wl,--end-group $libs'),
|
'-Wl,--start-group $in -Wl,--end-group $solibs $libs'),
|
||||||
pool='link_pool')
|
pool='link_pool')
|
||||||
elif flavor == 'win':
|
elif flavor == 'win':
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
|
@ -1927,21 +2116,31 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
'lipo',
|
'lipo',
|
||||||
description='LIPO $out, POSTBUILDS',
|
description='LIPO $out, POSTBUILDS',
|
||||||
command='rm -f $out && lipo -create $in -output $out$postbuilds')
|
command='rm -f $out && lipo -create $in -output $out$postbuilds')
|
||||||
|
master_ninja.rule(
|
||||||
|
'solipo',
|
||||||
|
description='SOLIPO $out, POSTBUILDS',
|
||||||
|
command=(
|
||||||
|
'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&'
|
||||||
|
'%(extract_toc)s > $lib.TOC'
|
||||||
|
% { 'extract_toc':
|
||||||
|
'{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
|
||||||
|
'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}))
|
||||||
|
|
||||||
|
|
||||||
# Record the public interface of $lib in $lib.TOC. See the corresponding
|
# Record the public interface of $lib in $lib.TOC. See the corresponding
|
||||||
# comment in the posix section above for details.
|
# comment in the posix section above for details.
|
||||||
solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
|
solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
|
||||||
mtime_preserving_solink_base = (
|
mtime_preserving_solink_base = (
|
||||||
'if [ ! -e $lib -o ! -e ${lib}.TOC ] || '
|
'if [ ! -e $lib -o ! -e $lib.TOC ] || '
|
||||||
# Always force dependent targets to relink if this library
|
# Always force dependent targets to relink if this library
|
||||||
# reexports something. Handling this correctly would require
|
# reexports something. Handling this correctly would require
|
||||||
# recursive TOC dumping but this is rare in practice, so punt.
|
# recursive TOC dumping but this is rare in practice, so punt.
|
||||||
'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
|
'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
|
||||||
'%(solink)s && %(extract_toc)s > ${lib}.TOC; '
|
'%(solink)s && %(extract_toc)s > $lib.TOC; '
|
||||||
'else '
|
'else '
|
||||||
'%(solink)s && %(extract_toc)s > ${lib}.tmp && '
|
'%(solink)s && %(extract_toc)s > $lib.tmp && '
|
||||||
'if ! cmp -s ${lib}.tmp ${lib}.TOC; then '
|
'if ! cmp -s $lib.tmp $lib.TOC; then '
|
||||||
'mv ${lib}.tmp ${lib}.TOC ; '
|
'mv $lib.tmp $lib.TOC ; '
|
||||||
'fi; '
|
'fi; '
|
||||||
'fi'
|
'fi'
|
||||||
% { 'solink': solink_base,
|
% { 'solink': solink_base,
|
||||||
|
@ -1949,34 +2148,42 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
'{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
|
'{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
|
||||||
'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
|
'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
|
||||||
|
|
||||||
solink_suffix = '$in $solibs $libs$postbuilds'
|
|
||||||
|
solink_suffix = '@$link_file_list$postbuilds'
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'solink',
|
'solink',
|
||||||
description='SOLINK $lib, POSTBUILDS',
|
description='SOLINK $lib, POSTBUILDS',
|
||||||
restat=True,
|
restat=True,
|
||||||
command=mtime_preserving_solink_base % {'suffix': solink_suffix,
|
command=mtime_preserving_solink_base % {'suffix': solink_suffix,
|
||||||
'type': '-shared'},
|
'type': '-shared'},
|
||||||
|
rspfile='$link_file_list',
|
||||||
|
rspfile_content='$in $solibs $libs',
|
||||||
pool='link_pool')
|
pool='link_pool')
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'solink_notoc',
|
'solink_notoc',
|
||||||
description='SOLINK $lib, POSTBUILDS',
|
description='SOLINK $lib, POSTBUILDS',
|
||||||
restat=True,
|
restat=True,
|
||||||
command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
|
command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
|
||||||
|
rspfile='$link_file_list',
|
||||||
|
rspfile_content='$in $solibs $libs',
|
||||||
pool='link_pool')
|
pool='link_pool')
|
||||||
|
|
||||||
solink_module_suffix = '$in $solibs $libs$postbuilds'
|
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'solink_module',
|
'solink_module',
|
||||||
description='SOLINK(module) $lib, POSTBUILDS',
|
description='SOLINK(module) $lib, POSTBUILDS',
|
||||||
restat=True,
|
restat=True,
|
||||||
command=mtime_preserving_solink_base % {'suffix': solink_module_suffix,
|
command=mtime_preserving_solink_base % {'suffix': solink_suffix,
|
||||||
'type': '-bundle'},
|
'type': '-bundle'},
|
||||||
|
rspfile='$link_file_list',
|
||||||
|
rspfile_content='$in $solibs $libs',
|
||||||
pool='link_pool')
|
pool='link_pool')
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'solink_module_notoc',
|
'solink_module_notoc',
|
||||||
description='SOLINK(module) $lib, POSTBUILDS',
|
description='SOLINK(module) $lib, POSTBUILDS',
|
||||||
restat=True,
|
restat=True,
|
||||||
command=solink_base % {'suffix': solink_module_suffix, 'type': '-bundle'},
|
command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
|
||||||
|
rspfile='$link_file_list',
|
||||||
|
rspfile_content='$in $solibs $libs',
|
||||||
pool='link_pool')
|
pool='link_pool')
|
||||||
|
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
|
@ -1993,11 +2200,19 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'copy_infoplist',
|
'copy_infoplist',
|
||||||
description='COPY INFOPLIST $in',
|
description='COPY INFOPLIST $in',
|
||||||
command='$env ./gyp-mac-tool copy-info-plist $in $out $keys')
|
command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys')
|
||||||
|
master_ninja.rule(
|
||||||
|
'merge_infoplist',
|
||||||
|
description='MERGE INFOPLISTS $in',
|
||||||
|
command='$env ./gyp-mac-tool merge-info-plist $out $in')
|
||||||
|
master_ninja.rule(
|
||||||
|
'compile_xcassets',
|
||||||
|
description='COMPILE XCASSETS $in',
|
||||||
|
command='$env ./gyp-mac-tool compile-xcassets $keys $in')
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'mac_tool',
|
'mac_tool',
|
||||||
description='MACTOOL $mactool_cmd $in',
|
description='MACTOOL $mactool_cmd $in',
|
||||||
command='$env ./gyp-mac-tool $mactool_cmd $in $out')
|
command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'package_framework',
|
'package_framework',
|
||||||
description='PACKAGE FRAMEWORK $out, POSTBUILDS',
|
description='PACKAGE FRAMEWORK $out, POSTBUILDS',
|
||||||
|
@ -2020,7 +2235,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
master_ninja.rule(
|
master_ninja.rule(
|
||||||
'copy',
|
'copy',
|
||||||
description='COPY $in $out',
|
description='COPY $in $out',
|
||||||
command='rm -rf $out && cp -af $in $out')
|
command='ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)')
|
||||||
master_ninja.newline()
|
master_ninja.newline()
|
||||||
|
|
||||||
all_targets = set()
|
all_targets = set()
|
||||||
|
@ -2037,6 +2252,15 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
# objects.
|
# objects.
|
||||||
target_short_names = {}
|
target_short_names = {}
|
||||||
|
|
||||||
|
# short name of targets that were skipped because they didn't contain anything
|
||||||
|
# interesting.
|
||||||
|
# NOTE: there may be overlap between this an non_empty_target_names.
|
||||||
|
empty_target_names = set()
|
||||||
|
|
||||||
|
# Set of non-empty short target names.
|
||||||
|
# NOTE: there may be overlap between this an empty_target_names.
|
||||||
|
non_empty_target_names = set()
|
||||||
|
|
||||||
for qualified_target in target_list:
|
for qualified_target in target_list:
|
||||||
# qualified_target is like: third_party/icu/icu.gyp:icui18n#target
|
# qualified_target is like: third_party/icu/icu.gyp:icui18n#target
|
||||||
build_file, name, toolset = \
|
build_file, name, toolset = \
|
||||||
|
@ -2053,6 +2277,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
|
|
||||||
build_file = gyp.common.RelativePath(build_file, options.toplevel_dir)
|
build_file = gyp.common.RelativePath(build_file, options.toplevel_dir)
|
||||||
|
|
||||||
|
qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name,
|
||||||
|
toolset)
|
||||||
|
hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()
|
||||||
|
|
||||||
base_path = os.path.dirname(build_file)
|
base_path = os.path.dirname(build_file)
|
||||||
obj = 'obj'
|
obj = 'obj'
|
||||||
if toolset != 'target':
|
if toolset != 'target':
|
||||||
|
@ -2060,7 +2288,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
output_file = os.path.join(obj, base_path, name + '.ninja')
|
output_file = os.path.join(obj, base_path, name + '.ninja')
|
||||||
|
|
||||||
ninja_output = StringIO()
|
ninja_output = StringIO()
|
||||||
writer = NinjaWriter(qualified_target, target_outputs, base_path, build_dir,
|
writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir,
|
||||||
ninja_output,
|
ninja_output,
|
||||||
toplevel_build, output_file,
|
toplevel_build, output_file,
|
||||||
flavor, toplevel_dir=options.toplevel_dir)
|
flavor, toplevel_dir=options.toplevel_dir)
|
||||||
|
@ -2080,6 +2308,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
target_outputs[qualified_target] = target
|
target_outputs[qualified_target] = target
|
||||||
if qualified_target in all_targets:
|
if qualified_target in all_targets:
|
||||||
all_outputs.add(target.FinalOutput())
|
all_outputs.add(target.FinalOutput())
|
||||||
|
non_empty_target_names.add(name)
|
||||||
|
else:
|
||||||
|
empty_target_names.add(name)
|
||||||
|
|
||||||
if target_short_names:
|
if target_short_names:
|
||||||
# Write a short name to build this target. This benefits both the
|
# Write a short name to build this target. This benefits both the
|
||||||
|
@ -2091,6 +2322,16 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
|
||||||
master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
|
master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
|
||||||
target_short_names[short_name]])
|
target_short_names[short_name]])
|
||||||
|
|
||||||
|
# Write phony targets for any empty targets that weren't written yet. As
|
||||||
|
# short names are not necessarily unique only do this for short names that
|
||||||
|
# haven't already been output for another target.
|
||||||
|
empty_target_names = empty_target_names - non_empty_target_names
|
||||||
|
if empty_target_names:
|
||||||
|
master_ninja.newline()
|
||||||
|
master_ninja.comment('Empty targets (output for completeness).')
|
||||||
|
for name in sorted(empty_target_names):
|
||||||
|
master_ninja.build(name, 'phony')
|
||||||
|
|
||||||
if all_outputs:
|
if all_outputs:
|
||||||
master_ninja.newline()
|
master_ninja.newline()
|
||||||
master_ninja.build('all', 'phony', list(all_outputs))
|
master_ninja.build('all', 'phony', list(all_outputs))
|
||||||
|
|
|
@ -15,15 +15,18 @@ import TestCommon
|
||||||
|
|
||||||
class TestPrefixesAndSuffixes(unittest.TestCase):
|
class TestPrefixesAndSuffixes(unittest.TestCase):
|
||||||
def test_BinaryNamesWindows(self):
|
def test_BinaryNamesWindows(self):
|
||||||
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
|
# These cannot run on non-Windows as they require a VS installation to
|
||||||
'build.ninja', 'win')
|
# correctly handle variable expansion.
|
||||||
spec = { 'target_name': 'wee' }
|
if sys.platform.startswith('win'):
|
||||||
self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
|
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
|
||||||
endswith('.exe'))
|
'build.ninja', 'win')
|
||||||
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
|
spec = { 'target_name': 'wee' }
|
||||||
endswith('.dll'))
|
self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
|
||||||
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
|
endswith('.exe'))
|
||||||
endswith('.lib'))
|
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
|
||||||
|
endswith('.dll'))
|
||||||
|
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
|
||||||
|
endswith('.lib'))
|
||||||
|
|
||||||
def test_BinaryNamesLinux(self):
|
def test_BinaryNamesLinux(self):
|
||||||
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
|
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
|
||||||
|
|
|
@ -5,6 +5,7 @@
|
||||||
import filecmp
|
import filecmp
|
||||||
import gyp.common
|
import gyp.common
|
||||||
import gyp.xcodeproj_file
|
import gyp.xcodeproj_file
|
||||||
|
import gyp.xcode_ninja
|
||||||
import errno
|
import errno
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
@ -68,6 +69,9 @@ generator_additional_path_sections = [
|
||||||
# The Xcode-specific keys that exist on targets and aren't moved down to
|
# The Xcode-specific keys that exist on targets and aren't moved down to
|
||||||
# configurations.
|
# configurations.
|
||||||
generator_additional_non_configuration_keys = [
|
generator_additional_non_configuration_keys = [
|
||||||
|
'ios_app_extension',
|
||||||
|
'ios_watch_app',
|
||||||
|
'ios_watchkit_extension',
|
||||||
'mac_bundle',
|
'mac_bundle',
|
||||||
'mac_bundle_resources',
|
'mac_bundle_resources',
|
||||||
'mac_framework_headers',
|
'mac_framework_headers',
|
||||||
|
@ -484,7 +488,7 @@ sys.exit(subprocess.call(sys.argv[1:]))" """
|
||||||
def AddSourceToTarget(source, type, pbxp, xct):
|
def AddSourceToTarget(source, type, pbxp, xct):
|
||||||
# TODO(mark): Perhaps source_extensions and library_extensions can be made a
|
# TODO(mark): Perhaps source_extensions and library_extensions can be made a
|
||||||
# little bit fancier.
|
# little bit fancier.
|
||||||
source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's']
|
source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']
|
||||||
|
|
||||||
# .o is conceptually more of a "source" than a "library," but Xcode thinks
|
# .o is conceptually more of a "source" than a "library," but Xcode thinks
|
||||||
# of "sources" as things to compile and "libraries" (or "frameworks") as
|
# of "sources" as things to compile and "libraries" (or "frameworks") as
|
||||||
|
@ -520,7 +524,7 @@ def AddHeaderToTarget(header, pbxp, xct, is_public):
|
||||||
xct.HeadersPhase().AddFile(header, settings)
|
xct.HeadersPhase().AddFile(header, settings)
|
||||||
|
|
||||||
|
|
||||||
_xcode_variable_re = re.compile('(\$\((.*?)\))')
|
_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
|
||||||
def ExpandXcodeVariables(string, expansions):
|
def ExpandXcodeVariables(string, expansions):
|
||||||
"""Expands Xcode-style $(VARIABLES) in string per the expansions dict.
|
"""Expands Xcode-style $(VARIABLES) in string per the expansions dict.
|
||||||
|
|
||||||
|
@ -575,12 +579,17 @@ def PerformBuild(data, configurations, params):
|
||||||
|
|
||||||
|
|
||||||
def GenerateOutput(target_list, target_dicts, data, params):
|
def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
|
# Optionally configure each spec to use ninja as the external builder.
|
||||||
|
ninja_wrapper = params.get('flavor') == 'ninja'
|
||||||
|
if ninja_wrapper:
|
||||||
|
(target_list, target_dicts, data) = \
|
||||||
|
gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)
|
||||||
|
|
||||||
options = params['options']
|
options = params['options']
|
||||||
generator_flags = params.get('generator_flags', {})
|
generator_flags = params.get('generator_flags', {})
|
||||||
parallel_builds = generator_flags.get('xcode_parallel_builds', True)
|
parallel_builds = generator_flags.get('xcode_parallel_builds', True)
|
||||||
serialize_all_tests = \
|
serialize_all_tests = \
|
||||||
generator_flags.get('xcode_serialize_all_test_runs', True)
|
generator_flags.get('xcode_serialize_all_test_runs', True)
|
||||||
project_version = generator_flags.get('xcode_project_version', None)
|
|
||||||
skip_excluded_files = \
|
skip_excluded_files = \
|
||||||
not generator_flags.get('xcode_list_excluded_files', True)
|
not generator_flags.get('xcode_list_excluded_files', True)
|
||||||
xcode_projects = {}
|
xcode_projects = {}
|
||||||
|
@ -598,8 +607,6 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
if parallel_builds:
|
if parallel_builds:
|
||||||
pbxp.SetProperty('attributes',
|
pbxp.SetProperty('attributes',
|
||||||
{'BuildIndependentTargetsInParallel': 'YES'})
|
{'BuildIndependentTargetsInParallel': 'YES'})
|
||||||
if project_version:
|
|
||||||
xcp.project_file.SetXcodeVersion(project_version)
|
|
||||||
|
|
||||||
# Add gyp/gypi files to project
|
# Add gyp/gypi files to project
|
||||||
if not generator_flags.get('standalone'):
|
if not generator_flags.get('standalone'):
|
||||||
|
@ -637,14 +644,18 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
# com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
|
# com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
|
||||||
# to create a single-file mh_bundle.
|
# to create a single-file mh_bundle.
|
||||||
_types = {
|
_types = {
|
||||||
'executable': 'com.apple.product-type.tool',
|
'executable': 'com.apple.product-type.tool',
|
||||||
'loadable_module': 'com.googlecode.gyp.xcode.bundle',
|
'loadable_module': 'com.googlecode.gyp.xcode.bundle',
|
||||||
'shared_library': 'com.apple.product-type.library.dynamic',
|
'shared_library': 'com.apple.product-type.library.dynamic',
|
||||||
'static_library': 'com.apple.product-type.library.static',
|
'static_library': 'com.apple.product-type.library.static',
|
||||||
'executable+bundle': 'com.apple.product-type.application',
|
'executable+bundle': 'com.apple.product-type.application',
|
||||||
'loadable_module+bundle': 'com.apple.product-type.bundle',
|
'loadable_module+bundle': 'com.apple.product-type.bundle',
|
||||||
'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
|
'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
|
||||||
'shared_library+bundle': 'com.apple.product-type.framework',
|
'shared_library+bundle': 'com.apple.product-type.framework',
|
||||||
|
'executable+extension+bundle': 'com.apple.product-type.app-extension',
|
||||||
|
'executable+watch+extension+bundle':
|
||||||
|
'com.apple.product-type.watchkit-extension',
|
||||||
|
'executable+watch+bundle': 'com.apple.product-type.application.watchapp',
|
||||||
}
|
}
|
||||||
|
|
||||||
target_properties = {
|
target_properties = {
|
||||||
|
@ -655,6 +666,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
type = spec['type']
|
type = spec['type']
|
||||||
is_xctest = int(spec.get('mac_xctest_bundle', 0))
|
is_xctest = int(spec.get('mac_xctest_bundle', 0))
|
||||||
is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
|
is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
|
||||||
|
is_app_extension = int(spec.get('ios_app_extension', 0))
|
||||||
|
is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
|
||||||
|
is_watch_app = int(spec.get('ios_watch_app', 0))
|
||||||
if type != 'none':
|
if type != 'none':
|
||||||
type_bundle_key = type
|
type_bundle_key = type
|
||||||
if is_xctest:
|
if is_xctest:
|
||||||
|
@ -662,6 +676,18 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
assert type == 'loadable_module', (
|
assert type == 'loadable_module', (
|
||||||
'mac_xctest_bundle targets must have type loadable_module '
|
'mac_xctest_bundle targets must have type loadable_module '
|
||||||
'(target %s)' % target_name)
|
'(target %s)' % target_name)
|
||||||
|
elif is_app_extension:
|
||||||
|
assert is_bundle, ('ios_app_extension flag requires mac_bundle '
|
||||||
|
'(target %s)' % target_name)
|
||||||
|
type_bundle_key += '+extension+bundle'
|
||||||
|
elif is_watchkit_extension:
|
||||||
|
assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle '
|
||||||
|
'(target %s)' % target_name)
|
||||||
|
type_bundle_key += '+watch+extension+bundle'
|
||||||
|
elif is_watch_app:
|
||||||
|
assert is_bundle, ('ios_watch_app flag requires mac_bundle '
|
||||||
|
'(target %s)' % target_name)
|
||||||
|
type_bundle_key += '+watch+bundle'
|
||||||
elif is_bundle:
|
elif is_bundle:
|
||||||
type_bundle_key += '+bundle'
|
type_bundle_key += '+bundle'
|
||||||
|
|
||||||
|
@ -703,11 +729,16 @@ def GenerateOutput(target_list, target_dicts, data, params):
|
||||||
# and is made a dependency of this target. This way the work is done
|
# and is made a dependency of this target. This way the work is done
|
||||||
# before the dependency checks for what should be recompiled.
|
# before the dependency checks for what should be recompiled.
|
||||||
support_xct = None
|
support_xct = None
|
||||||
if type != 'none' and (spec_actions or spec_rules):
|
# The Xcode "issues" don't affect xcode-ninja builds, since the dependency
|
||||||
|
# logic all happens in ninja. Don't bother creating the extra targets in
|
||||||
|
# that case.
|
||||||
|
if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
|
||||||
support_xccl = CreateXCConfigurationList(configuration_names);
|
support_xccl = CreateXCConfigurationList(configuration_names);
|
||||||
|
support_target_suffix = generator_flags.get(
|
||||||
|
'support_target_suffix', ' Support')
|
||||||
support_target_properties = {
|
support_target_properties = {
|
||||||
'buildConfigurationList': support_xccl,
|
'buildConfigurationList': support_xccl,
|
||||||
'name': target_name + ' Support',
|
'name': target_name + support_target_suffix,
|
||||||
}
|
}
|
||||||
if target_product_name:
|
if target_product_name:
|
||||||
support_target_properties['productName'] = \
|
support_target_properties['productName'] = \
|
||||||
|
@ -1096,6 +1127,9 @@ exit 1
|
||||||
# Relative paths are relative to $(SRCROOT).
|
# Relative paths are relative to $(SRCROOT).
|
||||||
dest = '$(SRCROOT)/' + dest
|
dest = '$(SRCROOT)/' + dest
|
||||||
|
|
||||||
|
code_sign = int(copy_group.get('xcode_code_sign', 0))
|
||||||
|
settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign];
|
||||||
|
|
||||||
# Coalesce multiple "copies" sections in the same target with the same
|
# Coalesce multiple "copies" sections in the same target with the same
|
||||||
# "destination" property into the same PBXCopyFilesBuildPhase, otherwise
|
# "destination" property into the same PBXCopyFilesBuildPhase, otherwise
|
||||||
# they'll wind up with ID collisions.
|
# they'll wind up with ID collisions.
|
||||||
|
@ -1114,7 +1148,7 @@ exit 1
|
||||||
pbxcp_dict[dest] = pbxcp
|
pbxcp_dict[dest] = pbxcp
|
||||||
|
|
||||||
for file in copy_group['files']:
|
for file in copy_group['files']:
|
||||||
pbxcp.AddFile(file)
|
pbxcp.AddFile(file, settings)
|
||||||
|
|
||||||
# Excluded files can also go into the project file.
|
# Excluded files can also go into the project file.
|
||||||
if not skip_excluded_files:
|
if not skip_excluded_files:
|
||||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -44,16 +44,16 @@ class TestFindCycles(unittest.TestCase):
|
||||||
def test_cycle_self_reference(self):
|
def test_cycle_self_reference(self):
|
||||||
self._create_dependency(self.nodes['a'], self.nodes['a'])
|
self._create_dependency(self.nodes['a'], self.nodes['a'])
|
||||||
|
|
||||||
self.assertEquals([(self.nodes['a'], self.nodes['a'])],
|
self.assertEquals([[self.nodes['a'], self.nodes['a']]],
|
||||||
self.nodes['a'].FindCycles())
|
self.nodes['a'].FindCycles())
|
||||||
|
|
||||||
def test_cycle_two_nodes(self):
|
def test_cycle_two_nodes(self):
|
||||||
self._create_dependency(self.nodes['a'], self.nodes['b'])
|
self._create_dependency(self.nodes['a'], self.nodes['b'])
|
||||||
self._create_dependency(self.nodes['b'], self.nodes['a'])
|
self._create_dependency(self.nodes['b'], self.nodes['a'])
|
||||||
|
|
||||||
self.assertEquals([(self.nodes['a'], self.nodes['b'], self.nodes['a'])],
|
self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
|
||||||
self.nodes['a'].FindCycles())
|
self.nodes['a'].FindCycles())
|
||||||
self.assertEquals([(self.nodes['b'], self.nodes['a'], self.nodes['b'])],
|
self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
|
||||||
self.nodes['b'].FindCycles())
|
self.nodes['b'].FindCycles())
|
||||||
|
|
||||||
def test_two_cycles(self):
|
def test_two_cycles(self):
|
||||||
|
@ -65,9 +65,9 @@ class TestFindCycles(unittest.TestCase):
|
||||||
|
|
||||||
cycles = self.nodes['a'].FindCycles()
|
cycles = self.nodes['a'].FindCycles()
|
||||||
self.assertTrue(
|
self.assertTrue(
|
||||||
(self.nodes['a'], self.nodes['b'], self.nodes['a']) in cycles)
|
[self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
|
||||||
self.assertTrue(
|
self.assertTrue(
|
||||||
(self.nodes['b'], self.nodes['c'], self.nodes['b']) in cycles)
|
[self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
|
||||||
self.assertEquals(2, len(cycles))
|
self.assertEquals(2, len(cycles))
|
||||||
|
|
||||||
def test_big_cycle(self):
|
def test_big_cycle(self):
|
||||||
|
@ -77,12 +77,12 @@ class TestFindCycles(unittest.TestCase):
|
||||||
self._create_dependency(self.nodes['d'], self.nodes['e'])
|
self._create_dependency(self.nodes['d'], self.nodes['e'])
|
||||||
self._create_dependency(self.nodes['e'], self.nodes['a'])
|
self._create_dependency(self.nodes['e'], self.nodes['a'])
|
||||||
|
|
||||||
self.assertEquals([(self.nodes['a'],
|
self.assertEquals([[self.nodes['a'],
|
||||||
self.nodes['b'],
|
self.nodes['b'],
|
||||||
self.nodes['c'],
|
self.nodes['c'],
|
||||||
self.nodes['d'],
|
self.nodes['d'],
|
||||||
self.nodes['e'],
|
self.nodes['e'],
|
||||||
self.nodes['a'])],
|
self.nodes['a']]],
|
||||||
self.nodes['a'].FindCycles())
|
self.nodes['a'].FindCycles())
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -45,7 +45,7 @@ class MacTool(object):
|
||||||
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
|
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
|
||||||
return name_string.title().replace('-', '')
|
return name_string.title().replace('-', '')
|
||||||
|
|
||||||
def ExecCopyBundleResource(self, source, dest):
|
def ExecCopyBundleResource(self, source, dest, convert_to_binary):
|
||||||
"""Copies a resource file to the bundle/Resources directory, performing any
|
"""Copies a resource file to the bundle/Resources directory, performing any
|
||||||
necessary compilation on each resource."""
|
necessary compilation on each resource."""
|
||||||
extension = os.path.splitext(source)[1].lower()
|
extension = os.path.splitext(source)[1].lower()
|
||||||
|
@ -62,7 +62,7 @@ class MacTool(object):
|
||||||
elif extension == '.storyboard':
|
elif extension == '.storyboard':
|
||||||
return self._CopyXIBFile(source, dest)
|
return self._CopyXIBFile(source, dest)
|
||||||
elif extension == '.strings':
|
elif extension == '.strings':
|
||||||
self._CopyStringsFile(source, dest)
|
self._CopyStringsFile(source, dest, convert_to_binary)
|
||||||
else:
|
else:
|
||||||
shutil.copy(source, dest)
|
shutil.copy(source, dest)
|
||||||
|
|
||||||
|
@ -92,7 +92,11 @@ class MacTool(object):
|
||||||
sys.stdout.write(line)
|
sys.stdout.write(line)
|
||||||
return ibtoolout.returncode
|
return ibtoolout.returncode
|
||||||
|
|
||||||
def _CopyStringsFile(self, source, dest):
|
def _ConvertToBinary(self, dest):
|
||||||
|
subprocess.check_call([
|
||||||
|
'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
|
||||||
|
|
||||||
|
def _CopyStringsFile(self, source, dest, convert_to_binary):
|
||||||
"""Copies a .strings file using iconv to reconvert the input into UTF-16."""
|
"""Copies a .strings file using iconv to reconvert the input into UTF-16."""
|
||||||
input_code = self._DetectInputEncoding(source) or "UTF-8"
|
input_code = self._DetectInputEncoding(source) or "UTF-8"
|
||||||
|
|
||||||
|
@ -112,6 +116,9 @@ class MacTool(object):
|
||||||
fp.write(s.decode(input_code).encode('UTF-16'))
|
fp.write(s.decode(input_code).encode('UTF-16'))
|
||||||
fp.close()
|
fp.close()
|
||||||
|
|
||||||
|
if convert_to_binary == 'True':
|
||||||
|
self._ConvertToBinary(dest)
|
||||||
|
|
||||||
def _DetectInputEncoding(self, file_name):
|
def _DetectInputEncoding(self, file_name):
|
||||||
"""Reads the first few bytes from file_name and tries to guess the text
|
"""Reads the first few bytes from file_name and tries to guess the text
|
||||||
encoding. Returns None as a guess if it can't detect it."""
|
encoding. Returns None as a guess if it can't detect it."""
|
||||||
|
@ -131,7 +138,7 @@ class MacTool(object):
|
||||||
else:
|
else:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def ExecCopyInfoPlist(self, source, dest, *keys):
|
def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
|
||||||
"""Copies the |source| Info.plist to the destination directory |dest|."""
|
"""Copies the |source| Info.plist to the destination directory |dest|."""
|
||||||
# Read the source Info.plist into memory.
|
# Read the source Info.plist into memory.
|
||||||
fd = open(source, 'r')
|
fd = open(source, 'r')
|
||||||
|
@ -146,7 +153,7 @@ class MacTool(object):
|
||||||
|
|
||||||
# Go through all the environment variables and replace them as variables in
|
# Go through all the environment variables and replace them as variables in
|
||||||
# the file.
|
# the file.
|
||||||
IDENT_RE = re.compile('[/\s]')
|
IDENT_RE = re.compile(r'[/\s]')
|
||||||
for key in os.environ:
|
for key in os.environ:
|
||||||
if key.startswith('_'):
|
if key.startswith('_'):
|
||||||
continue
|
continue
|
||||||
|
@ -185,6 +192,9 @@ class MacTool(object):
|
||||||
# "compiled".
|
# "compiled".
|
||||||
self._WritePkgInfo(dest)
|
self._WritePkgInfo(dest)
|
||||||
|
|
||||||
|
if convert_to_binary == 'True':
|
||||||
|
self._ConvertToBinary(dest)
|
||||||
|
|
||||||
def _WritePkgInfo(self, info_plist):
|
def _WritePkgInfo(self, info_plist):
|
||||||
"""This writes the PkgInfo file from the data stored in Info.plist."""
|
"""This writes the PkgInfo file from the data stored in Info.plist."""
|
||||||
plist = plistlib.readPlist(info_plist)
|
plist = plistlib.readPlist(info_plist)
|
||||||
|
@ -219,11 +229,28 @@ class MacTool(object):
|
||||||
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
|
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
|
||||||
symbols'."""
|
symbols'."""
|
||||||
libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
|
libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
|
||||||
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE)
|
libtool_re5 = re.compile(
|
||||||
|
r'^.*libtool: warning for library: ' +
|
||||||
|
r'.* the table of contents is empty ' +
|
||||||
|
r'\(no object file members in the library define global symbols\)$')
|
||||||
|
env = os.environ.copy()
|
||||||
|
# Ref:
|
||||||
|
# http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
|
||||||
|
# The problem with this flag is that it resets the file mtime on the file to
|
||||||
|
# epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
|
||||||
|
env['ZERO_AR_DATE'] = '1'
|
||||||
|
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
|
||||||
_, err = libtoolout.communicate()
|
_, err = libtoolout.communicate()
|
||||||
for line in err.splitlines():
|
for line in err.splitlines():
|
||||||
if not libtool_re.match(line):
|
if not libtool_re.match(line) and not libtool_re5.match(line):
|
||||||
print >>sys.stderr, line
|
print >>sys.stderr, line
|
||||||
|
# Unconditionally touch the output .a file on the command line if present
|
||||||
|
# and the command succeeded. A bit hacky.
|
||||||
|
if not libtoolout.returncode:
|
||||||
|
for i in range(len(cmd_list) - 1):
|
||||||
|
if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
|
||||||
|
os.utime(cmd_list[i+1], None)
|
||||||
|
break
|
||||||
return libtoolout.returncode
|
return libtoolout.returncode
|
||||||
|
|
||||||
def ExecPackageFramework(self, framework, version):
|
def ExecPackageFramework(self, framework, version):
|
||||||
|
@ -262,6 +289,66 @@ class MacTool(object):
|
||||||
os.remove(link)
|
os.remove(link)
|
||||||
os.symlink(dest, link)
|
os.symlink(dest, link)
|
||||||
|
|
||||||
|
def ExecCompileXcassets(self, keys, *inputs):
|
||||||
|
"""Compiles multiple .xcassets files into a single .car file.
|
||||||
|
|
||||||
|
This invokes 'actool' to compile all the inputs .xcassets files. The
|
||||||
|
|keys| arguments is a json-encoded dictionary of extra arguments to
|
||||||
|
pass to 'actool' when the asset catalogs contains an application icon
|
||||||
|
or a launch image.
|
||||||
|
|
||||||
|
Note that 'actool' does not create the Assets.car file if the asset
|
||||||
|
catalogs does not contains imageset.
|
||||||
|
"""
|
||||||
|
command_line = [
|
||||||
|
'xcrun', 'actool', '--output-format', 'human-readable-text',
|
||||||
|
'--compress-pngs', '--notices', '--warnings', '--errors',
|
||||||
|
]
|
||||||
|
is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
|
||||||
|
if is_iphone_target:
|
||||||
|
platform = os.environ['CONFIGURATION'].split('-')[-1]
|
||||||
|
if platform not in ('iphoneos', 'iphonesimulator'):
|
||||||
|
platform = 'iphonesimulator'
|
||||||
|
command_line.extend([
|
||||||
|
'--platform', platform, '--target-device', 'iphone',
|
||||||
|
'--target-device', 'ipad', '--minimum-deployment-target',
|
||||||
|
os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
|
||||||
|
os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
|
||||||
|
])
|
||||||
|
else:
|
||||||
|
command_line.extend([
|
||||||
|
'--platform', 'macosx', '--target-device', 'mac',
|
||||||
|
'--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
|
||||||
|
'--compile',
|
||||||
|
os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
|
||||||
|
])
|
||||||
|
if keys:
|
||||||
|
keys = json.loads(keys)
|
||||||
|
for key, value in keys.iteritems():
|
||||||
|
arg_name = '--' + key
|
||||||
|
if isinstance(value, bool):
|
||||||
|
if value:
|
||||||
|
command_line.append(arg_name)
|
||||||
|
elif isinstance(value, list):
|
||||||
|
for v in value:
|
||||||
|
command_line.append(arg_name)
|
||||||
|
command_line.append(str(v))
|
||||||
|
else:
|
||||||
|
command_line.append(arg_name)
|
||||||
|
command_line.append(str(value))
|
||||||
|
# Note: actool crashes if inputs path are relative, so use os.path.abspath
|
||||||
|
# to get absolute path name for inputs.
|
||||||
|
command_line.extend(map(os.path.abspath, inputs))
|
||||||
|
subprocess.check_call(command_line)
|
||||||
|
|
||||||
|
def ExecMergeInfoPlist(self, output, *inputs):
|
||||||
|
"""Merge multiple .plist files into a single .plist file."""
|
||||||
|
merged_plist = {}
|
||||||
|
for path in inputs:
|
||||||
|
plist = self._LoadPlistMaybeBinary(path)
|
||||||
|
self._MergePlist(merged_plist, plist)
|
||||||
|
plistlib.writePlist(merged_plist, output)
|
||||||
|
|
||||||
def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
|
def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
|
||||||
"""Code sign a bundle.
|
"""Code sign a bundle.
|
||||||
|
|
||||||
|
@ -398,6 +485,19 @@ class MacTool(object):
|
||||||
'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
|
'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
|
||||||
return self._LoadPlistMaybeBinary(temp.name)
|
return self._LoadPlistMaybeBinary(temp.name)
|
||||||
|
|
||||||
|
def _MergePlist(self, merged_plist, plist):
|
||||||
|
"""Merge |plist| into |merged_plist|."""
|
||||||
|
for key, value in plist.iteritems():
|
||||||
|
if isinstance(value, dict):
|
||||||
|
merged_value = merged_plist.get(key, {})
|
||||||
|
if isinstance(merged_value, dict):
|
||||||
|
self._MergePlist(merged_value, value)
|
||||||
|
merged_plist[key] = merged_value
|
||||||
|
else:
|
||||||
|
merged_plist[key] = value
|
||||||
|
else:
|
||||||
|
merged_plist[key] = value
|
||||||
|
|
||||||
def _LoadPlistMaybeBinary(self, plist_path):
|
def _LoadPlistMaybeBinary(self, plist_path):
|
||||||
"""Loads into a memory a plist possibly encoded in binary format.
|
"""Loads into a memory a plist possibly encoded in binary format.
|
||||||
|
|
||||||
|
|
|
@ -12,10 +12,14 @@ import re
|
||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
from gyp.common import OrderedSet
|
||||||
|
import gyp.MSVSUtil
|
||||||
import gyp.MSVSVersion
|
import gyp.MSVSVersion
|
||||||
|
|
||||||
|
|
||||||
windows_quoter_regex = re.compile(r'(\\*)"')
|
windows_quoter_regex = re.compile(r'(\\*)"')
|
||||||
|
|
||||||
|
|
||||||
def QuoteForRspFile(arg):
|
def QuoteForRspFile(arg):
|
||||||
"""Quote a command line argument so that it appears as one argument when
|
"""Quote a command line argument so that it appears as one argument when
|
||||||
processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
|
processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
|
||||||
|
@ -131,6 +135,54 @@ def _FindDirectXInstallation():
|
||||||
return dxsdk_dir
|
return dxsdk_dir
|
||||||
|
|
||||||
|
|
||||||
|
def GetGlobalVSMacroEnv(vs_version):
|
||||||
|
"""Get a dict of variables mapping internal VS macro names to their gyp
|
||||||
|
equivalents. Returns all variables that are independent of the target."""
|
||||||
|
env = {}
|
||||||
|
# '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
|
||||||
|
# Visual Studio is actually installed.
|
||||||
|
if vs_version.Path():
|
||||||
|
env['$(VSInstallDir)'] = vs_version.Path()
|
||||||
|
env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\'
|
||||||
|
# Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
|
||||||
|
# set. This happens when the SDK is sync'd via src-internal, rather than
|
||||||
|
# by typical end-user installation of the SDK. If it's not set, we don't
|
||||||
|
# want to leave the unexpanded variable in the path, so simply strip it.
|
||||||
|
dxsdk_dir = _FindDirectXInstallation()
|
||||||
|
env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else ''
|
||||||
|
# Try to find an installation location for the Windows DDK by checking
|
||||||
|
# the WDK_DIR environment variable, may be None.
|
||||||
|
env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
|
||||||
|
return env
|
||||||
|
|
||||||
|
def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
|
||||||
|
"""Finds msvs_system_include_dirs that are common to all targets, removes
|
||||||
|
them from all targets, and returns an OrderedSet containing them."""
|
||||||
|
all_system_includes = OrderedSet(
|
||||||
|
configs[0].get('msvs_system_include_dirs', []))
|
||||||
|
for config in configs[1:]:
|
||||||
|
system_includes = config.get('msvs_system_include_dirs', [])
|
||||||
|
all_system_includes = all_system_includes & OrderedSet(system_includes)
|
||||||
|
if not all_system_includes:
|
||||||
|
return None
|
||||||
|
# Expand macros in all_system_includes.
|
||||||
|
env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
|
||||||
|
expanded_system_includes = OrderedSet([ExpandMacros(include, env)
|
||||||
|
for include in all_system_includes])
|
||||||
|
if any(['$' in include for include in expanded_system_includes]):
|
||||||
|
# Some path relies on target-specific variables, bail.
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Remove system includes shared by all targets from the targets.
|
||||||
|
for config in configs:
|
||||||
|
includes = config.get('msvs_system_include_dirs', [])
|
||||||
|
if includes: # Don't insert a msvs_system_include_dirs key if not needed.
|
||||||
|
# This must check the unexpanded includes list:
|
||||||
|
new_includes = [i for i in includes if i not in all_system_includes]
|
||||||
|
config['msvs_system_include_dirs'] = new_includes
|
||||||
|
return expanded_system_includes
|
||||||
|
|
||||||
|
|
||||||
class MsvsSettings(object):
|
class MsvsSettings(object):
|
||||||
"""A class that understands the gyp 'msvs_...' values (especially the
|
"""A class that understands the gyp 'msvs_...' values (especially the
|
||||||
msvs_settings field). They largely correpond to the VS2008 IDE DOM. This
|
msvs_settings field). They largely correpond to the VS2008 IDE DOM. This
|
||||||
|
@ -139,11 +191,6 @@ class MsvsSettings(object):
|
||||||
def __init__(self, spec, generator_flags):
|
def __init__(self, spec, generator_flags):
|
||||||
self.spec = spec
|
self.spec = spec
|
||||||
self.vs_version = GetVSVersion(generator_flags)
|
self.vs_version = GetVSVersion(generator_flags)
|
||||||
self.dxsdk_dir = _FindDirectXInstallation()
|
|
||||||
|
|
||||||
# Try to find an installation location for the Windows DDK by checking
|
|
||||||
# the WDK_DIR environment variable, may be None.
|
|
||||||
self.wdk_dir = os.environ.get('WDK_DIR')
|
|
||||||
|
|
||||||
supported_fields = [
|
supported_fields = [
|
||||||
('msvs_configuration_attributes', dict),
|
('msvs_configuration_attributes', dict),
|
||||||
|
@ -163,6 +210,30 @@ class MsvsSettings(object):
|
||||||
|
|
||||||
self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
|
self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
|
||||||
|
|
||||||
|
unsupported_fields = [
|
||||||
|
'msvs_prebuild',
|
||||||
|
'msvs_postbuild',
|
||||||
|
]
|
||||||
|
unsupported = []
|
||||||
|
for field in unsupported_fields:
|
||||||
|
for config in configs.values():
|
||||||
|
if field in config:
|
||||||
|
unsupported += ["%s not supported (target %s)." %
|
||||||
|
(field, spec['target_name'])]
|
||||||
|
if unsupported:
|
||||||
|
raise Exception('\n'.join(unsupported))
|
||||||
|
|
||||||
|
def GetExtension(self):
|
||||||
|
"""Returns the extension for the target, with no leading dot.
|
||||||
|
|
||||||
|
Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
|
||||||
|
the target type.
|
||||||
|
"""
|
||||||
|
ext = self.spec.get('product_extension', None)
|
||||||
|
if ext:
|
||||||
|
return ext
|
||||||
|
return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
|
||||||
|
|
||||||
def GetVSMacroEnv(self, base_to_build=None, config=None):
|
def GetVSMacroEnv(self, base_to_build=None, config=None):
|
||||||
"""Get a dict of variables mapping internal VS macro names to their gyp
|
"""Get a dict of variables mapping internal VS macro names to their gyp
|
||||||
equivalents."""
|
equivalents."""
|
||||||
|
@ -170,29 +241,24 @@ class MsvsSettings(object):
|
||||||
target_name = self.spec.get('product_prefix', '') + \
|
target_name = self.spec.get('product_prefix', '') + \
|
||||||
self.spec.get('product_name', self.spec['target_name'])
|
self.spec.get('product_name', self.spec['target_name'])
|
||||||
target_dir = base_to_build + '\\' if base_to_build else ''
|
target_dir = base_to_build + '\\' if base_to_build else ''
|
||||||
|
target_ext = '.' + self.GetExtension()
|
||||||
|
target_file_name = target_name + target_ext
|
||||||
|
|
||||||
replacements = {
|
replacements = {
|
||||||
'$(OutDir)\\': target_dir,
|
|
||||||
'$(TargetDir)\\': target_dir,
|
|
||||||
'$(IntDir)': '$!INTERMEDIATE_DIR',
|
|
||||||
'$(InputPath)': '${source}',
|
|
||||||
'$(InputName)': '${root}',
|
'$(InputName)': '${root}',
|
||||||
'$(ProjectName)': self.spec['target_name'],
|
'$(InputPath)': '${source}',
|
||||||
'$(TargetName)': target_name,
|
'$(IntDir)': '$!INTERMEDIATE_DIR',
|
||||||
|
'$(OutDir)\\': target_dir,
|
||||||
'$(PlatformName)': target_platform,
|
'$(PlatformName)': target_platform,
|
||||||
'$(ProjectDir)\\': '',
|
'$(ProjectDir)\\': '',
|
||||||
|
'$(ProjectName)': self.spec['target_name'],
|
||||||
|
'$(TargetDir)\\': target_dir,
|
||||||
|
'$(TargetExt)': target_ext,
|
||||||
|
'$(TargetFileName)': target_file_name,
|
||||||
|
'$(TargetName)': target_name,
|
||||||
|
'$(TargetPath)': os.path.join(target_dir, target_file_name),
|
||||||
}
|
}
|
||||||
# '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
|
replacements.update(GetGlobalVSMacroEnv(self.vs_version))
|
||||||
# Visual Studio is actually installed.
|
|
||||||
if self.vs_version.Path():
|
|
||||||
replacements['$(VSInstallDir)'] = self.vs_version.Path()
|
|
||||||
replacements['$(VCInstallDir)'] = os.path.join(self.vs_version.Path(),
|
|
||||||
'VC') + '\\'
|
|
||||||
# Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
|
|
||||||
# set. This happens when the SDK is sync'd via src-internal, rather than
|
|
||||||
# by typical end-user installation of the SDK. If it's not set, we don't
|
|
||||||
# want to leave the unexpanded variable in the path, so simply strip it.
|
|
||||||
replacements['$(DXSDK_DIR)'] = self.dxsdk_dir if self.dxsdk_dir else ''
|
|
||||||
replacements['$(WDK_DIR)'] = self.wdk_dir if self.wdk_dir else ''
|
|
||||||
return replacements
|
return replacements
|
||||||
|
|
||||||
def ConvertVSMacros(self, s, base_to_build=None, config=None):
|
def ConvertVSMacros(self, s, base_to_build=None, config=None):
|
||||||
|
@ -272,6 +338,15 @@ class MsvsSettings(object):
|
||||||
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
|
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
|
||||||
return [self.ConvertVSMacros(p, config=config) for p in includes]
|
return [self.ConvertVSMacros(p, config=config) for p in includes]
|
||||||
|
|
||||||
|
def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
|
||||||
|
"""Updates midl_include_dirs to expand VS specific paths, and adds the
|
||||||
|
system include dirs used for platform SDK and similar."""
|
||||||
|
config = self._TargetConfig(config)
|
||||||
|
includes = midl_include_dirs + self.msvs_system_include_dirs[config]
|
||||||
|
includes.extend(self._Setting(
|
||||||
|
('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
|
||||||
|
return [self.ConvertVSMacros(p, config=config) for p in includes]
|
||||||
|
|
||||||
def GetComputedDefines(self, config):
|
def GetComputedDefines(self, config):
|
||||||
"""Returns the set of defines that are injected to the defines list based
|
"""Returns the set of defines that are injected to the defines list based
|
||||||
on other VS settings."""
|
on other VS settings."""
|
||||||
|
@ -324,7 +399,7 @@ class MsvsSettings(object):
|
||||||
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
|
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
|
||||||
generate_debug_info = self._Setting(
|
generate_debug_info = self._Setting(
|
||||||
('VCLinkerTool', 'GenerateDebugInformation'), config)
|
('VCLinkerTool', 'GenerateDebugInformation'), config)
|
||||||
if generate_debug_info:
|
if generate_debug_info == 'true':
|
||||||
if output_file:
|
if output_file:
|
||||||
return expand_special(self.ConvertVSMacros(output_file, config=config))
|
return expand_special(self.ConvertVSMacros(output_file, config=config))
|
||||||
else:
|
else:
|
||||||
|
@ -332,6 +407,22 @@ class MsvsSettings(object):
|
||||||
else:
|
else:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
def GetNoImportLibrary(self, config):
|
||||||
|
"""If NoImportLibrary: true, ninja will not expect the output to include
|
||||||
|
an import library."""
|
||||||
|
config = self._TargetConfig(config)
|
||||||
|
noimplib = self._Setting(('NoImportLibrary',), config)
|
||||||
|
return noimplib == 'true'
|
||||||
|
|
||||||
|
def GetAsmflags(self, config):
|
||||||
|
"""Returns the flags that need to be added to ml invocations."""
|
||||||
|
config = self._TargetConfig(config)
|
||||||
|
asmflags = []
|
||||||
|
safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
|
||||||
|
if safeseh == 'true':
|
||||||
|
asmflags.append('/safeseh')
|
||||||
|
return asmflags
|
||||||
|
|
||||||
def GetCflags(self, config):
|
def GetCflags(self, config):
|
||||||
"""Returns the flags that need to be added to .c and .cc compilations."""
|
"""Returns the flags that need to be added to .c and .cc compilations."""
|
||||||
config = self._TargetConfig(config)
|
config = self._TargetConfig(config)
|
||||||
|
@ -348,9 +439,14 @@ class MsvsSettings(object):
|
||||||
cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
|
cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
|
||||||
cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
|
cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
|
||||||
cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
|
cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
|
||||||
|
cl('FloatingPointModel',
|
||||||
|
map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
|
||||||
|
default='0')
|
||||||
cl('WholeProgramOptimization', map={'true': '/GL'})
|
cl('WholeProgramOptimization', map={'true': '/GL'})
|
||||||
cl('WarningLevel', prefix='/W')
|
cl('WarningLevel', prefix='/W')
|
||||||
cl('WarnAsError', map={'true': '/WX'})
|
cl('WarnAsError', map={'true': '/WX'})
|
||||||
|
cl('CallingConvention',
|
||||||
|
map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
|
||||||
cl('DebugInformationFormat',
|
cl('DebugInformationFormat',
|
||||||
map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
|
map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
|
||||||
cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
|
cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
|
||||||
|
@ -366,21 +462,18 @@ class MsvsSettings(object):
|
||||||
map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
|
map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
|
||||||
cl('EnablePREfast', map={'true': '/analyze'})
|
cl('EnablePREfast', map={'true': '/analyze'})
|
||||||
cl('AdditionalOptions', prefix='')
|
cl('AdditionalOptions', prefix='')
|
||||||
|
cl('EnableEnhancedInstructionSet',
|
||||||
|
map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
|
||||||
|
prefix='/arch:')
|
||||||
cflags.extend(['/FI' + f for f in self._Setting(
|
cflags.extend(['/FI' + f for f in self._Setting(
|
||||||
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
|
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
|
||||||
if self.vs_version.short_name in ('2013', '2013e'):
|
if self.vs_version.short_name in ('2013', '2013e', '2015'):
|
||||||
# New flag required in 2013 to maintain previous PDB behavior.
|
# New flag required in 2013 to maintain previous PDB behavior.
|
||||||
cflags.append('/FS')
|
cflags.append('/FS')
|
||||||
# ninja handles parallelism by itself, don't have the compiler do it too.
|
# ninja handles parallelism by itself, don't have the compiler do it too.
|
||||||
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
|
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
|
||||||
return cflags
|
return cflags
|
||||||
|
|
||||||
def GetPrecompiledHeader(self, config, gyp_to_build_path):
|
|
||||||
"""Returns an object that handles the generation of precompiled header
|
|
||||||
build steps."""
|
|
||||||
config = self._TargetConfig(config)
|
|
||||||
return _PchHelper(self, config, gyp_to_build_path)
|
|
||||||
|
|
||||||
def _GetPchFlags(self, config, extension):
|
def _GetPchFlags(self, config, extension):
|
||||||
"""Get the flags to be added to the cflags for precompiled header support.
|
"""Get the flags to be added to the cflags for precompiled header support.
|
||||||
"""
|
"""
|
||||||
|
@ -425,7 +518,8 @@ class MsvsSettings(object):
|
||||||
libflags.extend(self._GetAdditionalLibraryDirectories(
|
libflags.extend(self._GetAdditionalLibraryDirectories(
|
||||||
'VCLibrarianTool', config, gyp_to_build_path))
|
'VCLibrarianTool', config, gyp_to_build_path))
|
||||||
lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
|
lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
|
||||||
lib('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
|
lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
|
||||||
|
prefix='/MACHINE:')
|
||||||
lib('AdditionalOptions')
|
lib('AdditionalOptions')
|
||||||
return libflags
|
return libflags
|
||||||
|
|
||||||
|
@ -468,7 +562,8 @@ class MsvsSettings(object):
|
||||||
'VCLinkerTool', append=ldflags)
|
'VCLinkerTool', append=ldflags)
|
||||||
self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
|
self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
|
||||||
ld('GenerateDebugInformation', map={'true': '/DEBUG'})
|
ld('GenerateDebugInformation', map={'true': '/DEBUG'})
|
||||||
ld('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
|
ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
|
||||||
|
prefix='/MACHINE:')
|
||||||
ldflags.extend(self._GetAdditionalLibraryDirectories(
|
ldflags.extend(self._GetAdditionalLibraryDirectories(
|
||||||
'VCLinkerTool', config, gyp_to_build_path))
|
'VCLinkerTool', config, gyp_to_build_path))
|
||||||
ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
|
ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
|
||||||
|
@ -522,6 +617,14 @@ class MsvsSettings(object):
|
||||||
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
|
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
|
||||||
ld('AdditionalDependencies', prefix='')
|
ld('AdditionalDependencies', prefix='')
|
||||||
|
|
||||||
|
if self.GetArch(config) == 'x86':
|
||||||
|
safeseh_default = 'true'
|
||||||
|
else:
|
||||||
|
safeseh_default = None
|
||||||
|
ld('ImageHasSafeExceptionHandlers',
|
||||||
|
map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
|
||||||
|
default=safeseh_default)
|
||||||
|
|
||||||
# If the base address is not specifically controlled, DYNAMICBASE should
|
# If the base address is not specifically controlled, DYNAMICBASE should
|
||||||
# be on by default.
|
# be on by default.
|
||||||
base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
|
base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
|
||||||
|
@ -708,10 +811,16 @@ class MsvsSettings(object):
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def HasExplicitIdlRules(self, spec):
|
def _HasExplicitIdlActions(self, spec):
|
||||||
"""Determine if there's an explicit rule for idl files. When there isn't we
|
"""Determine if an action should not run midl for .idl files."""
|
||||||
need to generate implicit rules to build MIDL .idl files."""
|
return any([action.get('explicit_idl_action', 0)
|
||||||
return self._HasExplicitRuleForExtension(spec, 'idl')
|
for action in spec.get('actions', [])])
|
||||||
|
|
||||||
|
def HasExplicitIdlRulesOrActions(self, spec):
|
||||||
|
"""Determine if there's an explicit rule or action for idl files. When
|
||||||
|
there isn't we need to generate implicit rules to build MIDL .idl files."""
|
||||||
|
return (self._HasExplicitRuleForExtension(spec, 'idl') or
|
||||||
|
self._HasExplicitIdlActions(spec))
|
||||||
|
|
||||||
def HasExplicitAsmRules(self, spec):
|
def HasExplicitAsmRules(self, spec):
|
||||||
"""Determine if there's an explicit rule for asm files. When there isn't we
|
"""Determine if there's an explicit rule for asm files. When there isn't we
|
||||||
|
@ -774,7 +883,7 @@ class PrecompiledHeader(object):
|
||||||
def GetObjDependencies(self, sources, objs, arch):
|
def GetObjDependencies(self, sources, objs, arch):
|
||||||
"""Given a list of sources files and the corresponding object files,
|
"""Given a list of sources files and the corresponding object files,
|
||||||
returns a list of the pch files that should be depended upon. The
|
returns a list of the pch files that should be depended upon. The
|
||||||
additional wrapping in the return value is for interface compatability
|
additional wrapping in the return value is for interface compatibility
|
||||||
with make.py on Mac, and xcode_emulation.py."""
|
with make.py on Mac, and xcode_emulation.py."""
|
||||||
assert arch is None
|
assert arch is None
|
||||||
if not self._PchHeader():
|
if not self._PchHeader():
|
||||||
|
@ -810,7 +919,8 @@ def GetVSVersion(generator_flags):
|
||||||
global vs_version
|
global vs_version
|
||||||
if not vs_version:
|
if not vs_version:
|
||||||
vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
|
vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
|
||||||
generator_flags.get('msvs_version', 'auto'))
|
generator_flags.get('msvs_version', 'auto'),
|
||||||
|
allow_fallback=False)
|
||||||
return vs_version
|
return vs_version
|
||||||
|
|
||||||
def _GetVsvarsSetupArgs(generator_flags, arch):
|
def _GetVsvarsSetupArgs(generator_flags, arch):
|
||||||
|
@ -878,7 +988,8 @@ def _ExtractCLPath(output_of_where):
|
||||||
if line.startswith('LOC:'):
|
if line.startswith('LOC:'):
|
||||||
return line[len('LOC:'):].strip()
|
return line[len('LOC:'):].strip()
|
||||||
|
|
||||||
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
|
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
|
||||||
|
system_includes, open_out):
|
||||||
"""It's not sufficient to have the absolute path to the compiler, linker,
|
"""It's not sufficient to have the absolute path to the compiler, linker,
|
||||||
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
|
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
|
||||||
need to support both x86 and x64 compilers within the same build (to support
|
need to support both x86 and x64 compilers within the same build (to support
|
||||||
|
@ -909,6 +1020,13 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
|
||||||
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
variables, _ = popen.communicate()
|
variables, _ = popen.communicate()
|
||||||
env = _ExtractImportantEnvironment(variables)
|
env = _ExtractImportantEnvironment(variables)
|
||||||
|
|
||||||
|
# Inject system includes from gyp files into INCLUDE.
|
||||||
|
if system_includes:
|
||||||
|
system_includes = system_includes | OrderedSet(
|
||||||
|
env.get('INCLUDE', '').split(';'))
|
||||||
|
env['INCLUDE'] = ';'.join(system_includes)
|
||||||
|
|
||||||
env_block = _FormatAsEnvironmentBlock(env)
|
env_block = _FormatAsEnvironmentBlock(env)
|
||||||
f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
|
f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
|
||||||
f.write(env_block)
|
f.write(env_block)
|
||||||
|
|
46
gyp/pylib/gyp/simple_copy.py
Normal file
46
gyp/pylib/gyp/simple_copy.py
Normal file
|
@ -0,0 +1,46 @@
|
||||||
|
# Copyright 2014 Google Inc. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
"""A clone of the default copy.deepcopy that doesn't handle cyclic
|
||||||
|
structures or complex types except for dicts and lists. This is
|
||||||
|
because gyp copies so large structure that small copy overhead ends up
|
||||||
|
taking seconds in a project the size of Chromium."""
|
||||||
|
|
||||||
|
class Error(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
__all__ = ["Error", "deepcopy"]
|
||||||
|
|
||||||
|
def deepcopy(x):
|
||||||
|
"""Deep copy operation on gyp objects such as strings, ints, dicts
|
||||||
|
and lists. More than twice as fast as copy.deepcopy but much less
|
||||||
|
generic."""
|
||||||
|
|
||||||
|
try:
|
||||||
|
return _deepcopy_dispatch[type(x)](x)
|
||||||
|
except KeyError:
|
||||||
|
raise Error('Unsupported type %s for deepcopy. Use copy.deepcopy ' +
|
||||||
|
'or expand simple_copy support.' % type(x))
|
||||||
|
|
||||||
|
_deepcopy_dispatch = d = {}
|
||||||
|
|
||||||
|
def _deepcopy_atomic(x):
|
||||||
|
return x
|
||||||
|
|
||||||
|
for x in (type(None), int, long, float,
|
||||||
|
bool, str, unicode, type):
|
||||||
|
d[x] = _deepcopy_atomic
|
||||||
|
|
||||||
|
def _deepcopy_list(x):
|
||||||
|
return [deepcopy(a) for a in x]
|
||||||
|
d[list] = _deepcopy_list
|
||||||
|
|
||||||
|
def _deepcopy_dict(x):
|
||||||
|
y = {}
|
||||||
|
for key, value in x.iteritems():
|
||||||
|
y[deepcopy(key)] = deepcopy(value)
|
||||||
|
return y
|
||||||
|
d[dict] = _deepcopy_dict
|
||||||
|
|
||||||
|
del d
|
|
@ -13,6 +13,7 @@ import os
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
import subprocess
|
import subprocess
|
||||||
|
import stat
|
||||||
import string
|
import string
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
@ -48,7 +49,8 @@ class WinTool(object):
|
||||||
for arg in args:
|
for arg in args:
|
||||||
m = _LINK_EXE_OUT_ARG.match(arg)
|
m = _LINK_EXE_OUT_ARG.match(arg)
|
||||||
if m:
|
if m:
|
||||||
endpoint_name = '%s_%d' % (m.group('out'), os.getpid())
|
endpoint_name = re.sub(r'\W+', '',
|
||||||
|
'%s_%d' % (m.group('out'), os.getpid()))
|
||||||
break
|
break
|
||||||
|
|
||||||
if endpoint_name is None:
|
if endpoint_name is None:
|
||||||
|
@ -88,9 +90,19 @@ class WinTool(object):
|
||||||
"""Emulation of rm -rf out && cp -af in out."""
|
"""Emulation of rm -rf out && cp -af in out."""
|
||||||
if os.path.exists(dest):
|
if os.path.exists(dest):
|
||||||
if os.path.isdir(dest):
|
if os.path.isdir(dest):
|
||||||
shutil.rmtree(dest)
|
def _on_error(fn, path, excinfo):
|
||||||
|
# The operation failed, possibly because the file is set to
|
||||||
|
# read-only. If that's why, make it writable and try the op again.
|
||||||
|
if not os.access(path, os.W_OK):
|
||||||
|
os.chmod(path, stat.S_IWRITE)
|
||||||
|
fn(path)
|
||||||
|
shutil.rmtree(dest, onerror=_on_error)
|
||||||
else:
|
else:
|
||||||
|
if not os.access(dest, os.W_OK):
|
||||||
|
# Attempt to make the file writable before deleting it.
|
||||||
|
os.chmod(dest, stat.S_IWRITE)
|
||||||
os.unlink(dest)
|
os.unlink(dest)
|
||||||
|
|
||||||
if os.path.isdir(source):
|
if os.path.isdir(source):
|
||||||
shutil.copytree(source, dest)
|
shutil.copytree(source, dest)
|
||||||
else:
|
else:
|
||||||
|
@ -104,7 +116,7 @@ class WinTool(object):
|
||||||
env = self._GetEnv(arch)
|
env = self._GetEnv(arch)
|
||||||
if use_separate_mspdbsrv == 'True':
|
if use_separate_mspdbsrv == 'True':
|
||||||
self._UseSeparateMspdbsrv(env, args)
|
self._UseSeparateMspdbsrv(env, args)
|
||||||
link = subprocess.Popen(args,
|
link = subprocess.Popen([args[0].replace('/', '\\')] + list(args[1:]),
|
||||||
shell=True,
|
shell=True,
|
||||||
env=env,
|
env=env,
|
||||||
stdout=subprocess.PIPE,
|
stdout=subprocess.PIPE,
|
||||||
|
@ -236,19 +248,17 @@ class WinTool(object):
|
||||||
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
|
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
|
||||||
# objidl.idl
|
# objidl.idl
|
||||||
lines = out.splitlines()
|
lines = out.splitlines()
|
||||||
prefix = 'Processing '
|
prefixes = ('Processing ', '64 bit Processing ')
|
||||||
processing = set(os.path.basename(x) for x in lines if x.startswith(prefix))
|
processing = set(os.path.basename(x)
|
||||||
|
for x in lines if x.startswith(prefixes))
|
||||||
for line in lines:
|
for line in lines:
|
||||||
if not line.startswith(prefix) and line not in processing:
|
if not line.startswith(prefixes) and line not in processing:
|
||||||
print line
|
print line
|
||||||
return popen.returncode
|
return popen.returncode
|
||||||
|
|
||||||
def ExecAsmWrapper(self, arch, *args):
|
def ExecAsmWrapper(self, arch, *args):
|
||||||
"""Filter logo banner from invocations of asm.exe."""
|
"""Filter logo banner from invocations of asm.exe."""
|
||||||
env = self._GetEnv(arch)
|
env = self._GetEnv(arch)
|
||||||
# MSVS doesn't assemble x64 asm files.
|
|
||||||
if arch == 'environment.x64':
|
|
||||||
return 0
|
|
||||||
popen = subprocess.Popen(args, shell=True, env=env,
|
popen = subprocess.Popen(args, shell=True, env=env,
|
||||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
out, _ = popen.communicate()
|
out, _ = popen.communicate()
|
||||||
|
@ -287,5 +297,16 @@ class WinTool(object):
|
||||||
dir = dir[0] if dir else None
|
dir = dir[0] if dir else None
|
||||||
return subprocess.call(args, shell=True, env=env, cwd=dir)
|
return subprocess.call(args, shell=True, env=env, cwd=dir)
|
||||||
|
|
||||||
|
def ExecClCompile(self, project_dir, selected_files):
|
||||||
|
"""Executed by msvs-ninja projects when the 'ClCompile' target is used to
|
||||||
|
build selected C/C++ files."""
|
||||||
|
project_dir = os.path.relpath(project_dir, BASE_DIR)
|
||||||
|
selected_files = selected_files.split(';')
|
||||||
|
ninja_targets = [os.path.join(project_dir, filename) + '^^'
|
||||||
|
for filename in selected_files]
|
||||||
|
cmd = ['ninja.exe']
|
||||||
|
cmd.extend(ninja_targets)
|
||||||
|
return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
sys.exit(main(sys.argv[1:]))
|
sys.exit(main(sys.argv[1:]))
|
||||||
|
|
|
@ -18,6 +18,129 @@ import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
from gyp.common import GypError
|
from gyp.common import GypError
|
||||||
|
|
||||||
|
# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
|
||||||
|
# "xcodebuild" is called too quickly (it has been found to return incorrect
|
||||||
|
# version number).
|
||||||
|
XCODE_VERSION_CACHE = None
|
||||||
|
|
||||||
|
# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance
|
||||||
|
# corresponding to the installed version of Xcode.
|
||||||
|
XCODE_ARCHS_DEFAULT_CACHE = None
|
||||||
|
|
||||||
|
|
||||||
|
def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
|
||||||
|
"""Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable,
|
||||||
|
and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
|
||||||
|
mapping = {'$(ARCHS_STANDARD)': archs}
|
||||||
|
if archs_including_64_bit:
|
||||||
|
mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit
|
||||||
|
return mapping
|
||||||
|
|
||||||
|
class XcodeArchsDefault(object):
|
||||||
|
"""A class to resolve ARCHS variable from xcode_settings, resolving Xcode
|
||||||
|
macros and implementing filtering by VALID_ARCHS. The expansion of macros
|
||||||
|
depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
|
||||||
|
on the version of Xcode.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Match variable like $(ARCHS_STANDARD).
|
||||||
|
variable_pattern = re.compile(r'\$\([a-zA-Z_][a-zA-Z0-9_]*\)$')
|
||||||
|
|
||||||
|
def __init__(self, default, mac, iphonesimulator, iphoneos):
|
||||||
|
self._default = (default,)
|
||||||
|
self._archs = {'mac': mac, 'ios': iphoneos, 'iossim': iphonesimulator}
|
||||||
|
|
||||||
|
def _VariableMapping(self, sdkroot):
|
||||||
|
"""Returns the dictionary of variable mapping depending on the SDKROOT."""
|
||||||
|
sdkroot = sdkroot.lower()
|
||||||
|
if 'iphoneos' in sdkroot:
|
||||||
|
return self._archs['ios']
|
||||||
|
elif 'iphonesimulator' in sdkroot:
|
||||||
|
return self._archs['iossim']
|
||||||
|
else:
|
||||||
|
return self._archs['mac']
|
||||||
|
|
||||||
|
def _ExpandArchs(self, archs, sdkroot):
|
||||||
|
"""Expands variables references in ARCHS, and remove duplicates."""
|
||||||
|
variable_mapping = self._VariableMapping(sdkroot)
|
||||||
|
expanded_archs = []
|
||||||
|
for arch in archs:
|
||||||
|
if self.variable_pattern.match(arch):
|
||||||
|
variable = arch
|
||||||
|
try:
|
||||||
|
variable_expansion = variable_mapping[variable]
|
||||||
|
for arch in variable_expansion:
|
||||||
|
if arch not in expanded_archs:
|
||||||
|
expanded_archs.append(arch)
|
||||||
|
except KeyError as e:
|
||||||
|
print 'Warning: Ignoring unsupported variable "%s".' % variable
|
||||||
|
elif arch not in expanded_archs:
|
||||||
|
expanded_archs.append(arch)
|
||||||
|
return expanded_archs
|
||||||
|
|
||||||
|
def ActiveArchs(self, archs, valid_archs, sdkroot):
|
||||||
|
"""Expands variables references in ARCHS, and filter by VALID_ARCHS if it
|
||||||
|
is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
|
||||||
|
values present in VALID_ARCHS are kept)."""
|
||||||
|
expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or '')
|
||||||
|
if valid_archs:
|
||||||
|
filtered_archs = []
|
||||||
|
for arch in expanded_archs:
|
||||||
|
if arch in valid_archs:
|
||||||
|
filtered_archs.append(arch)
|
||||||
|
expanded_archs = filtered_archs
|
||||||
|
return expanded_archs
|
||||||
|
|
||||||
|
|
||||||
|
def GetXcodeArchsDefault():
|
||||||
|
"""Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
|
||||||
|
installed version of Xcode. The default values used by Xcode for ARCHS
|
||||||
|
and the expansion of the variables depends on the version of Xcode used.
|
||||||
|
|
||||||
|
For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included
|
||||||
|
uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses
|
||||||
|
$(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added to Xcode 5.0
|
||||||
|
and deprecated with Xcode 5.1.
|
||||||
|
|
||||||
|
For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit
|
||||||
|
architecture as part of $(ARCHS_STANDARD) and default to only building it.
|
||||||
|
|
||||||
|
For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part
|
||||||
|
of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they
|
||||||
|
are also part of $(ARCHS_STANDARD).
|
||||||
|
|
||||||
|
All thoses rules are coded in the construction of the |XcodeArchsDefault|
|
||||||
|
object to use depending on the version of Xcode detected. The object is
|
||||||
|
for performance reason."""
|
||||||
|
global XCODE_ARCHS_DEFAULT_CACHE
|
||||||
|
if XCODE_ARCHS_DEFAULT_CACHE:
|
||||||
|
return XCODE_ARCHS_DEFAULT_CACHE
|
||||||
|
xcode_version, _ = XcodeVersion()
|
||||||
|
if xcode_version < '0500':
|
||||||
|
XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
|
||||||
|
'$(ARCHS_STANDARD)',
|
||||||
|
XcodeArchsVariableMapping(['i386']),
|
||||||
|
XcodeArchsVariableMapping(['i386']),
|
||||||
|
XcodeArchsVariableMapping(['armv7']))
|
||||||
|
elif xcode_version < '0510':
|
||||||
|
XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
|
||||||
|
'$(ARCHS_STANDARD_INCLUDING_64_BIT)',
|
||||||
|
XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
|
||||||
|
XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']),
|
||||||
|
XcodeArchsVariableMapping(
|
||||||
|
['armv7', 'armv7s'],
|
||||||
|
['armv7', 'armv7s', 'arm64']))
|
||||||
|
else:
|
||||||
|
XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
|
||||||
|
'$(ARCHS_STANDARD)',
|
||||||
|
XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
|
||||||
|
XcodeArchsVariableMapping(['i386', 'x86_64'], ['i386', 'x86_64']),
|
||||||
|
XcodeArchsVariableMapping(
|
||||||
|
['armv7', 'armv7s', 'arm64'],
|
||||||
|
['armv7', 'armv7s', 'arm64']))
|
||||||
|
return XCODE_ARCHS_DEFAULT_CACHE
|
||||||
|
|
||||||
|
|
||||||
class XcodeSettings(object):
|
class XcodeSettings(object):
|
||||||
"""A class that understands the gyp 'xcode_settings' object."""
|
"""A class that understands the gyp 'xcode_settings' object."""
|
||||||
|
|
||||||
|
@ -34,10 +157,6 @@ class XcodeSettings(object):
|
||||||
# cached at class-level for efficiency.
|
# cached at class-level for efficiency.
|
||||||
_codesigning_key_cache = {}
|
_codesigning_key_cache = {}
|
||||||
|
|
||||||
# Populated lazily by _XcodeVersion. Shared by all XcodeSettings, so cached
|
|
||||||
# at class-level for efficiency.
|
|
||||||
_xcode_version_cache = ()
|
|
||||||
|
|
||||||
def __init__(self, spec):
|
def __init__(self, spec):
|
||||||
self.spec = spec
|
self.spec = spec
|
||||||
|
|
||||||
|
@ -96,9 +215,24 @@ class XcodeSettings(object):
|
||||||
if test_key in self._Settings():
|
if test_key in self._Settings():
|
||||||
print 'Warning: Ignoring not yet implemented key "%s".' % test_key
|
print 'Warning: Ignoring not yet implemented key "%s".' % test_key
|
||||||
|
|
||||||
|
def IsBinaryOutputFormat(self, configname):
|
||||||
|
default = "binary" if self.isIOS else "xml"
|
||||||
|
format = self.xcode_settings[configname].get('INFOPLIST_OUTPUT_FORMAT',
|
||||||
|
default)
|
||||||
|
return format == "binary"
|
||||||
|
|
||||||
def _IsBundle(self):
|
def _IsBundle(self):
|
||||||
return int(self.spec.get('mac_bundle', 0)) != 0
|
return int(self.spec.get('mac_bundle', 0)) != 0
|
||||||
|
|
||||||
|
def _IsIosAppExtension(self):
|
||||||
|
return int(self.spec.get('ios_app_extension', 0)) != 0
|
||||||
|
|
||||||
|
def _IsIosWatchKitExtension(self):
|
||||||
|
return int(self.spec.get('ios_watchkit_extension', 0)) != 0
|
||||||
|
|
||||||
|
def _IsIosWatchApp(self):
|
||||||
|
return int(self.spec.get('ios_watch_app', 0)) != 0
|
||||||
|
|
||||||
def GetFrameworkVersion(self):
|
def GetFrameworkVersion(self):
|
||||||
"""Returns the framework version of the current target. Only valid for
|
"""Returns the framework version of the current target. Only valid for
|
||||||
bundles."""
|
bundles."""
|
||||||
|
@ -118,7 +252,10 @@ class XcodeSettings(object):
|
||||||
'WRAPPER_EXTENSION', default=default_wrapper_extension)
|
'WRAPPER_EXTENSION', default=default_wrapper_extension)
|
||||||
return '.' + self.spec.get('product_extension', wrapper_extension)
|
return '.' + self.spec.get('product_extension', wrapper_extension)
|
||||||
elif self.spec['type'] == 'executable':
|
elif self.spec['type'] == 'executable':
|
||||||
return '.' + self.spec.get('product_extension', 'app')
|
if self._IsIosAppExtension() or self._IsIosWatchKitExtension():
|
||||||
|
return '.' + self.spec.get('product_extension', 'appex')
|
||||||
|
else:
|
||||||
|
return '.' + self.spec.get('product_extension', 'app')
|
||||||
else:
|
else:
|
||||||
assert False, "Don't know extension for '%s', target '%s'" % (
|
assert False, "Don't know extension for '%s', target '%s'" % (
|
||||||
self.spec['type'], self.spec['target_name'])
|
self.spec['type'], self.spec['target_name'])
|
||||||
|
@ -173,6 +310,18 @@ class XcodeSettings(object):
|
||||||
|
|
||||||
def GetProductType(self):
|
def GetProductType(self):
|
||||||
"""Returns the PRODUCT_TYPE of this target."""
|
"""Returns the PRODUCT_TYPE of this target."""
|
||||||
|
if self._IsIosAppExtension():
|
||||||
|
assert self._IsBundle(), ('ios_app_extension flag requires mac_bundle '
|
||||||
|
'(target %s)' % self.spec['target_name'])
|
||||||
|
return 'com.apple.product-type.app-extension'
|
||||||
|
if self._IsIosWatchKitExtension():
|
||||||
|
assert self._IsBundle(), ('ios_watchkit_extension flag requires '
|
||||||
|
'mac_bundle (target %s)' % self.spec['target_name'])
|
||||||
|
return 'com.apple.product-type.watchkit-extension'
|
||||||
|
if self._IsIosWatchApp():
|
||||||
|
assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle '
|
||||||
|
'(target %s)' % self.spec['target_name'])
|
||||||
|
return 'com.apple.product-type.application.watchapp'
|
||||||
if self._IsBundle():
|
if self._IsBundle():
|
||||||
return {
|
return {
|
||||||
'executable': 'com.apple.product-type.application',
|
'executable': 'com.apple.product-type.application',
|
||||||
|
@ -267,17 +416,12 @@ class XcodeSettings(object):
|
||||||
|
|
||||||
def GetActiveArchs(self, configname):
|
def GetActiveArchs(self, configname):
|
||||||
"""Returns the architectures this target should be built for."""
|
"""Returns the architectures this target should be built for."""
|
||||||
# TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
|
config_settings = self.xcode_settings[configname]
|
||||||
# CURRENT_ARCH / NATIVE_ARCH env vars?
|
xcode_archs_default = GetXcodeArchsDefault()
|
||||||
return self.xcode_settings[configname].get('ARCHS', [self._DefaultArch()])
|
return xcode_archs_default.ActiveArchs(
|
||||||
|
config_settings.get('ARCHS'),
|
||||||
def _GetStdout(self, cmdlist):
|
config_settings.get('VALID_ARCHS'),
|
||||||
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
|
config_settings.get('SDKROOT'))
|
||||||
out = job.communicate()[0]
|
|
||||||
if job.returncode != 0:
|
|
||||||
sys.stderr.write(out + '\n')
|
|
||||||
raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
|
|
||||||
return out.rstrip('\n')
|
|
||||||
|
|
||||||
def _GetSdkVersionInfoItem(self, sdk, infoitem):
|
def _GetSdkVersionInfoItem(self, sdk, infoitem):
|
||||||
# xcodebuild requires Xcode and can't run on Command Line Tools-only
|
# xcodebuild requires Xcode and can't run on Command Line Tools-only
|
||||||
|
@ -285,7 +429,7 @@ class XcodeSettings(object):
|
||||||
# Since the CLT has no SDK paths anyway, returning None is the
|
# Since the CLT has no SDK paths anyway, returning None is the
|
||||||
# most sensible route and should still do the right thing.
|
# most sensible route and should still do the right thing.
|
||||||
try:
|
try:
|
||||||
return self._GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
|
return GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@ -396,7 +540,8 @@ class XcodeSettings(object):
|
||||||
if arch is not None:
|
if arch is not None:
|
||||||
archs = [arch]
|
archs = [arch]
|
||||||
else:
|
else:
|
||||||
archs = self._Settings().get('ARCHS', [self._DefaultArch()])
|
assert self.configname
|
||||||
|
archs = self.GetActiveArchs(self.configname)
|
||||||
if len(archs) != 1:
|
if len(archs) != 1:
|
||||||
# TODO: Supporting fat binaries will be annoying.
|
# TODO: Supporting fat binaries will be annoying.
|
||||||
self._WarnUnimplemented('ARCHS')
|
self._WarnUnimplemented('ARCHS')
|
||||||
|
@ -588,8 +733,8 @@ class XcodeSettings(object):
|
||||||
# -exported_symbols_list file
|
# -exported_symbols_list file
|
||||||
# -Wl,exported_symbols_list file
|
# -Wl,exported_symbols_list file
|
||||||
# -Wl,exported_symbols_list,file
|
# -Wl,exported_symbols_list,file
|
||||||
LINKER_FILE = '(\S+)'
|
LINKER_FILE = r'(\S+)'
|
||||||
WORD = '\S+'
|
WORD = r'\S+'
|
||||||
linker_flags = [
|
linker_flags = [
|
||||||
['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
|
['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
|
||||||
['-unexported_symbols_list', LINKER_FILE],
|
['-unexported_symbols_list', LINKER_FILE],
|
||||||
|
@ -653,7 +798,8 @@ class XcodeSettings(object):
|
||||||
if arch is not None:
|
if arch is not None:
|
||||||
archs = [arch]
|
archs = [arch]
|
||||||
else:
|
else:
|
||||||
archs = self._Settings().get('ARCHS', [self._DefaultArch()])
|
assert self.configname
|
||||||
|
archs = self.GetActiveArchs(self.configname)
|
||||||
if len(archs) != 1:
|
if len(archs) != 1:
|
||||||
# TODO: Supporting fat binaries will be annoying.
|
# TODO: Supporting fat binaries will be annoying.
|
||||||
self._WarnUnimplemented('ARCHS')
|
self._WarnUnimplemented('ARCHS')
|
||||||
|
@ -678,6 +824,21 @@ class XcodeSettings(object):
|
||||||
for directory in framework_dirs:
|
for directory in framework_dirs:
|
||||||
ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
|
ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
|
||||||
|
|
||||||
|
is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
|
||||||
|
if sdk_root and is_extension:
|
||||||
|
# Adds the link flags for extensions. These flags are common for all
|
||||||
|
# extensions and provide loader and main function.
|
||||||
|
# These flags reflect the compilation options used by xcode to compile
|
||||||
|
# extensions.
|
||||||
|
ldflags.append('-lpkstart')
|
||||||
|
ldflags.append(sdk_root +
|
||||||
|
'/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit')
|
||||||
|
ldflags.append('-fapplication-extension')
|
||||||
|
ldflags.append('-Xlinker -rpath '
|
||||||
|
'-Xlinker @executable_path/../../Frameworks')
|
||||||
|
|
||||||
|
self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
|
||||||
|
|
||||||
self.configname = None
|
self.configname = None
|
||||||
return ldflags
|
return ldflags
|
||||||
|
|
||||||
|
@ -803,7 +964,7 @@ class XcodeSettings(object):
|
||||||
"""Return a shell command to codesign the iOS output binary so it can
|
"""Return a shell command to codesign the iOS output binary so it can
|
||||||
be deployed to a device. This should be run as the very last step of the
|
be deployed to a device. This should be run as the very last step of the
|
||||||
build."""
|
build."""
|
||||||
if not (self.isIOS and self.spec['type'] == "executable"):
|
if not (self.isIOS and self.spec['type'] == 'executable'):
|
||||||
return []
|
return []
|
||||||
|
|
||||||
settings = self.xcode_settings[configname]
|
settings = self.xcode_settings[configname]
|
||||||
|
@ -874,65 +1035,7 @@ class XcodeSettings(object):
|
||||||
return libraries
|
return libraries
|
||||||
|
|
||||||
def _BuildMachineOSBuild(self):
|
def _BuildMachineOSBuild(self):
|
||||||
return self._GetStdout(['sw_vers', '-buildVersion'])
|
return GetStdout(['sw_vers', '-buildVersion'])
|
||||||
|
|
||||||
# This method ported from the logic in Homebrew's CLT version check
|
|
||||||
def _CLTVersion(self):
|
|
||||||
# pkgutil output looks like
|
|
||||||
# package-id: com.apple.pkg.CLTools_Executables
|
|
||||||
# version: 5.0.1.0.1.1382131676
|
|
||||||
# volume: /
|
|
||||||
# location: /
|
|
||||||
# install-time: 1382544035
|
|
||||||
# groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
|
|
||||||
STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
|
|
||||||
FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
|
|
||||||
MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
|
|
||||||
|
|
||||||
regex = re.compile('version: (?P<version>.+)')
|
|
||||||
for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
|
|
||||||
try:
|
|
||||||
output = self._GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
|
|
||||||
return re.search(regex, output).groupdict()['version']
|
|
||||||
except:
|
|
||||||
continue
|
|
||||||
|
|
||||||
def _XcodeVersion(self):
|
|
||||||
# `xcodebuild -version` output looks like
|
|
||||||
# Xcode 4.6.3
|
|
||||||
# Build version 4H1503
|
|
||||||
# or like
|
|
||||||
# Xcode 3.2.6
|
|
||||||
# Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
|
|
||||||
# BuildVersion: 10M2518
|
|
||||||
# Convert that to '0463', '4H1503'.
|
|
||||||
if len(XcodeSettings._xcode_version_cache) == 0:
|
|
||||||
try:
|
|
||||||
version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
|
|
||||||
# In some circumstances xcodebuild exits 0 but doesn't return
|
|
||||||
# the right results; for example, a user on 10.7 or 10.8 with
|
|
||||||
# a bogus path set via xcode-select
|
|
||||||
# In that case this may be a CLT-only install so fall back to
|
|
||||||
# checking that version.
|
|
||||||
if len(version_list) < 2:
|
|
||||||
raise GypError, "xcodebuild returned unexpected results"
|
|
||||||
except:
|
|
||||||
version = self._CLTVersion()
|
|
||||||
if version:
|
|
||||||
version = re.match('(\d\.\d\.?\d*)', version).groups()[0]
|
|
||||||
else:
|
|
||||||
raise GypError, "No Xcode or CLT version detected!"
|
|
||||||
# The CLT has no build information, so we return an empty string.
|
|
||||||
version_list = [version, '']
|
|
||||||
version = version_list[0]
|
|
||||||
build = version_list[-1]
|
|
||||||
# Be careful to convert "4.2" to "0420":
|
|
||||||
version = version.split()[-1].replace('.', '')
|
|
||||||
version = (version + '0' * (3 - len(version))).zfill(4)
|
|
||||||
if build:
|
|
||||||
build = build.split()[-1]
|
|
||||||
XcodeSettings._xcode_version_cache = (version, build)
|
|
||||||
return XcodeSettings._xcode_version_cache
|
|
||||||
|
|
||||||
def _XcodeIOSDeviceFamily(self, configname):
|
def _XcodeIOSDeviceFamily(self, configname):
|
||||||
family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
|
family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
|
||||||
|
@ -944,7 +1047,7 @@ class XcodeSettings(object):
|
||||||
cache = {}
|
cache = {}
|
||||||
cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
|
cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
|
||||||
|
|
||||||
xcode, xcode_build = self._XcodeVersion()
|
xcode, xcode_build = XcodeVersion()
|
||||||
cache['DTXcode'] = xcode
|
cache['DTXcode'] = xcode
|
||||||
cache['DTXcodeBuild'] = xcode_build
|
cache['DTXcodeBuild'] = xcode_build
|
||||||
|
|
||||||
|
@ -982,14 +1085,15 @@ class XcodeSettings(object):
|
||||||
project, then the environment variable was empty. Starting with this
|
project, then the environment variable was empty. Starting with this
|
||||||
version, Xcode uses the name of the newest SDK installed.
|
version, Xcode uses the name of the newest SDK installed.
|
||||||
"""
|
"""
|
||||||
if self._XcodeVersion() < '0500':
|
xcode_version, xcode_build = XcodeVersion()
|
||||||
|
if xcode_version < '0500':
|
||||||
return ''
|
return ''
|
||||||
default_sdk_path = self._XcodeSdkPath('')
|
default_sdk_path = self._XcodeSdkPath('')
|
||||||
default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
|
default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
|
||||||
if default_sdk_root:
|
if default_sdk_root:
|
||||||
return default_sdk_root
|
return default_sdk_root
|
||||||
try:
|
try:
|
||||||
all_sdks = self._GetStdout(['xcodebuild', '-showsdks'])
|
all_sdks = GetStdout(['xcodebuild', '-showsdks'])
|
||||||
except:
|
except:
|
||||||
# If xcodebuild fails, there will be no valid SDKs
|
# If xcodebuild fails, there will be no valid SDKs
|
||||||
return ''
|
return ''
|
||||||
|
@ -1002,28 +1106,6 @@ class XcodeSettings(object):
|
||||||
return sdk_root
|
return sdk_root
|
||||||
return ''
|
return ''
|
||||||
|
|
||||||
def _DefaultArch(self):
|
|
||||||
# For Mac projects, Xcode changed the default value used when ARCHS is not
|
|
||||||
# set from "i386" to "x86_64".
|
|
||||||
#
|
|
||||||
# For iOS projects, if ARCHS is unset, it defaults to "armv7 armv7s" when
|
|
||||||
# building for a device, and the simulator binaries are always build for
|
|
||||||
# "i386".
|
|
||||||
#
|
|
||||||
# For new projects, ARCHS is set to $(ARCHS_STANDARD_INCLUDING_64_BIT),
|
|
||||||
# which correspond to "armv7 armv7s arm64", and when building the simulator
|
|
||||||
# the architecture is either "i386" or "x86_64" depending on the simulated
|
|
||||||
# device (respectively 32-bit or 64-bit device).
|
|
||||||
#
|
|
||||||
# Since the value returned by this function is only used when ARCHS is not
|
|
||||||
# set, then on iOS we return "i386", as the default xcode project generator
|
|
||||||
# does not set ARCHS if it is not set in the .gyp file.
|
|
||||||
if self.isIOS:
|
|
||||||
return 'i386'
|
|
||||||
version, build = self._XcodeVersion()
|
|
||||||
if version >= '0500':
|
|
||||||
return 'x86_64'
|
|
||||||
return 'i386'
|
|
||||||
|
|
||||||
class MacPrefixHeader(object):
|
class MacPrefixHeader(object):
|
||||||
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
|
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
|
||||||
|
@ -1131,6 +1213,81 @@ class MacPrefixHeader(object):
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def XcodeVersion():
|
||||||
|
"""Returns a tuple of version and build version of installed Xcode."""
|
||||||
|
# `xcodebuild -version` output looks like
|
||||||
|
# Xcode 4.6.3
|
||||||
|
# Build version 4H1503
|
||||||
|
# or like
|
||||||
|
# Xcode 3.2.6
|
||||||
|
# Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
|
||||||
|
# BuildVersion: 10M2518
|
||||||
|
# Convert that to '0463', '4H1503'.
|
||||||
|
global XCODE_VERSION_CACHE
|
||||||
|
if XCODE_VERSION_CACHE:
|
||||||
|
return XCODE_VERSION_CACHE
|
||||||
|
try:
|
||||||
|
version_list = GetStdout(['xcodebuild', '-version']).splitlines()
|
||||||
|
# In some circumstances xcodebuild exits 0 but doesn't return
|
||||||
|
# the right results; for example, a user on 10.7 or 10.8 with
|
||||||
|
# a bogus path set via xcode-select
|
||||||
|
# In that case this may be a CLT-only install so fall back to
|
||||||
|
# checking that version.
|
||||||
|
if len(version_list) < 2:
|
||||||
|
raise GypError("xcodebuild returned unexpected results")
|
||||||
|
except:
|
||||||
|
version = CLTVersion()
|
||||||
|
if version:
|
||||||
|
version = re.match(r'(\d\.\d\.?\d*)', version).groups()[0]
|
||||||
|
else:
|
||||||
|
raise GypError("No Xcode or CLT version detected!")
|
||||||
|
# The CLT has no build information, so we return an empty string.
|
||||||
|
version_list = [version, '']
|
||||||
|
version = version_list[0]
|
||||||
|
build = version_list[-1]
|
||||||
|
# Be careful to convert "4.2" to "0420":
|
||||||
|
version = version.split()[-1].replace('.', '')
|
||||||
|
version = (version + '0' * (3 - len(version))).zfill(4)
|
||||||
|
if build:
|
||||||
|
build = build.split()[-1]
|
||||||
|
XCODE_VERSION_CACHE = (version, build)
|
||||||
|
return XCODE_VERSION_CACHE
|
||||||
|
|
||||||
|
|
||||||
|
# This function ported from the logic in Homebrew's CLT version check
|
||||||
|
def CLTVersion():
|
||||||
|
"""Returns the version of command-line tools from pkgutil."""
|
||||||
|
# pkgutil output looks like
|
||||||
|
# package-id: com.apple.pkg.CLTools_Executables
|
||||||
|
# version: 5.0.1.0.1.1382131676
|
||||||
|
# volume: /
|
||||||
|
# location: /
|
||||||
|
# install-time: 1382544035
|
||||||
|
# groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
|
||||||
|
STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
|
||||||
|
FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
|
||||||
|
MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
|
||||||
|
|
||||||
|
regex = re.compile('version: (?P<version>.+)')
|
||||||
|
for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
|
||||||
|
try:
|
||||||
|
output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
|
||||||
|
return re.search(regex, output).groupdict()['version']
|
||||||
|
except:
|
||||||
|
continue
|
||||||
|
|
||||||
|
|
||||||
|
def GetStdout(cmdlist):
|
||||||
|
"""Returns the content of standard output returned by invoking |cmdlist|.
|
||||||
|
Raises |GypError| if the command return with a non-zero return code."""
|
||||||
|
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
|
||||||
|
out = job.communicate()[0]
|
||||||
|
if job.returncode != 0:
|
||||||
|
sys.stderr.write(out + '\n')
|
||||||
|
raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
|
||||||
|
return out.rstrip('\n')
|
||||||
|
|
||||||
|
|
||||||
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
|
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
|
||||||
"""Merges the global xcode_settings dictionary into each configuration of the
|
"""Merges the global xcode_settings dictionary into each configuration of the
|
||||||
target represented by spec. For keys that are both in the global and the local
|
target represented by spec. For keys that are both in the global and the local
|
||||||
|
@ -1310,6 +1467,13 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
|
||||||
install_name_base = xcode_settings.GetInstallNameBase()
|
install_name_base = xcode_settings.GetInstallNameBase()
|
||||||
if install_name_base:
|
if install_name_base:
|
||||||
env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
|
env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
|
||||||
|
if XcodeVersion() >= '0500' and not env.get('SDKROOT'):
|
||||||
|
sdk_root = xcode_settings._SdkRoot(configuration)
|
||||||
|
if not sdk_root:
|
||||||
|
sdk_root = xcode_settings._XcodeSdkPath('')
|
||||||
|
if sdk_root is None:
|
||||||
|
sdk_root = ''
|
||||||
|
env['SDKROOT'] = sdk_root
|
||||||
|
|
||||||
if not additional_settings:
|
if not additional_settings:
|
||||||
additional_settings = {}
|
additional_settings = {}
|
||||||
|
@ -1420,16 +1584,16 @@ def _HasIOSTarget(targets):
|
||||||
|
|
||||||
def _AddIOSDeviceConfigurations(targets):
|
def _AddIOSDeviceConfigurations(targets):
|
||||||
"""Clone all targets and append -iphoneos to the name. Configure these targets
|
"""Clone all targets and append -iphoneos to the name. Configure these targets
|
||||||
to build for iOS devices."""
|
to build for iOS devices and use correct architectures for those builds."""
|
||||||
for target_dict in targets.values():
|
for target_dict in targets.itervalues():
|
||||||
for config_name in target_dict['configurations'].keys():
|
toolset = target_dict['toolset']
|
||||||
config = target_dict['configurations'][config_name]
|
configs = target_dict['configurations']
|
||||||
new_config_name = config_name + '-iphoneos'
|
for config_name, config_dict in dict(configs).iteritems():
|
||||||
new_config_dict = copy.deepcopy(config)
|
iphoneos_config_dict = copy.deepcopy(config_dict)
|
||||||
if target_dict['toolset'] == 'target':
|
configs[config_name + '-iphoneos'] = iphoneos_config_dict
|
||||||
new_config_dict['xcode_settings']['ARCHS'] = ['armv7']
|
configs[config_name + '-iphonesimulator'] = config_dict
|
||||||
new_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
|
if toolset == 'target':
|
||||||
target_dict['configurations'][new_config_name] = new_config_dict
|
iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
|
||||||
return targets
|
return targets
|
||||||
|
|
||||||
def CloneConfigurationForDeviceAndEmulator(target_dicts):
|
def CloneConfigurationForDeviceAndEmulator(target_dicts):
|
||||||
|
|
270
gyp/pylib/gyp/xcode_ninja.py
Normal file
270
gyp/pylib/gyp/xcode_ninja.py
Normal file
|
@ -0,0 +1,270 @@
|
||||||
|
# Copyright (c) 2014 Google Inc. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
"""Xcode-ninja wrapper project file generator.
|
||||||
|
|
||||||
|
This updates the data structures passed to the Xcode gyp generator to build
|
||||||
|
with ninja instead. The Xcode project itself is transformed into a list of
|
||||||
|
executable targets, each with a build step to build with ninja, and a target
|
||||||
|
with every source and resource file. This appears to sidestep some of the
|
||||||
|
major performance headaches experienced using complex projects and large number
|
||||||
|
of targets within Xcode.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import errno
|
||||||
|
import gyp.generator.ninja
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import xml.sax.saxutils
|
||||||
|
|
||||||
|
|
||||||
|
def _WriteWorkspace(main_gyp, sources_gyp, params):
|
||||||
|
""" Create a workspace to wrap main and sources gyp paths. """
|
||||||
|
(build_file_root, build_file_ext) = os.path.splitext(main_gyp)
|
||||||
|
workspace_path = build_file_root + '.xcworkspace'
|
||||||
|
options = params['options']
|
||||||
|
if options.generator_output:
|
||||||
|
workspace_path = os.path.join(options.generator_output, workspace_path)
|
||||||
|
try:
|
||||||
|
os.makedirs(workspace_path)
|
||||||
|
except OSError, e:
|
||||||
|
if e.errno != errno.EEXIST:
|
||||||
|
raise
|
||||||
|
output_string = '<?xml version="1.0" encoding="UTF-8"?>\n' + \
|
||||||
|
'<Workspace version = "1.0">\n'
|
||||||
|
for gyp_name in [main_gyp, sources_gyp]:
|
||||||
|
name = os.path.splitext(os.path.basename(gyp_name))[0] + '.xcodeproj'
|
||||||
|
name = xml.sax.saxutils.quoteattr("group:" + name)
|
||||||
|
output_string += ' <FileRef location = %s></FileRef>\n' % name
|
||||||
|
output_string += '</Workspace>\n'
|
||||||
|
|
||||||
|
workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(workspace_file, 'r') as input_file:
|
||||||
|
input_string = input_file.read()
|
||||||
|
if input_string == output_string:
|
||||||
|
return
|
||||||
|
except IOError:
|
||||||
|
# Ignore errors if the file doesn't exist.
|
||||||
|
pass
|
||||||
|
|
||||||
|
with open(workspace_file, 'w') as output_file:
|
||||||
|
output_file.write(output_string)
|
||||||
|
|
||||||
|
def _TargetFromSpec(old_spec, params):
|
||||||
|
""" Create fake target for xcode-ninja wrapper. """
|
||||||
|
# Determine ninja top level build dir (e.g. /path/to/out).
|
||||||
|
ninja_toplevel = None
|
||||||
|
jobs = 0
|
||||||
|
if params:
|
||||||
|
options = params['options']
|
||||||
|
ninja_toplevel = \
|
||||||
|
os.path.join(options.toplevel_dir,
|
||||||
|
gyp.generator.ninja.ComputeOutputDir(params))
|
||||||
|
jobs = params.get('generator_flags', {}).get('xcode_ninja_jobs', 0)
|
||||||
|
|
||||||
|
target_name = old_spec.get('target_name')
|
||||||
|
product_name = old_spec.get('product_name', target_name)
|
||||||
|
product_extension = old_spec.get('product_extension')
|
||||||
|
|
||||||
|
ninja_target = {}
|
||||||
|
ninja_target['target_name'] = target_name
|
||||||
|
ninja_target['product_name'] = product_name
|
||||||
|
if product_extension:
|
||||||
|
ninja_target['product_extension'] = product_extension
|
||||||
|
ninja_target['toolset'] = old_spec.get('toolset')
|
||||||
|
ninja_target['default_configuration'] = old_spec.get('default_configuration')
|
||||||
|
ninja_target['configurations'] = {}
|
||||||
|
|
||||||
|
# Tell Xcode to look in |ninja_toplevel| for build products.
|
||||||
|
new_xcode_settings = {}
|
||||||
|
if ninja_toplevel:
|
||||||
|
new_xcode_settings['CONFIGURATION_BUILD_DIR'] = \
|
||||||
|
"%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
|
||||||
|
|
||||||
|
if 'configurations' in old_spec:
|
||||||
|
for config in old_spec['configurations'].iterkeys():
|
||||||
|
old_xcode_settings = \
|
||||||
|
old_spec['configurations'][config].get('xcode_settings', {})
|
||||||
|
if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings:
|
||||||
|
new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO"
|
||||||
|
new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \
|
||||||
|
old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET']
|
||||||
|
ninja_target['configurations'][config] = {}
|
||||||
|
ninja_target['configurations'][config]['xcode_settings'] = \
|
||||||
|
new_xcode_settings
|
||||||
|
|
||||||
|
ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0)
|
||||||
|
ninja_target['ios_app_extension'] = old_spec.get('ios_app_extension', 0)
|
||||||
|
ninja_target['ios_watchkit_extension'] = \
|
||||||
|
old_spec.get('ios_watchkit_extension', 0)
|
||||||
|
ninja_target['ios_watchkit_app'] = old_spec.get('ios_watchkit_app', 0)
|
||||||
|
ninja_target['type'] = old_spec['type']
|
||||||
|
if ninja_toplevel:
|
||||||
|
ninja_target['actions'] = [
|
||||||
|
{
|
||||||
|
'action_name': 'Compile and copy %s via ninja' % target_name,
|
||||||
|
'inputs': [],
|
||||||
|
'outputs': [],
|
||||||
|
'action': [
|
||||||
|
'env',
|
||||||
|
'PATH=%s' % os.environ['PATH'],
|
||||||
|
'ninja',
|
||||||
|
'-C',
|
||||||
|
new_xcode_settings['CONFIGURATION_BUILD_DIR'],
|
||||||
|
target_name,
|
||||||
|
],
|
||||||
|
'message': 'Compile and copy %s via ninja' % target_name,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
if jobs > 0:
|
||||||
|
ninja_target['actions'][0]['action'].extend(('-j', jobs))
|
||||||
|
return ninja_target
|
||||||
|
|
||||||
|
def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
|
||||||
|
"""Limit targets for Xcode wrapper.
|
||||||
|
|
||||||
|
Xcode sometimes performs poorly with too many targets, so only include
|
||||||
|
proper executable targets, with filters to customize.
|
||||||
|
Arguments:
|
||||||
|
target_extras: Regular expression to always add, matching any target.
|
||||||
|
executable_target_pattern: Regular expression limiting executable targets.
|
||||||
|
spec: Specifications for target.
|
||||||
|
"""
|
||||||
|
target_name = spec.get('target_name')
|
||||||
|
# Always include targets matching target_extras.
|
||||||
|
if target_extras is not None and re.search(target_extras, target_name):
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Otherwise just show executable targets.
|
||||||
|
if spec.get('type', '') == 'executable' and \
|
||||||
|
spec.get('product_extension', '') != 'bundle':
|
||||||
|
|
||||||
|
# If there is a filter and the target does not match, exclude the target.
|
||||||
|
if executable_target_pattern is not None:
|
||||||
|
if not re.search(executable_target_pattern, target_name):
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def CreateWrapper(target_list, target_dicts, data, params):
|
||||||
|
"""Initialize targets for the ninja wrapper.
|
||||||
|
|
||||||
|
This sets up the necessary variables in the targets to generate Xcode projects
|
||||||
|
that use ninja as an external builder.
|
||||||
|
Arguments:
|
||||||
|
target_list: List of target pairs: 'base/base.gyp:base'.
|
||||||
|
target_dicts: Dict of target properties keyed on target pair.
|
||||||
|
data: Dict of flattened build files keyed on gyp path.
|
||||||
|
params: Dict of global options for gyp.
|
||||||
|
"""
|
||||||
|
orig_gyp = params['build_files'][0]
|
||||||
|
for gyp_name, gyp_dict in data.iteritems():
|
||||||
|
if gyp_name == orig_gyp:
|
||||||
|
depth = gyp_dict['_DEPTH']
|
||||||
|
|
||||||
|
# Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE
|
||||||
|
# and prepend .ninja before the .gyp extension.
|
||||||
|
generator_flags = params.get('generator_flags', {})
|
||||||
|
main_gyp = generator_flags.get('xcode_ninja_main_gyp', None)
|
||||||
|
if main_gyp is None:
|
||||||
|
(build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
|
||||||
|
main_gyp = build_file_root + ".ninja" + build_file_ext
|
||||||
|
|
||||||
|
# Create new |target_list|, |target_dicts| and |data| data structures.
|
||||||
|
new_target_list = []
|
||||||
|
new_target_dicts = {}
|
||||||
|
new_data = {}
|
||||||
|
|
||||||
|
# Set base keys needed for |data|.
|
||||||
|
new_data[main_gyp] = {}
|
||||||
|
new_data[main_gyp]['included_files'] = []
|
||||||
|
new_data[main_gyp]['targets'] = []
|
||||||
|
new_data[main_gyp]['xcode_settings'] = \
|
||||||
|
data[orig_gyp].get('xcode_settings', {})
|
||||||
|
|
||||||
|
# Normally the xcode-ninja generator includes only valid executable targets.
|
||||||
|
# If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
|
||||||
|
# executable targets that match the pattern. (Default all)
|
||||||
|
executable_target_pattern = \
|
||||||
|
generator_flags.get('xcode_ninja_executable_target_pattern', None)
|
||||||
|
|
||||||
|
# For including other non-executable targets, add the matching target name
|
||||||
|
# to the |xcode_ninja_target_pattern| regular expression. (Default none)
|
||||||
|
target_extras = generator_flags.get('xcode_ninja_target_pattern', None)
|
||||||
|
|
||||||
|
for old_qualified_target in target_list:
|
||||||
|
spec = target_dicts[old_qualified_target]
|
||||||
|
if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
|
||||||
|
# Add to new_target_list.
|
||||||
|
target_name = spec.get('target_name')
|
||||||
|
new_target_name = '%s:%s#target' % (main_gyp, target_name)
|
||||||
|
new_target_list.append(new_target_name)
|
||||||
|
|
||||||
|
# Add to new_target_dicts.
|
||||||
|
new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
|
||||||
|
|
||||||
|
# Add to new_data.
|
||||||
|
for old_target in data[old_qualified_target.split(':')[0]]['targets']:
|
||||||
|
if old_target['target_name'] == target_name:
|
||||||
|
new_data_target = {}
|
||||||
|
new_data_target['target_name'] = old_target['target_name']
|
||||||
|
new_data_target['toolset'] = old_target['toolset']
|
||||||
|
new_data[main_gyp]['targets'].append(new_data_target)
|
||||||
|
|
||||||
|
# Create sources target.
|
||||||
|
sources_target_name = 'sources_for_indexing'
|
||||||
|
sources_target = _TargetFromSpec(
|
||||||
|
{ 'target_name' : sources_target_name,
|
||||||
|
'toolset': 'target',
|
||||||
|
'default_configuration': 'Default',
|
||||||
|
'mac_bundle': '0',
|
||||||
|
'type': 'executable'
|
||||||
|
}, None)
|
||||||
|
|
||||||
|
# Tell Xcode to look everywhere for headers.
|
||||||
|
sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }
|
||||||
|
|
||||||
|
sources = []
|
||||||
|
for target, target_dict in target_dicts.iteritems():
|
||||||
|
base = os.path.dirname(target)
|
||||||
|
files = target_dict.get('sources', []) + \
|
||||||
|
target_dict.get('mac_bundle_resources', [])
|
||||||
|
for action in target_dict.get('actions', []):
|
||||||
|
files.extend(action.get('inputs', []))
|
||||||
|
# Remove files starting with $. These are mostly intermediate files for the
|
||||||
|
# build system.
|
||||||
|
files = [ file for file in files if not file.startswith('$')]
|
||||||
|
|
||||||
|
# Make sources relative to root build file.
|
||||||
|
relative_path = os.path.dirname(main_gyp)
|
||||||
|
sources += [ os.path.relpath(os.path.join(base, file), relative_path)
|
||||||
|
for file in files ]
|
||||||
|
|
||||||
|
sources_target['sources'] = sorted(set(sources))
|
||||||
|
|
||||||
|
# Put sources_to_index in it's own gyp.
|
||||||
|
sources_gyp = \
|
||||||
|
os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
|
||||||
|
fully_qualified_target_name = \
|
||||||
|
'%s:%s#target' % (sources_gyp, sources_target_name)
|
||||||
|
|
||||||
|
# Add to new_target_list, new_target_dicts and new_data.
|
||||||
|
new_target_list.append(fully_qualified_target_name)
|
||||||
|
new_target_dicts[fully_qualified_target_name] = sources_target
|
||||||
|
new_data_target = {}
|
||||||
|
new_data_target['target_name'] = sources_target['target_name']
|
||||||
|
new_data_target['_DEPTH'] = depth
|
||||||
|
new_data_target['toolset'] = "target"
|
||||||
|
new_data[sources_gyp] = {}
|
||||||
|
new_data[sources_gyp]['targets'] = []
|
||||||
|
new_data[sources_gyp]['included_files'] = []
|
||||||
|
new_data[sources_gyp]['xcode_settings'] = \
|
||||||
|
data[orig_gyp].get('xcode_settings', {})
|
||||||
|
new_data[sources_gyp]['targets'].append(new_data_target)
|
||||||
|
|
||||||
|
# Write workspace to file.
|
||||||
|
_WriteWorkspace(main_gyp, sources_gyp, params)
|
||||||
|
return (new_target_list, new_target_dicts, new_data)
|
|
@ -173,7 +173,7 @@ _escaped = re.compile('[\\\\"]|[\x00-\x1f]')
|
||||||
|
|
||||||
|
|
||||||
# Used by SourceTreeAndPathFromPath
|
# Used by SourceTreeAndPathFromPath
|
||||||
_path_leading_variable = re.compile('^\$\((.*?)\)(/(.*))?$')
|
_path_leading_variable = re.compile(r'^\$\((.*?)\)(/(.*))?$')
|
||||||
|
|
||||||
def SourceTreeAndPathFromPath(input_path):
|
def SourceTreeAndPathFromPath(input_path):
|
||||||
"""Given input_path, returns a tuple with sourceTree and path values.
|
"""Given input_path, returns a tuple with sourceTree and path values.
|
||||||
|
@ -196,7 +196,7 @@ def SourceTreeAndPathFromPath(input_path):
|
||||||
return (source_tree, output_path)
|
return (source_tree, output_path)
|
||||||
|
|
||||||
def ConvertVariablesToShellSyntax(input_string):
|
def ConvertVariablesToShellSyntax(input_string):
|
||||||
return re.sub('\$\((.*?)\)', '${\\1}', input_string)
|
return re.sub(r'\$\((.*?)\)', '${\\1}', input_string)
|
||||||
|
|
||||||
class XCObject(object):
|
class XCObject(object):
|
||||||
"""The abstract base of all class types used in Xcode project files.
|
"""The abstract base of all class types used in Xcode project files.
|
||||||
|
@ -341,13 +341,13 @@ class XCObject(object):
|
||||||
elif isinstance(value, dict):
|
elif isinstance(value, dict):
|
||||||
# dicts are never strong.
|
# dicts are never strong.
|
||||||
if is_strong:
|
if is_strong:
|
||||||
raise TypeError, 'Strong dict for key ' + key + ' in ' + \
|
raise TypeError('Strong dict for key ' + key + ' in ' + \
|
||||||
self.__class__.__name__
|
self.__class__.__name__)
|
||||||
else:
|
else:
|
||||||
that._properties[key] = value.copy()
|
that._properties[key] = value.copy()
|
||||||
else:
|
else:
|
||||||
raise TypeError, 'Unexpected type ' + value.__class__.__name__ + \
|
raise TypeError('Unexpected type ' + value.__class__.__name__ + \
|
||||||
' for key ' + key + ' in ' + self.__class__.__name__
|
' for key ' + key + ' in ' + self.__class__.__name__)
|
||||||
|
|
||||||
return that
|
return that
|
||||||
|
|
||||||
|
@ -366,8 +366,7 @@ class XCObject(object):
|
||||||
('name' in self._schema and self._schema['name'][3]):
|
('name' in self._schema and self._schema['name'][3]):
|
||||||
return self._properties['name']
|
return self._properties['name']
|
||||||
|
|
||||||
raise NotImplementedError, \
|
raise NotImplementedError(self.__class__.__name__ + ' must implement Name')
|
||||||
self.__class__.__name__ + ' must implement Name'
|
|
||||||
|
|
||||||
def Comment(self):
|
def Comment(self):
|
||||||
"""Return a comment string for the object.
|
"""Return a comment string for the object.
|
||||||
|
@ -466,10 +465,10 @@ class XCObject(object):
|
||||||
for descendant in descendants:
|
for descendant in descendants:
|
||||||
if descendant.id in ids:
|
if descendant.id in ids:
|
||||||
other = ids[descendant.id]
|
other = ids[descendant.id]
|
||||||
raise KeyError, \
|
raise KeyError(
|
||||||
'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \
|
'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \
|
||||||
(descendant.id, str(descendant._properties),
|
(descendant.id, str(descendant._properties),
|
||||||
str(other._properties), self._properties['rootObject'].Name())
|
str(other._properties), self._properties['rootObject'].Name()))
|
||||||
ids[descendant.id] = descendant
|
ids[descendant.id] = descendant
|
||||||
|
|
||||||
def Children(self):
|
def Children(self):
|
||||||
|
@ -630,7 +629,7 @@ class XCObject(object):
|
||||||
sep
|
sep
|
||||||
printable += end_tabs + '}'
|
printable += end_tabs + '}'
|
||||||
else:
|
else:
|
||||||
raise TypeError, "Can't make " + value.__class__.__name__ + ' printable'
|
raise TypeError("Can't make " + value.__class__.__name__ + ' printable')
|
||||||
|
|
||||||
if comment != None:
|
if comment != None:
|
||||||
printable += ' ' + self._EncodeComment(comment)
|
printable += ' ' + self._EncodeComment(comment)
|
||||||
|
@ -756,31 +755,31 @@ class XCObject(object):
|
||||||
for property, value in properties.iteritems():
|
for property, value in properties.iteritems():
|
||||||
# Make sure the property is in the schema.
|
# Make sure the property is in the schema.
|
||||||
if not property in self._schema:
|
if not property in self._schema:
|
||||||
raise KeyError, property + ' not in ' + self.__class__.__name__
|
raise KeyError(property + ' not in ' + self.__class__.__name__)
|
||||||
|
|
||||||
# Make sure the property conforms to the schema.
|
# Make sure the property conforms to the schema.
|
||||||
(is_list, property_type, is_strong) = self._schema[property][0:3]
|
(is_list, property_type, is_strong) = self._schema[property][0:3]
|
||||||
if is_list:
|
if is_list:
|
||||||
if value.__class__ != list:
|
if value.__class__ != list:
|
||||||
raise TypeError, \
|
raise TypeError(
|
||||||
property + ' of ' + self.__class__.__name__ + \
|
property + ' of ' + self.__class__.__name__ + \
|
||||||
' must be list, not ' + value.__class__.__name__
|
' must be list, not ' + value.__class__.__name__)
|
||||||
for item in value:
|
for item in value:
|
||||||
if not isinstance(item, property_type) and \
|
if not isinstance(item, property_type) and \
|
||||||
not (item.__class__ == unicode and property_type == str):
|
not (item.__class__ == unicode and property_type == str):
|
||||||
# Accept unicode where str is specified. str is treated as
|
# Accept unicode where str is specified. str is treated as
|
||||||
# UTF-8-encoded.
|
# UTF-8-encoded.
|
||||||
raise TypeError, \
|
raise TypeError(
|
||||||
'item of ' + property + ' of ' + self.__class__.__name__ + \
|
'item of ' + property + ' of ' + self.__class__.__name__ + \
|
||||||
' must be ' + property_type.__name__ + ', not ' + \
|
' must be ' + property_type.__name__ + ', not ' + \
|
||||||
item.__class__.__name__
|
item.__class__.__name__)
|
||||||
elif not isinstance(value, property_type) and \
|
elif not isinstance(value, property_type) and \
|
||||||
not (value.__class__ == unicode and property_type == str):
|
not (value.__class__ == unicode and property_type == str):
|
||||||
# Accept unicode where str is specified. str is treated as
|
# Accept unicode where str is specified. str is treated as
|
||||||
# UTF-8-encoded.
|
# UTF-8-encoded.
|
||||||
raise TypeError, \
|
raise TypeError(
|
||||||
property + ' of ' + self.__class__.__name__ + ' must be ' + \
|
property + ' of ' + self.__class__.__name__ + ' must be ' + \
|
||||||
property_type.__name__ + ', not ' + value.__class__.__name__
|
property_type.__name__ + ', not ' + value.__class__.__name__)
|
||||||
|
|
||||||
# Checks passed, perform the assignment.
|
# Checks passed, perform the assignment.
|
||||||
if do_copy:
|
if do_copy:
|
||||||
|
@ -804,9 +803,9 @@ class XCObject(object):
|
||||||
elif isinstance(value, dict):
|
elif isinstance(value, dict):
|
||||||
self._properties[property] = value.copy()
|
self._properties[property] = value.copy()
|
||||||
else:
|
else:
|
||||||
raise TypeError, "Don't know how to copy a " + \
|
raise TypeError("Don't know how to copy a " + \
|
||||||
value.__class__.__name__ + ' object for ' + \
|
value.__class__.__name__ + ' object for ' + \
|
||||||
property + ' in ' + self.__class__.__name__
|
property + ' in ' + self.__class__.__name__)
|
||||||
else:
|
else:
|
||||||
self._properties[property] = value
|
self._properties[property] = value
|
||||||
|
|
||||||
|
@ -837,15 +836,15 @@ class XCObject(object):
|
||||||
|
|
||||||
# Schema validation.
|
# Schema validation.
|
||||||
if not key in self._schema:
|
if not key in self._schema:
|
||||||
raise KeyError, key + ' not in ' + self.__class__.__name__
|
raise KeyError(key + ' not in ' + self.__class__.__name__)
|
||||||
|
|
||||||
(is_list, property_type, is_strong) = self._schema[key][0:3]
|
(is_list, property_type, is_strong) = self._schema[key][0:3]
|
||||||
if not is_list:
|
if not is_list:
|
||||||
raise TypeError, key + ' of ' + self.__class__.__name__ + ' must be list'
|
raise TypeError(key + ' of ' + self.__class__.__name__ + ' must be list')
|
||||||
if not isinstance(value, property_type):
|
if not isinstance(value, property_type):
|
||||||
raise TypeError, 'item of ' + key + ' of ' + self.__class__.__name__ + \
|
raise TypeError('item of ' + key + ' of ' + self.__class__.__name__ + \
|
||||||
' must be ' + property_type.__name__ + ', not ' + \
|
' must be ' + property_type.__name__ + ', not ' + \
|
||||||
value.__class__.__name__
|
value.__class__.__name__)
|
||||||
|
|
||||||
# If the property doesn't exist yet, create a new empty list to receive the
|
# If the property doesn't exist yet, create a new empty list to receive the
|
||||||
# item.
|
# item.
|
||||||
|
@ -869,7 +868,7 @@ class XCObject(object):
|
||||||
for property, attributes in self._schema.iteritems():
|
for property, attributes in self._schema.iteritems():
|
||||||
(is_list, property_type, is_strong, is_required) = attributes[0:4]
|
(is_list, property_type, is_strong, is_required) = attributes[0:4]
|
||||||
if is_required and not property in self._properties:
|
if is_required and not property in self._properties:
|
||||||
raise KeyError, self.__class__.__name__ + ' requires ' + property
|
raise KeyError(self.__class__.__name__ + ' requires ' + property)
|
||||||
|
|
||||||
def _SetDefaultsFromSchema(self):
|
def _SetDefaultsFromSchema(self):
|
||||||
"""Assign object default values according to the schema. This will not
|
"""Assign object default values according to the schema. This will not
|
||||||
|
@ -1143,16 +1142,16 @@ class PBXGroup(XCHierarchicalElement):
|
||||||
child_path = child.PathFromSourceTreeAndPath()
|
child_path = child.PathFromSourceTreeAndPath()
|
||||||
if child_path:
|
if child_path:
|
||||||
if child_path in self._children_by_path:
|
if child_path in self._children_by_path:
|
||||||
raise ValueError, 'Found multiple children with path ' + child_path
|
raise ValueError('Found multiple children with path ' + child_path)
|
||||||
self._children_by_path[child_path] = child
|
self._children_by_path[child_path] = child
|
||||||
|
|
||||||
if isinstance(child, PBXVariantGroup):
|
if isinstance(child, PBXVariantGroup):
|
||||||
child_name = child._properties.get('name', None)
|
child_name = child._properties.get('name', None)
|
||||||
key = (child_name, child_path)
|
key = (child_name, child_path)
|
||||||
if key in self._variant_children_by_name_and_path:
|
if key in self._variant_children_by_name_and_path:
|
||||||
raise ValueError, 'Found multiple PBXVariantGroup children with ' + \
|
raise ValueError('Found multiple PBXVariantGroup children with ' + \
|
||||||
'name ' + str(child_name) + ' and path ' + \
|
'name ' + str(child_name) + ' and path ' + \
|
||||||
str(child_path)
|
str(child_path))
|
||||||
self._variant_children_by_name_and_path[key] = child
|
self._variant_children_by_name_and_path[key] = child
|
||||||
|
|
||||||
def AppendChild(self, child):
|
def AppendChild(self, child):
|
||||||
|
@ -1508,9 +1507,12 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
|
||||||
's': 'sourcecode.asm',
|
's': 'sourcecode.asm',
|
||||||
'storyboard': 'file.storyboard',
|
'storyboard': 'file.storyboard',
|
||||||
'strings': 'text.plist.strings',
|
'strings': 'text.plist.strings',
|
||||||
|
'swift': 'sourcecode.swift',
|
||||||
'ttf': 'file',
|
'ttf': 'file',
|
||||||
|
'xcassets': 'folder.assetcatalog',
|
||||||
'xcconfig': 'text.xcconfig',
|
'xcconfig': 'text.xcconfig',
|
||||||
'xcdatamodel': 'wrapper.xcdatamodel',
|
'xcdatamodel': 'wrapper.xcdatamodel',
|
||||||
|
'xcdatamodeld':'wrapper.xcdatamodeld',
|
||||||
'xib': 'file.xib',
|
'xib': 'file.xib',
|
||||||
'y': 'sourcecode.yacc',
|
'y': 'sourcecode.yacc',
|
||||||
}
|
}
|
||||||
|
@ -1605,7 +1607,7 @@ class XCConfigurationList(XCObject):
|
||||||
if configuration._properties['name'] == name:
|
if configuration._properties['name'] == name:
|
||||||
return configuration
|
return configuration
|
||||||
|
|
||||||
raise KeyError, name
|
raise KeyError(name)
|
||||||
|
|
||||||
def DefaultConfiguration(self):
|
def DefaultConfiguration(self):
|
||||||
"""Convenience accessor to obtain the default XCBuildConfiguration."""
|
"""Convenience accessor to obtain the default XCBuildConfiguration."""
|
||||||
|
@ -1662,7 +1664,7 @@ class XCConfigurationList(XCObject):
|
||||||
value = configuration_value
|
value = configuration_value
|
||||||
else:
|
else:
|
||||||
if value != configuration_value:
|
if value != configuration_value:
|
||||||
raise ValueError, 'Variant values for ' + key
|
raise ValueError('Variant values for ' + key)
|
||||||
|
|
||||||
return value
|
return value
|
||||||
|
|
||||||
|
@ -1769,8 +1771,8 @@ class XCBuildPhase(XCObject):
|
||||||
# added, either as a child or deeper descendant. The second item should
|
# added, either as a child or deeper descendant. The second item should
|
||||||
# be a boolean indicating whether files should be added into hierarchical
|
# be a boolean indicating whether files should be added into hierarchical
|
||||||
# groups or one single flat group.
|
# groups or one single flat group.
|
||||||
raise NotImplementedError, \
|
raise NotImplementedError(
|
||||||
self.__class__.__name__ + ' must implement FileGroup'
|
self.__class__.__name__ + ' must implement FileGroup')
|
||||||
|
|
||||||
def _AddPathToDict(self, pbxbuildfile, path):
|
def _AddPathToDict(self, pbxbuildfile, path):
|
||||||
"""Adds path to the dict tracking paths belonging to this build phase.
|
"""Adds path to the dict tracking paths belonging to this build phase.
|
||||||
|
@ -1779,7 +1781,7 @@ class XCBuildPhase(XCObject):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if path in self._files_by_path:
|
if path in self._files_by_path:
|
||||||
raise ValueError, 'Found multiple build files with path ' + path
|
raise ValueError('Found multiple build files with path ' + path)
|
||||||
self._files_by_path[path] = pbxbuildfile
|
self._files_by_path[path] = pbxbuildfile
|
||||||
|
|
||||||
def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
|
def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
|
||||||
|
@ -1834,8 +1836,8 @@ class XCBuildPhase(XCObject):
|
||||||
# problem.
|
# problem.
|
||||||
if xcfilelikeelement in self._files_by_xcfilelikeelement and \
|
if xcfilelikeelement in self._files_by_xcfilelikeelement and \
|
||||||
self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile:
|
self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile:
|
||||||
raise ValueError, 'Found multiple build files for ' + \
|
raise ValueError('Found multiple build files for ' + \
|
||||||
xcfilelikeelement.Name()
|
xcfilelikeelement.Name())
|
||||||
self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
|
self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
|
||||||
|
|
||||||
def AppendBuildFile(self, pbxbuildfile, path=None):
|
def AppendBuildFile(self, pbxbuildfile, path=None):
|
||||||
|
@ -1999,8 +2001,8 @@ class PBXCopyFilesBuildPhase(XCBuildPhase):
|
||||||
subfolder = 0
|
subfolder = 0
|
||||||
relative_path = path[1:]
|
relative_path = path[1:]
|
||||||
else:
|
else:
|
||||||
raise ValueError, 'Can\'t use path %s in a %s' % \
|
raise ValueError('Can\'t use path %s in a %s' % \
|
||||||
(path, self.__class__.__name__)
|
(path, self.__class__.__name__))
|
||||||
|
|
||||||
self._properties['dstPath'] = relative_path
|
self._properties['dstPath'] = relative_path
|
||||||
self._properties['dstSubfolderSpec'] = subfolder
|
self._properties['dstSubfolderSpec'] = subfolder
|
||||||
|
@ -2236,10 +2238,16 @@ class PBXNativeTarget(XCTarget):
|
||||||
# Mapping from Xcode product-types to settings. The settings are:
|
# Mapping from Xcode product-types to settings. The settings are:
|
||||||
# filetype : used for explicitFileType in the project file
|
# filetype : used for explicitFileType in the project file
|
||||||
# prefix : the prefix for the file name
|
# prefix : the prefix for the file name
|
||||||
# suffix : the suffix for the filen ame
|
# suffix : the suffix for the file name
|
||||||
_product_filetypes = {
|
_product_filetypes = {
|
||||||
'com.apple.product-type.application': ['wrapper.application',
|
'com.apple.product-type.application': ['wrapper.application',
|
||||||
'', '.app'],
|
'', '.app'],
|
||||||
|
'com.apple.product-type.application.watchapp': ['wrapper.application',
|
||||||
|
'', '.app'],
|
||||||
|
'com.apple.product-type.watchkit-extension': ['wrapper.app-extension',
|
||||||
|
'', '.appex'],
|
||||||
|
'com.apple.product-type.app-extension': ['wrapper.app-extension',
|
||||||
|
'', '.appex'],
|
||||||
'com.apple.product-type.bundle': ['wrapper.cfbundle',
|
'com.apple.product-type.bundle': ['wrapper.cfbundle',
|
||||||
'', '.bundle'],
|
'', '.bundle'],
|
||||||
'com.apple.product-type.framework': ['wrapper.framework',
|
'com.apple.product-type.framework': ['wrapper.framework',
|
||||||
|
@ -2312,11 +2320,11 @@ class PBXNativeTarget(XCTarget):
|
||||||
|
|
||||||
if force_extension is not None:
|
if force_extension is not None:
|
||||||
# If it's a wrapper (bundle), set WRAPPER_EXTENSION.
|
# If it's a wrapper (bundle), set WRAPPER_EXTENSION.
|
||||||
|
# Extension override.
|
||||||
|
suffix = '.' + force_extension
|
||||||
if filetype.startswith('wrapper.'):
|
if filetype.startswith('wrapper.'):
|
||||||
self.SetBuildSetting('WRAPPER_EXTENSION', force_extension)
|
self.SetBuildSetting('WRAPPER_EXTENSION', force_extension)
|
||||||
else:
|
else:
|
||||||
# Extension override.
|
|
||||||
suffix = '.' + force_extension
|
|
||||||
self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension)
|
self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension)
|
||||||
|
|
||||||
if filetype.startswith('compiled.mach-o.executable'):
|
if filetype.startswith('compiled.mach-o.executable'):
|
||||||
|
@ -2732,8 +2740,53 @@ class PBXProject(XCContainerPortal):
|
||||||
|
|
||||||
self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
|
self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
|
||||||
|
|
||||||
|
inherit_unique_symroot = self._AllSymrootsUnique(other_pbxproject, False)
|
||||||
|
targets = other_pbxproject.GetProperty('targets')
|
||||||
|
if all(self._AllSymrootsUnique(t, inherit_unique_symroot) for t in targets):
|
||||||
|
dir_path = project_ref._properties['path']
|
||||||
|
product_group._hashables.extend(dir_path)
|
||||||
|
|
||||||
return [product_group, project_ref]
|
return [product_group, project_ref]
|
||||||
|
|
||||||
|
def _AllSymrootsUnique(self, target, inherit_unique_symroot):
|
||||||
|
# Returns True if all configurations have a unique 'SYMROOT' attribute.
|
||||||
|
# The value of inherit_unique_symroot decides, if a configuration is assumed
|
||||||
|
# to inherit a unique 'SYMROOT' attribute from its parent, if it doesn't
|
||||||
|
# define an explicit value for 'SYMROOT'.
|
||||||
|
symroots = self._DefinedSymroots(target)
|
||||||
|
for s in self._DefinedSymroots(target):
|
||||||
|
if (s is not None and not self._IsUniqueSymrootForTarget(s) or
|
||||||
|
s is None and not inherit_unique_symroot):
|
||||||
|
return False
|
||||||
|
return True if symroots else inherit_unique_symroot
|
||||||
|
|
||||||
|
def _DefinedSymroots(self, target):
|
||||||
|
# Returns all values for the 'SYMROOT' attribute defined in all
|
||||||
|
# configurations for this target. If any configuration doesn't define the
|
||||||
|
# 'SYMROOT' attribute, None is added to the returned set. If all
|
||||||
|
# configurations don't define the 'SYMROOT' attribute, an empty set is
|
||||||
|
# returned.
|
||||||
|
config_list = target.GetProperty('buildConfigurationList')
|
||||||
|
symroots = set()
|
||||||
|
for config in config_list.GetProperty('buildConfigurations'):
|
||||||
|
setting = config.GetProperty('buildSettings')
|
||||||
|
if 'SYMROOT' in setting:
|
||||||
|
symroots.add(setting['SYMROOT'])
|
||||||
|
else:
|
||||||
|
symroots.add(None)
|
||||||
|
if len(symroots) == 1 and None in symroots:
|
||||||
|
return set()
|
||||||
|
return symroots
|
||||||
|
|
||||||
|
def _IsUniqueSymrootForTarget(self, symroot):
|
||||||
|
# This method returns True if all configurations in target contain a
|
||||||
|
# 'SYMROOT' attribute that is unique for the given target. A value is
|
||||||
|
# unique, if the Xcode macro '$SRCROOT' appears in it in any form.
|
||||||
|
uniquifier = ['$SRCROOT', '$(SRCROOT)']
|
||||||
|
if any(x in symroot for x in uniquifier):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
def _SetUpProductReferences(self, other_pbxproject, product_group,
|
def _SetUpProductReferences(self, other_pbxproject, product_group,
|
||||||
project_ref):
|
project_ref):
|
||||||
# TODO(mark): This only adds references to products in other_pbxproject
|
# TODO(mark): This only adds references to products in other_pbxproject
|
||||||
|
@ -2802,7 +2855,7 @@ class PBXProject(XCContainerPortal):
|
||||||
product_group = ref_dict['ProductGroup']
|
product_group = ref_dict['ProductGroup']
|
||||||
product_group._properties['children'] = sorted(
|
product_group._properties['children'] = sorted(
|
||||||
product_group._properties['children'],
|
product_group._properties['children'],
|
||||||
cmp=lambda x, y: CompareProducts(x, y, remote_products))
|
cmp=lambda x, y, rp=remote_products: CompareProducts(x, y, rp))
|
||||||
|
|
||||||
|
|
||||||
class XCProjectFile(XCObject):
|
class XCProjectFile(XCObject):
|
||||||
|
@ -2810,27 +2863,10 @@ class XCProjectFile(XCObject):
|
||||||
_schema.update({
|
_schema.update({
|
||||||
'archiveVersion': [0, int, 0, 1, 1],
|
'archiveVersion': [0, int, 0, 1, 1],
|
||||||
'classes': [0, dict, 0, 1, {}],
|
'classes': [0, dict, 0, 1, {}],
|
||||||
'objectVersion': [0, int, 0, 1, 45],
|
'objectVersion': [0, int, 0, 1, 46],
|
||||||
'rootObject': [0, PBXProject, 1, 1],
|
'rootObject': [0, PBXProject, 1, 1],
|
||||||
})
|
})
|
||||||
|
|
||||||
def SetXcodeVersion(self, version):
|
|
||||||
version_to_object_version = {
|
|
||||||
'2.4': 45,
|
|
||||||
'3.0': 45,
|
|
||||||
'3.1': 45,
|
|
||||||
'3.2': 46,
|
|
||||||
}
|
|
||||||
if not version in version_to_object_version:
|
|
||||||
supported_str = ', '.join(sorted(version_to_object_version.keys()))
|
|
||||||
raise Exception(
|
|
||||||
'Unsupported Xcode version %s (supported: %s)' %
|
|
||||||
( version, supported_str ) )
|
|
||||||
compatibility_version = 'Xcode %s' % version
|
|
||||||
self._properties['rootObject'].SetProperty('compatibilityVersion',
|
|
||||||
compatibility_version)
|
|
||||||
self.SetProperty('objectVersion', version_to_object_version[version]);
|
|
||||||
|
|
||||||
def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
|
def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
|
||||||
# Although XCProjectFile is implemented here as an XCObject, it's not a
|
# Although XCProjectFile is implemented here as an XCObject, it's not a
|
||||||
# proper object in the Xcode sense, and it certainly doesn't have its own
|
# proper object in the Xcode sense, and it certainly doesn't have its own
|
||||||
|
|
307
gyp/pylintrc
307
gyp/pylintrc
|
@ -1,307 +0,0 @@
|
||||||
[MASTER]
|
|
||||||
|
|
||||||
# Specify a configuration file.
|
|
||||||
#rcfile=
|
|
||||||
|
|
||||||
# Python code to execute, usually for sys.path manipulation such as
|
|
||||||
# pygtk.require().
|
|
||||||
#init-hook=
|
|
||||||
|
|
||||||
# Profiled execution.
|
|
||||||
profile=no
|
|
||||||
|
|
||||||
# Add files or directories to the blacklist. They should be base names, not
|
|
||||||
# paths.
|
|
||||||
ignore=CVS
|
|
||||||
|
|
||||||
# Pickle collected data for later comparisons.
|
|
||||||
persistent=yes
|
|
||||||
|
|
||||||
# List of plugins (as comma separated values of python modules names) to load,
|
|
||||||
# usually to register additional checkers.
|
|
||||||
load-plugins=
|
|
||||||
|
|
||||||
|
|
||||||
[MESSAGES CONTROL]
|
|
||||||
|
|
||||||
# Enable the message, report, category or checker with the given id(s). You can
|
|
||||||
# either give multiple identifier separated by comma (,) or put this option
|
|
||||||
# multiple time.
|
|
||||||
#enable=
|
|
||||||
|
|
||||||
# Disable the message, report, category or checker with the given id(s). You
|
|
||||||
# can either give multiple identifier separated by comma (,) or put this option
|
|
||||||
# multiple time (only on the command line, not in the configuration file where
|
|
||||||
# it should appear only once).
|
|
||||||
# C0103: Invalid name "NN" (should match [a-z_][a-z0-9_]{2,30}$)
|
|
||||||
# C0111: Missing docstring
|
|
||||||
# C0302: Too many lines in module (NN)
|
|
||||||
# R0902: Too many instance attributes (N/7)
|
|
||||||
# R0903: Too few public methods (N/2)
|
|
||||||
# R0904: Too many public methods (NN/20)
|
|
||||||
# R0912: Too many branches (NN/12)
|
|
||||||
# R0913: Too many arguments (N/5)
|
|
||||||
# R0914: Too many local variables (NN/15)
|
|
||||||
# R0915: Too many statements (NN/50)
|
|
||||||
# W0141: Used builtin function 'map'
|
|
||||||
# W0142: Used * or ** magic
|
|
||||||
# W0232: Class has no __init__ method
|
|
||||||
# W0511: TODO
|
|
||||||
# W0603: Using the global statement
|
|
||||||
#
|
|
||||||
# These should be enabled eventually:
|
|
||||||
# C0112: Empty docstring
|
|
||||||
# C0301: Line too long (NN/80)
|
|
||||||
# C0321: More than one statement on single line
|
|
||||||
# C0322: Operator not preceded by a space
|
|
||||||
# C0323: Operator not followed by a space
|
|
||||||
# C0324: Comma not followed by a space
|
|
||||||
# E0101: Explicit return in __init__
|
|
||||||
# E0102: function already defined line NN
|
|
||||||
# E1002: Use of super on an old style class
|
|
||||||
# E1101: Instance of 'XX' has no 'YY' member
|
|
||||||
# E1103: Instance of 'XX' has no 'XX' member (but some types could not be inferred)
|
|
||||||
# E0602: Undefined variable 'XX'
|
|
||||||
# F0401: Unable to import 'XX'
|
|
||||||
# R0201: Method could be a function
|
|
||||||
# R0801: Similar lines in N files
|
|
||||||
# W0102: Dangerous default value {} as argument
|
|
||||||
# W0104: Statement seems to have no effect
|
|
||||||
# W0105: String statement has no effect
|
|
||||||
# W0108: Lambda may not be necessary
|
|
||||||
# W0201: Attribute 'XX' defined outside __init__
|
|
||||||
# W0212: Access to a protected member XX of a client class
|
|
||||||
# W0221: Arguments number differs from overridden method
|
|
||||||
# W0223: Method 'XX' is abstract in class 'YY' but is not overridden
|
|
||||||
# W0231: __init__ method from base class 'XX' is not called
|
|
||||||
# W0301: Unnecessary semicolon
|
|
||||||
# W0311: Bad indentation. Found NN spaces, expected NN
|
|
||||||
# W0401: Wildcard import XX
|
|
||||||
# W0402: Uses of a deprecated module 'string'
|
|
||||||
# W0403: Relative import 'XX', should be 'YY.XX'
|
|
||||||
# W0404: Reimport 'XX' (imported line NN)
|
|
||||||
# W0601: Global variable 'XX' undefined at the module level
|
|
||||||
# W0602: Using global for 'XX' but no assignment is done
|
|
||||||
# W0611: Unused import pprint
|
|
||||||
# W0612: Unused variable 'XX'
|
|
||||||
# W0613: Unused argument 'XX'
|
|
||||||
# W0614: Unused import XX from wildcard import
|
|
||||||
# W0621: Redefining name 'XX' from outer scope (line NN)
|
|
||||||
# W0622: Redefining built-in 'NN'
|
|
||||||
# W0631: Using possibly undefined loop variable 'XX'
|
|
||||||
# W0701: Raising a string exception
|
|
||||||
# W0702: No exception type(s) specified
|
|
||||||
disable=C0103,C0111,C0302,R0902,R0903,R0904,R0912,R0913,R0914,R0915,W0141,W0142,W0232,W0511,W0603,C0112,C0301,C0321,C0322,C0323,C0324,E0101,E0102,E1002,E1101,E1103,E0602,F0401,R0201,R0801,W0102,W0104,W0105,W0108,W0201,W0212,W0221,W0223,W0231,W0301,W0311,W0401,W0402,W0403,W0404,W0601,W0602,W0611,W0612,W0613,W0614,W0621,W0622,W0631,W0701,W0702
|
|
||||||
|
|
||||||
|
|
||||||
[REPORTS]
|
|
||||||
|
|
||||||
# Set the output format. Available formats are text, parseable, colorized, msvs
|
|
||||||
# (visual studio) and html
|
|
||||||
output-format=text
|
|
||||||
|
|
||||||
# Include message's id in output
|
|
||||||
include-ids=yes
|
|
||||||
|
|
||||||
# Put messages in a separate file for each module / package specified on the
|
|
||||||
# command line instead of printing them on stdout. Reports (if any) will be
|
|
||||||
# written in a file name "pylint_global.[txt|html]".
|
|
||||||
files-output=no
|
|
||||||
|
|
||||||
# Tells whether to display a full report or only the messages
|
|
||||||
reports=no
|
|
||||||
|
|
||||||
# Python expression which should return a note less than 10 (10 is the highest
|
|
||||||
# note). You have access to the variables errors warning, statement which
|
|
||||||
# respectively contain the number of errors / warnings messages and the total
|
|
||||||
# number of statements analyzed. This is used by the global evaluation report
|
|
||||||
# (RP0004).
|
|
||||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
|
||||||
|
|
||||||
# Add a comment according to your evaluation note. This is used by the global
|
|
||||||
# evaluation report (RP0004).
|
|
||||||
comment=no
|
|
||||||
|
|
||||||
|
|
||||||
[VARIABLES]
|
|
||||||
|
|
||||||
# Tells whether we should check for unused import in __init__ files.
|
|
||||||
init-import=no
|
|
||||||
|
|
||||||
# A regular expression matching the beginning of the name of dummy variables
|
|
||||||
# (i.e. not used).
|
|
||||||
dummy-variables-rgx=_|dummy
|
|
||||||
|
|
||||||
# List of additional names supposed to be defined in builtins. Remember that
|
|
||||||
# you should avoid to define new builtins when possible.
|
|
||||||
additional-builtins=
|
|
||||||
|
|
||||||
|
|
||||||
[TYPECHECK]
|
|
||||||
|
|
||||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
|
||||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
|
||||||
ignore-mixin-members=yes
|
|
||||||
|
|
||||||
# List of classes names for which member attributes should not be checked
|
|
||||||
# (useful for classes with attributes dynamically set).
|
|
||||||
ignored-classes=SQLObject
|
|
||||||
|
|
||||||
# When zope mode is activated, add a predefined set of Zope acquired attributes
|
|
||||||
# to generated-members.
|
|
||||||
zope=no
|
|
||||||
|
|
||||||
# List of members which are set dynamically and missed by pylint inference
|
|
||||||
# system, and so shouldn't trigger E0201 when accessed. Python regular
|
|
||||||
# expressions are accepted.
|
|
||||||
generated-members=REQUEST,acl_users,aq_parent
|
|
||||||
|
|
||||||
|
|
||||||
[MISCELLANEOUS]
|
|
||||||
|
|
||||||
# List of note tags to take in consideration, separated by a comma.
|
|
||||||
notes=FIXME,XXX,TODO
|
|
||||||
|
|
||||||
|
|
||||||
[SIMILARITIES]
|
|
||||||
|
|
||||||
# Minimum lines number of a similarity.
|
|
||||||
min-similarity-lines=4
|
|
||||||
|
|
||||||
# Ignore comments when computing similarities.
|
|
||||||
ignore-comments=yes
|
|
||||||
|
|
||||||
# Ignore docstrings when computing similarities.
|
|
||||||
ignore-docstrings=yes
|
|
||||||
|
|
||||||
|
|
||||||
[FORMAT]
|
|
||||||
|
|
||||||
# Maximum number of characters on a single line.
|
|
||||||
max-line-length=80
|
|
||||||
|
|
||||||
# Maximum number of lines in a module
|
|
||||||
max-module-lines=1000
|
|
||||||
|
|
||||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
|
||||||
# tab).
|
|
||||||
indent-string=' '
|
|
||||||
|
|
||||||
|
|
||||||
[BASIC]
|
|
||||||
|
|
||||||
# Required attributes for module, separated by a comma
|
|
||||||
required-attributes=
|
|
||||||
|
|
||||||
# List of builtins function names that should not be used, separated by a comma
|
|
||||||
bad-functions=map,filter,apply,input
|
|
||||||
|
|
||||||
# Regular expression which should only match correct module names
|
|
||||||
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
|
||||||
|
|
||||||
# Regular expression which should only match correct module level names
|
|
||||||
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
|
||||||
|
|
||||||
# Regular expression which should only match correct class names
|
|
||||||
class-rgx=[A-Z_][a-zA-Z0-9]+$
|
|
||||||
|
|
||||||
# Regular expression which should only match correct function names
|
|
||||||
function-rgx=[a-z_][a-z0-9_]{2,30}$
|
|
||||||
|
|
||||||
# Regular expression which should only match correct method names
|
|
||||||
method-rgx=[a-z_][a-z0-9_]{2,30}$
|
|
||||||
|
|
||||||
# Regular expression which should only match correct instance attribute names
|
|
||||||
attr-rgx=[a-z_][a-z0-9_]{2,30}$
|
|
||||||
|
|
||||||
# Regular expression which should only match correct argument names
|
|
||||||
argument-rgx=[a-z_][a-z0-9_]{2,30}$
|
|
||||||
|
|
||||||
# Regular expression which should only match correct variable names
|
|
||||||
variable-rgx=[a-z_][a-z0-9_]{2,30}$
|
|
||||||
|
|
||||||
# Regular expression which should only match correct list comprehension /
|
|
||||||
# generator expression variable names
|
|
||||||
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
|
|
||||||
|
|
||||||
# Good variable names which should always be accepted, separated by a comma
|
|
||||||
good-names=i,j,k,ex,Run,_
|
|
||||||
|
|
||||||
# Bad variable names which should always be refused, separated by a comma
|
|
||||||
bad-names=foo,bar,baz,toto,tutu,tata
|
|
||||||
|
|
||||||
# Regular expression which should only match functions or classes name which do
|
|
||||||
# not require a docstring
|
|
||||||
no-docstring-rgx=__.*__
|
|
||||||
|
|
||||||
|
|
||||||
[DESIGN]
|
|
||||||
|
|
||||||
# Maximum number of arguments for function / method
|
|
||||||
max-args=5
|
|
||||||
|
|
||||||
# Argument names that match this expression will be ignored. Default to name
|
|
||||||
# with leading underscore
|
|
||||||
ignored-argument-names=_.*
|
|
||||||
|
|
||||||
# Maximum number of locals for function / method body
|
|
||||||
max-locals=15
|
|
||||||
|
|
||||||
# Maximum number of return / yield for function / method body
|
|
||||||
max-returns=6
|
|
||||||
|
|
||||||
# Maximum number of branch for function / method body
|
|
||||||
max-branchs=12
|
|
||||||
|
|
||||||
# Maximum number of statements in function / method body
|
|
||||||
max-statements=50
|
|
||||||
|
|
||||||
# Maximum number of parents for a class (see R0901).
|
|
||||||
max-parents=7
|
|
||||||
|
|
||||||
# Maximum number of attributes for a class (see R0902).
|
|
||||||
max-attributes=7
|
|
||||||
|
|
||||||
# Minimum number of public methods for a class (see R0903).
|
|
||||||
min-public-methods=2
|
|
||||||
|
|
||||||
# Maximum number of public methods for a class (see R0904).
|
|
||||||
max-public-methods=20
|
|
||||||
|
|
||||||
|
|
||||||
[CLASSES]
|
|
||||||
|
|
||||||
# List of interface methods to ignore, separated by a comma. This is used for
|
|
||||||
# instance to not check methods defines in Zope's Interface base class.
|
|
||||||
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
|
|
||||||
|
|
||||||
# List of method names used to declare (i.e. assign) instance attributes.
|
|
||||||
defining-attr-methods=__init__,__new__,setUp
|
|
||||||
|
|
||||||
# List of valid names for the first argument in a class method.
|
|
||||||
valid-classmethod-first-arg=cls
|
|
||||||
|
|
||||||
|
|
||||||
[IMPORTS]
|
|
||||||
|
|
||||||
# Deprecated modules which should not be used, separated by a comma
|
|
||||||
deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
|
|
||||||
|
|
||||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
|
||||||
# given file (report RP0402 must not be disabled)
|
|
||||||
import-graph=
|
|
||||||
|
|
||||||
# Create a graph of external dependencies in the given file (report RP0402 must
|
|
||||||
# not be disabled)
|
|
||||||
ext-import-graph=
|
|
||||||
|
|
||||||
# Create a graph of internal dependencies in the given file (report RP0402 must
|
|
||||||
# not be disabled)
|
|
||||||
int-import-graph=
|
|
||||||
|
|
||||||
|
|
||||||
[EXCEPTIONS]
|
|
||||||
|
|
||||||
# Exceptions that will emit a warning when being caught. Defaults to
|
|
||||||
# "Exception"
|
|
||||||
overgeneral-exceptions=Exception
|
|
|
@ -15,14 +15,36 @@
|
||||||
"recent emacsen), not from the older and less maintained "
|
"recent emacsen), not from the older and less maintained "
|
||||||
"python-mode.el")))
|
"python-mode.el")))
|
||||||
|
|
||||||
(defadvice python-calculate-indentation (after ami-outdent-closing-parens
|
(defadvice python-indent-calculate-levels (after gyp-outdent-closing-parens
|
||||||
activate)
|
activate)
|
||||||
"De-indent closing parens, braces, and brackets in gyp-mode."
|
"De-indent closing parens, braces, and brackets in gyp-mode."
|
||||||
(if (and (eq major-mode 'gyp-mode)
|
(when (and (eq major-mode 'gyp-mode)
|
||||||
(string-match "^ *[])}][],)}]* *$"
|
(string-match "^ *[])}][],)}]* *$"
|
||||||
(buffer-substring-no-properties
|
(buffer-substring-no-properties
|
||||||
(line-beginning-position) (line-end-position))))
|
(line-beginning-position) (line-end-position))))
|
||||||
(setq ad-return-value (- ad-return-value 2))))
|
(setf (first python-indent-levels)
|
||||||
|
(- (first python-indent-levels) python-continuation-offset))))
|
||||||
|
|
||||||
|
(defadvice python-indent-guess-indent-offset (around
|
||||||
|
gyp-indent-guess-indent-offset
|
||||||
|
activate)
|
||||||
|
"Guess correct indent offset in gyp-mode."
|
||||||
|
(or (and (not (eq major-mode 'gyp-mode))
|
||||||
|
ad-do-it)
|
||||||
|
(save-excursion
|
||||||
|
(save-restriction
|
||||||
|
(widen)
|
||||||
|
(goto-char (point-min))
|
||||||
|
;; Find first line ending with an opening brace that is not a comment.
|
||||||
|
(or (and (re-search-forward "\\(^[[{]$\\|^.*[^#].*[[{]$\\)")
|
||||||
|
(forward-line)
|
||||||
|
(/= (current-indentation) 0)
|
||||||
|
(set (make-local-variable 'python-indent-offset)
|
||||||
|
(current-indentation))
|
||||||
|
(set (make-local-variable 'python-continuation-offset)
|
||||||
|
(current-indentation)))
|
||||||
|
(message "Can't guess gyp indent offset, using default: %s"
|
||||||
|
python-continuation-offset))))))
|
||||||
|
|
||||||
(define-derived-mode gyp-mode python-mode "Gyp"
|
(define-derived-mode gyp-mode python-mode "Gyp"
|
||||||
"Major mode for editing .gyp files. See http://code.google.com/p/gyp/"
|
"Major mode for editing .gyp files. See http://code.google.com/p/gyp/"
|
||||||
|
@ -35,9 +57,10 @@
|
||||||
|
|
||||||
(defun gyp-set-indentation ()
|
(defun gyp-set-indentation ()
|
||||||
"Hook function to configure python indentation to suit gyp mode."
|
"Hook function to configure python indentation to suit gyp mode."
|
||||||
(setq python-continuation-offset 2
|
(set (make-local-variable 'python-indent-offset) 2)
|
||||||
python-indent 2
|
(set (make-local-variable 'python-continuation-offset) 2)
|
||||||
python-guess-indent nil))
|
(set (make-local-variable 'python-indent-guess-indent-offset) t)
|
||||||
|
(python-indent-guess-indent-offset))
|
||||||
|
|
||||||
(add-hook 'gyp-mode-hook 'gyp-set-indentation)
|
(add-hook 'gyp-mode-hook 'gyp-set-indentation)
|
||||||
|
|
||||||
|
@ -218,11 +241,11 @@
|
||||||
;; Top-level keywords
|
;; Top-level keywords
|
||||||
(list (concat "['\"]\\("
|
(list (concat "['\"]\\("
|
||||||
(regexp-opt (list "action" "action_name" "actions" "cflags"
|
(regexp-opt (list "action" "action_name" "actions" "cflags"
|
||||||
"conditions" "configurations" "copies" "defines"
|
"cflags_cc" "conditions" "configurations"
|
||||||
"dependencies" "destination"
|
"copies" "defines" "dependencies" "destination"
|
||||||
"direct_dependent_settings"
|
"direct_dependent_settings"
|
||||||
"export_dependent_settings" "extension" "files"
|
"export_dependent_settings" "extension" "files"
|
||||||
"include_dirs" "includes" "inputs" "libraries"
|
"include_dirs" "includes" "inputs" "ldflags" "libraries"
|
||||||
"link_settings" "mac_bundle" "message"
|
"link_settings" "mac_bundle" "message"
|
||||||
"msvs_external_rule" "outputs" "product_name"
|
"msvs_external_rule" "outputs" "product_name"
|
||||||
"process_outputs_as_sources" "rules" "rule_name"
|
"process_outputs_as_sources" "rules" "rule_name"
|
||||||
|
|
|
@ -38,12 +38,13 @@ def ParseSolution(solution_file):
|
||||||
|
|
||||||
# Regular expressions that matches the SLN format.
|
# Regular expressions that matches the SLN format.
|
||||||
# The first line of a project definition.
|
# The first line of a project definition.
|
||||||
begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
|
begin_project = re.compile(r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
|
||||||
'}"\) = "(.*)", "(.*)", "(.*)"$'))
|
r'}"\) = "(.*)", "(.*)", "(.*)"$')
|
||||||
# The last line of a project definition.
|
# The last line of a project definition.
|
||||||
end_project = re.compile('^EndProject$')
|
end_project = re.compile('^EndProject$')
|
||||||
# The first line of a dependency list.
|
# The first line of a dependency list.
|
||||||
begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$')
|
begin_dep = re.compile(
|
||||||
|
r'ProjectSection\(ProjectDependencies\) = postProject$')
|
||||||
# The last line of a dependency list.
|
# The last line of a dependency list.
|
||||||
end_dep = re.compile('EndProjectSection$')
|
end_dep = re.compile('EndProjectSection$')
|
||||||
# A line describing a dependency.
|
# A line describing a dependency.
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue