Diffstat (limited to 'server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp')
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.flake8 | 4
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml | 36
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml | 45
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml | 32
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/release-please.yml | 16
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/AUTHORS | 16
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/CHANGELOG.md | 233
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md | 4
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/CONTRIBUTING.md | 32
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/LICENSE | 28
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/README.md | 7
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc | 12
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/gyp | 8
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/gyp.bat | 5
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/gyp_main.py | 45
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py | 367
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py | 206
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py | 1270
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py | 1547
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py | 59
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py | 153
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py | 271
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py | 574
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/__init__.py | 690
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/common.py | 661
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/common_test.py | 78
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py | 165
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py | 109
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py | 55
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py | 0
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py | 808
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py | 1173
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py | 1321
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py | 120
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py | 103
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py | 464
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py | 89
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py | 58
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py | 2717
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py | 3981
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py | 44
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py | 2936
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py | 55
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py | 1394
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py | 25
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/input.py | 3130
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/input_test.py | 98
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py | 771
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py | 1271
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py | 174
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py | 61
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py | 374
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py | 1939
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py | 302
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py | 3197
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py | 65
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pyproject.toml | 41
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/test_gyp.py | 261
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/README | 15
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/Xcode/README | 5
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec | 27
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec | 226
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/README | 12
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el | 63
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/gyp.el | 275
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/run-unit-tests.sh | 7
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp | 1105
-rw-r--r--  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp.fontified | 1107
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/graphviz.py | 102
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/pretty_gyp.py | 156
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/pretty_sln.py | 181
-rwxr-xr-x  server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/pretty_vcproj.py | 339
72 files changed, 37320 insertions, 0 deletions
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.flake8 b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.flake8
new file mode 100644
index 0000000..ea0c768
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.flake8
@@ -0,0 +1,4 @@
+[flake8]
+max-complexity = 101
+max-line-length = 88
+extend-ignore = E203 # whitespace before ':' to agree with psf/black
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml
new file mode 100644
index 0000000..aad1350
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml
@@ -0,0 +1,36 @@
+# TODO: Enable os: windows-latest
+# TODO: Enable pytest --doctest-modules
+
+name: Python_tests
+on:
+ push:
+ branches: [ main ]
+ pull_request:
+ branches: [ main ]
+ workflow_dispatch:
+jobs:
+ Python_tests:
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ max-parallel: 8
+ matrix:
+ os: [macos-latest, ubuntu-latest] # , windows-latest]
+ python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"]
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip setuptools
+ pip install --editable ".[dev]"
+ - run: ./gyp -V && ./gyp --version && gyp -V && gyp --version
+ - name: Lint with flake8
+ run: flake8 . --ignore=E203,W503 --max-complexity=101 --max-line-length=88 --show-source --statistics
+ - name: Test with pytest
+ run: pytest
+ # - name: Run doctests with pytest
+ # run: pytest --doctest-modules
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml
new file mode 100644
index 0000000..7cc1f9e
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml
@@ -0,0 +1,45 @@
+name: node-gyp integration
+on:
+ push:
+ branches: [ main ]
+ pull_request:
+ branches: [ main ]
+ workflow_dispatch:
+jobs:
+ integration:
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [macos-latest, ubuntu-latest, windows-latest]
+ python: ["3.7", "3.10"]
+
+ runs-on: ${{ matrix.os }}
+ steps:
+ - name: Clone gyp-next
+ uses: actions/checkout@v3
+ with:
+ path: gyp-next
+ - name: Clone nodejs/node-gyp
+ uses: actions/checkout@v3
+ with:
+ repository: nodejs/node-gyp
+ path: node-gyp
+ - uses: actions/setup-node@v3
+ with:
+ node-version: 14.x
+ - uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python }}
+ - name: Install dependencies
+ run: |
+ cd node-gyp
+ npm install --no-progress
+ - name: Replace gyp in node-gyp
+ shell: bash
+ run: |
+ rm -rf node-gyp/gyp
+ cp -r gyp-next node-gyp/gyp
+ - name: Run tests
+ run: |
+ cd node-gyp
+ npm test
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml
new file mode 100644
index 0000000..4e6c954
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml
@@ -0,0 +1,32 @@
+name: Node.js Windows integration
+
+on:
+ push:
+ branches: [ main ]
+ pull_request:
+ branches: [ main ]
+ workflow_dispatch:
+
+jobs:
+ build-windows:
+ runs-on: windows-2019
+ steps:
+ - name: Clone gyp-next
+ uses: actions/checkout@v3
+ with:
+ path: gyp-next
+ - name: Clone nodejs/node
+ uses: actions/checkout@v3
+ with:
+ repository: nodejs/node
+ path: node
+ - name: Install deps
+ run: choco install nasm
+ - name: Replace gyp in Node.js
+ run: |
+ rm -Recurse node/tools/gyp
+ cp -Recurse gyp-next node/tools/gyp
+ - name: Build Node.js
+ run: |
+ cd node
+ ./vcbuild.bat
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/release-please.yml b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/release-please.yml
new file mode 100644
index 0000000..665c4c4
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/.github/workflows/release-please.yml
@@ -0,0 +1,16 @@
+on:
+ push:
+ branches:
+ - main
+
+name: release-please
+jobs:
+ release-please:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: google-github-actions/release-please-action@v3
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ release-type: python
+ package-name: gyp-next
+ bump-minor-pre-major: true
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/AUTHORS b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/AUTHORS
new file mode 100644
index 0000000..f49a357
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/AUTHORS
@@ -0,0 +1,16 @@
+# Names should be added to this file like so:
+# Name or Organization <email address>
+
+Google Inc. <*@google.com>
+Bloomberg Finance L.P. <*@bloomberg.net>
+IBM Inc. <*@*.ibm.com>
+Yandex LLC <*@yandex-team.ru>
+
+Steven Knight <knight@baldmt.com>
+Ryan Norton <rnorton10@gmail.com>
+David J. Sankel <david@sankelsoftware.com>
+Eric N. Vander Weele <ericvw@gmail.com>
+Tom Freudenberg <th.freudenberg@gmail.com>
+Julien Brianceau <jbriance@cisco.com>
+Refael Ackermann <refack@gmail.com>
+Ujjwal Sharma <ryzokuken@disroot.org>
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/CHANGELOG.md b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/CHANGELOG.md
new file mode 100644
index 0000000..4b4968f
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/CHANGELOG.md
@@ -0,0 +1,233 @@
+# Changelog
+
+## [0.14.0](https://github.com/nodejs/gyp-next/compare/v0.13.0...v0.14.0) (2022-10-08)
+
+
+### Features
+
+* Add command line argument for `gyp --version` ([#164](https://github.com/nodejs/gyp-next/issues/164)) ([5c9f4d0](https://github.com/nodejs/gyp-next/commit/5c9f4d05678dd855e18ed2327219e5d18e5374db))
+* ninja build for iOS ([#174](https://github.com/nodejs/gyp-next/issues/174)) ([b6f2714](https://github.com/nodejs/gyp-next/commit/b6f271424e0033d7ed54d437706695af2ba7a1bf))
+* **zos:** support IBM Open XL C/C++ & PL/I compilers on z/OS ([#178](https://github.com/nodejs/gyp-next/issues/178)) ([43a7211](https://github.com/nodejs/gyp-next/commit/43a72110ae3fafb13c9625cc7a969624b27cda47))
+
+
+### Bug Fixes
+
+* lock windows env ([#163](https://github.com/nodejs/gyp-next/issues/163)) ([44bd0dd](https://github.com/nodejs/gyp-next/commit/44bd0ddc93ea0b5770a44dd326a2e4ae62c21442))
+* move configuration information into pyproject.toml ([#176](https://github.com/nodejs/gyp-next/issues/176)) ([d69d8ec](https://github.com/nodejs/gyp-next/commit/d69d8ece6dbff7af4f2ea073c9fd170baf8cb7f7))
+* node.js debugger adds stderr (but exit code is 0) -> shouldn't throw ([#179](https://github.com/nodejs/gyp-next/issues/179)) ([1a457d9](https://github.com/nodejs/gyp-next/commit/1a457d9ed08cfd30c9fa551bc5cf0d90fb583787))
+
+## [0.13.0](https://www.github.com/nodejs/gyp-next/compare/v0.12.1...v0.13.0) (2022-05-11)
+
+
+### Features
+
+* add PRODUCT_DIR_ABS variable ([#151](https://www.github.com/nodejs/gyp-next/issues/151)) ([80d2626](https://www.github.com/nodejs/gyp-next/commit/80d26263581db829b61b312a7bdb5cc791df7824))
+
+
+### Bug Fixes
+
+* execvp: printf: Argument list too long ([#147](https://www.github.com/nodejs/gyp-next/issues/147)) ([c4e14f3](https://www.github.com/nodejs/gyp-next/commit/c4e14f301673fadbac3ab7882d0b5f4d02530cb9))
+
+### [0.12.1](https://www.github.com/nodejs/gyp-next/compare/v0.12.0...v0.12.1) (2022-04-06)
+
+
+### Bug Fixes
+
+* **msvs:** avoid fixing path for arguments with "=" ([#143](https://www.github.com/nodejs/gyp-next/issues/143)) ([7e8f16e](https://www.github.com/nodejs/gyp-next/commit/7e8f16eb165e042e64bec98fa6c2a0232a42c26b))
+
+## [0.12.0](https://www.github.com/nodejs/gyp-next/compare/v0.11.0...v0.12.0) (2022-04-04)
+
+
+### Features
+
+* support building shared libraries on z/OS ([#137](https://www.github.com/nodejs/gyp-next/issues/137)) ([293bcfa](https://www.github.com/nodejs/gyp-next/commit/293bcfa4c25c6adb743377adafc45a80fee492c6))
+
+## [0.11.0](https://www.github.com/nodejs/gyp-next/compare/v0.10.1...v0.11.0) (2022-03-04)
+
+
+### Features
+
+* Add proper support for IBM i ([#140](https://www.github.com/nodejs/gyp-next/issues/140)) ([fdda4a3](https://www.github.com/nodejs/gyp-next/commit/fdda4a3038b8a7042ad960ce7a223687c24a21b1))
+
+### [0.10.1](https://www.github.com/nodejs/gyp-next/compare/v0.10.0...v0.10.1) (2021-11-24)
+
+
+### Bug Fixes
+
+* **make:** only generate makefile for multiple toolsets if requested ([#133](https://www.github.com/nodejs/gyp-next/issues/133)) ([f463a77](https://www.github.com/nodejs/gyp-next/commit/f463a77705973289ea38fec1b244c922ac438e26))
+
+## [0.10.0](https://www.github.com/nodejs/gyp-next/compare/v0.9.6...v0.10.0) (2021-08-26)
+
+
+### Features
+
+* **msvs:** add support for Visual Studio 2022 ([#124](https://www.github.com/nodejs/gyp-next/issues/124)) ([4bd9215](https://www.github.com/nodejs/gyp-next/commit/4bd9215c44d300f06e916aec1d6327c22b78272d))
+
+### [0.9.6](https://www.github.com/nodejs/gyp-next/compare/v0.9.5...v0.9.6) (2021-08-23)
+
+
+### Bug Fixes
+
+* align flake8 test ([#122](https://www.github.com/nodejs/gyp-next/issues/122)) ([f1faa8d](https://www.github.com/nodejs/gyp-next/commit/f1faa8d3081e1a47e917ff910892f00dff16cf8a))
+* **msvs:** fix paths again in action command arguments ([#121](https://www.github.com/nodejs/gyp-next/issues/121)) ([7159dfb](https://www.github.com/nodejs/gyp-next/commit/7159dfbc5758c9ec717e215f2c36daf482c846a1))
+
+### [0.9.5](https://www.github.com/nodejs/gyp-next/compare/v0.9.4...v0.9.5) (2021-08-18)
+
+
+### Bug Fixes
+
+* add python 3.6 to node-gyp integration test ([3462d4c](https://www.github.com/nodejs/gyp-next/commit/3462d4ce3c31cce747513dc7ca9760c81d57c60e))
+* revert for windows compatibility ([d078e7d](https://www.github.com/nodejs/gyp-next/commit/d078e7d7ae080ddae243188f6415f940376a7368))
+* support msvs_quote_cmd in ninja generator ([#117](https://www.github.com/nodejs/gyp-next/issues/117)) ([46486ac](https://www.github.com/nodejs/gyp-next/commit/46486ac6e9329529d51061e006a5b39631e46729))
+
+### [0.9.4](https://www.github.com/nodejs/gyp-next/compare/v0.9.3...v0.9.4) (2021-08-09)
+
+
+### Bug Fixes
+
+* .S is an extension for asm file on Windows ([#115](https://www.github.com/nodejs/gyp-next/issues/115)) ([d2fad44](https://www.github.com/nodejs/gyp-next/commit/d2fad44ef3a79ca8900f1307060153ded57053fc))
+
+### [0.9.3](https://www.github.com/nodejs/gyp-next/compare/v0.9.2...v0.9.3) (2021-07-07)
+
+
+### Bug Fixes
+
+* build failure with ninja and Python 3 on Windows ([#113](https://www.github.com/nodejs/gyp-next/issues/113)) ([c172d10](https://www.github.com/nodejs/gyp-next/commit/c172d105deff5db4244e583942215918fa80dd3c))
+
+### [0.9.2](https://www.github.com/nodejs/gyp-next/compare/v0.9.1...v0.9.2) (2021-05-21)
+
+
+### Bug Fixes
+
+* add support of utf8 encoding ([#105](https://www.github.com/nodejs/gyp-next/issues/105)) ([4d0f93c](https://www.github.com/nodejs/gyp-next/commit/4d0f93c249286d1f0c0f665f5fe7346119f98cf1))
+
+### [0.9.1](https://www.github.com/nodejs/gyp-next/compare/v0.9.0...v0.9.1) (2021-05-14)
+
+
+### Bug Fixes
+
+* py lint ([3b6a8ee](https://www.github.com/nodejs/gyp-next/commit/3b6a8ee7a66193a8a6867eba9e1d2b70bdf04402))
+
+## [0.9.0](https://www.github.com/nodejs/gyp-next/compare/v0.8.1...v0.9.0) (2021-05-13)
+
+
+### Features
+
+* use LDFLAGS_host for host toolset ([#98](https://www.github.com/nodejs/gyp-next/issues/98)) ([bea5c7b](https://www.github.com/nodejs/gyp-next/commit/bea5c7bd67d6ad32acbdce79767a5481c70675a2))
+
+
+### Bug Fixes
+
+* msvs.py: remove overindentation ([#102](https://www.github.com/nodejs/gyp-next/issues/102)) ([3f83e99](https://www.github.com/nodejs/gyp-next/commit/3f83e99056d004d9579ceb786e06b624ddc36529))
+* update gyp.el to change case to cl-case ([#93](https://www.github.com/nodejs/gyp-next/issues/93)) ([13d5b66](https://www.github.com/nodejs/gyp-next/commit/13d5b66aab35985af9c2fb1174fdc6e1c1407ecc))
+
+### [0.8.1](https://www.github.com/nodejs/gyp-next/compare/v0.8.0...v0.8.1) (2021-02-18)
+
+
+### Bug Fixes
+
+* update shebang lines from python to python3 ([#94](https://www.github.com/nodejs/gyp-next/issues/94)) ([a1b0d41](https://www.github.com/nodejs/gyp-next/commit/a1b0d4171a8049a4ab7a614202063dec332f2df4))
+
+## [0.8.0](https://www.github.com/nodejs/gyp-next/compare/v0.7.0...v0.8.0) (2021-01-15)
+
+
+### ⚠ BREAKING CHANGES
+
+* remove support for Python 2
+
+### Bug Fixes
+
+* revert posix build job ([#86](https://www.github.com/nodejs/gyp-next/issues/86)) ([39dc34f](https://www.github.com/nodejs/gyp-next/commit/39dc34f0799c074624005fb9bbccf6e028607f9d))
+
+
+### gyp
+
+* Remove support for Python 2 ([#88](https://www.github.com/nodejs/gyp-next/issues/88)) ([22e4654](https://www.github.com/nodejs/gyp-next/commit/22e465426fd892403c95534229af819a99c3f8dc))
+
+## [0.7.0](https://www.github.com/nodejs/gyp-next/compare/v0.6.2...v0.7.0) (2020-12-17)
+
+
+### ⚠ BREAKING CHANGES
+
+* **msvs:** On Windows, arguments passed to the "action" commands are no longer transformed to replace slashes with backslashes.
+
+### Features
+
+* **xcode:** --cross-compiling overrides arch-specific settings ([973bae0](https://www.github.com/nodejs/gyp-next/commit/973bae0b7b08be7b680ecae9565fbd04b3e0787d))
+
+
+### Bug Fixes
+
+* **msvs:** do not fix paths in action command arguments ([fc22f83](https://www.github.com/nodejs/gyp-next/commit/fc22f8335e2016da4aae4f4233074bd651d2faea))
+* cmake on python 3 ([fd61f5f](https://www.github.com/nodejs/gyp-next/commit/fd61f5faa5275ec8fc98e3c7868c0dd46f109540))
+* ValueError: invalid mode: 'rU' while trying to load binding.gyp ([d0504e6](https://www.github.com/nodejs/gyp-next/commit/d0504e6700ce48f44957a4d5891b142a60be946f))
+* xcode cmake parsing ([eefe8d1](https://www.github.com/nodejs/gyp-next/commit/eefe8d10e99863bc4ac7e2ed32facd608d400d4b))
+
+### [0.6.2](https://www.github.com/nodejs/gyp-next/compare/v0.6.1...v0.6.2) (2020-10-16)
+
+
+### Bug Fixes
+
+* do not rewrite absolute paths to avoid long paths ([#74](https://www.github.com/nodejs/gyp-next/issues/74)) ([c2ccc1a](https://www.github.com/nodejs/gyp-next/commit/c2ccc1a81f7f94433a94f4d01a2e820db4c4331a))
+* only include MARMASM when toolset is target ([5a2794a](https://www.github.com/nodejs/gyp-next/commit/5a2794aefb58f0c00404ff042b61740bc8b8d5cd))
+
+### [0.6.1](https://github.com/nodejs/gyp-next/compare/v0.6.0...v0.6.1) (2020-10-14)
+
+
+### Bug Fixes
+
+* Correctly rename object files for absolute paths in MSVS generator.
+
+## [0.6.0](https://github.com/nodejs/gyp-next/compare/v0.5.0...v0.6.0) (2020-10-13)
+
+
+### Features
+
+* The Makefile generator will now output shared libraries directly to the product directory on all platforms (previously only macOS).
+
+## [0.5.0](https://github.com/nodejs/gyp-next/compare/v0.4.0...v0.5.0) (2020-09-30)
+
+
+### Features
+
+* Extended compile_commands_json generator to consider more file extensions than just `c` and `cc`. `cpp` and `cxx` are now supported.
+* Source files with duplicate basenames are now supported.
+
+### Removed
+
+* The `--no-duplicate-basename-check` option was removed.
+* The `msvs_enable_marmasm` configuration option was removed in favor of auto-inclusion of the "marmasm" sections for Windows on ARM.
+
+## [0.4.0](https://github.com/nodejs/gyp-next/compare/v0.3.0...v0.4.0) (2020-07-14)
+
+
+### Features
+
+* Added support for passing arbitrary architectures to Xcode builds, enables `arm64` builds.
+
+### Bug Fixes
+
+* Fixed a bug on Solaris where copying archives failed.
+
+## [0.3.0](https://github.com/nodejs/gyp-next/compare/v0.2.1...v0.3.0) (2020-06-06)
+
+
+### Features
+
+* Added support for MSVC cross-compilation. This allows compilation on x64 for a Windows ARM target.
+
+### Bug Fixes
+
+* Fixed XCode CLT version detection on macOS Catalina.
+
+### [0.2.1](https://github.com/nodejs/gyp-next/compare/v0.2.0...v0.2.1) (2020-05-05)
+
+
+### Bug Fixes
+
+* Relicensed to Node.js contributors.
+* Fixed Windows bug introduced in v0.2.0.
+
+## [0.2.0](https://github.com/nodejs/gyp-next/releases/tag/v0.2.0) (2020-04-06)
+
+This is the first release of this project, based on https://chromium.googlesource.com/external/gyp with changes made over the years in Node.js and node-gyp.
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..d724027
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md
@@ -0,0 +1,4 @@
+# Code of Conduct
+
+* [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md)
+* [Node.js Moderation Policy](https://github.com/nodejs/admin/blob/HEAD/Moderation-Policy.md)
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/CONTRIBUTING.md b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/CONTRIBUTING.md
new file mode 100644
index 0000000..1a0bcde
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/CONTRIBUTING.md
@@ -0,0 +1,32 @@
+# Contributing to gyp-next
+
+## Code of Conduct
+
+This project is bound to the [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md).
+
+<a id="developers-certificate-of-origin"></a>
+## Developer's Certificate of Origin 1.1
+
+By making a contribution to this project, I certify that:
+
+* (a) The contribution was created in whole or in part by me and I
+ have the right to submit it under the open source license
+ indicated in the file; or
+
+* (b) The contribution is based upon previous work that, to the best
+ of my knowledge, is covered under an appropriate open source
+ license and I have the right under that license to submit that
+ work with modifications, whether created in whole or in part
+ by me, under the same open source license (unless I am
+ permitted to submit under a different license), as indicated
+ in the file; or
+
+* (c) The contribution was provided directly to me by some other
+ person who certified (a), (b) or (c) and I have not modified
+ it.
+
+* (d) I understand and agree that this project and the contribution
+ are public and that a record of the contribution (including all
+ personal information I submit with it, including my sign-off) is
+ maintained indefinitely and may be redistributed consistent with
+ this project or the open source license(s) involved.
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/LICENSE b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/LICENSE
new file mode 100644
index 0000000..c6944c5
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/LICENSE
@@ -0,0 +1,28 @@
+Copyright (c) 2020 Node.js contributors. All rights reserved.
+Copyright (c) 2009 Google Inc. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/README.md b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/README.md
new file mode 100644
index 0000000..9ffc2b2
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/README.md
@@ -0,0 +1,7 @@
+GYP can Generate Your Projects.
+===================================
+
+Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out the `md-pages` branch to read those documents offline.
+
+__gyp-next__ is [released](https://github.com/nodejs/gyp-next/releases) to the [__Python Packaging Index__](https://pypi.org/project/gyp-next) (PyPI) and can be installed with the command:
+* `python3 -m pip install gyp-next`
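+
+For example (an illustrative invocation, not part of the official docs; adjust
+the `.gyp` file and the `-f` generator to your project), you can then generate
+build files with:
+* `gyp --depth=. -f ninja hello.gyp`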
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc
new file mode 100644
index 0000000..8bca510
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc
@@ -0,0 +1,12 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
+// then used during the final link for modules that have large PDBs. Otherwise,
+// the linker will generate a pdb with a page size of 1KB, which imposes a limit
+// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
+// (rather than the linker), this limit is avoided. With this in place PDBs may
+// grow to 2GB.
+//
+// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/gyp b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/gyp
new file mode 100755
index 0000000..1b8b9bd
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/gyp
@@ -0,0 +1,8 @@
+#!/bin/sh
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+base=$(dirname "$0")
+exec python "${base}/gyp_main.py" "$@"
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/gyp.bat b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/gyp.bat
new file mode 100755
index 0000000..c0b4ca2
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/gyp.bat
@@ -0,0 +1,5 @@
+@rem Copyright (c) 2009 Google Inc. All rights reserved.
+@rem Use of this source code is governed by a BSD-style license that can be
+@rem found in the LICENSE file.
+
+@python "%~dp0gyp_main.py" %*
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/gyp_main.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/gyp_main.py
new file mode 100755
index 0000000..f23dcdf
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/gyp_main.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+import subprocess
+
+
+def IsCygwin():
+ # Function copied from pylib/gyp/common.py
+ try:
+ out = subprocess.Popen(
+ "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+ )
+ stdout, _ = out.communicate()
+ return "CYGWIN" in stdout.decode("utf-8")
+ except Exception:
+ return False
+
+
+def UnixifyPath(path):
+ try:
+ if not IsCygwin():
+ return path
+ out = subprocess.Popen(
+ ["cygpath", "-u", path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+ )
+ stdout, _ = out.communicate()
+ return stdout.decode("utf-8")
+ except Exception:
+ return path
+
+
+# Make sure we're using the version of pylib in this repo, not one installed
+# elsewhere on the system. Also convert to Unix style path on Cygwin systems,
+# else the 'gyp' library will not be found
+path = UnixifyPath(sys.argv[0])
+sys.path.insert(0, os.path.join(os.path.dirname(path), "pylib"))
+import gyp # noqa: E402
+
+if __name__ == "__main__":
+ sys.exit(gyp.script_main())
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
new file mode 100644
index 0000000..d6b1897
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
@@ -0,0 +1,367 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""New implementation of Visual Studio project generation."""
+
+import hashlib
+import os
+import random
+from operator import attrgetter
+
+import gyp.common
+
+
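+# Python 3 removed the built-in cmp(); this shim restores it for the
+# __cmp__ comparison helper in MSVSSolutionEntry below.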
+def cmp(x, y):
+ return (x > y) - (x < y)
+
+
+# Initialize random number generator
+random.seed()
+
+# GUIDs for project types
+ENTRY_TYPE_GUIDS = {
+ "project": "{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}",
+ "folder": "{2150E333-8FDC-42A3-9474-1A3956D46DE8}",
+}
+
+# ------------------------------------------------------------------------------
+# Helper functions
+
+
+def MakeGuid(name, seed="msvs_new"):
+ """Returns a GUID for the specified target name.
+
+ Args:
+ name: Target name.
+ seed: Seed for MD5 hash.
+ Returns:
+    A GUID-like string calculated from the name and seed.
+
+ This generates something which looks like a GUID, but depends only on the
+ name and seed. This means the same name/seed will always generate the same
+  GUID, so that projects and solutions which refer to each other can
+  explicitly determine the GUID to refer to. It also means that the GUID will
+ not change when the project for a target is rebuilt.
+ """
+ # Calculate a MD5 signature for the seed and name.
+ d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper()
+ # Convert most of the signature to GUID form (discard the rest)
+ guid = (
+ "{"
+ + d[:8]
+ + "-"
+ + d[8:12]
+ + "-"
+ + d[12:16]
+ + "-"
+ + d[16:20]
+ + "-"
+ + d[20:32]
+ + "}"
+ )
+ return guid
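+
+# Illustrative note (not in upstream): because MakeGuid hashes only seed+name,
+# MakeGuid("base") returns the identical "{XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX}"
+# string on every run, which keeps cross-referencing projects and solutions stable.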
+
+
+# ------------------------------------------------------------------------------
+
+
+class MSVSSolutionEntry:
+ def __cmp__(self, other):
+ # Sort by name then guid (so things are in order on vs2008).
+ return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
+
+
+class MSVSFolder(MSVSSolutionEntry):
+ """Folder in a Visual Studio project or solution."""
+
+ def __init__(self, path, name=None, entries=None, guid=None, items=None):
+ """Initializes the folder.
+
+ Args:
+ path: Full path to the folder.
+ name: Name of the folder.
+ entries: List of folder entries to nest inside this folder. May contain
+ Folder or Project objects. May be None, if the folder is empty.
+ guid: GUID to use for folder, if not None.
+ items: List of solution items to include in the folder project. May be
+ None, if the folder does not directly contain items.
+ """
+ if name:
+ self.name = name
+ else:
+ # Use last layer.
+ self.name = os.path.basename(path)
+
+ self.path = path
+ self.guid = guid
+
+ # Copy passed lists (or set to empty lists)
+ self.entries = sorted(entries or [], key=attrgetter("path"))
+ self.items = list(items or [])
+
+ self.entry_type_guid = ENTRY_TYPE_GUIDS["folder"]
+
+ def get_guid(self):
+ if self.guid is None:
+ # Use consistent guids for folders (so things don't regenerate).
+ self.guid = MakeGuid(self.path, seed="msvs_folder")
+ return self.guid
+
+
+# ------------------------------------------------------------------------------
+
+
+class MSVSProject(MSVSSolutionEntry):
+ """Visual Studio project."""
+
+ def __init__(
+ self,
+ path,
+ name=None,
+ dependencies=None,
+ guid=None,
+ spec=None,
+ build_file=None,
+ config_platform_overrides=None,
+ fixpath_prefix=None,
+ ):
+ """Initializes the project.
+
+ Args:
+ path: Absolute path to the project file.
+ name: Name of project. If None, the name will be the same as the base
+ name of the project file.
+ dependencies: List of other Project objects this project is dependent
+ upon, if not None.
+ guid: GUID to use for project, if not None.
+ spec: Dictionary specifying how to build this project.
+ build_file: Filename of the .gyp file that the vcproj file comes from.
+ config_platform_overrides: optional dict of configuration platforms to
+        use in place of the default for this target.
+ fixpath_prefix: the path used to adjust the behavior of _fixpath
+ """
+ self.path = path
+ self.guid = guid
+ self.spec = spec
+ self.build_file = build_file
+ # Use project filename if name not specified
+ self.name = name or os.path.splitext(os.path.basename(path))[0]
+
+ # Copy passed lists (or set to empty lists)
+ self.dependencies = list(dependencies or [])
+
+ self.entry_type_guid = ENTRY_TYPE_GUIDS["project"]
+
+ if config_platform_overrides:
+ self.config_platform_overrides = config_platform_overrides
+ else:
+ self.config_platform_overrides = {}
+ self.fixpath_prefix = fixpath_prefix
+ self.msbuild_toolset = None
+
+ def set_dependencies(self, dependencies):
+ self.dependencies = list(dependencies or [])
+
+ def get_guid(self):
+ if self.guid is None:
+ # Set GUID from path
+ # TODO(rspangler): This is fragile.
+ # 1. We can't just use the project filename sans path, since there could
+ # be multiple projects with the same base name (for example,
+ # foo/unittest.vcproj and bar/unittest.vcproj).
+ # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
+ # GUID is the same whether it's included from base/base.sln or
+ # foo/bar/baz/baz.sln.
+ # 3. The GUID needs to be the same each time this builder is invoked, so
+ # that we don't need to rebuild the solution when the project changes.
+ # 4. We should be able to handle pre-built project files by reading the
+ # GUID from the files.
+ self.guid = MakeGuid(self.name)
+ return self.guid
+
+ def set_msbuild_toolset(self, msbuild_toolset):
+ self.msbuild_toolset = msbuild_toolset
+
+
+# ------------------------------------------------------------------------------
+
+
+class MSVSSolution:
+ """Visual Studio solution."""
+
+ def __init__(
+ self, path, version, entries=None, variants=None, websiteProperties=True
+ ):
+ """Initializes the solution.
+
+ Args:
+ path: Path to solution file.
+ version: Format version to emit.
+ entries: List of entries in solution. May contain Folder or Project
+ objects. May be None, if the folder is empty.
+      variants: List of build variant strings. If None, a default list will
+ be used.
+ websiteProperties: Flag to decide if the website properties section
+ is generated.
+ """
+ self.path = path
+ self.websiteProperties = websiteProperties
+ self.version = version
+
+ # Copy passed lists (or set to empty lists)
+ self.entries = list(entries or [])
+
+ if variants:
+ # Copy passed list
+ self.variants = variants[:]
+ else:
+ # Use default
+ self.variants = ["Debug|Win32", "Release|Win32"]
+ # TODO(rspangler): Need to be able to handle a mapping of solution config
+ # to project config. Should we be able to handle variants being a dict,
+ # or add a separate variant_map variable? If it's a dict, we can't
+ # guarantee the order of variants since dict keys aren't ordered.
+
+ # TODO(rspangler): Automatically write to disk for now; should delay until
+ # node-evaluation time.
+ self.Write()
+
+ def Write(self, writer=gyp.common.WriteOnDiff):
+ """Writes the solution file to disk.
+
+ Raises:
+ IndexError: An entry appears multiple times.
+ """
+ # Walk the entry tree and collect all the folders and projects.
+ all_entries = set()
+ entries_to_check = self.entries[:]
+ while entries_to_check:
+ e = entries_to_check.pop(0)
+
+ # If this entry has been visited, nothing to do.
+ if e in all_entries:
+ continue
+
+ all_entries.add(e)
+
+ # If this is a folder, check its entries too.
+ if isinstance(e, MSVSFolder):
+ entries_to_check += e.entries
+
+ all_entries = sorted(all_entries, key=attrgetter("path"))
+
+ # Open file and print header
+ f = writer(self.path)
+ f.write(
+ "Microsoft Visual Studio Solution File, "
+ "Format Version %s\r\n" % self.version.SolutionVersion()
+ )
+ f.write("# %s\r\n" % self.version.Description())
+
+ # Project entries
+ sln_root = os.path.split(self.path)[0]
+ for e in all_entries:
+ relative_path = gyp.common.RelativePath(e.path, sln_root)
+ # msbuild does not accept an empty folder_name.
+ # use '.' in case relative_path is empty.
+ folder_name = relative_path.replace("/", "\\") or "."
+ f.write(
+ 'Project("%s") = "%s", "%s", "%s"\r\n'
+ % (
+ e.entry_type_guid, # Entry type GUID
+ e.name, # Folder name
+ folder_name, # Folder name (again)
+ e.get_guid(), # Entry GUID
+ )
+ )
+
+ # TODO(rspangler): Need a way to configure this stuff
+ if self.websiteProperties:
+ f.write(
+ "\tProjectSection(WebsiteProperties) = preProject\r\n"
+ '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
+ '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
+ "\tEndProjectSection\r\n"
+ )
+
+ if isinstance(e, MSVSFolder):
+ if e.items:
+ f.write("\tProjectSection(SolutionItems) = preProject\r\n")
+ for i in e.items:
+ f.write(f"\t\t{i} = {i}\r\n")
+ f.write("\tEndProjectSection\r\n")
+
+ if isinstance(e, MSVSProject):
+ if e.dependencies:
+ f.write("\tProjectSection(ProjectDependencies) = postProject\r\n")
+ for d in e.dependencies:
+ f.write(f"\t\t{d.get_guid()} = {d.get_guid()}\r\n")
+ f.write("\tEndProjectSection\r\n")
+
+ f.write("EndProject\r\n")
+
+ # Global section
+ f.write("Global\r\n")
+
+ # Configurations (variants)
+ f.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n")
+ for v in self.variants:
+ f.write(f"\t\t{v} = {v}\r\n")
+ f.write("\tEndGlobalSection\r\n")
+
+ # Sort config guids for easier diffing of solution changes.
+ config_guids = []
+ config_guids_overrides = {}
+ for e in all_entries:
+ if isinstance(e, MSVSProject):
+ config_guids.append(e.get_guid())
+ config_guids_overrides[e.get_guid()] = e.config_platform_overrides
+ config_guids.sort()
+
+ f.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n")
+ for g in config_guids:
+ for v in self.variants:
+ nv = config_guids_overrides[g].get(v, v)
+ # Pick which project configuration to build for this solution
+ # configuration.
+ f.write(
+ "\t\t%s.%s.ActiveCfg = %s\r\n"
+ % (
+ g, # Project GUID
+ v, # Solution build configuration
+ nv, # Project build config for that solution config
+ )
+ )
+
+ # Enable project in this solution configuration.
+ f.write(
+ "\t\t%s.%s.Build.0 = %s\r\n"
+ % (
+ g, # Project GUID
+ v, # Solution build configuration
+ nv, # Project build config for that solution config
+ )
+ )
+ f.write("\tEndGlobalSection\r\n")
+
+ # TODO(rspangler): Should be able to configure this stuff too (though I've
+ # never seen this be any different)
+ f.write("\tGlobalSection(SolutionProperties) = preSolution\r\n")
+ f.write("\t\tHideSolutionNode = FALSE\r\n")
+ f.write("\tEndGlobalSection\r\n")
+
+ # Folder mappings
+ # Omit this section if there are no folders
+ if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
+ f.write("\tGlobalSection(NestedProjects) = preSolution\r\n")
+ for e in all_entries:
+ if not isinstance(e, MSVSFolder):
+ continue # Does not apply to projects, only folders
+ for subentry in e.entries:
+ f.write(f"\t\t{subentry.get_guid()} = {e.get_guid()}\r\n")
+ f.write("\tEndGlobalSection\r\n")
+
+ f.write("EndGlobal\r\n")
+
+ f.close()
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
new file mode 100644
index 0000000..f0cfabe
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
@@ -0,0 +1,206 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Visual Studio project reader/writer."""
+
+import gyp.easy_xml as easy_xml
+
+# ------------------------------------------------------------------------------
+
+
+class Tool:
+ """Visual Studio tool."""
+
+ def __init__(self, name, attrs=None):
+ """Initializes the tool.
+
+ Args:
+ name: Tool name.
+ attrs: Dict of tool attributes; may be None.
+ """
+ self._attrs = attrs or {}
+ self._attrs["Name"] = name
+
+ def _GetSpecification(self):
+ """Creates an element for the tool.
+
+ Returns:
+      The specification for the tool, in easy_xml list form.
+ """
+ return ["Tool", self._attrs]
+
+
+class Filter:
+ """Visual Studio filter - that is, a virtual folder."""
+
+ def __init__(self, name, contents=None):
+ """Initializes the folder.
+
+ Args:
+ name: Filter (folder) name.
+ contents: List of filenames and/or Filter objects contained.
+ """
+ self.name = name
+ self.contents = list(contents or [])
+
+
+# ------------------------------------------------------------------------------
+
+
+class Writer:
+ """Visual Studio XML project writer."""
+
+ def __init__(self, project_path, version, name, guid=None, platforms=None):
+ """Initializes the project.
+
+ Args:
+ project_path: Path to the project file.
+ version: Format version to emit.
+ name: Name of the project.
+ guid: GUID to use for project, if not None.
+      platforms: Array of string, the supported platforms. If None, ['Win32'].
+ """
+ self.project_path = project_path
+ self.version = version
+ self.name = name
+ self.guid = guid
+
+ # Default to Win32 for platforms.
+ if not platforms:
+ platforms = ["Win32"]
+
+ # Initialize the specifications of the various sections.
+ self.platform_section = ["Platforms"]
+ for platform in platforms:
+ self.platform_section.append(["Platform", {"Name": platform}])
+ self.tool_files_section = ["ToolFiles"]
+ self.configurations_section = ["Configurations"]
+ self.files_section = ["Files"]
+
+ # Keep a dict keyed on filename to speed up access.
+ self.files_dict = dict()
+
+ def AddToolFile(self, path):
+ """Adds a tool file to the project.
+
+ Args:
+ path: Relative path from project to tool file.
+ """
+ self.tool_files_section.append(["ToolFile", {"RelativePath": path}])
+
+ def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
+ """Returns the specification for a configuration.
+
+ Args:
+ config_type: Type of configuration node.
+ config_name: Configuration name.
+ attrs: Dict of configuration attributes; may be None.
+ tools: List of tools (strings or Tool objects); may be None.
+ Returns:
+ """
+ # Handle defaults
+ if not attrs:
+ attrs = {}
+ if not tools:
+ tools = []
+
+ # Add configuration node and its attributes
+ node_attrs = attrs.copy()
+ node_attrs["Name"] = config_name
+ specification = [config_type, node_attrs]
+
+ # Add tool nodes and their attributes
+ if tools:
+ for t in tools:
+ if isinstance(t, Tool):
+ specification.append(t._GetSpecification())
+ else:
+ specification.append(Tool(t)._GetSpecification())
+ return specification
+
+ def AddConfig(self, name, attrs=None, tools=None):
+ """Adds a configuration to the project.
+
+ Args:
+ name: Configuration name.
+ attrs: Dict of configuration attributes; may be None.
+ tools: List of tools (strings or Tool objects); may be None.
+ """
+ spec = self._GetSpecForConfiguration("Configuration", name, attrs, tools)
+ self.configurations_section.append(spec)
+
+ def _AddFilesToNode(self, parent, files):
+ """Adds files and/or filters to the parent node.
+
+ Args:
+ parent: Destination node
+ files: A list of Filter objects and/or relative paths to files.
+
+ Will call itself recursively, if the files list contains Filter objects.
+ """
+ for f in files:
+ if isinstance(f, Filter):
+ node = ["Filter", {"Name": f.name}]
+ self._AddFilesToNode(node, f.contents)
+ else:
+ node = ["File", {"RelativePath": f}]
+ self.files_dict[f] = node
+ parent.append(node)
+
+ def AddFiles(self, files):
+ """Adds files to the project.
+
+ Args:
+ files: A list of Filter objects and/or relative paths to files.
+
+ This makes a copy of the file/filter tree at the time of this call. If you
+ later add files to a Filter object which was passed into a previous call
+ to AddFiles(), it will not be reflected in this project.
+ """
+ self._AddFilesToNode(self.files_section, files)
+ # TODO(rspangler) This also doesn't handle adding files to an existing
+ # filter. That is, it doesn't merge the trees.
+
+ def AddFileConfig(self, path, config, attrs=None, tools=None):
+ """Adds a configuration to a file.
+
+ Args:
+ path: Relative path to the file.
+ config: Name of configuration to add.
+ attrs: Dict of configuration attributes; may be None.
+ tools: List of tools (strings or Tool objects); may be None.
+
+ Raises:
+ ValueError: Relative path does not match any file added via AddFiles().
+ """
+ # Find the file node with the right relative path
+ parent = self.files_dict.get(path)
+ if not parent:
+ raise ValueError('AddFileConfig: file "%s" not in project.' % path)
+
+ # Add the config to the file node
+ spec = self._GetSpecForConfiguration("FileConfiguration", config, attrs, tools)
+ parent.append(spec)
+
+ def WriteIfChanged(self):
+ """Writes the project file."""
+ # First create XML content definition
+ content = [
+ "VisualStudioProject",
+ {
+ "ProjectType": "Visual C++",
+ "Version": self.version.ProjectVersion(),
+ "Name": self.name,
+ "ProjectGUID": self.guid,
+ "RootNamespace": self.name,
+ "Keyword": "Win32Proj",
+ },
+ self.platform_section,
+ self.tool_files_section,
+ self.configurations_section,
+ ["References"], # empty section
+ self.files_section,
+ ["Globals"], # empty section
+ ]
+ easy_xml.WriteXmlIfChanged(content, self.project_path, encoding="Windows-1252")
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
new file mode 100644
index 0000000..e89a971
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
@@ -0,0 +1,1270 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+r"""Code to validate and convert settings of the Microsoft build tools.
+
+This file contains code to validate and convert settings of the Microsoft
+build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(),
+and ValidateMSBuildSettings() are the entry points.
+
+This file was created by comparing the projects created by Visual Studio 2008
+and Visual Studio 2010 for all available settings through the user interface.
+The MSBuild schemas were also considered. They are typically found in the
+MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
+"""
+
+import re
+import sys
+
+# Dictionaries of settings validators. The key is the tool name, the value is
+# a dictionary mapping setting names to validation functions.
+_msvs_validators = {}
+_msbuild_validators = {}
+
+
+# A dictionary of settings converters. The key is the tool name, the value is
+# a dictionary mapping setting names to conversion functions.
+_msvs_to_msbuild_converters = {}
+
+
+# Tool name mapping from MSVS to MSBuild.
+_msbuild_name_of_tool = {}
+
+
+class _Tool:
+ """Represents a tool used by MSVS or MSBuild.
+
+ Attributes:
+ msvs_name: The name of the tool in MSVS.
+ msbuild_name: The name of the tool in MSBuild.
+ """
+
+ def __init__(self, msvs_name, msbuild_name):
+ self.msvs_name = msvs_name
+ self.msbuild_name = msbuild_name
+
+
+def _AddTool(tool):
+ """Adds a tool to the four dictionaries used to process settings.
+
+ This only defines the tool. Each setting also needs to be added.
+
+ Args:
+ tool: The _Tool object to be added.
+ """
+ _msvs_validators[tool.msvs_name] = {}
+ _msbuild_validators[tool.msbuild_name] = {}
+ _msvs_to_msbuild_converters[tool.msvs_name] = {}
+ _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name
+
+
+def _GetMSBuildToolSettings(msbuild_settings, tool):
+ """Returns an MSBuild tool dictionary. Creates it if needed."""
+ return msbuild_settings.setdefault(tool.msbuild_name, {})
+
+
+class _Type:
+ """Type of settings (Base class)."""
+
+ def ValidateMSVS(self, value):
+ """Verifies that the value is legal for MSVS.
+
+ Args:
+ value: the value to check for this type.
+
+ Raises:
+ ValueError if value is not valid for MSVS.
+ """
+
+ def ValidateMSBuild(self, value):
+ """Verifies that the value is legal for MSBuild.
+
+ Args:
+ value: the value to check for this type.
+
+ Raises:
+ ValueError if value is not valid for MSBuild.
+ """
+
+ def ConvertToMSBuild(self, value):
+ """Returns the MSBuild equivalent of the MSVS value given.
+
+ Args:
+ value: the MSVS value to convert.
+
+ Returns:
+ the MSBuild equivalent.
+
+ Raises:
+ ValueError if value is not valid.
+ """
+ return value
+
+
+class _String(_Type):
+ """A setting that's just a string."""
+
+ def ValidateMSVS(self, value):
+ if not isinstance(value, str):
+ raise ValueError("expected string; got %r" % value)
+
+ def ValidateMSBuild(self, value):
+ if not isinstance(value, str):
+ raise ValueError("expected string; got %r" % value)
+
+ def ConvertToMSBuild(self, value):
+ # Convert the macros
+ return ConvertVCMacrosToMSBuild(value)
+
+
+class _StringList(_Type):
+ """A settings that's a list of strings."""
+
+ def ValidateMSVS(self, value):
+ if not isinstance(value, (list, str)):
+ raise ValueError("expected string list; got %r" % value)
+
+ def ValidateMSBuild(self, value):
+ if not isinstance(value, (list, str)):
+ raise ValueError("expected string list; got %r" % value)
+
+ def ConvertToMSBuild(self, value):
+ # Convert the macros
+ if isinstance(value, list):
+ return [ConvertVCMacrosToMSBuild(i) for i in value]
+ else:
+ return ConvertVCMacrosToMSBuild(value)
+
+
+class _Boolean(_Type):
+ """Boolean settings, can have the values 'false' or 'true'."""
+
+ def _Validate(self, value):
+ if value != "true" and value != "false":
+ raise ValueError("expected bool; got %r" % value)
+
+ def ValidateMSVS(self, value):
+ self._Validate(value)
+
+ def ValidateMSBuild(self, value):
+ self._Validate(value)
+
+ def ConvertToMSBuild(self, value):
+ self._Validate(value)
+ return value
+
+
+class _Integer(_Type):
+ """Integer settings."""
+
+ def __init__(self, msbuild_base=10):
+ _Type.__init__(self)
+ self._msbuild_base = msbuild_base
+
+ def ValidateMSVS(self, value):
+ # Try to convert; this will raise ValueError if invalid.
+ self.ConvertToMSBuild(value)
+
+ def ValidateMSBuild(self, value):
+ # Try to convert; this will raise ValueError if invalid.
+ int(value, self._msbuild_base)
+
+ def ConvertToMSBuild(self, value):
+ msbuild_format = "%d" if self._msbuild_base == 10 else "0x%04x"
+ return msbuild_format % int(value)
+
+
+class _Enumeration(_Type):
+ """Type of settings that is an enumeration.
+
+ In MSVS, the values are indexes like '0', '1', and '2'.
+ MSBuild uses text labels that are more representative, like 'Win32'.
+
+ Constructor args:
+ label_list: an array of MSBuild labels that correspond to the MSVS index.
+ In the rare cases where MSVS has skipped an index value, None is
+ used in the array to indicate the unused spot.
+ new: an array of labels that are new to MSBuild.
+ """
+
+ def __init__(self, label_list, new=None):
+ _Type.__init__(self)
+ self._label_list = label_list
+ self._msbuild_values = {value for value in label_list if value is not None}
+ if new is not None:
+ self._msbuild_values.update(new)
+
+ def ValidateMSVS(self, value):
+ # Try to convert. It will raise an exception if not valid.
+ self.ConvertToMSBuild(value)
+
+ def ValidateMSBuild(self, value):
+ if value not in self._msbuild_values:
+ raise ValueError("unrecognized enumerated value %s" % value)
+
+ def ConvertToMSBuild(self, value):
+ index = int(value)
+ if index < 0 or index >= len(self._label_list):
+ raise ValueError(
+ "index value (%d) not in expected range [0, %d)"
+ % (index, len(self._label_list))
+ )
+ label = self._label_list[index]
+ if label is None:
+ raise ValueError("converted value for %s not specified." % value)
+ return label
+
+
+# Instantiate the various generic types.
+_boolean = _Boolean()
+_integer = _Integer()
+# For now, we don't do any special validation on these types:
+_string = _String()
+_file_name = _String()
+_folder_name = _String()
+_file_list = _StringList()
+_folder_list = _StringList()
+_string_list = _StringList()
+# Some boolean settings went from numerical values to boolean. The
+# mapping is 0: default, 1: false, 2: true.
+_newly_boolean = _Enumeration(["", "false", "true"])
+
+
+def _Same(tool, name, setting_type):
+ """Defines a setting that has the same name in MSVS and MSBuild.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ name: the name of the setting.
+ setting_type: the type of this setting.
+ """
+ _Renamed(tool, name, name, setting_type)
+
+
+def _Renamed(tool, msvs_name, msbuild_name, setting_type):
+ """Defines a setting for which the name has changed.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ msvs_name: the name of the MSVS setting.
+ msbuild_name: the name of the MSBuild setting.
+ setting_type: the type of this setting.
+ """
+
+ def _Translate(value, msbuild_settings):
+ msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
+ msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value)
+
+ _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS
+ _msbuild_validators[tool.msbuild_name][msbuild_name] = setting_type.ValidateMSBuild
+ _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
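+
+# For example, the directive _Renamed(_compile, "WarnAsError",
+# "TreatWarningAsError", _boolean) defined later in this file makes the
+# converter turn {"VCCLCompilerTool": {"WarnAsError": "true"}} into
+# {"ClCompile": {"TreatWarningAsError": "true"}}.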
+
+
+def _Moved(tool, settings_name, msbuild_tool_name, setting_type):
+ _MovedAndRenamed(
+ tool, settings_name, msbuild_tool_name, settings_name, setting_type
+ )
+
+
+def _MovedAndRenamed(
+ tool, msvs_settings_name, msbuild_tool_name, msbuild_settings_name, setting_type
+):
+ """Defines a setting that may have moved to a new section.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ msvs_settings_name: the MSVS name of the setting.
+ msbuild_tool_name: the name of the MSBuild tool to place the setting under.
+ msbuild_settings_name: the MSBuild name of the setting.
+ setting_type: the type of this setting.
+ """
+
+ def _Translate(value, msbuild_settings):
+ tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
+ tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value)
+
+ _msvs_validators[tool.msvs_name][msvs_settings_name] = setting_type.ValidateMSVS
+ validator = setting_type.ValidateMSBuild
+ _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator
+ _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate
+
+
+def _MSVSOnly(tool, name, setting_type):
+ """Defines a setting that is only found in MSVS.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ name: the name of the setting.
+ setting_type: the type of this setting.
+ """
+
+ def _Translate(unused_value, unused_msbuild_settings):
+ # Since this is for MSVS-only settings, no translation will happen.
+ pass
+
+ _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS
+ _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
+
+
+def _MSBuildOnly(tool, name, setting_type):
+ """Defines a setting that is only found in MSBuild.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ name: the name of the setting.
+ setting_type: the type of this setting.
+ """
+
+ def _Translate(value, msbuild_settings):
+ # Let msbuild-only properties get translated as-is from msvs_settings.
+ tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {})
+ tool_settings[name] = value
+
+ _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
+ _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
+
+
+def _ConvertedToAdditionalOption(tool, msvs_name, flag):
+ """Defines a setting that's handled via a command line option in MSBuild.
+
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ msvs_name: the name of the MSVS setting that, if 'true', becomes a flag.
+ flag: the flag to insert at the end of the AdditionalOptions setting.
+ """
+
+ def _Translate(value, msbuild_settings):
+ if value == "true":
+ tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
+ if "AdditionalOptions" in tool_settings:
+ new_flags = "{} {}".format(tool_settings["AdditionalOptions"], flag)
+ else:
+ new_flags = flag
+ tool_settings["AdditionalOptions"] = new_flags
+
+ _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS
+ _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
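+
+# For example, _ConvertedToAdditionalOption(_compile, "DefaultCharIsUnsigned",
+# "/J") below converts {"VCCLCompilerTool": {"DefaultCharIsUnsigned": "true"}}
+# into {"ClCompile": {"AdditionalOptions": "/J"}}, appending to any existing
+# AdditionalOptions value.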
+
+
+def _CustomGeneratePreprocessedFile(tool, msvs_name):
+ def _Translate(value, msbuild_settings):
+ tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
+ if value == "0":
+ tool_settings["PreprocessToFile"] = "false"
+ tool_settings["PreprocessSuppressLineNumbers"] = "false"
+ elif value == "1": # /P
+ tool_settings["PreprocessToFile"] = "true"
+ tool_settings["PreprocessSuppressLineNumbers"] = "false"
+ elif value == "2": # /EP /P
+ tool_settings["PreprocessToFile"] = "true"
+ tool_settings["PreprocessSuppressLineNumbers"] = "true"
+ else:
+ raise ValueError("value must be one of [0, 1, 2]; got %s" % value)
+
+ # Create a bogus validator that looks for '0', '1', or '2'
+ msvs_validator = _Enumeration(["a", "b", "c"]).ValidateMSVS
+ _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator
+ msbuild_validator = _boolean.ValidateMSBuild
+ msbuild_tool_validators = _msbuild_validators[tool.msbuild_name]
+ msbuild_tool_validators["PreprocessToFile"] = msbuild_validator
+ msbuild_tool_validators["PreprocessSuppressLineNumbers"] = msbuild_validator
+ _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
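+
+# Illustrative fan-out: GeneratePreprocessedFile "2" (/EP /P) becomes
+# {"PreprocessToFile": "true", "PreprocessSuppressLineNumbers": "true"}
+# under the MSBuild ClCompile tool.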
+
+
+fix_vc_macro_slashes_regex_list = ("IntDir", "OutDir")
+fix_vc_macro_slashes_regex = re.compile(
+ r"(\$\((?:%s)\))(?:[\\/]+)" % "|".join(fix_vc_macro_slashes_regex_list)
+)
+
+# Regular expression to detect keys that were generated by exclusion lists
+_EXCLUDED_SUFFIX_RE = re.compile("^(.*)_excluded$")
+
+
+def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
+ """Verify that 'setting' is valid if it is generated from an exclusion list.
+
+ If the setting appears to be generated from an exclusion list, the root name
+ is checked.
+
+ Args:
+ setting: A string that is the setting name to validate
+ settings: A dictionary where the keys are valid settings
+ error_msg: The message to emit in the event of error
+ stderr: The stream receiving the error messages.
+ """
+ # This may be unrecognized because it's an exclusion list. If the
+ # setting name has the _excluded suffix, then check the root name.
+ unrecognized = True
+ m = re.match(_EXCLUDED_SUFFIX_RE, setting)
+ if m:
+ root_setting = m.group(1)
+ unrecognized = root_setting not in settings
+
+ if unrecognized:
+ # We don't know this setting. Give a warning.
+ print(error_msg, file=stderr)
+
+
+def FixVCMacroSlashes(s):
+ """Replace macros which have excessive following slashes.
+
+ These macros are known to have a built-in trailing slash. Furthermore, many
+ scripts hiccup on processing paths with extra slashes in the middle.
+
+ This list is probably not exhaustive. Add as needed.
+ """
+ if "$" in s:
+ s = fix_vc_macro_slashes_regex.sub(r"\1", s)
+ return s
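+
+# For example, FixVCMacroSlashes(r"$(IntDir)\obj") returns "$(IntDir)obj",
+# since $(IntDir) already includes a trailing slash.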
+
+
+def ConvertVCMacrosToMSBuild(s):
+ """Convert the MSVS macros found in the string to the MSBuild equivalent.
+
+ This list is probably not exhaustive. Add as needed.
+ """
+ if "$" in s:
+ replace_map = {
+ "$(ConfigurationName)": "$(Configuration)",
+ "$(InputDir)": "%(RelativeDir)",
+ "$(InputExt)": "%(Extension)",
+ "$(InputFileName)": "%(Filename)%(Extension)",
+ "$(InputName)": "%(Filename)",
+ "$(InputPath)": "%(Identity)",
+ "$(ParentName)": "$(ProjectFileName)",
+ "$(PlatformName)": "$(Platform)",
+ "$(SafeInputName)": "%(Filename)",
+ }
+ for old, new in replace_map.items():
+ s = s.replace(old, new)
+ s = FixVCMacroSlashes(s)
+ return s
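+
+# For example, ConvertVCMacrosToMSBuild("$(InputFileName)") returns
+# "%(Filename)%(Extension)"; FixVCMacroSlashes is applied to the result as
+# well.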
+
+
+def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
+ """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).
+
+ Args:
+ msvs_settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+
+ Returns:
+ A dictionary of MSBuild settings. The key is either the MSBuild tool name
+ or the empty string (for the global settings). The values are themselves
+ dictionaries of settings and their values.
+ """
+ msbuild_settings = {}
+ for msvs_tool_name, msvs_tool_settings in msvs_settings.items():
+ if msvs_tool_name in _msvs_to_msbuild_converters:
+ msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
+ for msvs_setting, msvs_value in msvs_tool_settings.items():
+ if msvs_setting in msvs_tool:
+ # Invoke the translation function.
+ try:
+ msvs_tool[msvs_setting](msvs_value, msbuild_settings)
+ except ValueError as e:
+ print(
+ "Warning: while converting %s/%s to MSBuild, "
+ "%s" % (msvs_tool_name, msvs_setting, e),
+ file=stderr,
+ )
+ else:
+ _ValidateExclusionSetting(
+ msvs_setting,
+ msvs_tool,
+ (
+ "Warning: unrecognized setting %s/%s "
+ "while converting to MSBuild."
+ % (msvs_tool_name, msvs_setting)
+ ),
+ stderr,
+ )
+ else:
+ print(
+ "Warning: unrecognized tool %s while converting to "
+ "MSBuild." % msvs_tool_name,
+ file=stderr,
+ )
+ return msbuild_settings
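+
+# A minimal illustration, mirrored by the unit tests in MSVSSettings_test.py:
+# ConvertToMSBuildSettings({"VCLinkerTool": {"ErrorReporting": "1"}}) returns
+# {"Link": {"LinkErrorReporting": "PromptImmediately"}}.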
+
+
+def ValidateMSVSSettings(settings, stderr=sys.stderr):
+ """Validates that the names of the settings are valid for MSVS.
+
+ Args:
+ settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ """
+ _ValidateSettings(_msvs_validators, settings, stderr)
+
+
+def ValidateMSBuildSettings(settings, stderr=sys.stderr):
+ """Validates that the names of the settings are valid for MSBuild.
+
+ Args:
+ settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ """
+ _ValidateSettings(_msbuild_validators, settings, stderr)
+
+
+def _ValidateSettings(validators, settings, stderr):
+ """Validates that the settings are valid for MSBuild or MSVS.
+
+ We currently only validate the names of the settings, not their values.
+
+ Args:
+ validators: A dictionary of tools and their validators.
+ settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ """
+ for tool_name in settings:
+ if tool_name in validators:
+ tool_validators = validators[tool_name]
+ for setting, value in settings[tool_name].items():
+ if setting in tool_validators:
+ try:
+ tool_validators[setting](value)
+ except ValueError as e:
+ print(
+ f"Warning: for {tool_name}/{setting}, {e}",
+ file=stderr,
+ )
+ else:
+ _ValidateExclusionSetting(
+ setting,
+ tool_validators,
+ (f"Warning: unrecognized setting {tool_name}/{setting}"),
+ stderr,
+ )
+
+ else:
+ print("Warning: unrecognized tool %s" % (tool_name), file=stderr)
+
+
+# MSVS and MSBuild names of the tools.
+_compile = _Tool("VCCLCompilerTool", "ClCompile")
+_link = _Tool("VCLinkerTool", "Link")
+_midl = _Tool("VCMIDLTool", "Midl")
+_rc = _Tool("VCResourceCompilerTool", "ResourceCompile")
+_lib = _Tool("VCLibrarianTool", "Lib")
+_manifest = _Tool("VCManifestTool", "Manifest")
+_masm = _Tool("MASM", "MASM")
+_armasm = _Tool("ARMASM", "ARMASM")
+
+
+_AddTool(_compile)
+_AddTool(_link)
+_AddTool(_midl)
+_AddTool(_rc)
+_AddTool(_lib)
+_AddTool(_manifest)
+_AddTool(_masm)
+_AddTool(_armasm)
+# Add sections only found in the MSBuild settings.
+_msbuild_validators[""] = {}
+_msbuild_validators["ProjectReference"] = {}
+_msbuild_validators["ManifestResourceCompile"] = {}
+
+# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and
+# ClCompile in MSBuild.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for
+# the schema of the MSBuild ClCompile settings.
+
+# Options that have the same name in MSVS and MSBuild
+_Same(_compile, "AdditionalIncludeDirectories", _folder_list) # /I
+_Same(_compile, "AdditionalOptions", _string_list)
+_Same(_compile, "AdditionalUsingDirectories", _folder_list) # /AI
+_Same(_compile, "AssemblerListingLocation", _file_name) # /Fa
+_Same(_compile, "BrowseInformationFile", _file_name)
+_Same(_compile, "BufferSecurityCheck", _boolean) # /GS
+_Same(_compile, "DisableLanguageExtensions", _boolean) # /Za
+_Same(_compile, "DisableSpecificWarnings", _string_list) # /wd
+_Same(_compile, "EnableFiberSafeOptimizations", _boolean) # /GT
+_Same(_compile, "EnablePREfast", _boolean) # /analyze Visible='false'
+_Same(_compile, "ExpandAttributedSource", _boolean) # /Fx
+_Same(_compile, "FloatingPointExceptions", _boolean) # /fp:except
+_Same(_compile, "ForceConformanceInForLoopScope", _boolean) # /Zc:forScope
+_Same(_compile, "ForcedIncludeFiles", _file_list) # /FI
+_Same(_compile, "ForcedUsingFiles", _file_list) # /FU
+_Same(_compile, "GenerateXMLDocumentationFiles", _boolean) # /doc
+_Same(_compile, "IgnoreStandardIncludePath", _boolean) # /X
+_Same(_compile, "MinimalRebuild", _boolean) # /Gm
+_Same(_compile, "OmitDefaultLibName", _boolean) # /Zl
+_Same(_compile, "OmitFramePointers", _boolean) # /Oy
+_Same(_compile, "PreprocessorDefinitions", _string_list) # /D
+_Same(_compile, "ProgramDataBaseFileName", _file_name) # /Fd
+_Same(_compile, "RuntimeTypeInfo", _boolean) # /GR
+_Same(_compile, "ShowIncludes", _boolean) # /showIncludes
+_Same(_compile, "SmallerTypeCheck", _boolean) # /RTCc
+_Same(_compile, "StringPooling", _boolean) # /GF
+_Same(_compile, "SuppressStartupBanner", _boolean) # /nologo
+_Same(_compile, "TreatWChar_tAsBuiltInType", _boolean) # /Zc:wchar_t
+_Same(_compile, "UndefineAllPreprocessorDefinitions", _boolean) # /u
+_Same(_compile, "UndefinePreprocessorDefinitions", _string_list) # /U
+_Same(_compile, "UseFullPaths", _boolean) # /FC
+_Same(_compile, "WholeProgramOptimization", _boolean) # /GL
+_Same(_compile, "XMLDocumentationFileName", _file_name)
+_Same(_compile, "CompileAsWinRT", _boolean) # /ZW
+
+_Same(
+ _compile,
+ "AssemblerOutput",
+ _Enumeration(
+ [
+ "NoListing",
+ "AssemblyCode", # /FA
+ "All", # /FAcs
+ "AssemblyAndMachineCode", # /FAc
+ "AssemblyAndSourceCode",
+ ]
+ ),
+) # /FAs
+_Same(
+ _compile,
+ "BasicRuntimeChecks",
+ _Enumeration(
+ [
+ "Default",
+ "StackFrameRuntimeCheck", # /RTCs
+ "UninitializedLocalUsageCheck", # /RTCu
+ "EnableFastChecks",
+ ]
+ ),
+) # /RTC1
+_Same(
+ _compile, "BrowseInformation", _Enumeration(["false", "true", "true"]) # /FR
+) # /Fr
+_Same(
+ _compile,
+ "CallingConvention",
+ _Enumeration(["Cdecl", "FastCall", "StdCall", "VectorCall"]), # /Gd # /Gr # /Gz
+) # /Gv
+_Same(
+ _compile,
+ "CompileAs",
+ _Enumeration(["Default", "CompileAsC", "CompileAsCpp"]), # /TC
+) # /TP
+_Same(
+ _compile,
+ "DebugInformationFormat",
+ _Enumeration(
+ [
+ "", # Disabled
+ "OldStyle", # /Z7
+ None,
+ "ProgramDatabase", # /Zi
+ "EditAndContinue",
+ ]
+ ),
+) # /ZI
+_Same(
+ _compile,
+ "EnableEnhancedInstructionSet",
+ _Enumeration(
+ [
+ "NotSet",
+ "StreamingSIMDExtensions", # /arch:SSE
+ "StreamingSIMDExtensions2", # /arch:SSE2
+ "AdvancedVectorExtensions", # /arch:AVX (vs2012+)
+ "NoExtensions", # /arch:IA32 (vs2012+)
+ # This one only exists in the new MSBuild format.
+ "AdvancedVectorExtensions2", # /arch:AVX2 (vs2013r2+)
+ ]
+ ),
+)
+_Same(
+ _compile,
+ "ErrorReporting",
+ _Enumeration(
+ [
+ "None", # /errorReport:none
+ "Prompt", # /errorReport:prompt
+ "Queue",
+ ], # /errorReport:queue
+ new=["Send"],
+ ),
+) # /errorReport:send"
+_Same(
+ _compile,
+ "ExceptionHandling",
+ _Enumeration(["false", "Sync", "Async"], new=["SyncCThrow"]), # /EHsc # /EHa
+) # /EHs
+_Same(
+ _compile, "FavorSizeOrSpeed", _Enumeration(["Neither", "Speed", "Size"]) # /Ot
+) # /Os
+_Same(
+ _compile,
+ "FloatingPointModel",
+ _Enumeration(["Precise", "Strict", "Fast"]), # /fp:precise # /fp:strict
+) # /fp:fast
+_Same(
+ _compile,
+ "InlineFunctionExpansion",
+ _Enumeration(
+ ["Default", "OnlyExplicitInline", "AnySuitable"], # /Ob1 # /Ob2
+ new=["Disabled"],
+ ),
+) # /Ob0
+_Same(
+ _compile,
+ "Optimization",
+ _Enumeration(["Disabled", "MinSpace", "MaxSpeed", "Full"]), # /Od # /O1 # /O2
+) # /Ox
+_Same(
+ _compile,
+ "RuntimeLibrary",
+ _Enumeration(
+ [
+ "MultiThreaded", # /MT
+ "MultiThreadedDebug", # /MTd
+ "MultiThreadedDLL", # /MD
+ "MultiThreadedDebugDLL",
+ ]
+ ),
+) # /MDd
+_Same(
+ _compile,
+ "StructMemberAlignment",
+ _Enumeration(
+ [
+ "Default",
+ "1Byte", # /Zp1
+ "2Bytes", # /Zp2
+ "4Bytes", # /Zp4
+ "8Bytes", # /Zp8
+ "16Bytes",
+ ]
+ ),
+) # /Zp16
+_Same(
+ _compile,
+ "WarningLevel",
+ _Enumeration(
+ [
+ "TurnOffAllWarnings", # /W0
+ "Level1", # /W1
+ "Level2", # /W2
+ "Level3", # /W3
+ "Level4",
+ ], # /W4
+ new=["EnableAllWarnings"],
+ ),
+) # /Wall
+
+# Options found in MSVS that have been renamed in MSBuild.
+_Renamed(
+ _compile, "EnableFunctionLevelLinking", "FunctionLevelLinking", _boolean
+) # /Gy
+_Renamed(_compile, "EnableIntrinsicFunctions", "IntrinsicFunctions", _boolean) # /Oi
+_Renamed(_compile, "KeepComments", "PreprocessKeepComments", _boolean) # /C
+_Renamed(_compile, "ObjectFile", "ObjectFileName", _file_name) # /Fo
+_Renamed(_compile, "OpenMP", "OpenMPSupport", _boolean) # /openmp
+_Renamed(
+ _compile, "PrecompiledHeaderThrough", "PrecompiledHeaderFile", _file_name
+) # Used with /Yc and /Yu
+_Renamed(
+ _compile, "PrecompiledHeaderFile", "PrecompiledHeaderOutputFile", _file_name
+) # /Fp
+_Renamed(
+ _compile,
+ "UsePrecompiledHeader",
+ "PrecompiledHeader",
+ _Enumeration(
+ ["NotUsing", "Create", "Use"] # VS recognized '' for this value too. # /Yc
+ ),
+) # /Yu
+_Renamed(_compile, "WarnAsError", "TreatWarningAsError", _boolean) # /WX
+
+_ConvertedToAdditionalOption(_compile, "DefaultCharIsUnsigned", "/J")
+
+# MSVS options not found in MSBuild.
+_MSVSOnly(_compile, "Detect64BitPortabilityProblems", _boolean)
+_MSVSOnly(_compile, "UseUnicodeResponseFiles", _boolean)
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_compile, "BuildingInIDE", _boolean)
+_MSBuildOnly(
+ _compile, "CompileAsManaged", _Enumeration([], new=["false", "true"])
+) # /clr
+_MSBuildOnly(_compile, "CreateHotpatchableImage", _boolean) # /hotpatch
+_MSBuildOnly(_compile, "MultiProcessorCompilation", _boolean) # /MP
+_MSBuildOnly(_compile, "PreprocessOutputPath", _string) # /Fi
+_MSBuildOnly(_compile, "ProcessorNumber", _integer) # the number of processors
+_MSBuildOnly(_compile, "TrackerLogDirectory", _folder_name)
+_MSBuildOnly(_compile, "TreatSpecificWarningsAsErrors", _string_list) # /we
+_MSBuildOnly(_compile, "UseUnicodeForAssemblerListing", _boolean) # /FAu
+
+# Defines a setting that needs very customized processing
+_CustomGeneratePreprocessedFile(_compile, "GeneratePreprocessedFile")
+
+
+# Directives for converting MSVS VCLinkerTool to MSBuild Link.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for
+# the schema of the MSBuild Link settings.
+
+# Options that have the same name in MSVS and MSBuild
+_Same(_link, "AdditionalDependencies", _file_list)
+_Same(_link, "AdditionalLibraryDirectories", _folder_list) # /LIBPATH
+# /MANIFESTDEPENDENCY:
+_Same(_link, "AdditionalManifestDependencies", _file_list)
+_Same(_link, "AdditionalOptions", _string_list)
+_Same(_link, "AddModuleNamesToAssembly", _file_list) # /ASSEMBLYMODULE
+_Same(_link, "AllowIsolation", _boolean) # /ALLOWISOLATION
+_Same(_link, "AssemblyLinkResource", _file_list) # /ASSEMBLYLINKRESOURCE
+_Same(_link, "BaseAddress", _string) # /BASE
+_Same(_link, "CLRUnmanagedCodeCheck", _boolean) # /CLRUNMANAGEDCODECHECK
+_Same(_link, "DelayLoadDLLs", _file_list) # /DELAYLOAD
+_Same(_link, "DelaySign", _boolean) # /DELAYSIGN
+_Same(_link, "EmbedManagedResourceFile", _file_list) # /ASSEMBLYRESOURCE
+_Same(_link, "EnableUAC", _boolean) # /MANIFESTUAC
+_Same(_link, "EntryPointSymbol", _string) # /ENTRY
+_Same(_link, "ForceSymbolReferences", _file_list) # /INCLUDE
+_Same(_link, "FunctionOrder", _file_name) # /ORDER
+_Same(_link, "GenerateDebugInformation", _boolean) # /DEBUG
+_Same(_link, "GenerateMapFile", _boolean) # /MAP
+_Same(_link, "HeapCommitSize", _string)
+_Same(_link, "HeapReserveSize", _string) # /HEAP
+_Same(_link, "IgnoreAllDefaultLibraries", _boolean) # /NODEFAULTLIB
+_Same(_link, "IgnoreEmbeddedIDL", _boolean) # /IGNOREIDL
+_Same(_link, "ImportLibrary", _file_name) # /IMPLIB
+_Same(_link, "KeyContainer", _file_name) # /KEYCONTAINER
+_Same(_link, "KeyFile", _file_name) # /KEYFILE
+_Same(_link, "ManifestFile", _file_name) # /ManifestFile
+_Same(_link, "MapExports", _boolean) # /MAPINFO:EXPORTS
+_Same(_link, "MapFileName", _file_name)
+_Same(_link, "MergedIDLBaseFileName", _file_name) # /IDLOUT
+_Same(_link, "MergeSections", _string) # /MERGE
+_Same(_link, "MidlCommandFile", _file_name) # /MIDL
+_Same(_link, "ModuleDefinitionFile", _file_name) # /DEF
+_Same(_link, "OutputFile", _file_name) # /OUT
+_Same(_link, "PerUserRedirection", _boolean)
+_Same(_link, "Profile", _boolean) # /PROFILE
+_Same(_link, "ProfileGuidedDatabase", _file_name) # /PGD
+_Same(_link, "ProgramDatabaseFile", _file_name) # /PDB
+_Same(_link, "RegisterOutput", _boolean)
+_Same(_link, "SetChecksum", _boolean) # /RELEASE
+_Same(_link, "StackCommitSize", _string)
+_Same(_link, "StackReserveSize", _string) # /STACK
+_Same(_link, "StripPrivateSymbols", _file_name) # /PDBSTRIPPED
+_Same(_link, "SupportUnloadOfDelayLoadedDLL", _boolean) # /DELAY:UNLOAD
+_Same(_link, "SuppressStartupBanner", _boolean) # /NOLOGO
+_Same(_link, "SwapRunFromCD", _boolean) # /SWAPRUN:CD
+_Same(_link, "TurnOffAssemblyGeneration", _boolean) # /NOASSEMBLY
+_Same(_link, "TypeLibraryFile", _file_name) # /TLBOUT
+_Same(_link, "TypeLibraryResourceID", _integer) # /TLBID
+_Same(_link, "UACUIAccess", _boolean) # /uiAccess='true'
+_Same(_link, "Version", _string) # /VERSION
+
+_Same(_link, "EnableCOMDATFolding", _newly_boolean) # /OPT:ICF
+_Same(_link, "FixedBaseAddress", _newly_boolean) # /FIXED
+_Same(_link, "LargeAddressAware", _newly_boolean) # /LARGEADDRESSAWARE
+_Same(_link, "OptimizeReferences", _newly_boolean) # /OPT:REF
+_Same(_link, "RandomizedBaseAddress", _newly_boolean) # /DYNAMICBASE
+_Same(_link, "TerminalServerAware", _newly_boolean) # /TSAWARE
+
+_subsystem_enumeration = _Enumeration(
+ [
+ "NotSet",
+ "Console", # /SUBSYSTEM:CONSOLE
+ "Windows", # /SUBSYSTEM:WINDOWS
+ "Native", # /SUBSYSTEM:NATIVE
+ "EFI Application", # /SUBSYSTEM:EFI_APPLICATION
+ "EFI Boot Service Driver", # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER
+ "EFI ROM", # /SUBSYSTEM:EFI_ROM
+ "EFI Runtime", # /SUBSYSTEM:EFI_RUNTIME_DRIVER
+ "WindowsCE",
+ ], # /SUBSYSTEM:WINDOWSCE
+ new=["POSIX"],
+) # /SUBSYSTEM:POSIX
+
+_target_machine_enumeration = _Enumeration(
+ [
+ "NotSet",
+ "MachineX86", # /MACHINE:X86
+ None,
+ "MachineARM", # /MACHINE:ARM
+ "MachineEBC", # /MACHINE:EBC
+ "MachineIA64", # /MACHINE:IA64
+ None,
+ "MachineMIPS", # /MACHINE:MIPS
+ "MachineMIPS16", # /MACHINE:MIPS16
+ "MachineMIPSFPU", # /MACHINE:MIPSFPU
+ "MachineMIPSFPU16", # /MACHINE:MIPSFPU16
+ None,
+ None,
+ None,
+ "MachineSH4", # /MACHINE:SH4
+ None,
+ "MachineTHUMB", # /MACHINE:THUMB
+ "MachineX64",
+ ]
+) # /MACHINE:X64
+
+_Same(
+ _link, "AssemblyDebug", _Enumeration(["", "true", "false"]) # /ASSEMBLYDEBUG
+) # /ASSEMBLYDEBUG:DISABLE
+_Same(
+ _link,
+ "CLRImageType",
+ _Enumeration(
+ [
+ "Default",
+ "ForceIJWImage", # /CLRIMAGETYPE:IJW
+ "ForcePureILImage", # /Switch="CLRIMAGETYPE:PURE
+ "ForceSafeILImage",
+ ]
+ ),
+) # /Switch="CLRIMAGETYPE:SAFE
+_Same(
+ _link,
+ "CLRThreadAttribute",
+ _Enumeration(
+ [
+ "DefaultThreadingAttribute", # /CLRTHREADATTRIBUTE:NONE
+ "MTAThreadingAttribute", # /CLRTHREADATTRIBUTE:MTA
+ "STAThreadingAttribute",
+ ]
+ ),
+) # /CLRTHREADATTRIBUTE:STA
+_Same(
+ _link,
+ "DataExecutionPrevention",
+ _Enumeration(["", "false", "true"]), # /NXCOMPAT:NO
+) # /NXCOMPAT
+_Same(
+ _link,
+ "Driver",
+ _Enumeration(["NotSet", "Driver", "UpOnly", "WDM"]), # /Driver # /DRIVER:UPONLY
+) # /DRIVER:WDM
+_Same(
+ _link,
+ "LinkTimeCodeGeneration",
+ _Enumeration(
+ [
+ "Default",
+ "UseLinkTimeCodeGeneration", # /LTCG
+ "PGInstrument", # /LTCG:PGInstrument
+ "PGOptimization", # /LTCG:PGOptimize
+ "PGUpdate",
+ ]
+ ),
+) # /LTCG:PGUpdate
+_Same(
+ _link,
+ "ShowProgress",
+ _Enumeration(
+ ["NotSet", "LinkVerbose", "LinkVerboseLib"], # /VERBOSE # /VERBOSE:Lib
+ new=[
+ "LinkVerboseICF", # /VERBOSE:ICF
+ "LinkVerboseREF", # /VERBOSE:REF
+ "LinkVerboseSAFESEH", # /VERBOSE:SAFESEH
+ "LinkVerboseCLR",
+ ],
+ ),
+) # /VERBOSE:CLR
+_Same(_link, "SubSystem", _subsystem_enumeration)
+_Same(_link, "TargetMachine", _target_machine_enumeration)
+_Same(
+ _link,
+ "UACExecutionLevel",
+ _Enumeration(
+ [
+ "AsInvoker", # /level='asInvoker'
+ "HighestAvailable", # /level='highestAvailable'
+ "RequireAdministrator",
+ ]
+ ),
+) # /level='requireAdministrator'
+_Same(_link, "MinimumRequiredVersion", _string)
+_Same(_link, "TreatLinkerWarningAsErrors", _boolean) # /WX
+
+
+# Options found in MSVS that have been renamed in MSBuild.
+_Renamed(
+ _link,
+ "ErrorReporting",
+ "LinkErrorReporting",
+ _Enumeration(
+ [
+ "NoErrorReport", # /ERRORREPORT:NONE
+ "PromptImmediately", # /ERRORREPORT:PROMPT
+ "QueueForNextLogin",
+ ], # /ERRORREPORT:QUEUE
+ new=["SendErrorReport"],
+ ),
+) # /ERRORREPORT:SEND
+_Renamed(
+ _link, "IgnoreDefaultLibraryNames", "IgnoreSpecificDefaultLibraries", _file_list
+) # /NODEFAULTLIB
+_Renamed(_link, "ResourceOnlyDLL", "NoEntryPoint", _boolean) # /NOENTRY
+_Renamed(_link, "SwapRunFromNet", "SwapRunFromNET", _boolean) # /SWAPRUN:NET
+
+_Moved(_link, "GenerateManifest", "", _boolean)
+_Moved(_link, "IgnoreImportLibrary", "", _boolean)
+_Moved(_link, "LinkIncremental", "", _newly_boolean)
+_Moved(_link, "LinkLibraryDependencies", "ProjectReference", _boolean)
+_Moved(_link, "UseLibraryDependencyInputs", "ProjectReference", _boolean)
+
+# MSVS options not found in MSBuild.
+_MSVSOnly(_link, "OptimizeForWindows98", _newly_boolean)
+_MSVSOnly(_link, "UseUnicodeResponseFiles", _boolean)
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_link, "BuildingInIDE", _boolean)
+_MSBuildOnly(_link, "ImageHasSafeExceptionHandlers", _boolean) # /SAFESEH
+_MSBuildOnly(_link, "LinkDLL", _boolean) # /DLL Visible='false'
+_MSBuildOnly(_link, "LinkStatus", _boolean) # /LTCG:STATUS
+_MSBuildOnly(_link, "PreventDllBinding", _boolean) # /ALLOWBIND
+_MSBuildOnly(_link, "SupportNobindOfDelayLoadedDLL", _boolean) # /DELAY:NOBIND
+_MSBuildOnly(_link, "TrackerLogDirectory", _folder_name)
+_MSBuildOnly(_link, "MSDOSStubFileName", _file_name) # /STUB Visible='false'
+_MSBuildOnly(_link, "SectionAlignment", _integer) # /ALIGN
+_MSBuildOnly(_link, "SpecifySectionAttributes", _string) # /SECTION
+_MSBuildOnly(
+ _link,
+ "ForceFileOutput",
+ _Enumeration(
+ [],
+ new=[
+ "Enabled", # /FORCE
+ # /FORCE:MULTIPLE
+ "MultiplyDefinedSymbolOnly",
+ "UndefinedSymbolOnly",
+ ],
+ ),
+) # /FORCE:UNRESOLVED
+_MSBuildOnly(
+ _link,
+ "CreateHotPatchableImage",
+ _Enumeration(
+ [],
+ new=[
+ "Enabled", # /FUNCTIONPADMIN
+ "X86Image", # /FUNCTIONPADMIN:5
+ "X64Image", # /FUNCTIONPADMIN:6
+ "ItaniumImage",
+ ],
+ ),
+) # /FUNCTIONPADMIN:16
+_MSBuildOnly(
+ _link,
+ "CLRSupportLastError",
+ _Enumeration(
+ [],
+ new=[
+ "Enabled", # /CLRSupportLastError
+ "Disabled", # /CLRSupportLastError:NO
+ # /CLRSupportLastError:SYSTEMDLL
+ "SystemDlls",
+ ],
+ ),
+)
+
+
+# Directives for converting VCResourceCompilerTool to ResourceCompile.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for
+# the schema of the MSBuild ResourceCompile settings.
+
+_Same(_rc, "AdditionalOptions", _string_list)
+_Same(_rc, "AdditionalIncludeDirectories", _folder_list) # /I
+_Same(_rc, "Culture", _Integer(msbuild_base=16))
+_Same(_rc, "IgnoreStandardIncludePath", _boolean) # /X
+_Same(_rc, "PreprocessorDefinitions", _string_list) # /D
+_Same(_rc, "ResourceOutputFileName", _string) # /fo
+_Same(_rc, "ShowProgress", _boolean) # /v
+# There is no UI in Visual Studio 2008 to set the following properties.
+# However, they are found in CL and other tools. Include them here for
+# completeness, as they are very likely to have the same usage pattern.
+_Same(_rc, "SuppressStartupBanner", _boolean) # /nologo
+_Same(_rc, "UndefinePreprocessorDefinitions", _string_list) # /u
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_rc, "NullTerminateStrings", _boolean) # /n
+_MSBuildOnly(_rc, "TrackerLogDirectory", _folder_name)
+
+
+# Directives for converting VCMIDLTool to Midl.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for
+# the schema of the MSBuild Midl settings.
+
+_Same(_midl, "AdditionalIncludeDirectories", _folder_list) # /I
+_Same(_midl, "AdditionalOptions", _string_list)
+_Same(_midl, "CPreprocessOptions", _string) # /cpp_opt
+_Same(_midl, "ErrorCheckAllocations", _boolean) # /error allocation
+_Same(_midl, "ErrorCheckBounds", _boolean) # /error bounds_check
+_Same(_midl, "ErrorCheckEnumRange", _boolean) # /error enum
+_Same(_midl, "ErrorCheckRefPointers", _boolean) # /error ref
+_Same(_midl, "ErrorCheckStubData", _boolean) # /error stub_data
+_Same(_midl, "GenerateStublessProxies", _boolean) # /Oicf
+_Same(_midl, "GenerateTypeLibrary", _boolean)
+_Same(_midl, "HeaderFileName", _file_name) # /h
+_Same(_midl, "IgnoreStandardIncludePath", _boolean) # /no_def_idir
+_Same(_midl, "InterfaceIdentifierFileName", _file_name) # /iid
+_Same(_midl, "MkTypLibCompatible", _boolean) # /mktyplib203
+_Same(_midl, "OutputDirectory", _string) # /out
+_Same(_midl, "PreprocessorDefinitions", _string_list) # /D
+_Same(_midl, "ProxyFileName", _file_name) # /proxy
+_Same(_midl, "RedirectOutputAndErrors", _file_name) # /o
+_Same(_midl, "SuppressStartupBanner", _boolean) # /nologo
+_Same(_midl, "TypeLibraryName", _file_name) # /tlb
+_Same(_midl, "UndefinePreprocessorDefinitions", _string_list) # /U
+_Same(_midl, "WarnAsError", _boolean) # /WX
+
+_Same(
+ _midl,
+ "DefaultCharType",
+ _Enumeration(["Unsigned", "Signed", "Ascii"]), # /char unsigned # /char signed
+) # /char ascii7
+_Same(
+ _midl,
+ "TargetEnvironment",
+ _Enumeration(
+ [
+ "NotSet",
+ "Win32", # /env win32
+ "Itanium", # /env ia64
+ "X64", # /env x64
+ "ARM64", # /env arm64
+ ]
+ ),
+)
+_Same(
+ _midl,
+ "EnableErrorChecks",
+ _Enumeration(["EnableCustom", "None", "All"]), # /error none
+) # /error all
+_Same(
+ _midl,
+ "StructMemberAlignment",
+ _Enumeration(["NotSet", "1", "2", "4", "8"]), # Zp1 # Zp2 # Zp4
+) # Zp8
+_Same(
+ _midl,
+ "WarningLevel",
+ _Enumeration(["0", "1", "2", "3", "4"]), # /W0 # /W1 # /W2 # /W3
+) # /W4
+
+_Renamed(_midl, "DLLDataFileName", "DllDataFileName", _file_name) # /dlldata
+_Renamed(_midl, "ValidateParameters", "ValidateAllParameters", _boolean) # /robust
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_midl, "ApplicationConfigurationMode", _boolean) # /app_config
+_MSBuildOnly(_midl, "ClientStubFile", _file_name) # /cstub
+_MSBuildOnly(
+ _midl, "GenerateClientFiles", _Enumeration([], new=["Stub", "None"]) # /client stub
+) # /client none
+_MSBuildOnly(
+ _midl, "GenerateServerFiles", _Enumeration([], new=["Stub", "None"]) # /client stub
+) # /client none
+_MSBuildOnly(_midl, "LocaleID", _integer) # /lcid DECIMAL
+_MSBuildOnly(_midl, "ServerStubFile", _file_name) # /sstub
+_MSBuildOnly(_midl, "SuppressCompilerWarnings", _boolean) # /no_warn
+_MSBuildOnly(_midl, "TrackerLogDirectory", _folder_name)
+_MSBuildOnly(
+ _midl, "TypeLibFormat", _Enumeration([], new=["NewFormat", "OldFormat"]) # /newtlb
+) # /oldtlb
+
+
+# Directives for converting VCLibrarianTool to Lib.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for
+# the schema of the MSBuild Lib settings.
+
+_Same(_lib, "AdditionalDependencies", _file_list)
+_Same(_lib, "AdditionalLibraryDirectories", _folder_list) # /LIBPATH
+_Same(_lib, "AdditionalOptions", _string_list)
+_Same(_lib, "ExportNamedFunctions", _string_list) # /EXPORT
+_Same(_lib, "ForceSymbolReferences", _string) # /INCLUDE
+_Same(_lib, "IgnoreAllDefaultLibraries", _boolean) # /NODEFAULTLIB
+_Same(_lib, "IgnoreSpecificDefaultLibraries", _file_list) # /NODEFAULTLIB
+_Same(_lib, "ModuleDefinitionFile", _file_name) # /DEF
+_Same(_lib, "OutputFile", _file_name) # /OUT
+_Same(_lib, "SuppressStartupBanner", _boolean) # /NOLOGO
+_Same(_lib, "UseUnicodeResponseFiles", _boolean)
+_Same(_lib, "LinkTimeCodeGeneration", _boolean) # /LTCG
+_Same(_lib, "TargetMachine", _target_machine_enumeration)
+
+# TODO(jeanluc) _link defines the same value that gets moved to
+# ProjectReference. We may want to validate that they are consistent.
+_Moved(_lib, "LinkLibraryDependencies", "ProjectReference", _boolean)
+
+_MSBuildOnly(_lib, "DisplayLibrary", _string) # /LIST Visible='false'
+_MSBuildOnly(
+ _lib,
+ "ErrorReporting",
+ _Enumeration(
+ [],
+ new=[
+ "PromptImmediately", # /ERRORREPORT:PROMPT
+ "QueueForNextLogin", # /ERRORREPORT:QUEUE
+ "SendErrorReport", # /ERRORREPORT:SEND
+ "NoErrorReport",
+ ],
+ ),
+) # /ERRORREPORT:NONE
+_MSBuildOnly(_lib, "MinimumRequiredVersion", _string)
+_MSBuildOnly(_lib, "Name", _file_name) # /NAME
+_MSBuildOnly(_lib, "RemoveObjects", _file_list) # /REMOVE
+_MSBuildOnly(_lib, "SubSystem", _subsystem_enumeration)
+_MSBuildOnly(_lib, "TrackerLogDirectory", _folder_name)
+_MSBuildOnly(_lib, "TreatLibWarningAsErrors", _boolean) # /WX
+_MSBuildOnly(_lib, "Verbose", _boolean)
+
+
+# Directives for converting VCManifestTool to Mt.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
+# the schema of the MSBuild Manifest settings.
+
+# Options that have the same name in MSVS and MSBuild
+_Same(_manifest, "AdditionalManifestFiles", _file_list) # /manifest
+_Same(_manifest, "AdditionalOptions", _string_list)
+_Same(_manifest, "AssemblyIdentity", _string) # /identity:
+_Same(_manifest, "ComponentFileName", _file_name) # /dll
+_Same(_manifest, "GenerateCatalogFiles", _boolean) # /makecdfs
+_Same(_manifest, "InputResourceManifests", _string) # /inputresource
+_Same(_manifest, "OutputManifestFile", _file_name) # /out
+_Same(_manifest, "RegistrarScriptFile", _file_name) # /rgs
+_Same(_manifest, "ReplacementsFile", _file_name) # /replacements
+_Same(_manifest, "SuppressStartupBanner", _boolean) # /nologo
+_Same(_manifest, "TypeLibraryFile", _file_name) # /tlb:
+_Same(_manifest, "UpdateFileHashes", _boolean) # /hashupdate
+_Same(_manifest, "UpdateFileHashesSearchPath", _file_name)
+_Same(_manifest, "VerboseOutput", _boolean) # /verbose
+
+# Options that have moved location.
+_MovedAndRenamed(
+ _manifest,
+ "ManifestResourceFile",
+ "ManifestResourceCompile",
+ "ResourceOutputFileName",
+ _file_name,
+)
+_Moved(_manifest, "EmbedManifest", "", _boolean)
+
+# MSVS options not found in MSBuild.
+_MSVSOnly(_manifest, "DependencyInformationFile", _file_name)
+_MSVSOnly(_manifest, "UseFAT32Workaround", _boolean)
+_MSVSOnly(_manifest, "UseUnicodeResponseFiles", _boolean)
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_manifest, "EnableDPIAwareness", _boolean)
+_MSBuildOnly(_manifest, "GenerateCategoryTags", _boolean) # /category
+_MSBuildOnly(
+ _manifest, "ManifestFromManagedAssembly", _file_name
+) # /managedassemblyname
+_MSBuildOnly(_manifest, "OutputResourceManifests", _string) # /outputresource
+_MSBuildOnly(_manifest, "SuppressDependencyElement", _boolean) # /nodependency
+_MSBuildOnly(_manifest, "TrackerLogDirectory", _folder_name)
+
+
+# Directives for MASM.
+# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
+# MSBuild MASM settings.
+
+# Options that have the same name in MSVS and MSBuild.
+_Same(_masm, "UseSafeExceptionHandlers", _boolean) # /safeseh
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
new file mode 100755
index 0000000..6ca0968
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
@@ -0,0 +1,1547 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the MSVSSettings.py file."""
+
+import unittest
+import gyp.MSVSSettings as MSVSSettings
+
+from io import StringIO
+
+
+class TestSequenceFunctions(unittest.TestCase):
+ def setUp(self):
+ self.stderr = StringIO()
+
+ def _ExpectedWarnings(self, expected):
+ """Compares recorded lines to expected warnings."""
+ self.stderr.seek(0)
+ actual = self.stderr.read().split("\n")
+ actual = [line for line in actual if line]
+ self.assertEqual(sorted(expected), sorted(actual))
+
+ def testValidateMSVSSettings_tool_names(self):
+ """Tests that only MSVS tool names are allowed."""
+ MSVSSettings.ValidateMSVSSettings(
+ {
+ "VCCLCompilerTool": {},
+ "VCLinkerTool": {},
+ "VCMIDLTool": {},
+ "foo": {},
+ "VCResourceCompilerTool": {},
+ "VCLibrarianTool": {},
+ "VCManifestTool": {},
+ "ClCompile": {},
+ },
+ self.stderr,
+ )
+ self._ExpectedWarnings(
+ ["Warning: unrecognized tool foo", "Warning: unrecognized tool ClCompile"]
+ )
+
+ def testValidateMSVSSettings_settings(self):
+ """Tests that for invalid MSVS settings."""
+ MSVSSettings.ValidateMSVSSettings(
+ {
+ "VCCLCompilerTool": {
+ "AdditionalIncludeDirectories": "folder1;folder2",
+ "AdditionalOptions": ["string1", "string2"],
+ "AdditionalUsingDirectories": "folder1;folder2",
+ "AssemblerListingLocation": "a_file_name",
+ "AssemblerOutput": "0",
+ "BasicRuntimeChecks": "5",
+ "BrowseInformation": "fdkslj",
+ "BrowseInformationFile": "a_file_name",
+ "BufferSecurityCheck": "true",
+ "CallingConvention": "-1",
+ "CompileAs": "1",
+ "DebugInformationFormat": "2",
+ "DefaultCharIsUnsigned": "true",
+ "Detect64BitPortabilityProblems": "true",
+ "DisableLanguageExtensions": "true",
+ "DisableSpecificWarnings": "string1;string2",
+ "EnableEnhancedInstructionSet": "1",
+ "EnableFiberSafeOptimizations": "true",
+ "EnableFunctionLevelLinking": "true",
+ "EnableIntrinsicFunctions": "true",
+ "EnablePREfast": "true",
+ "Enableprefast": "bogus",
+ "ErrorReporting": "1",
+ "ExceptionHandling": "1",
+ "ExpandAttributedSource": "true",
+ "FavorSizeOrSpeed": "1",
+ "FloatingPointExceptions": "true",
+ "FloatingPointModel": "1",
+ "ForceConformanceInForLoopScope": "true",
+ "ForcedIncludeFiles": "file1;file2",
+ "ForcedUsingFiles": "file1;file2",
+ "GeneratePreprocessedFile": "1",
+ "GenerateXMLDocumentationFiles": "true",
+ "IgnoreStandardIncludePath": "true",
+ "InlineFunctionExpansion": "1",
+ "KeepComments": "true",
+ "MinimalRebuild": "true",
+ "ObjectFile": "a_file_name",
+ "OmitDefaultLibName": "true",
+ "OmitFramePointers": "true",
+ "OpenMP": "true",
+ "Optimization": "1",
+ "PrecompiledHeaderFile": "a_file_name",
+ "PrecompiledHeaderThrough": "a_file_name",
+ "PreprocessorDefinitions": "string1;string2",
+ "ProgramDataBaseFileName": "a_file_name",
+ "RuntimeLibrary": "1",
+ "RuntimeTypeInfo": "true",
+ "ShowIncludes": "true",
+ "SmallerTypeCheck": "true",
+ "StringPooling": "true",
+ "StructMemberAlignment": "1",
+ "SuppressStartupBanner": "true",
+ "TreatWChar_tAsBuiltInType": "true",
+ "UndefineAllPreprocessorDefinitions": "true",
+ "UndefinePreprocessorDefinitions": "string1;string2",
+ "UseFullPaths": "true",
+ "UsePrecompiledHeader": "1",
+ "UseUnicodeResponseFiles": "true",
+ "WarnAsError": "true",
+ "WarningLevel": "1",
+ "WholeProgramOptimization": "true",
+ "XMLDocumentationFileName": "a_file_name",
+ "ZZXYZ": "bogus",
+ },
+ "VCLinkerTool": {
+ "AdditionalDependencies": "file1;file2",
+ "AdditionalDependencies_excluded": "file3",
+ "AdditionalLibraryDirectories": "folder1;folder2",
+ "AdditionalManifestDependencies": "file1;file2",
+ "AdditionalOptions": "a string1",
+ "AddModuleNamesToAssembly": "file1;file2",
+ "AllowIsolation": "true",
+ "AssemblyDebug": "2",
+ "AssemblyLinkResource": "file1;file2",
+ "BaseAddress": "a string1",
+ "CLRImageType": "2",
+ "CLRThreadAttribute": "2",
+ "CLRUnmanagedCodeCheck": "true",
+ "DataExecutionPrevention": "2",
+ "DelayLoadDLLs": "file1;file2",
+ "DelaySign": "true",
+ "Driver": "2",
+ "EmbedManagedResourceFile": "file1;file2",
+ "EnableCOMDATFolding": "2",
+ "EnableUAC": "true",
+ "EntryPointSymbol": "a string1",
+ "ErrorReporting": "2",
+ "FixedBaseAddress": "2",
+ "ForceSymbolReferences": "file1;file2",
+ "FunctionOrder": "a_file_name",
+ "GenerateDebugInformation": "true",
+ "GenerateManifest": "true",
+ "GenerateMapFile": "true",
+ "HeapCommitSize": "a string1",
+ "HeapReserveSize": "a string1",
+ "IgnoreAllDefaultLibraries": "true",
+ "IgnoreDefaultLibraryNames": "file1;file2",
+ "IgnoreEmbeddedIDL": "true",
+ "IgnoreImportLibrary": "true",
+ "ImportLibrary": "a_file_name",
+ "KeyContainer": "a_file_name",
+ "KeyFile": "a_file_name",
+ "LargeAddressAware": "2",
+ "LinkIncremental": "2",
+ "LinkLibraryDependencies": "true",
+ "LinkTimeCodeGeneration": "2",
+ "ManifestFile": "a_file_name",
+ "MapExports": "true",
+ "MapFileName": "a_file_name",
+ "MergedIDLBaseFileName": "a_file_name",
+ "MergeSections": "a string1",
+ "MidlCommandFile": "a_file_name",
+ "ModuleDefinitionFile": "a_file_name",
+ "OptimizeForWindows98": "1",
+ "OptimizeReferences": "2",
+ "OutputFile": "a_file_name",
+ "PerUserRedirection": "true",
+ "Profile": "true",
+ "ProfileGuidedDatabase": "a_file_name",
+ "ProgramDatabaseFile": "a_file_name",
+ "RandomizedBaseAddress": "2",
+ "RegisterOutput": "true",
+ "ResourceOnlyDLL": "true",
+ "SetChecksum": "true",
+ "ShowProgress": "2",
+ "StackCommitSize": "a string1",
+ "StackReserveSize": "a string1",
+ "StripPrivateSymbols": "a_file_name",
+ "SubSystem": "2",
+ "SupportUnloadOfDelayLoadedDLL": "true",
+ "SuppressStartupBanner": "true",
+ "SwapRunFromCD": "true",
+ "SwapRunFromNet": "true",
+ "TargetMachine": "2",
+ "TerminalServerAware": "2",
+ "TurnOffAssemblyGeneration": "true",
+ "TypeLibraryFile": "a_file_name",
+ "TypeLibraryResourceID": "33",
+ "UACExecutionLevel": "2",
+ "UACUIAccess": "true",
+ "UseLibraryDependencyInputs": "true",
+ "UseUnicodeResponseFiles": "true",
+ "Version": "a string1",
+ },
+ "VCMIDLTool": {
+ "AdditionalIncludeDirectories": "folder1;folder2",
+ "AdditionalOptions": "a string1",
+ "CPreprocessOptions": "a string1",
+ "DefaultCharType": "1",
+ "DLLDataFileName": "a_file_name",
+ "EnableErrorChecks": "1",
+ "ErrorCheckAllocations": "true",
+ "ErrorCheckBounds": "true",
+ "ErrorCheckEnumRange": "true",
+ "ErrorCheckRefPointers": "true",
+ "ErrorCheckStubData": "true",
+ "GenerateStublessProxies": "true",
+ "GenerateTypeLibrary": "true",
+ "HeaderFileName": "a_file_name",
+ "IgnoreStandardIncludePath": "true",
+ "InterfaceIdentifierFileName": "a_file_name",
+ "MkTypLibCompatible": "true",
+ "notgood": "bogus",
+ "OutputDirectory": "a string1",
+ "PreprocessorDefinitions": "string1;string2",
+ "ProxyFileName": "a_file_name",
+ "RedirectOutputAndErrors": "a_file_name",
+ "StructMemberAlignment": "1",
+ "SuppressStartupBanner": "true",
+ "TargetEnvironment": "1",
+ "TypeLibraryName": "a_file_name",
+ "UndefinePreprocessorDefinitions": "string1;string2",
+ "ValidateParameters": "true",
+ "WarnAsError": "true",
+ "WarningLevel": "1",
+ },
+ "VCResourceCompilerTool": {
+ "AdditionalOptions": "a string1",
+ "AdditionalIncludeDirectories": "folder1;folder2",
+ "Culture": "1003",
+ "IgnoreStandardIncludePath": "true",
+ "notgood2": "bogus",
+ "PreprocessorDefinitions": "string1;string2",
+ "ResourceOutputFileName": "a string1",
+ "ShowProgress": "true",
+ "SuppressStartupBanner": "true",
+ "UndefinePreprocessorDefinitions": "string1;string2",
+ },
+ "VCLibrarianTool": {
+ "AdditionalDependencies": "file1;file2",
+ "AdditionalLibraryDirectories": "folder1;folder2",
+ "AdditionalOptions": "a string1",
+ "ExportNamedFunctions": "string1;string2",
+ "ForceSymbolReferences": "a string1",
+ "IgnoreAllDefaultLibraries": "true",
+ "IgnoreSpecificDefaultLibraries": "file1;file2",
+ "LinkLibraryDependencies": "true",
+ "ModuleDefinitionFile": "a_file_name",
+ "OutputFile": "a_file_name",
+ "SuppressStartupBanner": "true",
+ "UseUnicodeResponseFiles": "true",
+ },
+ "VCManifestTool": {
+ "AdditionalManifestFiles": "file1;file2",
+ "AdditionalOptions": "a string1",
+ "AssemblyIdentity": "a string1",
+ "ComponentFileName": "a_file_name",
+ "DependencyInformationFile": "a_file_name",
+ "GenerateCatalogFiles": "true",
+ "InputResourceManifests": "a string1",
+ "ManifestResourceFile": "a_file_name",
+ "OutputManifestFile": "a_file_name",
+ "RegistrarScriptFile": "a_file_name",
+ "ReplacementsFile": "a_file_name",
+ "SuppressStartupBanner": "true",
+ "TypeLibraryFile": "a_file_name",
+ "UpdateFileHashes": "truel",
+ "UpdateFileHashesSearchPath": "a_file_name",
+ "UseFAT32Workaround": "true",
+ "UseUnicodeResponseFiles": "true",
+ "VerboseOutput": "true",
+ },
+ },
+ self.stderr,
+ )
+ self._ExpectedWarnings(
+ [
+ "Warning: for VCCLCompilerTool/BasicRuntimeChecks, "
+ "index value (5) not in expected range [0, 4)",
+ "Warning: for VCCLCompilerTool/BrowseInformation, "
+ "invalid literal for int() with base 10: 'fdkslj'",
+ "Warning: for VCCLCompilerTool/CallingConvention, "
+ "index value (-1) not in expected range [0, 4)",
+ "Warning: for VCCLCompilerTool/DebugInformationFormat, "
+ "converted value for 2 not specified.",
+ "Warning: unrecognized setting VCCLCompilerTool/Enableprefast",
+ "Warning: unrecognized setting VCCLCompilerTool/ZZXYZ",
+ "Warning: for VCLinkerTool/TargetMachine, "
+ "converted value for 2 not specified.",
+ "Warning: unrecognized setting VCMIDLTool/notgood",
+ "Warning: unrecognized setting VCResourceCompilerTool/notgood2",
+ "Warning: for VCManifestTool/UpdateFileHashes, "
+ "expected bool; got 'truel'"
+ "",
+ ]
+ )
+
+ def testValidateMSBuildSettings_settings(self):
+ """Tests that for invalid MSBuild settings."""
+ MSVSSettings.ValidateMSBuildSettings(
+ {
+ "ClCompile": {
+ "AdditionalIncludeDirectories": "folder1;folder2",
+ "AdditionalOptions": ["string1", "string2"],
+ "AdditionalUsingDirectories": "folder1;folder2",
+ "AssemblerListingLocation": "a_file_name",
+ "AssemblerOutput": "NoListing",
+ "BasicRuntimeChecks": "StackFrameRuntimeCheck",
+ "BrowseInformation": "false",
+ "BrowseInformationFile": "a_file_name",
+ "BufferSecurityCheck": "true",
+ "BuildingInIDE": "true",
+ "CallingConvention": "Cdecl",
+ "CompileAs": "CompileAsC",
+ "CompileAsManaged": "true",
+ "CreateHotpatchableImage": "true",
+ "DebugInformationFormat": "ProgramDatabase",
+ "DisableLanguageExtensions": "true",
+ "DisableSpecificWarnings": "string1;string2",
+ "EnableEnhancedInstructionSet": "StreamingSIMDExtensions",
+ "EnableFiberSafeOptimizations": "true",
+ "EnablePREfast": "true",
+ "Enableprefast": "bogus",
+ "ErrorReporting": "Prompt",
+ "ExceptionHandling": "SyncCThrow",
+ "ExpandAttributedSource": "true",
+ "FavorSizeOrSpeed": "Neither",
+ "FloatingPointExceptions": "true",
+ "FloatingPointModel": "Precise",
+ "ForceConformanceInForLoopScope": "true",
+ "ForcedIncludeFiles": "file1;file2",
+ "ForcedUsingFiles": "file1;file2",
+ "FunctionLevelLinking": "false",
+ "GenerateXMLDocumentationFiles": "true",
+ "IgnoreStandardIncludePath": "true",
+ "InlineFunctionExpansion": "OnlyExplicitInline",
+ "IntrinsicFunctions": "false",
+ "MinimalRebuild": "true",
+ "MultiProcessorCompilation": "true",
+ "ObjectFileName": "a_file_name",
+ "OmitDefaultLibName": "true",
+ "OmitFramePointers": "true",
+ "OpenMPSupport": "true",
+ "Optimization": "Disabled",
+ "PrecompiledHeader": "NotUsing",
+ "PrecompiledHeaderFile": "a_file_name",
+ "PrecompiledHeaderOutputFile": "a_file_name",
+ "PreprocessKeepComments": "true",
+ "PreprocessorDefinitions": "string1;string2",
+ "PreprocessOutputPath": "a string1",
+ "PreprocessSuppressLineNumbers": "false",
+ "PreprocessToFile": "false",
+ "ProcessorNumber": "33",
+ "ProgramDataBaseFileName": "a_file_name",
+ "RuntimeLibrary": "MultiThreaded",
+ "RuntimeTypeInfo": "true",
+ "ShowIncludes": "true",
+ "SmallerTypeCheck": "true",
+ "StringPooling": "true",
+ "StructMemberAlignment": "1Byte",
+ "SuppressStartupBanner": "true",
+ "TrackerLogDirectory": "a_folder",
+ "TreatSpecificWarningsAsErrors": "string1;string2",
+ "TreatWarningAsError": "true",
+ "TreatWChar_tAsBuiltInType": "true",
+ "UndefineAllPreprocessorDefinitions": "true",
+ "UndefinePreprocessorDefinitions": "string1;string2",
+ "UseFullPaths": "true",
+ "UseUnicodeForAssemblerListing": "true",
+ "WarningLevel": "TurnOffAllWarnings",
+ "WholeProgramOptimization": "true",
+ "XMLDocumentationFileName": "a_file_name",
+ "ZZXYZ": "bogus",
+ },
+ "Link": {
+ "AdditionalDependencies": "file1;file2",
+ "AdditionalLibraryDirectories": "folder1;folder2",
+ "AdditionalManifestDependencies": "file1;file2",
+ "AdditionalOptions": "a string1",
+ "AddModuleNamesToAssembly": "file1;file2",
+ "AllowIsolation": "true",
+ "AssemblyDebug": "",
+ "AssemblyLinkResource": "file1;file2",
+ "BaseAddress": "a string1",
+ "BuildingInIDE": "true",
+ "CLRImageType": "ForceIJWImage",
+ "CLRSupportLastError": "Enabled",
+ "CLRThreadAttribute": "MTAThreadingAttribute",
+ "CLRUnmanagedCodeCheck": "true",
+ "CreateHotPatchableImage": "X86Image",
+ "DataExecutionPrevention": "false",
+ "DelayLoadDLLs": "file1;file2",
+ "DelaySign": "true",
+ "Driver": "NotSet",
+ "EmbedManagedResourceFile": "file1;file2",
+ "EnableCOMDATFolding": "false",
+ "EnableUAC": "true",
+ "EntryPointSymbol": "a string1",
+ "FixedBaseAddress": "false",
+ "ForceFileOutput": "Enabled",
+ "ForceSymbolReferences": "file1;file2",
+ "FunctionOrder": "a_file_name",
+ "GenerateDebugInformation": "true",
+ "GenerateMapFile": "true",
+ "HeapCommitSize": "a string1",
+ "HeapReserveSize": "a string1",
+ "IgnoreAllDefaultLibraries": "true",
+ "IgnoreEmbeddedIDL": "true",
+ "IgnoreSpecificDefaultLibraries": "a_file_list",
+ "ImageHasSafeExceptionHandlers": "true",
+ "ImportLibrary": "a_file_name",
+ "KeyContainer": "a_file_name",
+ "KeyFile": "a_file_name",
+ "LargeAddressAware": "false",
+ "LinkDLL": "true",
+ "LinkErrorReporting": "SendErrorReport",
+ "LinkStatus": "true",
+ "LinkTimeCodeGeneration": "UseLinkTimeCodeGeneration",
+ "ManifestFile": "a_file_name",
+ "MapExports": "true",
+ "MapFileName": "a_file_name",
+ "MergedIDLBaseFileName": "a_file_name",
+ "MergeSections": "a string1",
+ "MidlCommandFile": "a_file_name",
+ "MinimumRequiredVersion": "a string1",
+ "ModuleDefinitionFile": "a_file_name",
+ "MSDOSStubFileName": "a_file_name",
+ "NoEntryPoint": "true",
+ "OptimizeReferences": "false",
+ "OutputFile": "a_file_name",
+ "PerUserRedirection": "true",
+ "PreventDllBinding": "true",
+ "Profile": "true",
+ "ProfileGuidedDatabase": "a_file_name",
+ "ProgramDatabaseFile": "a_file_name",
+ "RandomizedBaseAddress": "false",
+ "RegisterOutput": "true",
+ "SectionAlignment": "33",
+ "SetChecksum": "true",
+ "ShowProgress": "LinkVerboseREF",
+ "SpecifySectionAttributes": "a string1",
+ "StackCommitSize": "a string1",
+ "StackReserveSize": "a string1",
+ "StripPrivateSymbols": "a_file_name",
+ "SubSystem": "Console",
+ "SupportNobindOfDelayLoadedDLL": "true",
+ "SupportUnloadOfDelayLoadedDLL": "true",
+ "SuppressStartupBanner": "true",
+ "SwapRunFromCD": "true",
+ "SwapRunFromNET": "true",
+ "TargetMachine": "MachineX86",
+ "TerminalServerAware": "false",
+ "TrackerLogDirectory": "a_folder",
+ "TreatLinkerWarningAsErrors": "true",
+ "TurnOffAssemblyGeneration": "true",
+ "TypeLibraryFile": "a_file_name",
+ "TypeLibraryResourceID": "33",
+ "UACExecutionLevel": "AsInvoker",
+ "UACUIAccess": "true",
+ "Version": "a string1",
+ },
+ "ResourceCompile": {
+ "AdditionalIncludeDirectories": "folder1;folder2",
+ "AdditionalOptions": "a string1",
+ "Culture": "0x236",
+ "IgnoreStandardIncludePath": "true",
+ "NullTerminateStrings": "true",
+ "PreprocessorDefinitions": "string1;string2",
+ "ResourceOutputFileName": "a string1",
+ "ShowProgress": "true",
+ "SuppressStartupBanner": "true",
+ "TrackerLogDirectory": "a_folder",
+ "UndefinePreprocessorDefinitions": "string1;string2",
+ },
+ "Midl": {
+ "AdditionalIncludeDirectories": "folder1;folder2",
+ "AdditionalOptions": "a string1",
+ "ApplicationConfigurationMode": "true",
+ "ClientStubFile": "a_file_name",
+ "CPreprocessOptions": "a string1",
+ "DefaultCharType": "Signed",
+ "DllDataFileName": "a_file_name",
+ "EnableErrorChecks": "EnableCustom",
+ "ErrorCheckAllocations": "true",
+ "ErrorCheckBounds": "true",
+ "ErrorCheckEnumRange": "true",
+ "ErrorCheckRefPointers": "true",
+ "ErrorCheckStubData": "true",
+ "GenerateClientFiles": "Stub",
+ "GenerateServerFiles": "None",
+ "GenerateStublessProxies": "true",
+ "GenerateTypeLibrary": "true",
+ "HeaderFileName": "a_file_name",
+ "IgnoreStandardIncludePath": "true",
+ "InterfaceIdentifierFileName": "a_file_name",
+ "LocaleID": "33",
+ "MkTypLibCompatible": "true",
+ "OutputDirectory": "a string1",
+ "PreprocessorDefinitions": "string1;string2",
+ "ProxyFileName": "a_file_name",
+ "RedirectOutputAndErrors": "a_file_name",
+ "ServerStubFile": "a_file_name",
+ "StructMemberAlignment": "NotSet",
+ "SuppressCompilerWarnings": "true",
+ "SuppressStartupBanner": "true",
+ "TargetEnvironment": "Itanium",
+ "TrackerLogDirectory": "a_folder",
+ "TypeLibFormat": "NewFormat",
+ "TypeLibraryName": "a_file_name",
+ "UndefinePreprocessorDefinitions": "string1;string2",
+ "ValidateAllParameters": "true",
+ "WarnAsError": "true",
+ "WarningLevel": "1",
+ },
+ "Lib": {
+ "AdditionalDependencies": "file1;file2",
+ "AdditionalLibraryDirectories": "folder1;folder2",
+ "AdditionalOptions": "a string1",
+ "DisplayLibrary": "a string1",
+ "ErrorReporting": "PromptImmediately",
+ "ExportNamedFunctions": "string1;string2",
+ "ForceSymbolReferences": "a string1",
+ "IgnoreAllDefaultLibraries": "true",
+ "IgnoreSpecificDefaultLibraries": "file1;file2",
+ "LinkTimeCodeGeneration": "true",
+ "MinimumRequiredVersion": "a string1",
+ "ModuleDefinitionFile": "a_file_name",
+ "Name": "a_file_name",
+ "OutputFile": "a_file_name",
+ "RemoveObjects": "file1;file2",
+ "SubSystem": "Console",
+ "SuppressStartupBanner": "true",
+ "TargetMachine": "MachineX86i",
+ "TrackerLogDirectory": "a_folder",
+ "TreatLibWarningAsErrors": "true",
+ "UseUnicodeResponseFiles": "true",
+ "Verbose": "true",
+ },
+ "Manifest": {
+ "AdditionalManifestFiles": "file1;file2",
+ "AdditionalOptions": "a string1",
+ "AssemblyIdentity": "a string1",
+ "ComponentFileName": "a_file_name",
+ "EnableDPIAwareness": "fal",
+ "GenerateCatalogFiles": "truel",
+ "GenerateCategoryTags": "true",
+ "InputResourceManifests": "a string1",
+ "ManifestFromManagedAssembly": "a_file_name",
+ "notgood3": "bogus",
+ "OutputManifestFile": "a_file_name",
+ "OutputResourceManifests": "a string1",
+ "RegistrarScriptFile": "a_file_name",
+ "ReplacementsFile": "a_file_name",
+ "SuppressDependencyElement": "true",
+ "SuppressStartupBanner": "true",
+ "TrackerLogDirectory": "a_folder",
+ "TypeLibraryFile": "a_file_name",
+ "UpdateFileHashes": "true",
+ "UpdateFileHashesSearchPath": "a_file_name",
+ "VerboseOutput": "true",
+ },
+ "ProjectReference": {
+ "LinkLibraryDependencies": "true",
+ "UseLibraryDependencyInputs": "true",
+ },
+ "ManifestResourceCompile": {"ResourceOutputFileName": "a_file_name"},
+ "": {
+ "EmbedManifest": "true",
+ "GenerateManifest": "true",
+ "IgnoreImportLibrary": "true",
+ "LinkIncremental": "false",
+ },
+ },
+ self.stderr,
+ )
+ self._ExpectedWarnings(
+ [
+ "Warning: unrecognized setting ClCompile/Enableprefast",
+ "Warning: unrecognized setting ClCompile/ZZXYZ",
+ "Warning: unrecognized setting Manifest/notgood3",
+ "Warning: for Manifest/GenerateCatalogFiles, "
+ "expected bool; got 'truel'",
+ "Warning: for Lib/TargetMachine, unrecognized enumerated value "
+ "MachineX86i",
+ "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'",
+ ]
+ )
+
+ def testConvertToMSBuildSettings_empty(self):
+ """Tests an empty conversion."""
+ msvs_settings = {}
+ expected_msbuild_settings = {}
+ actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
+ msvs_settings, self.stderr
+ )
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([])
+
+ def testConvertToMSBuildSettings_minimal(self):
+ """Tests a minimal conversion."""
+ msvs_settings = {
+ "VCCLCompilerTool": {
+ "AdditionalIncludeDirectories": "dir1",
+ "AdditionalOptions": "/foo",
+ "BasicRuntimeChecks": "0",
+ },
+ "VCLinkerTool": {
+ "LinkTimeCodeGeneration": "1",
+ "ErrorReporting": "1",
+ "DataExecutionPrevention": "2",
+ },
+ }
+ expected_msbuild_settings = {
+ "ClCompile": {
+ "AdditionalIncludeDirectories": "dir1",
+ "AdditionalOptions": "/foo",
+ "BasicRuntimeChecks": "Default",
+ },
+ "Link": {
+ "LinkTimeCodeGeneration": "UseLinkTimeCodeGeneration",
+ "LinkErrorReporting": "PromptImmediately",
+ "DataExecutionPrevention": "true",
+ },
+ }
+ actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
+ msvs_settings, self.stderr
+ )
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([])
+
+ def testConvertToMSBuildSettings_warnings(self):
+ """Tests conversion that generates warnings."""
+ msvs_settings = {
+ "VCCLCompilerTool": {
+ "AdditionalIncludeDirectories": "1",
+ "AdditionalOptions": "2",
+ # These are incorrect values:
+ "BasicRuntimeChecks": "12",
+ "BrowseInformation": "21",
+ "UsePrecompiledHeader": "13",
+ "GeneratePreprocessedFile": "14",
+ },
+ "VCLinkerTool": {
+ # These are incorrect values:
+ "Driver": "10",
+ "LinkTimeCodeGeneration": "31",
+ "ErrorReporting": "21",
+ "FixedBaseAddress": "6",
+ },
+ "VCResourceCompilerTool": {
+ # Custom
+ "Culture": "1003"
+ },
+ }
+ expected_msbuild_settings = {
+ "ClCompile": {
+ "AdditionalIncludeDirectories": "1",
+ "AdditionalOptions": "2",
+ },
+ "Link": {},
+ "ResourceCompile": {
+ # Custom
+ "Culture": "0x03eb"
+ },
+ }
+ actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
+ msvs_settings, self.stderr
+ )
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings(
+ [
+ "Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to "
+ "MSBuild, index value (12) not in expected range [0, 4)",
+ "Warning: while converting VCCLCompilerTool/BrowseInformation to "
+ "MSBuild, index value (21) not in expected range [0, 3)",
+ "Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to "
+ "MSBuild, index value (13) not in expected range [0, 3)",
+ "Warning: while converting "
+ "VCCLCompilerTool/GeneratePreprocessedFile to "
+ "MSBuild, value must be one of [0, 1, 2]; got 14",
+ "Warning: while converting VCLinkerTool/Driver to "
+ "MSBuild, index value (10) not in expected range [0, 4)",
+ "Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to "
+ "MSBuild, index value (31) not in expected range [0, 5)",
+ "Warning: while converting VCLinkerTool/ErrorReporting to "
+ "MSBuild, index value (21) not in expected range [0, 3)",
+ "Warning: while converting VCLinkerTool/FixedBaseAddress to "
+ "MSBuild, index value (6) not in expected range [0, 3)",
+ ]
+ )
+
+ def testConvertToMSBuildSettings_full_synthetic(self):
+ """Tests conversion of all the MSBuild settings."""
+ msvs_settings = {
+ "VCCLCompilerTool": {
+ "AdditionalIncludeDirectories": "folder1;folder2;folder3",
+ "AdditionalOptions": "a_string",
+ "AdditionalUsingDirectories": "folder1;folder2;folder3",
+ "AssemblerListingLocation": "a_file_name",
+ "AssemblerOutput": "0",
+ "BasicRuntimeChecks": "1",
+ "BrowseInformation": "2",
+ "BrowseInformationFile": "a_file_name",
+ "BufferSecurityCheck": "true",
+ "CallingConvention": "0",
+ "CompileAs": "1",
+ "DebugInformationFormat": "4",
+ "DefaultCharIsUnsigned": "true",
+ "Detect64BitPortabilityProblems": "true",
+ "DisableLanguageExtensions": "true",
+ "DisableSpecificWarnings": "d1;d2;d3",
+ "EnableEnhancedInstructionSet": "0",
+ "EnableFiberSafeOptimizations": "true",
+ "EnableFunctionLevelLinking": "true",
+ "EnableIntrinsicFunctions": "true",
+ "EnablePREfast": "true",
+ "ErrorReporting": "1",
+ "ExceptionHandling": "2",
+ "ExpandAttributedSource": "true",
+ "FavorSizeOrSpeed": "0",
+ "FloatingPointExceptions": "true",
+ "FloatingPointModel": "1",
+ "ForceConformanceInForLoopScope": "true",
+ "ForcedIncludeFiles": "file1;file2;file3",
+ "ForcedUsingFiles": "file1;file2;file3",
+ "GeneratePreprocessedFile": "1",
+ "GenerateXMLDocumentationFiles": "true",
+ "IgnoreStandardIncludePath": "true",
+ "InlineFunctionExpansion": "2",
+ "KeepComments": "true",
+ "MinimalRebuild": "true",
+ "ObjectFile": "a_file_name",
+ "OmitDefaultLibName": "true",
+ "OmitFramePointers": "true",
+ "OpenMP": "true",
+ "Optimization": "3",
+ "PrecompiledHeaderFile": "a_file_name",
+ "PrecompiledHeaderThrough": "a_file_name",
+ "PreprocessorDefinitions": "d1;d2;d3",
+ "ProgramDataBaseFileName": "a_file_name",
+ "RuntimeLibrary": "0",
+ "RuntimeTypeInfo": "true",
+ "ShowIncludes": "true",
+ "SmallerTypeCheck": "true",
+ "StringPooling": "true",
+ "StructMemberAlignment": "1",
+ "SuppressStartupBanner": "true",
+ "TreatWChar_tAsBuiltInType": "true",
+ "UndefineAllPreprocessorDefinitions": "true",
+ "UndefinePreprocessorDefinitions": "d1;d2;d3",
+ "UseFullPaths": "true",
+ "UsePrecompiledHeader": "1",
+ "UseUnicodeResponseFiles": "true",
+ "WarnAsError": "true",
+ "WarningLevel": "2",
+ "WholeProgramOptimization": "true",
+ "XMLDocumentationFileName": "a_file_name",
+ },
+ "VCLinkerTool": {
+ "AdditionalDependencies": "file1;file2;file3",
+ "AdditionalLibraryDirectories": "folder1;folder2;folder3",
+ "AdditionalLibraryDirectories_excluded": "folder1;folder2;folder3",
+ "AdditionalManifestDependencies": "file1;file2;file3",
+ "AdditionalOptions": "a_string",
+ "AddModuleNamesToAssembly": "file1;file2;file3",
+ "AllowIsolation": "true",
+ "AssemblyDebug": "0",
+ "AssemblyLinkResource": "file1;file2;file3",
+ "BaseAddress": "a_string",
+ "CLRImageType": "1",
+ "CLRThreadAttribute": "2",
+ "CLRUnmanagedCodeCheck": "true",
+ "DataExecutionPrevention": "0",
+ "DelayLoadDLLs": "file1;file2;file3",
+ "DelaySign": "true",
+ "Driver": "1",
+ "EmbedManagedResourceFile": "file1;file2;file3",
+ "EnableCOMDATFolding": "0",
+ "EnableUAC": "true",
+ "EntryPointSymbol": "a_string",
+ "ErrorReporting": "0",
+ "FixedBaseAddress": "1",
+ "ForceSymbolReferences": "file1;file2;file3",
+ "FunctionOrder": "a_file_name",
+ "GenerateDebugInformation": "true",
+ "GenerateManifest": "true",
+ "GenerateMapFile": "true",
+ "HeapCommitSize": "a_string",
+ "HeapReserveSize": "a_string",
+ "IgnoreAllDefaultLibraries": "true",
+ "IgnoreDefaultLibraryNames": "file1;file2;file3",
+ "IgnoreEmbeddedIDL": "true",
+ "IgnoreImportLibrary": "true",
+ "ImportLibrary": "a_file_name",
+ "KeyContainer": "a_file_name",
+ "KeyFile": "a_file_name",
+ "LargeAddressAware": "2",
+ "LinkIncremental": "1",
+ "LinkLibraryDependencies": "true",
+ "LinkTimeCodeGeneration": "2",
+ "ManifestFile": "a_file_name",
+ "MapExports": "true",
+ "MapFileName": "a_file_name",
+ "MergedIDLBaseFileName": "a_file_name",
+ "MergeSections": "a_string",
+ "MidlCommandFile": "a_file_name",
+ "ModuleDefinitionFile": "a_file_name",
+ "OptimizeForWindows98": "1",
+ "OptimizeReferences": "0",
+ "OutputFile": "a_file_name",
+ "PerUserRedirection": "true",
+ "Profile": "true",
+ "ProfileGuidedDatabase": "a_file_name",
+ "ProgramDatabaseFile": "a_file_name",
+ "RandomizedBaseAddress": "1",
+ "RegisterOutput": "true",
+ "ResourceOnlyDLL": "true",
+ "SetChecksum": "true",
+ "ShowProgress": "0",
+ "StackCommitSize": "a_string",
+ "StackReserveSize": "a_string",
+ "StripPrivateSymbols": "a_file_name",
+ "SubSystem": "2",
+ "SupportUnloadOfDelayLoadedDLL": "true",
+ "SuppressStartupBanner": "true",
+ "SwapRunFromCD": "true",
+ "SwapRunFromNet": "true",
+ "TargetMachine": "3",
+ "TerminalServerAware": "2",
+ "TurnOffAssemblyGeneration": "true",
+ "TypeLibraryFile": "a_file_name",
+ "TypeLibraryResourceID": "33",
+ "UACExecutionLevel": "1",
+ "UACUIAccess": "true",
+ "UseLibraryDependencyInputs": "false",
+ "UseUnicodeResponseFiles": "true",
+ "Version": "a_string",
+ },
+ "VCResourceCompilerTool": {
+ "AdditionalIncludeDirectories": "folder1;folder2;folder3",
+ "AdditionalOptions": "a_string",
+ "Culture": "1003",
+ "IgnoreStandardIncludePath": "true",
+ "PreprocessorDefinitions": "d1;d2;d3",
+ "ResourceOutputFileName": "a_string",
+ "ShowProgress": "true",
+ "SuppressStartupBanner": "true",
+ "UndefinePreprocessorDefinitions": "d1;d2;d3",
+ },
+ "VCMIDLTool": {
+ "AdditionalIncludeDirectories": "folder1;folder2;folder3",
+ "AdditionalOptions": "a_string",
+ "CPreprocessOptions": "a_string",
+ "DefaultCharType": "0",
+ "DLLDataFileName": "a_file_name",
+ "EnableErrorChecks": "2",
+ "ErrorCheckAllocations": "true",
+ "ErrorCheckBounds": "true",
+ "ErrorCheckEnumRange": "true",
+ "ErrorCheckRefPointers": "true",
+ "ErrorCheckStubData": "true",
+ "GenerateStublessProxies": "true",
+ "GenerateTypeLibrary": "true",
+ "HeaderFileName": "a_file_name",
+ "IgnoreStandardIncludePath": "true",
+ "InterfaceIdentifierFileName": "a_file_name",
+ "MkTypLibCompatible": "true",
+ "OutputDirectory": "a_string",
+ "PreprocessorDefinitions": "d1;d2;d3",
+ "ProxyFileName": "a_file_name",
+ "RedirectOutputAndErrors": "a_file_name",
+ "StructMemberAlignment": "3",
+ "SuppressStartupBanner": "true",
+ "TargetEnvironment": "1",
+ "TypeLibraryName": "a_file_name",
+ "UndefinePreprocessorDefinitions": "d1;d2;d3",
+ "ValidateParameters": "true",
+ "WarnAsError": "true",
+ "WarningLevel": "4",
+ },
+ "VCLibrarianTool": {
+ "AdditionalDependencies": "file1;file2;file3",
+ "AdditionalLibraryDirectories": "folder1;folder2;folder3",
+ "AdditionalLibraryDirectories_excluded": "folder1;folder2;folder3",
+ "AdditionalOptions": "a_string",
+ "ExportNamedFunctions": "d1;d2;d3",
+ "ForceSymbolReferences": "a_string",
+ "IgnoreAllDefaultLibraries": "true",
+ "IgnoreSpecificDefaultLibraries": "file1;file2;file3",
+ "LinkLibraryDependencies": "true",
+ "ModuleDefinitionFile": "a_file_name",
+ "OutputFile": "a_file_name",
+ "SuppressStartupBanner": "true",
+ "UseUnicodeResponseFiles": "true",
+ },
+ "VCManifestTool": {
+ "AdditionalManifestFiles": "file1;file2;file3",
+ "AdditionalOptions": "a_string",
+ "AssemblyIdentity": "a_string",
+ "ComponentFileName": "a_file_name",
+ "DependencyInformationFile": "a_file_name",
+ "EmbedManifest": "true",
+ "GenerateCatalogFiles": "true",
+ "InputResourceManifests": "a_string",
+ "ManifestResourceFile": "my_name",
+ "OutputManifestFile": "a_file_name",
+ "RegistrarScriptFile": "a_file_name",
+ "ReplacementsFile": "a_file_name",
+ "SuppressStartupBanner": "true",
+ "TypeLibraryFile": "a_file_name",
+ "UpdateFileHashes": "true",
+ "UpdateFileHashesSearchPath": "a_file_name",
+ "UseFAT32Workaround": "true",
+ "UseUnicodeResponseFiles": "true",
+ "VerboseOutput": "true",
+ },
+ }
+ expected_msbuild_settings = {
+ "ClCompile": {
+ "AdditionalIncludeDirectories": "folder1;folder2;folder3",
+ "AdditionalOptions": "a_string /J",
+ "AdditionalUsingDirectories": "folder1;folder2;folder3",
+ "AssemblerListingLocation": "a_file_name",
+ "AssemblerOutput": "NoListing",
+ "BasicRuntimeChecks": "StackFrameRuntimeCheck",
+ "BrowseInformation": "true",
+ "BrowseInformationFile": "a_file_name",
+ "BufferSecurityCheck": "true",
+ "CallingConvention": "Cdecl",
+ "CompileAs": "CompileAsC",
+ "DebugInformationFormat": "EditAndContinue",
+ "DisableLanguageExtensions": "true",
+ "DisableSpecificWarnings": "d1;d2;d3",
+ "EnableEnhancedInstructionSet": "NotSet",
+ "EnableFiberSafeOptimizations": "true",
+ "EnablePREfast": "true",
+ "ErrorReporting": "Prompt",
+ "ExceptionHandling": "Async",
+ "ExpandAttributedSource": "true",
+ "FavorSizeOrSpeed": "Neither",
+ "FloatingPointExceptions": "true",
+ "FloatingPointModel": "Strict",
+ "ForceConformanceInForLoopScope": "true",
+ "ForcedIncludeFiles": "file1;file2;file3",
+ "ForcedUsingFiles": "file1;file2;file3",
+ "FunctionLevelLinking": "true",
+ "GenerateXMLDocumentationFiles": "true",
+ "IgnoreStandardIncludePath": "true",
+ "InlineFunctionExpansion": "AnySuitable",
+ "IntrinsicFunctions": "true",
+ "MinimalRebuild": "true",
+ "ObjectFileName": "a_file_name",
+ "OmitDefaultLibName": "true",
+ "OmitFramePointers": "true",
+ "OpenMPSupport": "true",
+ "Optimization": "Full",
+ "PrecompiledHeader": "Create",
+ "PrecompiledHeaderFile": "a_file_name",
+ "PrecompiledHeaderOutputFile": "a_file_name",
+ "PreprocessKeepComments": "true",
+ "PreprocessorDefinitions": "d1;d2;d3",
+ "PreprocessSuppressLineNumbers": "false",
+ "PreprocessToFile": "true",
+ "ProgramDataBaseFileName": "a_file_name",
+ "RuntimeLibrary": "MultiThreaded",
+ "RuntimeTypeInfo": "true",
+ "ShowIncludes": "true",
+ "SmallerTypeCheck": "true",
+ "StringPooling": "true",
+ "StructMemberAlignment": "1Byte",
+ "SuppressStartupBanner": "true",
+ "TreatWarningAsError": "true",
+ "TreatWChar_tAsBuiltInType": "true",
+ "UndefineAllPreprocessorDefinitions": "true",
+ "UndefinePreprocessorDefinitions": "d1;d2;d3",
+ "UseFullPaths": "true",
+ "WarningLevel": "Level2",
+ "WholeProgramOptimization": "true",
+ "XMLDocumentationFileName": "a_file_name",
+ },
+ "Link": {
+ "AdditionalDependencies": "file1;file2;file3",
+ "AdditionalLibraryDirectories": "folder1;folder2;folder3",
+ "AdditionalManifestDependencies": "file1;file2;file3",
+ "AdditionalOptions": "a_string",
+ "AddModuleNamesToAssembly": "file1;file2;file3",
+ "AllowIsolation": "true",
+ "AssemblyDebug": "",
+ "AssemblyLinkResource": "file1;file2;file3",
+ "BaseAddress": "a_string",
+ "CLRImageType": "ForceIJWImage",
+ "CLRThreadAttribute": "STAThreadingAttribute",
+ "CLRUnmanagedCodeCheck": "true",
+ "DataExecutionPrevention": "",
+ "DelayLoadDLLs": "file1;file2;file3",
+ "DelaySign": "true",
+ "Driver": "Driver",
+ "EmbedManagedResourceFile": "file1;file2;file3",
+ "EnableCOMDATFolding": "",
+ "EnableUAC": "true",
+ "EntryPointSymbol": "a_string",
+ "FixedBaseAddress": "false",
+ "ForceSymbolReferences": "file1;file2;file3",
+ "FunctionOrder": "a_file_name",
+ "GenerateDebugInformation": "true",
+ "GenerateMapFile": "true",
+ "HeapCommitSize": "a_string",
+ "HeapReserveSize": "a_string",
+ "IgnoreAllDefaultLibraries": "true",
+ "IgnoreEmbeddedIDL": "true",
+ "IgnoreSpecificDefaultLibraries": "file1;file2;file3",
+ "ImportLibrary": "a_file_name",
+ "KeyContainer": "a_file_name",
+ "KeyFile": "a_file_name",
+ "LargeAddressAware": "true",
+ "LinkErrorReporting": "NoErrorReport",
+ "LinkTimeCodeGeneration": "PGInstrument",
+ "ManifestFile": "a_file_name",
+ "MapExports": "true",
+ "MapFileName": "a_file_name",
+ "MergedIDLBaseFileName": "a_file_name",
+ "MergeSections": "a_string",
+ "MidlCommandFile": "a_file_name",
+ "ModuleDefinitionFile": "a_file_name",
+ "NoEntryPoint": "true",
+ "OptimizeReferences": "",
+ "OutputFile": "a_file_name",
+ "PerUserRedirection": "true",
+ "Profile": "true",
+ "ProfileGuidedDatabase": "a_file_name",
+ "ProgramDatabaseFile": "a_file_name",
+ "RandomizedBaseAddress": "false",
+ "RegisterOutput": "true",
+ "SetChecksum": "true",
+ "ShowProgress": "NotSet",
+ "StackCommitSize": "a_string",
+ "StackReserveSize": "a_string",
+ "StripPrivateSymbols": "a_file_name",
+ "SubSystem": "Windows",
+ "SupportUnloadOfDelayLoadedDLL": "true",
+ "SuppressStartupBanner": "true",
+ "SwapRunFromCD": "true",
+ "SwapRunFromNET": "true",
+ "TargetMachine": "MachineARM",
+ "TerminalServerAware": "true",
+ "TurnOffAssemblyGeneration": "true",
+ "TypeLibraryFile": "a_file_name",
+ "TypeLibraryResourceID": "33",
+ "UACExecutionLevel": "HighestAvailable",
+ "UACUIAccess": "true",
+ "Version": "a_string",
+ },
+ "ResourceCompile": {
+ "AdditionalIncludeDirectories": "folder1;folder2;folder3",
+ "AdditionalOptions": "a_string",
+ "Culture": "0x03eb",
+ "IgnoreStandardIncludePath": "true",
+ "PreprocessorDefinitions": "d1;d2;d3",
+ "ResourceOutputFileName": "a_string",
+ "ShowProgress": "true",
+ "SuppressStartupBanner": "true",
+ "UndefinePreprocessorDefinitions": "d1;d2;d3",
+ },
+ "Midl": {
+ "AdditionalIncludeDirectories": "folder1;folder2;folder3",
+ "AdditionalOptions": "a_string",
+ "CPreprocessOptions": "a_string",
+ "DefaultCharType": "Unsigned",
+ "DllDataFileName": "a_file_name",
+ "EnableErrorChecks": "All",
+ "ErrorCheckAllocations": "true",
+ "ErrorCheckBounds": "true",
+ "ErrorCheckEnumRange": "true",
+ "ErrorCheckRefPointers": "true",
+ "ErrorCheckStubData": "true",
+ "GenerateStublessProxies": "true",
+ "GenerateTypeLibrary": "true",
+ "HeaderFileName": "a_file_name",
+ "IgnoreStandardIncludePath": "true",
+ "InterfaceIdentifierFileName": "a_file_name",
+ "MkTypLibCompatible": "true",
+ "OutputDirectory": "a_string",
+ "PreprocessorDefinitions": "d1;d2;d3",
+ "ProxyFileName": "a_file_name",
+ "RedirectOutputAndErrors": "a_file_name",
+ "StructMemberAlignment": "4",
+ "SuppressStartupBanner": "true",
+ "TargetEnvironment": "Win32",
+ "TypeLibraryName": "a_file_name",
+ "UndefinePreprocessorDefinitions": "d1;d2;d3",
+ "ValidateAllParameters": "true",
+ "WarnAsError": "true",
+ "WarningLevel": "4",
+ },
+ "Lib": {
+ "AdditionalDependencies": "file1;file2;file3",
+ "AdditionalLibraryDirectories": "folder1;folder2;folder3",
+ "AdditionalOptions": "a_string",
+ "ExportNamedFunctions": "d1;d2;d3",
+ "ForceSymbolReferences": "a_string",
+ "IgnoreAllDefaultLibraries": "true",
+ "IgnoreSpecificDefaultLibraries": "file1;file2;file3",
+ "ModuleDefinitionFile": "a_file_name",
+ "OutputFile": "a_file_name",
+ "SuppressStartupBanner": "true",
+ "UseUnicodeResponseFiles": "true",
+ },
+ "Manifest": {
+ "AdditionalManifestFiles": "file1;file2;file3",
+ "AdditionalOptions": "a_string",
+ "AssemblyIdentity": "a_string",
+ "ComponentFileName": "a_file_name",
+ "GenerateCatalogFiles": "true",
+ "InputResourceManifests": "a_string",
+ "OutputManifestFile": "a_file_name",
+ "RegistrarScriptFile": "a_file_name",
+ "ReplacementsFile": "a_file_name",
+ "SuppressStartupBanner": "true",
+ "TypeLibraryFile": "a_file_name",
+ "UpdateFileHashes": "true",
+ "UpdateFileHashesSearchPath": "a_file_name",
+ "VerboseOutput": "true",
+ },
+ "ManifestResourceCompile": {"ResourceOutputFileName": "my_name"},
+ "ProjectReference": {
+ "LinkLibraryDependencies": "true",
+ "UseLibraryDependencyInputs": "false",
+ },
+ "": {
+ "EmbedManifest": "true",
+ "GenerateManifest": "true",
+ "IgnoreImportLibrary": "true",
+ "LinkIncremental": "false",
+ },
+ }
+ self.maxDiff = 9999 # on failure display a long diff
+ actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
+ msvs_settings, self.stderr
+ )
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([])
+
+ def testConvertToMSBuildSettings_actual(self):
+ """Tests the conversion of an actual project.
+
+ A VS2008 project with most of the options defined was created through the
+ VS2008 IDE. It was then converted to VS2010. The tool settings found in
+ the .vcproj and .vcxproj files were converted to the two dictionaries
+ msvs_settings and expected_msbuild_settings.
+
+ Note that for many settings, the VS2010 converter adds macros like
+ %(AdditionalIncludeDirectories) to make sure that inherited values are
+ included. Since the Gyp projects we generate do not use inheritance,
+ we removed these macros. They were:
+ ClCompile:
+ AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
+ AdditionalOptions: ' %(AdditionalOptions)'
+ AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
+ DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
+ ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
+ ForcedUsingFiles: ';%(ForcedUsingFiles)',
+ PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
+ UndefinePreprocessorDefinitions:
+ ';%(UndefinePreprocessorDefinitions)',
+ Link:
+ AdditionalDependencies: ';%(AdditionalDependencies)',
+ AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
+ AdditionalManifestDependencies:
+ ';%(AdditionalManifestDependencies)',
+ AdditionalOptions: ' %(AdditionalOptions)',
+ AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
+ AssemblyLinkResource: ';%(AssemblyLinkResource)',
+ DelayLoadDLLs: ';%(DelayLoadDLLs)',
+ EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
+ ForceSymbolReferences: ';%(ForceSymbolReferences)',
+ IgnoreSpecificDefaultLibraries:
+ ';%(IgnoreSpecificDefaultLibraries)',
+ ResourceCompile:
+ AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
+ AdditionalOptions: ' %(AdditionalOptions)',
+ PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
+ Manifest:
+ AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
+ AdditionalOptions: ' %(AdditionalOptions)',
+ InputResourceManifests: ';%(InputResourceManifests)',
+ """
+ msvs_settings = {
+ "VCCLCompilerTool": {
+ "AdditionalIncludeDirectories": "dir1",
+ "AdditionalOptions": "/more",
+ "AdditionalUsingDirectories": "test",
+ "AssemblerListingLocation": "$(IntDir)\\a",
+ "AssemblerOutput": "1",
+ "BasicRuntimeChecks": "3",
+ "BrowseInformation": "1",
+ "BrowseInformationFile": "$(IntDir)\\e",
+ "BufferSecurityCheck": "false",
+ "CallingConvention": "1",
+ "CompileAs": "1",
+ "DebugInformationFormat": "4",
+ "DefaultCharIsUnsigned": "true",
+ "Detect64BitPortabilityProblems": "true",
+ "DisableLanguageExtensions": "true",
+ "DisableSpecificWarnings": "abc",
+ "EnableEnhancedInstructionSet": "1",
+ "EnableFiberSafeOptimizations": "true",
+ "EnableFunctionLevelLinking": "true",
+ "EnableIntrinsicFunctions": "true",
+ "EnablePREfast": "true",
+ "ErrorReporting": "2",
+ "ExceptionHandling": "2",
+ "ExpandAttributedSource": "true",
+ "FavorSizeOrSpeed": "2",
+ "FloatingPointExceptions": "true",
+ "FloatingPointModel": "1",
+ "ForceConformanceInForLoopScope": "false",
+ "ForcedIncludeFiles": "def",
+ "ForcedUsingFiles": "ge",
+ "GeneratePreprocessedFile": "2",
+ "GenerateXMLDocumentationFiles": "true",
+ "IgnoreStandardIncludePath": "true",
+ "InlineFunctionExpansion": "1",
+ "KeepComments": "true",
+ "MinimalRebuild": "true",
+ "ObjectFile": "$(IntDir)\\b",
+ "OmitDefaultLibName": "true",
+ "OmitFramePointers": "true",
+ "OpenMP": "true",
+ "Optimization": "3",
+ "PrecompiledHeaderFile": "$(IntDir)\\$(TargetName).pche",
+ "PrecompiledHeaderThrough": "StdAfx.hd",
+ "PreprocessorDefinitions": "WIN32;_DEBUG;_CONSOLE",
+ "ProgramDataBaseFileName": "$(IntDir)\\vc90b.pdb",
+ "RuntimeLibrary": "3",
+ "RuntimeTypeInfo": "false",
+ "ShowIncludes": "true",
+ "SmallerTypeCheck": "true",
+ "StringPooling": "true",
+ "StructMemberAlignment": "3",
+ "SuppressStartupBanner": "false",
+ "TreatWChar_tAsBuiltInType": "false",
+ "UndefineAllPreprocessorDefinitions": "true",
+ "UndefinePreprocessorDefinitions": "wer",
+ "UseFullPaths": "true",
+ "UsePrecompiledHeader": "0",
+ "UseUnicodeResponseFiles": "false",
+ "WarnAsError": "true",
+ "WarningLevel": "3",
+ "WholeProgramOptimization": "true",
+ "XMLDocumentationFileName": "$(IntDir)\\c",
+ },
+ "VCLinkerTool": {
+ "AdditionalDependencies": "zx",
+ "AdditionalLibraryDirectories": "asd",
+ "AdditionalManifestDependencies": "s2",
+ "AdditionalOptions": "/mor2",
+ "AddModuleNamesToAssembly": "d1",
+ "AllowIsolation": "false",
+ "AssemblyDebug": "1",
+ "AssemblyLinkResource": "d5",
+ "BaseAddress": "23423",
+ "CLRImageType": "3",
+ "CLRThreadAttribute": "1",
+ "CLRUnmanagedCodeCheck": "true",
+ "DataExecutionPrevention": "0",
+ "DelayLoadDLLs": "d4",
+ "DelaySign": "true",
+ "Driver": "2",
+ "EmbedManagedResourceFile": "d2",
+ "EnableCOMDATFolding": "1",
+ "EnableUAC": "false",
+ "EntryPointSymbol": "f5",
+ "ErrorReporting": "2",
+ "FixedBaseAddress": "1",
+ "ForceSymbolReferences": "d3",
+ "FunctionOrder": "fssdfsd",
+ "GenerateDebugInformation": "true",
+ "GenerateManifest": "false",
+ "GenerateMapFile": "true",
+ "HeapCommitSize": "13",
+ "HeapReserveSize": "12",
+ "IgnoreAllDefaultLibraries": "true",
+ "IgnoreDefaultLibraryNames": "flob;flok",
+ "IgnoreEmbeddedIDL": "true",
+ "IgnoreImportLibrary": "true",
+ "ImportLibrary": "f4",
+ "KeyContainer": "f7",
+ "KeyFile": "f6",
+ "LargeAddressAware": "2",
+ "LinkIncremental": "0",
+ "LinkLibraryDependencies": "false",
+ "LinkTimeCodeGeneration": "1",
+ "ManifestFile": "$(IntDir)\\$(TargetFileName).2intermediate.manifest",
+ "MapExports": "true",
+ "MapFileName": "d5",
+ "MergedIDLBaseFileName": "f2",
+ "MergeSections": "f5",
+ "MidlCommandFile": "f1",
+ "ModuleDefinitionFile": "sdsd",
+ "OptimizeForWindows98": "2",
+ "OptimizeReferences": "2",
+ "OutputFile": "$(OutDir)\\$(ProjectName)2.exe",
+ "PerUserRedirection": "true",
+ "Profile": "true",
+ "ProfileGuidedDatabase": "$(TargetDir)$(TargetName).pgdd",
+ "ProgramDatabaseFile": "Flob.pdb",
+ "RandomizedBaseAddress": "1",
+ "RegisterOutput": "true",
+ "ResourceOnlyDLL": "true",
+ "SetChecksum": "false",
+ "ShowProgress": "1",
+ "StackCommitSize": "15",
+ "StackReserveSize": "14",
+ "StripPrivateSymbols": "d3",
+ "SubSystem": "1",
+ "SupportUnloadOfDelayLoadedDLL": "true",
+ "SuppressStartupBanner": "false",
+ "SwapRunFromCD": "true",
+ "SwapRunFromNet": "true",
+ "TargetMachine": "1",
+ "TerminalServerAware": "1",
+ "TurnOffAssemblyGeneration": "true",
+ "TypeLibraryFile": "f3",
+ "TypeLibraryResourceID": "12",
+ "UACExecutionLevel": "2",
+ "UACUIAccess": "true",
+ "UseLibraryDependencyInputs": "true",
+ "UseUnicodeResponseFiles": "false",
+ "Version": "333",
+ },
+ "VCResourceCompilerTool": {
+ "AdditionalIncludeDirectories": "f3",
+ "AdditionalOptions": "/more3",
+ "Culture": "3084",
+ "IgnoreStandardIncludePath": "true",
+ "PreprocessorDefinitions": "_UNICODE;UNICODE2",
+ "ResourceOutputFileName": "$(IntDir)/$(InputName)3.res",
+ "ShowProgress": "true",
+ },
+ "VCManifestTool": {
+ "AdditionalManifestFiles": "sfsdfsd",
+ "AdditionalOptions": "afdsdafsd",
+ "AssemblyIdentity": "sddfdsadfsa",
+ "ComponentFileName": "fsdfds",
+ "DependencyInformationFile": "$(IntDir)\\mt.depdfd",
+ "EmbedManifest": "false",
+ "GenerateCatalogFiles": "true",
+ "InputResourceManifests": "asfsfdafs",
+ "ManifestResourceFile":
+ "$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf",
+ "OutputManifestFile": "$(TargetPath).manifestdfs",
+ "RegistrarScriptFile": "sdfsfd",
+ "ReplacementsFile": "sdffsd",
+ "SuppressStartupBanner": "false",
+ "TypeLibraryFile": "sfsd",
+ "UpdateFileHashes": "true",
+ "UpdateFileHashesSearchPath": "sfsd",
+ "UseFAT32Workaround": "true",
+ "UseUnicodeResponseFiles": "false",
+ "VerboseOutput": "true",
+ },
+ }
+ expected_msbuild_settings = {
+ "ClCompile": {
+ "AdditionalIncludeDirectories": "dir1",
+ "AdditionalOptions": "/more /J",
+ "AdditionalUsingDirectories": "test",
+ "AssemblerListingLocation": "$(IntDir)a",
+ "AssemblerOutput": "AssemblyCode",
+ "BasicRuntimeChecks": "EnableFastChecks",
+ "BrowseInformation": "true",
+ "BrowseInformationFile": "$(IntDir)e",
+ "BufferSecurityCheck": "false",
+ "CallingConvention": "FastCall",
+ "CompileAs": "CompileAsC",
+ "DebugInformationFormat": "EditAndContinue",
+ "DisableLanguageExtensions": "true",
+ "DisableSpecificWarnings": "abc",
+ "EnableEnhancedInstructionSet": "StreamingSIMDExtensions",
+ "EnableFiberSafeOptimizations": "true",
+ "EnablePREfast": "true",
+ "ErrorReporting": "Queue",
+ "ExceptionHandling": "Async",
+ "ExpandAttributedSource": "true",
+ "FavorSizeOrSpeed": "Size",
+ "FloatingPointExceptions": "true",
+ "FloatingPointModel": "Strict",
+ "ForceConformanceInForLoopScope": "false",
+ "ForcedIncludeFiles": "def",
+ "ForcedUsingFiles": "ge",
+ "FunctionLevelLinking": "true",
+ "GenerateXMLDocumentationFiles": "true",
+ "IgnoreStandardIncludePath": "true",
+ "InlineFunctionExpansion": "OnlyExplicitInline",
+ "IntrinsicFunctions": "true",
+ "MinimalRebuild": "true",
+ "ObjectFileName": "$(IntDir)b",
+ "OmitDefaultLibName": "true",
+ "OmitFramePointers": "true",
+ "OpenMPSupport": "true",
+ "Optimization": "Full",
+ "PrecompiledHeader": "NotUsing", # Actual conversion gives ''
+ "PrecompiledHeaderFile": "StdAfx.hd",
+ "PrecompiledHeaderOutputFile": "$(IntDir)$(TargetName).pche",
+ "PreprocessKeepComments": "true",
+ "PreprocessorDefinitions": "WIN32;_DEBUG;_CONSOLE",
+ "PreprocessSuppressLineNumbers": "true",
+ "PreprocessToFile": "true",
+ "ProgramDataBaseFileName": "$(IntDir)vc90b.pdb",
+ "RuntimeLibrary": "MultiThreadedDebugDLL",
+ "RuntimeTypeInfo": "false",
+ "ShowIncludes": "true",
+ "SmallerTypeCheck": "true",
+ "StringPooling": "true",
+ "StructMemberAlignment": "4Bytes",
+ "SuppressStartupBanner": "false",
+ "TreatWarningAsError": "true",
+ "TreatWChar_tAsBuiltInType": "false",
+ "UndefineAllPreprocessorDefinitions": "true",
+ "UndefinePreprocessorDefinitions": "wer",
+ "UseFullPaths": "true",
+ "WarningLevel": "Level3",
+ "WholeProgramOptimization": "true",
+ "XMLDocumentationFileName": "$(IntDir)c",
+ },
+ "Link": {
+ "AdditionalDependencies": "zx",
+ "AdditionalLibraryDirectories": "asd",
+ "AdditionalManifestDependencies": "s2",
+ "AdditionalOptions": "/mor2",
+ "AddModuleNamesToAssembly": "d1",
+ "AllowIsolation": "false",
+ "AssemblyDebug": "true",
+ "AssemblyLinkResource": "d5",
+ "BaseAddress": "23423",
+ "CLRImageType": "ForceSafeILImage",
+ "CLRThreadAttribute": "MTAThreadingAttribute",
+ "CLRUnmanagedCodeCheck": "true",
+ "DataExecutionPrevention": "",
+ "DelayLoadDLLs": "d4",
+ "DelaySign": "true",
+ "Driver": "UpOnly",
+ "EmbedManagedResourceFile": "d2",
+ "EnableCOMDATFolding": "false",
+ "EnableUAC": "false",
+ "EntryPointSymbol": "f5",
+ "FixedBaseAddress": "false",
+ "ForceSymbolReferences": "d3",
+ "FunctionOrder": "fssdfsd",
+ "GenerateDebugInformation": "true",
+ "GenerateMapFile": "true",
+ "HeapCommitSize": "13",
+ "HeapReserveSize": "12",
+ "IgnoreAllDefaultLibraries": "true",
+ "IgnoreEmbeddedIDL": "true",
+ "IgnoreSpecificDefaultLibraries": "flob;flok",
+ "ImportLibrary": "f4",
+ "KeyContainer": "f7",
+ "KeyFile": "f6",
+ "LargeAddressAware": "true",
+ "LinkErrorReporting": "QueueForNextLogin",
+ "LinkTimeCodeGeneration": "UseLinkTimeCodeGeneration",
+ "ManifestFile": "$(IntDir)$(TargetFileName).2intermediate.manifest",
+ "MapExports": "true",
+ "MapFileName": "d5",
+ "MergedIDLBaseFileName": "f2",
+ "MergeSections": "f5",
+ "MidlCommandFile": "f1",
+ "ModuleDefinitionFile": "sdsd",
+ "NoEntryPoint": "true",
+ "OptimizeReferences": "true",
+ "OutputFile": "$(OutDir)$(ProjectName)2.exe",
+ "PerUserRedirection": "true",
+ "Profile": "true",
+ "ProfileGuidedDatabase": "$(TargetDir)$(TargetName).pgdd",
+ "ProgramDatabaseFile": "Flob.pdb",
+ "RandomizedBaseAddress": "false",
+ "RegisterOutput": "true",
+ "SetChecksum": "false",
+ "ShowProgress": "LinkVerbose",
+ "StackCommitSize": "15",
+ "StackReserveSize": "14",
+ "StripPrivateSymbols": "d3",
+ "SubSystem": "Console",
+ "SupportUnloadOfDelayLoadedDLL": "true",
+ "SuppressStartupBanner": "false",
+ "SwapRunFromCD": "true",
+ "SwapRunFromNET": "true",
+ "TargetMachine": "MachineX86",
+ "TerminalServerAware": "false",
+ "TurnOffAssemblyGeneration": "true",
+ "TypeLibraryFile": "f3",
+ "TypeLibraryResourceID": "12",
+ "UACExecutionLevel": "RequireAdministrator",
+ "UACUIAccess": "true",
+ "Version": "333",
+ },
+ "ResourceCompile": {
+ "AdditionalIncludeDirectories": "f3",
+ "AdditionalOptions": "/more3",
+ "Culture": "0x0c0c",
+ "IgnoreStandardIncludePath": "true",
+ "PreprocessorDefinitions": "_UNICODE;UNICODE2",
+ "ResourceOutputFileName": "$(IntDir)%(Filename)3.res",
+ "ShowProgress": "true",
+ },
+ "Manifest": {
+ "AdditionalManifestFiles": "sfsdfsd",
+ "AdditionalOptions": "afdsdafsd",
+ "AssemblyIdentity": "sddfdsadfsa",
+ "ComponentFileName": "fsdfds",
+ "GenerateCatalogFiles": "true",
+ "InputResourceManifests": "asfsfdafs",
+ "OutputManifestFile": "$(TargetPath).manifestdfs",
+ "RegistrarScriptFile": "sdfsfd",
+ "ReplacementsFile": "sdffsd",
+ "SuppressStartupBanner": "false",
+ "TypeLibraryFile": "sfsd",
+ "UpdateFileHashes": "true",
+ "UpdateFileHashesSearchPath": "sfsd",
+ "VerboseOutput": "true",
+ },
+ "ProjectReference": {
+ "LinkLibraryDependencies": "false",
+ "UseLibraryDependencyInputs": "true",
+ },
+ "": {
+ "EmbedManifest": "false",
+ "GenerateManifest": "false",
+ "IgnoreImportLibrary": "true",
+ "LinkIncremental": "",
+ },
+ "ManifestResourceCompile": {
+ "ResourceOutputFileName":
+ "$(IntDir)$(TargetFileName).embed.manifest.resfdsf"
+ },
+ }
+ self.maxDiff = 9999 # on failure display a long diff
+ actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
+ msvs_settings, self.stderr
+ )
+ self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+ self._ExpectedWarnings([])
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
new file mode 100644
index 0000000..2e5c811
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
@@ -0,0 +1,59 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Visual Studio project reader/writer."""
+
+import gyp.easy_xml as easy_xml
+
+
+class Writer:
+ """Visual Studio XML tool file writer."""
+
+ def __init__(self, tool_file_path, name):
+ """Initializes the tool file.
+
+ Args:
+ tool_file_path: Path to the tool file.
+ name: Name of the tool file.
+ """
+ self.tool_file_path = tool_file_path
+ self.name = name
+ self.rules_section = ["Rules"]
+
+ def AddCustomBuildRule(
+ self, name, cmd, description, additional_dependencies, outputs, extensions
+ ):
+ """Adds a rule to the tool file.
+
+ Args:
+ name: Name of the rule.
+ description: Description of the rule.
+ cmd: Command line of the rule.
+ additional_dependencies: other files which may trigger the rule.
+ outputs: outputs of the rule.
+ extensions: extensions handled by the rule.
+ """
+ rule = [
+ "CustomBuildRule",
+ {
+ "Name": name,
+ "ExecutionDescription": description,
+ "CommandLine": cmd,
+ "Outputs": ";".join(outputs),
+ "FileExtensions": ";".join(extensions),
+ "AdditionalDependencies": ";".join(additional_dependencies),
+ },
+ ]
+ self.rules_section.append(rule)
+
+ def WriteIfChanged(self):
+ """Writes the tool file."""
+ content = [
+ "VisualStudioToolFile",
+ {"Version": "8.00", "Name": self.name},
+ self.rules_section,
+ ]
+ easy_xml.WriteXmlIfChanged(
+ content, self.tool_file_path, encoding="Windows-1252"
+ )
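+
+# Usage sketch (illustrative only; the rule name, command, and extensions
+# below are hypothetical and not taken from this module):
+#   w = Writer("my_rules.rules", "MyRules")
+#   w.AddCustomBuildRule(name="lexer", cmd="flex $(InputPath)",
+#                        description="Running flex...",
+#                        additional_dependencies=[],
+#                        outputs=["$(InputName).c"], extensions=["*.l"])
+#   w.WriteIfChanged()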
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
new file mode 100644
index 0000000..e580c00
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
@@ -0,0 +1,153 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Visual Studio user preferences file writer."""
+
+import os
+import re
+import socket # for gethostname
+
+import gyp.easy_xml as easy_xml
+
+
+# ------------------------------------------------------------------------------
+
+
+def _FindCommandInPath(command):
+ """If there are no slashes in the command given, this function
+ searches the PATH env to find the given command, and converts it
+ to an absolute path. We have to do this because MSVS is looking
+ for an actual file to launch a debugger on, not just a command
+ line. Note that this happens at GYP time, so anything needing to
+ be built needs to have a full path."""
+ if "/" in command or "\\" in command:
+ # If the command already has path elements (either relative or
+ # absolute), then assume it is constructed properly.
+ return command
+ else:
+ # Search through the path list and find an existing file that
+ # we can access.
+ paths = os.environ.get("PATH", "").split(os.pathsep)
+ for path in paths:
+ item = os.path.join(path, command)
+ if os.path.isfile(item) and os.access(item, os.X_OK):
+ return item
+ return command
+
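+# Illustrative sketch (hypothetical command and path, not from this module):
+# _FindCommandInPath("app.exe") scans each PATH entry and would return, say,
+# "C:\tools\app.exe" if that file exists and is executable; a command that
+# already contains a slash, or that is not found, is returned unchanged.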
+
+def _QuoteWin32CommandLineArgs(args):
+ new_args = []
+ for arg in args:
+ # Replace all double-quotes with double-double-quotes to escape
+ # them for cmd shell, and then quote the whole thing if there
+ # are any.
+ if arg.find('"') != -1:
+ arg = '""'.join(arg.split('"'))
+ arg = '"%s"' % arg
+
+ # Otherwise, if there are any spaces, quote the whole arg.
+ elif re.search(r"[ \t\n]", arg):
+ arg = '"%s"' % arg
+ new_args.append(arg)
+ return new_args
+
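+# Illustrative sketch of the quoting rules above (hypothetical arguments):
+#   _QuoteWin32CommandLineArgs(['a b', 'say "hi"', 'plain'])
+#   returns ['"a b"', '"say ""hi"""', 'plain']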
+
+class Writer:
+ """Visual Studio XML user user file writer."""
+
+ def __init__(self, user_file_path, version, name):
+ """Initializes the user file.
+
+ Args:
+ user_file_path: Path to the user file.
+ version: Version info.
+ name: Name of the user file.
+ """
+ self.user_file_path = user_file_path
+ self.version = version
+ self.name = name
+ self.configurations = {}
+
+ def AddConfig(self, name):
+ """Adds a configuration to the project.
+
+ Args:
+ name: Configuration name.
+ """
+ self.configurations[name] = ["Configuration", {"Name": name}]
+
+ def AddDebugSettings(
+ self, config_name, command, environment=None, working_directory=""
+ ):
+ """Adds a DebugSettings node to the user file for a particular config.
+
+ Args:
+ config_name: name of the configuration to attach the settings to.
+ command: command line to run. First element in the list is the
+ executable. All elements of the command will be quoted if
+ necessary.
+ environment: dict of environment variables to set. (optional)
+ working_directory: working directory for the command. (optional)
+ """
+ command = _QuoteWin32CommandLineArgs(command)
+
+ abs_command = _FindCommandInPath(command[0])
+
+ if environment and isinstance(environment, dict):
+ env_list = [f'{key}="{val}"' for (key, val) in environment.items()]
+ environment = " ".join(env_list)
+ else:
+ environment = ""
+
+ n_cmd = [
+ "DebugSettings",
+ {
+ "Command": abs_command,
+ "WorkingDirectory": working_directory,
+ "CommandArguments": " ".join(command[1:]),
+ "RemoteMachine": socket.gethostname(),
+ "Environment": environment,
+ "EnvironmentMerge": "true",
+ # Currently these are all "dummy" values that we're just setting
+ # in the default manner that MSVS does it. We could use some of
+ # these to add additional capabilities, I suppose, but they might
+ # not have parity with other platforms then.
+ "Attach": "false",
+ "DebuggerType": "3", # 'auto' debugger
+ "Remote": "1",
+ "RemoteCommand": "",
+ "HttpUrl": "",
+ "PDBPath": "",
+ "SQLDebugging": "",
+ "DebuggerFlavor": "0",
+ "MPIRunCommand": "",
+ "MPIRunArguments": "",
+ "MPIRunWorkingDirectory": "",
+ "ApplicationCommand": "",
+ "ApplicationArguments": "",
+ "ShimCommand": "",
+ "MPIAcceptMode": "",
+ "MPIAcceptFilter": "",
+ },
+ ]
+
+ # Find the config, and add it if it doesn't exist.
+ if config_name not in self.configurations:
+ self.AddConfig(config_name)
+
+ # Add the DebugSettings onto the appropriate config.
+ self.configurations[config_name].append(n_cmd)
+
+ def WriteIfChanged(self):
+ """Writes the user file."""
+ configs = ["Configurations"]
+ for config, spec in sorted(self.configurations.items()):
+ configs.append(spec)
+
+ content = [
+ "VisualStudioUserFile",
+ {"Version": self.version.ProjectVersion(), "Name": self.name},
+ configs,
+ ]
+ easy_xml.WriteXmlIfChanged(
+ content, self.user_file_path, encoding="Windows-1252"
+ )
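+
+# Usage sketch (hypothetical file name, config, and command; `version` is any
+# object exposing ProjectVersion(), such as a gyp MSVSVersion instance):
+#   writer = Writer("app.vcproj.user", version, "app")
+#   writer.AddDebugSettings("Debug|Win32", ["app.exe", "--verbose"],
+#                           environment={"MY_VAR": "1"})
+#   writer.WriteIfChanged()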
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
new file mode 100644
index 0000000..36bb782
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
@@ -0,0 +1,271 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions shared amongst the Windows generators."""
+
+import copy
+import os
+
+
+# A dictionary mapping supported target types to extensions.
+TARGET_TYPE_EXT = {
+ "executable": "exe",
+ "loadable_module": "dll",
+ "shared_library": "dll",
+ "static_library": "lib",
+ "windows_driver": "sys",
+}
+
+
+def _GetLargePdbShimCcPath():
+ """Returns the path of the large_pdb_shim.cc file."""
+ this_dir = os.path.abspath(os.path.dirname(__file__))
+ src_dir = os.path.abspath(os.path.join(this_dir, "..", ".."))
+ win_data_dir = os.path.join(src_dir, "data", "win")
+ large_pdb_shim_cc = os.path.join(win_data_dir, "large-pdb-shim.cc")
+ return large_pdb_shim_cc
+
+
+def _DeepCopySomeKeys(in_dict, keys):
+ """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
+
+ Arguments:
+ in_dict: The dictionary to copy.
+ keys: The keys to be copied. If a key is in this list and doesn't exist in
+ |in_dict| this is not an error.
+ Returns:
+ The partially deep-copied dictionary.
+ """
+ d = {}
+ for key in keys:
+ if key not in in_dict:
+ continue
+ d[key] = copy.deepcopy(in_dict[key])
+ return d
+
+
+def _SuffixName(name, suffix):
+ """Add a suffix to the end of a target.
+
+ Arguments:
+ name: name of the target (foo#target)
+ suffix: the suffix to be added
+ Returns:
+ Target name with suffix added (foo_suffix#target)
+ """
+ parts = name.rsplit("#", 1)
+ parts[0] = f"{parts[0]}_{suffix}"
+ return "#".join(parts)
+
+
+def _ShardName(name, number):
+ """Add a shard number to the end of a target.
+
+ Arguments:
+ name: name of the target (foo#target)
+ number: shard number
+ Returns:
+ Target name with shard added (foo_1#target)
+ """
+ return _SuffixName(name, str(number))
+
+
+def ShardTargets(target_list, target_dicts):
+ """Shard some targets apart to work around the linkers limits.
+
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ Returns:
+ Tuple of the new sharded versions of the inputs.
+ """
+ # Gather the targets to shard, and how many pieces.
+ targets_to_shard = {}
+ for t in target_dicts:
+ shards = int(target_dicts[t].get("msvs_shard", 0))
+ if shards:
+ targets_to_shard[t] = shards
+ # Shard target_list.
+ new_target_list = []
+ for t in target_list:
+ if t in targets_to_shard:
+ for i in range(targets_to_shard[t]):
+ new_target_list.append(_ShardName(t, i))
+ else:
+ new_target_list.append(t)
+ # Shard target_dict.
+ new_target_dicts = {}
+ for t in target_dicts:
+ if t in targets_to_shard:
+ for i in range(targets_to_shard[t]):
+ name = _ShardName(t, i)
+ new_target_dicts[name] = copy.copy(target_dicts[t])
+ new_target_dicts[name]["target_name"] = _ShardName(
+ new_target_dicts[name]["target_name"], i
+ )
+ sources = new_target_dicts[name].get("sources", [])
+ new_sources = []
+ for pos in range(i, len(sources), targets_to_shard[t]):
+ new_sources.append(sources[pos])
+ new_target_dicts[name]["sources"] = new_sources
+ else:
+ new_target_dicts[t] = target_dicts[t]
+ # Shard dependencies.
+ for t in sorted(new_target_dicts):
+ for deptype in ("dependencies", "dependencies_original"):
+ dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
+ new_dependencies = []
+ for d in dependencies:
+ if d in targets_to_shard:
+ for i in range(targets_to_shard[d]):
+ new_dependencies.append(_ShardName(d, i))
+ else:
+ new_dependencies.append(d)
+ new_target_dicts[t][deptype] = new_dependencies
+
+ return (new_target_list, new_target_dicts)
+
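+# Illustrative sketch of the round-robin sharding above (hypothetical data):
+# a target "foo#target" with "msvs_shard": 2 and sources [a, b, c, d] becomes
+# "foo_0#target" with sources [a, c] and "foo_1#target" with sources [b, d];
+# dependencies on "foo#target" are rewritten to depend on both shards.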
+
+def _GetPdbPath(target_dict, config_name, vars):
+ """Returns the path to the PDB file that will be generated by a given
+ configuration.
+
+ The lookup proceeds as follows:
+ - Look for an explicit path in the VCLinkerTool configuration block.
+ - Look for an 'msvs_large_pdb_path' variable.
+ - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
+ specified.
+ - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
+
+ Arguments:
+ target_dict: The target dictionary to be searched.
+ config_name: The name of the configuration of interest.
+ vars: A dictionary of common GYP variables with generator-specific values.
+ Returns:
+ The path of the corresponding PDB file.
+ """
+ config = target_dict["configurations"][config_name]
+ msvs = config.setdefault("msvs_settings", {})
+
+ linker = msvs.get("VCLinkerTool", {})
+
+ pdb_path = linker.get("ProgramDatabaseFile")
+ if pdb_path:
+ return pdb_path
+
+ variables = target_dict.get("variables", {})
+ pdb_path = variables.get("msvs_large_pdb_path", None)
+ if pdb_path:
+ return pdb_path
+
+ pdb_base = target_dict.get("product_name", target_dict["target_name"])
+ pdb_base = "{}.{}.pdb".format(pdb_base, TARGET_TYPE_EXT[target_dict["type"]])
+ pdb_path = vars["PRODUCT_DIR"] + "/" + pdb_base
+
+ return pdb_path
+
+
+def InsertLargePdbShims(target_list, target_dicts, vars):
+ """Insert a shim target that forces the linker to use 4KB pagesize PDBs.
+
+ This is a workaround for targets with PDBs greater than 1GB in size, the
+ limit for the 1KB pagesize PDBs created by the linker by default.
+
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ vars: A dictionary of common GYP variables with generator-specific values.
+ Returns:
+ Tuple of the shimmed version of the inputs.
+ """
+ # Determine which targets need shimming.
+ targets_to_shim = []
+ for t in target_dicts:
+ target_dict = target_dicts[t]
+
+ # We only want to shim targets that have msvs_large_pdb enabled.
+ if not int(target_dict.get("msvs_large_pdb", 0)):
+ continue
+ # This is intended for executable, shared_library and loadable_module
+ # targets where every configuration is set up to produce a PDB output.
+ # If any of these conditions is not true then the shim logic will fail
+ # below.
+ targets_to_shim.append(t)
+
+ large_pdb_shim_cc = _GetLargePdbShimCcPath()
+
+ for t in targets_to_shim:
+ target_dict = target_dicts[t]
+ target_name = target_dict.get("target_name")
+
+ base_dict = _DeepCopySomeKeys(
+ target_dict, ["configurations", "default_configuration", "toolset"]
+ )
+
+ # This is the dict for copying the source file (part of the GYP tree)
+ # to the intermediate directory of the project. This is necessary because
+ # we can't always build a relative path to the shim source file (on Windows
+ # GYP and the project may be on different drives), and Ninja hates absolute
+ # paths (it ends up generating the .obj and .obj.d alongside the source
+ # file, polluting GYP's tree).
+ copy_suffix = "large_pdb_copy"
+ copy_target_name = target_name + "_" + copy_suffix
+ full_copy_target_name = _SuffixName(t, copy_suffix)
+ shim_cc_basename = os.path.basename(large_pdb_shim_cc)
+ shim_cc_dir = vars["SHARED_INTERMEDIATE_DIR"] + "/" + copy_target_name
+ shim_cc_path = shim_cc_dir + "/" + shim_cc_basename
+ copy_dict = copy.deepcopy(base_dict)
+ copy_dict["target_name"] = copy_target_name
+ copy_dict["type"] = "none"
+ copy_dict["sources"] = [large_pdb_shim_cc]
+ copy_dict["copies"] = [
+ {"destination": shim_cc_dir, "files": [large_pdb_shim_cc]}
+ ]
+
+ # This is the dict for the PDB generating shim target. It depends on the
+ # copy target.
+ shim_suffix = "large_pdb_shim"
+ shim_target_name = target_name + "_" + shim_suffix
+ full_shim_target_name = _SuffixName(t, shim_suffix)
+ shim_dict = copy.deepcopy(base_dict)
+ shim_dict["target_name"] = shim_target_name
+ shim_dict["type"] = "static_library"
+ shim_dict["sources"] = [shim_cc_path]
+ shim_dict["dependencies"] = [full_copy_target_name]
+
+ # Set up the shim to output its PDB to the same location as the final linker
+ # target.
+ for config_name, config in shim_dict.get("configurations").items():
+ pdb_path = _GetPdbPath(target_dict, config_name, vars)
+
+ # A few keys that we don't want to propagate.
+ for key in ["msvs_precompiled_header", "msvs_precompiled_source", "test"]:
+ config.pop(key, None)
+
+ msvs = config.setdefault("msvs_settings", {})
+
+ # Update the compiler directives in the shim target.
+ compiler = msvs.setdefault("VCCLCompilerTool", {})
+ compiler["DebugInformationFormat"] = "3"
+ compiler["ProgramDataBaseFileName"] = pdb_path
+
+ # Set the explicit PDB path in the appropriate configuration of the
+ # original target.
+ config = target_dict["configurations"][config_name]
+ msvs = config.setdefault("msvs_settings", {})
+ linker = msvs.setdefault("VCLinkerTool", {})
+ linker["GenerateDebugInformation"] = "true"
+ linker["ProgramDatabaseFile"] = pdb_path
+
+ # Add the new targets. They must go to the beginning of the list so that
+ # the dependency generation works as expected in ninja.
+ target_list.insert(0, full_copy_target_name)
+ target_list.insert(0, full_shim_target_name)
+ target_dicts[full_copy_target_name] = copy_dict
+ target_dicts[full_shim_target_name] = shim_dict
+
+ # Update the original target to depend on the shim target.
+ target_dict.setdefault("dependencies", []).append(full_shim_target_name)
+
+ return (target_list, target_dicts)
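+
+# Sketch of the resulting graph for a shimmed target t (names derived via
+# _SuffixName): t depends on t_large_pdb_shim#target, a static_library that
+# compiles the copied shim .cc with its PDB written to t's final PDB path,
+# which in turn depends on t_large_pdb_copy#target, a 'none' copy target.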
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
new file mode 100644
index 0000000..8d7f21e
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
@@ -0,0 +1,574 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Handle version information related to Visual Stuio."""
+
+import errno
+import os
+import re
+import subprocess
+import sys
+import glob
+
+
+def JoinPath(*args):
+ return os.path.normpath(os.path.join(*args))
+
+
+class VisualStudioVersion:
+ """Information regarding a version of Visual Studio."""
+
+ def __init__(
+ self,
+ short_name,
+ description,
+ solution_version,
+ project_version,
+ flat_sln,
+ uses_vcxproj,
+ path,
+ sdk_based,
+ default_toolset=None,
+ compatible_sdks=None,
+ ):
+ self.short_name = short_name
+ self.description = description
+ self.solution_version = solution_version
+ self.project_version = project_version
+ self.flat_sln = flat_sln
+ self.uses_vcxproj = uses_vcxproj
+ self.path = path
+ self.sdk_based = sdk_based
+ self.default_toolset = default_toolset
+ compatible_sdks = compatible_sdks or []
+ compatible_sdks.sort(key=lambda v: float(v.replace("v", "")), reverse=True)
+ self.compatible_sdks = compatible_sdks
+
+ def ShortName(self):
+ return self.short_name
+
+ def Description(self):
+ """Get the full description of the version."""
+ return self.description
+
+ def SolutionVersion(self):
+ """Get the version number of the sln files."""
+ return self.solution_version
+
+ def ProjectVersion(self):
+ """Get the version number of the vcproj or vcxproj files."""
+ return self.project_version
+
+ def FlatSolution(self):
+ return self.flat_sln
+
+ def UsesVcxproj(self):
+ """Returns true if this version uses a vcxproj file."""
+ return self.uses_vcxproj
+
+ def ProjectExtension(self):
+ """Returns the file extension for the project."""
+ return ".vcxproj" if self.uses_vcxproj else ".vcproj"
+
+ def Path(self):
+ """Returns the path to Visual Studio installation."""
+ return self.path
+
+ def ToolPath(self, tool):
+ """Returns the path to a given compiler tool. """
+ return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
+
+ def DefaultToolset(self):
+ """Returns the msbuild toolset version that will be used in the absence
+ of a user override."""
+ return self.default_toolset
+
+ def _SetupScriptInternal(self, target_arch):
+ """Returns a command (with arguments) to be used to set up the
+ environment."""
+ assert target_arch in ("x86", "x64"), "target_arch not supported"
+ # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
+ # depot_tools build tools and should run SetEnv.Cmd to set up the
+ # environment. The check for WindowsSDKDir alone is not sufficient because
+ # this is set by running vcvarsall.bat.
+ sdk_dir = os.environ.get("WindowsSDKDir", "")
+ setup_path = JoinPath(sdk_dir, "Bin", "SetEnv.Cmd")
+ if self.sdk_based and sdk_dir and os.path.exists(setup_path):
+ return [setup_path, "/" + target_arch]
+
+ is_host_arch_x64 = (
+ os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64"
+ or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64"
+ )
+
+ # For VS2017 (and newer) it's fairly easy
+ if self.short_name >= "2017":
+ script_path = JoinPath(
+ self.path, "VC", "Auxiliary", "Build", "vcvarsall.bat"
+ )
+
+ # Always use a native executable, cross-compiling if necessary.
+ host_arch = "amd64" if is_host_arch_x64 else "x86"
+ msvc_target_arch = "amd64" if target_arch == "x64" else "x86"
+ arg = host_arch
+ if host_arch != msvc_target_arch:
+ arg += "_" + msvc_target_arch
+
+ return [script_path, arg]
+
+ # We try to find the best version of the env setup batch.
+ vcvarsall = JoinPath(self.path, "VC", "vcvarsall.bat")
+ if target_arch == "x86":
+ if (
+ self.short_name >= "2013"
+ and self.short_name[-1] != "e"
+ and is_host_arch_x64
+ ):
+ # VS2013 and later, non-Express editions have an x64-x86 cross
+ # compiler that we want to prefer.
+ return [vcvarsall, "amd64_x86"]
+ else:
+ # Otherwise, the standard x86 compiler. We don't use VC/vcvarsall.bat
+ # for x86 because vcvarsall calls vcvars32, which it can only find if
+ # VS??COMNTOOLS is set, which isn't guaranteed.
+ return [JoinPath(self.path, "Common7", "Tools", "vsvars32.bat")]
+ elif target_arch == "x64":
+ arg = "x86_amd64"
+ # Use the 64-on-64 compiler if we're not using an Express edition
+ # and we're running on a 64-bit OS.
+ if self.short_name[-1] != "e" and is_host_arch_x64:
+ arg = "amd64"
+ return [vcvarsall, arg]
+
+ def SetupScript(self, target_arch):
+ script_data = self._SetupScriptInternal(target_arch)
+ script_path = script_data[0]
+ if not os.path.exists(script_path):
+ raise Exception(
+ "%s is missing - make sure VC++ tools are installed." % script_path
+ )
+ return script_data
+
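+# Example (hypothetical install path): on a 64-bit host with a non-Express
+# VS2015, SetupScript("x64") returns [<path>\VC\vcvarsall.bat, "amd64"];
+# for VS2017+ it returns [<path>\VC\Auxiliary\Build\vcvarsall.bat, "amd64"].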
+
+def _RegistryQueryBase(sysdir, key, value):
+ """Use reg.exe to read a particular key.
+
+ While ideally we might use the win32 module, we would like gyp to remain
+ Python-neutral; cygwin Python, for instance, lacks this module.
+
+ Arguments:
+ sysdir: The system subdirectory to attempt to launch reg.exe from.
+ key: The registry key to read from.
+ value: The particular value to read.
+ Return:
+ stdout from reg.exe, or None for failure.
+ """
+ # Skip if not on Windows or Python Win32 setup issue
+ if sys.platform not in ("win32", "cygwin"):
+ return None
+ # Setup params to pass to and attempt to launch reg.exe
+ cmd = [os.path.join(os.environ.get("WINDIR", ""), sysdir, "reg.exe"), "query", key]
+ if value:
+ cmd.extend(["/v", value])
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
+ # Note that the error text may be in [1] in some cases
+ text = p.communicate()[0].decode("utf-8")
+ # Check return code from reg.exe; officially 0==success and 1==error
+ if p.returncode:
+ return None
+ return text
+
+
+def _RegistryQuery(key, value=None):
+ r"""Use reg.exe to read a particular key through _RegistryQueryBase.
+
+ First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
+ that fails, it falls back to System32. Sysnative is available on Vista and
+ up and available on Windows Server 2003 and XP through KB patch 942589. Note
+ that Sysnative will always fail under 64-bit Python, since it is a virtual
+ directory visible only to 32-bit processes; in that case System32 already
+ works correctly.
+
+ KB 942589 - http://support.microsoft.com/kb/942589/en-us.
+
+ Arguments:
+ key: The registry key.
+ value: The particular registry value to read (optional).
+ Return:
+ stdout from reg.exe, or None for failure.
+ """
+ text = None
+ try:
+ text = _RegistryQueryBase("Sysnative", key, value)
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ text = _RegistryQueryBase("System32", key, value)
+ else:
+ raise
+ return text
+
+
+def _RegistryGetValueUsingWinReg(key, value):
+ """Use the _winreg module to obtain the value of a registry key.
+
+ Args:
+ key: The registry key.
+ value: The particular registry value to read.
+ Return:
+      contents of the registry key's value, or None on failure. Raises
+      ImportError if winreg is unavailable.
+ """
+ from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
+ try:
+ root, subkey = key.split("\\", 1)
+ assert root == "HKLM" # Only need HKLM for now.
+ with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
+ return QueryValueEx(hkey, value)[0]
+ except OSError:
+ return None
+
+
+def _RegistryGetValue(key, value):
+ """Use _winreg or reg.exe to obtain the value of a registry key.
+
+    Using _winreg is preferable because it solves an issue in some corporate
+    environments where access to reg.exe is locked down. However, we still need
+    to fall back to reg.exe when the _winreg module is not available (for
+    example in Cygwin Python).
+
+ Args:
+ key: The registry key.
+ value: The particular registry value to read.
+ Return:
+ contents of the registry key's value, or None on failure.
+ """
+ try:
+ return _RegistryGetValueUsingWinReg(key, value)
+ except ImportError:
+ pass
+
+ # Fallback to reg.exe if we fail to import _winreg.
+ text = _RegistryQuery(key, value)
+ if not text:
+ return None
+ # Extract value.
+ match = re.search(r"REG_\w+\s+([^\r]+)\r\n", text)
+ if not match:
+ return None
+ return match.group(1)
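+
+
+# Illustrative usage (not part of the original source): the detection code
+# below calls, for example,
+#   _RegistryGetValue(r"HKLM\Software\Microsoft\VisualStudio\SxS\VS7", "15.0")
+# which returns the install directory string on success, or None when the
+# value is absent or the registry cannot be queried.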
+
+
+def _CreateVersion(name, path, sdk_based=False):
+ """Sets up MSVS project generation.
+
+    Setup is based on the GYP_MSVS_VERSION environment variable, or on whatever
+    is autodetected if GYP_MSVS_VERSION is not explicitly specified. If the
+    version passed in does not match a key in the versions dict below, Python
+    raises a KeyError.
+ """
+ if path:
+ path = os.path.normpath(path)
+ versions = {
+ "2022": VisualStudioVersion(
+ "2022",
+ "Visual Studio 2022",
+ solution_version="12.00",
+ project_version="17.0",
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset="v143",
+ compatible_sdks=["v8.1", "v10.0"],
+ ),
+ "2019": VisualStudioVersion(
+ "2019",
+ "Visual Studio 2019",
+ solution_version="12.00",
+ project_version="16.0",
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset="v142",
+ compatible_sdks=["v8.1", "v10.0"],
+ ),
+ "2017": VisualStudioVersion(
+ "2017",
+ "Visual Studio 2017",
+ solution_version="12.00",
+ project_version="15.0",
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset="v141",
+ compatible_sdks=["v8.1", "v10.0"],
+ ),
+ "2015": VisualStudioVersion(
+ "2015",
+ "Visual Studio 2015",
+ solution_version="12.00",
+ project_version="14.0",
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset="v140",
+ ),
+ "2013": VisualStudioVersion(
+ "2013",
+ "Visual Studio 2013",
+ solution_version="13.00",
+ project_version="12.0",
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset="v120",
+ ),
+ "2013e": VisualStudioVersion(
+ "2013e",
+ "Visual Studio 2013",
+ solution_version="13.00",
+ project_version="12.0",
+ flat_sln=True,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset="v120",
+ ),
+ "2012": VisualStudioVersion(
+ "2012",
+ "Visual Studio 2012",
+ solution_version="12.00",
+ project_version="4.0",
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset="v110",
+ ),
+ "2012e": VisualStudioVersion(
+ "2012e",
+ "Visual Studio 2012",
+ solution_version="12.00",
+ project_version="4.0",
+ flat_sln=True,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset="v110",
+ ),
+ "2010": VisualStudioVersion(
+ "2010",
+ "Visual Studio 2010",
+ solution_version="11.00",
+ project_version="4.0",
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ ),
+ "2010e": VisualStudioVersion(
+ "2010e",
+ "Visual C++ Express 2010",
+ solution_version="11.00",
+ project_version="4.0",
+ flat_sln=True,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ ),
+ "2008": VisualStudioVersion(
+ "2008",
+ "Visual Studio 2008",
+ solution_version="10.00",
+ project_version="9.00",
+ flat_sln=False,
+ uses_vcxproj=False,
+ path=path,
+ sdk_based=sdk_based,
+ ),
+ "2008e": VisualStudioVersion(
+ "2008e",
+ "Visual Studio 2008",
+ solution_version="10.00",
+ project_version="9.00",
+ flat_sln=True,
+ uses_vcxproj=False,
+ path=path,
+ sdk_based=sdk_based,
+ ),
+ "2005": VisualStudioVersion(
+ "2005",
+ "Visual Studio 2005",
+ solution_version="9.00",
+ project_version="8.00",
+ flat_sln=False,
+ uses_vcxproj=False,
+ path=path,
+ sdk_based=sdk_based,
+ ),
+ "2005e": VisualStudioVersion(
+ "2005e",
+ "Visual Studio 2005",
+ solution_version="9.00",
+ project_version="8.00",
+ flat_sln=True,
+ uses_vcxproj=False,
+ path=path,
+ sdk_based=sdk_based,
+ ),
+ }
+ return versions[str(name)]
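+
+
+# Illustrative usage (hypothetical, abbreviated path; not from the original
+# source):
+#   _CreateVersion("2019", r"C:\...\Microsoft Visual Studio\2019\Community")
+# returns a VisualStudioVersion for VS2019 with the v142 default toolset.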
+
+
+def _ConvertToCygpath(path):
+ """Convert to cygwin path if we are using cygwin."""
+ if sys.platform == "cygwin":
+ p = subprocess.Popen(["cygpath", path], stdout=subprocess.PIPE)
+ path = p.communicate()[0].decode("utf-8").strip()
+ return path
+
+
+def _DetectVisualStudioVersions(versions_to_check, force_express):
+ """Collect the list of installed visual studio versions.
+
+    Detection is based on the registry and a quick check for devenv.exe.
+
+    Returns:
+      A list of visual studio versions installed, in descending order of
+      usage preference. Possibilities are:
+ 2005(e) - Visual Studio 2005 (8)
+ 2008(e) - Visual Studio 2008 (9)
+ 2010(e) - Visual Studio 2010 (10)
+ 2012(e) - Visual Studio 2012 (11)
+ 2013(e) - Visual Studio 2013 (12)
+ 2015 - Visual Studio 2015 (14)
+ 2017 - Visual Studio 2017 (15)
+ 2019 - Visual Studio 2019 (16)
+ 2022 - Visual Studio 2022 (17)
+ Where (e) is e for express editions of MSVS and blank otherwise.
+ """
+ version_to_year = {
+ "8.0": "2005",
+ "9.0": "2008",
+ "10.0": "2010",
+ "11.0": "2012",
+ "12.0": "2013",
+ "14.0": "2015",
+ "15.0": "2017",
+ "16.0": "2019",
+ "17.0": "2022",
+ }
+ versions = []
+ for version in versions_to_check:
+        # Old method of searching for which VS version is installed.
+        # We don't use the 2010-encouraged way because we also want to get the
+        # path to the binaries, which it doesn't offer.
+ keys = [
+ r"HKLM\Software\Microsoft\VisualStudio\%s" % version,
+ r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s" % version,
+ r"HKLM\Software\Microsoft\VCExpress\%s" % version,
+ r"HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s" % version,
+ ]
+        for key in keys:
+            path = _RegistryGetValue(key, "InstallDir")
+ if not path:
+ continue
+ path = _ConvertToCygpath(path)
+ # Check for full.
+ full_path = os.path.join(path, "devenv.exe")
+ express_path = os.path.join(path, "*express.exe")
+ if not force_express and os.path.exists(full_path):
+ # Add this one.
+ versions.append(
+ _CreateVersion(
+ version_to_year[version], os.path.join(path, "..", "..")
+ )
+ )
+ # Check for express.
+ elif glob.glob(express_path):
+ # Add this one.
+ versions.append(
+ _CreateVersion(
+ version_to_year[version] + "e", os.path.join(path, "..", "..")
+ )
+ )
+
+ # The old method above does not work when only SDK is installed.
+ keys = [
+ r"HKLM\Software\Microsoft\VisualStudio\SxS\VC7",
+ r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7",
+ r"HKLM\Software\Microsoft\VisualStudio\SxS\VS7",
+ r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7",
+ ]
+        for key in keys:
+            path = _RegistryGetValue(key, version)
+ if not path:
+ continue
+ path = _ConvertToCygpath(path)
+ if version == "15.0":
+ if os.path.exists(path):
+ versions.append(_CreateVersion("2017", path))
+ elif version != "14.0": # There is no Express edition for 2015.
+ versions.append(
+ _CreateVersion(
+ version_to_year[version] + "e",
+ os.path.join(path, ".."),
+ sdk_based=True,
+ )
+ )
+
+ return versions
+
+
+def SelectVisualStudioVersion(version="auto", allow_fallback=True):
+ """Select which version of Visual Studio projects to generate.
+
+    Arguments:
+      version: Hook to allow caller to force a particular version (vs auto).
+      allow_fallback: If true, fall back to a default version when no install
+        is detected; if false, raise an error instead.
+    Returns:
+      An object representing a visual studio project format version.
+ """
+ # In auto mode, check environment variable for override.
+ if version == "auto":
+ version = os.environ.get("GYP_MSVS_VERSION", "auto")
+ version_map = {
+ "auto": ("17.0", "16.0", "15.0", "14.0", "12.0", "10.0", "9.0", "8.0", "11.0"),
+ "2005": ("8.0",),
+ "2005e": ("8.0",),
+ "2008": ("9.0",),
+ "2008e": ("9.0",),
+ "2010": ("10.0",),
+ "2010e": ("10.0",),
+ "2012": ("11.0",),
+ "2012e": ("11.0",),
+ "2013": ("12.0",),
+ "2013e": ("12.0",),
+ "2015": ("14.0",),
+ "2017": ("15.0",),
+ "2019": ("16.0",),
+ "2022": ("17.0",),
+ }
+ override_path = os.environ.get("GYP_MSVS_OVERRIDE_PATH")
+ if override_path:
+ msvs_version = os.environ.get("GYP_MSVS_VERSION")
+ if not msvs_version:
+ raise ValueError(
+ "GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be "
+ "set to a particular version (e.g. 2010e)."
+ )
+ return _CreateVersion(msvs_version, override_path, sdk_based=True)
+ version = str(version)
+ versions = _DetectVisualStudioVersions(version_map[version], "e" in version)
+ if not versions:
+ if not allow_fallback:
+ raise ValueError("Could not locate Visual Studio installation.")
+ if version == "auto":
+ # Default to 2005 if we couldn't find anything
+ return _CreateVersion("2005", None)
+ else:
+ return _CreateVersion(version, None)
+ return versions[0]
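+
+
+# Illustrative usage (not part of the original source):
+#   SelectVisualStudioVersion()        # auto-detect, honoring GYP_MSVS_VERSION
+#   SelectVisualStudioVersion("2019")  # force VS2019 project generation
+# Both return a VisualStudioVersion object, falling back to a default when
+# nothing is installed and allow_fallback is true.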
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/__init__.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
new file mode 100755
index 0000000..2aa39d0
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
@@ -0,0 +1,690 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import copy
+import gyp.input
+import argparse
+import os.path
+import re
+import shlex
+import sys
+import traceback
+from gyp.common import GypError
+
+
+# Default debug modes for GYP
+debug = {}
+
+# List of "official" debug modes, but you can use anything you like.
+DEBUG_GENERAL = "general"
+DEBUG_VARIABLES = "variables"
+DEBUG_INCLUDES = "includes"
+
+
+def DebugOutput(mode, message, *args):
+ if "all" in gyp.debug or mode in gyp.debug:
+ ctx = ("unknown", 0, "unknown")
+ try:
+ f = traceback.extract_stack(limit=2)
+ if f:
+ ctx = f[0][:3]
+ except Exception:
+ pass
+ if args:
+ message %= args
+ print(
+ "%s:%s:%d:%s %s"
+ % (mode.upper(), os.path.basename(ctx[0]), ctx[1], ctx[2], message)
+ )
+
+
+def FindBuildFiles():
+ extension = ".gyp"
+ files = os.listdir(os.getcwd())
+ build_files = []
+ for file in files:
+ if file.endswith(extension):
+ build_files.append(file)
+ return build_files
+
+
+def Load(
+ build_files,
+ format,
+ default_variables={},
+ includes=[],
+ depth=".",
+ params=None,
+ check=False,
+ circular_check=True,
+):
+ """
+ Loads one or more specified build files.
+ default_variables and includes will be copied before use.
+ Returns the generator for the specified format and the
+ data returned by loading the specified build files.
+ """
+ if params is None:
+ params = {}
+
+ if "-" in format:
+ format, params["flavor"] = format.split("-", 1)
+
+ default_variables = copy.copy(default_variables)
+
+ # Default variables provided by this program and its modules should be
+ # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
+ # avoiding collisions with user and automatic variables.
+ default_variables["GENERATOR"] = format
+ default_variables["GENERATOR_FLAVOR"] = params.get("flavor", "")
+
+ # Format can be a custom python file, or by default the name of a module
+ # within gyp.generator.
+ if format.endswith(".py"):
+ generator_name = os.path.splitext(format)[0]
+ path, generator_name = os.path.split(generator_name)
+
+ # Make sure the path to the custom generator is in sys.path
+ # Don't worry about removing it once we are done. Keeping the path
+ # to each generator that is used in sys.path is likely harmless and
+ # arguably a good idea.
+ path = os.path.abspath(path)
+ if path not in sys.path:
+ sys.path.insert(0, path)
+ else:
+ generator_name = "gyp.generator." + format
+
+ # These parameters are passed in order (as opposed to by key)
+ # because ActivePython cannot handle key parameters to __import__.
+ generator = __import__(generator_name, globals(), locals(), generator_name)
+ for (key, val) in generator.generator_default_variables.items():
+ default_variables.setdefault(key, val)
+
+ output_dir = params["options"].generator_output or params["options"].toplevel_dir
+ if default_variables["GENERATOR"] == "ninja":
+ default_variables.setdefault(
+ "PRODUCT_DIR_ABS",
+ os.path.join(output_dir, "out", default_variables["build_type"]),
+ )
+ else:
+ default_variables.setdefault(
+ "PRODUCT_DIR_ABS",
+ os.path.join(output_dir, default_variables["CONFIGURATION_NAME"]),
+ )
+
+ # Give the generator the opportunity to set additional variables based on
+ # the params it will receive in the output phase.
+ if getattr(generator, "CalculateVariables", None):
+ generator.CalculateVariables(default_variables, params)
+
+ # Give the generator the opportunity to set generator_input_info based on
+ # the params it will receive in the output phase.
+ if getattr(generator, "CalculateGeneratorInputInfo", None):
+ generator.CalculateGeneratorInputInfo(params)
+
+    # Fetch the generator-specific info that gets fed to input. We use getattr
+    # so we can supply defaults, and the generators only have to provide what
+    # they need.
+ generator_input_info = {
+ "non_configuration_keys": getattr(
+ generator, "generator_additional_non_configuration_keys", []
+ ),
+ "path_sections": getattr(generator, "generator_additional_path_sections", []),
+ "extra_sources_for_rules": getattr(
+ generator, "generator_extra_sources_for_rules", []
+ ),
+ "generator_supports_multiple_toolsets": getattr(
+ generator, "generator_supports_multiple_toolsets", False
+ ),
+ "generator_wants_static_library_dependencies_adjusted": getattr(
+ generator, "generator_wants_static_library_dependencies_adjusted", True
+ ),
+ "generator_wants_sorted_dependencies": getattr(
+ generator, "generator_wants_sorted_dependencies", False
+ ),
+ "generator_filelist_paths": getattr(
+ generator, "generator_filelist_paths", None
+ ),
+ }
+
+ # Process the input specific to this generator.
+ result = gyp.input.Load(
+ build_files,
+ default_variables,
+ includes[:],
+ depth,
+ generator_input_info,
+ check,
+ circular_check,
+ params["parallel"],
+ params["root_targets"],
+ )
+ return [generator] + result
+
+
+def NameValueListToDict(name_value_list):
+ """
+ Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
+ of the pairs. If a string is simply NAME, then the value in the dictionary
+ is set to True. If VALUE can be converted to an integer, it is.
+ """
+ result = {}
+ for item in name_value_list:
+ tokens = item.split("=", 1)
+ if len(tokens) == 2:
+ # If we can make it an int, use that, otherwise, use the string.
+ try:
+ token_value = int(tokens[1])
+ except ValueError:
+ token_value = tokens[1]
+ # Set the variable to the supplied value.
+ result[tokens[0]] = token_value
+ else:
+ # No value supplied, treat it as a boolean and set it.
+ result[tokens[0]] = True
+ return result
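+
+
+# Worked example (not part of the original source):
+#   NameValueListToDict(["OS=win", "component", "jobs=8"])
+# returns {"OS": "win", "component": True, "jobs": 8}; "jobs" is converted
+# to an int, and the bare "component" becomes True.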
+
+
+def ShlexEnv(env_name):
+ flags = os.environ.get(env_name, [])
+ if flags:
+ flags = shlex.split(flags)
+ return flags
+
+
+def FormatOpt(opt, value):
+ if opt.startswith("--"):
+ return f"{opt}={value}"
+ return opt + value
+
+
+def RegenerateAppendFlag(flag, values, predicate, env_name, options):
+ """Regenerate a list of command line flags, for an option of action='append'.
+
+ The |env_name|, if given, is checked in the environment and used to generate
+ an initial list of options, then the options that were specified on the
+ command line (given in |values|) are appended. This matches the handling of
+ environment variables and command line flags where command line flags override
+ the environment, while not requiring the environment to be set when the flags
+ are used again.
+ """
+ flags = []
+ if options.use_environment and env_name:
+ for flag_value in ShlexEnv(env_name):
+ value = FormatOpt(flag, predicate(flag_value))
+ if value in flags:
+ flags.remove(value)
+ flags.append(value)
+ if values:
+ for flag_value in values:
+ flags.append(FormatOpt(flag, predicate(flag_value)))
+ return flags
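+
+
+# Sketch of the behavior (assumed values, not from the original source): with
+# options.use_environment enabled, GYP_DEFINES="OS=linux" in the environment,
+# and -D foo=1 on the command line,
+#   RegenerateAppendFlag("-D", ["foo=1"], lambda v: v, "GYP_DEFINES", options)
+# yields ["-DOS=linux", "-Dfoo=1"]: environment values first, command-line
+# values appended after them.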
+
+
+def RegenerateFlags(options):
+ """Given a parsed options object, and taking the environment variables into
+ account, returns a list of flags that should regenerate an equivalent options
+    object (even in the absence of the environment variables).
+
+ Any path options will be normalized relative to depth.
+
+ The format flag is not included, as it is assumed the calling generator will
+ set that as appropriate.
+ """
+
+ def FixPath(path):
+ path = gyp.common.FixIfRelativePath(path, options.depth)
+ if not path:
+ return os.path.curdir
+ return path
+
+ def Noop(value):
+ return value
+
+ # We always want to ignore the environment when regenerating, to avoid
+ # duplicate or changed flags in the environment at the time of regeneration.
+ flags = ["--ignore-environment"]
+ for name, metadata in options._regeneration_metadata.items():
+ opt = metadata["opt"]
+ value = getattr(options, name)
+        value_predicate = FixPath if metadata["type"] == "path" else Noop
+ action = metadata["action"]
+ env_name = metadata["env_name"]
+ if action == "append":
+ flags.extend(
+ RegenerateAppendFlag(opt, value, value_predicate, env_name, options)
+ )
+ elif action in ("store", None): # None is a synonym for 'store'.
+ if value:
+ flags.append(FormatOpt(opt, value_predicate(value)))
+ elif options.use_environment and env_name and os.environ.get(env_name):
+ flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
+ elif action in ("store_true", "store_false"):
+ if (action == "store_true" and value) or (
+ action == "store_false" and not value
+ ):
+ flags.append(opt)
+ elif options.use_environment and env_name:
+ print(
+ "Warning: environment regeneration unimplemented "
+ "for %s flag %r env_name %r" % (action, opt, env_name),
+ file=sys.stderr,
+ )
+ else:
+ print(
+ "Warning: regeneration unimplemented for action %r "
+ "flag %r" % (action, opt),
+ file=sys.stderr,
+ )
+
+ return flags
+
+
+class RegeneratableOptionParser(argparse.ArgumentParser):
+ def __init__(self, usage):
+ self.__regeneratable_options = {}
+ argparse.ArgumentParser.__init__(self, usage=usage)
+
+ def add_argument(self, *args, **kw):
+ """Add an option to the parser.
+
+ This accepts the same arguments as ArgumentParser.add_argument, plus the
+ following:
+ regenerate: can be set to False to prevent this option from being included
+ in regeneration.
+ env_name: name of environment variable that additional values for this
+ option come from.
+      type: if set to 'path', tells the regenerator that the values of this
+        option need to be made relative to options.depth.
+ """
+ env_name = kw.pop("env_name", None)
+ if "dest" in kw and kw.pop("regenerate", True):
+ dest = kw["dest"]
+
+            # The path type is needed for regeneration; for argparse itself we
+            # can just treat it as a string.
+ type = kw.get("type")
+ if type == "path":
+ kw["type"] = str
+
+ self.__regeneratable_options[dest] = {
+ "action": kw.get("action"),
+ "type": type,
+ "env_name": env_name,
+ "opt": args[0],
+ }
+
+ argparse.ArgumentParser.add_argument(self, *args, **kw)
+
+ def parse_args(self, *args):
+ values, args = argparse.ArgumentParser.parse_known_args(self, *args)
+ values._regeneration_metadata = self.__regeneratable_options
+ return values, args
+
+
+def gyp_main(args):
+ my_name = os.path.basename(sys.argv[0])
+ usage = "usage: %(prog)s [options ...] [build_file ...]"
+
+    parser = RegeneratableOptionParser(usage=usage)
+ parser.add_argument(
+ "--build",
+ dest="configs",
+ action="append",
+ help="configuration for build after project generation",
+ )
+ parser.add_argument(
+ "--check", dest="check", action="store_true", help="check format of gyp files"
+ )
+ parser.add_argument(
+ "--config-dir",
+ dest="config_dir",
+ action="store",
+ env_name="GYP_CONFIG_DIR",
+ default=None,
+        help="The location for configuration files like include.gypi.",
+ )
+ parser.add_argument(
+ "-d",
+ "--debug",
+ dest="debug",
+ metavar="DEBUGMODE",
+ action="append",
+ default=[],
+ help="turn on a debugging "
+ 'mode for debugging GYP. Supported modes are "variables", '
+ '"includes" and "general" or "all" for all of them.',
+ )
+ parser.add_argument(
+ "-D",
+ dest="defines",
+ action="append",
+ metavar="VAR=VAL",
+ env_name="GYP_DEFINES",
+ help="sets variable VAR to value VAL",
+ )
+ parser.add_argument(
+ "--depth",
+ dest="depth",
+ metavar="PATH",
+ type="path",
+ help="set DEPTH gyp variable to a relative path to PATH",
+ )
+ parser.add_argument(
+ "-f",
+ "--format",
+ dest="formats",
+ action="append",
+ env_name="GYP_GENERATORS",
+ regenerate=False,
+ help="output formats to generate",
+ )
+ parser.add_argument(
+ "-G",
+ dest="generator_flags",
+ action="append",
+ default=[],
+ metavar="FLAG=VAL",
+ env_name="GYP_GENERATOR_FLAGS",
+ help="sets generator flag FLAG to VAL",
+ )
+ parser.add_argument(
+ "--generator-output",
+ dest="generator_output",
+ action="store",
+ default=None,
+ metavar="DIR",
+ type="path",
+ env_name="GYP_GENERATOR_OUTPUT",
+ help="puts generated build files under DIR",
+ )
+ parser.add_argument(
+ "--ignore-environment",
+ dest="use_environment",
+ action="store_false",
+ default=True,
+ regenerate=False,
+ help="do not read options from environment variables",
+ )
+ parser.add_argument(
+ "-I",
+ "--include",
+ dest="includes",
+ action="append",
+ metavar="INCLUDE",
+ type="path",
+ help="files to include in all loaded .gyp files",
+ )
+ # --no-circular-check disables the check for circular relationships between
+ # .gyp files. These relationships should not exist, but they've only been
+ # observed to be harmful with the Xcode generator. Chromium's .gyp files
+ # currently have some circular relationships on non-Mac platforms, so this
+ # option allows the strict behavior to be used on Macs and the lenient
+ # behavior to be used elsewhere.
+ # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
+ parser.add_argument(
+ "--no-circular-check",
+ dest="circular_check",
+ action="store_false",
+ default=True,
+ regenerate=False,
+ help="don't check for circular relationships between files",
+ )
+ parser.add_argument(
+ "--no-parallel",
+ action="store_true",
+ default=False,
+ help="Disable multiprocessing",
+ )
+ parser.add_argument(
+ "-S",
+ "--suffix",
+ dest="suffix",
+ default="",
+ help="suffix to add to generated files",
+ )
+ parser.add_argument(
+ "--toplevel-dir",
+ dest="toplevel_dir",
+ action="store",
+ default=None,
+ metavar="DIR",
+ type="path",
+ help="directory to use as the root of the source tree",
+ )
+ parser.add_argument(
+ "-R",
+ "--root-target",
+ dest="root_targets",
+ action="append",
+ metavar="TARGET",
+ help="include only TARGET and its deep dependencies",
+ )
+ parser.add_argument(
+ "-V",
+ "--version",
+ dest="version",
+ action="store_true",
+ help="Show the version and exit.",
+ )
+
+ options, build_files_arg = parser.parse_args(args)
+ if options.version:
+ import pkg_resources
+ print(f"v{pkg_resources.get_distribution('gyp-next').version}")
+ return 0
+ build_files = build_files_arg
+
+ # Set up the configuration directory (defaults to ~/.gyp)
+ if not options.config_dir:
+ home = None
+ home_dot_gyp = None
+ if options.use_environment:
+ home_dot_gyp = os.environ.get("GYP_CONFIG_DIR", None)
+ if home_dot_gyp:
+ home_dot_gyp = os.path.expanduser(home_dot_gyp)
+
+ if not home_dot_gyp:
+ home_vars = ["HOME"]
+ if sys.platform in ("cygwin", "win32"):
+ home_vars.append("USERPROFILE")
+ for home_var in home_vars:
+ home = os.getenv(home_var)
+ if home:
+ home_dot_gyp = os.path.join(home, ".gyp")
+ if not os.path.exists(home_dot_gyp):
+ home_dot_gyp = None
+ else:
+ break
+ else:
+ home_dot_gyp = os.path.expanduser(options.config_dir)
+
+ if home_dot_gyp and not os.path.exists(home_dot_gyp):
+ home_dot_gyp = None
+
+ if not options.formats:
+ # If no format was given on the command line, then check the env variable.
+ generate_formats = []
+ if options.use_environment:
+ generate_formats = os.environ.get("GYP_GENERATORS", [])
+ if generate_formats:
+ generate_formats = re.split(r"[\s,]", generate_formats)
+ if generate_formats:
+ options.formats = generate_formats
+ else:
+ # Nothing in the variable, default based on platform.
+ if sys.platform == "darwin":
+ options.formats = ["xcode"]
+ elif sys.platform in ("win32", "cygwin"):
+ options.formats = ["msvs"]
+ else:
+ options.formats = ["make"]
+
+ if not options.generator_output and options.use_environment:
+ g_o = os.environ.get("GYP_GENERATOR_OUTPUT")
+ if g_o:
+ options.generator_output = g_o
+
+ options.parallel = not options.no_parallel
+
+ for mode in options.debug:
+ gyp.debug[mode] = 1
+
+ # Do an extra check to avoid work when we're not debugging.
+ if DEBUG_GENERAL in gyp.debug:
+ DebugOutput(DEBUG_GENERAL, "running with these options:")
+ for option, value in sorted(options.__dict__.items()):
+ if option[0] == "_":
+ continue
+ if isinstance(value, str):
+ DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
+ else:
+ DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
+
+ if not build_files:
+ build_files = FindBuildFiles()
+ if not build_files:
+        raise GypError(
+            usage.replace("%(prog)s", my_name)
+            + f"\n\n{my_name}: error: no build_file"
+        )
+
+ # TODO(mark): Chromium-specific hack!
+ # For Chromium, the gyp "depth" variable should always be a relative path
+ # to Chromium's top-level "src" directory. If no depth variable was set
+ # on the command line, try to find a "src" directory by looking at the
+ # absolute path to each build file's directory. The first "src" component
+ # found will be treated as though it were the path used for --depth.
+ if not options.depth:
+ for build_file in build_files:
+ build_file_dir = os.path.abspath(os.path.dirname(build_file))
+ build_file_dir_components = build_file_dir.split(os.path.sep)
+ components_len = len(build_file_dir_components)
+ for index in range(components_len - 1, -1, -1):
+ if build_file_dir_components[index] == "src":
+ options.depth = os.path.sep.join(build_file_dir_components)
+ break
+ del build_file_dir_components[index]
+
+ # If the inner loop found something, break without advancing to another
+ # build file.
+ if options.depth:
+ break
+
+ if not options.depth:
+            raise GypError(
+                "Could not automatically locate src directory. This is "
+                "a temporary Chromium feature that will be removed. Use "
+                "--depth as a workaround."
+            )
+
+ # If toplevel-dir is not set, we assume that depth is the root of our source
+ # tree.
+ if not options.toplevel_dir:
+ options.toplevel_dir = options.depth
+
+ # -D on the command line sets variable defaults - D isn't just for define,
+ # it's for default. Perhaps there should be a way to force (-F?) a
+ # variable's value so that it can't be overridden by anything else.
+ cmdline_default_variables = {}
+ defines = []
+ if options.use_environment:
+ defines += ShlexEnv("GYP_DEFINES")
+ if options.defines:
+ defines += options.defines
+ cmdline_default_variables = NameValueListToDict(defines)
+ if DEBUG_GENERAL in gyp.debug:
+ DebugOutput(
+ DEBUG_GENERAL, "cmdline_default_variables: %s", cmdline_default_variables
+ )
+
+ # Set up includes.
+ includes = []
+
+ # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
+ # .gyp file that's loaded, before anything else is included.
+ if home_dot_gyp:
+ default_include = os.path.join(home_dot_gyp, "include.gypi")
+ if os.path.exists(default_include):
+ print("Using overrides found in " + default_include)
+ includes.append(default_include)
+
+ # Command-line --include files come after the default include.
+ if options.includes:
+ includes.extend(options.includes)
+
+ # Generator flags should be prefixed with the target generator since they
+ # are global across all generator runs.
+ gen_flags = []
+ if options.use_environment:
+ gen_flags += ShlexEnv("GYP_GENERATOR_FLAGS")
+ if options.generator_flags:
+ gen_flags += options.generator_flags
+ generator_flags = NameValueListToDict(gen_flags)
+    if DEBUG_GENERAL in gyp.debug:
+ DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
+
+ # Generate all requested formats (use a set in case we got one format request
+ # twice)
+ for format in set(options.formats):
+ params = {
+ "options": options,
+ "build_files": build_files,
+ "generator_flags": generator_flags,
+ "cwd": os.getcwd(),
+ "build_files_arg": build_files_arg,
+ "gyp_binary": sys.argv[0],
+ "home_dot_gyp": home_dot_gyp,
+ "parallel": options.parallel,
+ "root_targets": options.root_targets,
+ "target_arch": cmdline_default_variables.get("target_arch", ""),
+ }
+
+ # Start with the default variables from the command line.
+ [generator, flat_list, targets, data] = Load(
+ build_files,
+ format,
+ cmdline_default_variables,
+ includes,
+ options.depth,
+ params,
+ options.check,
+ options.circular_check,
+ )
+
+ # TODO(mark): Pass |data| for now because the generator needs a list of
+ # build files that came in. In the future, maybe it should just accept
+ # a list, and not the whole data dict.
+ # NOTE: flat_list is the flattened dependency graph specifying the order
+ # that targets may be built. Build systems that operate serially or that
+ # need to have dependencies defined before dependents reference them should
+ # generate targets in the order specified in flat_list.
+ generator.GenerateOutput(flat_list, targets, data, params)
+
+ if options.configs:
+ valid_configs = targets[flat_list[0]]["configurations"]
+ for conf in options.configs:
+ if conf not in valid_configs:
+ raise GypError("Invalid config specified via --build: %s" % conf)
+ generator.PerformBuild(data, options.configs, params)
+
+ # Done
+ return 0
+
+
+def main(args):
+ try:
+ return gyp_main(args)
+ except GypError as e:
+ sys.stderr.write("gyp: %s\n" % e)
+ return 1
+
+
# NOTE: setuptools-generated console_scripts call the function with no arguments.
+def script_main():
+ return main(sys.argv[1:])
+
+
+if __name__ == "__main__":
+ sys.exit(script_main())
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/common.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/common.py
new file mode 100644
index 0000000..d77adee
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/common.py
@@ -0,0 +1,661 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import errno
+import filecmp
+import os.path
+import re
+import tempfile
+import sys
+import subprocess
+
+from collections.abc import MutableSet
+
+
+# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
+# among other "problems".
+class memoize:
+ def __init__(self, func):
+ self.func = func
+ self.cache = {}
+
+ def __call__(self, *args):
+ try:
+ return self.cache[args]
+ except KeyError:
+ result = self.func(*args)
+ self.cache[args] = result
+ return result
+
+
+class GypError(Exception):
+ """Error class representing an error, which is to be presented
+ to the user. The main entry point will catch and display this.
+ """
+
+ pass
+
+
+def ExceptionAppend(e, msg):
+ """Append a message to the given exception's message."""
+ if not e.args:
+ e.args = (msg,)
+ elif len(e.args) == 1:
+ e.args = (str(e.args[0]) + " " + msg,)
+ else:
+ e.args = (str(e.args[0]) + " " + msg,) + e.args[1:]
+
+
+def FindQualifiedTargets(target, qualified_list):
+ """
+ Given a list of qualified targets, return the qualified targets for the
+ specified |target|.
+ """
+ return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
+
+
+def ParseQualifiedTarget(target):
+ # Splits a qualified target into a build file, target name and toolset.
+
+ # NOTE: rsplit is used to disambiguate the Windows drive letter separator.
+ target_split = target.rsplit(":", 1)
+ if len(target_split) == 2:
+ [build_file, target] = target_split
+ else:
+ build_file = None
+
+ target_split = target.rsplit("#", 1)
+ if len(target_split) == 2:
+ [target, toolset] = target_split
+ else:
+ toolset = None
+
+ return [build_file, target, toolset]
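+
+
+# For example (illustrative, not part of the original source):
+#   ParseQualifiedTarget("chrome/chrome.gyp:browser#host")
+# returns ["chrome/chrome.gyp", "browser", "host"], while the bare target
+# "browser" returns [None, "browser", None].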
+
+
+def ResolveTarget(build_file, target, toolset):
+ # This function resolves a target into a canonical form:
+ # - a fully defined build file, either absolute or relative to the current
+ # directory
+ # - a target name
+ # - a toolset
+ #
+ # build_file is the file relative to which 'target' is defined.
+ # target is the qualified target.
+ # toolset is the default toolset for that target.
+ [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
+
+ if parsed_build_file:
+ if build_file:
+ # If a relative path, parsed_build_file is relative to the directory
+ # containing build_file. If build_file is not in the current directory,
+ # parsed_build_file is not a usable path as-is. Resolve it by
+ # interpreting it as relative to build_file. If parsed_build_file is
+ # absolute, it is usable as a path regardless of the current directory,
+ # and os.path.join will return it as-is.
+ build_file = os.path.normpath(
+ os.path.join(os.path.dirname(build_file), parsed_build_file)
+ )
+            # Further (to handle cases like ../cwd), make it relative to cwd.
+ if not os.path.isabs(build_file):
+ build_file = RelativePath(build_file, ".")
+ else:
+ build_file = parsed_build_file
+
+ if parsed_toolset:
+ toolset = parsed_toolset
+
+ return [build_file, target, toolset]
+
+
+def BuildFile(fully_qualified_target):
+ # Extracts the build file from the fully qualified target.
+ return ParseQualifiedTarget(fully_qualified_target)[0]
+
+
+def GetEnvironFallback(var_list, default):
+ """Look up a key in the environment, with fallback to secondary keys
+ and finally falling back to a default value."""
+ for var in var_list:
+ if var in os.environ:
+ return os.environ[var]
+ return default
+
+
+def QualifiedTarget(build_file, target, toolset):
+ # "Qualified" means the file that a target was defined in and the target
+ # name, separated by a colon, suffixed by a # and the toolset name:
+ # /path/to/file.gyp:target_name#toolset
+ fully_qualified = build_file + ":" + target
+ if toolset:
+ fully_qualified = fully_qualified + "#" + toolset
+ return fully_qualified
+
+
+@memoize
+def RelativePath(path, relative_to, follow_path_symlink=True):
+ # Assuming both |path| and |relative_to| are relative to the current
+ # directory, returns a relative path that identifies path relative to
+ # relative_to.
+    # If |follow_path_symlink| is true (default) and |path| is a symlink, then
+    # this method returns a path to the real file represented by |path|. If it
+    # is false, this method returns a path to the symlink. If |path| is not a
+    # symlink, this option has no effect.
+
+    # Convert to normalized (and therefore absolute) paths.
+ if follow_path_symlink:
+ path = os.path.realpath(path)
+ else:
+ path = os.path.abspath(path)
+ relative_to = os.path.realpath(relative_to)
+
+ # On Windows, we can't create a relative path to a different drive, so just
+ # use the absolute path.
+ if sys.platform == "win32":
+ if (
+ os.path.splitdrive(path)[0].lower()
+ != os.path.splitdrive(relative_to)[0].lower()
+ ):
+ return path
+
+ # Split the paths into components.
+ path_split = path.split(os.path.sep)
+ relative_to_split = relative_to.split(os.path.sep)
+
+ # Determine how much of the prefix the two paths share.
+ prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
+
+ # Put enough ".." components to back up out of relative_to to the common
+ # prefix, and then append the part of path_split after the common prefix.
+ relative_split = [os.path.pardir] * (
+ len(relative_to_split) - prefix_len
+ ) + path_split[prefix_len:]
+
+    if not relative_split:
+ # The paths were the same.
+ return ""
+
+ # Turn it back into a string and we're done.
+ return os.path.join(*relative_split)
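+
+
+# Worked example (not part of the original source): with the current directory
+# at /home/user and no symlinks involved, RelativePath("out/Debug", "src")
+# resolves both arguments against the current directory and returns
+# "../out/Debug".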
+
+
+@memoize
+def InvertRelativePath(path, toplevel_dir=None):
+ """Given a path like foo/bar that is relative to toplevel_dir, return
+ the inverse relative path back to the toplevel_dir.
+
+ E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
+ should always produce the empty string, unless the path contains symlinks.
+ """
+ if not path:
+ return path
+ toplevel_dir = "." if toplevel_dir is None else toplevel_dir
+ return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
+
+
+def FixIfRelativePath(path, relative_to):
+ # Like RelativePath but returns |path| unchanged if it is absolute.
+ if os.path.isabs(path):
+ return path
+ return RelativePath(path, relative_to)
+
+
+def UnrelativePath(path, relative_to):
+ # Assuming that |relative_to| is relative to the current directory, and |path|
+ # is a path relative to the dirname of |relative_to|, returns a path that
+ # identifies |path| relative to the current directory.
+ rel_dir = os.path.dirname(relative_to)
+ return os.path.normpath(os.path.join(rel_dir, path))
+
+
+# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
+# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
+# and the documentation for various shells.
+
+# _quote is a pattern that should match any argument that needs to be quoted
+# with double-quotes by EncodePOSIXShellArgument. It matches the following
+# characters appearing anywhere in an argument:
+# \t, \n, space parameter separators
+# # comments
+# $ expansions (quoted to always expand within one argument)
+# % called out by IEEE 1003.1 XCU.2.2
+# & job control
+# ' quoting
+# (, ) subshell execution
+# *, ?, [ pathname expansion
+# ; command delimiter
+# <, >, | redirection
+# = assignment
+# {, } brace expansion (bash)
+# ~ tilde expansion
+# It also matches the empty string, because "" (or '') is the only way to
+# represent an empty string literal argument to a POSIX shell.
+#
+# This does not match the characters in _escape, because those need to be
+# backslash-escaped regardless of whether they appear in a double-quoted
+# string.
+_quote = re.compile("[\t\n #$%&'()*;<=>?[{|}~]|^$")
+
+# _escape is a pattern that should match any character that needs to be
+# escaped with a backslash, whether or not the argument matched the _quote
+# pattern. _escape is used with re.sub to backslash anything in _escape's
+# first match group, hence the (parentheses) in the regular expression.
+#
+# _escape matches the following characters appearing anywhere in an argument:
+# " to prevent POSIX shells from interpreting this character for quoting
+# \ to prevent POSIX shells from interpreting this character for escaping
+# ` to prevent POSIX shells from interpreting this character for command
+# substitution
+# Missing from this list is $, because the desired behavior of
+# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
+#
+# Also missing from this list is !, which bash will interpret as the history
+# expansion character when history is enabled. bash does not enable history
+# by default in non-interactive shells, so this is not thought to be a problem.
+# ! was omitted from this list because bash interprets "\!" as a literal string
+# including the backslash character (avoiding history expansion but retaining
+# the backslash), which would not be correct for argument encoding. Handling
+# this case properly would also be problematic because bash allows the history
+# character to be changed with the histchars shell variable. Fortunately,
+# as history is not enabled in non-interactive shells and
+# EncodePOSIXShellArgument is only expected to encode for non-interactive
+# shells, there is no room for error here by ignoring !.
+_escape = re.compile(r'(["\\`])')
+
+
+def EncodePOSIXShellArgument(argument):
+ """Encodes |argument| suitably for consumption by POSIX shells.
+
+ argument may be quoted and escaped as necessary to ensure that POSIX shells
+ treat the returned value as a literal representing the argument passed to
+ this function. Parameter (variable) expansions beginning with $ are allowed
+ to remain intact without escaping the $, to allow the argument to contain
+ references to variables to be expanded by the shell.
+ """
+
+ if not isinstance(argument, str):
+ argument = str(argument)
+
+ if _quote.search(argument):
+ quote = '"'
+ else:
+ quote = ""
+
+ encoded = quote + re.sub(_escape, r"\\\1", argument) + quote
+
+ return encoded
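+
+
+# Worked examples (not part of the original source):
+#   EncodePOSIXShellArgument("hello world")  ->  "hello world"   (quoted)
+#   EncodePOSIXShellArgument('a"b')          ->  a\"b            (escaped only)
+# The first contains a space, so it is double-quoted; the second contains a
+# double quote, which is backslash-escaped but needs no surrounding quotes.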
+
+
+def EncodePOSIXShellList(list):
+ """Encodes |list| suitably for consumption by POSIX shells.
+
+ Returns EncodePOSIXShellArgument for each item in list, and joins them
+ together using the space character as an argument separator.
+ """
+
+ encoded_arguments = []
+ for argument in list:
+ encoded_arguments.append(EncodePOSIXShellArgument(argument))
+ return " ".join(encoded_arguments)
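+
+
+# For example (not part of the original source),
+#   EncodePOSIXShellList(["echo", "hello world"])
+# returns the single string: echo "hello world"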
+
+
+def DeepDependencyTargets(target_dicts, roots):
+ """Returns the recursive list of target dependencies."""
+ dependencies = set()
+ pending = set(roots)
+ while pending:
+ # Pluck out one.
+ r = pending.pop()
+ # Skip if visited already.
+ if r in dependencies:
+ continue
+ # Add it.
+ dependencies.add(r)
+ # Add its children.
+ spec = target_dicts[r]
+ pending.update(set(spec.get("dependencies", [])))
+ pending.update(set(spec.get("dependencies_original", [])))
+ return list(dependencies - set(roots))
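+
+
+# Worked example (not part of the original source): given
+#   target_dicts = {"a": {"dependencies": ["b"]},
+#                   "b": {"dependencies": ["c"]},
+#                   "c": {}}
+# DeepDependencyTargets(target_dicts, ["a"]) returns ["b", "c"] (order is
+# unspecified because a set is used): everything reachable from the roots,
+# minus the roots themselves.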
+
+
+def BuildFileTargets(target_list, build_file):
+ """From a target_list, returns the subset from the specified build_file.
+ """
+ return [p for p in target_list if BuildFile(p) == build_file]
+
+
+def AllTargets(target_list, target_dicts, build_file):
+ """Returns all targets (direct and dependencies) for the specified build_file.
+ """
+ bftargets = BuildFileTargets(target_list, build_file)
+ deptargets = DeepDependencyTargets(target_dicts, bftargets)
+ return bftargets + deptargets
+
+
+def WriteOnDiff(filename):
+ """Write to a file only if the new contents differ.
+
+ Arguments:
+ filename: name of the file to potentially write to.
+ Returns:
+ A file like object which will write to temporary file and only overwrite
+ the target if it differs (on close).
+ """
+
+ class Writer:
+        """Wrapper around file which only overwrites the target if it differs."""
+
+ def __init__(self):
+            # On Cygwin, remove the "dir" argument: `C:`-prefixed paths are
+            # treated as relative there, so the current dir "/cygdrive/c/..."
+            # would be prepended to them, yielding a non-existent path such as
+            # "/cygdrive/c/<some folder>/C:\<my win style abs path>".
+            # For more details see:
+            # https://docs.python.org/2/library/tempfile.html#tempfile.mkstemp
+ base_temp_dir = "" if IsCygwin() else os.path.dirname(filename)
+ # Pick temporary file.
+ tmp_fd, self.tmp_path = tempfile.mkstemp(
+ suffix=".tmp",
+ prefix=os.path.split(filename)[1] + ".gyp.",
+ dir=base_temp_dir,
+ )
+ try:
+ self.tmp_file = os.fdopen(tmp_fd, "wb")
+ except Exception:
+ # Don't leave turds behind.
+ os.unlink(self.tmp_path)
+ raise
+
+ def __getattr__(self, attrname):
+ # Delegate everything else to self.tmp_file
+ return getattr(self.tmp_file, attrname)
+
+ def close(self):
+ try:
+ # Close tmp file.
+ self.tmp_file.close()
+ # Determine if different.
+ same = False
+ try:
+ same = filecmp.cmp(self.tmp_path, filename, False)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ if same:
+ # The new file is identical to the old one, just get rid of the new
+ # one.
+ os.unlink(self.tmp_path)
+ else:
+ # The new file is different from the old one,
+ # or there is no old one.
+ # Rename the new file to the permanent name.
+ #
+ # tempfile.mkstemp uses an overly restrictive mode, resulting in a
+ # file that can only be read by the owner, regardless of the umask.
+ # There's no reason to not respect the umask here,
+ # which means that an extra hoop is required
+ # to fetch it and reset the new file's mode.
+ #
+ # No way to get the umask without setting a new one? Set a safe one
+ # and then set it back to the old value.
+ umask = os.umask(0o77)
+ os.umask(umask)
+ os.chmod(self.tmp_path, 0o666 & ~umask)
+ if sys.platform == "win32" and os.path.exists(filename):
+ # NOTE: on windows (but not cygwin) rename will not replace an
+ # existing file, so it must be preceded with a remove.
+ # Sadly there is no way to make the switch atomic.
+ os.remove(filename)
+ os.rename(self.tmp_path, filename)
+ except Exception:
+ # Don't leave turds behind.
+ os.unlink(self.tmp_path)
+ raise
+
+ def write(self, s):
+ self.tmp_file.write(s.encode("utf-8"))
+
+ return Writer()
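+
+
+# Illustrative usage (hypothetical filename, not from the original source):
+#   writer = WriteOnDiff("foo.vcxproj")
+#   writer.write('<?xml version="1.0"?>')
+#   writer.close()  # only replaces foo.vcxproj if the content changed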
+
+
+def EnsureDirExists(path):
+ """Make sure the directory for |path| exists."""
+ try:
+ os.makedirs(os.path.dirname(path))
+ except OSError:
+ pass
+
+
+def GetFlavor(params):
+    """Returns |params.flavor| if it's set, otherwise the system's default
+    flavor."""
+ flavors = {
+ "cygwin": "win",
+ "win32": "win",
+ "darwin": "mac",
+ }
+
+ if "flavor" in params:
+ return params["flavor"]
+ if sys.platform in flavors:
+ return flavors[sys.platform]
+ if sys.platform.startswith("sunos"):
+ return "solaris"
+ if sys.platform.startswith(("dragonfly", "freebsd")):
+ return "freebsd"
+ if sys.platform.startswith("openbsd"):
+ return "openbsd"
+ if sys.platform.startswith("netbsd"):
+ return "netbsd"
+ if sys.platform.startswith("aix"):
+ return "aix"
+ if sys.platform.startswith(("os390", "zos")):
+ return "zos"
+ if sys.platform == "os400":
+ return "os400"
+
+ return "linux"
+
+
+def CopyTool(flavor, out_path, generator_flags={}):
+    """Finds (flock|mac|win)_tool.py in the gyp directory and copies it
+    to |out_path|."""
+ # aix and solaris just need flock emulation. mac and win use more complicated
+ # support scripts.
+ prefix = {
+ "aix": "flock",
+ "os400": "flock",
+ "solaris": "flock",
+ "mac": "mac",
+ "ios": "mac",
+ "win": "win",
+ }.get(flavor, None)
+ if not prefix:
+ return
+
+ # Slurp input file.
+ source_path = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), "%s_tool.py" % prefix
+ )
+ with open(source_path) as source_file:
+ source = source_file.readlines()
+
+ # Set custom header flags.
+ header = "# Generated by gyp. Do not edit.\n"
+ mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
+ if flavor == "mac" and mac_toolchain_dir:
+ header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" % mac_toolchain_dir
+
+ # Add header and write it out.
+ tool_path = os.path.join(out_path, "gyp-%s-tool" % prefix)
+ with open(tool_path, "w") as tool_file:
+ tool_file.write("".join([source[0], header] + source[1:]))
+
+ # Make file executable.
+ os.chmod(tool_path, 0o755)
+
+
+# From Alex Martelli,
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
+# ASPN: Python Cookbook: Remove duplicates from a sequence
+# First comment, dated 2001/10/13.
+# (Also in the printed Python Cookbook.)
+
+
+def uniquer(seq, idfun=lambda x: x):
+ seen = {}
+ result = []
+ for item in seq:
+ marker = idfun(item)
+ if marker in seen:
+ continue
+ seen[marker] = 1
+ result.append(item)
+ return result
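+
+
+# For example, uniquer([1, 2, 1, 3]) returns [1, 2, 3], keeping first
+# occurrences in order; passing idfun=str.lower would deduplicate strings
+# case-insensitively (illustrative, not part of the original source).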
+
+
+# Based on http://code.activestate.com/recipes/576694/.
+class OrderedSet(MutableSet):
+ def __init__(self, iterable=None):
+ self.end = end = []
+ end += [None, end, end] # sentinel node for doubly linked list
+ self.map = {} # key --> [key, prev, next]
+ if iterable is not None:
+ self |= iterable
+
+ def __len__(self):
+ return len(self.map)
+
+ def __contains__(self, key):
+ return key in self.map
+
+ def add(self, key):
+ if key not in self.map:
+ end = self.end
+ curr = end[1]
+ curr[2] = end[1] = self.map[key] = [key, curr, end]
+
+ def discard(self, key):
+ if key in self.map:
+ key, prev_item, next_item = self.map.pop(key)
+ prev_item[2] = next_item
+ next_item[1] = prev_item
+
+ def __iter__(self):
+ end = self.end
+ curr = end[2]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[2]
+
+ def __reversed__(self):
+ end = self.end
+ curr = end[1]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[1]
+
+ # The second argument is an addition that causes a pylint warning.
+ def pop(self, last=True): # pylint: disable=W0221
+ if not self:
+ raise KeyError("set is empty")
+ key = self.end[1][0] if last else self.end[2][0]
+ self.discard(key)
+ return key
+
+ def __repr__(self):
+ if not self:
+ return f"{self.__class__.__name__}()"
+ return f"{self.__class__.__name__}({list(self)!r})"
+
+ def __eq__(self, other):
+ if isinstance(other, OrderedSet):
+ return len(self) == len(other) and list(self) == list(other)
+ return set(self) == set(other)
+
+ # Extensions to the recipe.
+ def update(self, iterable):
+ for i in iterable:
+ if i not in self:
+ self.add(i)
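+
+
+# Illustrative usage (not part of the original source):
+#   s = OrderedSet(["b", "a", "b", "c"])
+#   list(s)   ->  ["b", "a", "c"]   (insertion order, duplicates dropped)
+#   s.pop()   ->  "c"               (pops from the end by default)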
+
+
+class CycleError(Exception):
+ """An exception raised when an unexpected cycle is detected."""
+
+ def __init__(self, nodes):
+ self.nodes = nodes
+
+ def __str__(self):
+ return "CycleError: cycle involving: " + str(self.nodes)
+
+
+def TopologicallySorted(graph, get_edges):
+ r"""Topologically sort based on a user provided edge definition.
+
+ Args:
+ graph: A list of node names.
+ get_edges: A function mapping from node name to a hashable collection
+ of node names which this node has outgoing edges to.
+ Returns:
+      A list containing all of the nodes in graph, in topological order.
+      It is assumed that calling get_edges once for each node and caching
+      the result is cheaper than repeatedly calling get_edges.
+ Raises:
+ CycleError in the event of a cycle.
+ Example:
+      graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
+      def GetEdges(node):
+        return re.findall(r'\$\(([^)]*)\)', graph[node])
+      print(TopologicallySorted(graph.keys(), GetEdges))
+      ==>
+      ['a', 'c', 'b']
+ """
+ get_edges = memoize(get_edges)
+ visited = set()
+ visiting = set()
+ ordered_nodes = []
+
+ def Visit(node):
+ if node in visiting:
+ raise CycleError(visiting)
+ if node in visited:
+ return
+ visited.add(node)
+ visiting.add(node)
+ for neighbor in get_edges(node):
+ Visit(neighbor)
+ visiting.remove(node)
+ ordered_nodes.insert(0, node)
+
+ for node in sorted(graph):
+ Visit(node)
+ return ordered_nodes
+
+
+def CrossCompileRequested():
+ # TODO: figure out how to not build extra host objects in the
+ # non-cross-compile case when this is enabled, and enable unconditionally.
+ return (
+ os.environ.get("GYP_CROSSCOMPILE")
+ or os.environ.get("AR_host")
+ or os.environ.get("CC_host")
+ or os.environ.get("CXX_host")
+ or os.environ.get("AR_target")
+ or os.environ.get("CC_target")
+ or os.environ.get("CXX_target")
+ )
+
+
+def IsCygwin():
+ try:
+ out = subprocess.Popen(
+ "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+ )
+ stdout = out.communicate()[0].decode("utf-8")
+ return "CYGWIN" in str(stdout)
+ except Exception:
+ return False
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/common_test.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
new file mode 100755
index 0000000..0534408
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the common.py file."""
+
+import gyp.common
+import unittest
+import sys
+
+
+class TestTopologicallySorted(unittest.TestCase):
+ def test_Valid(self):
+ """Test that sorting works on a valid graph with one possible order."""
+ graph = {
+ "a": ["b", "c"],
+ "b": [],
+ "c": ["d"],
+ "d": ["b"],
+ }
+
+ def GetEdge(node):
+ return tuple(graph[node])
+
+ self.assertEqual(
+ gyp.common.TopologicallySorted(graph.keys(), GetEdge), ["a", "c", "d", "b"]
+ )
+
+ def test_Cycle(self):
+ """Test that an exception is thrown on a cyclic graph."""
+ graph = {
+ "a": ["b"],
+ "b": ["c"],
+ "c": ["d"],
+ "d": ["a"],
+ }
+
+ def GetEdge(node):
+ return tuple(graph[node])
+
+ self.assertRaises(
+ gyp.common.CycleError, gyp.common.TopologicallySorted, graph.keys(), GetEdge
+ )
+
+
+class TestGetFlavor(unittest.TestCase):
+ """Test that gyp.common.GetFlavor works as intended"""
+
+ original_platform = ""
+
+ def setUp(self):
+ self.original_platform = sys.platform
+
+ def tearDown(self):
+ sys.platform = self.original_platform
+
+ def assertFlavor(self, expected, argument, param):
+ sys.platform = argument
+ self.assertEqual(expected, gyp.common.GetFlavor(param))
+
+ def test_platform_default(self):
+ self.assertFlavor("freebsd", "freebsd9", {})
+ self.assertFlavor("freebsd", "freebsd10", {})
+ self.assertFlavor("openbsd", "openbsd5", {})
+ self.assertFlavor("solaris", "sunos5", {})
+ self.assertFlavor("solaris", "sunos", {})
+ self.assertFlavor("linux", "linux2", {})
+ self.assertFlavor("linux", "linux3", {})
+ self.assertFlavor("linux", "linux", {})
+
+ def test_param(self):
+ self.assertFlavor("foobar", "linux2", {"flavor": "foobar"})
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
new file mode 100644
index 0000000..bda1a47
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
@@ -0,0 +1,165 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+import re
+import os
+import locale
+
+
+def XmlToString(content, encoding="utf-8", pretty=False):
+    """ Converts the structured content into an XML string.
+
+ Visual Studio files have a lot of pre-defined structures. This function makes
+ it easy to represent these structures as Python data structures, instead of
+ having to create a lot of function calls.
+
+ Each XML element of the content is represented as a list composed of:
+ 1. The name of the element, a string,
+ 2. The attributes of the element, a dictionary (optional), and
+ 3+. The content of the element, if any. Strings are simple text nodes and
+ lists are child elements.
+
+ Example 1:
+ <test/>
+ becomes
+ ['test']
+
+ Example 2:
+ <myelement a='value1' b='value2'>
+ <childtype>This is</childtype>
+ <childtype>it!</childtype>
+ </myelement>
+
+ becomes
+ ['myelement', {'a':'value1', 'b':'value2'},
+ ['childtype', 'This is'],
+ ['childtype', 'it!'],
+ ]
+
+ Args:
+ content: The structured content to be converted.
+ encoding: The encoding to report on the first XML line.
+ pretty: True if we want pretty printing with indents and new lines.
+
+ Returns:
+ The XML content as a string.
+ """
+ # We create a huge list of all the elements of the file.
+ xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
+ if pretty:
+ xml_parts.append("\n")
+ _ConstructContentList(xml_parts, content, pretty)
+
+ # Convert it to a string
+ return "".join(xml_parts)
+
+
+def _ConstructContentList(xml_parts, specification, pretty, level=0):
+ """ Appends the XML parts corresponding to the specification.
+
+ Args:
+ xml_parts: A list of XML parts to be appended to.
+ specification: The specification of the element. See EasyXml docs.
+ pretty: True if we want pretty printing with indents and new lines.
+ level: Indentation level.
+ """
+ # The first item in a specification is the name of the element.
+ if pretty:
+ indentation = " " * level
+ new_line = "\n"
+ else:
+ indentation = ""
+ new_line = ""
+ name = specification[0]
+ if not isinstance(name, str):
+ raise Exception(
+ "The first item of an EasyXml specification should be "
+ "a string. Specification was " + str(specification)
+ )
+ xml_parts.append(indentation + "<" + name)
+
+ # Optionally in second position is a dictionary of the attributes.
+ rest = specification[1:]
+ if rest and isinstance(rest[0], dict):
+ for at, val in sorted(rest[0].items()):
+ xml_parts.append(f' {at}="{_XmlEscape(val, attr=True)}"')
+ rest = rest[1:]
+ if rest:
+ xml_parts.append(">")
+ all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
+ multi_line = not all_strings
+ if multi_line and new_line:
+ xml_parts.append(new_line)
+ for child_spec in rest:
+ # If it's a string, append a text node.
+ # Otherwise recurse over that child definition
+ if isinstance(child_spec, str):
+ xml_parts.append(_XmlEscape(child_spec))
+ else:
+ _ConstructContentList(xml_parts, child_spec, pretty, level + 1)
+ if multi_line and indentation:
+ xml_parts.append(indentation)
+ xml_parts.append(f"</{name}>{new_line}")
+ else:
+ xml_parts.append("/>%s" % new_line)
+
+
+def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False,
+ win32=(sys.platform == "win32")):
+ """ Writes the XML content to disk, touching the file only if it has changed.
+
+ Args:
+ content: The structured content to be written.
+ path: Location of the file.
+ encoding: The encoding to report on the first line of the XML file.
+ pretty: True if we want pretty printing with indents and new lines.
+ """
+ xml_string = XmlToString(content, encoding, pretty)
+ if win32 and os.linesep != "\r\n":
+ xml_string = xml_string.replace("\n", "\r\n")
+
+ default_encoding = locale.getdefaultlocale()[1]
+ if default_encoding and default_encoding.upper() != encoding.upper():
+ xml_string = xml_string.encode(encoding)
+
+ # Get the old content
+ try:
+ with open(path) as file:
+ existing = file.read()
+ except OSError:
+ existing = None
+
+ # It has changed, write it
+ if existing != xml_string:
+ # Encode before the binary write: passing a str to a file opened in
+ # "wb" mode would raise a TypeError.
+ if isinstance(xml_string, str):
+ xml_string = xml_string.encode(encoding)
+ with open(path, "wb") as file:
+ file.write(xml_string)
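+
+# Illustrative usage (file name hypothetical): serialize a small structure
+# and touch the file only when the serialized XML differs from its current
+# contents.
+#
+#   WriteXmlIfChanged(["Project", ["Name", "demo"]], "demo.vcxproj",
+#                     pretty=True)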
+
+
+_xml_escape_map = {
+ '"': "&quot;",
+ "'": "&apos;",
+ "<": "&lt;",
+ ">": "&gt;",
+ "&": "&amp;",
+ "\n": "&#xA;",
+ "\r": "&#xD;",
+}
+
+
+_xml_escape_re = re.compile("(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
+
+
+def _XmlEscape(value, attr=False):
+ """ Escape a string for inclusion in XML."""
+
+ def replace(match):
+ m = match.string[match.start() : match.end()]
+ # don't replace single quotes in attrs
+ if attr and m == "'":
+ return m
+ return _xml_escape_map[m]
+
+ return _xml_escape_re.sub(replace, value)
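+
+# Illustrative behaviour of the escaping above: single quotes survive in
+# attribute context but are escaped in text context.
+#
+#   _XmlEscape("a'b\"c", attr=True)  # -> a'b&quot;c
+#   _XmlEscape("a'b\"c")             # -> a&apos;b&quot;c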
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
new file mode 100755
index 0000000..342f693
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the easy_xml.py file. """
+
+import gyp.easy_xml as easy_xml
+import unittest
+
+from io import StringIO
+
+
+class TestSequenceFunctions(unittest.TestCase):
+ def setUp(self):
+ self.stderr = StringIO()
+
+ def test_EasyXml_simple(self):
+ self.assertEqual(
+ easy_xml.XmlToString(["test"]),
+ '<?xml version="1.0" encoding="utf-8"?><test/>',
+ )
+
+ self.assertEqual(
+ easy_xml.XmlToString(["test"], encoding="Windows-1252"),
+ '<?xml version="1.0" encoding="Windows-1252"?><test/>',
+ )
+
+ def test_EasyXml_simple_with_attributes(self):
+ self.assertEqual(
+ easy_xml.XmlToString(["test2", {"a": "value1", "b": "value2"}]),
+ '<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>',
+ )
+
+ def test_EasyXml_escaping(self):
+ original = "<test>'\"\r&\nfoo"
+ converted = "&lt;test&gt;'&quot;&#xD;&amp;&#xA;foo"
+ converted_apos = converted.replace("'", "&apos;")
+ self.assertEqual(
+ easy_xml.XmlToString(["test3", {"a": original}, original]),
+ '<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>'
+ % (converted, converted_apos),
+ )
+
+ def test_EasyXml_pretty(self):
+ self.assertEqual(
+ easy_xml.XmlToString(
+ ["test3", ["GrandParent", ["Parent1", ["Child"]], ["Parent2"]]],
+ pretty=True,
+ ),
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ "<test3>\n"
+ " <GrandParent>\n"
+ " <Parent1>\n"
+ " <Child/>\n"
+ " </Parent1>\n"
+ " <Parent2/>\n"
+ " </GrandParent>\n"
+ "</test3>\n",
+ )
+
+ def test_EasyXml_complex(self):
+ # We want to create:
+ target = (
+ '<?xml version="1.0" encoding="utf-8"?>'
+ "<Project>"
+ '<PropertyGroup Label="Globals">'
+ "<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>"
+ "<Keyword>Win32Proj</Keyword>"
+ "<RootNamespace>automated_ui_tests</RootNamespace>"
+ "</PropertyGroup>"
+ '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
+ "<PropertyGroup "
+ "Condition=\"'$(Configuration)|$(Platform)'=="
+ '\'Debug|Win32\'" Label="Configuration">'
+ "<ConfigurationType>Application</ConfigurationType>"
+ "<CharacterSet>Unicode</CharacterSet>"
+ "</PropertyGroup>"
+ "</Project>"
+ )
+
+ xml = easy_xml.XmlToString(
+ [
+ "Project",
+ [
+ "PropertyGroup",
+ {"Label": "Globals"},
+ ["ProjectGuid", "{D2250C20-3A94-4FB9-AF73-11BC5B73884B}"],
+ ["Keyword", "Win32Proj"],
+ ["RootNamespace", "automated_ui_tests"],
+ ],
+ ["Import", {"Project": "$(VCTargetsPath)\\Microsoft.Cpp.props"}],
+ [
+ "PropertyGroup",
+ {
+ "Condition": "'$(Configuration)|$(Platform)'=='Debug|Win32'",
+ "Label": "Configuration",
+ },
+ ["ConfigurationType", "Application"],
+ ["CharacterSet", "Unicode"],
+ ],
+ ]
+ )
+ self.assertEqual(xml, target)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
new file mode 100755
index 0000000..0754aff
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""These functions are executed via gyp-flock-tool when using the Makefile
+generator. Used on systems that don't have a built-in flock."""
+
+import fcntl
+import os
+import struct
+import subprocess
+import sys
+
+
+def main(args):
+ executor = FlockTool()
+ executor.Dispatch(args)
+
+
+class FlockTool:
+ """This class emulates the 'flock' command."""
+
+ def Dispatch(self, args):
+ """Dispatches a string command to a method."""
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ method = "Exec%s" % self._CommandifyName(args[0])
+ getattr(self, method)(*args[1:])
+
+ def _CommandifyName(self, name_string):
+ """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
+ return name_string.title().replace("-", "")
+
+ def ExecFlock(self, lockfile, *cmd_list):
+ """Emulates the most basic behavior of Linux's flock(1)."""
+ # Rely on exception handling to report errors.
+ # Note that the stock python on SunOS has a bug
+ # where fcntl.flock(fd, LOCK_EX) always fails
+ # with EBADF, that's why we use this F_SETLK
+ # hack instead.
+ fd = os.open(lockfile, os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
+ if sys.platform.startswith("aix") or sys.platform == "os400":
+ # Python on AIX is compiled with LARGEFILE support, which changes the
+ # struct size.
+ op = struct.pack("hhIllqq", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
+ else:
+ op = struct.pack("hhllhhl", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
+ fcntl.fcntl(fd, fcntl.F_SETLK, op)
+ return subprocess.call(cmd_list)
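+
+ # Illustrative invocation (lock path and command hypothetical):
+ # Dispatch() maps the command name "flock" to ExecFlock, so the Makefile
+ # generator runs this tool as, e.g.,
+ #   gyp-flock-tool flock /tmp/link.lock <linker command>
+ # which takes the lock and then executes the command via subprocess.call.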
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
new file mode 100644
index 0000000..f15df00
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
@@ -0,0 +1,808 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
+the generator flag config_path) the path of a json file that dictates the files
+and targets to search for. The following keys are supported:
+files: list of paths (relative) of the files to search for.
+test_targets: unqualified target names to search for. Any target in this list
+that depends upon a file in |files| is output regardless of the type of target
+or chain of dependencies.
+additional_compile_targets: Unqualified targets to search for in addition to
+test_targets. Targets in the combined list that depend upon a file in |files|
+are not necessarily output. For example, if the target is of type none then the
+target is not output (but one of the descendants of the target will be).
+
+The following is output:
+error: only supplied if there is an error.
+compile_targets: minimal set of targets that directly or indirectly (for
+ targets of type none) depend on the files in |files| and is one of the
+ supplied targets or a target that one of the supplied targets depends on.
+ The expectation is this set of targets is passed into a build step. This list
+ always contains the output of test_targets as well.
+test_targets: set of targets from the supplied |test_targets| that either
+ directly or indirectly depend upon a file in |files|. This list is useful
+ if additional processing needs to be done for certain targets after the
+ build, such as running tests.
+status: outputs one of three values: none of the supplied files were found,
+ one of the include files changed so that it should be assumed everything
+ changed (in this case test_targets and compile_targets are not output) or at
+ least one file was found.
+invalid_targets: list of supplied targets that were not found.
+
+Example:
+Consider a graph like the following:
+  A       D
+ / \
+B   C
+A depends upon both B and C; A is of type none, and B and C are executables.
+D is an executable, has no dependencies and nothing depends on it.
+If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
+files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
+the following is output:
+|compile_targets| = ["B"] B must built as it depends upon the changed file b.cc
+and the supplied target A depends upon it. A is not output as a build_target
+as it is of type none with no rules and actions.
+|test_targets| = ["B"] B directly depends upon the change file b.cc.
+
+ Even though the file d.cc, which D depends upon, has changed, D is not output
+as it was not supplied by way of |additional_compile_targets| or |test_targets|.
+
+If the generator flag analyzer_output_path is specified, output is written
+there. Otherwise output is written to stdout.
+
+In Gyp the "all" target is shorthand for the root targets in the files passed
+to gyp. For example, if file "a.gyp" contains targets "a1" and
+"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
+on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
+Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
+ directly supplied to gyp. On the other hand, if both "a.gyp" and "b.gyp" are
+ supplied to gyp then the "all" target includes "b1" and "b2".
+"""
+
+
+import gyp.common
+import json
+import os
+import posixpath
+
+debug = False
+
+found_dependency_string = "Found dependency"
+no_dependency_string = "No dependencies"
+# Status when it should be assumed that everything has changed.
+all_changed_string = "Found dependency (all)"
+
+ # MatchStatus is used to indicate if and how a target depends upon the supplied
+# sources.
+# The target's sources contain one of the supplied paths.
+MATCH_STATUS_MATCHES = 1
+# The target has a dependency on another target that contains one of the
+# supplied paths.
+MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
+# The target's sources weren't in the supplied paths and none of the target's
+# dependencies depend upon a target that matched.
+MATCH_STATUS_DOESNT_MATCH = 3
+ # The target doesn't contain the source, but the dependent targets have not
+ # yet been visited to determine a more specific status.
+MATCH_STATUS_TBD = 4
+
+generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
+
+generator_wants_static_library_dependencies_adjusted = False
+
+generator_default_variables = {}
+for dirname in [
+ "INTERMEDIATE_DIR",
+ "SHARED_INTERMEDIATE_DIR",
+ "PRODUCT_DIR",
+ "LIB_DIR",
+ "SHARED_LIB_DIR",
+]:
+ generator_default_variables[dirname] = "!!!"
+
+for unused in [
+ "RULE_INPUT_PATH",
+ "RULE_INPUT_ROOT",
+ "RULE_INPUT_NAME",
+ "RULE_INPUT_DIRNAME",
+ "RULE_INPUT_EXT",
+ "EXECUTABLE_PREFIX",
+ "EXECUTABLE_SUFFIX",
+ "STATIC_LIB_PREFIX",
+ "STATIC_LIB_SUFFIX",
+ "SHARED_LIB_PREFIX",
+ "SHARED_LIB_SUFFIX",
+ "CONFIGURATION_NAME",
+]:
+ generator_default_variables[unused] = ""
+
+
+def _ToGypPath(path):
+ """Converts a path to the format used by gyp."""
+ if os.sep == "\\" and os.altsep == "/":
+ return path.replace("\\", "/")
+ return path
+
+
+def _ResolveParent(path, base_path_components):
+ """Resolves |path|, which starts with at least one '../'. Returns an empty
+ string if the path shouldn't be considered. See _AddSources() for a
+ description of |base_path_components|."""
+ depth = 0
+ while path.startswith("../"):
+ depth += 1
+ path = path[3:]
+ # Relative includes may go outside the source tree. For example, an action may
+ # have inputs in /usr/include, which are not in the source tree.
+ if depth > len(base_path_components):
+ return ""
+ if depth == len(base_path_components):
+ return path
+ return (
+ "/".join(base_path_components[0 : len(base_path_components) - depth])
+ + "/"
+ + path
+ )
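+
+# Illustrative example of the resolution above (paths hypothetical):
+#   _ResolveParent("../../icu/icu.gyp", ["chrome", "browser", "ui"])
+# strips two levels from "chrome/browser/ui" and returns "chrome/icu/icu.gyp".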
+
+
+def _AddSources(sources, base_path, base_path_components, result):
+ """Extracts valid sources from |sources| and adds them to |result|. Each
+ source file is relative to |base_path|, but may contain '..'. To make
+ resolving '..' easier |base_path_components| contains each of the
+ directories in |base_path|. Additionally each source may contain variables.
+ Such sources are ignored as it is assumed dependencies on them are expressed
+ and tracked by some other means."""
+ # NOTE: gyp paths are always posix style.
+ for source in sources:
+ if not len(source) or source.startswith("!!!") or source.startswith("$"):
+ continue
+ # variable expansion may lead to //.
+ org_source = source
+ source = source[0] + source[1:].replace("//", "/")
+ if source.startswith("../"):
+ source = _ResolveParent(source, base_path_components)
+ if len(source):
+ result.append(source)
+ continue
+ result.append(base_path + source)
+ if debug:
+ print("AddSource", org_source, result[len(result) - 1])
+
+
+def _ExtractSourcesFromAction(action, base_path, base_path_components, results):
+ if "inputs" in action:
+ _AddSources(action["inputs"], base_path, base_path_components, results)
+
+
+def _ToLocalPath(toplevel_dir, path):
+ """Converts |path| to a path relative to |toplevel_dir|."""
+ if path == toplevel_dir:
+ return ""
+ if path.startswith(toplevel_dir + "/"):
+ return path[len(toplevel_dir) + len("/") :]
+ return path
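+
+# Illustrative examples (paths hypothetical):
+#   _ToLocalPath("/src/chrome", "/src/chrome/foo/bar.cc")  # -> "foo/bar.cc"
+#   _ToLocalPath("/src/chrome", "/usr/include/x.h")  # -> "/usr/include/x.h"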
+
+
+def _ExtractSources(target, target_dict, toplevel_dir):
+ # |target| is either absolute or relative and in the format of the OS. Gyp
+ # source paths are always posix. Convert |target| to a posix path relative to
+ # |toplevel_dir_|. This is done to make it easy to build source paths.
+ base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
+ base_path_components = base_path.split("/")
+
+ # Add a trailing '/' so that _AddSources() can easily build paths.
+ if len(base_path):
+ base_path += "/"
+
+ if debug:
+ print("ExtractSources", target, base_path)
+
+ results = []
+ if "sources" in target_dict:
+ _AddSources(target_dict["sources"], base_path, base_path_components, results)
+ # Include the inputs from any actions. Any changes to these affect the
+ # resulting output.
+ if "actions" in target_dict:
+ for action in target_dict["actions"]:
+ _ExtractSourcesFromAction(action, base_path, base_path_components, results)
+ if "rules" in target_dict:
+ for rule in target_dict["rules"]:
+ _ExtractSourcesFromAction(rule, base_path, base_path_components, results)
+
+ return results
+
+
+class Target:
+ """Holds information about a particular target:
+ deps: set of Targets this Target depends upon. This is not recursive; it
+ holds only the direct dependencies.
+ match_status: one of the MatchStatus values.
+ back_deps: set of Targets that have a dependency on this Target.
+ visited: used during iteration to indicate whether we've visited this target.
+ This is used for two iterations, once in building the set of Targets and
+ again in _AddCompileTargets().
+ name: fully qualified name of the target.
+ requires_build: True if the target type is such that it needs to be built.
+ See _DoesTargetTypeRequireBuild for details.
+ added_to_compile_targets: used when determining if the target was added to the
+ set of targets that needs to be built.
+ in_roots: true if this target is a descendant of one of the root nodes.
+ is_executable: true if the type of target is executable.
+ is_static_library: true if the type of target is static_library.
+ is_or_has_linked_ancestor: true if the target does a link (eg executable), or
+ if there is a target in back_deps that does a link."""
+
+ def __init__(self, name):
+ self.deps = set()
+ self.match_status = MATCH_STATUS_TBD
+ self.back_deps = set()
+ self.name = name
+ # TODO(sky): I don't like hanging this off Target. This state is specific
+ # to certain functions and should be isolated there.
+ self.visited = False
+ self.requires_build = False
+ self.added_to_compile_targets = False
+ self.in_roots = False
+ self.is_executable = False
+ self.is_static_library = False
+ self.is_or_has_linked_ancestor = False
+
+
+class Config:
+ """Details what we're looking for
+ files: set of files to search for
+ targets: see file description for details."""
+
+ def __init__(self):
+ self.files = []
+ self.targets = set()
+ self.additional_compile_target_names = set()
+ self.test_target_names = set()
+
+ def Init(self, params):
+ """Initializes Config. This is a separate method as it raises an exception
+ if there is a parse error."""
+ generator_flags = params.get("generator_flags", {})
+ config_path = generator_flags.get("config_path", None)
+ if not config_path:
+ return
+ try:
+ with open(config_path) as f:
+ config = json.load(f)
+ except OSError:
+ raise Exception("Unable to open file " + config_path)
+ except ValueError as e:
+ raise Exception("Unable to parse config file " + config_path + str(e))
+ if not isinstance(config, dict):
+ raise Exception("config_path must be a JSON file containing a dictionary")
+ self.files = config.get("files", [])
+ self.additional_compile_target_names = set(
+ config.get("additional_compile_targets", [])
+ )
+ self.test_target_names = set(config.get("test_targets", []))
+
+
+def _WasBuildFileModified(build_file, data, files, toplevel_dir):
+ """Returns true if the build file |build_file| is either in |files| or
+ one of the files included by |build_file| is in |files|. |toplevel_dir| is
+ the root of the source tree."""
+ if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
+ if debug:
+ print("gyp file modified", build_file)
+ return True
+
+ # First element of included_files is the file itself.
+ if len(data[build_file]["included_files"]) <= 1:
+ return False
+
+ for include_file in data[build_file]["included_files"][1:]:
+ # |included_files| are relative to the directory of the |build_file|.
+ rel_include_file = _ToGypPath(
+ gyp.common.UnrelativePath(include_file, build_file)
+ )
+ if _ToLocalPath(toplevel_dir, rel_include_file) in files:
+ if debug:
+ print(
+ "included gyp file modified, gyp_file=",
+ build_file,
+ "included file=",
+ rel_include_file,
+ )
+ return True
+ return False
+
+
+def _GetOrCreateTargetByName(targets, target_name):
+ """Creates or returns the Target at targets[target_name]. If there is no
+ Target for |target_name| one is created. Returns a tuple of whether a new
+ Target was created and the Target."""
+ if target_name in targets:
+ return False, targets[target_name]
+ target = Target(target_name)
+ targets[target_name] = target
+ return True, target
+
+
+def _DoesTargetTypeRequireBuild(target_dict):
+ """Returns true if the target type is such that it needs to be built."""
+ # If a 'none' target has rules or actions we assume it requires a build.
+ return bool(
+ target_dict["type"] != "none"
+ or target_dict.get("actions")
+ or target_dict.get("rules")
+ )
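+
+# Illustrative example: a bare grouping target needs no build, but a 'none'
+# target with actions does.
+#   _DoesTargetTypeRequireBuild({"type": "none"})  # -> False
+#   _DoesTargetTypeRequireBuild({"type": "none", "actions": [{}]})  # -> True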
+
+
+def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build_files):
+ """Returns a tuple of the following:
+ . A dictionary mapping from fully qualified name to Target.
+ . A list of the targets that have a source file in |files|.
+ . Targets that constitute the 'all' target. See description at top of file
+ for details on the 'all' target.
+ This sets the |match_status| of the targets that contain any of the source
+ files in |files| to MATCH_STATUS_MATCHES.
+ |toplevel_dir| is the root of the source tree."""
+ # Maps from target name to Target.
+ name_to_target = {}
+
+ # Targets that matched.
+ matching_targets = []
+
+ # Queue of targets to visit.
+ targets_to_visit = target_list[:]
+
+ # Maps from build file to a boolean indicating whether the build file is in
+ # |files|.
+ build_file_in_files = {}
+
+ # Root targets across all files.
+ roots = set()
+
+ # Set of Targets in |build_files|.
+ build_file_targets = set()
+
+ while len(targets_to_visit) > 0:
+ target_name = targets_to_visit.pop()
+ created_target, target = _GetOrCreateTargetByName(name_to_target, target_name)
+ if created_target:
+ roots.add(target)
+ elif target.visited:
+ continue
+
+ target.visited = True
+ target.requires_build = _DoesTargetTypeRequireBuild(target_dicts[target_name])
+ target_type = target_dicts[target_name]["type"]
+ target.is_executable = target_type == "executable"
+ target.is_static_library = target_type == "static_library"
+ target.is_or_has_linked_ancestor = (
+ target_type == "executable" or target_type == "shared_library"
+ )
+
+ build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
+ if build_file not in build_file_in_files:
+ build_file_in_files[build_file] = _WasBuildFileModified(
+ build_file, data, files, toplevel_dir
+ )
+
+ if build_file in build_files:
+ build_file_targets.add(target)
+
+ # If a build file (or any of its included files) is modified we assume all
+ # targets in the file are modified.
+ if build_file_in_files[build_file]:
+ print("matching target from modified build file", target_name)
+ target.match_status = MATCH_STATUS_MATCHES
+ matching_targets.append(target)
+ else:
+ sources = _ExtractSources(
+ target_name, target_dicts[target_name], toplevel_dir
+ )
+ for source in sources:
+ if _ToGypPath(os.path.normpath(source)) in files:
+ print("target", target_name, "matches", source)
+ target.match_status = MATCH_STATUS_MATCHES
+ matching_targets.append(target)
+ break
+
+ # Add dependencies to visit as well as updating back pointers for deps.
+ for dep in target_dicts[target_name].get("dependencies", []):
+ targets_to_visit.append(dep)
+
+ created_dep_target, dep_target = _GetOrCreateTargetByName(
+ name_to_target, dep
+ )
+ if not created_dep_target:
+ roots.discard(dep_target)
+
+ target.deps.add(dep_target)
+ dep_target.back_deps.add(target)
+
+ return name_to_target, matching_targets, roots & build_file_targets
+
+
+def _GetUnqualifiedToTargetMapping(all_targets, to_find):
+ """Returns a tuple of the following:
+ . mapping (dictionary) from unqualified name to Target for all the
+ Targets in |to_find|.
+ . any target names not found. If this is empty all targets were found."""
+ result = {}
+ if not to_find:
+ return {}, []
+ to_find = set(to_find)
+ for target_name in all_targets.keys():
+ extracted = gyp.common.ParseQualifiedTarget(target_name)
+ if len(extracted) > 1 and extracted[1] in to_find:
+ to_find.remove(extracted[1])
+ result[extracted[1]] = all_targets[target_name]
+ if not to_find:
+ return result, []
+ return result, [x for x in to_find]
+
+
+def _DoesTargetDependOnMatchingTargets(target):
+ """Returns true if |target| or any of its dependencies is one of the
+ targets containing the files supplied as input to analyzer. This updates
+ |matches| of the Targets as it recurses.
+ target: the Target to look for."""
+ if target.match_status == MATCH_STATUS_DOESNT_MATCH:
+ return False
+ if (
+ target.match_status == MATCH_STATUS_MATCHES
+ or target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY
+ ):
+ return True
+ for dep in target.deps:
+ if _DoesTargetDependOnMatchingTargets(dep):
+ target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
+ print("\t", target.name, "matches by dep", dep.name)
+ return True
+ target.match_status = MATCH_STATUS_DOESNT_MATCH
+ return False
+
+
+def _GetTargetsDependingOnMatchingTargets(possible_targets):
+ """Returns the list of Targets in |possible_targets| that depend (either
+ directly on indirectly) on at least one of the targets containing the files
+ supplied as input to analyzer.
+ possible_targets: targets to search from."""
+ found = []
+ print("Targets that matched by dependency:")
+ for target in possible_targets:
+ if _DoesTargetDependOnMatchingTargets(target):
+ found.append(target)
+ return found
+
+
+def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
+ """Recurses through all targets that depend on |target|, adding all targets
+ that need to be built (and are in |roots|) to |result|.
+ roots: set of root targets.
+ add_if_no_ancestor: If true and there are no ancestors of |target| then add
+ |target| to |result|. |target| must still be in |roots|.
+ result: targets that need to be built are added here."""
+ if target.visited:
+ return
+
+ target.visited = True
+ target.in_roots = target in roots
+
+ for back_dep_target in target.back_deps:
+ _AddCompileTargets(back_dep_target, roots, False, result)
+ target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
+ target.in_roots |= back_dep_target.in_roots
+ target.is_or_has_linked_ancestor |= back_dep_target.is_or_has_linked_ancestor
+
+ # Always add 'executable' targets. Even though they may be built by other
+ # targets that depend upon them it makes detection of what is going to be
+ # built easier.
+ # And always add static_libraries that have no dependencies on them from
+ # linkables. This is necessary as the other dependencies on them may be
+ # static libraries themselves, which are not compile time dependencies.
+ if target.in_roots and (
+ target.is_executable
+ or (
+ not target.added_to_compile_targets
+ and (add_if_no_ancestor or target.requires_build)
+ )
+ or (
+ target.is_static_library
+ and add_if_no_ancestor
+ and not target.is_or_has_linked_ancestor
+ )
+ ):
+ print(
+ "\t\tadding to compile targets",
+ target.name,
+ "executable",
+ target.is_executable,
+ "added_to_compile_targets",
+ target.added_to_compile_targets,
+ "add_if_no_ancestor",
+ add_if_no_ancestor,
+ "requires_build",
+ target.requires_build,
+ "is_static_library",
+ target.is_static_library,
+ "is_or_has_linked_ancestor",
+ target.is_or_has_linked_ancestor,
+ )
+ result.add(target)
+ target.added_to_compile_targets = True
+
+
+def _GetCompileTargets(matching_targets, supplied_targets):
+ """Returns the set of Targets that require a build.
+ matching_targets: targets that changed and need to be built.
+ supplied_targets: set of targets supplied to analyzer to search from."""
+ result = set()
+ for target in matching_targets:
+ print("finding compile targets for match", target.name)
+ _AddCompileTargets(target, supplied_targets, True, result)
+ return result
+
+
+def _WriteOutput(params, **values):
+ """Writes the output, either to stdout or a file is specified."""
+ if "error" in values:
+ print("Error:", values["error"])
+ if "status" in values:
+ print(values["status"])
+ if "targets" in values:
+ values["targets"].sort()
+ print("Supplied targets that depend on changed files:")
+ for target in values["targets"]:
+ print("\t", target)
+ if "invalid_targets" in values:
+ values["invalid_targets"].sort()
+ print("The following targets were not found:")
+ for target in values["invalid_targets"]:
+ print("\t", target)
+ if "build_targets" in values:
+ values["build_targets"].sort()
+ print("Targets that require a build:")
+ for target in values["build_targets"]:
+ print("\t", target)
+ if "compile_targets" in values:
+ values["compile_targets"].sort()
+ print("Targets that need to be built:")
+ for target in values["compile_targets"]:
+ print("\t", target)
+ if "test_targets" in values:
+ values["test_targets"].sort()
+ print("Test targets:")
+ for target in values["test_targets"]:
+ print("\t", target)
+
+ output_path = params.get("generator_flags", {}).get("analyzer_output_path", None)
+ if not output_path:
+ print(json.dumps(values))
+ return
+ try:
+ with open(output_path, "w") as f:
+ f.write(json.dumps(values) + "\n")
+ except OSError as e:
+ print("Error writing to output file", output_path, str(e))
+
+
+def _WasGypIncludeFileModified(params, files):
+ """Returns true if one of the files in |files| is in the set of included
+ files."""
+ if params["options"].includes:
+ for include in params["options"].includes:
+ if _ToGypPath(os.path.normpath(include)) in files:
+ print("Include file modified, assuming all changed", include)
+ return True
+ return False
+
+
+def _NamesNotIn(names, mapping):
+ """Returns a list of the values in |names| that are not in |mapping|."""
+ return [name for name in names if name not in mapping]
+
+
+def _LookupTargets(names, mapping):
+ """Returns a list of the mapping[name] for each value in |names| that is in
+ |mapping|."""
+ return [mapping[name] for name in names if name in mapping]
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == "mac":
+ default_variables.setdefault("OS", "mac")
+ elif flavor == "win":
+ default_variables.setdefault("OS", "win")
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+ else:
+ operating_system = flavor
+ if flavor == "android":
+ operating_system = "linux" # Keep this legacy behavior for now.
+ default_variables.setdefault("OS", operating_system)
+
+
+class TargetCalculator:
+ """Calculates the matching test_targets and matching compile_targets."""
+
+ def __init__(
+ self,
+ files,
+ additional_compile_target_names,
+ test_target_names,
+ data,
+ target_list,
+ target_dicts,
+ toplevel_dir,
+ build_files,
+ ):
+ self._additional_compile_target_names = set(additional_compile_target_names)
+ self._test_target_names = set(test_target_names)
+ (
+ self._name_to_target,
+ self._changed_targets,
+ self._root_targets,
+ ) = _GenerateTargets(
+ data, target_list, target_dicts, toplevel_dir, frozenset(files), build_files
+ )
+ (
+ self._unqualified_mapping,
+ self.invalid_targets,
+ ) = _GetUnqualifiedToTargetMapping(
+ self._name_to_target, self._supplied_target_names_no_all()
+ )
+
+ def _supplied_target_names(self):
+ return self._additional_compile_target_names | self._test_target_names
+
+ def _supplied_target_names_no_all(self):
+ """Returns the supplied test targets without 'all'."""
+ result = self._supplied_target_names()
+ result.discard("all")
+ return result
+
+ def is_build_impacted(self):
+ """Returns true if the supplied files impact the build at all."""
+ return self._changed_targets
+
+ def find_matching_test_target_names(self):
+ """Returns the set of output test targets."""
+ assert self.is_build_impacted()
+ # Find the test targets first. 'all' is special cased to mean all the
+ # root targets. To deal with 'all', the supplied |test_targets| are
+ # expanded to include the root targets during lookup. If any of the root
+ # targets match, we remove it and replace it with 'all'.
+ test_target_names_no_all = set(self._test_target_names)
+ test_target_names_no_all.discard("all")
+ test_targets_no_all = _LookupTargets(
+ test_target_names_no_all, self._unqualified_mapping
+ )
+ test_target_names_contains_all = "all" in self._test_target_names
+ if test_target_names_contains_all:
+ test_targets = [
+ x for x in (set(test_targets_no_all) | set(self._root_targets))
+ ]
+ else:
+ test_targets = [x for x in test_targets_no_all]
+ print("supplied test_targets")
+ for target_name in self._test_target_names:
+ print("\t", target_name)
+ print("found test_targets")
+ for target in test_targets:
+ print("\t", target.name)
+ print("searching for matching test targets")
+ matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
+ matching_test_targets_contains_all = test_target_names_contains_all and set(
+ matching_test_targets
+ ) & set(self._root_targets)
+ if matching_test_targets_contains_all:
+ # Remove any of the targets for 'all' that were not explicitly supplied;
+ # 'all' is subsequently added to the matching names below.
+ matching_test_targets = [
+ x for x in (set(matching_test_targets) & set(test_targets_no_all))
+ ]
+ print("matched test_targets")
+ for target in matching_test_targets:
+ print("\t", target.name)
+ matching_target_names = [
+ gyp.common.ParseQualifiedTarget(target.name)[1]
+ for target in matching_test_targets
+ ]
+ if matching_test_targets_contains_all:
+ matching_target_names.append("all")
+ print("\tall")
+ return matching_target_names
+
+ def find_matching_compile_target_names(self):
+ """Returns the set of output compile targets."""
+ assert self.is_build_impacted()
+ # Compile targets are found by searching up from changed targets.
+ # Reset the visited status for _AddCompileTargets.
+ for target in self._name_to_target.values():
+ target.visited = False
+
+ supplied_targets = _LookupTargets(
+ self._supplied_target_names_no_all(), self._unqualified_mapping
+ )
+ if "all" in self._supplied_target_names():
+ supplied_targets = [
+ x for x in (set(supplied_targets) | set(self._root_targets))
+ ]
+ print("Supplied test_targets & compile_targets")
+ for target in supplied_targets:
+ print("\t", target.name)
+ print("Finding compile targets")
+ compile_targets = _GetCompileTargets(self._changed_targets, supplied_targets)
+ return [
+ gyp.common.ParseQualifiedTarget(target.name)[1]
+ for target in compile_targets
+ ]
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ """Called by gyp as the final stage. Outputs results."""
+ config = Config()
+ try:
+ config.Init(params)
+
+ if not config.files:
+ raise Exception(
+ "Must specify files to analyze via config_path generator " "flag"
+ )
+
+ toplevel_dir = _ToGypPath(os.path.abspath(params["options"].toplevel_dir))
+ if debug:
+ print("toplevel_dir", toplevel_dir)
+
+ if _WasGypIncludeFileModified(params, config.files):
+ result_dict = {
+ "status": all_changed_string,
+ "test_targets": list(config.test_target_names),
+ "compile_targets": list(
+ config.additional_compile_target_names | config.test_target_names
+ ),
+ }
+ _WriteOutput(params, **result_dict)
+ return
+
+ calculator = TargetCalculator(
+ config.files,
+ config.additional_compile_target_names,
+ config.test_target_names,
+ data,
+ target_list,
+ target_dicts,
+ toplevel_dir,
+ params["build_files"],
+ )
+ if not calculator.is_build_impacted():
+ result_dict = {
+ "status": no_dependency_string,
+ "test_targets": [],
+ "compile_targets": [],
+ }
+ if calculator.invalid_targets:
+ result_dict["invalid_targets"] = calculator.invalid_targets
+ _WriteOutput(params, **result_dict)
+ return
+
+ test_target_names = calculator.find_matching_test_target_names()
+ compile_target_names = calculator.find_matching_compile_target_names()
+ found_at_least_one_target = compile_target_names or test_target_names
+ result_dict = {
+ "test_targets": test_target_names,
+ "status": found_dependency_string
+ if found_at_least_one_target
+ else no_dependency_string,
+ "compile_targets": list(set(compile_target_names) | set(test_target_names)),
+ }
+ if calculator.invalid_targets:
+ result_dict["invalid_targets"] = calculator.invalid_targets
+ _WriteOutput(params, **result_dict)
+
+ except Exception as e:
+ _WriteOutput(params, error=str(e))
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
new file mode 100644
index 0000000..cdf1a48
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
@@ -0,0 +1,1173 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Notes:
+#
+# This generates makefiles suitable for inclusion into the Android build system
+# via an Android.mk file. It is based on make.py, the standard makefile
+# generator.
+#
+# The code below generates a separate .mk file for each target, but
+# all are sourced by the top-level GypAndroid.mk. This means that all
+# variables in .mk-files clobber one another, and furthermore that any
+# variables set potentially clash with other Android build system variables.
+# Try to avoid setting global variables where possible.
+
+
+import gyp
+import gyp.common
+import gyp.generator.make as make # Reuse global functions from make backend.
+import os
+import re
+import subprocess
+
+generator_default_variables = {
+ "OS": "android",
+ "EXECUTABLE_PREFIX": "",
+ "EXECUTABLE_SUFFIX": "",
+ "STATIC_LIB_PREFIX": "lib",
+ "SHARED_LIB_PREFIX": "lib",
+ "STATIC_LIB_SUFFIX": ".a",
+ "SHARED_LIB_SUFFIX": ".so",
+ "INTERMEDIATE_DIR": "$(gyp_intermediate_dir)",
+ "SHARED_INTERMEDIATE_DIR": "$(gyp_shared_intermediate_dir)",
+ "PRODUCT_DIR": "$(gyp_shared_intermediate_dir)",
+ "SHARED_LIB_DIR": "$(builddir)/lib.$(TOOLSET)",
+ "LIB_DIR": "$(obj).$(TOOLSET)",
+ "RULE_INPUT_ROOT": "%(INPUT_ROOT)s", # This gets expanded by Python.
+ "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s", # This gets expanded by Python.
+ "RULE_INPUT_PATH": "$(RULE_SOURCES)",
+ "RULE_INPUT_EXT": "$(suffix $<)",
+ "RULE_INPUT_NAME": "$(notdir $<)",
+ "CONFIGURATION_NAME": "$(GYP_CONFIGURATION)",
+}
+
+# Make supports multiple toolsets
+generator_supports_multiple_toolsets = True
+
+
+# Generator-specific gyp specs.
+generator_additional_non_configuration_keys = [
+ # Boolean to declare that this target does not want its name mangled.
+ "android_unmangled_name",
+ # Map of android build system variables to set.
+ "aosp_build_settings",
+]
+generator_additional_path_sections = []
+generator_extra_sources_for_rules = []
+
+
+ALL_MODULES_FOOTER = """\
+# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
+# all the included sub-makefiles. This is just here to clarify.
+gyp_all_modules:
+"""
+
+header = """\
+# This file is generated by gyp; do not edit.
+
+"""
+
+# Map gyp target types to Android module classes.
+MODULE_CLASSES = {
+ "static_library": "STATIC_LIBRARIES",
+ "shared_library": "SHARED_LIBRARIES",
+ "executable": "EXECUTABLES",
+}
+
+
+def IsCPPExtension(ext):
+ return make.COMPILABLE_EXTENSIONS.get(ext) == "cxx"
+
+
+def Sourceify(path):
+ """Convert a path to its source directory form. The Android backend does not
+ support options.generator_output, so this function is a noop."""
+ return path
+
+
+# Map from qualified target to path to output.
+# For Android, the target of these maps is a tuple ('static', 'modulename'),
+# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string,
+# since we link by module.
+target_outputs = {}
+# Map from qualified target to any linkable output. A subset
+# of target_outputs. E.g. when mybinary depends on liba, we want to
+# include liba in the linker line; when otherbinary depends on
+# mybinary, we just want to build mybinary first.
+target_link_deps = {}
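+
+# Illustrative shape of the two maps (qualified target and module names
+# hypothetical):
+#   target_outputs["foo/foo.gyp:libfoo#target"] = ("path", "out/libfoo.a")
+#   target_link_deps["foo/foo.gyp:libfoo#target"] = ("static", "foo_libfoo_gyp")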
+
+
+class AndroidMkWriter:
+ """AndroidMkWriter packages up the writing of one target-specific Android.mk.
+
+ Its only real entry point is Write(); the class mostly serves as a namespace.
+ """
+
+ def __init__(self, android_top_dir):
+ self.android_top_dir = android_top_dir
+
+ def Write(
+ self,
+ qualified_target,
+ relative_target,
+ base_path,
+ output_filename,
+ spec,
+ configs,
+ part_of_all,
+ write_alias_target,
+ sdk_version,
+ ):
+ """The main entry point: writes a .mk file for a single target.
+
+ Arguments:
+ qualified_target: target we're generating
+ relative_target: qualified target name relative to the root
+ base_path: path relative to source root we're building in, used to resolve
+ target-relative paths
+ output_filename: output .mk file name to write
+ spec, configs: gyp info
+ part_of_all: flag indicating this target is part of 'all'
+ write_alias_target: flag indicating whether to create short aliases for
+ this target
+ sdk_version: what to emit for LOCAL_SDK_VERSION in output
+ """
+ gyp.common.EnsureDirExists(output_filename)
+
+ self.fp = open(output_filename, "w")
+
+ self.fp.write(header)
+
+ self.qualified_target = qualified_target
+ self.relative_target = relative_target
+ self.path = base_path
+ self.target = spec["target_name"]
+ self.type = spec["type"]
+ self.toolset = spec["toolset"]
+
+ deps, link_deps = self.ComputeDeps(spec)
+
+ # Some of the generation below can add extra output, sources, or
+ # link dependencies. All of the out params of the functions that
+ # follow use names like extra_foo.
+ extra_outputs = []
+ extra_sources = []
+
+ self.android_class = MODULE_CLASSES.get(self.type, "GYP")
+ self.android_module = self.ComputeAndroidModule(spec)
+ (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec)
+ self.output = self.output_binary = self.ComputeOutput(spec)
+
+ # Standard header.
+ self.WriteLn("include $(CLEAR_VARS)\n")
+
+ # Module class and name.
+ self.WriteLn("LOCAL_MODULE_CLASS := " + self.android_class)
+ self.WriteLn("LOCAL_MODULE := " + self.android_module)
+ # Only emit LOCAL_MODULE_STEM if it's different from LOCAL_MODULE.
+ # The library module classes fail if the stem is set. ComputeOutputParts
+ # makes sure that stem == modulename in these cases.
+ if self.android_stem != self.android_module:
+ self.WriteLn("LOCAL_MODULE_STEM := " + self.android_stem)
+ self.WriteLn("LOCAL_MODULE_SUFFIX := " + self.android_suffix)
+ if self.toolset == "host":
+ self.WriteLn("LOCAL_IS_HOST_MODULE := true")
+ self.WriteLn("LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)")
+ elif sdk_version > 0:
+ self.WriteLn(
+ "LOCAL_MODULE_TARGET_ARCH := " "$(TARGET_$(GYP_VAR_PREFIX)ARCH)"
+ )
+ self.WriteLn("LOCAL_SDK_VERSION := %s" % sdk_version)
+
+ # Grab output directories; needed for Actions and Rules.
+ if self.toolset == "host":
+ self.WriteLn(
+ "gyp_intermediate_dir := "
+ "$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))"
+ )
+ else:
+ self.WriteLn(
+ "gyp_intermediate_dir := "
+ "$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))"
+ )
+ self.WriteLn(
+ "gyp_shared_intermediate_dir := "
+ "$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))"
+ )
+ self.WriteLn()
+
+ # List files this target depends on so that actions/rules/copies/sources
+ # can depend on the list.
+ # TODO: doesn't pull in things through transitive link deps; needed?
+ target_dependencies = [x[1] for x in deps if x[0] == "path"]
+ self.WriteLn("# Make sure our deps are built first.")
+ self.WriteList(
+ target_dependencies, "GYP_TARGET_DEPENDENCIES", local_pathify=True
+ )
+
+ # Actions must come first, since they can generate more OBJs for use below.
+ if "actions" in spec:
+ self.WriteActions(spec["actions"], extra_sources, extra_outputs)
+
+ # Rules must be early like actions.
+ if "rules" in spec:
+ self.WriteRules(spec["rules"], extra_sources, extra_outputs)
+
+ if "copies" in spec:
+ self.WriteCopies(spec["copies"], extra_outputs)
+
+ # GYP generated outputs.
+ self.WriteList(extra_outputs, "GYP_GENERATED_OUTPUTS", local_pathify=True)
+
+ # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend
+ # on both our dependency targets and our generated files.
+ self.WriteLn("# Make sure our deps and generated files are built first.")
+ self.WriteLn(
+ "LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) "
+ "$(GYP_GENERATED_OUTPUTS)"
+ )
+ self.WriteLn()
+
+ # Sources.
+ if spec.get("sources", []) or extra_sources:
+ self.WriteSources(spec, configs, extra_sources)
+
+ self.WriteTarget(
+ spec, configs, deps, link_deps, part_of_all, write_alias_target
+ )
+
+ # Update global list of target outputs, used in dependency tracking.
+ target_outputs[qualified_target] = ("path", self.output_binary)
+
+ # Update global list of link dependencies.
+ if self.type == "static_library":
+ target_link_deps[qualified_target] = ("static", self.android_module)
+ elif self.type == "shared_library":
+ target_link_deps[qualified_target] = ("shared", self.android_module)
+
+ self.fp.close()
+ return self.android_module
+
+ def WriteActions(self, actions, extra_sources, extra_outputs):
+ """Write Makefile code for any 'actions' from the gyp input.
+
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ actions (used to make other pieces dependent on these
+ actions)
+ """
+ for action in actions:
+ name = make.StringToMakefileVariable(
+ "{}_{}".format(self.relative_target, action["action_name"])
+ )
+ self.WriteLn('### Rules for action "%s":' % action["action_name"])
+ inputs = action["inputs"]
+ outputs = action["outputs"]
+
+ # Build up a list of outputs.
+ # Collect the output dirs we'll need.
+ dirs = set()
+ for out in outputs:
+ if not out.startswith("$"):
+ print(
+ 'WARNING: Action for target "%s" writes output to local path '
+ '"%s".' % (self.target, out)
+ )
+ dir = os.path.split(out)[0]
+ if dir:
+ dirs.add(dir)
+ if int(action.get("process_outputs_as_sources", False)):
+ extra_sources += outputs
+
+ # Prepare the actual command.
+ command = gyp.common.EncodePOSIXShellList(action["action"])
+ if "message" in action:
+ quiet_cmd = "Gyp action: %s ($@)" % action["message"]
+ else:
+ quiet_cmd = "Gyp action: %s ($@)" % name
+ if len(dirs) > 0:
+ command = "mkdir -p %s" % " ".join(dirs) + "; " + command
+
+ cd_action = "cd $(gyp_local_path)/%s; " % self.path
+ command = cd_action + command
+
+ # The makefile rules are all relative to the top dir, but the gyp actions
+ # are defined relative to their containing dir. This replaces the gyp_*
+ # variables for the action rule with an absolute version so that the
+ # output goes in the right place.
+ # Only write the gyp_* rules for the "primary" output (:1);
+ # it's superfluous for the "extra outputs", and this avoids accidentally
+ # writing duplicate dummy rules for those outputs.
+ main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
+ self.WriteLn("%s: gyp_local_path := $(LOCAL_PATH)" % main_output)
+ self.WriteLn("%s: gyp_var_prefix := $(GYP_VAR_PREFIX)" % main_output)
+ self.WriteLn(
+ "%s: gyp_intermediate_dir := "
+ "$(abspath $(gyp_intermediate_dir))" % main_output
+ )
+ self.WriteLn(
+ "%s: gyp_shared_intermediate_dir := "
+ "$(abspath $(gyp_shared_intermediate_dir))" % main_output
+ )
+
+ # Android's envsetup.sh adds a number of directories to the path including
+ # the built host binary directory. This causes actions/rules invoked by
+ # gyp to sometimes use these instead of system versions, e.g. bison.
+ # The built host binaries may not be suitable, and can cause errors.
+ # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable
+ # set by envsetup.
+ self.WriteLn(
+ "%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))"
+ % main_output
+ )
+
+ # Don't allow spaces in input/output filenames, but make an exception for
+ # filenames which start with '$(' since it's okay for there to be spaces
+ # inside of make function/macro invocations.
+ for input in inputs:
+ if not input.startswith("$(") and " " in input:
+ raise gyp.common.GypError(
+ 'Action input filename "%s" in target %s contains a space'
+ % (input, self.target)
+ )
+ for output in outputs:
+ if not output.startswith("$(") and " " in output:
+ raise gyp.common.GypError(
+ 'Action output filename "%s" in target %s contains a space'
+ % (output, self.target)
+ )
+
+ self.WriteLn(
+ "%s: %s $(GYP_TARGET_DEPENDENCIES)"
+ % (main_output, " ".join(map(self.LocalPathify, inputs)))
+ )
+ self.WriteLn('\t@echo "%s"' % quiet_cmd)
+ self.WriteLn("\t$(hide)%s\n" % command)
+ for output in outputs[1:]:
+ # Make each output depend on the main output, with an empty command
+ # to force make to notice that the mtime has changed.
+ self.WriteLn(f"{self.LocalPathify(output)}: {main_output} ;")
+
+ extra_outputs += outputs
+ self.WriteLn()
+
+ self.WriteLn()
+
+ def WriteRules(self, rules, extra_sources, extra_outputs):
+ """Write Makefile code for any 'rules' from the gyp input.
+
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ rules (used to make other pieces dependent on these rules)
+ """
+ if len(rules) == 0:
+ return
+
+ for rule in rules:
+ if len(rule.get("rule_sources", [])) == 0:
+ continue
+ name = make.StringToMakefileVariable(
+ "{}_{}".format(self.relative_target, rule["rule_name"])
+ )
+ self.WriteLn('\n### Generated for rule "%s":' % name)
+ self.WriteLn('# "%s":' % rule)
+
+ inputs = rule.get("inputs")
+ for rule_source in rule.get("rule_sources", []):
+ (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
+ (rule_source_root, rule_source_ext) = os.path.splitext(
+ rule_source_basename
+ )
+
+ outputs = [
+ self.ExpandInputRoot(out, rule_source_root, rule_source_dirname)
+ for out in rule["outputs"]
+ ]
+
+ dirs = set()
+ for out in outputs:
+ if not out.startswith("$"):
+ print(
+ "WARNING: Rule for target %s writes output to local path %s"
+ % (self.target, out)
+ )
+ dir = os.path.dirname(out)
+ if dir:
+ dirs.add(dir)
+ extra_outputs += outputs
+ if int(rule.get("process_outputs_as_sources", False)):
+ extra_sources.extend(outputs)
+
+ components = []
+ for component in rule["action"]:
+ component = self.ExpandInputRoot(
+ component, rule_source_root, rule_source_dirname
+ )
+ if "$(RULE_SOURCES)" in component:
+ component = component.replace("$(RULE_SOURCES)", rule_source)
+ components.append(component)
+
+ command = gyp.common.EncodePOSIXShellList(components)
+ cd_action = "cd $(gyp_local_path)/%s; " % self.path
+ command = cd_action + command
+ if dirs:
+ command = "mkdir -p %s" % " ".join(dirs) + "; " + command
+
+ # We set up a rule to build the first output, and then set up
+ # a rule for each additional output to depend on the first.
+ # Materialize a list (map() returns a non-subscriptable iterator on
+ # Python 3) so the first output can be indexed below.
+ outputs = [self.LocalPathify(output) for output in outputs]
+ main_output = outputs[0]
+ self.WriteLn("%s: gyp_local_path := $(LOCAL_PATH)" % main_output)
+ self.WriteLn("%s: gyp_var_prefix := $(GYP_VAR_PREFIX)" % main_output)
+ self.WriteLn(
+ "%s: gyp_intermediate_dir := "
+ "$(abspath $(gyp_intermediate_dir))" % main_output
+ )
+ self.WriteLn(
+ "%s: gyp_shared_intermediate_dir := "
+ "$(abspath $(gyp_shared_intermediate_dir))" % main_output
+ )
+
+ # See explanation in WriteActions.
+ self.WriteLn(
+ "%s: export PATH := "
+ "$(subst $(ANDROID_BUILD_PATHS),,$(PATH))" % main_output
+ )
+
+ main_output_deps = self.LocalPathify(rule_source)
+ if inputs:
+ main_output_deps += " "
+ main_output_deps += " ".join([self.LocalPathify(f) for f in inputs])
+
+ self.WriteLn(
+ "%s: %s $(GYP_TARGET_DEPENDENCIES)"
+ % (main_output, main_output_deps)
+ )
+ self.WriteLn("\t%s\n" % command)
+ for output in outputs[1:]:
+ # Make each output depend on the main output, with an empty command
+ # to force make to notice that the mtime has changed.
+ self.WriteLn(f"{output}: {main_output} ;")
+ self.WriteLn()
+
+ self.WriteLn()
+
+ def WriteCopies(self, copies, extra_outputs):
+ """Write Makefile code for any 'copies' from the gyp input.
+
+ extra_outputs: a list that will be filled in with any outputs of this action
+ (used to make other pieces dependent on this action)
+ """
+ self.WriteLn("### Generated for copy rule.")
+
+ variable = make.StringToMakefileVariable(self.relative_target + "_copies")
+ outputs = []
+ for copy in copies:
+ for path in copy["files"]:
+ # The Android build system does not allow generation of files into the
+ # source tree. The destination should start with a variable, which will
+ # typically be $(gyp_intermediate_dir) or
+ # $(gyp_shared_intermediate_dir). Note that we can't use an assertion
+ # because some of the gyp tests depend on this.
+ if not copy["destination"].startswith("$"):
+ print(
+ "WARNING: Copy rule for target %s writes output to "
+ "local path %s" % (self.target, copy["destination"])
+ )
+
+ # LocalPathify() calls normpath, stripping trailing slashes.
+ path = Sourceify(self.LocalPathify(path))
+ filename = os.path.split(path)[1]
+ output = Sourceify(
+ self.LocalPathify(os.path.join(copy["destination"], filename))
+ )
+
+ self.WriteLn(f"{output}: {path} $(GYP_TARGET_DEPENDENCIES) | $(ACP)")
+ self.WriteLn("\t@echo Copying: $@")
+ self.WriteLn("\t$(hide) mkdir -p $(dir $@)")
+ self.WriteLn("\t$(hide) $(ACP) -rpf $< $@")
+ self.WriteLn()
+ outputs.append(output)
+ self.WriteLn(
+ "{} = {}".format(variable, " ".join(map(make.QuoteSpaces, outputs)))
+ )
+ extra_outputs.append("$(%s)" % variable)
+ self.WriteLn()
+
+ def WriteSourceFlags(self, spec, configs):
+ """Write out the flags and include paths used to compile source files for
+ the current target.
+
+ Args:
+ spec, configs: input from gyp.
+ """
+ for configname, config in sorted(configs.items()):
+ extracted_includes = []
+
+ self.WriteLn("\n# Flags passed to both C and C++ files.")
+ cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
+ config.get("cflags", []) + config.get("cflags_c", [])
+ )
+ extracted_includes.extend(includes_from_cflags)
+ self.WriteList(cflags, "MY_CFLAGS_%s" % configname)
+
+ self.WriteList(
+ config.get("defines"),
+ "MY_DEFS_%s" % configname,
+ prefix="-D",
+ quoter=make.EscapeCppDefine,
+ )
+
+ self.WriteLn("\n# Include paths placed before CFLAGS/CPPFLAGS")
+ includes = list(config.get("include_dirs", []))
+ includes.extend(extracted_includes)
+ includes = map(Sourceify, map(self.LocalPathify, includes))
+ includes = self.NormalizeIncludePaths(includes)
+ self.WriteList(includes, "LOCAL_C_INCLUDES_%s" % configname)
+
+ self.WriteLn("\n# Flags passed to only C++ (and not C) files.")
+ self.WriteList(config.get("cflags_cc"), "LOCAL_CPPFLAGS_%s" % configname)
+
+ self.WriteLn(
+ "\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) "
+ "$(MY_DEFS_$(GYP_CONFIGURATION))"
+ )
+ # Undefine ANDROID for host modules
+ # TODO: the source code should not use the macro ANDROID to tell whether
+ # it's a host or target module.
+ if self.toolset == "host":
+ self.WriteLn("# Undefine ANDROID for host modules")
+ self.WriteLn("LOCAL_CFLAGS += -UANDROID")
+ self.WriteLn(
+ "LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) "
+ "$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))"
+ )
+ self.WriteLn("LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))")
+ # Android uses separate flags for assembly file invocations, but gyp expects
+ # the same CFLAGS to be applied:
+ self.WriteLn("LOCAL_ASFLAGS := $(LOCAL_CFLAGS)")
+
+ def WriteSources(self, spec, configs, extra_sources):
+ """Write Makefile code for any 'sources' from the gyp input.
+ These are source files necessary to build the current target.
+ We need to handle shared_intermediate directory source files as
+ a special case by copying them to the intermediate directory and
+ treating them as generated sources. Otherwise the Android build
+ rules won't pick them up.
+
+ Args:
+ spec, configs: input from gyp.
+ extra_sources: Sources generated from Actions or Rules.
+ """
+ # Use lists rather than filter() iterators: these sequences are
+ # concatenated and re-iterated below, which Python 3 iterators
+ # would not support.
+ sources = [x for x in spec.get("sources", []) if make.Compilable(x)]
+ generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
+ extra_sources = [x for x in extra_sources if make.Compilable(x)]
+
+ # Determine and output the C++ extension used by these sources.
+ # We simply find the first C++ file and use that extension.
+ all_sources = sources + extra_sources
+ local_cpp_extension = ".cpp"
+ for source in all_sources:
+ (root, ext) = os.path.splitext(source)
+ if IsCPPExtension(ext):
+ local_cpp_extension = ext
+ break
+ if local_cpp_extension != ".cpp":
+ self.WriteLn("LOCAL_CPP_EXTENSION := %s" % local_cpp_extension)
+
+ # We need to move any non-generated sources that are coming from the
+ # shared intermediate directory out of LOCAL_SRC_FILES and put them
+ # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files
+ # that don't match our local_cpp_extension, since Android will only
+ # generate Makefile rules for a single LOCAL_CPP_EXTENSION.
+ local_files = []
+ for source in sources:
+ (root, ext) = os.path.splitext(source)
+ if "$(gyp_shared_intermediate_dir)" in source:
+ extra_sources.append(source)
+ elif "$(gyp_intermediate_dir)" in source:
+ extra_sources.append(source)
+ elif IsCPPExtension(ext) and ext != local_cpp_extension:
+ extra_sources.append(source)
+ else:
+ local_files.append(os.path.normpath(os.path.join(self.path, source)))
+
+ # For any generated source, if it is coming from the shared intermediate
+ # directory then we add a Make rule to copy them to the local intermediate
+ # directory first. This is because the Android LOCAL_GENERATED_SOURCES
+ # must be in the local module intermediate directory for the compile rules
+ # to work properly. If the file has the wrong C++ extension, then we add
+ # a rule to copy that to intermediates and use the new version.
+ final_generated_sources = []
+ # If a source file gets copied, we still need to add the original source
+ # directory as a header search path, because GCC by default searches for
+ # headers in the directory that contains the source file.
+ origin_src_dirs = []
+ for source in extra_sources:
+ local_file = source
+ if "$(gyp_intermediate_dir)/" not in local_file:
+ basename = os.path.basename(local_file)
+ local_file = "$(gyp_intermediate_dir)/" + basename
+ (root, ext) = os.path.splitext(local_file)
+ if IsCPPExtension(ext) and ext != local_cpp_extension:
+ local_file = root + local_cpp_extension
+ if local_file != source:
+ self.WriteLn(f"{local_file}: {self.LocalPathify(source)}")
+ self.WriteLn("\tmkdir -p $(@D); cp $< $@")
+ origin_src_dirs.append(os.path.dirname(source))
+ final_generated_sources.append(local_file)
+
+ # We add back in all of the non-compilable stuff to make sure that the
+ # make rules have dependencies on them.
+ final_generated_sources.extend(generated_not_sources)
+ self.WriteList(final_generated_sources, "LOCAL_GENERATED_SOURCES")
+
+ origin_src_dirs = gyp.common.uniquer(origin_src_dirs)
+ origin_src_dirs = [Sourceify(self.LocalPathify(d)) for d in origin_src_dirs]
+ self.WriteList(origin_src_dirs, "GYP_COPIED_SOURCE_ORIGIN_DIRS")
+
+ self.WriteList(local_files, "LOCAL_SRC_FILES")
+
+ # Write out the flags used to compile the source; this must be done last
+ # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path.
+ self.WriteSourceFlags(spec, configs)
+
+ def ComputeAndroidModule(self, spec):
+ """Return the Android module name used for a gyp spec.
+
+ We use the complete qualified target name to avoid collisions between
+ duplicate targets in different directories. We also add a suffix to
+ distinguish gyp-generated module names.
+ """
+
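+ # For example (derived from the rules below; names hypothetical): target
+ # 'foo' defined in 'bar/bar.gyp' becomes module 'bar_foo_gyp', or
+ # 'lib_bar_foo_gyp' when built as a shared library.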
+ if int(spec.get("android_unmangled_name", 0)):
+ assert self.type != "shared_library" or self.target.startswith("lib")
+ return self.target
+
+ if self.type == "shared_library":
+ # For reasons of convention, the Android build system requires that all
+ # shared library modules are named 'libfoo' when generating -l flags.
+ prefix = "lib_"
+ else:
+ prefix = ""
+
+ if spec["toolset"] == "host":
+ suffix = "_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp"
+ else:
+ suffix = "_gyp"
+
+ if self.path:
+ middle = make.StringToMakefileVariable(f"{self.path}_{self.target}")
+ else:
+ middle = make.StringToMakefileVariable(self.target)
+
+ return "".join([prefix, middle, suffix])
+
+ def ComputeOutputParts(self, spec):
+ """Return the 'output basename' of a gyp spec, split into filename + ext.
+
+ Android libraries must be named the same thing as their module name,
+ otherwise the linker can't find them, so product_name and so on must be
+ ignored if we are building a library, and the "lib" prepending is
+ not done for Android.
+ """
+ assert self.type != "loadable_module" # TODO: not supported?
+
+ target = spec["target_name"]
+ target_prefix = ""
+ target_ext = ""
+ if self.type == "static_library":
+ target = self.ComputeAndroidModule(spec)
+ target_ext = ".a"
+ elif self.type == "shared_library":
+ target = self.ComputeAndroidModule(spec)
+ target_ext = ".so"
+ elif self.type == "none":
+ target_ext = ".stamp"
+ elif self.type != "executable":
+ print(
+ "ERROR: What output file should be generated?",
+ "type",
+ self.type,
+ "target",
+ target,
+ )
+
+ if self.type != "static_library" and self.type != "shared_library":
+ target_prefix = spec.get("product_prefix", target_prefix)
+ target = spec.get("product_name", target)
+ product_ext = spec.get("product_extension")
+ if product_ext:
+ target_ext = "." + product_ext
+
+ target_stem = target_prefix + target
+ return (target_stem, target_ext)
+
+ def ComputeOutputBasename(self, spec):
+ """Return the 'output basename' of a gyp spec.
+
+ E.g., the shared library 'foobar' in directory 'baz' will produce
+ 'lib_baz_foobar_gyp.so' (see ComputeAndroidModule for the name mangling).
+ """
+ return "".join(self.ComputeOutputParts(spec))
+
+ def ComputeOutput(self, spec):
+ """Return the 'output' (full output path) of a gyp spec.
+
+ E.g., the shared library 'foobar' in directory 'baz' will produce
+ '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)/lib_baz_foobar_gyp.so'
+ """
+ if self.type == "executable":
+ # We install host executables into shared_intermediate_dir so they can be
+ # run by gyp rules that refer to PRODUCT_DIR.
+ path = "$(gyp_shared_intermediate_dir)"
+ elif self.type == "shared_library":
+ if self.toolset == "host":
+ path = "$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)"
+ else:
+ path = "$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)"
+ else:
+ # Other targets just get built into their intermediate dir.
+ if self.toolset == "host":
+ path = (
+ "$(call intermediates-dir-for,%s,%s,true,,"
+ "$(GYP_HOST_VAR_PREFIX))"
+ % (self.android_class, self.android_module)
+ )
+ else:
+ path = "$(call intermediates-dir-for,{},{},,,$(GYP_VAR_PREFIX))".format(
+ self.android_class,
+ self.android_module,
+ )
+
+ assert spec.get("product_dir") is None # TODO: not supported?
+ return os.path.join(path, self.ComputeOutputBasename(spec))
+
+ def NormalizeIncludePaths(self, include_paths):
+ """Normalize include_paths.
+ Convert absolute paths to relative to the Android top directory.
+
+ Args:
+ include_paths: A list of unprocessed include paths.
+ Returns:
+ A list of normalized include paths.
+ """
+ normalized = []
+ for path in include_paths:
+ if path[0] == "/":
+ path = gyp.common.RelativePath(path, self.android_top_dir)
+ normalized.append(path)
+ return normalized
+
+ def ExtractIncludesFromCFlags(self, cflags):
+ """Extract includes "-I..." out from cflags
+
+ Args:
+ cflags: A list of compiler flags, which may be mixed with "-I.."
+ Returns:
+ A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed.
+ """
+ clean_cflags = []
+ include_paths = []
+ for flag in cflags:
+ if flag.startswith("-I"):
+ include_paths.append(flag[2:])
+ else:
+ clean_cflags.append(flag)
+
+ return (clean_cflags, include_paths)
+
+ def FilterLibraries(self, libraries):
+ """Filter the 'libraries' key to separate things that shouldn't be ldflags.
+
+ Library entries that look like filenames should be converted to android
+ module names instead of being passed to the linker as flags.
+
+ Args:
+ libraries: the value of spec.get('libraries')
+ Returns:
+ A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
+ """
+ static_lib_modules = []
+ dynamic_lib_modules = []
+ ldflags = []
+ for libs in libraries:
+ # Libs can have multiple words.
+ for lib in libs.split():
+ # Filter the system libraries, which are added by default by the Android
+ # build system.
+ if (
+ lib == "-lc"
+ or lib == "-lstdc++"
+ or lib == "-lm"
+ or lib.endswith("libgcc.a")
+ ):
+ continue
+ match = re.search(r"([^/]+)\.a$", lib)
+ if match:
+ static_lib_modules.append(match.group(1))
+ continue
+ match = re.search(r"([^/]+)\.so$", lib)
+ if match:
+ dynamic_lib_modules.append(match.group(1))
+ continue
+ if lib.startswith("-l"):
+ ldflags.append(lib)
+ return (static_lib_modules, dynamic_lib_modules, ldflags)
+
+ def ComputeDeps(self, spec):
+ """Compute the dependencies of a gyp spec.
+
+ Returns a tuple (deps, link_deps), where each is a list of
+ filenames that will need to be put in front of make for either
+ building (deps) or linking (link_deps).
+ """
+ deps = []
+ link_deps = []
+ if "dependencies" in spec:
+ deps.extend(
+ [
+ target_outputs[dep]
+ for dep in spec["dependencies"]
+ if target_outputs[dep]
+ ]
+ )
+ for dep in spec["dependencies"]:
+ if dep in target_link_deps:
+ link_deps.append(target_link_deps[dep])
+ deps.extend(link_deps)
+ return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
+
+ def WriteTargetFlags(self, spec, configs, link_deps):
+ """Write Makefile code to specify the link flags and library dependencies.
+
+ spec, configs: input from gyp.
+ link_deps: link dependency list; see ComputeDeps()
+ """
+ # Libraries (i.e. -lfoo)
+ # These must be included even for static libraries as some of them provide
+ # implicit include paths through the build system.
+ libraries = gyp.common.uniquer(spec.get("libraries", []))
+ static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries)
+
+ if self.type != "static_library":
+ for configname, config in sorted(configs.items()):
+ ldflags = list(config.get("ldflags", []))
+ self.WriteLn("")
+ self.WriteList(ldflags, "LOCAL_LDFLAGS_%s" % configname)
+ self.WriteList(ldflags_libs, "LOCAL_GYP_LIBS")
+ self.WriteLn(
+ "LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) "
+ "$(LOCAL_GYP_LIBS)"
+ )
+
+ # Link dependencies (i.e. other gyp targets this target depends on)
+ # These need not be included for static libraries as within the gyp build
+ # we do not use the implicit include path mechanism.
+ if self.type != "static_library":
+ static_link_deps = [x[1] for x in link_deps if x[0] == "static"]
+ shared_link_deps = [x[1] for x in link_deps if x[0] == "shared"]
+ else:
+ static_link_deps = []
+ shared_link_deps = []
+
+ # Only write the lists if they are non-empty.
+ if static_libs or static_link_deps:
+ self.WriteLn("")
+ self.WriteList(static_libs + static_link_deps, "LOCAL_STATIC_LIBRARIES")
+ self.WriteLn("# Enable grouping to fix circular references")
+ self.WriteLn("LOCAL_GROUP_STATIC_LIBRARIES := true")
+ if dynamic_libs or shared_link_deps:
+ self.WriteLn("")
+ self.WriteList(dynamic_libs + shared_link_deps, "LOCAL_SHARED_LIBRARIES")
+
+ def WriteTarget(
+ self, spec, configs, deps, link_deps, part_of_all, write_alias_target
+ ):
+ """Write Makefile code to produce the final target of the gyp spec.
+
+ spec, configs: input from gyp.
+ deps, link_deps: dependency lists; see ComputeDeps()
+ part_of_all: flag indicating this target is part of 'all'
+ write_alias_target: flag indicating whether to create short aliases for this
+ target
+ """
+ self.WriteLn("### Rules for final target.")
+
+ if self.type != "none":
+ self.WriteTargetFlags(spec, configs, link_deps)
+
+ settings = spec.get("aosp_build_settings", {})
+ if settings:
+ self.WriteLn("### Set directly by aosp_build_settings.")
+ for k, v in settings.items():
+ if isinstance(v, list):
+ self.WriteList(v, k)
+ else:
+ self.WriteLn(f"{k} := {make.QuoteIfNecessary(v)}")
+ self.WriteLn("")
+
+ # Add to the set of targets which represent the gyp 'all' target. We use the
+ # name 'gyp_all_modules' as the Android build system doesn't allow the use
+ # of the Make target 'all' and because 'all_modules' is the equivalent of
+ # the Make target 'all' on Android.
+ if part_of_all and write_alias_target:
+ self.WriteLn('# Add target alias to "gyp_all_modules" target.')
+ self.WriteLn(".PHONY: gyp_all_modules")
+ self.WriteLn("gyp_all_modules: %s" % self.android_module)
+ self.WriteLn("")
+
+ # Add an alias from the gyp target name to the Android module name. This
+ # simplifies manual builds of the target, and is required by the test
+ # framework.
+ if self.target != self.android_module and write_alias_target:
+ self.WriteLn("# Alias gyp target name.")
+ self.WriteLn(".PHONY: %s" % self.target)
+ self.WriteLn(f"{self.target}: {self.android_module}")
+ self.WriteLn("")
+
+ # Add the command to trigger build of the target type depending
+ # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY
+ # NOTE: This has to come last!
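+ # E.g., a host static library ends with 'include $(BUILD_HOST_STATIC_LIBRARY)',
+ # while a target shared library ends with 'include $(BUILD_SHARED_LIBRARY)'.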
+ modifier = ""
+ if self.toolset == "host":
+ modifier = "HOST_"
+ if self.type == "static_library":
+ self.WriteLn("include $(BUILD_%sSTATIC_LIBRARY)" % modifier)
+ elif self.type == "shared_library":
+ self.WriteLn("LOCAL_PRELINK_MODULE := false")
+ self.WriteLn("include $(BUILD_%sSHARED_LIBRARY)" % modifier)
+ elif self.type == "executable":
+ self.WriteLn("LOCAL_CXX_STL := libc++_static")
+ # Executables are for build and test purposes only, so they're installed
+ # to a directory that doesn't get included in the system image.
+ self.WriteLn("LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)")
+ self.WriteLn("include $(BUILD_%sEXECUTABLE)" % modifier)
+ else:
+ self.WriteLn("LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp")
+ self.WriteLn("LOCAL_UNINSTALLABLE_MODULE := true")
+ if self.toolset == "target":
+ self.WriteLn("LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)")
+ else:
+ self.WriteLn("LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)")
+ self.WriteLn()
+ self.WriteLn("include $(BUILD_SYSTEM)/base_rules.mk")
+ self.WriteLn()
+ self.WriteLn("$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)")
+ self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
+ self.WriteLn("\t$(hide) mkdir -p $(dir $@)")
+ self.WriteLn("\t$(hide) touch $@")
+ self.WriteLn()
+ self.WriteLn("LOCAL_2ND_ARCH_VAR_PREFIX :=")
+
+ def WriteList(
+ self,
+ value_list,
+ variable=None,
+ prefix="",
+ quoter=make.QuoteIfNecessary,
+ local_pathify=False,
+ ):
+ """Write a variable definition that is a list of values.
+
+ E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
+ foo := blaha blahb
+ but in a pretty-printed style.
+ """
+ values = ""
+ if value_list:
+ value_list = [quoter(prefix + value) for value in value_list]
+ if local_pathify:
+ value_list = [self.LocalPathify(value) for value in value_list]
+ values = " \\\n\t" + " \\\n\t".join(value_list)
+ self.fp.write(f"{variable} :={values}\n\n")
+
+ def WriteLn(self, text=""):
+ self.fp.write(text + "\n")
+
+ def LocalPathify(self, path):
+ """Convert a subdirectory-relative path into a normalized path which starts
+ with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
+ Absolute paths, or paths that contain variables, are just normalized."""
+ if "$(" in path or os.path.isabs(path):
+ # path is not a file in the project tree in this case, but calling
+ # normpath is still important for trimming trailing slashes.
+ return os.path.normpath(path)
+ local_path = os.path.join("$(LOCAL_PATH)", self.path, path)
+ local_path = os.path.normpath(local_path)
+ # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH)
+ # - i.e. that the resulting path is still inside the project tree. The
+ # path may legitimately have ended up containing just $(LOCAL_PATH), though,
+ # so we don't look for a slash.
+ assert local_path.startswith(
+ "$(LOCAL_PATH)"
+ ), f"Path {path} attempts to escape from gyp path {self.path} !)"
+ return local_path
+
+ def ExpandInputRoot(self, template, expansion, dirname):
+ if "%(INPUT_ROOT)s" not in template and "%(INPUT_DIRNAME)s" not in template:
+ return template
+ path = template % {
+ "INPUT_ROOT": expansion,
+ "INPUT_DIRNAME": dirname,
+ }
+ return os.path.normpath(path)
+
+
+def PerformBuild(data, configurations, params):
+ # The android backend only supports the default configuration.
+ options = params["options"]
+ makefile = os.path.abspath(os.path.join(options.toplevel_dir, "GypAndroid.mk"))
+ env = dict(os.environ)
+ env["ONE_SHOT_MAKEFILE"] = makefile
+ arguments = ["make", "-C", os.environ["ANDROID_BUILD_TOP"], "gyp_all_modules"]
+ print("Building: %s" % arguments)
+ subprocess.check_call(arguments, env=env)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ options = params["options"]
+ generator_flags = params.get("generator_flags", {})
+ limit_to_target_all = generator_flags.get("limit_to_target_all", False)
+ write_alias_targets = generator_flags.get("write_alias_targets", True)
+ sdk_version = generator_flags.get("aosp_sdk_version", 0)
+ android_top_dir = os.environ.get("ANDROID_BUILD_TOP")
+ assert android_top_dir, "$ANDROID_BUILD_TOP not set; you need to run lunch."
+
+ def CalculateMakefilePath(build_file, base_name):
+ """Determine where to write a Makefile for a given gyp file."""
+ # Paths in gyp files are relative to the .gyp file, but we want
+ # paths relative to the source root for the master makefile. Grab
+ # the path of the .gyp file as the base to relativize against.
+ # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
+ base_path = gyp.common.RelativePath(os.path.dirname(build_file), options.depth)
+ # We write the file in the base_path directory.
+ output_file = os.path.join(options.depth, base_path, base_name)
+ assert (
+ not options.generator_output
+ ), "The Android backend does not support options.generator_output."
+ base_path = gyp.common.RelativePath(
+ os.path.dirname(build_file), options.toplevel_dir
+ )
+ return base_path, output_file
+
+ # TODO: search for the first non-'Default' target. This can go
+ # away when we add verification that all targets have the
+ # necessary configurations.
+ default_configuration = None
+ for target in target_list:
+ spec = target_dicts[target]
+ if spec["default_configuration"] != "Default":
+ default_configuration = spec["default_configuration"]
+ break
+ if not default_configuration:
+ default_configuration = "Default"
+
+ makefile_name = "GypAndroid" + options.suffix + ".mk"
+ makefile_path = os.path.join(options.toplevel_dir, makefile_name)
+ assert (
+ not options.generator_output
+ ), "The Android backend does not support options.generator_output."
+ gyp.common.EnsureDirExists(makefile_path)
+ root_makefile = open(makefile_path, "w")
+
+ root_makefile.write(header)
+
+ # We set LOCAL_PATH just once, here, to the top of the project tree. This
+ # allows all the other paths we use to be relative to the Android.mk file,
+ # as the Android build system expects.
+ root_makefile.write("\nLOCAL_PATH := $(call my-dir)\n")
+
+ # Find the list of targets that derive from the gyp file(s) being built.
+ needed_targets = set()
+ for build_file in params["build_files"]:
+ for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
+ needed_targets.add(target)
+
+ build_files = set()
+ include_list = set()
+ android_modules = {}
+ for qualified_target in target_list:
+ build_file, target, toolset = gyp.common.ParseQualifiedTarget(qualified_target)
+ relative_build_file = gyp.common.RelativePath(build_file, options.toplevel_dir)
+ build_files.add(relative_build_file)
+ included_files = data[build_file]["included_files"]
+ for included_file in included_files:
+ # The included_files entries are relative to the dir of the build file
+ # that included them, so we have to undo that and then make them relative
+ # to the root dir.
+ relative_include_file = gyp.common.RelativePath(
+ gyp.common.UnrelativePath(included_file, build_file),
+ options.toplevel_dir,
+ )
+ abs_include_file = os.path.abspath(relative_include_file)
+ # If the include file is from the ~/.gyp dir, we should use absolute path
+ # so that relocating the src dir doesn't break the path.
+ if params["home_dot_gyp"] and abs_include_file.startswith(
+ params["home_dot_gyp"]
+ ):
+ build_files.add(abs_include_file)
+ else:
+ build_files.add(relative_include_file)
+
+ base_path, output_file = CalculateMakefilePath(
+ build_file, target + "." + toolset + options.suffix + ".mk"
+ )
+
+ spec = target_dicts[qualified_target]
+ configs = spec["configurations"]
+
+ part_of_all = qualified_target in needed_targets
+ if limit_to_target_all and not part_of_all:
+ continue
+
+ relative_target = gyp.common.QualifiedTarget(
+ relative_build_file, target, toolset
+ )
+ writer = AndroidMkWriter(android_top_dir)
+ android_module = writer.Write(
+ qualified_target,
+ relative_target,
+ base_path,
+ output_file,
+ spec,
+ configs,
+ part_of_all=part_of_all,
+ write_alias_target=write_alias_targets,
+ sdk_version=sdk_version,
+ )
+ if android_module in android_modules:
+ print(
+ "ERROR: Android module names must be unique. The following "
+ "targets both generate Android module name %s.\n %s\n %s"
+ % (android_module, android_modules[android_module], qualified_target)
+ )
+ return
+ android_modules[android_module] = qualified_target
+
+ # Our root_makefile lives at the source root. Compute the relative path
+ # from there to the output_file for including.
+ mkfile_rel_path = gyp.common.RelativePath(
+ output_file, os.path.dirname(makefile_path)
+ )
+ include_list.add(mkfile_rel_path)
+
+ root_makefile.write("GYP_CONFIGURATION ?= %s\n" % default_configuration)
+ root_makefile.write("GYP_VAR_PREFIX ?=\n")
+ root_makefile.write("GYP_HOST_VAR_PREFIX ?=\n")
+ root_makefile.write("GYP_HOST_MULTILIB ?= first\n")
+
+ # Write out the sorted list of includes.
+ root_makefile.write("\n")
+ for include_file in sorted(include_list):
+ root_makefile.write("include $(LOCAL_PATH)/" + include_file + "\n")
+ root_makefile.write("\n")
+
+ if write_alias_targets:
+ root_makefile.write(ALL_MODULES_FOOTER)
+
+ root_makefile.close()
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
new file mode 100644
index 0000000..c95d184
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
@@ -0,0 +1,1321 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cmake output module
+
+This module is under development and should be considered experimental.
+
+This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
+created for each configuration.
+
+This module's original purpose was to support editing in IDEs like KDevelop
+which use CMake for project management. It is also possible to use CMake to
+generate projects for other IDEs such as Eclipse CDT and Code::Blocks. QtCreator
+will convert the CMakeLists.txt to a Code::Blocks .cbp file for the editor to
+read, but builds using CMake. As a result the QtCreator editor is unaware of
+compiler defines. The generated CMakeLists.txt can also be used to build on
+Linux. There is currently no support for building on platforms other than Linux.
+
+The generated CMakeLists.txt should properly compile all projects. However,
+there is a mismatch between gyp and cmake with regard to linking. All attempts
+are made to work around this, but CMake sometimes sees -Wl,--start-group as a
+library and incorrectly repeats it. As a result the output of this generator
+should not be relied on for building.
+
+When using this generator with KDevelop, use version 4.4+. Earlier versions of
+KDevelop will not be able to find the header file directories described in the
+generated CMakeLists.txt file.
+"""
+
+
+import multiprocessing
+import os
+import signal
+import subprocess
+import gyp.common
+import gyp.xcode_emulation
+
+_maketrans = str.maketrans
+
+generator_default_variables = {
+ "EXECUTABLE_PREFIX": "",
+ "EXECUTABLE_SUFFIX": "",
+ "STATIC_LIB_PREFIX": "lib",
+ "STATIC_LIB_SUFFIX": ".a",
+ "SHARED_LIB_PREFIX": "lib",
+ "SHARED_LIB_SUFFIX": ".so",
+ "SHARED_LIB_DIR": "${builddir}/lib.${TOOLSET}",
+ "LIB_DIR": "${obj}.${TOOLSET}",
+ "INTERMEDIATE_DIR": "${obj}.${TOOLSET}/${TARGET}/geni",
+ "SHARED_INTERMEDIATE_DIR": "${obj}/gen",
+ "PRODUCT_DIR": "${builddir}",
+ "RULE_INPUT_PATH": "${RULE_INPUT_PATH}",
+ "RULE_INPUT_DIRNAME": "${RULE_INPUT_DIRNAME}",
+ "RULE_INPUT_NAME": "${RULE_INPUT_NAME}",
+ "RULE_INPUT_ROOT": "${RULE_INPUT_ROOT}",
+ "RULE_INPUT_EXT": "${RULE_INPUT_EXT}",
+ "CONFIGURATION_NAME": "${configuration}",
+}
+
+FULL_PATH_VARS = ("${CMAKE_CURRENT_LIST_DIR}", "${builddir}", "${obj}")
+
+generator_supports_multiple_toolsets = True
+generator_wants_static_library_dependencies_adjusted = True
+
+COMPILABLE_EXTENSIONS = {
+ ".c": "cc",
+ ".cc": "cxx",
+ ".cpp": "cxx",
+ ".cxx": "cxx",
+ ".s": "s", # cc
+ ".S": "s", # cc
+}
+
+
+def RemovePrefix(a, prefix):
+ """Returns 'a' without 'prefix' if it starts with 'prefix'."""
+ return a[len(prefix) :] if a.startswith(prefix) else a
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ default_variables.setdefault("OS", gyp.common.GetFlavor(params))
+
+
+def Compilable(filename):
+ """Return true if the file is compilable (should be in OBJS)."""
+ return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
+
+
+def Linkable(filename):
+ """Return true if the file is linkable (should be on the link line)."""
+ return filename.endswith(".o")
+
+
+def NormjoinPathForceCMakeSource(base_path, rel_path):
+ """Resolves rel_path against base_path and returns the result.
+
+ If rel_path is an absolute path it is returned unchanged.
+ Otherwise it is resolved against base_path and normalized.
+ If the result is a relative path, it is forced to be relative to the
+ CMakeLists.txt.
+ """
+ if os.path.isabs(rel_path):
+ return rel_path
+ if any(rel_path.startswith(var) for var in FULL_PATH_VARS):
+ return rel_path
+ # TODO: do we need to check base_path for absolute variables as well?
+ return os.path.join(
+ "${CMAKE_CURRENT_LIST_DIR}", os.path.normpath(os.path.join(base_path, rel_path))
+ )
+
+
+def NormjoinPath(base_path, rel_path):
+ """Resolves rel_path against base_path and returns the result.
+ TODO: what is this really used for?
+ If rel_path begins with '$' it is returned unchanged.
+ Otherwise it is resolved against base_path if relative, then normalized.
+ """
+ if rel_path.startswith("$") and not rel_path.startswith("${configuration}"):
+ return rel_path
+ return os.path.normpath(os.path.join(base_path, rel_path))
+
+
+def CMakeStringEscape(a):
+ """Escapes the string 'a' for use inside a CMake string.
+
+ This means escaping
+ '\' otherwise it may be seen as modifying the next character
+ '"' otherwise it will end the string
+ ';' otherwise the string becomes a list
+
+ The following do not need to be escaped
+ '#' when the lexer is in string state, this does not start a comment
+
+ The following are yet unknown
+ '$' generator variables (like ${obj}) must not be escaped,
+ but text $ should be escaped
+ what is wanted is to know which $ come from generator variables
+ """
+ return a.replace("\\", "\\\\").replace(";", "\\;").replace('"', '\\"')
+
+
+def SetFileProperty(output, source_name, property_name, values, sep):
+ """Given a set of source file, sets the given property on them."""
+ output.write("set_source_files_properties(")
+ output.write(source_name)
+ output.write(" PROPERTIES ")
+ output.write(property_name)
+ output.write(' "')
+ for value in values:
+ output.write(CMakeStringEscape(value))
+ output.write(sep)
+ output.write('")\n')
+
+
+def SetFilesProperty(output, variable, property_name, values, sep):
+ """Given a set of source files, sets the given property on them."""
+ output.write("set_source_files_properties(")
+ WriteVariable(output, variable)
+ output.write(" PROPERTIES ")
+ output.write(property_name)
+ output.write(' "')
+ for value in values:
+ output.write(CMakeStringEscape(value))
+ output.write(sep)
+ output.write('")\n')
+
+
+def SetTargetProperty(output, target_name, property_name, values, sep=""):
+ """Given a target, sets the given property."""
+ output.write("set_target_properties(")
+ output.write(target_name)
+ output.write(" PROPERTIES ")
+ output.write(property_name)
+ output.write(' "')
+ for value in values:
+ output.write(CMakeStringEscape(value))
+ output.write(sep)
+ output.write('")\n')
+
+
+def SetVariable(output, variable_name, value):
+ """Sets a CMake variable."""
+ output.write("set(")
+ output.write(variable_name)
+ output.write(' "')
+ output.write(CMakeStringEscape(value))
+ output.write('")\n')
+
+
+def SetVariableList(output, variable_name, values):
+ """Sets a CMake variable to a list."""
+ if not values:
+ return SetVariable(output, variable_name, "")
+ if len(values) == 1:
+ return SetVariable(output, variable_name, values[0])
+ output.write("list(APPEND ")
+ output.write(variable_name)
+ output.write('\n "')
+ output.write('"\n "'.join([CMakeStringEscape(value) for value in values]))
+ output.write('")\n')
+
+
+def UnsetVariable(output, variable_name):
+ """Unsets a CMake variable."""
+ output.write("unset(")
+ output.write(variable_name)
+ output.write(")\n")
+
+
+def WriteVariable(output, variable_name, prepend=None):
+ if prepend:
+ output.write(prepend)
+ output.write("${")
+ output.write(variable_name)
+ output.write("}")
+
+
+class CMakeTargetType:
+ def __init__(self, command, modifier, property_modifier):
+ self.command = command
+ self.modifier = modifier
+ self.property_modifier = property_modifier
+
+
+cmake_target_type_from_gyp_target_type = {
+ "executable": CMakeTargetType("add_executable", None, "RUNTIME"),
+ "static_library": CMakeTargetType("add_library", "STATIC", "ARCHIVE"),
+ "shared_library": CMakeTargetType("add_library", "SHARED", "LIBRARY"),
+ "loadable_module": CMakeTargetType("add_library", "MODULE", "LIBRARY"),
+ "none": CMakeTargetType("add_custom_target", "SOURCES", None),
+}
+
+
+def StringToCMakeTargetName(a):
+ """Converts the given string 'a' to a valid CMake target name.
+
+ All invalid characters are replaced by '_'.
+ Invalid for cmake: ' ', '/', '(', ')', '"'
+ Invalid for make: ':'
+ Invalid for unknown reasons but causes failures: '.'
+ """
+ return a.translate(_maketrans(' /():."', "_______"))
+
+
+def WriteActions(target_name, actions, extra_sources, extra_deps, path_to_gyp, output):
+ """Write CMake for the 'actions' in the target.
+
+ Args:
+ target_name: the name of the CMake target being generated.
+ actions: the Gyp 'actions' dict for this target.
+ extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+ extra_deps: [<cmake_target>] to append with generated targets.
+ path_to_gyp: relative path from CMakeLists.txt being generated to
+ the Gyp file in which the target being generated is defined.
+ """
+ for action in actions:
+ action_name = StringToCMakeTargetName(action["action_name"])
+ action_target_name = f"{target_name}__{action_name}"
+
+ inputs = action["inputs"]
+ inputs_name = action_target_name + "__input"
+ SetVariableList(
+ output,
+ inputs_name,
+ [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs],
+ )
+
+ outputs = action["outputs"]
+ cmake_outputs = [
+ NormjoinPathForceCMakeSource(path_to_gyp, out) for out in outputs
+ ]
+ outputs_name = action_target_name + "__output"
+ SetVariableList(output, outputs_name, cmake_outputs)
+
+ # Build up a list of outputs.
+ # Collect the output dirs we'll need.
+ dirs = {dir for dir in (os.path.dirname(o) for o in outputs) if dir}
+
+ if int(action.get("process_outputs_as_sources", False)):
+ extra_sources.extend(zip(cmake_outputs, outputs))
+
+ # add_custom_command
+ output.write("add_custom_command(OUTPUT ")
+ WriteVariable(output, outputs_name)
+ output.write("\n")
+
+ if len(dirs) > 0:
+ for directory in dirs:
+ output.write(" COMMAND ${CMAKE_COMMAND} -E make_directory ")
+ output.write(directory)
+ output.write("\n")
+
+ output.write(" COMMAND ")
+ output.write(gyp.common.EncodePOSIXShellList(action["action"]))
+ output.write("\n")
+
+ output.write(" DEPENDS ")
+ WriteVariable(output, inputs_name)
+ output.write("\n")
+
+ output.write(" WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/")
+ output.write(path_to_gyp)
+ output.write("\n")
+
+ output.write(" COMMENT ")
+ if "message" in action:
+ output.write(action["message"])
+ else:
+ output.write(action_target_name)
+ output.write("\n")
+
+ output.write(" VERBATIM\n")
+ output.write(")\n")
+
+ # add_custom_target
+ output.write("add_custom_target(")
+ output.write(action_target_name)
+ output.write("\n DEPENDS ")
+ WriteVariable(output, outputs_name)
+ output.write("\n SOURCES ")
+ WriteVariable(output, inputs_name)
+ output.write("\n)\n")
+
+ extra_deps.append(action_target_name)
+
+
+def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
+ if rel_path.startswith(("${RULE_INPUT_PATH}", "${RULE_INPUT_DIRNAME}")):
+ if any(rule_source.startswith(var) for var in FULL_PATH_VARS):
+ return rel_path
+ return NormjoinPathForceCMakeSource(base_path, rel_path)
+
+
+def WriteRules(target_name, rules, extra_sources, extra_deps, path_to_gyp, output):
+ """Write CMake for the 'rules' in the target.
+
+ Args:
+ target_name: the name of the CMake target being generated.
+ rules: the Gyp 'rules' dict for this target.
+ extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+ extra_deps: [<cmake_target>] to append with generated targets.
+ path_to_gyp: relative path from CMakeLists.txt being generated to
+ the Gyp file in which the target being generated is defined.
+ """
+ for rule in rules:
+ rule_name = StringToCMakeTargetName(target_name + "__" + rule["rule_name"])
+
+ inputs = rule.get("inputs", [])
+ inputs_name = rule_name + "__input"
+ SetVariableList(
+ output,
+ inputs_name,
+ [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs],
+ )
+ outputs = rule["outputs"]
+ var_outputs = []
+
+ for count, rule_source in enumerate(rule.get("rule_sources", [])):
+ action_name = rule_name + "_" + str(count)
+
+ rule_source_dirname, rule_source_basename = os.path.split(rule_source)
+ rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename)
+
+ SetVariable(output, "RULE_INPUT_PATH", rule_source)
+ SetVariable(output, "RULE_INPUT_DIRNAME", rule_source_dirname)
+ SetVariable(output, "RULE_INPUT_NAME", rule_source_basename)
+ SetVariable(output, "RULE_INPUT_ROOT", rule_source_root)
+ SetVariable(output, "RULE_INPUT_EXT", rule_source_ext)
+
+ # Build up a list of outputs.
+ # Collect the output dirs we'll need.
+ dirs = {dir for dir in (os.path.dirname(o) for o in outputs) if dir}
+
+ # Create variables for the outputs, as the 'local' RULE_INPUT_*
+ # variables will be unset after the loop.
+ these_outputs = []
+ for output_index, out in enumerate(outputs):
+ output_name = action_name + "_" + str(output_index)
+ SetVariable(
+ output,
+ output_name,
+ NormjoinRulePathForceCMakeSource(path_to_gyp, out, rule_source),
+ )
+ if int(rule.get("process_outputs_as_sources", False)):
+ extra_sources.append(("${" + output_name + "}", out))
+ these_outputs.append("${" + output_name + "}")
+ var_outputs.append("${" + output_name + "}")
+
+ # add_custom_command
+ output.write("add_custom_command(OUTPUT\n")
+ for out in these_outputs:
+ output.write(" ")
+ output.write(out)
+ output.write("\n")
+
+ for directory in dirs:
+ output.write(" COMMAND ${CMAKE_COMMAND} -E make_directory ")
+ output.write(directory)
+ output.write("\n")
+
+ output.write(" COMMAND ")
+ output.write(gyp.common.EncodePOSIXShellList(rule["action"]))
+ output.write("\n")
+
+ output.write(" DEPENDS ")
+ WriteVariable(output, inputs_name)
+ output.write(" ")
+ output.write(NormjoinPath(path_to_gyp, rule_source))
+ output.write("\n")
+
+ # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives.
+ # The cwd is the current build directory.
+ output.write(" WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/")
+ output.write(path_to_gyp)
+ output.write("\n")
+
+ output.write(" COMMENT ")
+ if "message" in rule:
+ output.write(rule["message"])
+ else:
+ output.write(action_name)
+ output.write("\n")
+
+ output.write(" VERBATIM\n")
+ output.write(")\n")
+
+ UnsetVariable(output, "RULE_INPUT_PATH")
+ UnsetVariable(output, "RULE_INPUT_DIRNAME")
+ UnsetVariable(output, "RULE_INPUT_NAME")
+ UnsetVariable(output, "RULE_INPUT_ROOT")
+ UnsetVariable(output, "RULE_INPUT_EXT")
+
+ # add_custom_target
+ output.write("add_custom_target(")
+ output.write(rule_name)
+ output.write(" DEPENDS\n")
+ for out in var_outputs:
+ output.write(" ")
+ output.write(out)
+ output.write("\n")
+ output.write("SOURCES ")
+ WriteVariable(output, inputs_name)
+ output.write("\n")
+ for rule_source in rule.get("rule_sources", []):
+ output.write(" ")
+ output.write(NormjoinPath(path_to_gyp, rule_source))
+ output.write("\n")
+ output.write(")\n")
+
+ extra_deps.append(rule_name)
+
+
+def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
+ """Write CMake for the 'copies' in the target.
+
+ Args:
+ target_name: the name of the CMake target being generated.
+ copies: the Gyp 'copies' dict for this target.
+ extra_deps: [<cmake_target>] to append with generated targets.
+ path_to_gyp: relative path from CMakeLists.txt being generated to
+ the Gyp file in which the target being generated is defined.
+ """
+ copy_name = target_name + "__copies"
+
+ # CMake gets upset by custom commands whose OUTPUT specifies no files,
+ # so emit a bare custom target when there is nothing to copy.
+ have_copies = any(copy["files"] for copy in copies)
+ if not have_copies:
+ output.write("add_custom_target(")
+ output.write(copy_name)
+ output.write(")\n")
+ extra_deps.append(copy_name)
+ return
+
+ class Copy:
+ def __init__(self, ext, command):
+ self.cmake_inputs = []
+ self.cmake_outputs = []
+ self.gyp_inputs = []
+ self.gyp_outputs = []
+ self.ext = ext
+ self.inputs_name = None
+ self.outputs_name = None
+ self.command = command
+
+ file_copy = Copy("", "copy")
+ dir_copy = Copy("_dirs", "copy_directory")
+
+ for copy in copies:
+ files = copy["files"]
+ destination = copy["destination"]
+ for src in files:
+ path = os.path.normpath(src)
+ basename = os.path.split(path)[1]
+ dst = os.path.join(destination, basename)
+
+ copy = file_copy if os.path.basename(src) else dir_copy
+
+ copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
+ copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
+ copy.gyp_inputs.append(src)
+ copy.gyp_outputs.append(dst)
+
+ for copy in (file_copy, dir_copy):
+ if copy.cmake_inputs:
+ copy.inputs_name = copy_name + "__input" + copy.ext
+ SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
+
+ copy.outputs_name = copy_name + "__output" + copy.ext
+ SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
+
+ # add_custom_command
+ output.write("add_custom_command(\n")
+
+ output.write("OUTPUT")
+ for copy in (file_copy, dir_copy):
+ if copy.outputs_name:
+ WriteVariable(output, copy.outputs_name, " ")
+ output.write("\n")
+
+ for copy in (file_copy, dir_copy):
+ for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
+ # 'cmake -E copy src dst' will create the 'dst' directory if needed.
+ output.write("COMMAND ${CMAKE_COMMAND} -E %s " % copy.command)
+ output.write(src)
+ output.write(" ")
+ output.write(dst)
+ output.write("\n")
+
+ output.write("DEPENDS")
+ for copy in (file_copy, dir_copy):
+ if copy.inputs_name:
+ WriteVariable(output, copy.inputs_name, " ")
+ output.write("\n")
+
+ output.write("WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/")
+ output.write(path_to_gyp)
+ output.write("\n")
+
+ output.write("COMMENT Copying for ")
+ output.write(target_name)
+ output.write("\n")
+
+ output.write("VERBATIM\n")
+ output.write(")\n")
+
+ # add_custom_target
+ output.write("add_custom_target(")
+ output.write(copy_name)
+ output.write("\n DEPENDS")
+ for copy in (file_copy, dir_copy):
+ if copy.outputs_name:
+ WriteVariable(output, copy.outputs_name, " ")
+ output.write("\n SOURCES")
+ if file_copy.inputs_name:
+ WriteVariable(output, file_copy.inputs_name, " ")
+ output.write("\n)\n")
+
+ extra_deps.append(copy_name)
+
+
+def CreateCMakeTargetBaseName(qualified_target):
+ """This is the name we would like the target to have."""
+ _, gyp_target_name, gyp_target_toolset = gyp.common.ParseQualifiedTarget(
+ qualified_target
+ )
+ cmake_target_base_name = gyp_target_name
+ if gyp_target_toolset and gyp_target_toolset != "target":
+ cmake_target_base_name += "_" + gyp_target_toolset
+ return StringToCMakeTargetName(cmake_target_base_name)
+
+
+def CreateCMakeTargetFullName(qualified_target):
+ """An unambiguous name for the target."""
+ gyp_file, gyp_target_name, gyp_target_toolset = gyp.common.ParseQualifiedTarget(
+ qualified_target
+ )
+ cmake_target_full_name = gyp_file + ":" + gyp_target_name
+ if gyp_target_toolset and gyp_target_toolset != "target":
+ cmake_target_full_name += "_" + gyp_target_toolset
+ return StringToCMakeTargetName(cmake_target_full_name)
+
+
+class CMakeNamer:
+ """Converts Gyp target names into CMake target names.
+
+ CMake requires that target names be globally unique. One way to ensure
+ this is to fully qualify the names of the targets. Unfortunately, this
+ ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
+ of just "chrome". If this generator were only interested in building, it
+ would be possible to fully qualify all target names, then create
+ unqualified target names which depend on all qualified targets which
+ should have had that name. This is more or less what the 'make' generator
+ does with aliases. However, one goal of this generator is to create CMake
+ files for use with IDEs, and fully qualified names are not as user
+ friendly.
+
+ Since target name collision is rare, we do the above only when required.
+
+ Toolset variants are always qualified from the base, as this is required for
+ building. However, it also makes sense for an IDE, as it is possible for
+ defines to be different.
+ """
+
+ def __init__(self, target_list):
+ self.cmake_target_base_names_conflicting = set()
+
+ cmake_target_base_names_seen = set()
+ for qualified_target in target_list:
+ cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target)
+
+ if cmake_target_base_name not in cmake_target_base_names_seen:
+ cmake_target_base_names_seen.add(cmake_target_base_name)
+ else:
+ self.cmake_target_base_names_conflicting.add(cmake_target_base_name)
+
+ def CreateCMakeTargetName(self, qualified_target):
+ base_name = CreateCMakeTargetBaseName(qualified_target)
+ if base_name in self.cmake_target_base_names_conflicting:
+ return CreateCMakeTargetFullName(qualified_target)
+ return base_name
+
+
+def WriteTarget(
+ namer,
+ qualified_target,
+ target_dicts,
+ build_dir,
+ config_to_use,
+ options,
+ generator_flags,
+ all_qualified_targets,
+ flavor,
+ output,
+):
+ # The make generator does this always.
+ # TODO: It would be nice to be able to tell CMake all dependencies.
+ circular_libs = generator_flags.get("circular", True)
+
+ if not generator_flags.get("standalone", False):
+ output.write("\n#")
+ output.write(qualified_target)
+ output.write("\n")
+
+ gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+ rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir)
+ rel_gyp_dir = os.path.dirname(rel_gyp_file)
+
+ # Relative path from build dir to top dir.
+ build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
+ # Relative path from build dir to gyp dir.
+ build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)
+
+ path_from_cmakelists_to_gyp = build_to_gyp
+
+ spec = target_dicts.get(qualified_target, {})
+ config = spec.get("configurations", {}).get(config_to_use, {})
+
+ xcode_settings = None
+ if flavor == "mac":
+ xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+
+ target_name = spec.get("target_name", "<missing target name>")
+ target_type = spec.get("type", "<missing target type>")
+ target_toolset = spec.get("toolset")
+
+ cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
+ if cmake_target_type is None:
+ print(
+ "Target %s has unknown target type %s, skipping."
+ % (target_name, target_type)
+ )
+ return
+
+ SetVariable(output, "TARGET", target_name)
+ SetVariable(output, "TOOLSET", target_toolset)
+
+ cmake_target_name = namer.CreateCMakeTargetName(qualified_target)
+
+ extra_sources = []
+ extra_deps = []
+
+ # Actions must come first, since they can generate more OBJs for use below.
+ if "actions" in spec:
+ WriteActions(
+ cmake_target_name,
+ spec["actions"],
+ extra_sources,
+ extra_deps,
+ path_from_cmakelists_to_gyp,
+ output,
+ )
+
+ # Rules must be early like actions.
+ if "rules" in spec:
+ WriteRules(
+ cmake_target_name,
+ spec["rules"],
+ extra_sources,
+ extra_deps,
+ path_from_cmakelists_to_gyp,
+ output,
+ )
+
+ # Copies
+ if "copies" in spec:
+ WriteCopies(
+ cmake_target_name,
+ spec["copies"],
+ extra_deps,
+ path_from_cmakelists_to_gyp,
+ output,
+ )
+
+ # Target and sources
+ srcs = spec.get("sources", [])
+
+ # Gyp separates the sheep from the goats based on file extensions.
+ # A full separation is done here because of flag handing (see below).
+ s_sources = []
+ c_sources = []
+ cxx_sources = []
+ linkable_sources = []
+ other_sources = []
+ for src in srcs:
+ _, ext = os.path.splitext(src)
+ src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+ src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src)
+
+ if src_type == "s":
+ s_sources.append(src_norm_path)
+ elif src_type == "cc":
+ c_sources.append(src_norm_path)
+ elif src_type == "cxx":
+ cxx_sources.append(src_norm_path)
+ elif Linkable(ext):
+ linkable_sources.append(src_norm_path)
+ else:
+ other_sources.append(src_norm_path)
+
+ for extra_source in extra_sources:
+ src, real_source = extra_source
+ _, ext = os.path.splitext(real_source)
+ src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+
+ if src_type == "s":
+ s_sources.append(src)
+ elif src_type == "cc":
+ c_sources.append(src)
+ elif src_type == "cxx":
+ cxx_sources.append(src)
+ elif Linkable(ext):
+ linkable_sources.append(src)
+ else:
+ other_sources.append(src)
+
+ s_sources_name = None
+ if s_sources:
+ s_sources_name = cmake_target_name + "__asm_srcs"
+ SetVariableList(output, s_sources_name, s_sources)
+
+ c_sources_name = None
+ if c_sources:
+ c_sources_name = cmake_target_name + "__c_srcs"
+ SetVariableList(output, c_sources_name, c_sources)
+
+ cxx_sources_name = None
+ if cxx_sources:
+ cxx_sources_name = cmake_target_name + "__cxx_srcs"
+ SetVariableList(output, cxx_sources_name, cxx_sources)
+
+ linkable_sources_name = None
+ if linkable_sources:
+ linkable_sources_name = cmake_target_name + "__linkable_srcs"
+ SetVariableList(output, linkable_sources_name, linkable_sources)
+
+ other_sources_name = None
+ if other_sources:
+ other_sources_name = cmake_target_name + "__other_srcs"
+ SetVariableList(output, other_sources_name, other_sources)
+
+ # CMake gets upset when executable targets provide no sources.
+ # http://www.cmake.org/pipermail/cmake/2010-July/038461.html
+ dummy_sources_name = None
+ has_sources = (
+ s_sources_name
+ or c_sources_name
+ or cxx_sources_name
+ or linkable_sources_name
+ or other_sources_name
+ )
+ if target_type == "executable" and not has_sources:
+ dummy_sources_name = cmake_target_name + "__dummy_srcs"
+ SetVariable(
+ output, dummy_sources_name, "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c"
+ )
+ output.write('if(NOT EXISTS "')
+ WriteVariable(output, dummy_sources_name)
+ output.write('")\n')
+ output.write(' file(WRITE "')
+ WriteVariable(output, dummy_sources_name)
+ output.write('" "")\n')
+ output.write("endif()\n")
+
+ # CMake is opposed to setting linker directories and considers the practice
+ # of setting linker directories dangerous. Instead, it favors the use of
+ # find_library and passing absolute paths to target_link_libraries.
+ # However, CMake does provide the command link_directories, which adds
+ # link directories to targets defined after it is called.
+ # As a result, link_directories must come before the target definition.
+ # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
+ library_dirs = config.get("library_dirs")
+ if library_dirs is not None:
+ output.write("link_directories(")
+ for library_dir in library_dirs:
+ output.write(" ")
+ output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
+ output.write("\n")
+ output.write(")\n")
+
+ output.write(cmake_target_type.command)
+ output.write("(")
+ output.write(cmake_target_name)
+
+ if cmake_target_type.modifier is not None:
+ output.write(" ")
+ output.write(cmake_target_type.modifier)
+
+ if s_sources_name:
+ WriteVariable(output, s_sources_name, " ")
+ if c_sources_name:
+ WriteVariable(output, c_sources_name, " ")
+ if cxx_sources_name:
+ WriteVariable(output, cxx_sources_name, " ")
+ if linkable_sources_name:
+ WriteVariable(output, linkable_sources_name, " ")
+ if other_sources_name:
+ WriteVariable(output, other_sources_name, " ")
+ if dummy_sources_name:
+ WriteVariable(output, dummy_sources_name, " ")
+
+ output.write(")\n")
+
+ # Let CMake know if the 'all' target should depend on this target.
+ exclude_from_all = (
+ "TRUE" if qualified_target not in all_qualified_targets else "FALSE"
+ )
+ SetTargetProperty(output, cmake_target_name, "EXCLUDE_FROM_ALL", exclude_from_all)
+ for extra_target_name in extra_deps:
+ SetTargetProperty(
+ output, extra_target_name, "EXCLUDE_FROM_ALL", exclude_from_all
+ )
+
+ # Output name and location.
+ if target_type != "none":
+ # Link as 'C' if there are no C or C++ sources to infer a linker language from.
+ if not c_sources and not cxx_sources:
+ SetTargetProperty(output, cmake_target_name, "LINKER_LANGUAGE", ["C"])
+
+ # Mark uncompiled sources as uncompiled.
+ if other_sources_name:
+ output.write("set_source_files_properties(")
+ WriteVariable(output, other_sources_name, "")
+ output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')
+
+ # Mark object sources as linkable.
+ if linkable_sources_name:
+ output.write("set_source_files_properties(")
+ WriteVariable(output, linkable_sources_name, "")
+ output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')
+
+ # Output directory
+ target_output_directory = spec.get("product_dir")
+ if target_output_directory is None:
+ if target_type in ("executable", "loadable_module"):
+ target_output_directory = generator_default_variables["PRODUCT_DIR"]
+ elif target_type == "shared_library":
+ target_output_directory = "${builddir}/lib.${TOOLSET}"
+ elif spec.get("standalone_static_library", False):
+ target_output_directory = generator_default_variables["PRODUCT_DIR"]
+ else:
+ base_path = gyp.common.RelativePath(
+ os.path.dirname(gyp_file), options.toplevel_dir
+ )
+ target_output_directory = "${obj}.${TOOLSET}"
+ target_output_directory = os.path.join(
+ target_output_directory, base_path
+ )
+
+ cmake_target_output_directory = NormjoinPathForceCMakeSource(
+ path_from_cmakelists_to_gyp, target_output_directory
+ )
+ SetTargetProperty(
+ output,
+ cmake_target_name,
+ cmake_target_type.property_modifier + "_OUTPUT_DIRECTORY",
+ cmake_target_output_directory,
+ )
+
+ # Output name
+ default_product_prefix = ""
+ default_product_name = target_name
+ default_product_ext = ""
+ if target_type == "static_library":
+ static_library_prefix = generator_default_variables["STATIC_LIB_PREFIX"]
+ default_product_name = RemovePrefix(
+ default_product_name, static_library_prefix
+ )
+ default_product_prefix = static_library_prefix
+ default_product_ext = generator_default_variables["STATIC_LIB_SUFFIX"]
+
+ elif target_type in ("loadable_module", "shared_library"):
+ shared_library_prefix = generator_default_variables["SHARED_LIB_PREFIX"]
+ default_product_name = RemovePrefix(
+ default_product_name, shared_library_prefix
+ )
+ default_product_prefix = shared_library_prefix
+ default_product_ext = generator_default_variables["SHARED_LIB_SUFFIX"]
+
+ elif target_type != "executable":
+ print(
+ "ERROR: What output file should be generated?",
+ "type",
+ target_type,
+ "target",
+ target_name,
+ )
+
+ product_prefix = spec.get("product_prefix", default_product_prefix)
+ product_name = spec.get("product_name", default_product_name)
+ product_ext = spec.get("product_extension")
+ if product_ext:
+ product_ext = "." + product_ext
+ else:
+ product_ext = default_product_ext
+
+ SetTargetProperty(output, cmake_target_name, "PREFIX", product_prefix)
+ SetTargetProperty(
+ output,
+ cmake_target_name,
+ cmake_target_type.property_modifier + "_OUTPUT_NAME",
+ product_name,
+ )
+ SetTargetProperty(output, cmake_target_name, "SUFFIX", product_ext)
+
+ # Make the output of this target referenceable as a source.
+ cmake_target_output_basename = product_prefix + product_name + product_ext
+ cmake_target_output = os.path.join(
+ cmake_target_output_directory, cmake_target_output_basename
+ )
+ SetFileProperty(output, cmake_target_output, "GENERATED", ["TRUE"], "")
+
+ # Includes
+ includes = config.get("include_dirs")
+ if includes:
+ # This (target include directories) is what requires CMake 2.8.8
+ includes_name = cmake_target_name + "__include_dirs"
+ SetVariableList(
+ output,
+ includes_name,
+ [
+ NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
+ for include in includes
+ ],
+ )
+ output.write("set_property(TARGET ")
+ output.write(cmake_target_name)
+ output.write(" APPEND PROPERTY INCLUDE_DIRECTORIES ")
+ WriteVariable(output, includes_name, "")
+ output.write(")\n")
+
+ # Defines
+ defines = config.get("defines")
+ if defines is not None:
+ SetTargetProperty(
+ output, cmake_target_name, "COMPILE_DEFINITIONS", defines, ";"
+ )
+
+ # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
+ # CMake currently does not have target C and CXX flags.
+ # So, instead of doing...
+
+ # cflags_c = config.get('cflags_c')
+ # if cflags_c is not None:
+ # SetTargetProperty(output, cmake_target_name,
+ # 'C_COMPILE_FLAGS', cflags_c, ' ')
+
+ # cflags_cc = config.get('cflags_cc')
+ # if cflags_cc is not None:
+ # SetTargetProperty(output, cmake_target_name,
+ # 'CXX_COMPILE_FLAGS', cflags_cc, ' ')
+
+ # Instead we must...
+ cflags = config.get("cflags", [])
+ cflags_c = config.get("cflags_c", [])
+ cflags_cxx = config.get("cflags_cc", [])
+ if xcode_settings:
+ cflags = xcode_settings.GetCflags(config_to_use)
+ cflags_c = xcode_settings.GetCflagsC(config_to_use)
+ cflags_cxx = xcode_settings.GetCflagsCC(config_to_use)
+ # cflags_objc = xcode_settings.GetCflagsObjC(config_to_use)
+ # cflags_objcc = xcode_settings.GetCflagsObjCC(config_to_use)
+
+ if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
+ SetTargetProperty(output, cmake_target_name, "COMPILE_FLAGS", cflags, " ")
+
+ elif c_sources and not (s_sources or cxx_sources):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_c)
+ SetTargetProperty(output, cmake_target_name, "COMPILE_FLAGS", flags, " ")
+
+ elif cxx_sources and not (s_sources or c_sources):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_cxx)
+ SetTargetProperty(output, cmake_target_name, "COMPILE_FLAGS", flags, " ")
+
+ else:
+ # TODO: This is broken, one cannot generally set properties on files,
+ # as other targets may require different properties on the same files.
+ if s_sources and cflags:
+ SetFilesProperty(output, s_sources_name, "COMPILE_FLAGS", cflags, " ")
+
+ if c_sources and (cflags or cflags_c):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_c)
+ SetFilesProperty(output, c_sources_name, "COMPILE_FLAGS", flags, " ")
+
+ if cxx_sources and (cflags or cflags_cxx):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_cxx)
+ SetFilesProperty(output, cxx_sources_name, "COMPILE_FLAGS", flags, " ")
+
+ # Linker flags
+ ldflags = config.get("ldflags")
+ if ldflags is not None:
+ SetTargetProperty(output, cmake_target_name, "LINK_FLAGS", ldflags, " ")
+
+ # XCode settings
+ xcode_settings = config.get("xcode_settings", {})
+ for xcode_setting, xcode_value in xcode_settings.items():
+ SetTargetProperty(
+ output,
+ cmake_target_name,
+ "XCODE_ATTRIBUTE_%s" % xcode_setting,
+ xcode_value,
+ "" if isinstance(xcode_value, str) else " ",
+ )
+
+ # Note on Dependencies and Libraries:
+ # CMake wants to handle link order, resolving the link line up front.
+ # Gyp does not retain or enforce specifying enough information to do so.
+ # So do as other gyp generators and use --start-group and --end-group.
+ # Give CMake as little information as possible so that it doesn't mess it up.
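+ # The emitted block therefore looks roughly like (names hypothetical):
+ #   target_link_libraries(foo
+ #   -Wl,--start-group
+ #     dep_a
+ #     dep_b
+ #   -Wl,--end-group
+ #     "third_party/libz.a"
+ #   )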
+
+ # Dependencies
+ rawDeps = spec.get("dependencies", [])
+
+ static_deps = []
+ shared_deps = []
+ other_deps = []
+ for rawDep in rawDeps:
+ dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
+ dep_spec = target_dicts.get(rawDep, {})
+ dep_target_type = dep_spec.get("type", None)
+
+ if dep_target_type == "static_library":
+ static_deps.append(dep_cmake_name)
+ elif dep_target_type == "shared_library":
+ shared_deps.append(dep_cmake_name)
+ else:
+ other_deps.append(dep_cmake_name)
+
+ # Ensure all external dependencies are complete before internal dependencies;
+ # extra_deps currently only depend on their own deps, so they would otherwise
+ # run early.
+ if static_deps or shared_deps or other_deps:
+ for extra_dep in extra_deps:
+ output.write("add_dependencies(")
+ output.write(extra_dep)
+ output.write("\n")
+ for deps in (static_deps, shared_deps, other_deps):
+ for dep in gyp.common.uniquer(deps):
+ output.write(" ")
+ output.write(dep)
+ output.write("\n")
+ output.write(")\n")
+
+ linkable = target_type in ("executable", "loadable_module", "shared_library")
+ other_deps.extend(extra_deps)
+ if other_deps or (not linkable and (static_deps or shared_deps)):
+ output.write("add_dependencies(")
+ output.write(cmake_target_name)
+ output.write("\n")
+ for dep in gyp.common.uniquer(other_deps):
+ output.write(" ")
+ output.write(dep)
+ output.write("\n")
+ if not linkable:
+ for deps in (static_deps, shared_deps):
+ for lib_dep in gyp.common.uniquer(deps):
+ output.write(" ")
+ output.write(lib_dep)
+ output.write("\n")
+ output.write(")\n")
+
+ # Libraries
+ if linkable:
+ external_libs = [lib for lib in spec.get("libraries", []) if len(lib) > 0]
+ if external_libs or static_deps or shared_deps:
+ output.write("target_link_libraries(")
+ output.write(cmake_target_name)
+ output.write("\n")
+ if static_deps:
+ write_group = circular_libs and len(static_deps) > 1 and flavor != "mac"
+ if write_group:
+ output.write("-Wl,--start-group\n")
+ for dep in gyp.common.uniquer(static_deps):
+ output.write(" ")
+ output.write(dep)
+ output.write("\n")
+ if write_group:
+ output.write("-Wl,--end-group\n")
+ if shared_deps:
+ for dep in gyp.common.uniquer(shared_deps):
+ output.write(" ")
+ output.write(dep)
+ output.write("\n")
+ if external_libs:
+ for lib in gyp.common.uniquer(external_libs):
+ output.write(' "')
+ output.write(RemovePrefix(lib, "$(SDKROOT)"))
+ output.write('"\n')
+
+ output.write(")\n")
+
+ UnsetVariable(output, "TOOLSET")
+ UnsetVariable(output, "TARGET")
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data, params, config_to_use):
+ options = params["options"]
+ generator_flags = params["generator_flags"]
+ flavor = gyp.common.GetFlavor(params)
+
+ # generator_dir: relative path from pwd to where make puts build files.
+ # Makes migrating from make to cmake easier; cmake doesn't put anything here.
+ # Each Gyp configuration creates a different CMakeLists.txt file
+ # to avoid incompatibilities between Gyp and CMake configurations.
+ generator_dir = os.path.relpath(options.generator_output or ".")
+
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = generator_flags.get("output_dir", "out")
+
+ # build_dir: relative path from source root to our output files.
+ # e.g. "out/Debug"
+ build_dir = os.path.normpath(os.path.join(generator_dir, output_dir, config_to_use))
+
+ toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+
+ output_file = os.path.join(toplevel_build, "CMakeLists.txt")
+ gyp.common.EnsureDirExists(output_file)
+
+ output = open(output_file, "w")
+ output.write("cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n")
+ output.write("cmake_policy(VERSION 2.8.8)\n")
+
+ gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
+ output.write("project(")
+ output.write(project_target)
+ output.write(")\n")
+
+ SetVariable(output, "configuration", config_to_use)
+
+ ar = None
+ cc = None
+ cxx = None
+
+ make_global_settings = data[gyp_file].get("make_global_settings", [])
+ build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
+ for key, value in make_global_settings:
+ if key == "AR":
+ ar = os.path.join(build_to_top, value)
+ if key == "CC":
+ cc = os.path.join(build_to_top, value)
+ if key == "CXX":
+ cxx = os.path.join(build_to_top, value)
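+ # E.g., a make_global_settings entry ["CC", "tools/clang/bin/clang"] (path
+ # illustrative) is joined with build_to_top so that the resulting compiler
+ # path stays valid when resolved from the generated build directory.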
+
+ ar = gyp.common.GetEnvironFallback(["AR_target", "AR"], ar)
+ cc = gyp.common.GetEnvironFallback(["CC_target", "CC"], cc)
+ cxx = gyp.common.GetEnvironFallback(["CXX_target", "CXX"], cxx)
+
+ if ar:
+ SetVariable(output, "CMAKE_AR", ar)
+ if cc:
+ SetVariable(output, "CMAKE_C_COMPILER", cc)
+ if cxx:
+ SetVariable(output, "CMAKE_CXX_COMPILER", cxx)
+
+ # The following appears to be as-yet undocumented.
+ # http://public.kitware.com/Bug/view.php?id=8392
+ output.write("enable_language(ASM)\n")
+ # ASM-ATT does not support .S files.
+ # output.write('enable_language(ASM-ATT)\n')
+
+ if cc:
+ SetVariable(output, "CMAKE_ASM_COMPILER", cc)
+
+ SetVariable(output, "builddir", "${CMAKE_CURRENT_BINARY_DIR}")
+ SetVariable(output, "obj", "${builddir}/obj")
+ output.write("\n")
+
+ # TODO: Undocumented/unsupported (the CMake Java generator depends on it).
+ # CMake by default names the object resulting from foo.c to be foo.c.o.
+ # Gyp traditionally names the object resulting from foo.c foo.o.
+ # This should be irrelevant, but some targets extract .o files from .a
+ # and depend on the name of the extracted .o files.
+ output.write("set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n")
+ output.write("set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n")
+ output.write("\n")
+
+ # Force ninja to use rsp files. Otherwise link and ar lines can get too long,
+ # resulting in 'Argument list too long' errors.
+ # However, rsp files don't work correctly on Mac.
+ if flavor != "mac":
+ output.write("set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n")
+ output.write("\n")
+
+ namer = CMakeNamer(target_list)
+
+ # The list of targets upon which the 'all' target should depend.
+ # CMake has its own implicit 'all' target; one is not created explicitly.
+ all_qualified_targets = set()
+ for build_file in params["build_files"]:
+ for qualified_target in gyp.common.AllTargets(
+ target_list, target_dicts, os.path.normpath(build_file)
+ ):
+ all_qualified_targets.add(qualified_target)
+
+ for qualified_target in target_list:
+ if flavor == "mac":
+ gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+ spec = target_dicts[qualified_target]
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[gyp_file], spec)
+
+ WriteTarget(
+ namer,
+ qualified_target,
+ target_dicts,
+ build_dir,
+ config_to_use,
+ options,
+ generator_flags,
+ all_qualified_targets,
+ flavor,
+ output,
+ )
+
+ output.close()
+
+
+def PerformBuild(data, configurations, params):
+ options = params["options"]
+ generator_flags = params["generator_flags"]
+
+ # generator_dir: relative path from pwd to where make puts build files.
+ # Makes migrating from make to cmake easier; cmake doesn't put anything here.
+ generator_dir = os.path.relpath(options.generator_output or ".")
+
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = generator_flags.get("output_dir", "out")
+
+ for config_name in configurations:
+ # build_dir: relative path from source root to our output files.
+ # e.g. "out/Debug"
+ build_dir = os.path.normpath(
+ os.path.join(generator_dir, output_dir, config_name)
+ )
+ arguments = ["cmake", "-G", "Ninja"]
+ print(f"Generating [{config_name}]: {arguments}")
+ subprocess.check_call(arguments, cwd=build_dir)
+
+ arguments = ["ninja", "-C", build_dir]
+ print(f"Building [{config_name}]: {arguments}")
+ subprocess.check_call(arguments)
+
+
+def CallGenerateOutputForConfig(arglist):
+ # Ignore the interrupt signal so that the parent process catches it and
+ # kills all multiprocessing children.
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ target_list, target_dicts, data, params, config_name = arglist
+ GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ user_config = params.get("generator_flags", {}).get("config", None)
+ if user_config:
+ GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
+ else:
+ config_names = target_dicts[target_list[0]]["configurations"]
+ if params["parallel"]:
+ try:
+ pool = multiprocessing.Pool(len(config_names))
+ arglists = []
+ for config_name in config_names:
+ arglists.append(
+ (target_list, target_dicts, data, params, config_name)
+ )
+ pool.map(CallGenerateOutputForConfig, arglists)
+ except KeyboardInterrupt as e:
+ pool.terminate()
+ raise e
+ else:
+ for config_name in config_names:
+ GenerateOutputForConfig(
+ target_list, target_dicts, data, params, config_name
+ )
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
new file mode 100644
index 0000000..f330a04
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
@@ -0,0 +1,120 @@
+# Copyright (c) 2016 Ben Noordhuis <info@bnoordhuis.nl>. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import gyp.common
+import gyp.xcode_emulation
+import json
+import os
+
+generator_additional_non_configuration_keys = []
+generator_additional_path_sections = []
+generator_extra_sources_for_rules = []
+generator_filelist_paths = None
+generator_supports_multiple_toolsets = True
+generator_wants_sorted_dependencies = False
+
+# Lifted from make.py. The actual values don't matter much.
+generator_default_variables = {
+ "CONFIGURATION_NAME": "$(BUILDTYPE)",
+ "EXECUTABLE_PREFIX": "",
+ "EXECUTABLE_SUFFIX": "",
+ "INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni",
+ "PRODUCT_DIR": "$(builddir)",
+ "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s",
+ "RULE_INPUT_EXT": "$(suffix $<)",
+ "RULE_INPUT_NAME": "$(notdir $<)",
+ "RULE_INPUT_PATH": "$(abspath $<)",
+ "RULE_INPUT_ROOT": "%(INPUT_ROOT)s",
+ "SHARED_INTERMEDIATE_DIR": "$(obj)/gen",
+ "SHARED_LIB_PREFIX": "lib",
+ "STATIC_LIB_PREFIX": "lib",
+ "STATIC_LIB_SUFFIX": ".a",
+}
+
+
+def IsMac(params):
+ return "mac" == gyp.common.GetFlavor(params)
+
+
+def CalculateVariables(default_variables, params):
+ default_variables.setdefault("OS", gyp.common.GetFlavor(params))
+
+
+def AddCommandsForTarget(cwd, target, params, per_config_commands):
+ output_dir = params["generator_flags"].get("output_dir", "out")
+ for configuration_name, configuration in target["configurations"].items():
+ if IsMac(params):
+ xcode_settings = gyp.xcode_emulation.XcodeSettings(target)
+ cflags = xcode_settings.GetCflags(configuration_name)
+ cflags_c = xcode_settings.GetCflagsC(configuration_name)
+ cflags_cc = xcode_settings.GetCflagsCC(configuration_name)
+ else:
+ cflags = configuration.get("cflags", [])
+ cflags_c = configuration.get("cflags_c", [])
+ cflags_cc = configuration.get("cflags_cc", [])
+
+ cflags_c = cflags + cflags_c
+ cflags_cc = cflags + cflags_cc
+
+ defines = configuration.get("defines", [])
+ defines = ["-D" + s for s in defines]
+
+ # TODO(bnoordhuis) Handle generated source files.
+ extensions = (".c", ".cc", ".cpp", ".cxx")
+ sources = [s for s in target.get("sources", []) if s.endswith(extensions)]
+
+ def resolve(filename):
+ return os.path.abspath(os.path.join(cwd, filename))
+
+ # TODO(bnoordhuis) Handle generated header files.
+ include_dirs = configuration.get("include_dirs", [])
+ include_dirs = [s for s in include_dirs if not s.startswith("$(obj)")]
+ includes = ["-I" + resolve(s) for s in include_dirs]
+
+ defines = gyp.common.EncodePOSIXShellList(defines)
+ includes = gyp.common.EncodePOSIXShellList(includes)
+ cflags_c = gyp.common.EncodePOSIXShellList(cflags_c)
+ cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc)
+
+ commands = per_config_commands.setdefault(configuration_name, [])
+ for source in sources:
+ file = resolve(source)
+ isc = source.endswith(".c")
+ cc = "cc" if isc else "c++"
+ cflags = cflags_c if isc else cflags_cc
+ command = " ".join(
+ (
+ cc,
+ defines,
+ includes,
+ cflags,
+ "-c",
+ gyp.common.EncodePOSIXShellArgument(file),
+ )
+ )
+ commands.append(dict(command=command, directory=output_dir, file=file))
+
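+# A resulting compile_commands.json entry looks roughly like this (paths,
+# flags, and defines are illustrative, not actual output):
+# {"command": "cc -DFOO -I/abs/include -O2 -c /abs/src/main.c",
+# "directory": "out",
+# "file": "/abs/src/main.c"}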
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ per_config_commands = {}
+ for qualified_target, target in target_dicts.items():
+ build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
+ qualified_target
+ )
+ if IsMac(params):
+ settings = data[build_file]
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target)
+ cwd = os.path.dirname(build_file)
+ AddCommandsForTarget(cwd, target, params, per_config_commands)
+
+ output_dir = params["generator_flags"].get("output_dir", "out")
+ for configuration_name, commands in per_config_commands.items():
+ filename = os.path.join(output_dir, configuration_name, "compile_commands.json")
+ gyp.common.EnsureDirExists(filename)
+ # Use a context manager so the file is always flushed and closed.
+ with open(filename, "w") as fp:
+     json.dump(commands, fp=fp, indent=0, check_circular=False)
+
+
+def PerformBuild(data, configurations, params):
+ pass
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
new file mode 100644
index 0000000..99d5c1f
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
@@ -0,0 +1,103 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import os
+import gyp
+import gyp.common
+import gyp.msvs_emulation
+import json
+
+generator_supports_multiple_toolsets = True
+
+generator_wants_static_library_dependencies_adjusted = False
+
+generator_filelist_paths = {}
+
+generator_default_variables = {}
+for dirname in [
+ "INTERMEDIATE_DIR",
+ "SHARED_INTERMEDIATE_DIR",
+ "PRODUCT_DIR",
+ "LIB_DIR",
+ "SHARED_LIB_DIR",
+]:
+ # Some gyp steps fail if these are empty(!).
+ generator_default_variables[dirname] = "dir"
+for unused in [
+ "RULE_INPUT_PATH",
+ "RULE_INPUT_ROOT",
+ "RULE_INPUT_NAME",
+ "RULE_INPUT_DIRNAME",
+ "RULE_INPUT_EXT",
+ "EXECUTABLE_PREFIX",
+ "EXECUTABLE_SUFFIX",
+ "STATIC_LIB_PREFIX",
+ "STATIC_LIB_SUFFIX",
+ "SHARED_LIB_PREFIX",
+ "SHARED_LIB_SUFFIX",
+ "CONFIGURATION_NAME",
+]:
+ generator_default_variables[unused] = ""
+
+
+def CalculateVariables(default_variables, params):
+ generator_flags = params.get("generator_flags", {})
+ for key, val in generator_flags.items():
+ default_variables.setdefault(key, val)
+ default_variables.setdefault("OS", gyp.common.GetFlavor(params))
+
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == "win":
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+
+
+def CalculateGeneratorInputInfo(params):
+ """Calculate the generator specific info that gets fed to input (called by
+ gyp)."""
+ generator_flags = params.get("generator_flags", {})
+ if generator_flags.get("adjust_static_libraries", False):
+ global generator_wants_static_library_dependencies_adjusted
+ generator_wants_static_library_dependencies_adjusted = True
+
+ toplevel = params["options"].toplevel_dir
+ generator_dir = os.path.relpath(params["options"].generator_output or ".")
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = generator_flags.get("output_dir", "out")
+ qualified_out_dir = os.path.normpath(
+ os.path.join(toplevel, generator_dir, output_dir, "gypfiles")
+ )
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ "toplevel": toplevel,
+ "qualified_out_dir": qualified_out_dir,
+ }
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ # Map of target -> list of targets it depends on.
+ edges = {}
+
+ # Queue of targets to visit.
+ targets_to_visit = target_list[:]
+
+ while len(targets_to_visit) > 0:
+ target = targets_to_visit.pop()
+ if target in edges:
+ continue
+ edges[target] = []
+
+ for dep in target_dicts[target].get("dependencies", []):
+ edges[target].append(dep)
+ targets_to_visit.append(dep)
+
+ try:
+ filepath = params["generator_flags"]["output_dir"]
+ except KeyError:
+ filepath = "."
+ filename = os.path.join(filepath, "dump.json")
+ with open(filename, "w") as f:
+     json.dump(edges, f)
+ print("Wrote json to %s." % filename)
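+
+# For reference, the dumped edges map each qualified target to its direct
+# dependencies, e.g. (target names illustrative):
+# {"foo.gyp:foo#target": ["bar.gyp:bar#target"], "bar.gyp:bar#target": []}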
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
new file mode 100644
index 0000000..1ff0dc8
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
@@ -0,0 +1,464 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""GYP backend that generates Eclipse CDT settings files.
+
+This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
+files that can be imported into an Eclipse CDT project. The XML file contains a
+list of include paths and symbols (i.e. defines).
+
+Because a full .cproject definition is not created by this generator, it's not
+possible to properly define the include dirs and symbols for each file
+individually. Instead, one set of includes/symbols is generated for the entire
+project. This works fairly well (and is a vast improvement in general), but may
+still result in a few indexer issues here and there.
+
+This generator has no automated tests, so expect it to be broken.
+"""
+
+from xml.sax.saxutils import escape
+import os.path
+import subprocess
+import gyp
+import gyp.common
+import gyp.msvs_emulation
+import shlex
+import xml.etree.ElementTree as ET  # cElementTree was removed in Python 3.9
+
+generator_wants_static_library_dependencies_adjusted = False
+
+generator_default_variables = {}
+
+for dirname in ["INTERMEDIATE_DIR", "PRODUCT_DIR", "LIB_DIR", "SHARED_LIB_DIR"]:
+ # Some gyp steps fail if these are empty(!), so we convert them to variables
+ generator_default_variables[dirname] = "$" + dirname
+
+for unused in [
+ "RULE_INPUT_PATH",
+ "RULE_INPUT_ROOT",
+ "RULE_INPUT_NAME",
+ "RULE_INPUT_DIRNAME",
+ "RULE_INPUT_EXT",
+ "EXECUTABLE_PREFIX",
+ "EXECUTABLE_SUFFIX",
+ "STATIC_LIB_PREFIX",
+ "STATIC_LIB_SUFFIX",
+ "SHARED_LIB_PREFIX",
+ "SHARED_LIB_SUFFIX",
+ "CONFIGURATION_NAME",
+]:
+ generator_default_variables[unused] = ""
+
+# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
+# part of the path when dealing with generated headers. This value will be
+# replaced dynamically for each configuration.
+generator_default_variables["SHARED_INTERMEDIATE_DIR"] = "$SHARED_INTERMEDIATE_DIR"
+
+
+def CalculateVariables(default_variables, params):
+ generator_flags = params.get("generator_flags", {})
+ for key, val in generator_flags.items():
+ default_variables.setdefault(key, val)
+ flavor = gyp.common.GetFlavor(params)
+ default_variables.setdefault("OS", flavor)
+ if flavor == "win":
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+
+
+def CalculateGeneratorInputInfo(params):
+ """Calculate the generator specific info that gets fed to input (called by
+ gyp)."""
+ generator_flags = params.get("generator_flags", {})
+ if generator_flags.get("adjust_static_libraries", False):
+ global generator_wants_static_library_dependencies_adjusted
+ generator_wants_static_library_dependencies_adjusted = True
+
+
+def GetAllIncludeDirectories(
+ target_list,
+ target_dicts,
+ shared_intermediate_dirs,
+ config_name,
+ params,
+ compiler_path,
+):
+ """Calculate the set of include directories to be used.
+
+ Returns:
+ A list including all the include_dir's specified for every target followed
+ by any include directories that were added as cflag compiler options.
+ """
+
+ gyp_includes_set = set()
+ compiler_includes_list = []
+
+ # Find compiler's default include dirs.
+ if compiler_path:
+ command = shlex.split(compiler_path)
+ command.extend(["-E", "-xc++", "-v", "-"])
+ proc = subprocess.Popen(
+ args=command,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+ output = proc.communicate()[1].decode("utf-8")
+ # Extract the list of include dirs from the output, which has this format:
+ # ...
+ # #include "..." search starts here:
+ # #include <...> search starts here:
+ # /usr/include/c++/4.6
+ # /usr/local/include
+ # End of search list.
+ # ...
+ in_include_list = False
+ for line in output.splitlines():
+ if line.startswith("#include"):
+ in_include_list = True
+ continue
+ if line.startswith("End of search list."):
+ break
+ if in_include_list:
+ include_dir = line.strip()
+ if include_dir not in compiler_includes_list:
+ compiler_includes_list.append(include_dir)
+
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == "win":
+ generator_flags = params.get("generator_flags", {})
+ for target_name in target_list:
+ target = target_dicts[target_name]
+ if config_name in target["configurations"]:
+ config = target["configurations"][config_name]
+
+ # Look for any include dirs that were explicitly added via cflags. This
+ # may be done in gyp files to force certain includes to come at the end.
+ # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
+ # remove this.
+ if flavor == "win":
+ msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
+ cflags = msvs_settings.GetCflags(config_name)
+ else:
+ cflags = config["cflags"]
+ for cflag in cflags:
+ if cflag.startswith("-I"):
+ include_dir = cflag[2:]
+ if include_dir not in compiler_includes_list:
+ compiler_includes_list.append(include_dir)
+
+ # Find standard gyp include dirs.
+ if "include_dirs" in config:
+ include_dirs = config["include_dirs"]
+ for shared_intermediate_dir in shared_intermediate_dirs:
+ for include_dir in include_dirs:
+ include_dir = include_dir.replace(
+ "$SHARED_INTERMEDIATE_DIR", shared_intermediate_dir
+ )
+ if not os.path.isabs(include_dir):
+ base_dir = os.path.dirname(target_name)
+
+ include_dir = base_dir + "/" + include_dir
+ include_dir = os.path.abspath(include_dir)
+
+ gyp_includes_set.add(include_dir)
+
+ # Generate a list that has all the include dirs.
+ all_includes_list = list(gyp_includes_set)
+ all_includes_list.sort()
+ for compiler_include in compiler_includes_list:
+ if compiler_include not in gyp_includes_set:
+ all_includes_list.append(compiler_include)
+
+ # All done.
+ return all_includes_list
+
+
+def GetCompilerPath(target_list, data, options):
+ """Determine a command that can be used to invoke the compiler.
+
+ Returns:
+ If this is a gyp project that has explicit make settings, try to determine
+ the compiler from that. Otherwise, see if a compiler was specified via the
+ CC_target environment variable.
+ """
+ # First, see if the compiler is configured in make's settings.
+ build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
+ make_global_settings_dict = data[build_file].get("make_global_settings", {})
+ for key, value in make_global_settings_dict:
+ if key in ["CC", "CXX"]:
+ return os.path.join(options.toplevel_dir, value)
+
+ # Check to see if the compiler was specified as an environment variable.
+ for key in ["CC_target", "CC", "CXX"]:
+ compiler = os.environ.get(key)
+ if compiler:
+ return compiler
+
+ return "gcc"
+
+
+def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path):
+ """Calculate the defines for a project.
+
+ Returns:
+ A dict that includes explicit defines declared in gyp files along with all
+ of the default defines that the compiler uses.
+ """
+
+ # Get defines declared in the gyp files.
+ all_defines = {}
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == "win":
+ generator_flags = params.get("generator_flags", {})
+ for target_name in target_list:
+ target = target_dicts[target_name]
+
+ if flavor == "win":
+ msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
+ extra_defines = msvs_settings.GetComputedDefines(config_name)
+ else:
+ extra_defines = []
+ if config_name in target["configurations"]:
+ config = target["configurations"][config_name]
+ target_defines = config["defines"]
+ else:
+ target_defines = []
+ for define in target_defines + extra_defines:
+ split_define = define.split("=", 1)
+ if len(split_define) == 1:
+ split_define.append("1")
+ if split_define[0].strip() in all_defines:
+ # Already defined
+ continue
+ all_defines[split_define[0].strip()] = split_define[1].strip()
+ # Get default compiler defines (if possible).
+ if flavor == "win":
+ return all_defines # Default defines already processed in the loop above.
+ if compiler_path:
+ command = shlex.split(compiler_path)
+ command.extend(["-E", "-dM", "-"])
+ cpp_proc = subprocess.Popen(
+ args=command, cwd=".", stdin=subprocess.PIPE, stdout=subprocess.PIPE
+ )
+ cpp_output = cpp_proc.communicate()[0].decode("utf-8")
+ cpp_lines = cpp_output.split("\n")
+ for cpp_line in cpp_lines:
+ if not cpp_line.strip():
+ continue
+ cpp_line_parts = cpp_line.split(" ", 2)
+ key = cpp_line_parts[1]
+ if len(cpp_line_parts) >= 3:
+ val = cpp_line_parts[2]
+ else:
+ val = "1"
+ all_defines[key] = val
+
+ return all_defines
+
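+# For reference, a -dM output line such as "#define __STDC__ 1" splits into
+# key "__STDC__" and value "1"; a bare "#define FOO" (no value) defaults to
+# "1" as well.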
+
+def WriteIncludePaths(out, eclipse_langs, include_dirs):
+ """Write the includes section of a CDT settings export file."""
+
+ out.write(
+ ' <section name="org.eclipse.cdt.internal.ui.wizards.'
+ 'settingswizards.IncludePaths">\n'
+ )
+ out.write(' <language name="holder for library settings"></language>\n')
+ for lang in eclipse_langs:
+ out.write(' <language name="%s">\n' % lang)
+ for include_dir in include_dirs:
+ out.write(
+ ' <includepath workspace_path="false">%s</includepath>\n'
+ % include_dir
+ )
+ out.write(" </language>\n")
+ out.write(" </section>\n")
+
+
+def WriteMacros(out, eclipse_langs, defines):
+ """Write the macros section of a CDT settings export file."""
+
+ out.write(
+ ' <section name="org.eclipse.cdt.internal.ui.wizards.'
+ 'settingswizards.Macros">\n'
+ )
+ out.write(' <language name="holder for library settings"></language>\n')
+ for lang in eclipse_langs:
+ out.write(' <language name="%s">\n' % lang)
+ for key in sorted(defines):
+ out.write(
+ " <macro><name>%s</name><value>%s</value></macro>\n"
+ % (escape(key), escape(defines[key]))
+ )
+ out.write(" </language>\n")
+ out.write(" </section>\n")
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name):
+ options = params["options"]
+ generator_flags = params.get("generator_flags", {})
+
+ # build_dir: relative path from source root to our output files.
+ # e.g. "out/Debug"
+ build_dir = os.path.join(generator_flags.get("output_dir", "out"), config_name)
+
+ toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+ # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
+ # SHARED_INTERMEDIATE_DIR. Include both possible locations.
+ shared_intermediate_dirs = [
+ os.path.join(toplevel_build, "obj", "gen"),
+ os.path.join(toplevel_build, "gen"),
+ ]
+
+ GenerateCdtSettingsFile(
+ target_list,
+ target_dicts,
+ data,
+ params,
+ config_name,
+ os.path.join(toplevel_build, "eclipse-cdt-settings.xml"),
+ options,
+ shared_intermediate_dirs,
+ )
+ GenerateClasspathFile(
+ target_list,
+ target_dicts,
+ options.toplevel_dir,
+ toplevel_build,
+ os.path.join(toplevel_build, "eclipse-classpath.xml"),
+ )
+
+
+def GenerateCdtSettingsFile(
+ target_list,
+ target_dicts,
+ data,
+ params,
+ config_name,
+ out_name,
+ options,
+ shared_intermediate_dirs,
+):
+ gyp.common.EnsureDirExists(out_name)
+ with open(out_name, "w") as out:
+ out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
+ out.write("<cdtprojectproperties>\n")
+
+ eclipse_langs = [
+ "C++ Source File",
+ "C Source File",
+ "Assembly Source File",
+ "GNU C++",
+ "GNU C",
+ "Assembly",
+ ]
+ compiler_path = GetCompilerPath(target_list, data, options)
+ include_dirs = GetAllIncludeDirectories(
+ target_list,
+ target_dicts,
+ shared_intermediate_dirs,
+ config_name,
+ params,
+ compiler_path,
+ )
+ WriteIncludePaths(out, eclipse_langs, include_dirs)
+ defines = GetAllDefines(
+ target_list, target_dicts, data, config_name, params, compiler_path
+ )
+ WriteMacros(out, eclipse_langs, defines)
+
+ out.write("</cdtprojectproperties>\n")
+
+
+def GenerateClasspathFile(
+ target_list, target_dicts, toplevel_dir, toplevel_build, out_name
+):
+ """Generates a classpath file suitable for symbol navigation and code
+ completion of Java code (such as in Android projects) by finding all
+ .java and .jar files used as action inputs."""
+ gyp.common.EnsureDirExists(out_name)
+ result = ET.Element("classpath")
+
+ def AddElements(kind, paths):
+ # First, we need to normalize the paths so they are all relative to the
+ # toplevel dir.
+ rel_paths = set()
+ for path in paths:
+ if os.path.isabs(path):
+ rel_paths.add(os.path.relpath(path, toplevel_dir))
+ else:
+ rel_paths.add(path)
+
+ for path in sorted(rel_paths):
+ entry_element = ET.SubElement(result, "classpathentry")
+ entry_element.set("kind", kind)
+ entry_element.set("path", path)
+
+ AddElements("lib", GetJavaJars(target_list, target_dicts, toplevel_dir))
+ AddElements("src", GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
+ # Include the standard JRE container and a dummy out folder
+ AddElements("con", ["org.eclipse.jdt.launching.JRE_CONTAINER"])
+ # Include a dummy out folder so that Eclipse doesn't use the default /bin
+ # folder in the root of the project.
+ AddElements("output", [os.path.join(toplevel_build, ".eclipse-java-build")])
+
+ ET.ElementTree(result).write(out_name)
+
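+# The generated classpath file looks roughly like this (paths illustrative):
+# <classpath>
+# <classpathentry kind="lib" path="third_party/foo.jar"/>
+# <classpathentry kind="src" path="java/src"/>
+# <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+# <classpathentry kind="output" path="out/Debug/.eclipse-java-build"/>
+# </classpath>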
+
+def GetJavaJars(target_list, target_dicts, toplevel_dir):
+ """Generates a sequence of all .jars used as inputs."""
+ for target_name in target_list:
+ target = target_dicts[target_name]
+ for action in target.get("actions", []):
+ for input_ in action["inputs"]:
+ if os.path.splitext(input_)[1] == ".jar" and not input_.startswith("$"):
+ if os.path.isabs(input_):
+ yield input_
+ else:
+ yield os.path.join(os.path.dirname(target_name), input_)
+
+
+def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
+ """Generates a sequence of all likely java package root directories."""
+ for target_name in target_list:
+ target = target_dicts[target_name]
+ for action in target.get("actions", []):
+ for input_ in action["inputs"]:
+ if os.path.splitext(input_)[1] == ".java" and not input_.startswith(
+ "$"
+ ):
+ dir_ = os.path.dirname(
+ os.path.join(os.path.dirname(target_name), input_)
+ )
+ # If there is a parent 'src' or 'java' folder, navigate up to it -
+ # these are canonical package root names in Chromium. This will
+ # break if 'src' or 'java' exists in the package structure. This
+ # could be further improved by inspecting the java file for the
+ # package name if this proves to be too fragile in practice.
+ parent_search = dir_
+ while os.path.basename(parent_search) not in ["src", "java"]:
+ parent_search, _ = os.path.split(parent_search)
+ if not parent_search or parent_search == toplevel_dir:
+ # Didn't find a known root, just return the original path
+ yield dir_
+ break
+ else:
+ yield parent_search
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ """Generate an XML settings file that can be imported into a CDT project."""
+
+ if params["options"].generator_output:
+ raise NotImplementedError("--generator_output not implemented for eclipse")
+
+ user_config = params.get("generator_flags", {}).get("config", None)
+ if user_config:
+ GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
+ else:
+ config_names = target_dicts[target_list[0]]["configurations"]
+ for config_name in config_names:
+ GenerateOutputForConfig(
+ target_list, target_dicts, data, params, config_name
+ )
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
new file mode 100644
index 0000000..4171704
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
@@ -0,0 +1,89 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""gypd output module
+
+This module produces gyp input as its output. Output files are given the
+.gypd extension to avoid overwriting the .gyp files that they are generated
+from. Internal references to .gyp files (such as those found in
+"dependencies" sections) are not adjusted to point to .gypd files instead;
+unlike other paths, which are relative to the .gyp or .gypd file, such paths
+are relative to the directory from which gyp was run to create the .gypd file.
+
+This generator module is intended to be a sample and a debugging aid, hence
+the "d" for "debug" in .gypd. It is useful to inspect the results of the
+various merges, expansions, and conditional evaluations performed by gyp
+and to see a representation of what would be fed to a generator module.
+
+It's not advisable to rename .gypd files produced by this module to .gyp,
+because they will have all merges, expansions, and evaluations already
+performed and the relevant constructs not present in the output; paths to
+dependencies may be wrong; and various sections that do not belong in .gyp
+files such as "included_files" and "*_excluded" will be present.
+Output will also be stripped of comments. This is not intended to be a
+general-purpose gyp pretty-printer; for that, you probably just want to
+run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
+comments but won't do all of the other things done to this module's output.
+
+The specific formatting of the output generated by this module is subject
+to change.
+"""
+
+
+import gyp.common
+import pprint
+
+
+# These variables should just be spit back out as variable references.
+_generator_identity_variables = [
+ "CONFIGURATION_NAME",
+ "EXECUTABLE_PREFIX",
+ "EXECUTABLE_SUFFIX",
+ "INTERMEDIATE_DIR",
+ "LIB_DIR",
+ "PRODUCT_DIR",
+ "RULE_INPUT_ROOT",
+ "RULE_INPUT_DIRNAME",
+ "RULE_INPUT_EXT",
+ "RULE_INPUT_NAME",
+ "RULE_INPUT_PATH",
+ "SHARED_INTERMEDIATE_DIR",
+ "SHARED_LIB_DIR",
+ "SHARED_LIB_PREFIX",
+ "SHARED_LIB_SUFFIX",
+ "STATIC_LIB_PREFIX",
+ "STATIC_LIB_SUFFIX",
+]
+
+# gypd doesn't define a default value for OS like many other generator
+# modules. Specify "-D OS=whatever" on the command line to provide a value.
+generator_default_variables = {}
+
+# gypd supports multiple toolsets
+generator_supports_multiple_toolsets = True
+
+# TODO(mark): This always uses <, which isn't right. The input module should
+# notify the generator to tell it which phase it is operating in, and this
+# module should use < for the early phase and then switch to > for the late
+# phase. Bonus points for carrying @ back into the output too.
+for v in _generator_identity_variables:
+ generator_default_variables[v] = "<(%s)" % v
+
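+# E.g., generator_default_variables["PRODUCT_DIR"] becomes "<(PRODUCT_DIR)",
+# so expansions of these variables are echoed back into the .gypd output as
+# references rather than concrete values.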
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ output_files = {}
+ for qualified_target in target_list:
+ [input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
+
+ if input_file[-4:] != ".gyp":
+ continue
+ input_file_stem = input_file[:-4]
+ output_file = input_file_stem + params["options"].suffix + ".gypd"
+
+ output_files[output_file] = output_files.get(output_file, input_file)
+
+ for output_file, input_file in output_files.items():
+ output = open(output_file, "w")
+ pprint.pprint(data[input_file], output)
+ output.close()
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
new file mode 100644
index 0000000..82a07dd
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""gypsh output module
+
+gypsh is a GYP shell. It's not really a generator per se. All it does is
+fire up an interactive Python session with a few local variables set to the
+variables passed to the generator. Like gypd, it's intended as a debugging
+aid, to facilitate the exploration of .gyp structures after being processed
+by the input module.
+
+The expected usage is "gyp -f gypsh -D OS=desired_os".
+"""
+
+
+import code
+import sys
+
+
+# All of this stuff about generator variables was lovingly ripped from gypd.py.
+# That module has a much better description of what's going on and why.
+_generator_identity_variables = [
+ "EXECUTABLE_PREFIX",
+ "EXECUTABLE_SUFFIX",
+ "INTERMEDIATE_DIR",
+ "PRODUCT_DIR",
+ "RULE_INPUT_ROOT",
+ "RULE_INPUT_DIRNAME",
+ "RULE_INPUT_EXT",
+ "RULE_INPUT_NAME",
+ "RULE_INPUT_PATH",
+ "SHARED_INTERMEDIATE_DIR",
+]
+
+generator_default_variables = {}
+
+for v in _generator_identity_variables:
+ generator_default_variables[v] = "<(%s)" % v
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ locals = {
+ "target_list": target_list,
+ "target_dicts": target_dicts,
+ "data": data,
+ }
+
+ # Use a banner that looks like the stock Python one and like what
+ # code.interact uses by default, but tack on something to indicate what
+ # locals are available, and identify gypsh.
+ banner = "Python {} on {}\nlocals.keys() = {}\ngypsh".format(
+ sys.version,
+ sys.platform,
+ repr(sorted(locals.keys())),
+ )
+
+ code.interact(banner, local=locals)
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
new file mode 100644
index 0000000..f1d01a6
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
@@ -0,0 +1,2717 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Notes:
+#
+# This is all roughly based on the Makefile system used by the Linux
+# kernel, but is a non-recursive make -- we put the entire dependency
+# graph in front of make and let it figure it out.
+#
+# The code below generates a separate .mk file for each target, but
+# all are sourced by the top-level Makefile. This means that all
+# variables in .mk-files clobber one another. Be careful to use :=
+# where appropriate for immediate evaluation, and similarly to watch
+# that you're not relying on a variable value to last between different
+# .mk files.
+#
+# TODOs:
+#
+# Global settings and utility functions are currently stuffed in the
+# toplevel Makefile. It may make sense to generate some .mk files on
+# the side to keep the files readable.
+
+
+import os
+import re
+import subprocess
+import gyp
+import gyp.common
+import gyp.xcode_emulation
+from gyp.common import GetEnvironFallback
+
+import hashlib
+
+generator_default_variables = {
+ "EXECUTABLE_PREFIX": "",
+ "EXECUTABLE_SUFFIX": "",
+ "STATIC_LIB_PREFIX": "lib",
+ "SHARED_LIB_PREFIX": "lib",
+ "STATIC_LIB_SUFFIX": ".a",
+ "INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni",
+ "SHARED_INTERMEDIATE_DIR": "$(obj)/gen",
+ "PRODUCT_DIR": "$(builddir)",
+ "RULE_INPUT_ROOT": "%(INPUT_ROOT)s", # This gets expanded by Python.
+ "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s", # This gets expanded by Python.
+ "RULE_INPUT_PATH": "$(abspath $<)",
+ "RULE_INPUT_EXT": "$(suffix $<)",
+ "RULE_INPUT_NAME": "$(notdir $<)",
+ "CONFIGURATION_NAME": "$(BUILDTYPE)",
+}
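+# E.g., for a hypothetical target "foo" built with the "target" toolset into
+# out/Debug, INTERMEDIATE_DIR expands to out/Debug/obj.target/foo/geni.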
+
+# Make supports multiple toolsets
+generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
+
+# Request sorted dependencies in the order from dependents to dependencies.
+generator_wants_sorted_dependencies = False
+
+# Placates pylint.
+generator_additional_non_configuration_keys = []
+generator_additional_path_sections = []
+generator_extra_sources_for_rules = []
+generator_filelist_paths = None
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == "mac":
+ default_variables.setdefault("OS", "mac")
+ default_variables.setdefault("SHARED_LIB_SUFFIX", ".dylib")
+ default_variables.setdefault(
+ "SHARED_LIB_DIR", generator_default_variables["PRODUCT_DIR"]
+ )
+ default_variables.setdefault(
+ "LIB_DIR", generator_default_variables["PRODUCT_DIR"]
+ )
+
+ # Copy additional generator configuration data from Xcode, which is shared
+ # by the Mac Make generator.
+ import gyp.generator.xcode as xcode_generator
+
+ global generator_additional_non_configuration_keys
+ generator_additional_non_configuration_keys = getattr(
+ xcode_generator, "generator_additional_non_configuration_keys", []
+ )
+ global generator_additional_path_sections
+ generator_additional_path_sections = getattr(
+ xcode_generator, "generator_additional_path_sections", []
+ )
+ global generator_extra_sources_for_rules
+ generator_extra_sources_for_rules = getattr(
+ xcode_generator, "generator_extra_sources_for_rules", []
+ )
+ COMPILABLE_EXTENSIONS.update({".m": "objc", ".mm": "objcxx"})
+ else:
+ operating_system = flavor
+ if flavor == "android":
+ operating_system = "linux" # Keep this legacy behavior for now.
+ default_variables.setdefault("OS", operating_system)
+ if flavor == "aix":
+ default_variables.setdefault("SHARED_LIB_SUFFIX", ".a")
+ elif flavor == "zos":
+ default_variables.setdefault("SHARED_LIB_SUFFIX", ".x")
+ COMPILABLE_EXTENSIONS.update({".pli": "pli"})
+ else:
+ default_variables.setdefault("SHARED_LIB_SUFFIX", ".so")
+ default_variables.setdefault("SHARED_LIB_DIR", "$(builddir)/lib.$(TOOLSET)")
+ default_variables.setdefault("LIB_DIR", "$(obj).$(TOOLSET)")
+
+
+def CalculateGeneratorInputInfo(params):
+ """Calculate the generator specific info that gets fed to input (called by
+ gyp)."""
+ generator_flags = params.get("generator_flags", {})
+ android_ndk_version = generator_flags.get("android_ndk_version", None)
+ # Android NDK requires a strict link order.
+ if android_ndk_version:
+ global generator_wants_sorted_dependencies
+ generator_wants_sorted_dependencies = True
+
+ output_dir = params["options"].generator_output or params["options"].toplevel_dir
+ builddir_name = generator_flags.get("output_dir", "out")
+ qualified_out_dir = os.path.normpath(
+ os.path.join(output_dir, builddir_name, "gypfiles")
+ )
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ "toplevel": params["options"].toplevel_dir,
+ "qualified_out_dir": qualified_out_dir,
+ }
+
+
+# The .d checking code below uses these functions:
+# wildcard, sort, foreach, shell, wordlist
+# wildcard can handle spaces, the rest can't.
+# Since I could find no way to make foreach work with spaces in filenames
+# correctly, the .d files have spaces replaced with another character. The .d
+# file for
+# Chromium\ Framework.framework/foo
+# is for example
+# out/Release/.deps/out/Release/Chromium?Framework.framework/foo
+# This is the replacement character.
+SPACE_REPLACEMENT = "?"
+
+
+LINK_COMMANDS_LINUX = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
+
+# Due to circular dependencies between libraries :(, we wrap the
+# special "figure out circular dependencies" flags around the entire
+# input list during linking.
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) -o $@ $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,--start-group $(LD_INPUTS) $(LIBS) -Wl,--end-group
+
+# Note: this does not handle spaces in paths
+define xargs
+ $(1) $(word 1,$(2))
+$(if $(word 2,$(2)),$(call xargs,$(1),$(wordlist 2,$(words $(2)),$(2))))
+endef
+
+define write-to-file
+ @: >$(1)
+$(call xargs,@printf "%s\\n" >>$(1),$(2))
+endef
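+# Example (illustrative): $(call write-to-file,objs.txt,a.o b.o) truncates
+# objs.txt and then appends "a.o" and "b.o", one word per line.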
+
+OBJ_FILE_LIST := ar-file-list
+
+define create_archive
+ rm -f $(1) $(1).$(OBJ_FILE_LIST); mkdir -p `dirname $(1)`
+ $(call write-to-file,$(1).$(OBJ_FILE_LIST),$(filter %.o,$(2)))
+ $(AR.$(TOOLSET)) crs $(1) @$(1).$(OBJ_FILE_LIST)
+endef
+
+define create_thin_archive
+ rm -f $(1) $(OBJ_FILE_LIST); mkdir -p `dirname $(1)`
+ $(call write-to-file,$(1).$(OBJ_FILE_LIST),$(filter %.o,$(2)))
+ $(AR.$(TOOLSET)) crsT $(1) @$(1).$(OBJ_FILE_LIST)
+endef
+
+# We support two kinds of shared objects (.so):
+# 1) shared_library, which is just bundling together many dependent libraries
+# into a link line.
+# 2) loadable_module, which is generating a module intended for dlopen().
+#
+# They differ only slightly:
+# In the former case, we want to package all dependent code into the .so.
+# In the latter case, we want to package just the API exposed by the
+# outermost module.
+# This means shared_library uses --whole-archive, while loadable_module doesn't.
+# (Note that --whole-archive is incompatible with the --start-group used in
+# normal linking.)
+
+# Other shared-object link notes:
+# - Set SONAME to the library filename so our binaries don't reference
+# the local, absolute paths used on the link command-line.
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -o $@ -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -o $@ -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
+""" # noqa: E501
+
+LINK_COMMANDS_MAC = """\
+quiet_cmd_alink = LIBTOOL-STATIC $@
+cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+""" # noqa: E501
+
+LINK_COMMANDS_ANDROID = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
+
+# Note: this does not handle spaces in paths
+define xargs
+ $(1) $(word 1,$(2))
+$(if $(word 2,$(2)),$(call xargs,$(1),$(wordlist 2,$(words $(2)),$(2))))
+endef
+
+define write-to-file
+ @: >$(1)
+$(call xargs,@printf "%s\\n" >>$(1),$(2))
+endef
+
+OBJ_FILE_LIST := ar-file-list
+
+define create_archive
+ rm -f $(1) $(1).$(OBJ_FILE_LIST); mkdir -p `dirname $(1)`
+ $(call write-to-file,$(1).$(OBJ_FILE_LIST),$(filter %.o,$(2)))
+ $(AR.$(TOOLSET)) crs $(1) @$(1).$(OBJ_FILE_LIST)
+endef
+
+define create_thin_archive
+ rm -f $(1) $(OBJ_FILE_LIST); mkdir -p `dirname $(1)`
+ $(call write-to-file,$(1).$(OBJ_FILE_LIST),$(filter %.o,$(2)))
+ $(AR.$(TOOLSET)) crsT $(1) @$(1).$(OBJ_FILE_LIST)
+endef
+
+# Due to circular dependencies between libraries :(, we wrap the
+# special "figure out circular dependencies" flags around the entire
+# input list during linking.
+quiet_cmd_link = LINK($(TOOLSET)) $@
+quiet_cmd_link_host = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
+cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
+
+# Other shared-object link notes:
+# - Set SONAME to the library filename so our binaries don't reference
+# the local, absolute paths used on the link command-line.
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
+quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+""" # noqa: E501
+
+
+LINK_COMMANDS_AIX = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+""" # noqa: E501
+
+
+LINK_COMMANDS_OS400 = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X64 crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X64 crs $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+""" # noqa: E501
+
+
+LINK_COMMANDS_OS390 = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+""" # noqa: E501
+
+
+# Header of toplevel Makefile.
+# This should go into the build tree, but it's easier to keep it here for now.
+SHARED_HEADER = (
+ """\
+# We borrow heavily from the kernel build setup, though we are simpler since
+# we don't have Kconfig tweaking settings on us.
+
+# The implicit make rules have it looking for RCS files, among other things.
+# We instead explicitly write all the rules we care about.
+# It's even quicker (saves ~200ms) to pass -r on the command line.
+MAKEFLAGS=-r
+
+# The source directory tree.
+srcdir := %(srcdir)s
+abs_srcdir := $(abspath $(srcdir))
+
+# The name of the builddir.
+builddir_name ?= %(builddir)s
+
+# The V=1 flag on command line makes us verbosely print command lines.
+ifdef V
+ quiet=
+else
+ quiet=quiet_
+endif
+
+# Specify BUILDTYPE=Release on the command line for a release build.
+BUILDTYPE ?= %(default_configuration)s
+
+# Directory all our build output goes into.
+# Note that this must be two directories beneath src/ for unit tests to pass,
+# as they reach into the src/ directory for data with relative paths.
+builddir ?= $(builddir_name)/$(BUILDTYPE)
+abs_builddir := $(abspath $(builddir))
+depsdir := $(builddir)/.deps
+
+# Object output directory.
+obj := $(builddir)/obj
+abs_obj := $(abspath $(obj))
+
+# We build up a list of every single one of the targets so we can slurp in the
+# generated dependency rule Makefiles in one pass.
+all_deps :=
+
+%(make_global_settings)s
+
+CC.target ?= %(CC.target)s
+CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
+CXX.target ?= %(CXX.target)s
+CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
+LINK.target ?= %(LINK.target)s
+LDFLAGS.target ?= $(LDFLAGS)
+AR.target ?= $(AR)
+PLI.target ?= %(PLI.target)s
+
+# C++ apps need to be linked with g++.
+LINK ?= $(CXX.target)
+
+# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
+# to replicate this environment fallback in make as well.
+CC.host ?= %(CC.host)s
+CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
+CXX.host ?= %(CXX.host)s
+CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
+LINK.host ?= %(LINK.host)s
+LDFLAGS.host ?= $(LDFLAGS_host)
+AR.host ?= %(AR.host)s
+PLI.host ?= %(PLI.host)s
+
+# Define a dir function that can handle spaces.
+# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
+# "leading spaces cannot appear in the text of the first argument as written.
+# These characters can be put into the argument value by variable substitution."
+empty :=
+space := $(empty) $(empty)
+
+# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
+replace_spaces = $(subst $(space),"""
+ + SPACE_REPLACEMENT
+ + """,$1)
+unreplace_spaces = $(subst """
+ + SPACE_REPLACEMENT
+ + """,$(space),$1)
+dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
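+# E.g., $(call dirx,out/My App/foo) temporarily rewrites the space so $(dir)
+# sees a single word, yielding "out/My App/" (path illustrative).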
+
+# Flags to make gcc output dependency info. Note that you need to be
+# careful here to use the flags that ccache and distcc can understand.
+# We write to a dep file on the side first and then rename at the end
+# so we can't end up with a broken dep file.
+depfile = $(depsdir)/$(call replace_spaces,$@).d
+DEPFLAGS = %(makedep_args)s -MF $(depfile).raw
+
+# We have to fixup the deps output in a few ways.
+# (1) the file output should mention the proper .o file.
+# ccache or distcc lose the path to the target, so we convert a rule of
+# the form:
+# foobar.o: DEP1 DEP2
+# into
+# path/to/foobar.o: DEP1 DEP2
+# (2) we want missing files not to cause us to fail to build.
+# We want to rewrite
+# foobar.o: DEP1 DEP2 \\
+# DEP3
+# to
+# DEP1:
+# DEP2:
+# DEP3:
+# so if the files are missing, they're just considered phony rules.
+# We have to do some pretty insane escaping to get those backslashes
+# and dollar signs past make, the shell, and sed at the same time.
+# Doesn't work with spaces, but that's fine: .d files have spaces in
+# their names replaced with other characters."""
+ r"""
+define fixup_dep
+# The depfile may not exist if the input file didn't have any #includes.
+touch $(depfile).raw
+# Fixup path as in (1).
+sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
+# Add extra rules as in (2).
+# We remove slashes and replace spaces with new lines;
+# remove blank lines;
+# delete the first line and append a colon to the remaining lines.
+sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
+ grep -v '^$$' |\
+ sed -e 1d -e 's|$$|:|' \
+ >> $(depfile)
+rm $(depfile).raw
+endef
+"""
+ """
+# Command definitions:
+# - cmd_foo is the actual command to run;
+# - quiet_cmd_foo is the brief-output summary of the command.
+
+quiet_cmd_cc = CC($(TOOLSET)) $@
+cmd_cc = $(CC.$(TOOLSET)) -o $@ $< $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c
+
+quiet_cmd_cxx = CXX($(TOOLSET)) $@
+cmd_cxx = $(CXX.$(TOOLSET)) -o $@ $< $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c
+%(extra_commands)s
+quiet_cmd_touch = TOUCH $@
+cmd_touch = touch $@
+
+quiet_cmd_copy = COPY $@
+# send stderr to /dev/null to ignore messages when linking directories.
+cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@")
+
+quiet_cmd_symlink = SYMLINK $@
+cmd_symlink = ln -sf "$<" "$@"
+
+%(link_commands)s
+""" # noqa: E501
+ r"""
+# Define an escape_quotes function to escape single quotes.
+# This allows us to handle quotes properly as long as we always use
+# single quotes and escape_quotes.
+escape_quotes = $(subst ','\'',$(1))
+# This comment is here just to include a ' to unconfuse syntax highlighting.
+# Define an escape_vars function to escape '$' variable syntax.
+# This allows us to read/write command lines with shell variables (e.g.
+# $LD_LIBRARY_PATH), without triggering make substitution.
+escape_vars = $(subst $$,$$$$,$(1))
+# Helper that expands to a shell command to echo a string exactly as it is in
+# make. This uses printf instead of echo because printf's behaviour with respect
+# to escape sequences is more portable than echo's across different shells
+# (e.g., dash, bash).
+exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
+"""
+ """
+# Helper to compare the command we're about to run against the command
+# we logged the last time we ran the command. Produces an empty
+# string (false) when the commands match.
+# Tricky point: Make has no string-equality test function.
+# The kernel uses the following, but it seems like it would have false
+# positives, where one string reordered its arguments.
+# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
+# $(filter-out $(cmd_$@), $(cmd_$(1))))
+# We instead substitute each for the empty string into the other, and
+# say they're equal if both substitutions produce the empty string.
+# .d files contain """
+ + SPACE_REPLACEMENT
+ + """ instead of spaces, take that into account.
+command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
+ $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
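+# Illustrative: when the logged command and the current command are the same
+# string, both $(subst ...) calls above produce the empty string, so
+# command_changed is empty (false) and do_cmd can skip re-running the rule.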
+
+# Helper that is non-empty when a prerequisite changes.
+# Normally make does this implicitly, but we force rules to always run
+# so we can check their command lines.
+# $? -- new prerequisites
+# $| -- order-only dependencies
+prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
+
+# Helper that executes all postbuilds until one fails.
+define do_postbuilds
+ @E=0;\\
+ for p in $(POSTBUILDS); do\\
+ eval $$p;\\
+ E=$$?;\\
+ if [ $$E -ne 0 ]; then\\
+ break;\\
+ fi;\\
+ done;\\
+ if [ $$E -ne 0 ]; then\\
+ rm -rf "$@";\\
+ exit $$E;\\
+ fi
+endef
+
+# do_cmd: run a command via the above cmd_foo names, if necessary.
+# Should always run for a given target to handle command-line changes.
+# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
+# Third argument, if non-zero, makes it do POSTBUILDS processing.
+# Note: We intentionally do NOT call dirx for depfile, since it contains """
+ + SPACE_REPLACEMENT
+ + """ for
+# spaces already and dirx strips the """
+ + SPACE_REPLACEMENT
+ + """ characters.
+define do_cmd
+$(if $(or $(command_changed),$(prereq_changed)),
+ @$(call exact_echo, $($(quiet)cmd_$(1)))
+ @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
+ $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))),
+ @$(cmd_$(1))
+ @echo " $(quiet_cmd_$(1)): Finished",
+ @$(cmd_$(1))
+ )
+ @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
+ @$(if $(2),$(fixup_dep))
+ $(if $(and $(3), $(POSTBUILDS)),
+ $(call do_postbuilds)
+ )
+)
+endef
+
+# Declare the "%(default_target)s" target first so it is the default,
+# even though we don't have the deps yet.
+.PHONY: %(default_target)s
+%(default_target)s:
+
+# make looks for ways to re-generate included makefiles, but in our case, we
+# don't have a direct way. Explicitly telling make that it has nothing to do
+# for them makes it go faster.
+%%.d: ;
+
+# Use FORCE_DO_CMD to force a target to run. Should be coupled with
+# do_cmd.
+.PHONY: FORCE_DO_CMD
+FORCE_DO_CMD:
+
+""" # noqa: E501
+)
+
+SHARED_HEADER_MAC_COMMANDS = """
+quiet_cmd_objc = CXX($(TOOLSET)) $@
+cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+quiet_cmd_objcxx = CXX($(TOOLSET)) $@
+cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+# Commands for precompiled header files.
+quiet_cmd_pch_c = CXX($(TOOLSET)) $@
+cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
+cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+quiet_cmd_pch_m = CXX($(TOOLSET)) $@
+cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
+quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
+cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+# gyp-mac-tool is written next to the root Makefile by gyp.
+# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
+# already.
+quiet_cmd_mac_tool = MACTOOL $(4) $<
+cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
+
+quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
+cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
+
+quiet_cmd_infoplist = INFOPLIST $@
+cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
+""" # noqa: E501
+
+
+def WriteRootHeaderSuffixRules(writer):
+ extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower)
+
+ writer.write("# Suffix rules, putting all outputs into $(obj).\n")
+ for ext in extensions:
+ writer.write("$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n" % ext)
+ writer.write("\t@$(call do_cmd,%s,1)\n" % COMPILABLE_EXTENSIONS[ext])
+
+ writer.write("\n# Try building from generated source, too.\n")
+ for ext in extensions:
+ writer.write(
+ "$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n" % ext
+ )
+ writer.write("\t@$(call do_cmd,%s,1)\n" % COMPILABLE_EXTENSIONS[ext])
+ writer.write("\n")
+ for ext in extensions:
+ writer.write("$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n" % ext)
+ writer.write("\t@$(call do_cmd,%s,1)\n" % COMPILABLE_EXTENSIONS[ext])
+ writer.write("\n")
+
+
+SHARED_HEADER_OS390_COMMANDS = """
+PLIFLAGS.target ?= -qlp=64 -qlimits=extname=31 $(PLIFLAGS)
+PLIFLAGS.host ?= -qlp=64 -qlimits=extname=31 $(PLIFLAGS)
+
+quiet_cmd_pli = PLI($(TOOLSET)) $@
+cmd_pli = $(PLI.$(TOOLSET)) $(GYP_PLIFLAGS) $(PLIFLAGS.$(TOOLSET)) -c $< && \
+ if [ -f $(notdir $@) ]; then /bin/cp $(notdir $@) $@; else true; fi
+"""
+
+SHARED_HEADER_SUFFIX_RULES_COMMENT1 = """\
+# Suffix rules, putting all outputs into $(obj).
+"""
+
+
+SHARED_HEADER_SUFFIX_RULES_COMMENT2 = """\
+# Try building from generated source, too.
+"""
+
+
+SHARED_FOOTER = """\
+# "all" is a concatenation of the "all" targets from all the included
+# sub-makefiles. This is just here to clarify.
+all:
+
+# Add in dependency-tracking rules. $(all_deps) is the list of every single
+# target in our tree. Only consider the ones with .d (dependency) info:
+d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
+ifneq ($(d_files),)
+ include $(d_files)
+endif
+"""
+
+header = """\
+# This file is generated by gyp; do not edit.
+
+"""
+
+# Maps every compilable file extension to the do_cmd that compiles it.
+COMPILABLE_EXTENSIONS = {
+ ".c": "cc",
+ ".cc": "cxx",
+ ".cpp": "cxx",
+ ".cxx": "cxx",
+ ".s": "cc",
+ ".S": "cc",
+}
+
+
+def Compilable(filename):
+    """Return true if the file is compilable (should be in OBJS)."""
+    return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
+
+
+def Linkable(filename):
+ """Return true if the file is linkable (should be on the link line)."""
+ return filename.endswith(".o")
+
+
+def Target(filename):
+ """Translate a compilable filename to its .o target."""
+ return os.path.splitext(filename)[0] + ".o"
+
+
+def EscapeShellArgument(s):
+ """Quotes an argument so that it will be interpreted literally by a POSIX
+ shell. Taken from
+ http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
+ """
+ return "'" + s.replace("'", "'\\''") + "'"
+
+
+def EscapeMakeVariableExpansion(s):
+ """Make has its own variable expansion syntax using $. We must escape it for
+ string to be interpreted literally."""
+ return s.replace("$", "$$")
+
+
+def EscapeCppDefine(s):
+ """Escapes a CPP define so that it will reach the compiler unaltered."""
+ s = EscapeShellArgument(s)
+ s = EscapeMakeVariableExpansion(s)
+ # '#' characters must be escaped even embedded in a string, else Make will
+ # treat it as the start of a comment.
+ return s.replace("#", r"\#")
+
+
+def QuoteIfNecessary(string):
+ """TODO: Should this ideally be replaced with one or more of the above
+ functions?"""
+ if '"' in string:
+ string = '"' + string.replace('"', '\\"') + '"'
+ return string
+
+
+def StringToMakefileVariable(string):
+ """Convert a string to a value that is acceptable as a make variable name."""
+ return re.sub("[^a-zA-Z0-9_]", "_", string)
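+# E.g. StringToMakefileVariable("my target (host)") -> "my_target__host_"
+# (illustrative name).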
+
+
+srcdir_prefix = ""
+
+
+def Sourceify(path):
+ """Convert a path to its source directory form."""
+ if "$(" in path:
+ return path
+ if os.path.isabs(path):
+ return path
+ return srcdir_prefix + path
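+# E.g., assuming srcdir_prefix has been set to "$(srcdir)/" elsewhere,
+# Sourceify("foo/bar.c") -> "$(srcdir)/foo/bar.c" (illustrative); absolute
+# paths and paths containing "$(" pass through unchanged.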
+
+
+def QuoteSpaces(s, quote=r"\ "):
+ return s.replace(" ", quote)
+
+
+def SourceifyAndQuoteSpaces(path):
+ """Convert a path to its source directory form and quote spaces."""
+ return QuoteSpaces(Sourceify(path))
+
+
+# Map from qualified target to path to output.
+target_outputs = {}
+# Map from qualified target to any linkable output. A subset
+# of target_outputs. E.g. when mybinary depends on liba, we want to
+# include liba in the linker line; when otherbinary depends on
+# mybinary, we just want to build mybinary first.
+target_link_deps = {}
+
+
+class MakefileWriter:
+ """MakefileWriter packages up the writing of one target-specific foobar.mk.
+
+    Its only real entry point is Write(); the class mostly exists for namespacing.
+ """
+
+ def __init__(self, generator_flags, flavor):
+ self.generator_flags = generator_flags
+ self.flavor = flavor
+
+ self.suffix_rules_srcdir = {}
+ self.suffix_rules_objdir1 = {}
+ self.suffix_rules_objdir2 = {}
+
+ # Generate suffix rules for all compilable extensions.
+ for ext in COMPILABLE_EXTENSIONS.keys():
+ # Suffix rules for source folder.
+ self.suffix_rules_srcdir.update(
+ {
+ ext: (
+ """\
+$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD
+\t@$(call do_cmd,%s,1)
+"""
+ % (ext, COMPILABLE_EXTENSIONS[ext])
+ )
+ }
+ )
+
+ # Suffix rules for generated source files.
+ self.suffix_rules_objdir1.update(
+ {
+ ext: (
+ """\
+$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD
+\t@$(call do_cmd,%s,1)
+"""
+ % (ext, COMPILABLE_EXTENSIONS[ext])
+ )
+ }
+ )
+ self.suffix_rules_objdir2.update(
+ {
+ ext: (
+ """\
+$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
+\t@$(call do_cmd,%s,1)
+"""
+ % (ext, COMPILABLE_EXTENSIONS[ext])
+ )
+ }
+ )
+
+ def Write(
+ self, qualified_target, base_path, output_filename, spec, configs, part_of_all
+ ):
+ """The main entry point: writes a .mk file for a single target.
+
+ Arguments:
+ qualified_target: target we're generating
+ base_path: path relative to source root we're building in, used to resolve
+ target-relative paths
+ output_filename: output .mk file name to write
+ spec, configs: gyp info
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ gyp.common.EnsureDirExists(output_filename)
+
+ self.fp = open(output_filename, "w")
+
+ self.fp.write(header)
+
+ self.qualified_target = qualified_target
+ self.path = base_path
+ self.target = spec["target_name"]
+ self.type = spec["type"]
+ self.toolset = spec["toolset"]
+
+ self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
+ if self.flavor == "mac":
+ self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+ else:
+ self.xcode_settings = None
+
+ deps, link_deps = self.ComputeDeps(spec)
+
+ # Some of the generation below can add extra output, sources, or
+ # link dependencies. All of the out params of the functions that
+ # follow use names like extra_foo.
+ extra_outputs = []
+ extra_sources = []
+ extra_link_deps = []
+ extra_mac_bundle_resources = []
+ mac_bundle_deps = []
+
+ if self.is_mac_bundle:
+ self.output = self.ComputeMacBundleOutput(spec)
+ self.output_binary = self.ComputeMacBundleBinaryOutput(spec)
+ else:
+ self.output = self.output_binary = self.ComputeOutput(spec)
+
+ self.is_standalone_static_library = bool(
+ spec.get("standalone_static_library", 0)
+ )
+ self._INSTALLABLE_TARGETS = ("executable", "loadable_module", "shared_library")
+ if self.is_standalone_static_library or self.type in self._INSTALLABLE_TARGETS:
+ self.alias = os.path.basename(self.output)
+ install_path = self._InstallableTargetInstallPath()
+ else:
+ self.alias = self.output
+ install_path = self.output
+
+ self.WriteLn("TOOLSET := " + self.toolset)
+ self.WriteLn("TARGET := " + self.target)
+
+ # Actions must come first, since they can generate more OBJs for use below.
+ if "actions" in spec:
+ self.WriteActions(
+ spec["actions"],
+ extra_sources,
+ extra_outputs,
+ extra_mac_bundle_resources,
+ part_of_all,
+ )
+
+ # Rules must be early like actions.
+ if "rules" in spec:
+ self.WriteRules(
+ spec["rules"],
+ extra_sources,
+ extra_outputs,
+ extra_mac_bundle_resources,
+ part_of_all,
+ )
+
+ if "copies" in spec:
+ self.WriteCopies(spec["copies"], extra_outputs, part_of_all)
+
+ # Bundle resources.
+ if self.is_mac_bundle:
+ all_mac_bundle_resources = (
+ spec.get("mac_bundle_resources", []) + extra_mac_bundle_resources
+ )
+ self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps)
+ self.WriteMacInfoPlist(mac_bundle_deps)
+
+ # Sources.
+ all_sources = spec.get("sources", []) + extra_sources
+ if all_sources:
+ self.WriteSources(
+ configs,
+ deps,
+ all_sources,
+ extra_outputs,
+ extra_link_deps,
+ part_of_all,
+ gyp.xcode_emulation.MacPrefixHeader(
+ self.xcode_settings,
+ lambda p: Sourceify(self.Absolutify(p)),
+ self.Pchify,
+ ),
+ )
+ sources = [x for x in all_sources if Compilable(x)]
+ if sources:
+ self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
+ extensions = {os.path.splitext(s)[1] for s in sources}
+ for ext in extensions:
+ if ext in self.suffix_rules_srcdir:
+ self.WriteLn(self.suffix_rules_srcdir[ext])
+ self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
+ for ext in extensions:
+ if ext in self.suffix_rules_objdir1:
+ self.WriteLn(self.suffix_rules_objdir1[ext])
+ for ext in extensions:
+ if ext in self.suffix_rules_objdir2:
+ self.WriteLn(self.suffix_rules_objdir2[ext])
+ self.WriteLn("# End of this set of suffix rules")
+
+ # Add dependency from bundle to bundle binary.
+ if self.is_mac_bundle:
+ mac_bundle_deps.append(self.output_binary)
+
+ self.WriteTarget(
+ spec,
+ configs,
+ deps,
+ extra_link_deps + link_deps,
+ mac_bundle_deps,
+ extra_outputs,
+ part_of_all,
+ )
+
+ # Update global list of target outputs, used in dependency tracking.
+ target_outputs[qualified_target] = install_path
+
+ # Update global list of link dependencies.
+ if self.type in ("static_library", "shared_library"):
+ target_link_deps[qualified_target] = self.output_binary
+
+        # Currently all versions have the same effect, but in the future the
+        # behavior could differ.
+ if self.generator_flags.get("android_ndk_version", None):
+ self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps)
+
+ self.fp.close()
+
+ def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
+ """Write a "sub-project" Makefile.
+
+        This is a small wrapper Makefile that calls the top-level Makefile to build
+ the targets from a single gyp file (i.e. a sub-project).
+
+ Arguments:
+ output_filename: sub-project Makefile name to write
+ makefile_path: path to the top-level Makefile
+ targets: list of "all" targets for this sub-project
+ build_dir: build output directory, relative to the sub-project
+ """
+ gyp.common.EnsureDirExists(output_filename)
+ self.fp = open(output_filename, "w")
+ self.fp.write(header)
+ # For consistency with other builders, put sub-project build output in the
+ # sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
+ self.WriteLn(
+ "export builddir_name ?= %s"
+ % os.path.join(os.path.dirname(output_filename), build_dir)
+ )
+ self.WriteLn(".PHONY: all")
+ self.WriteLn("all:")
+ if makefile_path:
+ makefile_path = " -C " + makefile_path
+ self.WriteLn("\t$(MAKE){} {}".format(makefile_path, " ".join(targets)))
+ self.fp.close()
+
+ def WriteActions(
+ self,
+ actions,
+ extra_sources,
+ extra_outputs,
+ extra_mac_bundle_resources,
+ part_of_all,
+ ):
+ """Write Makefile code for any 'actions' from the gyp input.
+
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ actions (used to make other pieces dependent on these
+ actions)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ env = self.GetSortedXcodeEnv()
+ for action in actions:
+ name = StringToMakefileVariable(
+ "{}_{}".format(self.qualified_target, action["action_name"])
+ )
+ self.WriteLn('### Rules for action "%s":' % action["action_name"])
+ inputs = action["inputs"]
+ outputs = action["outputs"]
+
+ # Build up a list of outputs.
+ # Collect the output dirs we'll need.
+ dirs = set()
+ for out in outputs:
+ dir = os.path.split(out)[0]
+ if dir:
+ dirs.add(dir)
+ if int(action.get("process_outputs_as_sources", False)):
+ extra_sources += outputs
+ if int(action.get("process_outputs_as_mac_bundle_resources", False)):
+ extra_mac_bundle_resources += outputs
+
+ # Write the actual command.
+ action_commands = action["action"]
+ if self.flavor == "mac":
+ action_commands = [
+ gyp.xcode_emulation.ExpandEnvVars(command, env)
+ for command in action_commands
+ ]
+ command = gyp.common.EncodePOSIXShellList(action_commands)
+ if "message" in action:
+ self.WriteLn(
+ "quiet_cmd_{} = ACTION {} $@".format(name, action["message"])
+ )
+ else:
+ self.WriteLn(f"quiet_cmd_{name} = ACTION {name} $@")
+ if len(dirs) > 0:
+ command = "mkdir -p %s" % " ".join(dirs) + "; " + command
+
+ cd_action = "cd %s; " % Sourceify(self.path or ".")
+
+ # command and cd_action get written to a toplevel variable called
+ # cmd_foo. Toplevel variables can't handle things that change per
+ # makefile like $(TARGET), so hardcode the target.
+ command = command.replace("$(TARGET)", self.target)
+ cd_action = cd_action.replace("$(TARGET)", self.target)
+
+ # Set LD_LIBRARY_PATH in case the action runs an executable from this
+ # build which links to shared libs from this build.
+ # actions run on the host, so they should in theory only use host
+ # libraries, but until everything is made cross-compile safe, also use
+ # target libraries.
+ # TODO(piman): when everything is cross-compile safe, remove lib.target
+ if self.flavor == "zos" or self.flavor == "aix":
+ self.WriteLn(
+ "cmd_%s = LIBPATH=$(builddir)/lib.host:"
+ "$(builddir)/lib.target:$$LIBPATH; "
+ "export LIBPATH; "
+ "%s%s" % (name, cd_action, command)
+ )
+ else:
+ self.WriteLn(
+ "cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:"
+ "$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
+ "export LD_LIBRARY_PATH; "
+ "%s%s" % (name, cd_action, command)
+ )
+ self.WriteLn()
+ outputs = [self.Absolutify(o) for o in outputs]
+ # The makefile rules are all relative to the top dir, but the gyp actions
+ # are defined relative to their containing dir. This replaces the obj
+ # variable for the action rule with an absolute version so that the output
+ # goes in the right place.
+ # Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
+ # it's superfluous for the "extra outputs", and this avoids accidentally
+ # writing duplicate dummy rules for those outputs.
+ # Same for environment.
+ self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0]))
+ self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0]))
+ self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv())
+
+ for input in inputs:
+ assert " " not in input, (
+ "Spaces in action input filenames not supported (%s)" % input
+ )
+ for output in outputs:
+ assert " " not in output, (
+ "Spaces in action output filenames not supported (%s)" % output
+ )
+
+ # See the comment in WriteCopies about expanding env vars.
+ outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
+ inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
+
+ self.WriteDoCmd(
+ outputs,
+ [Sourceify(self.Absolutify(i)) for i in inputs],
+ part_of_all=part_of_all,
+ command=name,
+ )
+
+ # Stuff the outputs in a variable so we can refer to them later.
+ outputs_variable = "action_%s_outputs" % name
+ self.WriteLn("{} := {}".format(outputs_variable, " ".join(outputs)))
+ extra_outputs.append("$(%s)" % outputs_variable)
+ self.WriteLn()
+
+ self.WriteLn()
+
+ def WriteRules(
+ self,
+ rules,
+ extra_sources,
+ extra_outputs,
+ extra_mac_bundle_resources,
+ part_of_all,
+ ):
+ """Write Makefile code for any 'rules' from the gyp input.
+
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ rules (used to make other pieces dependent on these rules)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ env = self.GetSortedXcodeEnv()
+ for rule in rules:
+ name = StringToMakefileVariable(
+ "{}_{}".format(self.qualified_target, rule["rule_name"])
+ )
+ count = 0
+ self.WriteLn("### Generated for rule %s:" % name)
+
+ all_outputs = []
+
+ for rule_source in rule.get("rule_sources", []):
+ dirs = set()
+ (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
+ (rule_source_root, rule_source_ext) = os.path.splitext(
+ rule_source_basename
+ )
+
+ outputs = [
+ self.ExpandInputRoot(out, rule_source_root, rule_source_dirname)
+ for out in rule["outputs"]
+ ]
+
+ for out in outputs:
+ dir = os.path.dirname(out)
+ if dir:
+ dirs.add(dir)
+ if int(rule.get("process_outputs_as_sources", False)):
+ extra_sources += outputs
+ if int(rule.get("process_outputs_as_mac_bundle_resources", False)):
+ extra_mac_bundle_resources += outputs
+ inputs = [
+ Sourceify(self.Absolutify(i))
+ for i in [rule_source] + rule.get("inputs", [])
+ ]
+ actions = ["$(call do_cmd,%s_%d)" % (name, count)]
+
+ if name == "resources_grit":
+ # HACK: This is ugly. Grit intentionally doesn't touch the
+ # timestamp of its output file when the file doesn't change,
+ # which is fine in hash-based dependency systems like scons
+ # and forge, but not kosher in the make world. After some
+ # discussion, hacking around it here seems like the least
+ # amount of pain.
+ actions += ["@touch --no-create $@"]
+
+ # See the comment in WriteCopies about expanding env vars.
+ outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
+ inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
+
+ outputs = [self.Absolutify(o) for o in outputs]
+ all_outputs += outputs
+ # Only write the 'obj' and 'builddir' rules for the "primary" output
+ # (:1); it's superfluous for the "extra outputs", and this avoids
+ # accidentally writing duplicate dummy rules for those outputs.
+ self.WriteLn("%s: obj := $(abs_obj)" % outputs[0])
+ self.WriteLn("%s: builddir := $(abs_builddir)" % outputs[0])
+ self.WriteMakeRule(
+ outputs, inputs, actions, command="%s_%d" % (name, count)
+ )
+ # Spaces in rule filenames are not supported, but rule variables have
+ # spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)').
+ # The spaces within the variables are valid, so remove the variables
+ # before checking.
+ variables_with_spaces = re.compile(r"\$\([^ ]* \$<\)")
+ for output in outputs:
+ output = re.sub(variables_with_spaces, "", output)
+ assert " " not in output, (
+ "Spaces in rule filenames not yet supported (%s)" % output
+ )
+ self.WriteLn("all_deps += %s" % " ".join(outputs))
+
+ action = [
+ self.ExpandInputRoot(ac, rule_source_root, rule_source_dirname)
+ for ac in rule["action"]
+ ]
+ mkdirs = ""
+ if len(dirs) > 0:
+ mkdirs = "mkdir -p %s; " % " ".join(dirs)
+ cd_action = "cd %s; " % Sourceify(self.path or ".")
+
+ # action, cd_action, and mkdirs get written to a toplevel variable
+ # called cmd_foo. Toplevel variables can't handle things that change
+ # per makefile like $(TARGET), so hardcode the target.
+ if self.flavor == "mac":
+ action = [
+ gyp.xcode_emulation.ExpandEnvVars(command, env)
+ for command in action
+ ]
+ action = gyp.common.EncodePOSIXShellList(action)
+ action = action.replace("$(TARGET)", self.target)
+ cd_action = cd_action.replace("$(TARGET)", self.target)
+ mkdirs = mkdirs.replace("$(TARGET)", self.target)
+
+ # Set LD_LIBRARY_PATH in case the rule runs an executable from this
+ # build which links to shared libs from this build.
+ # rules run on the host, so they should in theory only use host
+ # libraries, but until everything is made cross-compile safe, also use
+ # target libraries.
+ # TODO(piman): when everything is cross-compile safe, remove lib.target
+ self.WriteLn(
+ "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH="
+ "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
+ "export LD_LIBRARY_PATH; "
+ "%(cd_action)s%(mkdirs)s%(action)s"
+ % {
+ "action": action,
+ "cd_action": cd_action,
+ "count": count,
+ "mkdirs": mkdirs,
+ "name": name,
+ }
+ )
+ self.WriteLn(
+ "quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@"
+ % {"count": count, "name": name}
+ )
+ self.WriteLn()
+ count += 1
+
+ outputs_variable = "rule_%s_outputs" % name
+ self.WriteList(all_outputs, outputs_variable)
+ extra_outputs.append("$(%s)" % outputs_variable)
+
+ self.WriteLn("### Finished generating for rule: %s" % name)
+ self.WriteLn()
+ self.WriteLn("### Finished generating for all rules")
+ self.WriteLn("")
+
+ def WriteCopies(self, copies, extra_outputs, part_of_all):
+ """Write Makefile code for any 'copies' from the gyp input.
+
+ extra_outputs: a list that will be filled in with any outputs of this action
+ (used to make other pieces dependent on this action)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ self.WriteLn("### Generated for copy rule.")
+
+ variable = StringToMakefileVariable(self.qualified_target + "_copies")
+ outputs = []
+ for copy in copies:
+ for path in copy["files"]:
+ # Absolutify() may call normpath, and will strip trailing slashes.
+ path = Sourceify(self.Absolutify(path))
+ filename = os.path.split(path)[1]
+ output = Sourceify(
+ self.Absolutify(os.path.join(copy["destination"], filename))
+ )
+
+ # If the output path has variables in it, which happens in practice for
+ # 'copies', writing the environment as target-local doesn't work,
+ # because the variables are already needed for the target name.
+ # Copying the environment variables into global make variables doesn't
+ # work either, because then the .d files will potentially contain spaces
+ # after variable expansion, and .d file handling cannot handle spaces.
+ # As a workaround, manually expand variables at gyp time. Since 'copies'
+ # can't run scripts, there's no need to write the env then.
+ # WriteDoCmd() will escape spaces for .d files.
+ env = self.GetSortedXcodeEnv()
+ output = gyp.xcode_emulation.ExpandEnvVars(output, env)
+ path = gyp.xcode_emulation.ExpandEnvVars(path, env)
+ self.WriteDoCmd([output], [path], "copy", part_of_all)
+ outputs.append(output)
+ self.WriteLn(
+ "{} = {}".format(variable, " ".join(QuoteSpaces(o) for o in outputs))
+ )
+ extra_outputs.append("$(%s)" % variable)
+ self.WriteLn()
+
+ def WriteMacBundleResources(self, resources, bundle_deps):
+ """Writes Makefile code for 'mac_bundle_resources'."""
+ self.WriteLn("### Generated for mac_bundle_resources")
+
+ for output, res in gyp.xcode_emulation.GetMacBundleResources(
+ generator_default_variables["PRODUCT_DIR"],
+ self.xcode_settings,
+ [Sourceify(self.Absolutify(r)) for r in resources],
+ ):
+ _, ext = os.path.splitext(output)
+ if ext != ".xcassets":
+                # Make does not support '.xcassets' emulation.
+ self.WriteDoCmd(
+ [output], [res], "mac_tool,,,copy-bundle-resource", part_of_all=True
+ )
+ bundle_deps.append(output)
+
+ def WriteMacInfoPlist(self, bundle_deps):
+ """Write Makefile code for bundle Info.plist files."""
+ info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
+ generator_default_variables["PRODUCT_DIR"],
+ self.xcode_settings,
+ lambda p: Sourceify(self.Absolutify(p)),
+ )
+ if not info_plist:
+ return
+ if defines:
+ # Create an intermediate file to store preprocessed results.
+ intermediate_plist = "$(obj).$(TOOLSET)/$(TARGET)/" + os.path.basename(
+ info_plist
+ )
+ self.WriteList(
+ defines,
+ intermediate_plist + ": INFOPLIST_DEFINES",
+ "-D",
+ quoter=EscapeCppDefine,
+ )
+ self.WriteMakeRule(
+ [intermediate_plist],
+ [info_plist],
+ [
+ "$(call do_cmd,infoplist)",
+ # "Convert" the plist so that any weird whitespace changes from the
+ # preprocessor do not affect the XML parser in mac_tool.
+ "@plutil -convert xml1 $@ $@",
+ ],
+ )
+ info_plist = intermediate_plist
+        # Plists can contain envvars; substitute them into the file.
+ self.WriteSortedXcodeEnv(
+ out, self.GetSortedXcodeEnv(additional_settings=extra_env)
+ )
+ self.WriteDoCmd(
+ [out], [info_plist], "mac_tool,,,copy-info-plist", part_of_all=True
+ )
+ bundle_deps.append(out)
+
+ def WriteSources(
+ self,
+ configs,
+ deps,
+ sources,
+ extra_outputs,
+ extra_link_deps,
+ part_of_all,
+ precompiled_header,
+ ):
+ """Write Makefile code for any 'sources' from the gyp input.
+ These are source files necessary to build the current target.
+
+ configs, deps, sources: input from gyp.
+ extra_outputs: a list of extra outputs this action should be dependent on;
+ used to serialize action/rules before compilation
+ extra_link_deps: a list that will be filled in with any outputs of
+ compilation (to be used in link lines)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+
+ # Write configuration-specific variables for CFLAGS, etc.
+ for configname in sorted(configs.keys()):
+ config = configs[configname]
+ self.WriteList(
+ config.get("defines"),
+ "DEFS_%s" % configname,
+ prefix="-D",
+ quoter=EscapeCppDefine,
+ )
+
+ if self.flavor == "mac":
+ cflags = self.xcode_settings.GetCflags(
+ configname, arch=config.get("xcode_configuration_platform")
+ )
+ cflags_c = self.xcode_settings.GetCflagsC(configname)
+ cflags_cc = self.xcode_settings.GetCflagsCC(configname)
+ cflags_objc = self.xcode_settings.GetCflagsObjC(configname)
+ cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname)
+ else:
+ cflags = config.get("cflags")
+ cflags_c = config.get("cflags_c")
+ cflags_cc = config.get("cflags_cc")
+
+ self.WriteLn("# Flags passed to all source files.")
+ self.WriteList(cflags, "CFLAGS_%s" % configname)
+ self.WriteLn("# Flags passed to only C files.")
+ self.WriteList(cflags_c, "CFLAGS_C_%s" % configname)
+ self.WriteLn("# Flags passed to only C++ files.")
+ self.WriteList(cflags_cc, "CFLAGS_CC_%s" % configname)
+ if self.flavor == "mac":
+ self.WriteLn("# Flags passed to only ObjC files.")
+ self.WriteList(cflags_objc, "CFLAGS_OBJC_%s" % configname)
+ self.WriteLn("# Flags passed to only ObjC++ files.")
+ self.WriteList(cflags_objcc, "CFLAGS_OBJCC_%s" % configname)
+ includes = config.get("include_dirs")
+ if includes:
+ includes = [Sourceify(self.Absolutify(i)) for i in includes]
+ self.WriteList(includes, "INCS_%s" % configname, prefix="-I")
+
+ compilable = list(filter(Compilable, sources))
+ objs = [self.Objectify(self.Absolutify(Target(c))) for c in compilable]
+ self.WriteList(objs, "OBJS")
+
+ for obj in objs:
+ assert " " not in obj, "Spaces in object filenames not supported (%s)" % obj
+ self.WriteLn(
+ "# Add to the list of files we specially track " "dependencies for."
+ )
+ self.WriteLn("all_deps += $(OBJS)")
+ self.WriteLn()
+
+ # Make sure our dependencies are built first.
+ if deps:
+ self.WriteMakeRule(
+ ["$(OBJS)"],
+ deps,
+ comment="Make sure our dependencies are built " "before any of us.",
+ order_only=True,
+ )
+
+ # Make sure the actions and rules run first.
+ # If they generate any extra headers etc., the per-.o file dep tracking
+ # will catch the proper rebuilds, so order only is still ok here.
+ if extra_outputs:
+ self.WriteMakeRule(
+ ["$(OBJS)"],
+ extra_outputs,
+ comment="Make sure our actions/rules run " "before any of us.",
+ order_only=True,
+ )
+
+ pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
+ if pchdeps:
+ self.WriteLn("# Dependencies from obj files to their precompiled headers")
+ for source, obj, gch in pchdeps:
+ self.WriteLn(f"{obj}: {gch}")
+ self.WriteLn("# End precompiled header dependencies")
+
+ if objs:
+ extra_link_deps.append("$(OBJS)")
+ self.WriteLn(
+ """\
+# CFLAGS et al overrides must be target-local.
+# See "Target-specific Variable Values" in the GNU Make manual."""
+ )
+ self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)")
+ self.WriteLn(
+ "$(OBJS): GYP_CFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "%s " % precompiled_header.GetInclude("c") + "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_C_$(BUILDTYPE))"
+ )
+ self.WriteLn(
+ "$(OBJS): GYP_CXXFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "%s " % precompiled_header.GetInclude("cc") + "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_CC_$(BUILDTYPE))"
+ )
+ if self.flavor == "mac":
+ self.WriteLn(
+ "$(OBJS): GYP_OBJCFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "%s " % precompiled_header.GetInclude("m")
+ + "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_C_$(BUILDTYPE)) "
+ "$(CFLAGS_OBJC_$(BUILDTYPE))"
+ )
+ self.WriteLn(
+ "$(OBJS): GYP_OBJCXXFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "%s " % precompiled_header.GetInclude("mm")
+ + "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_CC_$(BUILDTYPE)) "
+ "$(CFLAGS_OBJCC_$(BUILDTYPE))"
+ )
+
+ self.WritePchTargets(precompiled_header.GetPchBuildCommands())
+
+ # If there are any object files in our input file list, link them into our
+ # output.
+ extra_link_deps += [source for source in sources if Linkable(source)]
+
+ self.WriteLn()
+
+ def WritePchTargets(self, pch_commands):
+ """Writes make rules to compile prefix headers."""
+ if not pch_commands:
+ return
+
+ for gch, lang_flag, lang, input in pch_commands:
+ extra_flags = {
+ "c": "$(CFLAGS_C_$(BUILDTYPE))",
+ "cc": "$(CFLAGS_CC_$(BUILDTYPE))",
+ "m": "$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))",
+ "mm": "$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))",
+ }[lang]
+ var_name = {
+ "c": "GYP_PCH_CFLAGS",
+ "cc": "GYP_PCH_CXXFLAGS",
+ "m": "GYP_PCH_OBJCFLAGS",
+ "mm": "GYP_PCH_OBJCXXFLAGS",
+ }[lang]
+ self.WriteLn(
+ f"{gch}: {var_name} := {lang_flag} " + "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "$(CFLAGS_$(BUILDTYPE)) " + extra_flags
+ )
+
+ self.WriteLn(f"{gch}: {input} FORCE_DO_CMD")
+ self.WriteLn("\t@$(call do_cmd,pch_%s,1)" % lang)
+ self.WriteLn("")
+ assert " " not in gch, "Spaces in gch filenames not supported (%s)" % gch
+ self.WriteLn("all_deps += %s" % gch)
+ self.WriteLn("")
+
+ def ComputeOutputBasename(self, spec):
+ """Return the 'output basename' of a gyp spec.
+
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ 'libfoobar.so'
+ """
+ assert not self.is_mac_bundle
+
+ if self.flavor == "mac" and self.type in (
+ "static_library",
+ "executable",
+ "shared_library",
+ "loadable_module",
+ ):
+ return self.xcode_settings.GetExecutablePath()
+
+ target = spec["target_name"]
+ target_prefix = ""
+ target_ext = ""
+ if self.type == "static_library":
+ if target[:3] == "lib":
+ target = target[3:]
+ target_prefix = "lib"
+ target_ext = ".a"
+ elif self.type in ("loadable_module", "shared_library"):
+ if target[:3] == "lib":
+ target = target[3:]
+ target_prefix = "lib"
+ if self.flavor == "aix":
+ target_ext = ".a"
+ elif self.flavor == "zos":
+ target_ext = ".x"
+ else:
+ target_ext = ".so"
+ elif self.type == "none":
+ target = "%s.stamp" % target
+ elif self.type != "executable":
+ print(
+ "ERROR: What output file should be generated?",
+ "type",
+ self.type,
+ "target",
+ target,
+ )
+
+ target_prefix = spec.get("product_prefix", target_prefix)
+ target = spec.get("product_name", target)
+ product_ext = spec.get("product_extension")
+ if product_ext:
+ target_ext = "." + product_ext
+
+ return target_prefix + target + target_ext
+
+ def _InstallImmediately(self):
+ return (
+ self.toolset == "target"
+ and self.flavor == "mac"
+ and self.type
+ in ("static_library", "executable", "shared_library", "loadable_module")
+ )
+
+ def ComputeOutput(self, spec):
+ """Return the 'output' (full output path) of a gyp spec.
+
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ '$(obj)/baz/libfoobar.so'
+ """
+ assert not self.is_mac_bundle
+
+ path = os.path.join("$(obj)." + self.toolset, self.path)
+ if self.type == "executable" or self._InstallImmediately():
+ path = "$(builddir)"
+ path = spec.get("product_dir", path)
+ return os.path.join(path, self.ComputeOutputBasename(spec))
+
+ def ComputeMacBundleOutput(self, spec):
+ """Return the 'output' (full output path) to a bundle output directory."""
+ assert self.is_mac_bundle
+ path = generator_default_variables["PRODUCT_DIR"]
+ return os.path.join(path, self.xcode_settings.GetWrapperName())
+
+ def ComputeMacBundleBinaryOutput(self, spec):
+ """Return the 'output' (full output path) to the binary in a bundle."""
+ path = generator_default_variables["PRODUCT_DIR"]
+ return os.path.join(path, self.xcode_settings.GetExecutablePath())
+
+ def ComputeDeps(self, spec):
+ """Compute the dependencies of a gyp spec.
+
+ Returns a tuple (deps, link_deps), where each is a list of
+ filenames that will need to be put in front of make for either
+ building (deps) or linking (link_deps).
+ """
+ deps = []
+ link_deps = []
+ if "dependencies" in spec:
+ deps.extend(
+ [
+ target_outputs[dep]
+ for dep in spec["dependencies"]
+ if target_outputs[dep]
+ ]
+ )
+ for dep in spec["dependencies"]:
+ if dep in target_link_deps:
+ link_deps.append(target_link_deps[dep])
+ deps.extend(link_deps)
+ # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)?
+ # This hack makes it work:
+ # link_deps.extend(spec.get('libraries', []))
+ return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
+
+ def GetSharedObjectFromSidedeck(self, sidedeck):
+ """Return the shared object files based on sidedeck"""
+ return re.sub(r"\.x$", ".so", sidedeck)
+
+ def GetUnversionedSidedeckFromSidedeck(self, sidedeck):
+ """Return the shared object files based on sidedeck"""
+ return re.sub(r"\.\d+\.x$", ".x", sidedeck)
+
+ def WriteDependencyOnExtraOutputs(self, target, extra_outputs):
+ self.WriteMakeRule(
+ [self.output_binary],
+ extra_outputs,
+ comment="Build our special outputs first.",
+ order_only=True,
+ )
+
+ def WriteTarget(
+ self, spec, configs, deps, link_deps, bundle_deps, extra_outputs, part_of_all
+ ):
+ """Write Makefile code to produce the final target of the gyp spec.
+
+ spec, configs: input from gyp.
+ deps, link_deps: dependency lists; see ComputeDeps()
+ extra_outputs: any extra outputs that our target should depend on
+ part_of_all: flag indicating this target is part of 'all'
+ """
+
+ self.WriteLn("### Rules for final target.")
+
+ if extra_outputs:
+ self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs)
+ self.WriteMakeRule(
+ extra_outputs,
+ deps,
+ comment=("Preserve order dependency of " "special output on deps."),
+ order_only=True,
+ )
+
+ target_postbuilds = {}
+ if self.type != "none":
+ for configname in sorted(configs.keys()):
+ config = configs[configname]
+ if self.flavor == "mac":
+ ldflags = self.xcode_settings.GetLdflags(
+ configname,
+ generator_default_variables["PRODUCT_DIR"],
+ lambda p: Sourceify(self.Absolutify(p)),
+ arch=config.get("xcode_configuration_platform"),
+ )
+
+ # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
+ gyp_to_build = gyp.common.InvertRelativePath(self.path)
+ target_postbuild = self.xcode_settings.AddImplicitPostbuilds(
+ configname,
+ QuoteSpaces(
+ os.path.normpath(os.path.join(gyp_to_build, self.output))
+ ),
+ QuoteSpaces(
+ os.path.normpath(
+ os.path.join(gyp_to_build, self.output_binary)
+ )
+ ),
+ )
+ if target_postbuild:
+ target_postbuilds[configname] = target_postbuild
+ else:
+ ldflags = config.get("ldflags", [])
+ # Compute an rpath for this output if needed.
+ if any(dep.endswith(".so") or ".so." in dep for dep in deps):
+ # We want to get the literal string "$ORIGIN"
+ # into the link command, so we need lots of escaping.
+ ldflags.append(r"-Wl,-rpath=\$$ORIGIN/")
+ ldflags.append(r"-Wl,-rpath-link=\$(builddir)/")
+ library_dirs = config.get("library_dirs", [])
+ ldflags += [("-L%s" % library_dir) for library_dir in library_dirs]
+ self.WriteList(ldflags, "LDFLAGS_%s" % configname)
+ if self.flavor == "mac":
+ self.WriteList(
+ self.xcode_settings.GetLibtoolflags(configname),
+ "LIBTOOLFLAGS_%s" % configname,
+ )
+ libraries = spec.get("libraries")
+ if libraries:
+ # Remove duplicate entries
+ libraries = gyp.common.uniquer(libraries)
+ if self.flavor == "mac":
+ libraries = self.xcode_settings.AdjustLibraries(libraries)
+ self.WriteList(libraries, "LIBS")
+ self.WriteLn(
+ "%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))"
+ % QuoteSpaces(self.output_binary)
+ )
+ self.WriteLn("%s: LIBS := $(LIBS)" % QuoteSpaces(self.output_binary))
+
+ if self.flavor == "mac":
+ self.WriteLn(
+ "%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))"
+ % QuoteSpaces(self.output_binary)
+ )
+
+ # Postbuild actions. Like actions, but implicitly depend on the target's
+ # output.
+ postbuilds = []
+ if self.flavor == "mac":
+ if target_postbuilds:
+ postbuilds.append("$(TARGET_POSTBUILDS_$(BUILDTYPE))")
+ postbuilds.extend(gyp.xcode_emulation.GetSpecPostbuildCommands(spec))
+
+ if postbuilds:
+ # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE),
+ # so we must output its definition first, since we declare variables
+ # using ":=".
+ self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv())
+
+ for configname in target_postbuilds:
+ self.WriteLn(
+ "%s: TARGET_POSTBUILDS_%s := %s"
+ % (
+ QuoteSpaces(self.output),
+ configname,
+ gyp.common.EncodePOSIXShellList(target_postbuilds[configname]),
+ )
+ )
+
+            # Postbuilds expect to be run in the gyp file's directory, so insert
+            # an implicit postbuild that cds into that directory.
+ postbuilds.insert(0, gyp.common.EncodePOSIXShellList(["cd", self.path]))
+ for i, postbuild in enumerate(postbuilds):
+ if not postbuild.startswith("$"):
+ postbuilds[i] = EscapeShellArgument(postbuild)
+ self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(self.output))
+ self.WriteLn(
+ "%s: POSTBUILDS := %s"
+ % (QuoteSpaces(self.output), " ".join(postbuilds))
+ )
+
+ # A bundle directory depends on its dependencies such as bundle resources
+ # and bundle binary. When all dependencies have been built, the bundle
+ # needs to be packaged.
+ if self.is_mac_bundle:
+ # If the framework doesn't contain a binary, then nothing depends
+ # on the actions -- make the framework depend on them directly too.
+ self.WriteDependencyOnExtraOutputs(self.output, extra_outputs)
+
+ # Bundle dependencies. Note that the code below adds actions to this
+ # target, so if you move these two lines, move the lines below as well.
+ self.WriteList([QuoteSpaces(dep) for dep in bundle_deps], "BUNDLE_DEPS")
+ self.WriteLn("%s: $(BUNDLE_DEPS)" % QuoteSpaces(self.output))
+
+ # After the framework is built, package it. Needs to happen before
+ # postbuilds, since postbuilds depend on this.
+ if self.type in ("shared_library", "loadable_module"):
+ self.WriteLn(
+ "\t@$(call do_cmd,mac_package_framework,,,%s)"
+ % self.xcode_settings.GetFrameworkVersion()
+ )
+
+            # Bundle postbuilds can depend on the whole bundle, so run them after
+            # the bundle is packaged, not merely after the bundle binary is done.
+ if postbuilds:
+ self.WriteLn("\t@$(call do_postbuilds)")
+                postbuilds = []  # Don't write postbuilds for the target's output.
+
+ # Needed by test/mac/gyptest-rebuild.py.
+ self.WriteLn("\t@true # No-op, used by tests")
+
+ # Since this target depends on binary and resources which are in
+            # nested subfolders, the framework directory will usually be older
+            # than its dependencies. To prevent this rule from executing on
+            # every build (expensive, especially with postbuilds), explicitly
+            # update the time on the framework directory.
+ self.WriteLn("\t@touch -c %s" % QuoteSpaces(self.output))
+
+ if postbuilds:
+ assert not self.is_mac_bundle, (
+ "Postbuilds for bundles should be done "
+ "on the bundle, not the binary (target '%s')" % self.target
+ )
+ assert "product_dir" not in spec, (
+ "Postbuilds do not work with " "custom product_dir"
+ )
+
+ if self.type == "executable":
+ self.WriteLn(
+ "%s: LD_INPUTS := %s"
+ % (
+ QuoteSpaces(self.output_binary),
+ " ".join(QuoteSpaces(dep) for dep in link_deps),
+ )
+ )
+ if self.toolset == "host" and self.flavor == "android":
+ self.WriteDoCmd(
+ [self.output_binary],
+ link_deps,
+ "link_host",
+ part_of_all,
+ postbuilds=postbuilds,
+ )
+ else:
+ self.WriteDoCmd(
+ [self.output_binary],
+ link_deps,
+ "link",
+ part_of_all,
+ postbuilds=postbuilds,
+ )
+
+ elif self.type == "static_library":
+ for link_dep in link_deps:
+ assert " " not in link_dep, (
+ "Spaces in alink input filenames not supported (%s)" % link_dep
+ )
+ if (
+ self.flavor not in ("mac", "openbsd", "netbsd", "win")
+ and not self.is_standalone_static_library
+ ):
+ if self.flavor in ("linux", "android"):
+ self.WriteMakeRule(
+ [self.output_binary],
+ link_deps,
+ actions=["$(call create_thin_archive,$@,$^)"],
+ )
+ else:
+ self.WriteDoCmd(
+ [self.output_binary],
+ link_deps,
+ "alink_thin",
+ part_of_all,
+ postbuilds=postbuilds,
+ )
+ else:
+ if self.flavor in ("linux", "android"):
+ self.WriteMakeRule(
+ [self.output_binary],
+ link_deps,
+ actions=["$(call create_archive,$@,$^)"],
+ )
+ else:
+ self.WriteDoCmd(
+ [self.output_binary],
+ link_deps,
+ "alink",
+ part_of_all,
+ postbuilds=postbuilds,
+ )
+ elif self.type == "shared_library":
+ self.WriteLn(
+ "%s: LD_INPUTS := %s"
+ % (
+ QuoteSpaces(self.output_binary),
+ " ".join(QuoteSpaces(dep) for dep in link_deps),
+ )
+ )
+ self.WriteDoCmd(
+ [self.output_binary],
+ link_deps,
+ "solink",
+ part_of_all,
+ postbuilds=postbuilds,
+ )
+ # z/OS has a .so target as well as a sidedeck .x target
+ if self.flavor == "zos":
+ self.WriteLn(
+ "%s: %s"
+ % (
+ QuoteSpaces(
+ self.GetSharedObjectFromSidedeck(self.output_binary)
+ ),
+ QuoteSpaces(self.output_binary),
+ )
+ )
+ elif self.type == "loadable_module":
+ for link_dep in link_deps:
+ assert " " not in link_dep, (
+ "Spaces in module input filenames not supported (%s)" % link_dep
+ )
+ if self.toolset == "host" and self.flavor == "android":
+ self.WriteDoCmd(
+ [self.output_binary],
+ link_deps,
+ "solink_module_host",
+ part_of_all,
+ postbuilds=postbuilds,
+ )
+ else:
+ self.WriteDoCmd(
+ [self.output_binary],
+ link_deps,
+ "solink_module",
+ part_of_all,
+ postbuilds=postbuilds,
+ )
+ elif self.type == "none":
+ # Write a stamp line.
+ self.WriteDoCmd(
+ [self.output_binary], deps, "touch", part_of_all, postbuilds=postbuilds
+ )
+ else:
+ print("WARNING: no output for", self.type, self.target)
+
+ # Add an alias for each target (if there are any outputs).
+ # Installable target aliases are created below.
+ if (self.output and self.output != self.target) and (
+ self.type not in self._INSTALLABLE_TARGETS
+ ):
+ self.WriteMakeRule(
+ [self.target], [self.output], comment="Add target alias", phony=True
+ )
+ if part_of_all:
+ self.WriteMakeRule(
+ ["all"],
+ [self.target],
+ comment='Add target alias to "all" target.',
+ phony=True,
+ )
+
+ # Add special-case rules for our installable targets.
+ # 1) They need to install to the build dir or "product" dir.
+ # 2) They get shortcuts for building (e.g. "make chrome").
+ # 3) They are part of "make all".
+ if self.type in self._INSTALLABLE_TARGETS or self.is_standalone_static_library:
+ if self.type == "shared_library":
+ file_desc = "shared library"
+ elif self.type == "static_library":
+ file_desc = "static library"
+ else:
+ file_desc = "executable"
+ install_path = self._InstallableTargetInstallPath()
+ installable_deps = []
+ if self.flavor != "zos":
+ installable_deps.append(self.output)
+ if (
+ self.flavor == "mac"
+ and "product_dir" not in spec
+ and self.toolset == "target"
+ ):
+ # On mac, products are created in install_path immediately.
+ assert install_path == self.output, "{} != {}".format(
+ install_path,
+ self.output,
+ )
+
+ # Point the target alias to the final binary output.
+ self.WriteMakeRule(
+ [self.target], [install_path], comment="Add target alias", phony=True
+ )
+ if install_path != self.output:
+ assert not self.is_mac_bundle # See comment a few lines above.
+ self.WriteDoCmd(
+ [install_path],
+ [self.output],
+ "copy",
+ comment="Copy this to the %s output path." % file_desc,
+ part_of_all=part_of_all,
+ )
+ if self.flavor != "zos":
+ installable_deps.append(install_path)
+ if self.flavor == "zos" and self.type == "shared_library":
+ # lib.target/libnode.so has a dependency on $(obj).target/libnode.so
+ self.WriteDoCmd(
+ [self.GetSharedObjectFromSidedeck(install_path)],
+ [self.GetSharedObjectFromSidedeck(self.output)],
+ "copy",
+ comment="Copy this to the %s output path." % file_desc,
+ part_of_all=part_of_all,
+ )
+ # Create a symlink of libnode.x to libnode.version.x
+ self.WriteDoCmd(
+ [self.GetUnversionedSidedeckFromSidedeck(install_path)],
+ [install_path],
+ "symlink",
+ comment="Symlnk this to the %s output path." % file_desc,
+ part_of_all=part_of_all,
+ )
+ # Place libnode.version.so and libnode.x symlink in lib.target dir
+ installable_deps.append(self.GetSharedObjectFromSidedeck(install_path))
+ installable_deps.append(
+ self.GetUnversionedSidedeckFromSidedeck(install_path)
+ )
+ if self.output != self.alias and self.alias != self.target:
+ self.WriteMakeRule(
+ [self.alias],
+ installable_deps,
+ comment="Short alias for building this %s." % file_desc,
+ phony=True,
+ )
+ if self.flavor == "zos" and self.type == "shared_library":
+ # Make sure that .x symlink target is run
+ self.WriteMakeRule(
+ ["all"],
+ [
+ self.GetUnversionedSidedeckFromSidedeck(install_path),
+ self.GetSharedObjectFromSidedeck(install_path),
+ ],
+ comment='Add %s to "all" target.' % file_desc,
+ phony=True,
+ )
+ elif part_of_all:
+ self.WriteMakeRule(
+ ["all"],
+ [install_path],
+ comment='Add %s to "all" target.' % file_desc,
+ phony=True,
+ )
+
+ def WriteList(self, value_list, variable=None, prefix="", quoter=QuoteIfNecessary):
+ """Write a variable definition that is a list of values.
+
+ E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
+        foo := blaha blahb
+ but in a pretty-printed style.
+ """
+ values = ""
+ if value_list:
+ value_list = [quoter(prefix + value) for value in value_list]
+ values = " \\\n\t" + " \\\n\t".join(value_list)
+ self.fp.write(f"{variable} :={values}\n\n")
+
+ def WriteDoCmd(
+ self, outputs, inputs, command, part_of_all, comment=None, postbuilds=False
+ ):
+ """Write a Makefile rule that uses do_cmd.
+
+        This makes the outputs dependent on the command line that was run,
+        as well as supporting the V= make command-line flag.
+ """
+ suffix = ""
+ if postbuilds:
+ assert "," not in command
+ suffix = ",,1" # Tell do_cmd to honor $POSTBUILDS
+ self.WriteMakeRule(
+ outputs,
+ inputs,
+ actions=[f"$(call do_cmd,{command}{suffix})"],
+ comment=comment,
+ command=command,
+ force=True,
+ )
+ # Add our outputs to the list of targets we read depfiles from.
+ # all_deps is only used for deps file reading, and for deps files we replace
+ # spaces with ? because escaping doesn't work with make's $(sort) and
+ # other functions.
+ outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
+ self.WriteLn("all_deps += %s" % " ".join(outputs))
+
+ def WriteMakeRule(
+ self,
+ outputs,
+ inputs,
+ actions=None,
+ comment=None,
+ order_only=False,
+ force=False,
+ phony=False,
+ command=None,
+ ):
+ """Write a Makefile rule, with some extra tricks.
+
+ outputs: a list of outputs for the rule (note: this is not directly
+ supported by make; see comments below)
+ inputs: a list of inputs for the rule
+ actions: a list of shell commands to run for the rule
+ comment: a comment to put in the Makefile above the rule (also useful
+ for making this Python script's code self-documenting)
+ order_only: if true, makes the dependency order-only
+ force: if true, include FORCE_DO_CMD as an order-only dep
+ phony: if true, the rule does not actually generate the named output, the
+ output is just a name to run the rule
+ command: (optional) command name to generate unambiguous labels
+ """
+ outputs = [QuoteSpaces(o) for o in outputs]
+ inputs = [QuoteSpaces(i) for i in inputs]
+
+ if comment:
+ self.WriteLn("# " + comment)
+ if phony:
+ self.WriteLn(".PHONY: " + " ".join(outputs))
+ if actions:
+ self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
+ force_append = " FORCE_DO_CMD" if force else ""
+
+ if order_only:
+ # Order only rule: Just write a simple rule.
+ # TODO(evanm): just make order_only a list of deps instead of this hack.
+ self.WriteLn(
+ "{}: | {}{}".format(" ".join(outputs), " ".join(inputs), force_append)
+ )
+ elif len(outputs) == 1:
+ # Regular rule, one output: Just write a simple rule.
+ self.WriteLn("{}: {}{}".format(outputs[0], " ".join(inputs), force_append))
+ else:
+ # Regular rule, more than one output: Multiple outputs are tricky in
+ # make. We will write three rules:
+ # - All outputs depend on an intermediate file.
+ # - Make .INTERMEDIATE depend on the intermediate.
+ # - The intermediate file depends on the inputs and executes the
+ # actual command.
+ # - The intermediate recipe will 'touch' the intermediate file.
+            #   - The multi-output rule will have a do-nothing recipe.
+
+ # Hash the target name to avoid generating overlong filenames.
+ cmddigest = hashlib.sha1(
+ (command or self.target).encode("utf-8")
+ ).hexdigest()
+ intermediate = "%s.intermediate" % cmddigest
+ self.WriteLn("{}: {}".format(" ".join(outputs), intermediate))
+ self.WriteLn("\t%s" % "@:")
+ self.WriteLn("{}: {}".format(".INTERMEDIATE", intermediate))
+ self.WriteLn(
+ "{}: {}{}".format(intermediate, " ".join(inputs), force_append)
+ )
+ actions.insert(0, "$(call do_cmd,touch)")
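+            # Illustrative result for outputs=["a", "b"], inputs=["x"],
+            # command="mycmd", force=True (hypothetical names):
+            #   a b: <sha1 of "mycmd">.intermediate
+            #           @:
+            #   .INTERMEDIATE: <sha1 of "mycmd">.intermediate
+            #   <sha1 of "mycmd">.intermediate: x FORCE_DO_CMD
+            #           $(call do_cmd,touch)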
+
+ if actions:
+ for action in actions:
+ self.WriteLn("\t%s" % action)
+ self.WriteLn()
+
+ def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
+ """Write a set of LOCAL_XXX definitions for Android NDK.
+
+ These variable definitions will be used by Android NDK but do nothing for
+ non-Android applications.
+
+ Arguments:
+ module_name: Android NDK module name, which must be unique among all
+ module names.
+ all_sources: A list of source files (will be filtered by Compilable).
+ link_deps: A list of link dependencies, which must be sorted in
+ the order from dependencies to dependents.
+ """
+ if self.type not in ("executable", "shared_library", "static_library"):
+ return
+
+ self.WriteLn("# Variable definitions for Android applications")
+ self.WriteLn("include $(CLEAR_VARS)")
+ self.WriteLn("LOCAL_MODULE := " + module_name)
+ self.WriteLn(
+ "LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) "
+ "$(DEFS_$(BUILDTYPE)) "
+ # LOCAL_CFLAGS is applied to both of C and C++. There is
+ # no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C
+ # sources.
+ "$(CFLAGS_C_$(BUILDTYPE)) "
+ # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while
+ # LOCAL_C_INCLUDES does not expect it. So put it in
+ # LOCAL_CFLAGS.
+ "$(INCS_$(BUILDTYPE))"
+ )
+ # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred.
+ self.WriteLn("LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))")
+ self.WriteLn("LOCAL_C_INCLUDES :=")
+ self.WriteLn("LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)")
+
+ # Detect the C++ extension.
+ cpp_ext = {".cc": 0, ".cpp": 0, ".cxx": 0}
+ default_cpp_ext = ".cpp"
+ for filename in all_sources:
+ ext = os.path.splitext(filename)[1]
+ if ext in cpp_ext:
+ cpp_ext[ext] += 1
+ if cpp_ext[ext] > cpp_ext[default_cpp_ext]:
+ default_cpp_ext = ext
+ self.WriteLn("LOCAL_CPP_EXTENSION := " + default_cpp_ext)
+
+ self.WriteList(
+ list(map(self.Absolutify, filter(Compilable, all_sources))),
+ "LOCAL_SRC_FILES",
+ )
+
+ # Filter out those which do not match prefix and suffix and produce
+ # the resulting list without prefix and suffix.
+ def DepsToModules(deps, prefix, suffix):
+ modules = []
+ for filepath in deps:
+ filename = os.path.basename(filepath)
+ if filename.startswith(prefix) and filename.endswith(suffix):
+ modules.append(filename[len(prefix) : -len(suffix)])
+ return modules
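+        # E.g. DepsToModules(["out/libfoo.so", "out/bar.a"], "lib", ".so")
+        # returns ["foo"] (illustrative paths).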
+
+ # Retrieve the default value of 'SHARED_LIB_SUFFIX'
+ params = {"flavor": "linux"}
+ default_variables = {}
+ CalculateVariables(default_variables, params)
+
+ self.WriteList(
+ DepsToModules(
+ link_deps,
+ generator_default_variables["SHARED_LIB_PREFIX"],
+ default_variables["SHARED_LIB_SUFFIX"],
+ ),
+ "LOCAL_SHARED_LIBRARIES",
+ )
+ self.WriteList(
+ DepsToModules(
+ link_deps,
+ generator_default_variables["STATIC_LIB_PREFIX"],
+ generator_default_variables["STATIC_LIB_SUFFIX"],
+ ),
+ "LOCAL_STATIC_LIBRARIES",
+ )
+
+ if self.type == "executable":
+ self.WriteLn("include $(BUILD_EXECUTABLE)")
+ elif self.type == "shared_library":
+ self.WriteLn("include $(BUILD_SHARED_LIBRARY)")
+ elif self.type == "static_library":
+ self.WriteLn("include $(BUILD_STATIC_LIBRARY)")
+ self.WriteLn()
+
+ def WriteLn(self, text=""):
+ self.fp.write(text + "\n")
+
+ def GetSortedXcodeEnv(self, additional_settings=None):
+ return gyp.xcode_emulation.GetSortedXcodeEnv(
+ self.xcode_settings,
+ "$(abs_builddir)",
+ os.path.join("$(abs_srcdir)", self.path),
+ "$(BUILDTYPE)",
+ additional_settings,
+ )
+
+ def GetSortedXcodePostbuildEnv(self):
+ # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
+ # TODO(thakis): It would be nice to have some general mechanism instead.
+ strip_save_file = self.xcode_settings.GetPerTargetSetting(
+ "CHROMIUM_STRIP_SAVE_FILE", ""
+ )
+        # Even if strip_save_file is empty, write it explicitly; otherwise a
+        # postbuild might pick up an export from an earlier target.
+ return self.GetSortedXcodeEnv(
+ additional_settings={"CHROMIUM_STRIP_SAVE_FILE": strip_save_file}
+ )
+
+ def WriteSortedXcodeEnv(self, target, env):
+ for k, v in env:
+ # For
+ # foo := a\ b
+ # the escaped space does the right thing. For
+ # export foo := a\ b
+            # it does not -- the backslash is written to the env as a literal
+            # character.
+ # So don't escape spaces in |env[k]|.
+ self.WriteLn(f"{QuoteSpaces(target)}: export {k} := {v}")
+
+ def Objectify(self, path):
+ """Convert a path to its output directory form."""
+ if "$(" in path:
+ path = path.replace("$(obj)/", "$(obj).%s/$(TARGET)/" % self.toolset)
+ if "$(obj)" not in path:
+ path = f"$(obj).{self.toolset}/$(TARGET)/{path}"
+ return path
+
+ def Pchify(self, path, lang):
+ """Convert a prefix header path to its output directory form."""
+ path = self.Absolutify(path)
+ if "$(" in path:
+ path = path.replace(
+ "$(obj)/", f"$(obj).{self.toolset}/$(TARGET)/pch-{lang}"
+ )
+ return path
+ return f"$(obj).{self.toolset}/$(TARGET)/pch-{lang}/{path}"
+
+ def Absolutify(self, path):
+ """Convert a subdirectory-relative path into a base-relative path.
+ Skips over paths that contain variables."""
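+        # Illustrative examples, assuming self.path == "foo/bar":
+        #   Absolutify("baz.c")     -> "foo/bar/baz.c"
+        #   Absolutify("$(obj)/x/") -> "$(obj)/x"  (variable paths only lose
+        #                              their trailing slash)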
+ if "$(" in path:
+ # Don't call normpath in this case, as it might collapse the
+ # path too aggressively if it features '..'. However it's still
+ # important to strip trailing slashes.
+ return path.rstrip("/")
+ return os.path.normpath(os.path.join(self.path, path))
+
+ def ExpandInputRoot(self, template, expansion, dirname):
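+        # E.g. the template "%(INPUT_ROOT)s.pb.cc" with expansion "foo" and
+        # dirname "proto" expands to "foo.pb.cc" (illustrative values).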
+ if "%(INPUT_ROOT)s" not in template and "%(INPUT_DIRNAME)s" not in template:
+ return template
+ path = template % {
+ "INPUT_ROOT": expansion,
+ "INPUT_DIRNAME": dirname,
+ }
+ return path
+
+ def _InstallableTargetInstallPath(self):
+ """Returns the location of the final output for an installable target."""
+        # This functionality was removed for all platforms to match Xcode,
+        # which hoists shared libraries into PRODUCT_DIR for users:
+        # Xcode puts shared_library results into PRODUCT_DIR, and some gyp
+        # files rely on this, so that behavior is now emulated everywhere.
+ # if self.type == "shared_library" and (
+ # self.flavor != "mac" or self.toolset != "target"
+ # ):
+ # # Install all shared libs into a common directory (per toolset) for
+ # # convenient access with LD_LIBRARY_PATH.
+ # return "$(builddir)/lib.%s/%s" % (self.toolset, self.alias)
+ if self.flavor == "zos" and self.type == "shared_library":
+ return "$(builddir)/lib.%s/%s" % (self.toolset, self.alias)
+
+ return "$(builddir)/" + self.alias
+
+
+def WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files):
+ """Write the target to regenerate the Makefile."""
+ options = params["options"]
+ build_files_args = [
+ gyp.common.RelativePath(filename, options.toplevel_dir)
+ for filename in params["build_files_arg"]
+ ]
+
+ gyp_binary = gyp.common.FixIfRelativePath(
+ params["gyp_binary"], options.toplevel_dir
+ )
+ if not gyp_binary.startswith(os.sep):
+ gyp_binary = os.path.join(".", gyp_binary)
+
+ root_makefile.write(
+ "quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
+ "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n"
+ "%(makefile_name)s: %(deps)s\n"
+ "\t$(call do_cmd,regen_makefile)\n\n"
+ % {
+ "makefile_name": makefile_name,
+ "deps": " ".join(SourceifyAndQuoteSpaces(bf) for bf in build_files),
+ "cmd": gyp.common.EncodePOSIXShellList(
+ [gyp_binary, "-fmake"] + gyp.RegenerateFlags(options) + build_files_args
+ ),
+ }
+ )
+
+
+def PerformBuild(data, configurations, params):
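+    # E.g. with toplevel_dir "src" and configurations ["Debug", "Release"],
+    # this runs "make -C src BUILDTYPE=Debug", then the same for Release.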
+ options = params["options"]
+ for config in configurations:
+ arguments = ["make"]
+ if options.toplevel_dir and options.toplevel_dir != ".":
+ arguments += "-C", options.toplevel_dir
+ arguments.append("BUILDTYPE=" + config)
+ print(f"Building [{config}]: {arguments}")
+ subprocess.check_call(arguments)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ options = params["options"]
+ flavor = gyp.common.GetFlavor(params)
+ generator_flags = params.get("generator_flags", {})
+ builddir_name = generator_flags.get("output_dir", "out")
+ android_ndk_version = generator_flags.get("android_ndk_version", None)
+ default_target = generator_flags.get("default_target", "all")
+
+ def CalculateMakefilePath(build_file, base_name):
+ """Determine where to write a Makefile for a given gyp file."""
+ # Paths in gyp files are relative to the .gyp file, but we want
+ # paths relative to the source root for the master makefile. Grab
+ # the path of the .gyp file as the base to relativize against.
+ # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
+ base_path = gyp.common.RelativePath(os.path.dirname(build_file), options.depth)
+ # We write the file in the base_path directory.
+ output_file = os.path.join(options.depth, base_path, base_name)
+ if options.generator_output:
+ output_file = os.path.join(
+ options.depth, options.generator_output, base_path, base_name
+ )
+ base_path = gyp.common.RelativePath(
+ os.path.dirname(build_file), options.toplevel_dir
+ )
+ return base_path, output_file
+
+ # TODO: search for the first non-'Default' target. This can go
+ # away when we add verification that all targets have the
+ # necessary configurations.
+ default_configuration = None
+ toolsets = {target_dicts[target]["toolset"] for target in target_list}
+ for target in target_list:
+ spec = target_dicts[target]
+ if spec["default_configuration"] != "Default":
+ default_configuration = spec["default_configuration"]
+ break
+ if not default_configuration:
+ default_configuration = "Default"
+
+ srcdir = "."
+ makefile_name = "Makefile" + options.suffix
+ makefile_path = os.path.join(options.toplevel_dir, makefile_name)
+ if options.generator_output:
+ global srcdir_prefix
+ makefile_path = os.path.join(
+ options.toplevel_dir, options.generator_output, makefile_name
+ )
+ srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
+ srcdir_prefix = "$(srcdir)/"
+
+ flock_command = "flock"
+ copy_archive_arguments = "-af"
+ makedep_arguments = "-MMD"
+ header_params = {
+ "default_target": default_target,
+ "builddir": builddir_name,
+ "default_configuration": default_configuration,
+ "flock": flock_command,
+ "flock_index": 1,
+ "link_commands": LINK_COMMANDS_LINUX,
+ "extra_commands": "",
+ "srcdir": srcdir,
+ "copy_archive_args": copy_archive_arguments,
+ "makedep_args": makedep_arguments,
+ "CC.target": GetEnvironFallback(("CC_target", "CC"), "$(CC)"),
+ "AR.target": GetEnvironFallback(("AR_target", "AR"), "$(AR)"),
+ "CXX.target": GetEnvironFallback(("CXX_target", "CXX"), "$(CXX)"),
+ "LINK.target": GetEnvironFallback(("LINK_target", "LINK"), "$(LINK)"),
+ "PLI.target": GetEnvironFallback(("PLI_target", "PLI"), "pli"),
+ "CC.host": GetEnvironFallback(("CC_host", "CC"), "gcc"),
+ "AR.host": GetEnvironFallback(("AR_host", "AR"), "ar"),
+ "CXX.host": GetEnvironFallback(("CXX_host", "CXX"), "g++"),
+ "LINK.host": GetEnvironFallback(("LINK_host", "LINK"), "$(CXX.host)"),
+ "PLI.host": GetEnvironFallback(("PLI_host", "PLI"), "pli"),
+ }
+ if flavor == "mac":
+ flock_command = "./gyp-mac-tool flock"
+ header_params.update(
+ {
+ "flock": flock_command,
+ "flock_index": 2,
+ "link_commands": LINK_COMMANDS_MAC,
+ "extra_commands": SHARED_HEADER_MAC_COMMANDS,
+ }
+ )
+ elif flavor == "android":
+ header_params.update({"link_commands": LINK_COMMANDS_ANDROID})
+ elif flavor == "zos":
+ copy_archive_arguments = "-fPR"
+ CC_target = GetEnvironFallback(("CC_target", "CC"), "njsc")
+ makedep_arguments = "-MMD"
+ if CC_target == "clang":
+ CC_host = GetEnvironFallback(("CC_host", "CC"), "clang")
+ CXX_target = GetEnvironFallback(("CXX_target", "CXX"), "clang++")
+ CXX_host = GetEnvironFallback(("CXX_host", "CXX"), "clang++")
+ elif CC_target == "ibm-clang64":
+ CC_host = GetEnvironFallback(("CC_host", "CC"), "ibm-clang64")
+ CXX_target = GetEnvironFallback(("CXX_target", "CXX"), "ibm-clang++64")
+ CXX_host = GetEnvironFallback(("CXX_host", "CXX"), "ibm-clang++64")
+ elif CC_target == "ibm-clang":
+ CC_host = GetEnvironFallback(("CC_host", "CC"), "ibm-clang")
+ CXX_target = GetEnvironFallback(("CXX_target", "CXX"), "ibm-clang++")
+ CXX_host = GetEnvironFallback(("CXX_host", "CXX"), "ibm-clang++")
+ else:
+ # Node.js versions prior to v18:
+ makedep_arguments = "-qmakedep=gcc"
+ CC_host = GetEnvironFallback(("CC_host", "CC"), "njsc")
+ CXX_target = GetEnvironFallback(("CXX_target", "CXX"), "njsc++")
+ CXX_host = GetEnvironFallback(("CXX_host", "CXX"), "njsc++")
+ header_params.update(
+ {
+ "copy_archive_args": copy_archive_arguments,
+ "makedep_args": makedep_arguments,
+ "link_commands": LINK_COMMANDS_OS390,
+ "extra_commands": SHARED_HEADER_OS390_COMMANDS,
+ "CC.target": CC_target,
+ "CXX.target": CXX_target,
+ "CC.host": CC_host,
+ "CXX.host": CXX_host,
+ }
+ )
+ elif flavor == "solaris":
+ copy_archive_arguments = "-pPRf@"
+ header_params.update(
+ {
+ "copy_archive_args": copy_archive_arguments,
+ "flock": "./gyp-flock-tool flock",
+ "flock_index": 2,
+ }
+ )
+ elif flavor == "freebsd":
+ # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific.
+ header_params.update({"flock": "lockf"})
+ elif flavor == "openbsd":
+ copy_archive_arguments = "-pPRf"
+ header_params.update({"copy_archive_args": copy_archive_arguments})
+ elif flavor == "aix":
+ copy_archive_arguments = "-pPRf"
+ header_params.update(
+ {
+ "copy_archive_args": copy_archive_arguments,
+ "link_commands": LINK_COMMANDS_AIX,
+ "flock": "./gyp-flock-tool flock",
+ "flock_index": 2,
+ }
+ )
+ elif flavor == "os400":
+ copy_archive_arguments = "-pPRf"
+ header_params.update(
+ {
+ "copy_archive_args": copy_archive_arguments,
+ "link_commands": LINK_COMMANDS_OS400,
+ "flock": "./gyp-flock-tool flock",
+ "flock_index": 2,
+ }
+ )
+
+ build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
+ make_global_settings_array = data[build_file].get("make_global_settings", [])
+ wrappers = {}
+ for key, value in make_global_settings_array:
+ if key.endswith("_wrapper"):
+ wrappers[key[: -len("_wrapper")]] = "$(abspath %s)" % value
+ make_global_settings = ""
+ for key, value in make_global_settings_array:
+ if re.match(".*_wrapper", key):
+ continue
+ if value[0] != "$":
+ value = "$(abspath %s)" % value
+ wrapper = wrappers.get(key)
+ if wrapper:
+ value = f"{wrapper} {value}"
+ del wrappers[key]
+ if key in ("CC", "CC.host", "CXX", "CXX.host"):
+ make_global_settings += (
+ "ifneq (,$(filter $(origin %s), undefined default))\n" % key
+ )
+ # Let gyp-time envvars win over global settings.
+ env_key = key.replace(".", "_") # CC.host -> CC_host
+ if env_key in os.environ:
+ value = os.environ[env_key]
+ make_global_settings += f" {key} = {value}\n"
+ make_global_settings += "endif\n"
+ else:
+ make_global_settings += f"{key} ?= {value}\n"
+ # TODO(ukai): define cmd when only wrapper is specified in
+ # make_global_settings.
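+    # Illustrative example: a make_global_settings of
+    #   [["CC", "/usr/bin/clang"], ["CC_wrapper", "ccache"]]
+    # emits (assuming CC is not set in the gyp-time environment):
+    #   ifneq (,$(filter $(origin CC), undefined default))
+    #     CC = $(abspath ccache) $(abspath /usr/bin/clang)
+    #   endif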
+
+ header_params["make_global_settings"] = make_global_settings
+
+ gyp.common.EnsureDirExists(makefile_path)
+ root_makefile = open(makefile_path, "w")
+ root_makefile.write(SHARED_HEADER % header_params)
+    # Currently all versions have the same effect, but in the future the
+    # behavior could differ.
+ if android_ndk_version:
+ root_makefile.write(
+ "# Define LOCAL_PATH for build of Android applications.\n"
+ "LOCAL_PATH := $(call my-dir)\n"
+ "\n"
+ )
+ for toolset in toolsets:
+ root_makefile.write("TOOLSET := %s\n" % toolset)
+ WriteRootHeaderSuffixRules(root_makefile)
+
+ # Put build-time support tools next to the root Makefile.
+ dest_path = os.path.dirname(makefile_path)
+ gyp.common.CopyTool(flavor, dest_path)
+
+ # Find the list of targets that derive from the gyp file(s) being built.
+ needed_targets = set()
+ for build_file in params["build_files"]:
+ for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
+ needed_targets.add(target)
+
+ build_files = set()
+ include_list = set()
+ for qualified_target in target_list:
+ build_file, target, toolset = gyp.common.ParseQualifiedTarget(qualified_target)
+
+ this_make_global_settings = data[build_file].get("make_global_settings", [])
+ assert make_global_settings_array == this_make_global_settings, (
+ "make_global_settings needs to be the same for all targets "
+ f"{this_make_global_settings} vs. {make_global_settings}"
+ )
+
+ build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
+ included_files = data[build_file]["included_files"]
+ for included_file in included_files:
+ # The included_files entries are relative to the dir of the build file
+ # that included them, so we have to undo that and then make them relative
+ # to the root dir.
+ relative_include_file = gyp.common.RelativePath(
+ gyp.common.UnrelativePath(included_file, build_file),
+ options.toplevel_dir,
+ )
+ abs_include_file = os.path.abspath(relative_include_file)
+ # If the include file is from the ~/.gyp dir, we should use absolute path
+ # so that relocating the src dir doesn't break the path.
+ if params["home_dot_gyp"] and abs_include_file.startswith(
+ params["home_dot_gyp"]
+ ):
+ build_files.add(abs_include_file)
+ else:
+ build_files.add(relative_include_file)
+
+ base_path, output_file = CalculateMakefilePath(
+ build_file, target + "." + toolset + options.suffix + ".mk"
+ )
+
+ spec = target_dicts[qualified_target]
+ configs = spec["configurations"]
+
+ if flavor == "mac":
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
+
+ writer = MakefileWriter(generator_flags, flavor)
+ writer.Write(
+ qualified_target,
+ base_path,
+ output_file,
+ spec,
+ configs,
+ part_of_all=qualified_target in needed_targets,
+ )
+
+ # Our root_makefile lives at the source root. Compute the relative path
+ # from there to the output_file for including.
+ mkfile_rel_path = gyp.common.RelativePath(
+ output_file, os.path.dirname(makefile_path)
+ )
+ include_list.add(mkfile_rel_path)
+
+ # Write out per-gyp (sub-project) Makefiles.
+ depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd())
+ for build_file in build_files:
+ # The paths in build_files were relativized above, so undo that before
+ # testing against the non-relativized items in target_list and before
+ # calculating the Makefile path.
+ build_file = os.path.join(depth_rel_path, build_file)
+ gyp_targets = [
+ target_dicts[qualified_target]["target_name"]
+ for qualified_target in target_list
+ if qualified_target.startswith(build_file)
+ and qualified_target in needed_targets
+ ]
+ # Only generate Makefiles for gyp files with targets.
+ if not gyp_targets:
+ continue
+ base_path, output_file = CalculateMakefilePath(
+ build_file, os.path.splitext(os.path.basename(build_file))[0] + ".Makefile"
+ )
+ makefile_rel_path = gyp.common.RelativePath(
+ os.path.dirname(makefile_path), os.path.dirname(output_file)
+ )
+ writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets, builddir_name)
+
+ # Write out the sorted list of includes.
+ root_makefile.write("\n")
+ for include_file in sorted(include_list):
+        # We wrap each .mk include in an if statement so users can tell make to
+        # not load a file by setting NO_LOAD. The make code below loads the .mk
+        # file only if its filename does not start with a token in NO_LOAD.
+ root_makefile.write(
+ "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n"
+ " $(findstring $(join ^,$(prefix)),\\\n"
+ " $(join ^," + include_file + ")))),)\n"
+ )
+ root_makefile.write(" include " + include_file + "\n")
+ root_makefile.write("endif\n")
+ root_makefile.write("\n")
+
+ if not generator_flags.get("standalone") and generator_flags.get(
+ "auto_regeneration", True
+ ):
+ WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
+
+ root_makefile.write(SHARED_FOOTER)
+
+ root_makefile.close()
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
new file mode 100644
index 0000000..fd95005
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
@@ -0,0 +1,3981 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import ntpath
+import os
+import posixpath
+import re
+import subprocess
+import sys
+
+from collections import OrderedDict
+
+import gyp.common
+import gyp.easy_xml as easy_xml
+import gyp.generator.ninja as ninja_generator
+import gyp.MSVSNew as MSVSNew
+import gyp.MSVSProject as MSVSProject
+import gyp.MSVSSettings as MSVSSettings
+import gyp.MSVSToolFile as MSVSToolFile
+import gyp.MSVSUserFile as MSVSUserFile
+import gyp.MSVSUtil as MSVSUtil
+import gyp.MSVSVersion as MSVSVersion
+from gyp.common import GypError
+from gyp.common import OrderedSet
+
+
+# Regular expression for validating Visual Studio GUIDs. If the GUID
+# contains lowercase hex letters, MSVS will be fine. However,
+# IncrediBuild BuildConsole will parse the solution file, but then
+# silently skip building the target causing hard to track down errors.
+# Note that this only happens with the BuildConsole, and does not occur
+# if IncrediBuild is executed from inside Visual Studio. This regex
+# validates that the string looks like a GUID with all uppercase hex
+# letters.
+VALID_MSVS_GUID_CHARS = re.compile(r"^[A-F0-9\-]+$")
+
+generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
+
+generator_default_variables = {
+ "DRIVER_PREFIX": "",
+ "DRIVER_SUFFIX": ".sys",
+ "EXECUTABLE_PREFIX": "",
+ "EXECUTABLE_SUFFIX": ".exe",
+ "STATIC_LIB_PREFIX": "",
+ "SHARED_LIB_PREFIX": "",
+ "STATIC_LIB_SUFFIX": ".lib",
+ "SHARED_LIB_SUFFIX": ".dll",
+ "INTERMEDIATE_DIR": "$(IntDir)",
+ "SHARED_INTERMEDIATE_DIR": "$(OutDir)/obj/global_intermediate",
+ "OS": "win",
+ "PRODUCT_DIR": "$(OutDir)",
+ "LIB_DIR": "$(OutDir)lib",
+ "RULE_INPUT_ROOT": "$(InputName)",
+ "RULE_INPUT_DIRNAME": "$(InputDir)",
+ "RULE_INPUT_EXT": "$(InputExt)",
+ "RULE_INPUT_NAME": "$(InputFileName)",
+ "RULE_INPUT_PATH": "$(InputPath)",
+ "CONFIGURATION_NAME": "$(ConfigurationName)",
+}
+
+
+# The msvs specific sections that hold paths
+generator_additional_path_sections = [
+ "msvs_cygwin_dirs",
+ "msvs_props",
+]
+
+
+generator_additional_non_configuration_keys = [
+ "msvs_cygwin_dirs",
+ "msvs_cygwin_shell",
+ "msvs_large_pdb",
+ "msvs_shard",
+ "msvs_external_builder",
+ "msvs_external_builder_out_dir",
+ "msvs_external_builder_build_cmd",
+ "msvs_external_builder_clean_cmd",
+ "msvs_external_builder_clcompile_cmd",
+ "msvs_enable_winrt",
+ "msvs_requires_importlibrary",
+ "msvs_enable_winphone",
+ "msvs_application_type_revision",
+ "msvs_target_platform_version",
+ "msvs_target_platform_minversion",
+]
+
+generator_filelist_paths = None
+
+# List of precompiled header related keys.
+precomp_keys = [
+ "msvs_precompiled_header",
+ "msvs_precompiled_source",
+]
+
+
+cached_username = None
+
+
+cached_domain = None
+
+
+# TODO(gspencer): Switch the os.environ calls to be
+# win32api.GetDomainName() and win32api.GetUserName() once the
+# python version in depot_tools has been updated to work on Vista
+# 64-bit.
+def _GetDomainAndUserName():
+ if sys.platform not in ("win32", "cygwin"):
+ return ("DOMAIN", "USERNAME")
+ global cached_username
+ global cached_domain
+ if not cached_domain or not cached_username:
+ domain = os.environ.get("USERDOMAIN")
+ username = os.environ.get("USERNAME")
+ if not domain or not username:
+ call = subprocess.Popen(
+ ["net", "config", "Workstation"], stdout=subprocess.PIPE
+ )
+ config = call.communicate()[0].decode("utf-8")
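+            # Typical "net config Workstation" output contains lines such as
+            # "User name    ALICE" and "Logon domain    CORP" (illustrative),
+            # which the regexes below extract.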
+ username_re = re.compile(r"^User name\s+(\S+)", re.MULTILINE)
+ username_match = username_re.search(config)
+ if username_match:
+ username = username_match.group(1)
+ domain_re = re.compile(r"^Logon domain\s+(\S+)", re.MULTILINE)
+ domain_match = domain_re.search(config)
+ if domain_match:
+ domain = domain_match.group(1)
+ cached_domain = domain
+ cached_username = username
+ return (cached_domain, cached_username)
+
+
+fixpath_prefix = None
+
+
+def _NormalizedSource(source):
+ """Normalize the path.
+
+ But not if that gets rid of a variable, as this may expand to something
+ larger than one directory.
+
+ Arguments:
+      source: The path to be normalized.
+
+ Returns:
+ The normalized path.
+ """
+ normalized = os.path.normpath(source)
+ if source.count("$") == normalized.count("$"):
+ source = normalized
+ return source
+
+
+def _FixPath(path, separator="\\"):
+ """Convert paths to a form that will make sense in a vcproj file.
+
+ Arguments:
+      path: The path to convert, may contain / etc.
+      separator: The separator to convert the path to, "\\" by default.
+    Returns:
+      The path with all slashes converted to the requested separator.
+ """
+ if (
+ fixpath_prefix
+ and path
+ and not os.path.isabs(path)
+ and not path[0] == "$"
+ and not _IsWindowsAbsPath(path)
+ ):
+ path = os.path.join(fixpath_prefix, path)
+ if separator == "\\":
+ path = path.replace("/", "\\")
+ path = _NormalizedSource(path)
+ if separator == "/":
+ path = path.replace("\\", "/")
+ if path and path[-1] == separator:
+ path = path[:-1]
+ return path
+
+
+def _IsWindowsAbsPath(path):
+ """
+ On Cygwin systems Python needs a little help determining if a path
+ is an absolute Windows path or not, so that
+ it does not treat those as relative, which results in bad paths like:
+ '..\\C:\\<some path>\\some_source_code_file.cc'
+ """
+ return path.startswith("c:") or path.startswith("C:")
+
+
+def _FixPaths(paths, separator="\\"):
+ """Fix each of the paths of the list."""
+ return [_FixPath(i, separator) for i in paths]
+
+
+def _ConvertSourcesToFilterHierarchy(
+ sources, prefix=None, excluded=None, list_excluded=True, msvs_version=None
+):
+ """Converts a list split source file paths into a vcproj folder hierarchy.
+
+ Arguments:
+ sources: A list of source file paths split.
+ prefix: A list of source file path layers meant to apply to each of sources.
+ excluded: A set of excluded files.
+ msvs_version: A MSVSVersion object.
+
+ Returns:
+ A hierarchy of filenames and MSVSProject.Filter objects that matches the
+ layout of the source tree.
+ For example:
+ _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
+ prefix=['joe'])
+ -->
+ [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
+ MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
+ """
+ if not prefix:
+ prefix = []
+ result = []
+ excluded_result = []
+ folders = OrderedDict()
+ # Gather files into the final result, excluded, or folders.
+ for s in sources:
+ if len(s) == 1:
+ filename = _NormalizedSource("\\".join(prefix + s))
+ if filename in excluded:
+ excluded_result.append(filename)
+ else:
+ result.append(filename)
+ elif msvs_version and not msvs_version.UsesVcxproj():
+ # For MSVS 2008 and earlier, we need to process all files before walking
+ # the sub folders.
+ if not folders.get(s[0]):
+ folders[s[0]] = []
+ folders[s[0]].append(s[1:])
+ else:
+ contents = _ConvertSourcesToFilterHierarchy(
+ [s[1:]],
+ prefix + [s[0]],
+ excluded=excluded,
+ list_excluded=list_excluded,
+ msvs_version=msvs_version,
+ )
+ contents = MSVSProject.Filter(s[0], contents=contents)
+ result.append(contents)
+ # Add a folder for excluded files.
+ if excluded_result and list_excluded:
+ excluded_folder = MSVSProject.Filter(
+ "_excluded_files", contents=excluded_result
+ )
+ result.append(excluded_folder)
+
+ if msvs_version and msvs_version.UsesVcxproj():
+ return result
+
+ # Populate all the folders.
+ for f in folders:
+ contents = _ConvertSourcesToFilterHierarchy(
+ folders[f],
+ prefix=prefix + [f],
+ excluded=excluded,
+ list_excluded=list_excluded,
+ msvs_version=msvs_version,
+ )
+ contents = MSVSProject.Filter(f, contents=contents)
+ result.append(contents)
+ return result
+
+
+def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
+ if not value:
+ return
+ _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset)
+
+
+def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
+ # TODO(bradnelson): ugly hack, fix this more generally!!!
+ if "Directories" in setting or "Dependencies" in setting:
+        if isinstance(value, str):
+            value = value.replace("/", "\\")
+        else:
+            value = [i.replace("/", "\\") for i in value]
+ if not tools.get(tool_name):
+ tools[tool_name] = dict()
+ tool = tools[tool_name]
+ if "CompileAsWinRT" == setting:
+ return
+ if tool.get(setting):
+ if only_if_unset:
+ return
+        if isinstance(tool[setting], list) and isinstance(value, list):
+ tool[setting] += value
+ else:
+ raise TypeError(
+ 'Appending "%s" to a non-list setting "%s" for tool "%s" is '
+ "not allowed, previous value: %s"
+ % (value, setting, tool_name, str(tool[setting]))
+ )
+ else:
+ tool[setting] = value
+
+
+def _ConfigTargetVersion(config_data):
+ return config_data.get("msvs_target_version", "Windows7")
+
+
+def _ConfigPlatform(config_data):
+ return config_data.get("msvs_configuration_platform", "Win32")
+
+
+def _ConfigBaseName(config_name, platform_name):
+ if config_name.endswith("_" + platform_name):
+ return config_name[0 : -len(platform_name) - 1]
+ else:
+ return config_name
+
+
+def _ConfigFullName(config_name, config_data):
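+    # E.g. config_name "Debug_x64" with msvs_configuration_platform "x64"
+    # yields "Debug|x64" (illustrative).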
+ platform_name = _ConfigPlatform(config_data)
+ return f"{_ConfigBaseName(config_name, platform_name)}|{platform_name}"
+
+
+def _ConfigWindowsTargetPlatformVersion(config_data, version):
+ target_ver = config_data.get("msvs_windows_target_platform_version")
+ if target_ver and re.match(r"^\d+", target_ver):
+ return target_ver
+ config_ver = config_data.get("msvs_windows_sdk_version")
+ vers = [config_ver] if config_ver else version.compatible_sdks
+ for ver in vers:
+ for key in [
+ r"HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s",
+ r"HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s",
+ ]:
+ sdk_dir = MSVSVersion._RegistryGetValue(key % ver, "InstallationFolder")
+ if not sdk_dir:
+ continue
+ version = MSVSVersion._RegistryGetValue(key % ver, "ProductVersion") or ""
+ # Find a matching entry in sdk_dir\include.
+ expected_sdk_dir = r"%s\include" % sdk_dir
+ names = sorted(
+ (
+ x
+ for x in (
+ os.listdir(expected_sdk_dir)
+ if os.path.isdir(expected_sdk_dir)
+ else []
+ )
+ if x.startswith(version)
+ ),
+ reverse=True,
+ )
+ if names:
+ return names[0]
+ else:
+ print(
+ "Warning: No include files found for detected "
+ "Windows SDK version %s" % (version),
+ file=sys.stdout,
+ )
+
+
+def _BuildCommandLineForRuleRaw(
+ spec, cmd, cygwin_shell, has_input_path, quote_cmd, do_setup_env
+):
+
+ if [x for x in cmd if "$(InputDir)" in x]:
+ input_dir_preamble = (
+ "set INPUTDIR=$(InputDir)\n"
+ "if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n"
+ "set INPUTDIR=%INPUTDIR:~0,-1%\n"
+ )
+ else:
+ input_dir_preamble = ""
+
+ if cygwin_shell:
+ # Find path to cygwin.
+ cygwin_dir = _FixPath(spec.get("msvs_cygwin_dirs", ["."])[0])
+ # Prepare command.
+ direct_cmd = cmd
+ direct_cmd = [
+ i.replace("$(IntDir)", '`cygpath -m "${INTDIR}"`') for i in direct_cmd
+ ]
+ direct_cmd = [
+ i.replace("$(OutDir)", '`cygpath -m "${OUTDIR}"`') for i in direct_cmd
+ ]
+ direct_cmd = [
+ i.replace("$(InputDir)", '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd
+ ]
+ if has_input_path:
+ direct_cmd = [
+ i.replace("$(InputPath)", '`cygpath -m "${INPUTPATH}"`')
+ for i in direct_cmd
+ ]
+ direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd]
+ # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
+ direct_cmd = " ".join(direct_cmd)
+ # TODO(quote): regularize quoting path names throughout the module
+ cmd = ""
+ if do_setup_env:
+ cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
+ cmd += "set CYGWIN=nontsec&& "
+ if direct_cmd.find("NUMBER_OF_PROCESSORS") >= 0:
+ cmd += "set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& "
+ if direct_cmd.find("INTDIR") >= 0:
+ cmd += "set INTDIR=$(IntDir)&& "
+ if direct_cmd.find("OUTDIR") >= 0:
+ cmd += "set OUTDIR=$(OutDir)&& "
+ if has_input_path and direct_cmd.find("INPUTPATH") >= 0:
+ cmd += "set INPUTPATH=$(InputPath) && "
+ cmd += 'bash -c "%(cmd)s"'
+ cmd = cmd % {"cygwin_dir": cygwin_dir, "cmd": direct_cmd}
+ return input_dir_preamble + cmd
+ else:
+ # Convert cat --> type to mimic unix.
+ if cmd[0] == "cat":
+ command = ["type"]
+ else:
+ command = [cmd[0].replace("/", "\\")]
+ # Add call before command to ensure that commands can be tied together one
+ # after the other without aborting in Incredibuild, since IB makes a bat
+ # file out of the raw command string, and some commands (like python) are
+ # actually batch files themselves.
+ command.insert(0, "call")
+ # Fix the paths
+ # TODO(quote): This is a really ugly heuristic, and will miss path fixing
+ # for arguments like "--arg=path", arg=path, or "/opt:path".
+ # If the argument starts with a slash or dash, or contains an equal sign,
+ # it's probably a command line switch.
+ # Return the path with forward slashes because the command using it might
+ # not support backslashes.
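+        # E.g. (illustrative, assuming no fixpath_prefix and quote_cmd set):
+        # ["cat", "foo\\bar.txt"] becomes 'call type "foo/bar.txt"'.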
+ arguments = [
+ i if (i[:1] in "/-" or "=" in i) else _FixPath(i, "/")
+ for i in cmd[1:]
+ ]
+ arguments = [i.replace("$(InputDir)", "%INPUTDIR%") for i in arguments]
+ arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
+ if quote_cmd:
+ # Support a mode for using cmd directly.
+ # Convert any paths to native form (first element is used directly).
+ # TODO(quote): regularize quoting path names throughout the module
+ arguments = ['"%s"' % i for i in arguments]
+ # Collapse into a single command.
+ return input_dir_preamble + " ".join(command + arguments)
+
+
+def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env):
+ # Currently this weird argument munging is used to duplicate the way a
+ # python script would need to be run as part of the chrome tree.
+ # Eventually we should add some sort of rule_default option to set this
+ # per project. For now the behavior chrome needs is the default.
+ mcs = rule.get("msvs_cygwin_shell")
+ if mcs is None:
+ mcs = int(spec.get("msvs_cygwin_shell", 1))
+ elif isinstance(mcs, str):
+ mcs = int(mcs)
+ quote_cmd = int(rule.get("msvs_quote_cmd", 1))
+ return _BuildCommandLineForRuleRaw(
+ spec, rule["action"], mcs, has_input_path, quote_cmd, do_setup_env=do_setup_env
+ )
+
+
+def _AddActionStep(actions_dict, inputs, outputs, description, command):
+ """Merge action into an existing list of actions.
+
+ Care must be taken so that actions which have overlapping inputs either don't
+ get assigned to the same input, or get collapsed into one.
+
+ Arguments:
+ actions_dict: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+ inputs: list of inputs
+ outputs: list of outputs
+ description: description of the action
+ command: command line to execute
+ """
+ # Require there to be at least one input (call sites will ensure this).
+ assert inputs
+
+ action = {
+ "inputs": inputs,
+ "outputs": outputs,
+ "description": description,
+ "command": command,
+ }
+
+ # Pick where to stick this action.
+ # While less than optimal in terms of build time, attach them to the first
+ # input for now.
+ chosen_input = inputs[0]
+
+ # Add it there.
+ if chosen_input not in actions_dict:
+ actions_dict[chosen_input] = []
+ actions_dict[chosen_input].append(action)
+
+
+def _AddCustomBuildToolForMSVS(
+ p, spec, primary_input, inputs, outputs, description, cmd
+):
+ """Add a custom build tool to execute something.
+
+ Arguments:
+ p: the target project
+ spec: the target project dict
+ primary_input: input file to attach the build tool to
+ inputs: list of inputs
+ outputs: list of outputs
+ description: description of the action
+ cmd: command line to execute
+ """
+ inputs = _FixPaths(inputs)
+ outputs = _FixPaths(outputs)
+ tool = MSVSProject.Tool(
+ "VCCustomBuildTool",
+ {
+ "Description": description,
+ "AdditionalDependencies": ";".join(inputs),
+ "Outputs": ";".join(outputs),
+ "CommandLine": cmd,
+ },
+ )
+ # Add to the properties of primary input for each config.
+ for config_name, c_data in spec["configurations"].items():
+ p.AddFileConfig(
+ _FixPath(primary_input), _ConfigFullName(config_name, c_data), tools=[tool]
+ )
+
+
+def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
+ """Add actions accumulated into an actions_dict, merging as needed.
+
+ Arguments:
+ p: the target project
+ spec: the target project dict
+ actions_dict: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+ """
+ for primary_input in actions_dict:
+ inputs = OrderedSet()
+ outputs = OrderedSet()
+ descriptions = []
+ commands = []
+ for action in actions_dict[primary_input]:
+ inputs.update(OrderedSet(action["inputs"]))
+ outputs.update(OrderedSet(action["outputs"]))
+ descriptions.append(action["description"])
+ commands.append(action["command"])
+ # Add the custom build step for one input file.
+ description = ", and also ".join(descriptions)
+ command = "\r\n".join(commands)
+ _AddCustomBuildToolForMSVS(
+ p,
+ spec,
+ primary_input=primary_input,
+ inputs=inputs,
+ outputs=outputs,
+ description=description,
+ cmd=command,
+ )
+
+
+def _RuleExpandPath(path, input_file):
+ """Given the input file to which a rule applied, string substitute a path.
+
+ Arguments:
+ path: a path to string expand
+ input_file: the file to which the rule applied.
+ Returns:
+ The string substituted path.
+ """
+ path = path.replace(
+ "$(InputName)", os.path.splitext(os.path.split(input_file)[1])[0]
+ )
+ path = path.replace("$(InputDir)", os.path.dirname(input_file))
+ path = path.replace(
+ "$(InputExt)", os.path.splitext(os.path.split(input_file)[1])[1]
+ )
+ path = path.replace("$(InputFileName)", os.path.split(input_file)[1])
+ path = path.replace("$(InputPath)", input_file)
+ return path
+
+
+def _FindRuleTriggerFiles(rule, sources):
+ """Find the list of files which a particular rule applies to.
+
+ Arguments:
+ rule: the rule in question
+ sources: the set of all known source files for this project
+ Returns:
+ The list of sources that trigger a particular rule.
+ """
+ return rule.get("rule_sources", [])
+
+
+def _RuleInputsAndOutputs(rule, trigger_file):
+ """Find the inputs and outputs generated by a rule.
+
+ Arguments:
+ rule: the rule in question.
+ trigger_file: the main trigger for this rule.
+ Returns:
+ The pair of (inputs, outputs) involved in this rule.
+ """
+ raw_inputs = _FixPaths(rule.get("inputs", []))
+ raw_outputs = _FixPaths(rule.get("outputs", []))
+ inputs = OrderedSet()
+ outputs = OrderedSet()
+ inputs.add(trigger_file)
+ for i in raw_inputs:
+ inputs.add(_RuleExpandPath(i, trigger_file))
+ for o in raw_outputs:
+ outputs.add(_RuleExpandPath(o, trigger_file))
+ return (inputs, outputs)
+
+
+def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
+ """Generate a native rules file.
+
+ Arguments:
+ p: the target project
+ rules: the set of rules to include
+ output_dir: the directory in which the project/gyp resides
+ spec: the project dict
+ options: global generator options
+ """
+ rules_filename = "{}{}.rules".format(spec["target_name"], options.suffix)
+ rules_file = MSVSToolFile.Writer(
+ os.path.join(output_dir, rules_filename), spec["target_name"]
+ )
+ # Add each rule.
+ for r in rules:
+ rule_name = r["rule_name"]
+ rule_ext = r["extension"]
+ inputs = _FixPaths(r.get("inputs", []))
+ outputs = _FixPaths(r.get("outputs", []))
+ # Skip a rule with no action and no inputs.
+ if "action" not in r and not r.get("rule_sources", []):
+ continue
+ cmd = _BuildCommandLineForRule(spec, r, has_input_path=True, do_setup_env=True)
+ rules_file.AddCustomBuildRule(
+ name=rule_name,
+ description=r.get("message", rule_name),
+ extensions=[rule_ext],
+ additional_dependencies=inputs,
+ outputs=outputs,
+ cmd=cmd,
+ )
+ # Write out rules file.
+ rules_file.WriteIfChanged()
+
+ # Add rules file to project.
+ p.AddToolFile(rules_filename)
+
+
+def _Cygwinify(path):
+ path = path.replace("$(OutDir)", "$(OutDirCygwin)")
+ path = path.replace("$(IntDir)", "$(IntDirCygwin)")
+ return path
+
+
+def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to_add):
+ """Generate an external makefile to do a set of rules.
+
+ Arguments:
+ rules: the list of rules to include
+ output_dir: path containing project and gyp files
+ spec: project specification data
+ sources: set of sources known
+ options: global generator options
+ actions_to_add: The list of actions we will add to.
+ """
+ filename = "{}_rules{}.mk".format(spec["target_name"], options.suffix)
+ mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
+ # Find cygwin style versions of some paths.
+ mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
+ mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
+ # Gather stuff needed to emit all: target.
+ all_inputs = OrderedSet()
+ all_outputs = OrderedSet()
+ all_output_dirs = OrderedSet()
+ first_outputs = []
+ for rule in rules:
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for tf in trigger_files:
+ inputs, outputs = _RuleInputsAndOutputs(rule, tf)
+ all_inputs.update(OrderedSet(inputs))
+ all_outputs.update(OrderedSet(outputs))
+ # Only use one target from each rule as the dependency for
+ # 'all' so we don't try to build each rule multiple times.
+ first_outputs.append(list(outputs)[0])
+ # Get the unique output directories for this rule.
+ output_dirs = [os.path.split(i)[0] for i in outputs]
+ for od in output_dirs:
+ all_output_dirs.add(od)
+ first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
+ # Write out all: target, including mkdir for each output directory.
+ mk_file.write("all: %s\n" % " ".join(first_outputs_cyg))
+ for od in all_output_dirs:
+ if od:
+ mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od)
+ mk_file.write("\n")
+ # Define how each output is generated.
+ for rule in rules:
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for tf in trigger_files:
+ # Get all the inputs and outputs for this rule for this trigger file.
+ inputs, outputs = _RuleInputsAndOutputs(rule, tf)
+ inputs = [_Cygwinify(i) for i in inputs]
+ outputs = [_Cygwinify(i) for i in outputs]
+ # Prepare the command line for this rule.
+ cmd = [_RuleExpandPath(c, tf) for c in rule["action"]]
+ cmd = ['"%s"' % i for i in cmd]
+ cmd = " ".join(cmd)
+ # Add it to the makefile.
+ mk_file.write("{}: {}\n".format(" ".join(outputs), " ".join(inputs)))
+ mk_file.write("\t%s\n\n" % cmd)
+ # Close up the file.
+ mk_file.close()
+
+ # Add makefile to list of sources.
+ sources.add(filename)
+ # Add a build action to call makefile.
+ cmd = [
+ "make",
+ "OutDir=$(OutDir)",
+ "IntDir=$(IntDir)",
+ "-j",
+ "${NUMBER_OF_PROCESSORS_PLUS_1}",
+ "-f",
+ filename,
+ ]
+ cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True)
+ # Insert makefile as 0'th input, so it gets the action attached there,
+    # as this is easier to understand in the IDE.
+ all_inputs = list(all_inputs)
+ all_inputs.insert(0, filename)
+ _AddActionStep(
+ actions_to_add,
+ inputs=_FixPaths(all_inputs),
+ outputs=_FixPaths(all_outputs),
+ description="Running external rules for %s" % spec["target_name"],
+ command=cmd,
+ )
+
+
+def _EscapeEnvironmentVariableExpansion(s):
+ """Escapes % characters.
+
+ Escapes any % characters so that Windows-style environment variable
+ expansions will leave them alone.
+ See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
+ to understand why we have to do this.
+
+ Args:
+ s: The string to be escaped.
+
+ Returns:
+ The escaped string.
+ """ # noqa: E731,E123,E501
+ s = s.replace("%", "%%")
+ return s
+
+
+quote_replacer_regex = re.compile(r'(\\*)"')
+
+
+def _EscapeCommandLineArgumentForMSVS(s):
+ """Escapes a Windows command-line argument.
+
+ So that the Win32 CommandLineToArgv function will turn the escaped result back
+ into the original string.
+ See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
+ ("Parsing C++ Command-Line Arguments") to understand why we have to do
+ this.
+
+ Args:
+ s: the string to be escaped.
+ Returns:
+ the escaped string.
+ """
+
+ def _Replace(match):
+ # For a literal quote, CommandLineToArgv requires an odd number of
+ # backslashes preceding it, and it produces half as many literal backslashes
+ # (rounded down). So we need to produce 2n+1 backslashes.
+ return 2 * match.group(1) + '\\"'
+
+ # Escape all quotes so that they are interpreted literally.
+ s = quote_replacer_regex.sub(_Replace, s)
+ # Now add unescaped quotes so that any whitespace is interpreted literally.
+ s = '"' + s + '"'
+ return s
+
+
+delimiters_replacer_regex = re.compile(r"(\\*)([,;]+)")
+
+
+def _EscapeVCProjCommandLineArgListItem(s):
+ """Escapes command line arguments for MSVS.
+
+ The VCProj format stores string lists in a single string using commas and
+ semi-colons as separators, which must be quoted if they are to be
+ interpreted literally. However, command-line arguments may already have
+ quotes, and the VCProj parser is ignorant of the backslash escaping
+ convention used by CommandLineToArgv, so the command-line quotes and the
+ VCProj quotes may not be the same quotes. So to store a general
+ command-line argument in a VCProj list, we need to parse the existing
+ quoting according to VCProj's convention and quote any delimiters that are
+ not already quoted by that convention. The quotes that we add will also be
+ seen by CommandLineToArgv, so if backslashes precede them then we also have
+ to escape those backslashes according to the CommandLineToArgv
+ convention.
+
+ Args:
+ s: the string to be escaped.
+ Returns:
+ the escaped string.
+ """
+
+ def _Replace(match):
+ # For a non-literal quote, CommandLineToArgv requires an even number of
+ # backslashes preceding it, and it produces half as many literal
+ # backslashes. So we need to produce 2n backslashes.
+ return 2 * match.group(1) + '"' + match.group(2) + '"'
+
+ segments = s.split('"')
+ # The unquoted segments are at the even-numbered indices.
+ for i in range(0, len(segments), 2):
+ segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i])
+ # Concatenate back into a single string
+ s = '"'.join(segments)
+ if len(segments) % 2 == 0:
+ # String ends while still quoted according to VCProj's convention. This
+ # means the delimiter and the next list item that follow this one in the
+ # .vcproj file will be misinterpreted as part of this item. There is nothing
+ # we can do about this. Adding an extra quote would correct the problem in
+ # the VCProj but cause the same problem on the final command-line. Moving
+        # the item to the end of the list does work, but that's only possible if
+ # there's only one such item. Let's just warn the user.
+ print(
+ "Warning: MSVS may misinterpret the odd number of " + "quotes in " + s,
+ file=sys.stderr,
+ )
+ return s
+
+
+def _EscapeCppDefineForMSVS(s):
+ """Escapes a CPP define so that it will reach the compiler unaltered."""
+ s = _EscapeEnvironmentVariableExpansion(s)
+ s = _EscapeCommandLineArgumentForMSVS(s)
+ s = _EscapeVCProjCommandLineArgListItem(s)
+ # cl.exe replaces literal # characters with = in preprocessor definitions for
+ # some reason. Octal-encode to work around that.
+ s = s.replace("#", "\\%03o" % ord("#"))
+ return s
+
+
+quote_replacer_regex2 = re.compile(r'(\\+)"')
+
+
+def _EscapeCommandLineArgumentForMSBuild(s):
+ """Escapes a Windows command-line argument for use by MSBuild."""
+
+    def _Replace(match):
+        # CommandLineToArgv wants the backslashes before a literal quote
+        # doubled; use integer division so str multiplication gets an int.
+        return (len(match.group(1)) // 2 * 4) * "\\" + '\\"'
+
+ # Escape all quotes so that they are interpreted literally.
+ s = quote_replacer_regex2.sub(_Replace, s)
+ return s
+
+
+def _EscapeMSBuildSpecialCharacters(s):
+ escape_dictionary = {
+ "%": "%25",
+ "$": "%24",
+ "@": "%40",
+ "'": "%27",
+ ";": "%3B",
+ "?": "%3F",
+ "*": "%2A",
+ }
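+    # E.g. "$(Var);x" -> "%24(Var)%3Bx" (illustrative).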
+ result = "".join([escape_dictionary.get(c, c) for c in s])
+ return result
+
+
+def _EscapeCppDefineForMSBuild(s):
+ """Escapes a CPP define so that it will reach the compiler unaltered."""
+ s = _EscapeEnvironmentVariableExpansion(s)
+ s = _EscapeCommandLineArgumentForMSBuild(s)
+ s = _EscapeMSBuildSpecialCharacters(s)
+ # cl.exe replaces literal # characters with = in preprocessor definitions for
+ # some reason. Octal-encode to work around that.
+ s = s.replace("#", "\\%03o" % ord("#"))
+ return s
+
+
+def _GenerateRulesForMSVS(
+ p, output_dir, options, spec, sources, excluded_sources, actions_to_add
+):
+ """Generate all the rules for a particular project.
+
+ Arguments:
+ p: the project
+ output_dir: directory to emit rules to
+ options: global options passed to the generator
+ spec: the specification for this project
+ sources: the set of all known source files in this project
+ excluded_sources: the set of sources excluded from normal processing
+ actions_to_add: deferred list of actions to add in
+ """
+ rules = spec.get("rules", [])
+ rules_native = [r for r in rules if not int(r.get("msvs_external_rule", 0))]
+ rules_external = [r for r in rules if int(r.get("msvs_external_rule", 0))]
+
+ # Handle rules that use a native rules file.
+ if rules_native:
+ _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
+
+ # Handle external rules (non-native rules).
+ if rules_external:
+ _GenerateExternalRules(
+ rules_external, output_dir, spec, sources, options, actions_to_add
+ )
+ _AdjustSourcesForRules(rules, sources, excluded_sources, False)
+
+
+def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
+ # Add outputs generated by each rule (if applicable).
+ for rule in rules:
+ # Add in the outputs from this rule.
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for trigger_file in trigger_files:
+ # Remove trigger_file from excluded_sources to let the rule be triggered
+ # (e.g. rule trigger ax_enums.idl is added to excluded_sources
+ # because it's also in an action's inputs in the same project)
+ excluded_sources.discard(_FixPath(trigger_file))
+ # Done if not processing outputs as sources.
+ if int(rule.get("process_outputs_as_sources", False)):
+ inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
+ inputs = OrderedSet(_FixPaths(inputs))
+ outputs = OrderedSet(_FixPaths(outputs))
+ inputs.remove(_FixPath(trigger_file))
+ sources.update(inputs)
+ if not is_msbuild:
+ excluded_sources.update(inputs)
+ sources.update(outputs)
+
+
+def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
+ """Take inputs with actions attached out of the list of exclusions.
+
+ Arguments:
+ excluded_sources: list of source files not to be built.
+ actions_to_add: dict of actions keyed on source file they're attached to.
+ Returns:
+ excluded_sources with files that have actions attached removed.
+ """
+ must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
+ return [s for s in excluded_sources if s not in must_keep]
+
+
+def _GetDefaultConfiguration(spec):
+ return spec["configurations"][spec["default_configuration"]]
+
+
+def _GetGuidOfProject(proj_path, spec):
+ """Get the guid for the project.
+
+ Arguments:
+ proj_path: Path of the vcproj or vcxproj file to generate.
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ the guid.
+ Raises:
+ ValueError: if the specified GUID is invalid.
+ """
+ # Pluck out the default configuration.
+ default_config = _GetDefaultConfiguration(spec)
+ # Decide the guid of the project.
+ guid = default_config.get("msvs_guid")
+ if guid:
+ if VALID_MSVS_GUID_CHARS.match(guid) is None:
+ raise ValueError(
+ 'Invalid MSVS guid: "%s". Must match regex: "%s".'
+ % (guid, VALID_MSVS_GUID_CHARS.pattern)
+ )
+ guid = "{%s}" % guid
+ guid = guid or MSVSNew.MakeGuid(proj_path)
+ return guid
+
+
+def _GetMsbuildToolsetOfProject(proj_path, spec, version):
+ """Get the platform toolset for the project.
+
+ Arguments:
+ proj_path: Path of the vcproj or vcxproj file to generate.
+ spec: The target dictionary containing the properties of the target.
+ version: The MSVSVersion object.
+ Returns:
+ the platform toolset string or None.
+ """
+ # Pluck out the default configuration.
+ default_config = _GetDefaultConfiguration(spec)
+ toolset = default_config.get("msbuild_toolset")
+ if not toolset and version.DefaultToolset():
+ toolset = version.DefaultToolset()
+ if spec["type"] == "windows_driver":
+ toolset = "WindowsKernelModeDriver10.0"
+ return toolset
+
+
+def _GenerateProject(project, options, version, generator_flags, spec):
+ """Generates a vcproj file.
+
+ Arguments:
+ project: the MSVSProject object.
+ options: global generator options.
+ version: the MSVSVersion object.
+        generator_flags: dict of generator-specific flags.
+        spec: the target dictionary containing the properties of the target.
+ Returns:
+ A list of source files that cannot be found on disk.
+ """
+ default_config = _GetDefaultConfiguration(project.spec)
+
+ # Skip emitting anything if told to with msvs_existing_vcproj option.
+ if default_config.get("msvs_existing_vcproj"):
+ return []
+
+ if version.UsesVcxproj():
+ return _GenerateMSBuildProject(project, options, version, generator_flags, spec)
+ else:
+ return _GenerateMSVSProject(project, options, version, generator_flags)
+
+
+def _GenerateMSVSProject(project, options, version, generator_flags):
+ """Generates a .vcproj file. It may create .rules and .user files too.
+
+ Arguments:
+ project: The project object we will generate the file for.
+ options: Global options passed to the generator.
+ version: The VisualStudioVersion object.
+        generator_flags: dict of generator-specific flags.
+    Returns:
+        A list of source files that cannot be found on disk.
+ """
+ spec = project.spec
+ gyp.common.EnsureDirExists(project.path)
+
+ platforms = _GetUniquePlatforms(spec)
+ p = MSVSProject.Writer(
+ project.path, version, spec["target_name"], project.guid, platforms
+ )
+
+ # Get directory project file is in.
+ project_dir = os.path.split(project.path)[0]
+ gyp_path = _NormalizedSource(project.build_file)
+ relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
+
+ config_type = _GetMSVSConfigurationType(spec, project.build_file)
+ for config_name, config in spec["configurations"].items():
+ _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
+
+ # Prepare list of sources and excluded sources.
+ gyp_file = os.path.split(project.build_file)[1]
+ sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, gyp_file)
+
+ # Add rules.
+ actions_to_add = {}
+ _GenerateRulesForMSVS(
+ p, project_dir, options, spec, sources, excluded_sources, actions_to_add
+ )
+ list_excluded = generator_flags.get("msvs_list_excluded_files", True)
+ sources, excluded_sources, excluded_idl = _AdjustSourcesAndConvertToFilterHierarchy(
+ spec, options, project_dir, sources, excluded_sources, list_excluded, version
+ )
+
+ # Add in files.
+ missing_sources = _VerifySourcesExist(sources, project_dir)
+ p.AddFiles(sources)
+
+ _AddToolFilesToMSVS(p, spec)
+ _HandlePreCompiledHeaders(p, sources, spec)
+ _AddActions(actions_to_add, spec, relative_path_of_gyp_file)
+ _AddCopies(actions_to_add, spec)
+ _WriteMSVSUserFile(project.path, version, spec)
+
+ # NOTE: this stanza must appear after all actions have been decided.
+    # Don't exclude sources with actions attached, or they won't run.
+ excluded_sources = _FilterActionsFromExcluded(excluded_sources, actions_to_add)
+ _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded)
+ _AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
+
+ # Write it out.
+ p.WriteIfChanged()
+
+ return missing_sources
+
+
+def _GetUniquePlatforms(spec):
+ """Returns the list of unique platforms for this spec, e.g ['win32', ...].
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+      The list of unique platform names.
+ """
+ # Gather list of unique platforms.
+ platforms = OrderedSet()
+ for configuration in spec["configurations"]:
+ platforms.add(_ConfigPlatform(spec["configurations"][configuration]))
+ platforms = list(platforms)
+ return platforms
+
+
+def _CreateMSVSUserFile(proj_path, version, spec):
+ """Generates a .user file for the user running this Gyp program.
+
+ Arguments:
+ proj_path: The path of the project file being created. The .user file
+ shares the same path (with an appropriate suffix).
+ version: The VisualStudioVersion object.
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ The MSVSUserFile object created.
+ """
+ (domain, username) = _GetDomainAndUserName()
+ vcuser_filename = ".".join([proj_path, domain, username, "user"])
+ user_file = MSVSUserFile.Writer(vcuser_filename, version, spec["target_name"])
+ return user_file
+
+
+def _GetMSVSConfigurationType(spec, build_file):
+ """Returns the configuration type for this project.
+
+ It's a number defined by Microsoft. May raise an exception.
+
+ Args:
+ spec: The target dictionary containing the properties of the target.
+ build_file: The path of the gyp file.
+ Returns:
+ An integer, the configuration type.
+ """
+ try:
+ config_type = {
+ "executable": "1", # .exe
+ "shared_library": "2", # .dll
+ "loadable_module": "2", # .dll
+ "static_library": "4", # .lib
+ "windows_driver": "5", # .sys
+ "none": "10", # Utility type
+ }[spec["type"]]
+ except KeyError:
+ if spec.get("type"):
+ raise GypError(
+ "Target type %s is not a valid target type for "
+ "target %s in %s." % (spec["type"], spec["target_name"], build_file)
+ )
+ else:
+ raise GypError(
+ "Missing type field for target %s in %s."
+ % (spec["target_name"], build_file)
+ )
+ return config_type
+
+
+def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
+ """Adds a configuration to the MSVS project.
+
+ Many settings in a vcproj file are specific to a configuration. This
+    function writes the main part of the vcproj file that's configuration
+    specific.
+
+ Arguments:
+ p: The target project being generated.
+ spec: The target dictionary containing the properties of the target.
+ config_type: The configuration type, a number as defined by Microsoft.
+ config_name: The name of the configuration.
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ """
+ # Get the information for this configuration
+ include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs(config)
+ libraries = _GetLibraries(spec)
+ library_dirs = _GetLibraryDirs(config)
+ out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
+ defines = _GetDefines(config)
+ defines = [_EscapeCppDefineForMSVS(d) for d in defines]
+ disabled_warnings = _GetDisabledWarnings(config)
+ prebuild = config.get("msvs_prebuild")
+ postbuild = config.get("msvs_postbuild")
+ def_file = _GetModuleDefinition(spec)
+ precompiled_header = config.get("msvs_precompiled_header")
+
+ # Prepare the list of tools as a dictionary.
+ tools = dict()
+ # Add in user specified msvs_settings.
+ msvs_settings = config.get("msvs_settings", {})
+ MSVSSettings.ValidateMSVSSettings(msvs_settings)
+
+ # Prevent default library inheritance from the environment.
+ _ToolAppend(tools, "VCLinkerTool", "AdditionalDependencies", ["$(NOINHERIT)"])
+
+ for tool in msvs_settings:
+ settings = config["msvs_settings"][tool]
+ for setting in settings:
+ _ToolAppend(tools, tool, setting, settings[setting])
+ # Add the information to the appropriate tool
+ _ToolAppend(tools, "VCCLCompilerTool", "AdditionalIncludeDirectories", include_dirs)
+ _ToolAppend(tools, "VCMIDLTool", "AdditionalIncludeDirectories", midl_include_dirs)
+ _ToolAppend(
+ tools,
+ "VCResourceCompilerTool",
+ "AdditionalIncludeDirectories",
+ resource_include_dirs,
+ )
+ # Add in libraries.
+ _ToolAppend(tools, "VCLinkerTool", "AdditionalDependencies", libraries)
+ _ToolAppend(tools, "VCLinkerTool", "AdditionalLibraryDirectories", library_dirs)
+ if out_file:
+ _ToolAppend(tools, vc_tool, "OutputFile", out_file, only_if_unset=True)
+ # Add defines.
+ _ToolAppend(tools, "VCCLCompilerTool", "PreprocessorDefinitions", defines)
+ _ToolAppend(tools, "VCResourceCompilerTool", "PreprocessorDefinitions", defines)
+ # Change program database directory to prevent collisions.
+ _ToolAppend(
+ tools,
+ "VCCLCompilerTool",
+ "ProgramDataBaseFileName",
+ "$(IntDir)$(ProjectName)\\vc80.pdb",
+ only_if_unset=True,
+ )
+ # Add disabled warnings.
+ _ToolAppend(tools, "VCCLCompilerTool", "DisableSpecificWarnings", disabled_warnings)
+ # Add Pre-build.
+ _ToolAppend(tools, "VCPreBuildEventTool", "CommandLine", prebuild)
+ # Add Post-build.
+ _ToolAppend(tools, "VCPostBuildEventTool", "CommandLine", postbuild)
+ # Turn on precompiled headers if appropriate.
+ if precompiled_header:
+ precompiled_header = os.path.split(precompiled_header)[1]
+ _ToolAppend(tools, "VCCLCompilerTool", "UsePrecompiledHeader", "2")
+ _ToolAppend(
+ tools, "VCCLCompilerTool", "PrecompiledHeaderThrough", precompiled_header
+ )
+ _ToolAppend(tools, "VCCLCompilerTool", "ForcedIncludeFiles", precompiled_header)
+ # Loadable modules don't generate import libraries;
+ # tell dependent projects to not expect one.
+ if spec["type"] == "loadable_module":
+ _ToolAppend(tools, "VCLinkerTool", "IgnoreImportLibrary", "true")
+ # Set the module definition file if any.
+ if def_file:
+ _ToolAppend(tools, "VCLinkerTool", "ModuleDefinitionFile", def_file)
+
+ _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
+
+
+def _GetIncludeDirs(config):
+ """Returns the list of directories to be used for #include directives.
+
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of directory paths.
+ """
+ # TODO(bradnelson): include_dirs should really be flexible enough not to
+ # require this sort of thing.
+ include_dirs = config.get("include_dirs", []) + config.get(
+ "msvs_system_include_dirs", []
+ )
+ midl_include_dirs = config.get("midl_include_dirs", []) + config.get(
+ "msvs_system_include_dirs", []
+ )
+ resource_include_dirs = config.get("resource_include_dirs", include_dirs)
+ include_dirs = _FixPaths(include_dirs)
+ midl_include_dirs = _FixPaths(midl_include_dirs)
+ resource_include_dirs = _FixPaths(resource_include_dirs)
+ return include_dirs, midl_include_dirs, resource_include_dirs
+
+
+def _GetLibraryDirs(config):
+ """Returns the list of directories to be used for library search paths.
+
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of directory paths.
+ """
+
+ library_dirs = config.get("library_dirs", [])
+ library_dirs = _FixPaths(library_dirs)
+ return library_dirs
+
+
+def _GetLibraries(spec):
+ """Returns the list of libraries for this configuration.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ The list of library file names.
+ """
+ libraries = spec.get("libraries", [])
+ # Strip out -l, as it is not used on Windows (but is needed so we can pass
+ # in libraries that are assumed to be in the default library path).
+ # Also remove duplicate entries, keeping only the last occurrence while
+ # preserving order.
+ found = OrderedSet()
+ unique_libraries_list = []
+ for entry in reversed(libraries):
+ library = re.sub(r"^\-l", "", entry)
+ if not os.path.splitext(library)[1]:
+ library += ".lib"
+ if library not in found:
+ found.add(library)
+ unique_libraries_list.append(library)
+ unique_libraries_list.reverse()
+ return unique_libraries_list
+
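+# For illustration (hypothetical values): a spec with
+# {"libraries": ["-lkernel32", "user32.lib", "kernel32.lib"]} yields
+# ["user32.lib", "kernel32.lib"]: the -l prefix is stripped, ".lib" is
+# appended when no extension is present, and only the last duplicate is kept.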
+
+def _GetOutputFilePathAndTool(spec, msbuild):
+ """Returns the path and tool to use for this target.
+
+ Figures out the path of the file this spec will create and the name of
+ the VC tool that will create it.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ msbuild: True when generating for MSBuild; the default output suffix is
+ then $(TargetExt) instead of a literal extension.
+ Returns:
+ A triple of (file path, name of the vc tool, name of the msbuild tool)
+ """
+ # Select a name for the output file.
+ out_file = ""
+ vc_tool = ""
+ msbuild_tool = ""
+ output_file_map = {
+ "executable": ("VCLinkerTool", "Link", "$(OutDir)", ".exe"),
+ "shared_library": ("VCLinkerTool", "Link", "$(OutDir)", ".dll"),
+ "loadable_module": ("VCLinkerTool", "Link", "$(OutDir)", ".dll"),
+ "windows_driver": ("VCLinkerTool", "Link", "$(OutDir)", ".sys"),
+ "static_library": ("VCLibrarianTool", "Lib", "$(OutDir)lib\\", ".lib"),
+ }
+ output_file_props = output_file_map.get(spec["type"])
+ if output_file_props and int(spec.get("msvs_auto_output_file", 1)):
+ vc_tool, msbuild_tool, out_dir, suffix = output_file_props
+ if spec.get("standalone_static_library", 0):
+ out_dir = "$(OutDir)"
+ out_dir = spec.get("product_dir", out_dir)
+ product_extension = spec.get("product_extension")
+ if product_extension:
+ suffix = "." + product_extension
+ elif msbuild:
+ suffix = "$(TargetExt)"
+ prefix = spec.get("product_prefix", "")
+ product_name = spec.get("product_name", "$(ProjectName)")
+ out_file = ntpath.join(out_dir, prefix + product_name + suffix)
+ return out_file, vc_tool, msbuild_tool
+
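+# For illustration (hypothetical values): an executable spec with no
+# product_* overrides yields ("$(OutDir)\$(ProjectName).exe", "VCLinkerTool",
+# "Link") when msbuild=False; with msbuild=True the default suffix becomes
+# "$(TargetExt)".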
+
+def _GetOutputTargetExt(spec):
+ """Returns the extension for this target, including the dot
+
+ If product_extension is specified, returns it with a leading dot (so
+ TargetExt can be set to avoid warning MSB8012); returns None otherwise.
+ Ignores any target_extension settings in the input files.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ A string with the extension, or None
+ """
+ target_extension = spec.get("product_extension")
+ if target_extension:
+ return "." + target_extension
+ return None
+
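+# For illustration (hypothetical value): {"product_extension": "node"} yields
+# ".node"; a spec without product_extension yields None.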
+
+def _GetDefines(config):
+ """Returns the list of preprocessor definitions for this configuration.
+
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of preprocessor definitions.
+ """
+ defines = []
+ for d in config.get("defines", []):
+ if isinstance(d, list):
+ fd = "=".join([str(dpart) for dpart in d])
+ else:
+ fd = str(d)
+ defines.append(fd)
+ return defines
+
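+# For illustration (hypothetical values):
+#   _GetDefines({"defines": ["FOO", ["BAR", 1]]}) -> ["FOO", "BAR=1"]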
+
+def _GetDisabledWarnings(config):
+ return [str(i) for i in config.get("msvs_disabled_warnings", [])]
+
+
+def _GetModuleDefinition(spec):
+ def_file = ""
+ if spec["type"] in [
+ "shared_library",
+ "loadable_module",
+ "executable",
+ "windows_driver",
+ ]:
+ def_files = [s for s in spec.get("sources", []) if s.endswith(".def")]
+ if len(def_files) == 1:
+ def_file = _FixPath(def_files[0])
+ elif def_files:
+ raise ValueError(
+ "Multiple module definition files in one target, target %s lists "
+ "multiple .def files: %s" % (spec["target_name"], " ".join(def_files))
+ )
+ return def_file
+
+
+def _ConvertToolsToExpectedForm(tools):
+ """Convert tools to a form expected by Visual Studio.
+
+ Arguments:
+ tools: A dictionary of settings; the tool name is the key.
+ Returns:
+ A list of Tool objects.
+ """
+ tool_list = []
+ for tool, settings in tools.items():
+ # Collapse settings with lists.
+ settings_fixed = {}
+ for setting, value in settings.items():
+ if isinstance(value, list):
+ if (
+ tool == "VCLinkerTool" and setting == "AdditionalDependencies"
+ ) or setting == "AdditionalOptions":
+ settings_fixed[setting] = " ".join(value)
+ else:
+ settings_fixed[setting] = ";".join(value)
+ else:
+ settings_fixed[setting] = value
+ # Add in this tool.
+ tool_list.append(MSVSProject.Tool(tool, settings_fixed))
+ return tool_list
+
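+# For illustration (hypothetical values): {"VCCLCompilerTool":
+#     {"AdditionalIncludeDirectories": ["a", "b"]}} becomes a single Tool
+# whose setting is the string "a;b"; AdditionalOptions and the linker's
+# AdditionalDependencies are joined with spaces instead of semicolons.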
+
+def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
+ """Add to the project file the configuration specified by config.
+
+ Arguments:
+ p: The target project being generated.
+ spec: the target project dict.
+ tools: A dictionary of settings; the tool name is the key.
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ config_type: The configuration type, a number as defined by Microsoft.
+ config_name: The name of the configuration.
+ """
+ attributes = _GetMSVSAttributes(spec, config, config_type)
+ # Add in this configuration.
+ tool_list = _ConvertToolsToExpectedForm(tools)
+ p.AddConfig(_ConfigFullName(config_name, config), attrs=attributes, tools=tool_list)
+
+
+def _GetMSVSAttributes(spec, config, config_type):
+ # Prepare configuration attributes.
+ prepared_attrs = {}
+ source_attrs = config.get("msvs_configuration_attributes", {})
+ for a in source_attrs:
+ prepared_attrs[a] = source_attrs[a]
+ # Add props files.
+ vsprops_dirs = config.get("msvs_props", [])
+ vsprops_dirs = _FixPaths(vsprops_dirs)
+ if vsprops_dirs:
+ prepared_attrs["InheritedPropertySheets"] = ";".join(vsprops_dirs)
+ # Set configuration type.
+ prepared_attrs["ConfigurationType"] = config_type
+ output_dir = prepared_attrs.get(
+ "OutputDirectory", "$(SolutionDir)$(ConfigurationName)"
+ )
+ prepared_attrs["OutputDirectory"] = _FixPath(output_dir) + "\\"
+ if "IntermediateDirectory" not in prepared_attrs:
+ intermediate = "$(ConfigurationName)\\obj\\$(ProjectName)"
+ prepared_attrs["IntermediateDirectory"] = _FixPath(intermediate) + "\\"
+ else:
+ intermediate = _FixPath(prepared_attrs["IntermediateDirectory"]) + "\\"
+ intermediate = MSVSSettings.FixVCMacroSlashes(intermediate)
+ prepared_attrs["IntermediateDirectory"] = intermediate
+ return prepared_attrs
+
+
+def _AddNormalizedSources(sources_set, sources_array):
+ sources_set.update(_NormalizedSource(s) for s in sources_array)
+
+
+def _PrepareListOfSources(spec, generator_flags, gyp_file):
+ """Prepare list of sources and excluded sources.
+
+ Besides the sources specified directly in the spec, adds the gyp file so
+ that a change to it will cause a re-compile. Also adds appropriate sources
+ for actions and copies. Assumes later stage will un-exclude files which
+ have custom build steps attached.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ generator_flags: Dictionary of generator-specific flags.
+ gyp_file: The name of the gyp file.
+ Returns:
+ A pair of (list of sources, list of excluded sources).
+ The sources will be relative to the gyp file.
+ """
+ sources = OrderedSet()
+ _AddNormalizedSources(sources, spec.get("sources", []))
+ excluded_sources = OrderedSet()
+ # Add in the gyp file.
+ if not generator_flags.get("standalone"):
+ sources.add(gyp_file)
+
+ # Add in 'action' inputs and outputs.
+ for a in spec.get("actions", []):
+ inputs = a["inputs"]
+ inputs = [_NormalizedSource(i) for i in inputs]
+ # Add all inputs to sources and excluded sources.
+ inputs = OrderedSet(inputs)
+ sources.update(inputs)
+ if not spec.get("msvs_external_builder"):
+ excluded_sources.update(inputs)
+ if int(a.get("process_outputs_as_sources", False)):
+ _AddNormalizedSources(sources, a.get("outputs", []))
+ # Add in 'copies' inputs and outputs.
+ for cpy in spec.get("copies", []):
+ _AddNormalizedSources(sources, cpy.get("files", []))
+ return (sources, excluded_sources)
+
+
+def _AdjustSourcesAndConvertToFilterHierarchy(
+ spec, options, gyp_dir, sources, excluded_sources, list_excluded, version
+):
+ """Adjusts the list of sources and excluded sources.
+
+ Also converts the sets to lists.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ options: Global generator options.
+ gyp_dir: The path to the gyp file being processed.
+ sources: A set of sources to be included for this project.
+ excluded_sources: A set of sources to be excluded for this project.
+ list_excluded: Whether excluded files are to be listed in the project.
+ version: A MSVSVersion object.
+ Returns:
+ A trio of (list of sources, list of excluded sources,
+ path of excluded IDL file)
+ """
+ # Exclude excluded sources coming into the generator.
+ excluded_sources.update(OrderedSet(spec.get("sources_excluded", [])))
+ # Add excluded sources into sources for good measure.
+ sources.update(excluded_sources)
+ # Convert to proper Windows form.
+ # NOTE: sources goes from being a set to a list here.
+ # NOTE: excluded_sources goes from being a set to a list here.
+ sources = _FixPaths(sources)
+ # Convert to proper Windows form.
+ excluded_sources = _FixPaths(excluded_sources)
+
+ excluded_idl = _IdlFilesHandledNonNatively(spec, sources)
+
+ precompiled_related = _GetPrecompileRelatedFiles(spec)
+ # Find the excluded ones, minus the precompiled header related ones.
+ fully_excluded = [i for i in excluded_sources if i not in precompiled_related]
+
+ # Convert to folders and the right slashes.
+ sources = [i.split("\\") for i in sources]
+ sources = _ConvertSourcesToFilterHierarchy(
+ sources,
+ excluded=fully_excluded,
+ list_excluded=list_excluded,
+ msvs_version=version,
+ )
+
+ # Prune filters with a single child to flatten ugly directory structures
+ # such as ../../src/modules/module1 etc.
+ if version.UsesVcxproj():
+ while (
+ all([isinstance(s, MSVSProject.Filter) for s in sources])
+ and len({s.name for s in sources}) == 1
+ ):
+ assert all([len(s.contents) == 1 for s in sources])
+ sources = [s.contents[0] for s in sources]
+ else:
+ while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
+ sources = sources[0].contents
+
+ return sources, excluded_sources, excluded_idl
+
+
+def _IdlFilesHandledNonNatively(spec, sources):
+ # If any non-native rules use 'idl' as an extension, exclude idl files.
+ # Gather a list here to use later.
+ using_idl = False
+ for rule in spec.get("rules", []):
+ if rule["extension"] == "idl" and int(rule.get("msvs_external_rule", 0)):
+ using_idl = True
+ break
+ if using_idl:
+ excluded_idl = [i for i in sources if i.endswith(".idl")]
+ else:
+ excluded_idl = []
+ return excluded_idl
+
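+# For illustration (hypothetical values): with a rule {"extension": "idl",
+# "msvs_external_rule": 1} in the spec, sources ["a.idl", "b.cc"] yield
+# ["a.idl"]; without such a rule the result is [].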
+
+def _GetPrecompileRelatedFiles(spec):
+ # Gather a list of precompiled header related sources.
+ precompiled_related = []
+ for _, config in spec["configurations"].items():
+ for k in precomp_keys:
+ f = config.get(k)
+ if f:
+ precompiled_related.append(_FixPath(f))
+ return precompiled_related
+
+
+def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded):
+ exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
+ for file_name, excluded_configs in exclusions.items():
+ if not list_excluded and len(excluded_configs) == len(spec["configurations"]):
+ # If we're not listing excluded files, then they won't appear in the
+ # project, so don't try to configure them to be excluded.
+ pass
+ else:
+ for config_name, config in excluded_configs:
+ p.AddFileConfig(
+ file_name,
+ _ConfigFullName(config_name, config),
+ {"ExcludedFromBuild": "true"},
+ )
+
+
+def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
+ exclusions = {}
+ # Exclude excluded sources from being built.
+ for f in excluded_sources:
+ excluded_configs = []
+ for config_name, config in spec["configurations"].items():
+ precomped = [_FixPath(config.get(i, "")) for i in precomp_keys]
+ # Don't do this for ones that are precompiled header related.
+ if f not in precomped:
+ excluded_configs.append((config_name, config))
+ exclusions[f] = excluded_configs
+ # If any non-native rules use 'idl' as an extension, exclude idl files.
+ # Exclude them now.
+ for f in excluded_idl:
+ excluded_configs = []
+ for config_name, config in spec["configurations"].items():
+ excluded_configs.append((config_name, config))
+ exclusions[f] = excluded_configs
+ return exclusions
+
+
+def _AddToolFilesToMSVS(p, spec):
+ # Add in tool files (rules).
+ tool_files = OrderedSet()
+ for _, config in spec["configurations"].items():
+ for f in config.get("msvs_tool_files", []):
+ tool_files.add(f)
+ for f in tool_files:
+ p.AddToolFile(f)
+
+
+def _HandlePreCompiledHeaders(p, sources, spec):
+ # Pre-compiled header source stubs need a different compiler flag
+ # (generate precompiled header) and any source file not of the same
+ # kind (i.e. C vs. C++) as the precompiled header source stub needs
+ # to have use of precompiled headers disabled.
+ extensions_excluded_from_precompile = []
+ for config_name, config in spec["configurations"].items():
+ source = config.get("msvs_precompiled_source")
+ if source:
+ source = _FixPath(source)
+ # UsePrecompiledHeader=1 means 'create the precompiled header' (/Yc).
+ tool = MSVSProject.Tool("VCCLCompilerTool", {"UsePrecompiledHeader": "1"})
+ p.AddFileConfig(
+ source, _ConfigFullName(config_name, config), {}, tools=[tool]
+ )
+ basename, extension = os.path.splitext(source)
+ if extension == ".c":
+ extensions_excluded_from_precompile = [".cc", ".cpp", ".cxx"]
+ else:
+ extensions_excluded_from_precompile = [".c"]
+
+ def DisableForSourceTree(source_tree):
+ for source in source_tree:
+ if isinstance(source, MSVSProject.Filter):
+ DisableForSourceTree(source.contents)
+ else:
+ basename, extension = os.path.splitext(source)
+ if extension in extensions_excluded_from_precompile:
+ for config_name, config in spec["configurations"].items():
+ tool = MSVSProject.Tool(
+ "VCCLCompilerTool",
+ {
+ "UsePrecompiledHeader": "0",
+ "ForcedIncludeFiles": "$(NOINHERIT)",
+ },
+ )
+ p.AddFileConfig(
+ _FixPath(source),
+ _ConfigFullName(config_name, config),
+ {},
+ tools=[tool],
+ )
+
+ # Do nothing if there was no precompiled source.
+ if extensions_excluded_from_precompile:
+ DisableForSourceTree(sources)
+
+
+def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
+ # Add actions.
+ actions = spec.get("actions", [])
+ # Don't setup_env every time. When all the actions are run together in one
+ # batch file in VS, the PATH will grow too long.
+ # Membership in this set means that the cygwin environment has been set up,
+ # and does not need to be set up again.
+ have_setup_env = set()
+ for a in actions:
+ # Attach actions to the gyp file if nothing else is there.
+ inputs = a.get("inputs") or [relative_path_of_gyp_file]
+ attached_to = inputs[0]
+ need_setup_env = attached_to not in have_setup_env
+ cmd = _BuildCommandLineForRule(
+ spec, a, has_input_path=False, do_setup_env=need_setup_env
+ )
+ have_setup_env.add(attached_to)
+ # Add the action.
+ _AddActionStep(
+ actions_to_add,
+ inputs=inputs,
+ outputs=a.get("outputs", []),
+ description=a.get("message", a["action_name"]),
+ command=cmd,
+ )
+
+
+def _WriteMSVSUserFile(project_path, version, spec):
+ # Add run_as and test targets.
+ if "run_as" in spec:
+ run_as = spec["run_as"]
+ action = run_as.get("action", [])
+ environment = run_as.get("environment", [])
+ working_directory = run_as.get("working_directory", ".")
+ elif int(spec.get("test", 0)):
+ action = ["$(TargetPath)", "--gtest_print_time"]
+ environment = []
+ working_directory = "."
+ else:
+ return # Nothing to add
+ # Write out the user file.
+ user_file = _CreateMSVSUserFile(project_path, version, spec)
+ for config_name, c_data in spec["configurations"].items():
+ user_file.AddDebugSettings(
+ _ConfigFullName(config_name, c_data), action, environment, working_directory
+ )
+ user_file.WriteIfChanged()
+
+
+def _AddCopies(actions_to_add, spec):
+ copies = _GetCopies(spec)
+ for inputs, outputs, cmd, description in copies:
+ _AddActionStep(
+ actions_to_add,
+ inputs=inputs,
+ outputs=outputs,
+ description=description,
+ command=cmd,
+ )
+
+
+def _GetCopies(spec):
+ copies = []
+ # Add copies.
+ for cpy in spec.get("copies", []):
+ for src in cpy.get("files", []):
+ dst = os.path.join(cpy["destination"], os.path.basename(src))
+ # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and
+ # outputs, so do the same for our generated command line.
+ if src.endswith("/"):
+ src_bare = src[:-1]
+ base_dir = posixpath.split(src_bare)[0]
+ outer_dir = posixpath.split(src_bare)[1]
+ fixed_dst = _FixPath(dst)
+ full_dst = f'"{fixed_dst}\\{outer_dir}\\"'
+ cmd = 'mkdir {} 2>nul & cd "{}" && xcopy /e /f /y "{}" {}'.format(
+ full_dst,
+ _FixPath(base_dir),
+ outer_dir,
+ full_dst,
+ )
+ copies.append(
+ (
+ [src],
+ ["dummy_copies", dst],
+ cmd,
+ f"Copying {src} to {fixed_dst}",
+ )
+ )
+ else:
+ fix_dst = _FixPath(cpy["destination"])
+ cmd = 'mkdir "{}" 2>nul & set ERRORLEVEL=0 & copy /Y "{}" "{}"'.format(
+ fix_dst,
+ _FixPath(src),
+ _FixPath(dst),
+ )
+ copies.append(([src], [dst], cmd, f"Copying {src} to {fix_dst}"))
+ return copies
+
+
+def _GetPathDict(root, path):
+ # |path| will eventually be empty (in the recursive calls) if it was initially
+ # relative; otherwise it will eventually end up as '\', 'D:\', etc.
+ if not path or path.endswith(os.sep):
+ return root
+ parent, folder = os.path.split(path)
+ parent_dict = _GetPathDict(root, parent)
+ if folder not in parent_dict:
+ parent_dict[folder] = dict()
+ return parent_dict[folder]
+
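+# For illustration (hypothetical values): starting from root = {},
+# _GetPathDict(root, os.path.join("a", "b")) leaves root == {"a": {"b": {}}}
+# and returns the innermost dict.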
+
+def _DictsToFolders(base_path, bucket, flat):
+ # Convert to folders recursively.
+ children = []
+ for folder, contents in bucket.items():
+ if isinstance(contents, dict):
+ folder_children = _DictsToFolders(
+ os.path.join(base_path, folder), contents, flat
+ )
+ if flat:
+ children += folder_children
+ else:
+ folder_children = MSVSNew.MSVSFolder(
+ os.path.join(base_path, folder),
+ name="(" + folder + ")",
+ entries=folder_children,
+ )
+ children.append(folder_children)
+ else:
+ children.append(contents)
+ return children
+
+
+def _CollapseSingles(parent, node):
+ # Recursively explore the tree of dicts looking for projects which are
+ # the sole item in a folder which has the same name as the project. Bring
+ # such projects up one level.
+ if isinstance(node, dict) and len(node) == 1 and next(iter(node)) == parent + ".vcproj":
+ return node[next(iter(node))]
+ if not isinstance(node, dict):
+ return node
+ for child in node:
+ node[child] = _CollapseSingles(child, node[child])
+ return node
+
+
+def _GatherSolutionFolders(sln_projects, project_objects, flat):
+ root = {}
+ # Convert into a tree of dicts on path.
+ for p in sln_projects:
+ gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
+ if p.endswith("#host"):
+ target += "_host"
+ gyp_dir = os.path.dirname(gyp_file)
+ path_dict = _GetPathDict(root, gyp_dir)
+ path_dict[target + ".vcproj"] = project_objects[p]
+ # Walk down from the top until we hit a folder that has more than one entry.
+ # In practice, this strips the top-level "src/" dir from the hierarchy in
+ # the solution.
+ while len(root) == 1 and isinstance(root[next(iter(root))], dict):
+ root = root[next(iter(root))]
+ # Collapse singles.
+ root = _CollapseSingles("", root)
+ # Merge buckets until everything is a root entry.
+ return _DictsToFolders("", root, flat)
+
+
+def _GetPathOfProject(qualified_target, spec, options, msvs_version):
+ default_config = _GetDefaultConfiguration(spec)
+ proj_filename = default_config.get("msvs_existing_vcproj")
+ if not proj_filename:
+ proj_filename = spec["target_name"]
+ if spec["toolset"] == "host":
+ proj_filename += "_host"
+ proj_filename = proj_filename + options.suffix + msvs_version.ProjectExtension()
+
+ build_file = gyp.common.BuildFile(qualified_target)
+ proj_path = os.path.join(os.path.dirname(build_file), proj_filename)
+ fix_prefix = None
+ if options.generator_output:
+ project_dir_path = os.path.dirname(os.path.abspath(proj_path))
+ proj_path = os.path.join(options.generator_output, proj_path)
+ fix_prefix = gyp.common.RelativePath(
+ project_dir_path, os.path.dirname(proj_path)
+ )
+ return proj_path, fix_prefix
+
+
+def _GetPlatformOverridesOfProject(spec):
+ # Prepare a dict indicating which project configurations are used for which
+ # solution configurations for this target.
+ config_platform_overrides = {}
+ for config_name, c in spec["configurations"].items():
+ config_fullname = _ConfigFullName(config_name, c)
+ platform = c.get("msvs_target_platform", _ConfigPlatform(c))
+ fixed_config_fullname = "{}|{}".format(
+ _ConfigBaseName(config_name, _ConfigPlatform(c)),
+ platform,
+ )
+ if spec["toolset"] == "host" and generator_supports_multiple_toolsets:
+ fixed_config_fullname = f"{config_name}|x64"
+ config_platform_overrides[config_fullname] = fixed_config_fullname
+ return config_platform_overrides
+
+
+def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
+ """Create a MSVSProject object for the targets found in target list.
+
+ Arguments:
+ target_list: the list of targets to generate project objects for.
+ target_dicts: the dictionary of specifications.
+ options: global generator options.
+ msvs_version: the MSVSVersion object.
+ Returns:
+ A dictionary of created projects, keyed by qualified target.
+ """
+ global fixpath_prefix
+ # Generate each project.
+ projects = {}
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ proj_path, fixpath_prefix = _GetPathOfProject(
+ qualified_target, spec, options, msvs_version
+ )
+ guid = _GetGuidOfProject(proj_path, spec)
+ overrides = _GetPlatformOverridesOfProject(spec)
+ build_file = gyp.common.BuildFile(qualified_target)
+ # Create object for this project.
+ target_name = spec["target_name"]
+ if spec["toolset"] == "host":
+ target_name += "_host"
+ obj = MSVSNew.MSVSProject(
+ proj_path,
+ name=target_name,
+ guid=guid,
+ spec=spec,
+ build_file=build_file,
+ config_platform_overrides=overrides,
+ fixpath_prefix=fixpath_prefix,
+ )
+ # Set project toolset if any (MS build only)
+ if msvs_version.UsesVcxproj():
+ obj.set_msbuild_toolset(
+ _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version)
+ )
+ projects[qualified_target] = obj
+ # Set all the dependencies, but not if we are using an external builder like
+ # ninja
+ for project in projects.values():
+ if not project.spec.get("msvs_external_builder"):
+ deps = project.spec.get("dependencies", [])
+ deps = [projects[d] for d in deps]
+ project.set_dependencies(deps)
+ return projects
+
+
+def _InitNinjaFlavor(params, target_list, target_dicts):
+ """Initialize targets for the ninja flavor.
+
+ This sets up the necessary variables in the targets to generate msvs projects
+ that use ninja as an external builder. The variables in the spec are only set
+ if they have not been set. This allows individual specs to override the
+ default values initialized here.
+ Arguments:
+ params: Params provided to the generator.
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ """
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ if spec.get("msvs_external_builder"):
+ # The spec explicitly defined an external builder, so don't change it.
+ continue
+
+ path_to_ninja = spec.get("msvs_path_to_ninja", "ninja.exe")
+
+ spec["msvs_external_builder"] = "ninja"
+ if not spec.get("msvs_external_builder_out_dir"):
+ gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+ gyp_dir = os.path.dirname(gyp_file)
+ configuration = "$(Configuration)"
+ if params.get("target_arch") == "x64":
+ configuration += "_x64"
+ if params.get("target_arch") == "arm64":
+ configuration += "_arm64"
+ spec["msvs_external_builder_out_dir"] = os.path.join(
+ gyp.common.RelativePath(params["options"].toplevel_dir, gyp_dir),
+ ninja_generator.ComputeOutputDir(params),
+ configuration,
+ )
+ if not spec.get("msvs_external_builder_build_cmd"):
+ spec["msvs_external_builder_build_cmd"] = [
+ path_to_ninja,
+ "-C",
+ "$(OutDir)",
+ "$(ProjectName)",
+ ]
+ if not spec.get("msvs_external_builder_clean_cmd"):
+ spec["msvs_external_builder_clean_cmd"] = [
+ path_to_ninja,
+ "-C",
+ "$(OutDir)",
+ "-tclean",
+ "$(ProjectName)",
+ ]
+
+
+def CalculateVariables(default_variables, params):
+ """Generated variables that require params to be known."""
+
+ generator_flags = params.get("generator_flags", {})
+
+ # Select project file format version (if unset, default to auto detecting).
+ msvs_version = MSVSVersion.SelectVisualStudioVersion(
+ generator_flags.get("msvs_version", "auto")
+ )
+ # Stash msvs_version for later (so we don't have to probe the system twice).
+ params["msvs_version"] = msvs_version
+
+ # Set a variable so conditions can be based on msvs_version.
+ default_variables["MSVS_VERSION"] = msvs_version.ShortName()
+
+ # To determine processor word size on Windows, in addition to checking
+ # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
+ process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
+ contains the actual word size of the system when running through WOW64).
+ if (
+ os.environ.get("PROCESSOR_ARCHITECTURE", "").find("64") >= 0
+ or os.environ.get("PROCESSOR_ARCHITEW6432", "").find("64") >= 0
+ ):
+ default_variables["MSVS_OS_BITS"] = 64
+ else:
+ default_variables["MSVS_OS_BITS"] = 32
+
+ if gyp.common.GetFlavor(params) == "ninja":
+ default_variables["SHARED_INTERMEDIATE_DIR"] = "$(OutDir)gen"
+
+
+def PerformBuild(data, configurations, params):
+ options = params["options"]
+ msvs_version = params["msvs_version"]
+ devenv = os.path.join(msvs_version.path, "Common7", "IDE", "devenv.com")
+
+ for build_file, build_file_dict in data.items():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != ".gyp":
+ continue
+ sln_path = build_file_root + options.suffix + ".sln"
+ if options.generator_output:
+ sln_path = os.path.join(options.generator_output, sln_path)
+
+ for config in configurations:
+ arguments = [devenv, sln_path, "/Build", config]
+ print(f"Building [{config}]: {arguments}")
+ subprocess.check_call(arguments)
+
+
+def CalculateGeneratorInputInfo(params):
+ if params.get("flavor") == "ninja":
+ toplevel = params["options"].toplevel_dir
+ qualified_out_dir = os.path.normpath(
+ os.path.join(
+ toplevel,
+ ninja_generator.ComputeOutputDir(params),
+ "gypfiles-msvs-ninja",
+ )
+ )
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ "toplevel": toplevel,
+ "qualified_out_dir": qualified_out_dir,
+ }
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ """Generate .sln and .vcproj files.
+
+ This is the entry point for this generator.
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ data: Dictionary containing per .gyp data.
+ params: Dictionary of global generator parameters.
+ """
+ global fixpath_prefix
+
+ options = params["options"]
+
+ # Get the project file format version back out of where we stashed it in
+ # GeneratorCalculatedVariables.
+ msvs_version = params["msvs_version"]
+
+ generator_flags = params.get("generator_flags", {})
+
+ # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
+ (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts)
+
+ # Optionally use the large PDB workaround for targets marked with
+ # 'msvs_large_pdb': 1.
+ (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
+ target_list, target_dicts, generator_default_variables
+ )
+
+ # Optionally configure each spec to use ninja as the external builder.
+ if params.get("flavor") == "ninja":
+ _InitNinjaFlavor(params, target_list, target_dicts)
+
+ # Prepare the set of configurations.
+ configs = set()
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ for config_name, config in spec["configurations"].items():
+ config_name = _ConfigFullName(config_name, config)
+ configs.add(config_name)
+ if config_name == "Release|arm64":
+ configs.add("Release|x64")
+ configs = list(configs)
+
+ # Figure out all the projects that will be generated and their guids
+ project_objects = _CreateProjectObjects(
+ target_list, target_dicts, options, msvs_version
+ )
+
+ # Generate each project.
+ missing_sources = []
+ for project in project_objects.values():
+ fixpath_prefix = project.fixpath_prefix
+ missing_sources.extend(
+ _GenerateProject(project, options, msvs_version, generator_flags, spec)
+ )
+ fixpath_prefix = None
+
+ for build_file in data:
+ # Validate build_file extension
+ target_only_configs = configs
+ if generator_supports_multiple_toolsets:
+ target_only_configs = [i for i in configs if i.endswith("arm64")]
+ if not build_file.endswith(".gyp"):
+ continue
+ sln_path = os.path.splitext(build_file)[0] + options.suffix + ".sln"
+ if options.generator_output:
+ sln_path = os.path.join(options.generator_output, sln_path)
+ # Get projects in the solution, and their dependents.
+ sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
+ sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects)
+ # Create folder hierarchy.
+ root_entries = _GatherSolutionFolders(
+ sln_projects, project_objects, flat=msvs_version.FlatSolution()
+ )
+ # Create solution.
+ sln = MSVSNew.MSVSSolution(
+ sln_path,
+ entries=root_entries,
+ variants=target_only_configs,
+ websiteProperties=False,
+ version=msvs_version,
+ )
+ sln.Write()
+
+ if missing_sources:
+ error_message = "Missing input files:\n" + "\n".join(set(missing_sources))
+ if generator_flags.get("msvs_error_on_missing_sources", False):
+ raise GypError(error_message)
+ else:
+ print("Warning: " + error_message, file=sys.stdout)
+
+
+def _GenerateMSBuildFiltersFile(
+ filters_path,
+ source_files,
+ rule_dependencies,
+ extension_to_rule_name,
+ platforms,
+ toolset,
+):
+ """Generate the filters file.
+
+ This file is used by Visual Studio to organize the presentation of source
+ files into folders.
+
+ Arguments:
+ filters_path: The path of the file to be created.
+ source_files: The hierarchical structure of all the sources.
+ rule_dependencies: A set of files that rules depend on.
+ extension_to_rule_name: A dictionary mapping file extensions to rules.
+ platforms: The set of platforms the project is built for.
+ toolset: The toolset ('target' or 'host') being built for.
+ """
+ filter_group = []
+ source_group = []
+ _AppendFiltersForMSBuild(
+ "",
+ source_files,
+ rule_dependencies,
+ extension_to_rule_name,
+ platforms,
+ toolset,
+ filter_group,
+ source_group,
+ )
+ if filter_group:
+ content = [
+ "Project",
+ {
+ "ToolsVersion": "4.0",
+ "xmlns": "http://schemas.microsoft.com/developer/msbuild/2003",
+ },
+ ["ItemGroup"] + filter_group,
+ ["ItemGroup"] + source_group,
+ ]
+ easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True)
+ elif os.path.exists(filters_path):
+ # We don't need this filter anymore. Delete the old filter file.
+ os.unlink(filters_path)
+
+
+def _AppendFiltersForMSBuild(
+ parent_filter_name,
+ sources,
+ rule_dependencies,
+ extension_to_rule_name,
+ platforms,
+ toolset,
+ filter_group,
+ source_group,
+):
+ """Creates the list of filters and sources to be added in the filter file.
+
+ Args:
+ parent_filter_name: The name of the filter under which the sources are
+ found.
+ sources: The hierarchy of filters and sources to process.
+ rule_dependencies: A set of files that rules depend on.
+ extension_to_rule_name: A dictionary mapping file extensions to rules.
+ platforms: The set of platforms the project is built for.
+ toolset: The toolset ('target' or 'host') being built for.
+ filter_group: The list to which filter entries will be appended.
+ source_group: The list to which source entries will be appended.
+ """
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ # We have a sub-filter. Create the name of that sub-filter.
+ if not parent_filter_name:
+ filter_name = source.name
+ else:
+ filter_name = f"{parent_filter_name}\\{source.name}"
+ # Add the filter to the group.
+ filter_group.append(
+ [
+ "Filter",
+ {"Include": filter_name},
+ ["UniqueIdentifier", MSVSNew.MakeGuid(source.name)],
+ ]
+ )
+ # Recurse and add its dependents.
+ _AppendFiltersForMSBuild(
+ filter_name,
+ source.contents,
+ rule_dependencies,
+ extension_to_rule_name,
+ platforms,
+ toolset,
+ filter_group,
+ source_group,
+ )
+ else:
+ # It's a source. Create a source entry.
+ _, element = _MapFileToMsBuildSourceType(
+ source, rule_dependencies, extension_to_rule_name, platforms, toolset
+ )
+ source_entry = [element, {"Include": source}]
+ # Specify the filter it is part of, if any.
+ if parent_filter_name:
+ source_entry.append(["Filter", parent_filter_name])
+ source_group.append(source_entry)
+
+
+def _MapFileToMsBuildSourceType(
+ source, rule_dependencies, extension_to_rule_name, platforms, toolset
+):
+ """Returns the group and element type of the source file.
+
+ Arguments:
+ source: The source file name.
+ rule_dependencies: A set of files that rules depend on.
+ extension_to_rule_name: A dictionary mapping file extensions to rules.
+ platforms: The set of platforms the project is built for.
+ toolset: The toolset ('target' or 'host') being built for.
+
+ Returns:
+ A pair of (group this file should be part of, the label of element)
+ """
+ _, ext = os.path.splitext(source)
+ ext = ext.lower()
+ if ext in extension_to_rule_name:
+ group = "rule"
+ element = extension_to_rule_name[ext]
+ elif ext in [".cc", ".cpp", ".c", ".cxx", ".mm"]:
+ group = "compile"
+ element = "ClCompile"
+ elif ext in [".h", ".hxx"]:
+ group = "include"
+ element = "ClInclude"
+ elif ext == ".rc":
+ group = "resource"
+ element = "ResourceCompile"
+ elif ext in [".s", ".asm"]:
+ group = "masm"
+ element = "MASM"
+ if "arm64" in platforms and toolset == "target":
+ element = "MARMASM"
+ elif ext == ".idl":
+ group = "midl"
+ element = "Midl"
+ elif source in rule_dependencies:
+ group = "rule_dependency"
+ element = "CustomBuild"
+ else:
+ group = "none"
+ element = "None"
+ return (group, element)
+
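+# For illustration (hypothetical values): with no matching rule extension,
+# "foo.cc" maps to ("compile", "ClCompile") and "foo.rc" maps to
+# ("resource", "ResourceCompile").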
+
+def _GenerateRulesForMSBuild(
+ output_dir,
+ options,
+ spec,
+ sources,
+ excluded_sources,
+ props_files_of_rules,
+ targets_files_of_rules,
+ actions_to_add,
+ rule_dependencies,
+ extension_to_rule_name,
+):
+ # MSBuild rules are implemented using three files: an XML file, a .targets
+ # file and a .props file.
+ # For more details see:
+ # https://devblogs.microsoft.com/cppblog/quick-help-on-vs2010-custom-build-rule/
+ rules = spec.get("rules", [])
+ rules_native = [r for r in rules if not int(r.get("msvs_external_rule", 0))]
+ rules_external = [r for r in rules if int(r.get("msvs_external_rule", 0))]
+
+ msbuild_rules = []
+ for rule in rules_native:
+ # Skip a rule with no action and no inputs.
+ if "action" not in rule and not rule.get("rule_sources", []):
+ continue
+ msbuild_rule = MSBuildRule(rule, spec)
+ msbuild_rules.append(msbuild_rule)
+ rule_dependencies.update(msbuild_rule.additional_dependencies.split(";"))
+ extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
+ if msbuild_rules:
+ base = spec["target_name"] + options.suffix
+ props_name = base + ".props"
+ targets_name = base + ".targets"
+ xml_name = base + ".xml"
+
+ props_files_of_rules.add(props_name)
+ targets_files_of_rules.add(targets_name)
+
+ props_path = os.path.join(output_dir, props_name)
+ targets_path = os.path.join(output_dir, targets_name)
+ xml_path = os.path.join(output_dir, xml_name)
+
+ _GenerateMSBuildRulePropsFile(props_path, msbuild_rules)
+ _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules)
+ _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules)
+
+ if rules_external:
+ _GenerateExternalRules(
+ rules_external, output_dir, spec, sources, options, actions_to_add
+ )
+ _AdjustSourcesForRules(rules, sources, excluded_sources, True)
+
+
+class MSBuildRule:
+ """Used to store information used to generate an MSBuild rule.
+
+ Attributes:
+ rule_name: The rule name, sanitized to use in XML.
+ target_name: The name of the target.
+ after_targets: The name of the AfterTargets element.
+ before_targets: The name of the BeforeTargets element.
+ depends_on: The name of the DependsOn element.
+ compute_output: The name of the ComputeOutput element.
+ dirs_to_make: The name of the DirsToMake element.
+ inputs: The name of the _inputs element.
+ tlog: The name of the _tlog element.
+ extension: The extension this rule applies to.
+ description: The message displayed when this rule is invoked.
+ additional_dependencies: A string listing additional dependencies.
+ outputs: The outputs of this rule.
+ command: The command used to run the rule.
+ """
+
+ def __init__(self, rule, spec):
+ self.display_name = rule["rule_name"]
+ # Ensure that the rule name contains only letters, digits and underscores.
+ self.rule_name = re.sub(r"\W", "_", self.display_name)
+ # Create the various element names, following the example set by the
+ # Visual Studio 2008 to 2010 conversion. I don't know if VS2010
+ # is sensitive to the exact names.
+ self.target_name = "_" + self.rule_name
+ self.after_targets = self.rule_name + "AfterTargets"
+ self.before_targets = self.rule_name + "BeforeTargets"
+ self.depends_on = self.rule_name + "DependsOn"
+ self.compute_output = "Compute%sOutput" % self.rule_name
+ self.dirs_to_make = self.rule_name + "DirsToMake"
+ self.inputs = self.rule_name + "_inputs"
+ self.tlog = self.rule_name + "_tlog"
+ self.extension = rule["extension"]
+ if not self.extension.startswith("."):
+ self.extension = "." + self.extension
+
+ self.description = MSVSSettings.ConvertVCMacrosToMSBuild(
+ rule.get("message", self.rule_name)
+ )
+ old_additional_dependencies = _FixPaths(rule.get("inputs", []))
+ self.additional_dependencies = ";".join(
+ [
+ MSVSSettings.ConvertVCMacrosToMSBuild(i)
+ for i in old_additional_dependencies
+ ]
+ )
+ old_outputs = _FixPaths(rule.get("outputs", []))
+ self.outputs = ";".join(
+ [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in old_outputs]
+ )
+ old_command = _BuildCommandLineForRule(
+ spec, rule, has_input_path=True, do_setup_env=True
+ )
+ self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command)
+
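+# For illustration (hypothetical rule): a rule with rule_name "my-rule" and
+# extension "idl" yields rule_name "my_rule", target_name "_my_rule",
+# compute_output "Computemy_ruleOutput", and extension ".idl".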
+
+def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules):
+ """Generate the .props file."""
+ content = [
+ "Project",
+ {"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003"},
+ ]
+ for rule in msbuild_rules:
+ content.extend(
+ [
+ [
+ "PropertyGroup",
+ {
+ "Condition": "'$(%s)' == '' and '$(%s)' == '' and "
+ "'$(ConfigurationType)' != 'Makefile'"
+ % (rule.before_targets, rule.after_targets)
+ },
+ [rule.before_targets, "Midl"],
+ [rule.after_targets, "CustomBuild"],
+ ],
+ [
+ "PropertyGroup",
+ [
+ rule.depends_on,
+ {"Condition": "'$(ConfigurationType)' != 'Makefile'"},
+ "_SelectedFiles;$(%s)" % rule.depends_on,
+ ],
+ ],
+ [
+ "ItemDefinitionGroup",
+ [
+ rule.rule_name,
+ ["CommandLineTemplate", rule.command],
+ ["Outputs", rule.outputs],
+ ["ExecutionDescription", rule.description],
+ ["AdditionalDependencies", rule.additional_dependencies],
+ ],
+ ],
+ ]
+ )
+ easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True)
+
+
+def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
+ """Generate the .targets file."""
+ content = [
+ "Project",
+ {"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003"},
+ ]
+ item_group = [
+ "ItemGroup",
+ [
+ "PropertyPageSchema",
+ {"Include": "$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml"},
+ ],
+ ]
+ for rule in msbuild_rules:
+ item_group.append(
+ [
+ "AvailableItemName",
+ {"Include": rule.rule_name},
+ ["Targets", rule.target_name],
+ ]
+ )
+ content.append(item_group)
+
+ for rule in msbuild_rules:
+ content.append(
+ [
+ "UsingTask",
+ {
+ "TaskName": rule.rule_name,
+ "TaskFactory": "XamlTaskFactory",
+ "AssemblyName": "Microsoft.Build.Tasks.v4.0",
+ },
+ ["Task", "$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml"],
+ ]
+ )
+ for rule in msbuild_rules:
+ rule_name = rule.rule_name
+ target_outputs = "%%(%s.Outputs)" % rule_name
+ target_inputs = (
+ "%%(%s.Identity);%%(%s.AdditionalDependencies);" "$(MSBuildProjectFile)"
+ ) % (rule_name, rule_name)
+ rule_inputs = "%%(%s.Identity)" % rule_name
+ extension_condition = (
+ "'%(Extension)'=='.obj' or "
+ "'%(Extension)'=='.res' or "
+ "'%(Extension)'=='.rsc' or "
+ "'%(Extension)'=='.lib'"
+ )
+ remove_section = [
+ "ItemGroup",
+ {"Condition": "'@(SelectedFiles)' != ''"},
+ [
+ rule_name,
+ {
+ "Remove": "@(%s)" % rule_name,
+ "Condition": "'%(Identity)' != '@(SelectedFiles)'",
+ },
+ ],
+ ]
+ inputs_section = [
+ "ItemGroup",
+ [rule.inputs, {"Include": "%%(%s.AdditionalDependencies)" % rule_name}],
+ ]
+ logging_section = [
+ "ItemGroup",
+ [
+ rule.tlog,
+ {
+ "Include": "%%(%s.Outputs)" % rule_name,
+ "Condition": (
+ "'%%(%s.Outputs)' != '' and "
+ "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name)
+ ),
+ },
+ ["Source", "@(%s, '|')" % rule_name],
+ ["Inputs", "@(%s -> '%%(Fullpath)', ';')" % rule.inputs],
+ ],
+ ]
+ message_section = [
+ "Message",
+ {"Importance": "High", "Text": "%%(%s.ExecutionDescription)" % rule_name},
+ ]
+ write_tlog_section = [
+ "WriteLinesToFile",
+ {
+ "Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
+ "'true'" % (rule.tlog, rule.tlog),
+ "File": "$(IntDir)$(ProjectName).write.1.tlog",
+ "Lines": "^%%(%s.Source);@(%s->'%%(Fullpath)')"
+ % (rule.tlog, rule.tlog),
+ },
+ ]
+ read_tlog_section = [
+ "WriteLinesToFile",
+ {
+ "Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
+ "'true'" % (rule.tlog, rule.tlog),
+ "File": "$(IntDir)$(ProjectName).read.1.tlog",
+ "Lines": f"^%({rule.tlog}.Source);%({rule.tlog}.Inputs)",
+ },
+ ]
+ command_and_input_section = [
+ rule_name,
+ {
+ "Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
+ "'true'" % (rule_name, rule_name),
+ "EchoOff": "true",
+ "StandardOutputImportance": "High",
+ "StandardErrorImportance": "High",
+ "CommandLineTemplate": "%%(%s.CommandLineTemplate)" % rule_name,
+ "AdditionalOptions": "%%(%s.AdditionalOptions)" % rule_name,
+ "Inputs": rule_inputs,
+ },
+ ]
+ content.extend(
+ [
+ [
+ "Target",
+ {
+ "Name": rule.target_name,
+ "BeforeTargets": "$(%s)" % rule.before_targets,
+ "AfterTargets": "$(%s)" % rule.after_targets,
+ "Condition": "'@(%s)' != ''" % rule_name,
+ "DependsOnTargets": "$(%s);%s"
+ % (rule.depends_on, rule.compute_output),
+ "Outputs": target_outputs,
+ "Inputs": target_inputs,
+ },
+ remove_section,
+ inputs_section,
+ logging_section,
+ message_section,
+ write_tlog_section,
+ read_tlog_section,
+ command_and_input_section,
+ ],
+ [
+ "PropertyGroup",
+ [
+ "ComputeLinkInputsTargets",
+ "$(ComputeLinkInputsTargets);",
+ "%s;" % rule.compute_output,
+ ],
+ [
+ "ComputeLibInputsTargets",
+ "$(ComputeLibInputsTargets);",
+ "%s;" % rule.compute_output,
+ ],
+ ],
+ [
+ "Target",
+ {
+ "Name": rule.compute_output,
+ "Condition": "'@(%s)' != ''" % rule_name,
+ },
+ [
+ "ItemGroup",
+ [
+ rule.dirs_to_make,
+ {
+ "Condition": "'@(%s)' != '' and "
+ "'%%(%s.ExcludedFromBuild)' != 'true'"
+ % (rule_name, rule_name),
+ "Include": "%%(%s.Outputs)" % rule_name,
+ },
+ ],
+ [
+ "Link",
+ {
+ "Include": "%%(%s.Identity)" % rule.dirs_to_make,
+ "Condition": extension_condition,
+ },
+ ],
+ [
+ "Lib",
+ {
+ "Include": "%%(%s.Identity)" % rule.dirs_to_make,
+ "Condition": extension_condition,
+ },
+ ],
+ [
+ "ImpLib",
+ {
+ "Include": "%%(%s.Identity)" % rule.dirs_to_make,
+ "Condition": extension_condition,
+ },
+ ],
+ ],
+ [
+ "MakeDir",
+ {
+ "Directories": (
+ "@(%s->'%%(RootDir)%%(Directory)')" % rule.dirs_to_make
+ )
+ },
+ ],
+ ],
+ ]
+ )
+ easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True)
+
+
+def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules):
+ # Generate the .xml file
+ content = [
+ "ProjectSchemaDefinitions",
+ {
+ "xmlns": (
+ "clr-namespace:Microsoft.Build.Framework.XamlTypes;"
+ "assembly=Microsoft.Build.Framework"
+ ),
+ "xmlns:x": "http://schemas.microsoft.com/winfx/2006/xaml",
+ "xmlns:sys": "clr-namespace:System;assembly=mscorlib",
+ "xmlns:transformCallback": "Microsoft.Cpp.Dev10.ConvertPropertyCallback",
+ },
+ ]
+ for rule in msbuild_rules:
+ content.extend(
+ [
+ [
+ "Rule",
+ {
+ "Name": rule.rule_name,
+ "PageTemplate": "tool",
+ "DisplayName": rule.display_name,
+ "Order": "200",
+ },
+ [
+ "Rule.DataSource",
+ [
+ "DataSource",
+ {"Persistence": "ProjectFile", "ItemType": rule.rule_name},
+ ],
+ ],
+ [
+ "Rule.Categories",
+ [
+ "Category",
+ {"Name": "General"},
+ ["Category.DisplayName", ["sys:String", "General"]],
+ ],
+ [
+ "Category",
+ {"Name": "Command Line", "Subtype": "CommandLine"},
+ ["Category.DisplayName", ["sys:String", "Command Line"]],
+ ],
+ ],
+ [
+ "StringListProperty",
+ {
+ "Name": "Inputs",
+ "Category": "Command Line",
+ "IsRequired": "true",
+ "Switch": " ",
+ },
+ [
+ "StringListProperty.DataSource",
+ [
+ "DataSource",
+ {
+ "Persistence": "ProjectFile",
+ "ItemType": rule.rule_name,
+ "SourceType": "Item",
+ },
+ ],
+ ],
+ ],
+ [
+ "StringProperty",
+ {
+ "Name": "CommandLineTemplate",
+ "DisplayName": "Command Line",
+ "Visible": "False",
+ "IncludeInCommandLine": "False",
+ },
+ ],
+ [
+ "DynamicEnumProperty",
+ {
+ "Name": rule.before_targets,
+ "Category": "General",
+ "EnumProvider": "Targets",
+ "IncludeInCommandLine": "False",
+ },
+ [
+ "DynamicEnumProperty.DisplayName",
+ ["sys:String", "Execute Before"],
+ ],
+ [
+ "DynamicEnumProperty.Description",
+ [
+ "sys:String",
+ "Specifies the targets for the build customization"
+ " to run before.",
+ ],
+ ],
+ [
+ "DynamicEnumProperty.ProviderSettings",
+ [
+ "NameValuePair",
+ {
+ "Name": "Exclude",
+ "Value": "^%s|^Compute" % rule.before_targets,
+ },
+ ],
+ ],
+ [
+ "DynamicEnumProperty.DataSource",
+ [
+ "DataSource",
+ {
+ "Persistence": "ProjectFile",
+ "HasConfigurationCondition": "true",
+ },
+ ],
+ ],
+ ],
+ [
+ "DynamicEnumProperty",
+ {
+ "Name": rule.after_targets,
+ "Category": "General",
+ "EnumProvider": "Targets",
+ "IncludeInCommandLine": "False",
+ },
+ [
+ "DynamicEnumProperty.DisplayName",
+ ["sys:String", "Execute After"],
+ ],
+ [
+ "DynamicEnumProperty.Description",
+ [
+ "sys:String",
+ (
+ "Specifies the targets for the build customization"
+ " to run after."
+ ),
+ ],
+ ],
+ [
+ "DynamicEnumProperty.ProviderSettings",
+ [
+ "NameValuePair",
+ {
+ "Name": "Exclude",
+ "Value": "^%s|^Compute" % rule.after_targets,
+ },
+ ],
+ ],
+ [
+ "DynamicEnumProperty.DataSource",
+ [
+ "DataSource",
+ {
+ "Persistence": "ProjectFile",
+ "ItemType": "",
+ "HasConfigurationCondition": "true",
+ },
+ ],
+ ],
+ ],
+ [
+ "StringListProperty",
+ {
+ "Name": "Outputs",
+ "DisplayName": "Outputs",
+ "Visible": "False",
+ "IncludeInCommandLine": "False",
+ },
+ ],
+ [
+ "StringProperty",
+ {
+ "Name": "ExecutionDescription",
+ "DisplayName": "Execution Description",
+ "Visible": "False",
+ "IncludeInCommandLine": "False",
+ },
+ ],
+ [
+ "StringListProperty",
+ {
+ "Name": "AdditionalDependencies",
+ "DisplayName": "Additional Dependencies",
+ "IncludeInCommandLine": "False",
+ "Visible": "false",
+ },
+ ],
+ [
+ "StringProperty",
+ {
+ "Subtype": "AdditionalOptions",
+ "Name": "AdditionalOptions",
+ "Category": "Command Line",
+ },
+ [
+ "StringProperty.DisplayName",
+ ["sys:String", "Additional Options"],
+ ],
+ [
+ "StringProperty.Description",
+ ["sys:String", "Additional Options"],
+ ],
+ ],
+ ],
+ [
+ "ItemType",
+ {"Name": rule.rule_name, "DisplayName": rule.display_name},
+ ],
+ [
+ "FileExtension",
+ {"Name": "*" + rule.extension, "ContentType": rule.rule_name},
+ ],
+ [
+ "ContentType",
+ {
+ "Name": rule.rule_name,
+ "DisplayName": "",
+ "ItemType": rule.rule_name,
+ },
+ ],
+ ]
+ )
+ easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True)
+
+
+def _GetConfigurationAndPlatform(name, settings, spec):
+ configuration = name.rsplit("_", 1)[0]
+ platform = settings.get("msvs_configuration_platform", "Win32")
+ if spec["toolset"] == "host" and platform == "arm64":
+ platform = "x64" # Host-only tools are always built for x64
+ return (configuration, platform)
+
+
+def _GetConfigurationCondition(name, settings, spec):
+ return r"'$(Configuration)|$(Platform)'=='%s|%s'" % _GetConfigurationAndPlatform(
+ name, settings, spec
+ )
+
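+# For illustration (hypothetical values): a configuration named "Debug" with
+# default settings produces the condition
+# "'$(Configuration)|$(Platform)'=='Debug|Win32'".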
+
+def _GetMSBuildProjectConfigurations(configurations, spec):
+ group = ["ItemGroup", {"Label": "ProjectConfigurations"}]
+ for (name, settings) in sorted(configurations.items()):
+ configuration, platform = _GetConfigurationAndPlatform(name, settings, spec)
+ designation = f"{configuration}|{platform}"
+ group.append(
+ [
+ "ProjectConfiguration",
+ {"Include": designation},
+ ["Configuration", configuration],
+ ["Platform", platform],
+ ]
+ )
+ return [group]
+
+
+def _GetMSBuildGlobalProperties(spec, version, guid, gyp_file_name):
+ namespace = os.path.splitext(gyp_file_name)[0]
+ properties = [
+ [
+ "PropertyGroup",
+ {"Label": "Globals"},
+ ["ProjectGuid", guid],
+ ["Keyword", "Win32Proj"],
+ ["RootNamespace", namespace],
+ ["IgnoreWarnCompileDuplicatedFilename", "true"],
+ ]
+ ]
+
+ if (
+ os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64"
+ or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64"
+ ):
+ properties[0].append(["PreferredToolArchitecture", "x64"])
+
+ if spec.get("msvs_target_platform_version"):
+ target_platform_version = spec.get("msvs_target_platform_version")
+ properties[0].append(["WindowsTargetPlatformVersion", target_platform_version])
+ if spec.get("msvs_target_platform_minversion"):
+ target_platform_minversion = spec.get("msvs_target_platform_minversion")
+ properties[0].append(
+ ["WindowsTargetPlatformMinVersion", target_platform_minversion]
+ )
+ else:
+ properties[0].append(
+ ["WindowsTargetPlatformMinVersion", target_platform_version]
+ )
+
+ if spec.get("msvs_enable_winrt"):
+ properties[0].append(["DefaultLanguage", "en-US"])
+ properties[0].append(["AppContainerApplication", "true"])
+ if spec.get("msvs_application_type_revision"):
+ app_type_revision = spec.get("msvs_application_type_revision")
+ properties[0].append(["ApplicationTypeRevision", app_type_revision])
+ else:
+ properties[0].append(["ApplicationTypeRevision", "8.1"])
+ if spec.get("msvs_enable_winphone"):
+ properties[0].append(["ApplicationType", "Windows Phone"])
+ else:
+ properties[0].append(["ApplicationType", "Windows Store"])
+
+ platform_name = None
+ msvs_windows_sdk_version = None
+ for configuration in spec["configurations"].values():
+ platform_name = platform_name or _ConfigPlatform(configuration)
+ msvs_windows_sdk_version = (
+ msvs_windows_sdk_version
+ or _ConfigWindowsTargetPlatformVersion(configuration, version)
+ )
+ if platform_name and msvs_windows_sdk_version:
+ break
+ if msvs_windows_sdk_version:
+ properties[0].append(
+ ["WindowsTargetPlatformVersion", str(msvs_windows_sdk_version)]
+ )
+ elif version.compatible_sdks:
+ raise GypError(
+ "%s requires any SDK of %s version, but none were found"
+ % (version.description, version.compatible_sdks)
+ )
+
+ if platform_name == "ARM":
+ properties[0].append(["WindowsSDKDesktopARMSupport", "true"])
+
+ return properties
+
+
+def _GetMSBuildConfigurationDetails(spec, build_file):
+ properties = {}
+ for name, settings in spec["configurations"].items():
+ msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
+ condition = _GetConfigurationCondition(name, settings, spec)
+ character_set = msbuild_attributes.get("CharacterSet")
+ config_type = msbuild_attributes.get("ConfigurationType")
+ _AddConditionalProperty(properties, condition, "ConfigurationType", config_type)
+ if config_type == "Driver":
+ _AddConditionalProperty(properties, condition, "DriverType", "WDM")
+ _AddConditionalProperty(
+ properties, condition, "TargetVersion", _ConfigTargetVersion(settings)
+ )
+ if character_set:
+ if "msvs_enable_winrt" not in spec:
+ _AddConditionalProperty(
+ properties, condition, "CharacterSet", character_set
+ )
+ return _GetMSBuildPropertyGroup(spec, "Configuration", properties)
+
+
+def _GetMSBuildLocalProperties(msbuild_toolset):
+ # Currently the only local property we support is PlatformToolset
+ properties = {}
+ if msbuild_toolset:
+ properties = [
+ [
+ "PropertyGroup",
+ {"Label": "Locals"},
+ ["PlatformToolset", msbuild_toolset],
+ ]
+ ]
+ return properties
+
+
+def _GetMSBuildPropertySheets(configurations, spec):
+ user_props = r"$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props"
+ additional_props = {}
+ props_specified = False
+ for name, settings in sorted(configurations.items()):
+ configuration = _GetConfigurationCondition(name, settings, spec)
+ if "msbuild_props" in settings:
+ additional_props[configuration] = _FixPaths(settings["msbuild_props"])
+ props_specified = True
+ else:
+ additional_props[configuration] = ""
+
+ if not props_specified:
+ return [
+ [
+ "ImportGroup",
+ {"Label": "PropertySheets"},
+ [
+ "Import",
+ {
+ "Project": user_props,
+ "Condition": "exists('%s')" % user_props,
+ "Label": "LocalAppDataPlatform",
+ },
+ ],
+ ]
+ ]
+ else:
+ sheets = []
+ for condition, props in additional_props.items():
+ import_group = [
+ "ImportGroup",
+ {"Label": "PropertySheets", "Condition": condition},
+ [
+ "Import",
+ {
+ "Project": user_props,
+ "Condition": "exists('%s')" % user_props,
+ "Label": "LocalAppDataPlatform",
+ },
+ ],
+ ]
+ for props_file in props:
+ import_group.append(["Import", {"Project": props_file}])
+ sheets.append(import_group)
+ return sheets
+
+
+def _ConvertMSVSBuildAttributes(spec, config, build_file):
+ config_type = _GetMSVSConfigurationType(spec, build_file)
+ msvs_attributes = _GetMSVSAttributes(spec, config, config_type)
+ msbuild_attributes = {}
+ for a in msvs_attributes:
+ if a in ["IntermediateDirectory", "OutputDirectory"]:
+ directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a])
+ if not directory.endswith("\\"):
+ directory += "\\"
+ msbuild_attributes[a] = directory
+ elif a == "CharacterSet":
+ msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a])
+ elif a == "ConfigurationType":
+ msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
+ else:
+ print("Warning: Do not know how to convert MSVS attribute " + a)
+ return msbuild_attributes
+
+
+def _ConvertMSVSCharacterSet(char_set):
+ if char_set.isdigit():
+ char_set = {"0": "MultiByte", "1": "Unicode", "2": "MultiByte"}[char_set]
+ return char_set
+
+
+def _ConvertMSVSConfigurationType(config_type):
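+    # Illustrative note: the digit codes mirror the legacy .vcproj
+    # ConfigurationType encoding (1 == .exe, 2 == .dll, 4 == static .lib,
+    # 5 == driver, 10 == utility) and are mapped to MSBuild names.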
+ if config_type.isdigit():
+ config_type = {
+ "1": "Application",
+ "2": "DynamicLibrary",
+ "4": "StaticLibrary",
+ "5": "Driver",
+ "10": "Utility",
+ }[config_type]
+ return config_type
+
+
+def _GetMSBuildAttributes(spec, config, build_file):
+ if "msbuild_configuration_attributes" not in config:
+ msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file)
+
+ else:
+ config_type = _GetMSVSConfigurationType(spec, build_file)
+ config_type = _ConvertMSVSConfigurationType(config_type)
+ msbuild_attributes = config.get("msbuild_configuration_attributes", {})
+ msbuild_attributes.setdefault("ConfigurationType", config_type)
+ output_dir = msbuild_attributes.get(
+ "OutputDirectory", "$(SolutionDir)$(Configuration)"
+ )
+ msbuild_attributes["OutputDirectory"] = _FixPath(output_dir) + "\\"
+ if "IntermediateDirectory" not in msbuild_attributes:
+ intermediate = _FixPath("$(Configuration)") + "\\"
+ msbuild_attributes["IntermediateDirectory"] = intermediate
+ if "CharacterSet" in msbuild_attributes:
+ msbuild_attributes["CharacterSet"] = _ConvertMSVSCharacterSet(
+ msbuild_attributes["CharacterSet"]
+ )
+ if "TargetName" not in msbuild_attributes:
+ prefix = spec.get("product_prefix", "")
+ product_name = spec.get("product_name", "$(ProjectName)")
+ target_name = prefix + product_name
+ msbuild_attributes["TargetName"] = target_name
+ if "TargetExt" not in msbuild_attributes and "product_extension" in spec:
+ ext = spec.get("product_extension")
+ msbuild_attributes["TargetExt"] = "." + ext
+
+ if spec.get("msvs_external_builder"):
+ external_out_dir = spec.get("msvs_external_builder_out_dir", ".")
+ msbuild_attributes["OutputDirectory"] = _FixPath(external_out_dir) + "\\"
+
+ # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile'
+ # (depending on the tool used) to avoid MSB8012 warning.
+ msbuild_tool_map = {
+ "executable": "Link",
+ "shared_library": "Link",
+ "loadable_module": "Link",
+ "windows_driver": "Link",
+ "static_library": "Lib",
+ }
+ msbuild_tool = msbuild_tool_map.get(spec["type"])
+ if msbuild_tool:
+ msbuild_settings = config["finalized_msbuild_settings"]
+ out_file = msbuild_settings[msbuild_tool].get("OutputFile")
+ if out_file:
+ msbuild_attributes["TargetPath"] = _FixPath(out_file)
+ target_ext = msbuild_settings[msbuild_tool].get("TargetExt")
+ if target_ext:
+ msbuild_attributes["TargetExt"] = target_ext
+
+ return msbuild_attributes
+
+
+def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
+ # TODO(jeanluc) We could optimize out the following and do it only if
+ # there are actions.
+ # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
+ new_paths = []
+ cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."])[0]
+ if cygwin_dirs:
+ cyg_path = "$(MSBuildProjectDirectory)\\%s\\bin\\" % _FixPath(cygwin_dirs)
+ new_paths.append(cyg_path)
+ # TODO(jeanluc) Change the convention to have both a cygwin_dir and a
+ # python_dir.
+ python_path = cyg_path.replace("cygwin\\bin", "python_26")
+ new_paths.append(python_path)
+ if new_paths:
+ new_paths = "$(ExecutablePath);" + ";".join(new_paths)
+
+ properties = {}
+ for (name, configuration) in sorted(configurations.items()):
+ condition = _GetConfigurationCondition(name, configuration, spec)
+ attributes = _GetMSBuildAttributes(spec, configuration, build_file)
+ msbuild_settings = configuration["finalized_msbuild_settings"]
+ _AddConditionalProperty(
+ properties, condition, "IntDir", attributes["IntermediateDirectory"]
+ )
+ _AddConditionalProperty(
+ properties, condition, "OutDir", attributes["OutputDirectory"]
+ )
+ _AddConditionalProperty(
+ properties, condition, "TargetName", attributes["TargetName"]
+ )
+
+ if attributes.get("TargetPath"):
+ _AddConditionalProperty(
+ properties, condition, "TargetPath", attributes["TargetPath"]
+ )
+ if attributes.get("TargetExt"):
+ _AddConditionalProperty(
+ properties, condition, "TargetExt", attributes["TargetExt"]
+ )
+
+ if new_paths:
+ _AddConditionalProperty(properties, condition, "ExecutablePath", new_paths)
+ tool_settings = msbuild_settings.get("", {})
+ for name, value in sorted(tool_settings.items()):
+ formatted_value = _GetValueFormattedForMSBuild("", name, value)
+ _AddConditionalProperty(properties, condition, name, formatted_value)
+ return _GetMSBuildPropertyGroup(spec, None, properties)
+
+
+def _AddConditionalProperty(properties, condition, name, value):
+ """Adds a property / conditional value pair to a dictionary.
+
+ Arguments:
+ properties: The dictionary to be modified. The key is the name of the
+          property. The value is itself a dictionary; its key is a property
+          value and its value is the list of conditions under which the
+          property takes that value.
+ condition: The condition under which the named property has the value.
+ name: The name of the property.
+ value: The value of the property.
+ """
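+    # e.g. two calls with name "OutDir" and value "out\\" under the Debug and
+    # Release conditions leave:
+    #   properties == {"OutDir": {"out\\": [<Debug cond>, <Release cond>]}}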
+ if name not in properties:
+ properties[name] = {}
+ values = properties[name]
+ if value not in values:
+ values[value] = []
+ conditions = values[value]
+ conditions.append(condition)
+
+
+# Regex for msvs variable references ( i.e. $(FOO) ).
+MSVS_VARIABLE_REFERENCE = re.compile(r"\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)")
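+# e.g. MSVS_VARIABLE_REFERENCE.findall(r"$(OutDir)\$(TargetName).exe")
+# returns ["OutDir", "TargetName"].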
+
+
+def _GetMSBuildPropertyGroup(spec, label, properties):
+ """Returns a PropertyGroup definition for the specified properties.
+
+ Arguments:
+ spec: The target project dict.
+ label: An optional label for the PropertyGroup.
+ properties: The dictionary to be converted. The key is the name of the
+          property. The value is itself a dictionary; its key is a property
+          value and its value is the list of conditions under which the
+          property takes that value.
+ """
+ group = ["PropertyGroup"]
+ if label:
+ group.append({"Label": label})
+ num_configurations = len(spec["configurations"])
+
+ def GetEdges(node):
+        # Use a definition of edges such that user_of_variable -> used_variable.
+ # This happens to be easier in this case, since a variable's
+ # definition contains all variables it references in a single string.
+ edges = set()
+ for value in sorted(properties[node].keys()):
+ # Add to edges all $(...) references to variables.
+ #
+        # Variable references that refer to names not in properties are
+        # excluded; these can exist, for instance, to refer to built-in
+        # definitions like $(SolutionDir).
+        #
+        # Self-references are ignored; self-reference is used in a few places
+        # to append to the default value, i.e. PATH=$(PATH);other_path.
+ edges.update(
+ {
+ v
+ for v in MSVS_VARIABLE_REFERENCE.findall(value)
+ if v in properties and v != node
+ }
+ )
+ return edges
+
+ properties_ordered = gyp.common.TopologicallySorted(properties.keys(), GetEdges)
+ # Walk properties in the reverse of a topological sort on
+ # user_of_variable -> used_variable as this ensures variables are
+ # defined before they are used.
+ # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
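+    # e.g. if TargetPath is defined as "$(OutDir)foo.exe", the edge
+    # TargetPath -> OutDir ensures OutDir is written before TargetPath.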
+ for name in reversed(properties_ordered):
+ values = properties[name]
+ for value, conditions in sorted(values.items()):
+ if len(conditions) == num_configurations:
+                # If the value is the same for all configurations,
+                # just add one unconditional entry.
+ group.append([name, value])
+ else:
+ for condition in conditions:
+ group.append([name, {"Condition": condition}, value])
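+    # The easy_xml fragment built above, e.g.
+    #   ["PropertyGroup", {"Label": "Configuration"},
+    #    ["ConfigurationType", "Application"]]
+    # is later serialized as correspondingly nested XML elements.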
+ return [group]
+
+
+def _GetMSBuildToolSettingsSections(spec, configurations):
+ groups = []
+ for (name, configuration) in sorted(configurations.items()):
+ msbuild_settings = configuration["finalized_msbuild_settings"]
+ group = [
+ "ItemDefinitionGroup",
+ {"Condition": _GetConfigurationCondition(name, configuration, spec)},
+ ]
+ for tool_name, tool_settings in sorted(msbuild_settings.items()):
+ # Skip the tool named '' which is a holder of global settings handled
+ # by _GetMSBuildConfigurationGlobalProperties.
+ if tool_name:
+ if tool_settings:
+ tool = [tool_name]
+ for name, value in sorted(tool_settings.items()):
+ formatted_value = _GetValueFormattedForMSBuild(
+ tool_name, name, value
+ )
+ tool.append([name, formatted_value])
+ group.append(tool)
+ groups.append(group)
+ return groups
+
+
+def _FinalizeMSBuildSettings(spec, configuration):
+ if "msbuild_settings" in configuration:
+ converted = False
+ msbuild_settings = configuration["msbuild_settings"]
+ MSVSSettings.ValidateMSBuildSettings(msbuild_settings)
+ else:
+ converted = True
+ msvs_settings = configuration.get("msvs_settings", {})
+ msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
+ include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs(
+ configuration
+ )
+ libraries = _GetLibraries(spec)
+ library_dirs = _GetLibraryDirs(configuration)
+ out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
+ target_ext = _GetOutputTargetExt(spec)
+ defines = _GetDefines(configuration)
+ if converted:
+ # Visual Studio 2010 has TR1
+ defines = [d for d in defines if d != "_HAS_TR1=0"]
+ # Warn of ignored settings
+ ignored_settings = ["msvs_tool_files"]
+ for ignored_setting in ignored_settings:
+ value = configuration.get(ignored_setting)
+ if value:
+ print(
+ "Warning: The automatic conversion to MSBuild does not handle "
+ "%s. Ignoring setting of %s" % (ignored_setting, str(value))
+ )
+
+ defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
+ disabled_warnings = _GetDisabledWarnings(configuration)
+ prebuild = configuration.get("msvs_prebuild")
+ postbuild = configuration.get("msvs_postbuild")
+ def_file = _GetModuleDefinition(spec)
+ precompiled_header = configuration.get("msvs_precompiled_header")
+
+ # Add the information to the appropriate tool
+ # TODO(jeanluc) We could optimize and generate these settings only if
+ # the corresponding files are found, e.g. don't generate ResourceCompile
+ # if you don't have any resources.
+ _ToolAppend(
+ msbuild_settings, "ClCompile", "AdditionalIncludeDirectories", include_dirs
+ )
+ _ToolAppend(
+ msbuild_settings, "Midl", "AdditionalIncludeDirectories", midl_include_dirs
+ )
+ _ToolAppend(
+ msbuild_settings,
+ "ResourceCompile",
+ "AdditionalIncludeDirectories",
+ resource_include_dirs,
+ )
+ # Add in libraries, note that even for empty libraries, we want this
+ # set, to prevent inheriting default libraries from the environment.
+ _ToolSetOrAppend(msbuild_settings, "Link", "AdditionalDependencies", libraries)
+ _ToolAppend(msbuild_settings, "Link", "AdditionalLibraryDirectories", library_dirs)
+ if out_file:
+ _ToolAppend(
+ msbuild_settings, msbuild_tool, "OutputFile", out_file, only_if_unset=True
+ )
+ if target_ext:
+ _ToolAppend(
+ msbuild_settings, msbuild_tool, "TargetExt", target_ext, only_if_unset=True
+ )
+ # Add defines.
+ _ToolAppend(msbuild_settings, "ClCompile", "PreprocessorDefinitions", defines)
+ _ToolAppend(msbuild_settings, "ResourceCompile", "PreprocessorDefinitions", defines)
+ # Add disabled warnings.
+ _ToolAppend(
+ msbuild_settings, "ClCompile", "DisableSpecificWarnings", disabled_warnings
+ )
+ # Turn on precompiled headers if appropriate.
+ if precompiled_header:
+ precompiled_header = os.path.split(precompiled_header)[1]
+ _ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "Use")
+ _ToolAppend(
+ msbuild_settings, "ClCompile", "PrecompiledHeaderFile", precompiled_header
+ )
+ _ToolAppend(
+ msbuild_settings, "ClCompile", "ForcedIncludeFiles", [precompiled_header]
+ )
+ else:
+ _ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "NotUsing")
+ # Turn off WinRT compilation
+ _ToolAppend(msbuild_settings, "ClCompile", "CompileAsWinRT", "false")
+ # Turn on import libraries if appropriate
+ if spec.get("msvs_requires_importlibrary"):
+ _ToolAppend(msbuild_settings, "", "IgnoreImportLibrary", "false")
+ # Loadable modules don't generate import libraries;
+ # tell dependent projects to not expect one.
+ if spec["type"] == "loadable_module":
+ _ToolAppend(msbuild_settings, "", "IgnoreImportLibrary", "true")
+ # Set the module definition file if any.
+ if def_file:
+ _ToolAppend(msbuild_settings, "Link", "ModuleDefinitionFile", def_file)
+ configuration["finalized_msbuild_settings"] = msbuild_settings
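+    # msbuild_settings is stored by reference, so the PreBuildEvent /
+    # PostBuildEvent appends below still land in finalized_msbuild_settings.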
+ if prebuild:
+ _ToolAppend(msbuild_settings, "PreBuildEvent", "Command", prebuild)
+ if postbuild:
+ _ToolAppend(msbuild_settings, "PostBuildEvent", "Command", postbuild)
+
+
+def _GetValueFormattedForMSBuild(tool_name, name, value):
+    if isinstance(value, list):
+        # For some settings, VS2010 does not automatically extend the
+        # settings with the inherited defaults.
+        # TODO(jeanluc) Is this what we want?
+ if name in [
+ "AdditionalIncludeDirectories",
+ "AdditionalLibraryDirectories",
+ "AdditionalOptions",
+ "DelayLoadDLLs",
+ "DisableSpecificWarnings",
+ "PreprocessorDefinitions",
+ ]:
+ value.append("%%(%s)" % name)
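+            # Appending the %(...) item-metadata reference preserves the
+            # MSBuild defaults, e.g. ["A", "B"] for PreprocessorDefinitions
+            # is emitted as "A;B;%(PreprocessorDefinitions)".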
+ # For most tools, entries in a list should be separated with ';' but some
+ # settings use a space. Check for those first.
+ exceptions = {
+ "ClCompile": ["AdditionalOptions"],
+ "Link": ["AdditionalOptions"],
+ "Lib": ["AdditionalOptions"],
+ }
+ if tool_name in exceptions and name in exceptions[tool_name]:
+ char = " "
+ else:
+ char = ";"
+ formatted_value = char.join(
+ [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value]
+ )
+ else:
+ formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value)
+ return formatted_value
+
+
+def _VerifySourcesExist(sources, root_dir):
+ """Verifies that all source files exist on disk.
+
+ Checks that all regular source files, i.e. not created at run time,
+ exist on disk. Missing files cause needless recompilation but no otherwise
+ visible errors.
+
+ Arguments:
+ sources: A recursive list of Filter/file names.
+ root_dir: The root directory for the relative path names.
+ Returns:
+ A list of source files that cannot be found on disk.
+ """
+ missing_sources = []
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
+ else:
+ if "$" not in source:
+ full_path = os.path.join(root_dir, source)
+ if not os.path.exists(full_path):
+ missing_sources.append(full_path)
+ return missing_sources
+
+
+def _GetMSBuildSources(
+ spec,
+ sources,
+ exclusions,
+ rule_dependencies,
+ extension_to_rule_name,
+ actions_spec,
+ sources_handled_by_action,
+ list_excluded,
+):
+ groups = [
+ "none",
+ "masm",
+ "midl",
+ "include",
+ "compile",
+ "resource",
+ "rule",
+ "rule_dependency",
+ ]
+ grouped_sources = {}
+ for g in groups:
+ grouped_sources[g] = []
+
+ _AddSources2(
+ spec,
+ sources,
+ exclusions,
+ grouped_sources,
+ rule_dependencies,
+ extension_to_rule_name,
+ sources_handled_by_action,
+ list_excluded,
+ )
+ sources = []
+ for g in groups:
+ if grouped_sources[g]:
+ sources.append(["ItemGroup"] + grouped_sources[g])
+ if actions_spec:
+ sources.append(["ItemGroup"] + actions_spec)
+ return sources
+
+
+def _AddSources2(
+ spec,
+ sources,
+ exclusions,
+ grouped_sources,
+ rule_dependencies,
+ extension_to_rule_name,
+ sources_handled_by_action,
+ list_excluded,
+):
+ extensions_excluded_from_precompile = []
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ _AddSources2(
+ spec,
+ source.contents,
+ exclusions,
+ grouped_sources,
+ rule_dependencies,
+ extension_to_rule_name,
+ sources_handled_by_action,
+ list_excluded,
+ )
+ else:
+ if source not in sources_handled_by_action:
+ detail = []
+ excluded_configurations = exclusions.get(source, [])
+ if len(excluded_configurations) == len(spec["configurations"]):
+ detail.append(["ExcludedFromBuild", "true"])
+ else:
+ for config_name, configuration in sorted(excluded_configurations):
+ condition = _GetConfigurationCondition(
+ config_name, configuration
+ )
+ detail.append(
+ ["ExcludedFromBuild", {"Condition": condition}, "true"]
+ )
+ # Add precompile if needed
+ for config_name, configuration in spec["configurations"].items():
+ precompiled_source = configuration.get(
+ "msvs_precompiled_source", ""
+ )
+ if precompiled_source != "":
+ precompiled_source = _FixPath(precompiled_source)
+ if not extensions_excluded_from_precompile:
+ # If the precompiled header is generated by a C source,
+ # we must not try to use it for C++ sources,
+ # and vice versa.
+ basename, extension = os.path.splitext(precompiled_source)
+ if extension == ".c":
+ extensions_excluded_from_precompile = [
+ ".cc",
+ ".cpp",
+ ".cxx",
+ ]
+ else:
+ extensions_excluded_from_precompile = [".c"]
+
+ if precompiled_source == source:
+ condition = _GetConfigurationCondition(
+ config_name, configuration, spec
+ )
+ detail.append(
+ ["PrecompiledHeader", {"Condition": condition}, "Create"]
+ )
+ else:
+ # Turn off precompiled header usage for source files of a
+ # different type than the file that generated the
+ # precompiled header.
+ for extension in extensions_excluded_from_precompile:
+ if source.endswith(extension):
+ detail.append(["PrecompiledHeader", ""])
+ detail.append(["ForcedIncludeFiles", ""])
+
+ group, element = _MapFileToMsBuildSourceType(
+ source,
+ rule_dependencies,
+ extension_to_rule_name,
+ _GetUniquePlatforms(spec),
+ spec["toolset"],
+ )
+ if group == "compile" and not os.path.isabs(source):
+ # Add an <ObjectFileName> value to support duplicate source
+ # file basenames, except for absolute paths to avoid paths
+ # with more than 260 characters.
+ file_name = os.path.splitext(source)[0] + ".obj"
+ if file_name.startswith("..\\"):
+ file_name = re.sub(r"^(\.\.\\)+", "", file_name)
+ elif file_name.startswith("$("):
+ file_name = re.sub(r"^\$\([^)]+\)\\", "", file_name)
+ detail.append(["ObjectFileName", "$(IntDir)\\" + file_name])
+ grouped_sources[group].append([element, {"Include": source}] + detail)
+
+
+def _GetMSBuildProjectReferences(project):
+ references = []
+ if project.dependencies:
+ group = ["ItemGroup"]
+ added_dependency_set = set()
+ for dependency in project.dependencies:
+ dependency_spec = dependency.spec
+ should_skip_dep = False
+ if project.spec["toolset"] == "target":
+ if dependency_spec["toolset"] == "host":
+ if dependency_spec["type"] == "static_library":
+ should_skip_dep = True
+ if dependency.name.startswith("run_"):
+ should_skip_dep = False
+ if should_skip_dep:
+ continue
+
+ canonical_name = dependency.name.replace("_host", "")
+ added_dependency_set.add(canonical_name)
+ guid = dependency.guid
+ project_dir = os.path.split(project.path)[0]
+ relative_path = gyp.common.RelativePath(dependency.path, project_dir)
+ project_ref = [
+ "ProjectReference",
+ {"Include": relative_path},
+ ["Project", guid],
+ ["ReferenceOutputAssembly", "false"],
+ ]
+ for config in dependency.spec.get("configurations", {}).values():
+ if config.get("msvs_use_library_dependency_inputs", 0):
+ project_ref.append(["UseLibraryDependencyInputs", "true"])
+ break
+ # If it's disabled in any config, turn it off in the reference.
+ if config.get("msvs_2010_disable_uldi_when_referenced", 0):
+ project_ref.append(["UseLibraryDependencyInputs", "false"])
+ break
+ group.append(project_ref)
+ references.append(group)
+ return references
+
+
+def _GenerateMSBuildProject(project, options, version, generator_flags, spec):
+ spec = project.spec
+ configurations = spec["configurations"]
+ toolset = spec["toolset"]
+ project_dir, project_file_name = os.path.split(project.path)
+ gyp.common.EnsureDirExists(project.path)
+ # Prepare list of sources and excluded sources.
+
+ gyp_file = os.path.split(project.build_file)[1]
+ sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, gyp_file)
+ # Add rules.
+ actions_to_add = {}
+ props_files_of_rules = set()
+ targets_files_of_rules = set()
+ rule_dependencies = set()
+ extension_to_rule_name = {}
+ list_excluded = generator_flags.get("msvs_list_excluded_files", True)
+ platforms = _GetUniquePlatforms(spec)
+
+ # Don't generate rules if we are using an external builder like ninja.
+ if not spec.get("msvs_external_builder"):
+ _GenerateRulesForMSBuild(
+ project_dir,
+ options,
+ spec,
+ sources,
+ excluded_sources,
+ props_files_of_rules,
+ targets_files_of_rules,
+ actions_to_add,
+ rule_dependencies,
+ extension_to_rule_name,
+ )
+ else:
+ rules = spec.get("rules", [])
+ _AdjustSourcesForRules(rules, sources, excluded_sources, True)
+
+ sources, excluded_sources, excluded_idl = _AdjustSourcesAndConvertToFilterHierarchy(
+ spec, options, project_dir, sources, excluded_sources, list_excluded, version
+ )
+
+ # Don't add actions if we are using an external builder like ninja.
+ if not spec.get("msvs_external_builder"):
+ _AddActions(actions_to_add, spec, project.build_file)
+ _AddCopies(actions_to_add, spec)
+
+ # NOTE: this stanza must appear after all actions have been decided.
+    # Don't exclude sources with actions attached, or they won't run.
+ excluded_sources = _FilterActionsFromExcluded(excluded_sources, actions_to_add)
+
+ exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
+ actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild(
+ spec, actions_to_add
+ )
+
+ _GenerateMSBuildFiltersFile(
+ project.path + ".filters",
+ sources,
+ rule_dependencies,
+ extension_to_rule_name,
+ platforms,
+ toolset,
+ )
+ missing_sources = _VerifySourcesExist(sources, project_dir)
+
+ for configuration in configurations.values():
+ _FinalizeMSBuildSettings(spec, configuration)
+
+ # Add attributes to root element
+
+ import_default_section = [
+ ["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.Default.props"}]
+ ]
+ import_cpp_props_section = [
+ ["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.props"}]
+ ]
+ import_cpp_targets_section = [
+ ["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.targets"}]
+ ]
+ import_masm_props_section = [
+ ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\masm.props"}]
+ ]
+ import_masm_targets_section = [
+ ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\masm.targets"}]
+ ]
+ import_marmasm_props_section = [
+ ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\marmasm.props"}]
+ ]
+ import_marmasm_targets_section = [
+ ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\marmasm.targets"}]
+ ]
+ macro_section = [["PropertyGroup", {"Label": "UserMacros"}]]
+
+ content = [
+ "Project",
+ {
+ "xmlns": "http://schemas.microsoft.com/developer/msbuild/2003",
+ "ToolsVersion": version.ProjectVersion(),
+ "DefaultTargets": "Build",
+ },
+ ]
+
+ content += _GetMSBuildProjectConfigurations(configurations, spec)
+ content += _GetMSBuildGlobalProperties(
+ spec, version, project.guid, project_file_name
+ )
+ content += import_default_section
+ content += _GetMSBuildConfigurationDetails(spec, project.build_file)
+ if spec.get("msvs_enable_winphone"):
+ content += _GetMSBuildLocalProperties("v120_wp81")
+ else:
+ content += _GetMSBuildLocalProperties(project.msbuild_toolset)
+ content += import_cpp_props_section
+ content += import_masm_props_section
+ if "arm64" in platforms and toolset == "target":
+ content += import_marmasm_props_section
+ content += _GetMSBuildExtensions(props_files_of_rules)
+ content += _GetMSBuildPropertySheets(configurations, spec)
+ content += macro_section
+ content += _GetMSBuildConfigurationGlobalProperties(
+ spec, configurations, project.build_file
+ )
+ content += _GetMSBuildToolSettingsSections(spec, configurations)
+ content += _GetMSBuildSources(
+ spec,
+ sources,
+ exclusions,
+ rule_dependencies,
+ extension_to_rule_name,
+ actions_spec,
+ sources_handled_by_action,
+ list_excluded,
+ )
+ content += _GetMSBuildProjectReferences(project)
+ content += import_cpp_targets_section
+ content += import_masm_targets_section
+ if "arm64" in platforms and toolset == "target":
+ content += import_marmasm_targets_section
+ content += _GetMSBuildExtensionTargets(targets_files_of_rules)
+
+ if spec.get("msvs_external_builder"):
+ content += _GetMSBuildExternalBuilderTargets(spec)
+
+ # TODO(jeanluc) File a bug to get rid of runas. We had in MSVS:
+ # has_run_as = _WriteMSVSUserFile(project.path, version, spec)
+
+ easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True)
+
+ return missing_sources
+
+
+def _GetMSBuildExternalBuilderTargets(spec):
+ """Return a list of MSBuild targets for external builders.
+
+ The "Build" and "Clean" targets are always generated. If the spec contains
+ 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
+ be generated, to support building selected C/C++ files.
+
+ Arguments:
+ spec: The gyp target spec.
+ Returns:
+ List of MSBuild 'Target' specs.
+ """
+ build_cmd = _BuildCommandLineForRuleRaw(
+ spec, spec["msvs_external_builder_build_cmd"], False, False, False, False
+ )
+ build_target = ["Target", {"Name": "Build"}]
+ build_target.append(["Exec", {"Command": build_cmd}])
+
+ clean_cmd = _BuildCommandLineForRuleRaw(
+ spec, spec["msvs_external_builder_clean_cmd"], False, False, False, False
+ )
+ clean_target = ["Target", {"Name": "Clean"}]
+ clean_target.append(["Exec", {"Command": clean_cmd}])
+
+ targets = [build_target, clean_target]
+
+ if spec.get("msvs_external_builder_clcompile_cmd"):
+ clcompile_cmd = _BuildCommandLineForRuleRaw(
+ spec,
+ spec["msvs_external_builder_clcompile_cmd"],
+ False,
+ False,
+ False,
+ False,
+ )
+ clcompile_target = ["Target", {"Name": "ClCompile"}]
+ clcompile_target.append(["Exec", {"Command": clcompile_cmd}])
+ targets.append(clcompile_target)
+
+ return targets
+
+
+def _GetMSBuildExtensions(props_files_of_rules):
+ extensions = ["ImportGroup", {"Label": "ExtensionSettings"}]
+ for props_file in props_files_of_rules:
+ extensions.append(["Import", {"Project": props_file}])
+ return [extensions]
+
+
+def _GetMSBuildExtensionTargets(targets_files_of_rules):
+ targets_node = ["ImportGroup", {"Label": "ExtensionTargets"}]
+ for targets_file in sorted(targets_files_of_rules):
+ targets_node.append(["Import", {"Project": targets_file}])
+ return [targets_node]
+
+
+def _GenerateActionsForMSBuild(spec, actions_to_add):
+    """Add the actions accumulated in actions_to_add, merging as needed.
+
+ Arguments:
+ spec: the target project dict
+ actions_to_add: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+
+ Returns:
+ A pair of (action specification, the sources handled by this action).
+ """
+ sources_handled_by_action = OrderedSet()
+ actions_spec = []
+ for primary_input, actions in actions_to_add.items():
+ if generator_supports_multiple_toolsets:
+ primary_input = primary_input.replace(".exe", "_host.exe")
+ inputs = OrderedSet()
+ outputs = OrderedSet()
+ descriptions = []
+ commands = []
+ for action in actions:
+
+ def fixup_host_exe(i):
+ if "$(OutDir)" in i:
+ i = i.replace(".exe", "_host.exe")
+ return i
+
+ if generator_supports_multiple_toolsets:
+ action["inputs"] = [fixup_host_exe(i) for i in action["inputs"]]
+ inputs.update(OrderedSet(action["inputs"]))
+ outputs.update(OrderedSet(action["outputs"]))
+ descriptions.append(action["description"])
+ cmd = action["command"]
+ if generator_supports_multiple_toolsets:
+ cmd = cmd.replace(".exe", "_host.exe")
+ # For most actions, add 'call' so that actions that invoke batch files
+ # return and continue executing. msbuild_use_call provides a way to
+ # disable this but I have not seen any adverse effect from doing that
+ # for everything.
+ if action.get("msbuild_use_call", True):
+ cmd = "call " + cmd
+ commands.append(cmd)
+ # Add the custom build action for one input file.
+ description = ", and also ".join(descriptions)
+
+ # We can't join the commands simply with && because the command line will
+ # get too long. See also _AddActions: cygwin's setup_env mustn't be called
+ # for every invocation or the command that sets the PATH will grow too
+ # long.
+ command = "\r\n".join(
+ [c + "\r\nif %errorlevel% neq 0 exit /b %errorlevel%" for c in commands]
+ )
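+        # e.g. ["call a.bat", "call b.bat"] becomes
+        #   "call a.bat\r\nif %errorlevel% neq 0 exit /b %errorlevel%\r\n
+        #    call b.bat\r\nif %errorlevel% neq 0 exit /b %errorlevel%"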
+ _AddMSBuildAction(
+ spec,
+ primary_input,
+ inputs,
+ outputs,
+ command,
+ description,
+ sources_handled_by_action,
+ actions_spec,
+ )
+ return actions_spec, sources_handled_by_action
+
+
+def _AddMSBuildAction(
+ spec,
+ primary_input,
+ inputs,
+ outputs,
+ cmd,
+ description,
+ sources_handled_by_action,
+ actions_spec,
+):
+ command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd)
+ primary_input = _FixPath(primary_input)
+ inputs_array = _FixPaths(inputs)
+ outputs_array = _FixPaths(outputs)
+ additional_inputs = ";".join([i for i in inputs_array if i != primary_input])
+ outputs = ";".join(outputs_array)
+ sources_handled_by_action.add(primary_input)
+ action_spec = ["CustomBuild", {"Include": primary_input}]
+ action_spec.extend(
+ # TODO(jeanluc) 'Document' for all or just if as_sources?
+ [
+ ["FileType", "Document"],
+ ["Command", command],
+ ["Message", description],
+ ["Outputs", outputs],
+ ]
+ )
+ if additional_inputs:
+ action_spec.append(["AdditionalInputs", additional_inputs])
+ actions_spec.append(action_spec)
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
new file mode 100755
index 0000000..e80b57f
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python3
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the msvs.py file. """
+
+import gyp.generator.msvs as msvs
+import unittest
+
+from io import StringIO
+
+
+class TestSequenceFunctions(unittest.TestCase):
+ def setUp(self):
+ self.stderr = StringIO()
+
+ def test_GetLibraries(self):
+ self.assertEqual(msvs._GetLibraries({}), [])
+ self.assertEqual(msvs._GetLibraries({"libraries": []}), [])
+ self.assertEqual(
+ msvs._GetLibraries({"other": "foo", "libraries": ["a.lib"]}), ["a.lib"]
+ )
+ self.assertEqual(msvs._GetLibraries({"libraries": ["-la"]}), ["a.lib"])
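+        # _GetLibraries de-duplicates, keeping only the last occurrence of
+        # each library, as the expected ordering below shows.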
+ self.assertEqual(
+ msvs._GetLibraries(
+ {
+ "libraries": [
+ "a.lib",
+ "b.lib",
+ "c.lib",
+ "-lb.lib",
+ "-lb.lib",
+ "d.lib",
+ "a.lib",
+ ]
+ }
+ ),
+ ["c.lib", "b.lib", "d.lib", "a.lib"],
+ )
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
new file mode 100644
index 0000000..ca04ee1
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
@@ -0,0 +1,2936 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import collections
+import copy
+import hashlib
+import json
+import multiprocessing
+import os.path
+import re
+import signal
+import subprocess
+import sys
+import gyp
+import gyp.common
+import gyp.msvs_emulation
+import gyp.MSVSUtil as MSVSUtil
+import gyp.xcode_emulation
+
+from io import StringIO
+
+from gyp.common import GetEnvironFallback
+import gyp.ninja_syntax as ninja_syntax
+
+generator_default_variables = {
+ "EXECUTABLE_PREFIX": "",
+ "EXECUTABLE_SUFFIX": "",
+ "STATIC_LIB_PREFIX": "lib",
+ "STATIC_LIB_SUFFIX": ".a",
+ "SHARED_LIB_PREFIX": "lib",
+ # Gyp expects the following variables to be expandable by the build
+ # system to the appropriate locations. Ninja prefers paths to be
+ # known at gyp time. To resolve this, introduce special
+    # variables starting with $! and $| (which begin with a $ so gyp knows
+    # they should be treated specially, but are otherwise invalid
+    # ninja/shell variables) that are passed to gyp here but expanded
+ # before writing out into the target .ninja files; see
+ # ExpandSpecial.
+ # $! is used for variables that represent a path and that can only appear at
+ # the start of a string, while $| is used for variables that can appear
+ # anywhere in a string.
+ "INTERMEDIATE_DIR": "$!INTERMEDIATE_DIR",
+ "SHARED_INTERMEDIATE_DIR": "$!PRODUCT_DIR/gen",
+ "PRODUCT_DIR": "$!PRODUCT_DIR",
+ "CONFIGURATION_NAME": "$|CONFIGURATION_NAME",
+ # Special variables that may be used by gyp 'rule' targets.
+ # We generate definitions for these variables on the fly when processing a
+ # rule.
+ "RULE_INPUT_ROOT": "${root}",
+ "RULE_INPUT_DIRNAME": "${dirname}",
+ "RULE_INPUT_PATH": "${source}",
+ "RULE_INPUT_EXT": "${ext}",
+ "RULE_INPUT_NAME": "${name}",
+}
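+# For example, NinjaWriter.ExpandSpecial rewrites "$!PRODUCT_DIR/gen/foo.h" to
+# "<product_dir>/gen/foo.h" (or "gen/foo.h" when the cwd already is the
+# product dir) and "$|CONFIGURATION_NAME" to the active configuration name.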
+
+# Placates pylint.
+generator_additional_non_configuration_keys = []
+generator_additional_path_sections = []
+generator_extra_sources_for_rules = []
+generator_filelist_paths = None
+
+generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
+
+
+def StripPrefix(arg, prefix):
+ if arg.startswith(prefix):
+ return arg[len(prefix) :]
+ return arg
+
+
+def QuoteShellArgument(arg, flavor):
+ """Quote a string such that it will be interpreted as a single argument
+ by the shell."""
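+    # e.g. for the POSIX branch below, "don't" is quoted as 'don'"'"'t'.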
+ # Rather than attempting to enumerate the bad shell characters, just
+ # allow common OK ones and quote anything else.
+ if re.match(r"^[a-zA-Z0-9_=.\\/-]+$", arg):
+ return arg # No quoting necessary.
+ if flavor == "win":
+ return gyp.msvs_emulation.QuoteForRspFile(arg)
+ return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
+
+
+def Define(d, flavor):
+ """Takes a preprocessor define and returns a -D parameter that's ninja- and
+ shell-escaped."""
+ if flavor == "win":
+ # cl.exe replaces literal # characters with = in preprocessor definitions for
+ # some reason. Octal-encode to work around that.
+ d = d.replace("#", "\\%03o" % ord("#"))
+ return QuoteShellArgument(ninja_syntax.escape("-D" + d), flavor)
+
+
+def AddArch(output, arch):
+ """Adds an arch string to an output path."""
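+    # e.g. AddArch("obj/foo.o", "arm64") returns "obj/foo.arm64.o".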
+ output, extension = os.path.splitext(output)
+ return f"{output}.{arch}{extension}"
+
+
+class Target:
+ """Target represents the paths used within a single gyp target.
+
+ Conceptually, building a single target A is a series of steps:
+
+ 1) actions/rules/copies generates source/resources/etc.
+ 2) compiles generates .o files
+ 3) link generates a binary (library/executable)
+ 4) bundle merges the above in a mac bundle
+
+ (Any of these steps can be optional.)
+
+ From a build ordering perspective, a dependent target B could just
+ depend on the last output of this series of steps.
+
+    But some dependent commands need to reach inside the box.
+ For example, when linking B it needs to get the path to the static
+ library generated by A.
+
+ This object stores those paths. To keep things simple, member
+ variables only store concrete paths to single files, while methods
+ compute derived values like "the last output of the target".
+ """
+
+ def __init__(self, type):
+ # Gyp type ("static_library", etc.) of this target.
+ self.type = type
+ # File representing whether any input dependencies necessary for
+ # dependent actions have completed.
+ self.preaction_stamp = None
+ # File representing whether any input dependencies necessary for
+ # dependent compiles have completed.
+ self.precompile_stamp = None
+ # File representing the completion of actions/rules/copies, if any.
+ self.actions_stamp = None
+ # Path to the output of the link step, if any.
+ self.binary = None
+ # Path to the file representing the completion of building the bundle,
+ # if any.
+ self.bundle = None
+ # On Windows, incremental linking requires linking against all the .objs
+ # that compose a .lib (rather than the .lib itself). That list is stored
+ # here. In this case, we also need to save the compile_deps for the target,
+ # so that the target that directly depends on the .objs can also depend
+ # on those.
+ self.component_objs = None
+ self.compile_deps = None
+ # Windows only. The import .lib is the output of a build step, but
+ # because dependents only link against the lib (not both the lib and the
+ # dll) we keep track of the import library here.
+ self.import_lib = None
+ # Track if this target contains any C++ files, to decide if gcc or g++
+ # should be used for linking.
+ self.uses_cpp = False
+
+ def Linkable(self):
+ """Return true if this is a target that can be linked against."""
+ return self.type in ("static_library", "shared_library")
+
+ def UsesToc(self, flavor):
+ """Return true if the target should produce a restat rule based on a TOC
+ file."""
+ # For bundles, the .TOC should be produced for the binary, not for
+ # FinalOutput(). But the naive approach would put the TOC file into the
+ # bundle, so don't do this for bundles for now.
+ if flavor == "win" or self.bundle:
+ return False
+ return self.type in ("shared_library", "loadable_module")
+
+ def PreActionInput(self, flavor):
+ """Return the path, if any, that should be used as a dependency of
+ any dependent action step."""
+ if self.UsesToc(flavor):
+ return self.FinalOutput() + ".TOC"
+ return self.FinalOutput() or self.preaction_stamp
+
+ def PreCompileInput(self):
+ """Return the path, if any, that should be used as a dependency of
+ any dependent compile step."""
+ return self.actions_stamp or self.precompile_stamp
+
+ def FinalOutput(self):
+ """Return the last output of the target, which depends on all prior
+ steps."""
+ return self.bundle or self.binary or self.actions_stamp
+
+
+# A small discourse on paths as used within the Ninja build:
+# All files we produce (both at gyp and at build time) appear in the
+# build directory (e.g. out/Debug).
+#
+# Paths within a given .gyp file are always relative to the directory
+# containing the .gyp file. Call these "gyp paths". This includes
+# sources as well as the starting directory a given gyp rule/action
+# expects to be run from. We call the path from the source root to
+# the gyp file the "base directory" within the per-.gyp-file
+# NinjaWriter code.
+#
+# All paths as written into the .ninja files are relative to the build
+# directory. Call these paths "ninja paths".
+#
+# We translate between these two notions of paths with two helper
+# functions:
+#
+# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
+# into the equivalent ninja path.
+#
+# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
+# an output file; the result can be namespaced such that it is unique
+# to the input file name as well as the output target name.
+
+
+class NinjaWriter:
+ def __init__(
+ self,
+ hash_for_rules,
+ target_outputs,
+ base_dir,
+ build_dir,
+ output_file,
+ toplevel_build,
+ output_file_name,
+ flavor,
+ toplevel_dir=None,
+ ):
+ """
+ base_dir: path from source root to directory containing this gyp file,
+ by gyp semantics, all input paths are relative to this
+ build_dir: path from source root to build output
+ toplevel_dir: path to the toplevel directory
+ """
+
+ self.hash_for_rules = hash_for_rules
+ self.target_outputs = target_outputs
+ self.base_dir = base_dir
+ self.build_dir = build_dir
+ self.ninja = ninja_syntax.Writer(output_file)
+ self.toplevel_build = toplevel_build
+ self.output_file_name = output_file_name
+
+ self.flavor = flavor
+ self.abs_build_dir = None
+ if toplevel_dir is not None:
+ self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, build_dir))
+ self.obj_ext = ".obj" if flavor == "win" else ".o"
+ if flavor == "win":
+ # See docstring of msvs_emulation.GenerateEnvironmentFiles().
+ self.win_env = {}
+ for arch in ("x86", "x64"):
+ self.win_env[arch] = "environment." + arch
+
+ # Relative path from build output dir to base dir.
+ build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
+ self.build_to_base = os.path.join(build_to_top, base_dir)
+ # Relative path from base dir to build dir.
+ base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
+ self.base_to_build = os.path.join(base_to_top, build_dir)
+
+ def ExpandSpecial(self, path, product_dir=None):
+ """Expand specials like $!PRODUCT_DIR in |path|.
+
+ If |product_dir| is None, assumes the cwd is already the product
+ dir. Otherwise, |product_dir| is the relative path to the product
+ dir.
+ """
+
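+        # e.g. "$!PRODUCT_DIR/gen/a.h" becomes "../out/gen/a.h" when
+        # product_dir is "../out", and "gen/a.h" when product_dir is None.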
+ PRODUCT_DIR = "$!PRODUCT_DIR"
+ if PRODUCT_DIR in path:
+ if product_dir:
+ path = path.replace(PRODUCT_DIR, product_dir)
+ else:
+ path = path.replace(PRODUCT_DIR + "/", "")
+ path = path.replace(PRODUCT_DIR + "\\", "")
+ path = path.replace(PRODUCT_DIR, ".")
+
+ INTERMEDIATE_DIR = "$!INTERMEDIATE_DIR"
+ if INTERMEDIATE_DIR in path:
+ int_dir = self.GypPathToUniqueOutput("gen")
+ # GypPathToUniqueOutput generates a path relative to the product dir,
+ # so insert product_dir in front if it is provided.
+ path = path.replace(
+ INTERMEDIATE_DIR, os.path.join(product_dir or "", int_dir)
+ )
+
+ CONFIGURATION_NAME = "$|CONFIGURATION_NAME"
+ path = path.replace(CONFIGURATION_NAME, self.config_name)
+
+ return path
+
+ def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
+ if self.flavor == "win":
+ path = self.msvs_settings.ConvertVSMacros(path, config=self.config_name)
+ path = path.replace(generator_default_variables["RULE_INPUT_ROOT"], root)
+ path = path.replace(generator_default_variables["RULE_INPUT_DIRNAME"], dirname)
+ path = path.replace(generator_default_variables["RULE_INPUT_PATH"], source)
+ path = path.replace(generator_default_variables["RULE_INPUT_EXT"], ext)
+ path = path.replace(generator_default_variables["RULE_INPUT_NAME"], name)
+ return path
+
+ def GypPathToNinja(self, path, env=None):
+ """Translate a gyp path to a ninja path, optionally expanding environment
+ variable references in |path| with |env|.
+
+ See the above discourse on path conversions."""
+ if env:
+ if self.flavor == "mac":
+ path = gyp.xcode_emulation.ExpandEnvVars(path, env)
+ elif self.flavor == "win":
+ path = gyp.msvs_emulation.ExpandMacros(path, env)
+ if path.startswith("$!"):
+ expanded = self.ExpandSpecial(path)
+ if self.flavor == "win":
+ expanded = os.path.normpath(expanded)
+ return expanded
+ if "$|" in path:
+ path = self.ExpandSpecial(path)
+ assert "$" not in path, path
+ return os.path.normpath(os.path.join(self.build_to_base, path))
+
+ def GypPathToUniqueOutput(self, path, qualified=True):
+ """Translate a gyp path to a ninja path for writing output.
+
+ If qualified is True, qualify the resulting filename with the name
+ of the target. This is necessary when e.g. compiling the same
+ path twice for two separate output targets.
+
+ See the above discourse on path conversions."""
+
+ path = self.ExpandSpecial(path)
+ assert not path.startswith("$"), path
+
+ # Translate the path following this scheme:
+ # Input: foo/bar.gyp, target targ, references baz/out.o
+ # Output: obj/foo/baz/targ.out.o (if qualified)
+ # obj/foo/baz/out.o (otherwise)
+ # (and obj.host instead of obj for cross-compiles)
+ #
+ # Why this scheme and not some other one?
+ # 1) for a given input, you can compute all derived outputs by matching
+ # its path, even if the input is brought via a gyp file with '..'.
+ # 2) simple files like libraries and stamps have a simple filename.
+
+ obj = "obj"
+ if self.toolset != "target":
+ obj += "." + self.toolset
+
+ path_dir, path_basename = os.path.split(path)
+ assert not os.path.isabs(path_dir), (
+            "'%s' cannot be an absolute path (see crbug.com/462153)." % path_dir
+ )
+
+ if qualified:
+ path_basename = self.name + "." + path_basename
+ return os.path.normpath(
+ os.path.join(obj, self.base_dir, path_dir, path_basename)
+ )
+
+ def WriteCollapsedDependencies(self, name, targets, order_only=None):
+ """Given a list of targets, return a path for a single file
+ representing the result of building all the targets or None.
+
+ Uses a stamp file if necessary."""
+
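+        # e.g. several outputs collapse into one "<name>.stamp" built with the
+        # 'stamp' rule, while a single target is returned unchanged.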
+ assert targets == [item for item in targets if item], targets
+ if len(targets) == 0:
+ assert not order_only
+ return None
+ if len(targets) > 1 or order_only:
+ stamp = self.GypPathToUniqueOutput(name + ".stamp")
+ targets = self.ninja.build(stamp, "stamp", targets, order_only=order_only)
+ self.ninja.newline()
+ return targets[0]
+
+ def _SubninjaNameForArch(self, arch):
+ output_file_base = os.path.splitext(self.output_file_name)[0]
+ return f"{output_file_base}.{arch}.ninja"
+
+ def WriteSpec(self, spec, config_name, generator_flags):
+ """The main entry point for NinjaWriter: write the build rules for a spec.
+
+ Returns a Target object, which represents the output paths for this spec.
+ Returns None if there are no outputs (e.g. a settings-only 'none' type
+ target)."""
+
+ self.config_name = config_name
+ self.name = spec["target_name"]
+ self.toolset = spec["toolset"]
+ config = spec["configurations"][config_name]
+ self.target = Target(spec["type"])
+ self.is_standalone_static_library = bool(
+ spec.get("standalone_static_library", 0)
+ )
+
+ self.target_rpath = generator_flags.get("target_rpath", r"\$$ORIGIN/lib/")
+
+ self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
+ self.xcode_settings = self.msvs_settings = None
+ if self.flavor == "mac":
+ self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+ mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
+ if mac_toolchain_dir:
+ self.xcode_settings.mac_toolchain_dir = mac_toolchain_dir
+
+ if self.flavor == "win":
+ self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, generator_flags)
+ arch = self.msvs_settings.GetArch(config_name)
+ self.ninja.variable("arch", self.win_env[arch])
+ self.ninja.variable("cc", "$cl_" + arch)
+ self.ninja.variable("cxx", "$cl_" + arch)
+ self.ninja.variable("cc_host", "$cl_" + arch)
+ self.ninja.variable("cxx_host", "$cl_" + arch)
+ self.ninja.variable("asm", "$ml_" + arch)
+
+ if self.flavor == "mac":
+ self.archs = self.xcode_settings.GetActiveArchs(config_name)
+ if len(self.archs) > 1:
+ self.arch_subninjas = {
+ arch: ninja_syntax.Writer(
+ OpenOutput(
+ os.path.join(
+ self.toplevel_build, self._SubninjaNameForArch(arch)
+ ),
+ "w",
+ )
+ )
+ for arch in self.archs
+ }
+
+ # Compute predepends for all rules.
+ # actions_depends is the dependencies this target depends on before running
+ # any of its action/rule/copy steps.
+ # compile_depends is the dependencies this target depends on before running
+ # any of its compile steps.
+ actions_depends = []
+ compile_depends = []
+ # TODO(evan): it is rather confusing which things are lists and which
+ # are strings. Fix these.
+ if "dependencies" in spec:
+ for dep in spec["dependencies"]:
+ if dep in self.target_outputs:
+ target = self.target_outputs[dep]
+ actions_depends.append(target.PreActionInput(self.flavor))
+ compile_depends.append(target.PreCompileInput())
+ if target.uses_cpp:
+ self.target.uses_cpp = True
+ actions_depends = [item for item in actions_depends if item]
+ compile_depends = [item for item in compile_depends if item]
+ actions_depends = self.WriteCollapsedDependencies(
+ "actions_depends", actions_depends
+ )
+ compile_depends = self.WriteCollapsedDependencies(
+ "compile_depends", compile_depends
+ )
+ self.target.preaction_stamp = actions_depends
+ self.target.precompile_stamp = compile_depends
+
+ # Write out actions, rules, and copies. These must happen before we
+ # compile any sources, so compute a list of predependencies for sources
+ # while we do it.
+ extra_sources = []
+ mac_bundle_depends = []
+ self.target.actions_stamp = self.WriteActionsRulesCopies(
+ spec, extra_sources, actions_depends, mac_bundle_depends
+ )
+
+ # If we have actions/rules/copies, we depend directly on those, but
+ # otherwise we depend on dependent target's actions/rules/copies etc.
+ # We never need to explicitly depend on previous target's link steps,
+ # because no compile ever depends on them.
+ compile_depends_stamp = self.target.actions_stamp or compile_depends
+
+ # Write out the compilation steps, if any.
+ link_deps = []
+ try:
+ sources = extra_sources + spec.get("sources", [])
+ except TypeError:
+ print("extra_sources: ", str(extra_sources))
+ print('spec.get("sources"): ', str(spec.get("sources")))
+ raise
+ if sources:
+ if self.flavor == "mac" and len(self.archs) > 1:
+ # Write subninja file containing compile and link commands scoped to
+ # a single arch if a fat binary is being built.
+ for arch in self.archs:
+ self.ninja.subninja(self._SubninjaNameForArch(arch))
+
+ pch = None
+ if self.flavor == "win":
+ gyp.msvs_emulation.VerifyMissingSources(
+ sources, self.abs_build_dir, generator_flags, self.GypPathToNinja
+ )
+ pch = gyp.msvs_emulation.PrecompiledHeader(
+ self.msvs_settings,
+ config_name,
+ self.GypPathToNinja,
+ self.GypPathToUniqueOutput,
+ self.obj_ext,
+ )
+ else:
+ pch = gyp.xcode_emulation.MacPrefixHeader(
+ self.xcode_settings,
+ self.GypPathToNinja,
+ lambda path, lang: self.GypPathToUniqueOutput(path + "-" + lang),
+ )
+ link_deps = self.WriteSources(
+ self.ninja,
+ config_name,
+ config,
+ sources,
+ compile_depends_stamp,
+ pch,
+ spec,
+ )
+ # Some actions/rules output 'sources' that are already object files.
+ obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
+ if obj_outputs:
+ if self.flavor != "mac" or len(self.archs) == 1:
+ link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
+ else:
+ print(
+ "Warning: Actions/rules writing object files don't work with "
+ "multiarch targets, dropping. (target %s)" % spec["target_name"]
+ )
+ elif self.flavor == "mac" and len(self.archs) > 1:
+ link_deps = collections.defaultdict(list)
+
+ compile_deps = self.target.actions_stamp or actions_depends
+ if self.flavor == "win" and self.target.type == "static_library":
+ self.target.component_objs = link_deps
+ self.target.compile_deps = compile_deps
+
+ # Write out a link step, if needed.
+ output = None
+ is_empty_bundle = not link_deps and not mac_bundle_depends
+ if link_deps or self.target.actions_stamp or actions_depends:
+ output = self.WriteTarget(
+ spec, config_name, config, link_deps, compile_deps
+ )
+ if self.is_mac_bundle:
+ mac_bundle_depends.append(output)
+
+ # Bundle all of the above together, if needed.
+ if self.is_mac_bundle:
+ output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)
+
+ if not output:
+ return None
+
+ assert self.target.FinalOutput(), output
+ return self.target
+
+ def _WinIdlRule(self, source, prebuild, outputs):
+ """Handle the implicit VS .idl rule for one source file. Fills |outputs|
+ with files that are generated."""
+ outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
+ source, self.config_name
+ )
+ outdir = self.GypPathToNinja(outdir)
+
+ def fix_path(path, rel=None):
+ path = os.path.join(outdir, path)
+ dirname, basename = os.path.split(source)
+ root, ext = os.path.splitext(basename)
+ path = self.ExpandRuleVariables(path, root, dirname, source, ext, basename)
+ if rel:
+ path = os.path.relpath(path, rel)
+ return path
+
+ vars = [(name, fix_path(value, outdir)) for name, value in vars]
+ output = [fix_path(p) for p in output]
+ vars.append(("outdir", outdir))
+ vars.append(("idlflags", flags))
+ input = self.GypPathToNinja(source)
+ self.ninja.build(output, "idl", input, variables=vars, order_only=prebuild)
+ outputs.extend(output)
+
+ def WriteWinIdlFiles(self, spec, prebuild):
+ """Writes rules to match MSVS's implicit idl handling."""
+ assert self.flavor == "win"
+ if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
+ return []
+ outputs = []
+ for source in filter(lambda x: x.endswith(".idl"), spec["sources"]):
+ self._WinIdlRule(source, prebuild, outputs)
+ return outputs
+
+ def WriteActionsRulesCopies(
+ self, spec, extra_sources, prebuild, mac_bundle_depends
+ ):
+ """Write out the Actions, Rules, and Copies steps. Return a path
+ representing the outputs of these steps."""
+ outputs = []
+ if self.is_mac_bundle:
+ mac_bundle_resources = spec.get("mac_bundle_resources", [])[:]
+ else:
+ mac_bundle_resources = []
+ extra_mac_bundle_resources = []
+
+ if "actions" in spec:
+ outputs += self.WriteActions(
+ spec["actions"], extra_sources, prebuild, extra_mac_bundle_resources
+ )
+ if "rules" in spec:
+ outputs += self.WriteRules(
+ spec["rules"],
+ extra_sources,
+ prebuild,
+ mac_bundle_resources,
+ extra_mac_bundle_resources,
+ )
+ if "copies" in spec:
+ outputs += self.WriteCopies(spec["copies"], prebuild, mac_bundle_depends)
+
+ if "sources" in spec and self.flavor == "win":
+ outputs += self.WriteWinIdlFiles(spec, prebuild)
+
+ if self.xcode_settings and self.xcode_settings.IsIosFramework():
+ self.WriteiOSFrameworkHeaders(spec, outputs, prebuild)
+
+ stamp = self.WriteCollapsedDependencies("actions_rules_copies", outputs)
+
+ if self.is_mac_bundle:
+ xcassets = self.WriteMacBundleResources(
+ extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends
+ )
+ partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
+ self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)
+
+ return stamp
+
+ def GenerateDescription(self, verb, message, fallback):
+ """Generate and return a description of a build step.
+
+ |verb| is the short summary, e.g. ACTION or RULE.
+ |message| is a hand-written description, or None if not available.
+ |fallback| is the gyp-level name of the step, usable as a fallback.
+ """
+ if self.toolset != "target":
+ verb += "(%s)" % self.toolset
+ if message:
+ return f"{verb} {self.ExpandSpecial(message)}"
+ else:
+ return f"{verb} {self.name}: {fallback}"
+
+ def WriteActions(
+ self, actions, extra_sources, prebuild, extra_mac_bundle_resources
+ ):
+ # Actions cd into the base directory.
+ env = self.GetToolchainEnv()
+ all_outputs = []
+ for action in actions:
+ # First write out a rule for the action.
+ name = "{}_{}".format(action["action_name"], self.hash_for_rules)
+ description = self.GenerateDescription(
+ "ACTION", action.get("message", None), name
+ )
+ win_shell_flags = (
+ self.msvs_settings.GetRuleShellFlags(action)
+ if self.flavor == "win"
+ else None
+ )
+ args = action["action"]
+ depfile = action.get("depfile", None)
+ if depfile:
+ depfile = self.ExpandSpecial(depfile, self.base_to_build)
+ pool = "console" if int(action.get("ninja_use_console", 0)) else None
+ rule_name, _ = self.WriteNewNinjaRule(
+ name, args, description, win_shell_flags, env, pool, depfile=depfile
+ )
+
+ inputs = [self.GypPathToNinja(i, env) for i in action["inputs"]]
+ if int(action.get("process_outputs_as_sources", False)):
+ extra_sources += action["outputs"]
+ if int(action.get("process_outputs_as_mac_bundle_resources", False)):
+ extra_mac_bundle_resources += action["outputs"]
+ outputs = [self.GypPathToNinja(o, env) for o in action["outputs"]]
+
+ # Then write out an edge using the rule.
+ self.ninja.build(outputs, rule_name, inputs, order_only=prebuild)
+ all_outputs += outputs
+
+ self.ninja.newline()
+
+ return all_outputs
+
+ def WriteRules(
+ self,
+ rules,
+ extra_sources,
+ prebuild,
+ mac_bundle_resources,
+ extra_mac_bundle_resources,
+ ):
+ env = self.GetToolchainEnv()
+ all_outputs = []
+ for rule in rules:
+ # Skip a rule with no action and no inputs.
+ if "action" not in rule and not rule.get("rule_sources", []):
+ continue
+
+ # First write out a rule for the rule action.
+ name = "{}_{}".format(rule["rule_name"], self.hash_for_rules)
+
+ args = rule["action"]
+ description = self.GenerateDescription(
+ "RULE",
+ rule.get("message", None),
+ ("%s " + generator_default_variables["RULE_INPUT_PATH"]) % name,
+ )
+ win_shell_flags = (
+ self.msvs_settings.GetRuleShellFlags(rule)
+ if self.flavor == "win"
+ else None
+ )
+ pool = "console" if int(rule.get("ninja_use_console", 0)) else None
+ rule_name, args = self.WriteNewNinjaRule(
+ name, args, description, win_shell_flags, env, pool
+ )
+
+ # TODO: if the command references the outputs directly, we should
+ # simplify it to just use $out.
+
+ # Rules can potentially make use of some special variables which
+ # must vary per source file.
+ # Compute the list of variables we'll need to provide.
+ special_locals = ("source", "root", "dirname", "ext", "name")
+ needed_variables = {"source"}
+ for argument in args:
+ for var in special_locals:
+ if "${%s}" % var in argument:
+ needed_variables.add(var)
+ needed_variables = sorted(needed_variables)
+
+ def cygwin_munge(path):
+ # pylint: disable=cell-var-from-loop
+ if win_shell_flags and win_shell_flags.cygwin:
+ return path.replace("\\", "/")
+ return path
+
+ inputs = [self.GypPathToNinja(i, env) for i in rule.get("inputs", [])]
+
+ # If there are n source files matching the rule, and m additional rule
+ # inputs, then adding 'inputs' to each build edge written below will
+ # write m * n inputs. Collapsing reduces this to m + n.
+ sources = rule.get("rule_sources", [])
+ num_inputs = len(inputs)
+ if prebuild:
+ num_inputs += 1
+ if num_inputs > 2 and len(sources) > 2:
+ inputs = [
+ self.WriteCollapsedDependencies(
+ rule["rule_name"], inputs, order_only=prebuild
+ )
+ ]
+ prebuild = []
+
+ # For each source file, write an edge that generates all the outputs.
+ for source in sources:
+ source = os.path.normpath(source)
+ dirname, basename = os.path.split(source)
+ root, ext = os.path.splitext(basename)
+
+ # Gather the list of inputs and outputs, expanding $vars if possible.
+ outputs = [
+ self.ExpandRuleVariables(o, root, dirname, source, ext, basename)
+ for o in rule["outputs"]
+ ]
+
+ if int(rule.get("process_outputs_as_sources", False)):
+ extra_sources += outputs
+
+ was_mac_bundle_resource = source in mac_bundle_resources
+ if was_mac_bundle_resource or int(
+ rule.get("process_outputs_as_mac_bundle_resources", False)
+ ):
+ extra_mac_bundle_resources += outputs
+ # Note: This is n_resources * n_outputs_in_rule.
+ # Put to-be-removed items in a set and
+ # remove them all in a single pass
+ # if this becomes a performance issue.
+ if was_mac_bundle_resource:
+ mac_bundle_resources.remove(source)
+
+ extra_bindings = []
+ for var in needed_variables:
+ if var == "root":
+ extra_bindings.append(("root", cygwin_munge(root)))
+ elif var == "dirname":
+ # '$dirname' is a parameter to the rule action, which means
+ # it shouldn't be converted to a Ninja path. But we don't
+ # want $!PRODUCT_DIR in there either.
+ dirname_expanded = self.ExpandSpecial(
+ dirname, self.base_to_build
+ )
+ extra_bindings.append(
+ ("dirname", cygwin_munge(dirname_expanded))
+ )
+ elif var == "source":
+ # '$source' is a parameter to the rule action, which means
+ # it shouldn't be converted to a Ninja path. But we don't
+ # want $!PRODUCT_DIR in there either.
+ source_expanded = self.ExpandSpecial(source, self.base_to_build)
+ extra_bindings.append(("source", cygwin_munge(source_expanded)))
+ elif var == "ext":
+ extra_bindings.append(("ext", ext))
+ elif var == "name":
+ extra_bindings.append(("name", cygwin_munge(basename)))
+ else:
+ assert var is None, repr(var)
+
+ outputs = [self.GypPathToNinja(o, env) for o in outputs]
+ if self.flavor == "win":
+ # WriteNewNinjaRule uses unique_name to create a rsp file on win.
+ extra_bindings.append(
+ ("unique_name", hashlib.md5(outputs[0]).hexdigest())
+ )
+
+ self.ninja.build(
+ outputs,
+ rule_name,
+ self.GypPathToNinja(source),
+ implicit=inputs,
+ order_only=prebuild,
+ variables=extra_bindings,
+ )
+
+ all_outputs.extend(outputs)
+
+ return all_outputs
+
+ def WriteCopies(self, copies, prebuild, mac_bundle_depends):
+ outputs = []
+ if self.xcode_settings:
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetToolchainEnv(additional_settings=extra_env)
+ else:
+ env = self.GetToolchainEnv()
+ for to_copy in copies:
+ for path in to_copy["files"]:
+ # Normalize the path so trailing slashes don't confuse us.
+ path = os.path.normpath(path)
+ basename = os.path.split(path)[1]
+ src = self.GypPathToNinja(path, env)
+ dst = self.GypPathToNinja(
+ os.path.join(to_copy["destination"], basename), env
+ )
+ outputs += self.ninja.build(dst, "copy", src, order_only=prebuild)
+ if self.is_mac_bundle:
+ # gyp has mac_bundle_resources to copy things into a bundle's
+ # Resources folder, but there's no built-in way to copy files
+ # to other places in the bundle.
+ # Hence, some targets use copies for this.
+ # Check if this file is copied into the current bundle,
+ # and if so add it to the bundle depends so
+ # that dependent targets get rebuilt if the copy input changes.
+ if dst.startswith(
+ self.xcode_settings.GetBundleContentsFolderPath()
+ ):
+ mac_bundle_depends.append(dst)
+
+ return outputs
+
+ def WriteiOSFrameworkHeaders(self, spec, outputs, prebuild):
+ """Prebuild steps to generate hmap files and copy headers to destination."""
+ framework = self.ComputeMacBundleOutput()
+ all_sources = spec["sources"]
+ copy_headers = spec["mac_framework_headers"]
+ output = self.GypPathToUniqueOutput("headers.hmap")
+ self.xcode_settings.header_map_path = output
+        all_headers = [
+            self.GypPathToNinja(x) for x in all_sources if x.endswith(".h")
+        ]
+        variables = [
+            ("framework", framework),
+            ("copy_headers", [self.GypPathToNinja(x) for x in copy_headers]),
+        ]
+ outputs.extend(
+ self.ninja.build(
+ output,
+ "compile_ios_framework_headers",
+ all_headers,
+ variables=variables,
+ order_only=prebuild,
+ )
+ )
+
+ def WriteMacBundleResources(self, resources, bundle_depends):
+ """Writes ninja edges for 'mac_bundle_resources'."""
+ xcassets = []
+
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+ isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
+
+ for output, res in gyp.xcode_emulation.GetMacBundleResources(
+ generator_default_variables["PRODUCT_DIR"],
+ self.xcode_settings,
+ map(self.GypPathToNinja, resources),
+ ):
+ output = self.ExpandSpecial(output)
+ if os.path.splitext(output)[-1] != ".xcassets":
+ self.ninja.build(
+ output,
+ "mac_tool",
+ res,
+ variables=[
+ ("mactool_cmd", "copy-bundle-resource"),
+ ("env", env),
+ ("binary", isBinary),
+ ],
+ )
+ bundle_depends.append(output)
+ else:
+ xcassets.append(res)
+ return xcassets
+
+ def WriteMacXCassets(self, xcassets, bundle_depends):
+ """Writes ninja edges for 'mac_bundle_resources' .xcassets files.
+
+        This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
+        It assumes that the asset catalogs define at least one imageset and
+        thus an Assets.car file will be generated in the application resources
+        directory. If this is not the case, the step will be re-run at each
+        invocation of ninja."""
+ if not xcassets:
+ return
+
+ extra_arguments = {}
+ settings_to_arg = {
+ "XCASSETS_APP_ICON": "app-icon",
+ "XCASSETS_LAUNCH_IMAGE": "launch-image",
+ }
+ settings = self.xcode_settings.xcode_settings[self.config_name]
+ for settings_key, arg_name in settings_to_arg.items():
+ value = settings.get(settings_key)
+ if value:
+ extra_arguments[arg_name] = value
+
+ partial_info_plist = None
+ if extra_arguments:
+ partial_info_plist = self.GypPathToUniqueOutput(
+ "assetcatalog_generated_info.plist"
+ )
+ extra_arguments["output-partial-info-plist"] = partial_info_plist
+
+ outputs = []
+ outputs.append(
+ os.path.join(self.xcode_settings.GetBundleResourceFolder(), "Assets.car")
+ )
+ if partial_info_plist:
+ outputs.append(partial_info_plist)
+
+ keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+
+ bundle_depends.extend(
+ self.ninja.build(
+ outputs,
+ "compile_xcassets",
+ xcassets,
+ variables=[("env", env), ("keys", keys)],
+ )
+ )
+ return partial_info_plist
+
+ def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
+ """Write build rules for bundle Info.plist files."""
+ info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
+ generator_default_variables["PRODUCT_DIR"],
+ self.xcode_settings,
+ self.GypPathToNinja,
+ )
+ if not info_plist:
+ return
+ out = self.ExpandSpecial(out)
+ if defines:
+ # Create an intermediate file to store preprocessed results.
+ intermediate_plist = self.GypPathToUniqueOutput(
+ os.path.basename(info_plist)
+ )
+ defines = " ".join([Define(d, self.flavor) for d in defines])
+ info_plist = self.ninja.build(
+ intermediate_plist,
+ "preprocess_infoplist",
+ info_plist,
+ variables=[("defines", defines)],
+ )
+
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+
+ if partial_info_plist:
+ intermediate_plist = self.GypPathToUniqueOutput("merged_info.plist")
+ info_plist = self.ninja.build(
+ intermediate_plist, "merge_infoplist", [partial_info_plist, info_plist]
+ )
+
+ keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
+ keys = QuoteShellArgument(json.dumps(keys), self.flavor)
+ isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
+ self.ninja.build(
+ out,
+ "copy_infoplist",
+ info_plist,
+ variables=[("env", env), ("keys", keys), ("binary", isBinary)],
+ )
+ bundle_depends.append(out)
+
+ def WriteSources(
+ self,
+ ninja_file,
+ config_name,
+ config,
+ sources,
+ predepends,
+ precompiled_header,
+ spec,
+ ):
+ """Write build rules to compile all of |sources|."""
+ if self.toolset == "host":
+ self.ninja.variable("ar", "$ar_host")
+ self.ninja.variable("cc", "$cc_host")
+ self.ninja.variable("cxx", "$cxx_host")
+ self.ninja.variable("ld", "$ld_host")
+ self.ninja.variable("ldxx", "$ldxx_host")
+ self.ninja.variable("nm", "$nm_host")
+ self.ninja.variable("readelf", "$readelf_host")
+
+ if self.flavor != "mac" or len(self.archs) == 1:
+ return self.WriteSourcesForArch(
+ self.ninja,
+ config_name,
+ config,
+ sources,
+ predepends,
+ precompiled_header,
+ spec,
+ )
+ else:
+ return {
+ arch: self.WriteSourcesForArch(
+ self.arch_subninjas[arch],
+ config_name,
+ config,
+ sources,
+ predepends,
+ precompiled_header,
+ spec,
+ arch=arch,
+ )
+ for arch in self.archs
+ }
+
+ def WriteSourcesForArch(
+ self,
+ ninja_file,
+ config_name,
+ config,
+ sources,
+ predepends,
+ precompiled_header,
+ spec,
+ arch=None,
+ ):
+ """Write build rules to compile all of |sources|."""
+
+ extra_defines = []
+ if self.flavor == "mac":
+ cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
+ cflags_c = self.xcode_settings.GetCflagsC(config_name)
+ cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
+ cflags_objc = ["$cflags_c"] + self.xcode_settings.GetCflagsObjC(config_name)
+ cflags_objcc = ["$cflags_cc"] + self.xcode_settings.GetCflagsObjCC(
+ config_name
+ )
+ elif self.flavor == "win":
+ asmflags = self.msvs_settings.GetAsmflags(config_name)
+ cflags = self.msvs_settings.GetCflags(config_name)
+ cflags_c = self.msvs_settings.GetCflagsC(config_name)
+ cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
+ extra_defines = self.msvs_settings.GetComputedDefines(config_name)
+            # See comment at cc_command for why there are two .pdb files.
+ pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
+ config_name, self.ExpandSpecial
+ )
+ if not pdbpath_c:
+ obj = "obj"
+ if self.toolset != "target":
+ obj += "." + self.toolset
+ pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
+ pdbpath_c = pdbpath + ".c.pdb"
+ pdbpath_cc = pdbpath + ".cc.pdb"
+ self.WriteVariableList(ninja_file, "pdbname_c", [pdbpath_c])
+ self.WriteVariableList(ninja_file, "pdbname_cc", [pdbpath_cc])
+ self.WriteVariableList(ninja_file, "pchprefix", [self.name])
+ else:
+ cflags = config.get("cflags", [])
+ cflags_c = config.get("cflags_c", [])
+ cflags_cc = config.get("cflags_cc", [])
+
+ # Respect environment variables related to build, but target-specific
+ # flags can still override them.
+ if self.toolset == "target":
+ cflags_c = (
+ os.environ.get("CPPFLAGS", "").split()
+ + os.environ.get("CFLAGS", "").split()
+ + cflags_c
+ )
+ cflags_cc = (
+ os.environ.get("CPPFLAGS", "").split()
+ + os.environ.get("CXXFLAGS", "").split()
+ + cflags_cc
+ )
+ elif self.toolset == "host":
+ cflags_c = (
+ os.environ.get("CPPFLAGS_host", "").split()
+ + os.environ.get("CFLAGS_host", "").split()
+ + cflags_c
+ )
+ cflags_cc = (
+ os.environ.get("CPPFLAGS_host", "").split()
+ + os.environ.get("CXXFLAGS_host", "").split()
+ + cflags_cc
+ )
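+        # E.g. CFLAGS=-O2 in the environment plus cflags_c=['-O0'] on the
+        # target yields "... -O2 -O0"; for compilers where the last flag wins,
+        # the target-specific setting takes effect.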
+
+ defines = config.get("defines", []) + extra_defines
+ self.WriteVariableList(
+ ninja_file, "defines", [Define(d, self.flavor) for d in defines]
+ )
+ if self.flavor == "win":
+ self.WriteVariableList(
+ ninja_file, "asmflags", map(self.ExpandSpecial, asmflags)
+ )
+ self.WriteVariableList(
+ ninja_file,
+ "rcflags",
+ [
+ QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
+ for f in self.msvs_settings.GetRcflags(
+ config_name, self.GypPathToNinja
+ )
+ ],
+ )
+
+ include_dirs = config.get("include_dirs", [])
+
+ env = self.GetToolchainEnv()
+ if self.flavor == "win":
+ include_dirs = self.msvs_settings.AdjustIncludeDirs(
+ include_dirs, config_name
+ )
+ self.WriteVariableList(
+ ninja_file,
+ "includes",
+ [
+ QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor)
+ for i in include_dirs
+ ],
+ )
+
+ if self.flavor == "win":
+ midl_include_dirs = config.get("midl_include_dirs", [])
+ midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
+ midl_include_dirs, config_name
+ )
+ self.WriteVariableList(
+ ninja_file,
+ "midl_includes",
+ [
+ QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor)
+ for i in midl_include_dirs
+ ],
+ )
+
+ pch_commands = precompiled_header.GetPchBuildCommands(arch)
+ if self.flavor == "mac":
+ # Most targets use no precompiled headers, so only write these if needed.
+ for ext, var in [
+ ("c", "cflags_pch_c"),
+ ("cc", "cflags_pch_cc"),
+ ("m", "cflags_pch_objc"),
+ ("mm", "cflags_pch_objcc"),
+ ]:
+ include = precompiled_header.GetInclude(ext, arch)
+ if include:
+ ninja_file.variable(var, include)
+
+ arflags = config.get("arflags", [])
+
+ self.WriteVariableList(ninja_file, "cflags", map(self.ExpandSpecial, cflags))
+ self.WriteVariableList(
+ ninja_file, "cflags_c", map(self.ExpandSpecial, cflags_c)
+ )
+ self.WriteVariableList(
+ ninja_file, "cflags_cc", map(self.ExpandSpecial, cflags_cc)
+ )
+ if self.flavor == "mac":
+ self.WriteVariableList(
+ ninja_file, "cflags_objc", map(self.ExpandSpecial, cflags_objc)
+ )
+ self.WriteVariableList(
+ ninja_file, "cflags_objcc", map(self.ExpandSpecial, cflags_objcc)
+ )
+ self.WriteVariableList(ninja_file, "arflags", map(self.ExpandSpecial, arflags))
+ ninja_file.newline()
+ outputs = []
+ has_rc_source = False
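+        # Dispatch each source to a ninja rule by extension: .cc/.cpp/.cxx ->
+        # cxx, .c -> cc, .m -> objc (mac), .mm -> objcxx (mac), .rc -> rc (win),
+        # .asm -> asm (win); unhandled extensions are skipped.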
+ for source in sources:
+ filename, ext = os.path.splitext(source)
+ ext = ext[1:]
+ obj_ext = self.obj_ext
+ if ext in ("cc", "cpp", "cxx"):
+ command = "cxx"
+ self.target.uses_cpp = True
+ elif ext == "c" or (ext == "S" and self.flavor != "win"):
+ command = "cc"
+ elif ext == "s" and self.flavor != "win": # Doesn't generate .o.d files.
+ command = "cc_s"
+ elif (
+ self.flavor == "win"
+ and ext in ("asm", "S")
+ and not self.msvs_settings.HasExplicitAsmRules(spec)
+ ):
+ command = "asm"
+ # Add the _asm suffix as msvs is capable of handling .cc and
+ # .asm files of the same name without collision.
+ obj_ext = "_asm.obj"
+ elif self.flavor == "mac" and ext == "m":
+ command = "objc"
+ elif self.flavor == "mac" and ext == "mm":
+ command = "objcxx"
+ self.target.uses_cpp = True
+ elif self.flavor == "win" and ext == "rc":
+ command = "rc"
+ obj_ext = ".res"
+ has_rc_source = True
+ else:
+ # Ignore unhandled extensions.
+ continue
+ input = self.GypPathToNinja(source)
+ output = self.GypPathToUniqueOutput(filename + obj_ext)
+ if arch is not None:
+ output = AddArch(output, arch)
+ implicit = precompiled_header.GetObjDependencies([input], [output], arch)
+ variables = []
+ if self.flavor == "win":
+ variables, output, implicit = precompiled_header.GetFlagsModifications(
+ input,
+ output,
+ implicit,
+ command,
+ cflags_c,
+ cflags_cc,
+ self.ExpandSpecial,
+ )
+ ninja_file.build(
+ output,
+ command,
+ input,
+ implicit=[gch for _, _, gch in implicit],
+ order_only=predepends,
+ variables=variables,
+ )
+ outputs.append(output)
+
+ if has_rc_source:
+ resource_include_dirs = config.get("resource_include_dirs", include_dirs)
+ self.WriteVariableList(
+ ninja_file,
+ "resource_includes",
+ [
+ QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor)
+ for i in resource_include_dirs
+ ],
+ )
+
+ self.WritePchTargets(ninja_file, pch_commands)
+
+ ninja_file.newline()
+ return outputs
+
+ def WritePchTargets(self, ninja_file, pch_commands):
+ """Writes ninja rules to compile prefix headers."""
+ if not pch_commands:
+ return
+
+ for gch, lang_flag, lang, input in pch_commands:
+ var_name = {
+ "c": "cflags_pch_c",
+ "cc": "cflags_pch_cc",
+ "m": "cflags_pch_objc",
+ "mm": "cflags_pch_objcc",
+ }[lang]
+
+            lang_to_cmd = {
+                "c": "cc",
+                "cc": "cxx",
+                "m": "objc",
+                "mm": "objcxx",
+            }
+            cmd = lang_to_cmd.get(lang)
+ ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
+
+ def WriteLink(self, spec, config_name, config, link_deps, compile_deps):
+ """Write out a link step. Fills out target.binary. """
+ if self.flavor != "mac" or len(self.archs) == 1:
+ return self.WriteLinkForArch(
+ self.ninja, spec, config_name, config, link_deps, compile_deps
+ )
+ else:
+ output = self.ComputeOutput(spec)
+ inputs = [
+ self.WriteLinkForArch(
+ self.arch_subninjas[arch],
+ spec,
+ config_name,
+ config,
+ link_deps[arch],
+ compile_deps,
+ arch=arch,
+ )
+ for arch in self.archs
+ ]
+ extra_bindings = []
+ build_output = output
+ if not self.is_mac_bundle:
+ self.AppendPostbuildVariable(extra_bindings, spec, output, output)
+
+ # TODO(yyanagisawa): more work needed to fix:
+ # https://code.google.com/p/gyp/issues/detail?id=411
+ if (
+ spec["type"] in ("shared_library", "loadable_module")
+ and not self.is_mac_bundle
+ ):
+ extra_bindings.append(("lib", output))
+ self.ninja.build(
+ [output, output + ".TOC"],
+ "solipo",
+ inputs,
+ variables=extra_bindings,
+ )
+ else:
+ self.ninja.build(build_output, "lipo", inputs, variables=extra_bindings)
+ return output
+
+ def WriteLinkForArch(
+ self, ninja_file, spec, config_name, config, link_deps, compile_deps, arch=None
+ ):
+ """Write out a link step. Fills out target.binary. """
+ command = {
+ "executable": "link",
+ "loadable_module": "solink_module",
+ "shared_library": "solink",
+ }[spec["type"]]
+ command_suffix = ""
+
+ implicit_deps = set()
+ solibs = set()
+ order_deps = set()
+
+ if compile_deps:
+ # Normally, the compiles of the target already depend on compile_deps,
+ # but a shared_library target might have no sources and only link together
+ # a few static_library deps, so the link step also needs to depend
+ # on compile_deps to make sure actions in the shared_library target
+ # get run before the link.
+ order_deps.add(compile_deps)
+
+ if "dependencies" in spec:
+ # Two kinds of dependencies:
+ # - Linkable dependencies (like a .a or a .so): add them to the link line.
+ # - Non-linkable dependencies (like a rule that generates a file
+ # and writes a stamp file): add them to implicit_deps
+ extra_link_deps = set()
+ for dep in spec["dependencies"]:
+ target = self.target_outputs.get(dep)
+ if not target:
+ continue
+ linkable = target.Linkable()
+ if linkable:
+ new_deps = []
+ if (
+ self.flavor == "win"
+ and target.component_objs
+ and self.msvs_settings.IsUseLibraryDependencyInputs(config_name)
+ ):
+ new_deps = target.component_objs
+ if target.compile_deps:
+ order_deps.add(target.compile_deps)
+ elif self.flavor == "win" and target.import_lib:
+ new_deps = [target.import_lib]
+ elif target.UsesToc(self.flavor):
+ solibs.add(target.binary)
+ implicit_deps.add(target.binary + ".TOC")
+ else:
+ new_deps = [target.binary]
+ for new_dep in new_deps:
+ if new_dep not in extra_link_deps:
+ extra_link_deps.add(new_dep)
+ link_deps.append(new_dep)
+
+ final_output = target.FinalOutput()
+ if not linkable or final_output != target.binary:
+ implicit_deps.add(final_output)
+
+ extra_bindings = []
+ if self.target.uses_cpp and self.flavor != "win":
+ extra_bindings.append(("ld", "$ldxx"))
+
+ output = self.ComputeOutput(spec, arch)
+ if arch is None and not self.is_mac_bundle:
+ self.AppendPostbuildVariable(extra_bindings, spec, output, output)
+
+ is_executable = spec["type"] == "executable"
+ # The ldflags config key is not used on mac or win. On those platforms
+ # linker flags are set via xcode_settings and msvs_settings, respectively.
+ if self.toolset == "target":
+ env_ldflags = os.environ.get("LDFLAGS", "").split()
+ elif self.toolset == "host":
+ env_ldflags = os.environ.get("LDFLAGS_host", "").split()
+
+ if self.flavor == "mac":
+ ldflags = self.xcode_settings.GetLdflags(
+ config_name,
+ self.ExpandSpecial(generator_default_variables["PRODUCT_DIR"]),
+ self.GypPathToNinja,
+ arch,
+ )
+ ldflags = env_ldflags + ldflags
+ elif self.flavor == "win":
+ manifest_base_name = self.GypPathToUniqueOutput(
+ self.ComputeOutputFileName(spec)
+ )
+ (
+ ldflags,
+ intermediate_manifest,
+ manifest_files,
+ ) = self.msvs_settings.GetLdflags(
+ config_name,
+ self.GypPathToNinja,
+ self.ExpandSpecial,
+ manifest_base_name,
+ output,
+ is_executable,
+ self.toplevel_build,
+ )
+ ldflags = env_ldflags + ldflags
+ self.WriteVariableList(ninja_file, "manifests", manifest_files)
+ implicit_deps = implicit_deps.union(manifest_files)
+ if intermediate_manifest:
+ self.WriteVariableList(
+ ninja_file, "intermediatemanifest", [intermediate_manifest]
+ )
+ command_suffix = _GetWinLinkRuleNameSuffix(
+ self.msvs_settings.IsEmbedManifest(config_name)
+ )
+ def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
+ if def_file:
+ implicit_deps.add(def_file)
+ else:
+ # Respect environment variables related to build, but target-specific
+ # flags can still override them.
+ ldflags = env_ldflags + config.get("ldflags", [])
+            if is_executable and solibs:
+ rpath = "lib/"
+ if self.toolset != "target":
+ rpath += self.toolset
+ ldflags.append(r"-Wl,-rpath=\$$ORIGIN/%s" % rpath)
+ else:
+ ldflags.append("-Wl,-rpath=%s" % self.target_rpath)
+ ldflags.append("-Wl,-rpath-link=%s" % rpath)
+ self.WriteVariableList(ninja_file, "ldflags", map(self.ExpandSpecial, ldflags))
+
+ library_dirs = config.get("library_dirs", [])
+ if self.flavor == "win":
+ library_dirs = [
+ self.msvs_settings.ConvertVSMacros(library_dir, config_name)
+ for library_dir in library_dirs
+ ]
+ library_dirs = [
+ "/LIBPATH:"
+ + QuoteShellArgument(self.GypPathToNinja(library_dir), self.flavor)
+ for library_dir in library_dirs
+ ]
+ else:
+ library_dirs = [
+ QuoteShellArgument("-L" + self.GypPathToNinja(library_dir), self.flavor)
+ for library_dir in library_dirs
+ ]
+
+ libraries = gyp.common.uniquer(
+ map(self.ExpandSpecial, spec.get("libraries", []))
+ )
+ if self.flavor == "mac":
+ libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
+ elif self.flavor == "win":
+ libraries = self.msvs_settings.AdjustLibraries(libraries)
+
+ self.WriteVariableList(ninja_file, "libs", library_dirs + libraries)
+
+ linked_binary = output
+
+ if command in ("solink", "solink_module"):
+ extra_bindings.append(("soname", os.path.split(output)[1]))
+ extra_bindings.append(("lib", gyp.common.EncodePOSIXShellArgument(output)))
+ if self.flavor != "win":
+ link_file_list = output
+ if self.is_mac_bundle:
+ # 'Dependency Framework.framework/Versions/A/Dependency Framework'
+ # -> 'Dependency Framework.framework.rsp'
+ link_file_list = self.xcode_settings.GetWrapperName()
+ if arch:
+ link_file_list += "." + arch
+ link_file_list += ".rsp"
+                # If an rspfile contains spaces, ninja surrounds the filename
+                # with quotes and then passes it to open(), creating a file
+                # with quotes in its name (and when the rsp file is looked up,
+                # the name goes through bash, which strips the quotes) :-/
+ link_file_list = link_file_list.replace(" ", "_")
+ extra_bindings.append(
+ (
+ "link_file_list",
+ gyp.common.EncodePOSIXShellArgument(link_file_list),
+ )
+ )
+ if self.flavor == "win":
+ extra_bindings.append(("binary", output))
+ if (
+ "/NOENTRY" not in ldflags
+ and not self.msvs_settings.GetNoImportLibrary(config_name)
+ ):
+ self.target.import_lib = output + ".lib"
+ extra_bindings.append(
+ ("implibflag", "/IMPLIB:%s" % self.target.import_lib)
+ )
+ pdbname = self.msvs_settings.GetPDBName(
+ config_name, self.ExpandSpecial, output + ".pdb"
+ )
+ output = [output, self.target.import_lib]
+ if pdbname:
+ output.append(pdbname)
+ elif not self.is_mac_bundle:
+ output = [output, output + ".TOC"]
+ else:
+ command = command + "_notoc"
+ elif self.flavor == "win":
+ extra_bindings.append(("binary", output))
+ pdbname = self.msvs_settings.GetPDBName(
+ config_name, self.ExpandSpecial, output + ".pdb"
+ )
+ if pdbname:
+ output = [output, pdbname]
+
+        if solibs:
+ extra_bindings.append(
+ ("solibs", gyp.common.EncodePOSIXShellList(sorted(solibs)))
+ )
+
+ ninja_file.build(
+ output,
+ command + command_suffix,
+ link_deps,
+ implicit=sorted(implicit_deps),
+ order_only=list(order_deps),
+ variables=extra_bindings,
+ )
+ return linked_binary
+
+ def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
+ extra_link_deps = any(
+ self.target_outputs.get(dep).Linkable()
+ for dep in spec.get("dependencies", [])
+ if dep in self.target_outputs
+ )
+ if spec["type"] == "none" or (not link_deps and not extra_link_deps):
+ # TODO(evan): don't call this function for 'none' target types, as
+ # it doesn't do anything, and we fake out a 'binary' with a stamp file.
+ self.target.binary = compile_deps
+ self.target.type = "none"
+ elif spec["type"] == "static_library":
+ self.target.binary = self.ComputeOutput(spec)
+ if (
+ self.flavor not in ("ios", "mac", "netbsd", "openbsd", "win")
+ and not self.is_standalone_static_library
+ ):
+ self.ninja.build(
+ self.target.binary, "alink_thin", link_deps, order_only=compile_deps
+ )
+ else:
+ variables = []
+ if self.xcode_settings:
+ libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
+ if libtool_flags:
+ variables.append(("libtool_flags", libtool_flags))
+ if self.msvs_settings:
+ libflags = self.msvs_settings.GetLibFlags(
+ config_name, self.GypPathToNinja
+ )
+ variables.append(("libflags", libflags))
+
+ if self.flavor != "mac" or len(self.archs) == 1:
+ self.AppendPostbuildVariable(
+ variables, spec, self.target.binary, self.target.binary
+ )
+ self.ninja.build(
+ self.target.binary,
+ "alink",
+ link_deps,
+ order_only=compile_deps,
+ variables=variables,
+ )
+ else:
+ inputs = []
+ for arch in self.archs:
+ output = self.ComputeOutput(spec, arch)
+ self.arch_subninjas[arch].build(
+ output,
+ "alink",
+ link_deps[arch],
+ order_only=compile_deps,
+ variables=variables,
+ )
+ inputs.append(output)
+ # TODO: It's not clear if
+ # libtool_flags should be passed to the alink
+ # call that combines single-arch .a files into a fat .a file.
+ self.AppendPostbuildVariable(
+ variables, spec, self.target.binary, self.target.binary
+ )
+ self.ninja.build(
+ self.target.binary,
+ "alink",
+ inputs,
+ # FIXME: test proving order_only=compile_deps isn't
+ # needed.
+ variables=variables,
+ )
+ else:
+ self.target.binary = self.WriteLink(
+ spec, config_name, config, link_deps, compile_deps
+ )
+ return self.target.binary
+
+ def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
+ assert self.is_mac_bundle
+ package_framework = spec["type"] in ("shared_library", "loadable_module")
+ output = self.ComputeMacBundleOutput()
+ if is_empty:
+ output += ".stamp"
+ variables = []
+ self.AppendPostbuildVariable(
+ variables,
+ spec,
+ output,
+ self.target.binary,
+ is_command_start=not package_framework,
+ )
+ if package_framework and not is_empty:
+ if spec["type"] == "shared_library" and self.xcode_settings.isIOS:
+ self.ninja.build(
+ output,
+ "package_ios_framework",
+ mac_bundle_depends,
+ variables=variables,
+ )
+ else:
+ variables.append(("version", self.xcode_settings.GetFrameworkVersion()))
+ self.ninja.build(
+ output, "package_framework", mac_bundle_depends, variables=variables
+ )
+ else:
+ self.ninja.build(output, "stamp", mac_bundle_depends, variables=variables)
+ self.target.bundle = output
+ return output
+
+ def GetToolchainEnv(self, additional_settings=None):
+ """Returns the variables toolchain would set for build steps."""
+ env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
+ if self.flavor == "win":
+ env = self.GetMsvsToolchainEnv(additional_settings=additional_settings)
+ return env
+
+ def GetMsvsToolchainEnv(self, additional_settings=None):
+ """Returns the variables Visual Studio would set for build steps."""
+ return self.msvs_settings.GetVSMacroEnv(
+ "$!PRODUCT_DIR", config=self.config_name
+ )
+
+ def GetSortedXcodeEnv(self, additional_settings=None):
+ """Returns the variables Xcode would set for build steps."""
+ assert self.abs_build_dir
+ abs_build_dir = self.abs_build_dir
+ return gyp.xcode_emulation.GetSortedXcodeEnv(
+ self.xcode_settings,
+ abs_build_dir,
+ os.path.join(abs_build_dir, self.build_to_base),
+ self.config_name,
+ additional_settings,
+ )
+
+ def GetSortedXcodePostbuildEnv(self):
+ """Returns the variables Xcode would set for postbuild steps."""
+ postbuild_settings = {}
+ # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
+ # TODO(thakis): It would be nice to have some general mechanism instead.
+ strip_save_file = self.xcode_settings.GetPerTargetSetting(
+ "CHROMIUM_STRIP_SAVE_FILE"
+ )
+ if strip_save_file:
+ postbuild_settings["CHROMIUM_STRIP_SAVE_FILE"] = strip_save_file
+ return self.GetSortedXcodeEnv(additional_settings=postbuild_settings)
+
+ def AppendPostbuildVariable(
+ self, variables, spec, output, binary, is_command_start=False
+ ):
+ """Adds a 'postbuild' variable if there is a postbuild for |output|."""
+ postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start)
+ if postbuild:
+ variables.append(("postbuilds", postbuild))
+
+ def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
+ """Returns a shell command that runs all the postbuilds, and removes
+ |output| if any of them fails. If |is_command_start| is False, then the
+ returned string will start with ' && '."""
+ if not self.xcode_settings or spec["type"] == "none" or not output:
+ return ""
+ output = QuoteShellArgument(output, self.flavor)
+ postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
+ if output_binary is not None:
+ postbuilds = self.xcode_settings.AddImplicitPostbuilds(
+ self.config_name,
+ os.path.normpath(os.path.join(self.base_to_build, output)),
+ QuoteShellArgument(
+ os.path.normpath(os.path.join(self.base_to_build, output_binary)),
+ self.flavor,
+ ),
+ postbuilds,
+ quiet=True,
+ )
+
+ if not postbuilds:
+ return ""
+ # Postbuilds expect to be run in the gyp file's directory, so insert an
+ # implicit postbuild to cd to there.
+ postbuilds.insert(
+ 0, gyp.common.EncodePOSIXShellList(["cd", self.build_to_base])
+ )
+ env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
+        # G will be nonzero if any postbuild fails. Run all postbuilds in a
+        # subshell.
+ commands = (
+ env
+ + " ("
+ + " && ".join([ninja_syntax.escape(command) for command in postbuilds])
+ )
+ command_string = (
+ commands
+ + "); G=$$?; "
+ # Remove the final output if any postbuild failed.
+ "((exit $$G) || rm -rf %s) " % output
+ + "&& exit $$G)"
+ )
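+        # The leading "$ " below is ninja's escape for a literal space; without
+        # it, ninja would strip the leading whitespace when this string is
+        # assigned to the 'postbuilds' variable in the generated build file.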
+ if is_command_start:
+ return "(" + command_string + " && "
+ else:
+ return "$ && (" + command_string
+
+ def ComputeExportEnvString(self, env):
+ """Given an environment, returns a string looking like
+ 'export FOO=foo; export BAR="${FOO} bar;'
+ that exports |env| to the shell."""
+ export_str = []
+ for k, v in env:
+ export_str.append(
+ "export %s=%s;"
+ % (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v)))
+ )
+ return " ".join(export_str)
+
+ def ComputeMacBundleOutput(self):
+ """Return the 'output' (full output path) to a bundle output directory."""
+ assert self.is_mac_bundle
+ path = generator_default_variables["PRODUCT_DIR"]
+ return self.ExpandSpecial(
+ os.path.join(path, self.xcode_settings.GetWrapperName())
+ )
+
+ def ComputeOutputFileName(self, spec, type=None):
+ """Compute the filename of the final output for the current target."""
+ if not type:
+ type = spec["type"]
+
+ default_variables = copy.copy(generator_default_variables)
+ CalculateVariables(default_variables, {"flavor": self.flavor})
+
+ # Compute filename prefix: the product prefix, or a default for
+ # the product type.
+ DEFAULT_PREFIX = {
+ "loadable_module": default_variables["SHARED_LIB_PREFIX"],
+ "shared_library": default_variables["SHARED_LIB_PREFIX"],
+ "static_library": default_variables["STATIC_LIB_PREFIX"],
+ "executable": default_variables["EXECUTABLE_PREFIX"],
+ }
+ prefix = spec.get("product_prefix", DEFAULT_PREFIX.get(type, ""))
+
+ # Compute filename extension: the product extension, or a default
+ # for the product type.
+ DEFAULT_EXTENSION = {
+ "loadable_module": default_variables["SHARED_LIB_SUFFIX"],
+ "shared_library": default_variables["SHARED_LIB_SUFFIX"],
+ "static_library": default_variables["STATIC_LIB_SUFFIX"],
+ "executable": default_variables["EXECUTABLE_SUFFIX"],
+ }
+ extension = spec.get("product_extension")
+ if extension:
+ extension = "." + extension
+ else:
+ extension = DEFAULT_EXTENSION.get(type, "")
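+        # E.g. on linux a shared_library 'foo' resolves to 'libfoo.so', while
+        # on win the default prefix is '' and the suffix '.dll', giving
+        # 'foo.dll'.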
+
+ if "product_name" in spec:
+ # If we were given an explicit name, use that.
+ target = spec["product_name"]
+ else:
+ # Otherwise, derive a name from the target name.
+ target = spec["target_name"]
+ if prefix == "lib":
+ # Snip out an extra 'lib' from libs if appropriate.
+ target = StripPrefix(target, "lib")
+
+ if type in (
+ "static_library",
+ "loadable_module",
+ "shared_library",
+ "executable",
+ ):
+ return f"{prefix}{target}{extension}"
+ elif type == "none":
+ return "%s.stamp" % target
+ else:
+ raise Exception("Unhandled output type %s" % type)
+
+ def ComputeOutput(self, spec, arch=None):
+ """Compute the path for the final output of the spec."""
+ type = spec["type"]
+
+ if self.flavor == "win":
+ override = self.msvs_settings.GetOutputName(
+ self.config_name, self.ExpandSpecial
+ )
+ if override:
+ return override
+
+ if (
+ arch is None
+ and self.flavor == "mac"
+ and type
+ in ("static_library", "executable", "shared_library", "loadable_module")
+ ):
+ filename = self.xcode_settings.GetExecutablePath()
+ else:
+ filename = self.ComputeOutputFileName(spec, type)
+
+ if arch is None and "product_dir" in spec:
+ path = os.path.join(spec["product_dir"], filename)
+ return self.ExpandSpecial(path)
+
+ # Some products go into the output root, libraries go into shared library
+ # dir, and everything else goes into the normal place.
+ type_in_output_root = ["executable", "loadable_module"]
+ if self.flavor == "mac" and self.toolset == "target":
+ type_in_output_root += ["shared_library", "static_library"]
+ elif self.flavor == "win" and self.toolset == "target":
+ type_in_output_root += ["shared_library"]
+
+ if arch is not None:
+ # Make sure partial executables don't end up in a bundle or the regular
+ # output directory.
+ archdir = "arch"
+ if self.toolset != "target":
+ archdir = os.path.join("arch", "%s" % self.toolset)
+ return os.path.join(archdir, AddArch(filename, arch))
+ elif type in type_in_output_root or self.is_standalone_static_library:
+ return filename
+ elif type == "shared_library":
+ libdir = "lib"
+ if self.toolset != "target":
+ libdir = os.path.join("lib", "%s" % self.toolset)
+ return os.path.join(libdir, filename)
+ else:
+ return self.GypPathToUniqueOutput(filename, qualified=False)
+
+ def WriteVariableList(self, ninja_file, var, values):
+ assert not isinstance(values, str)
+ if values is None:
+ values = []
+ ninja_file.variable(var, " ".join(values))
+
+ def WriteNewNinjaRule(
+ self, name, args, description, win_shell_flags, env, pool, depfile=None
+ ):
+ """Write out a new ninja "rule" statement for a given command.
+
+ Returns the name of the new rule, and a copy of |args| with variables
+ expanded."""
+
+ if self.flavor == "win":
+ args = [
+ self.msvs_settings.ConvertVSMacros(
+ arg, self.base_to_build, config=self.config_name
+ )
+ for arg in args
+ ]
+ description = self.msvs_settings.ConvertVSMacros(
+ description, config=self.config_name
+ )
+ elif self.flavor == "mac":
+ # |env| is an empty list on non-mac.
+ args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
+ description = gyp.xcode_emulation.ExpandEnvVars(description, env)
+
+ # TODO: we shouldn't need to qualify names; we do it because
+ # currently the ninja rule namespace is global, but it really
+ # should be scoped to the subninja.
+ rule_name = self.name
+ if self.toolset == "target":
+ rule_name += "." + self.toolset
+ rule_name += "." + name
+ rule_name = re.sub("[^a-zA-Z0-9_]", "_", rule_name)
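+        # E.g. for a non-'target' toolset, target 'my-app' with action name
+        # 'gen_1a2b' becomes the rule 'my_app_gen_1a2b' (dots and dashes both
+        # map to '_').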
+
+ # Remove variable references, but not if they refer to the magic rule
+ # variables. This is not quite right, as it also protects these for
+ # actions, not just for rules where they are valid. Good enough.
+ protect = ["${root}", "${dirname}", "${source}", "${ext}", "${name}"]
+ protect = "(?!" + "|".join(map(re.escape, protect)) + ")"
+ description = re.sub(protect + r"\$", "_", description)
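+        # E.g. the description "RULE ${source} costs $5" keeps "${source}" but
+        # becomes "RULE ${source} costs _5".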
+
+ # gyp dictates that commands are run from the base directory.
+ # cd into the directory before running, and adjust paths in
+ # the arguments to point to the proper locations.
+ rspfile = None
+ rspfile_content = None
+ args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
+ if self.flavor == "win":
+ rspfile = rule_name + ".$unique_name.rsp"
+ # The cygwin case handles this inside the bash sub-shell.
+ run_in = "" if win_shell_flags.cygwin else " " + self.build_to_base
+ if win_shell_flags.cygwin:
+ rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
+ args, self.build_to_base
+ )
+ else:
+ rspfile_content = gyp.msvs_emulation.EncodeRspFileList(
+ args, win_shell_flags.quote)
+ command = (
+ "%s gyp-win-tool action-wrapper $arch " % sys.executable
+ + rspfile
+ + run_in
+ )
+ else:
+ env = self.ComputeExportEnvString(env)
+ command = gyp.common.EncodePOSIXShellList(args)
+ command = "cd %s; " % self.build_to_base + env + command
+
+ # GYP rules/actions express being no-ops by not touching their outputs.
+ # Avoid executing downstream dependencies in this case by specifying
+ # restat=1 to ninja.
+ self.ninja.rule(
+ rule_name,
+ command,
+ description,
+ depfile=depfile,
+ restat=True,
+ pool=pool,
+ rspfile=rspfile,
+ rspfile_content=rspfile_content,
+ )
+ self.ninja.newline()
+
+ return rule_name, args
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ global generator_additional_non_configuration_keys
+ global generator_additional_path_sections
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == "mac":
+ default_variables.setdefault("OS", "mac")
+ default_variables.setdefault("SHARED_LIB_SUFFIX", ".dylib")
+ default_variables.setdefault(
+ "SHARED_LIB_DIR", generator_default_variables["PRODUCT_DIR"]
+ )
+ default_variables.setdefault(
+ "LIB_DIR", generator_default_variables["PRODUCT_DIR"]
+ )
+
+ # Copy additional generator configuration data from Xcode, which is shared
+ # by the Mac Ninja generator.
+ import gyp.generator.xcode as xcode_generator
+
+ generator_additional_non_configuration_keys = getattr(
+ xcode_generator, "generator_additional_non_configuration_keys", []
+ )
+ generator_additional_path_sections = getattr(
+ xcode_generator, "generator_additional_path_sections", []
+ )
+ global generator_extra_sources_for_rules
+ generator_extra_sources_for_rules = getattr(
+ xcode_generator, "generator_extra_sources_for_rules", []
+ )
+ elif flavor == "win":
+ exts = gyp.MSVSUtil.TARGET_TYPE_EXT
+ default_variables.setdefault("OS", "win")
+ default_variables["EXECUTABLE_SUFFIX"] = "." + exts["executable"]
+ default_variables["STATIC_LIB_PREFIX"] = ""
+ default_variables["STATIC_LIB_SUFFIX"] = "." + exts["static_library"]
+ default_variables["SHARED_LIB_PREFIX"] = ""
+ default_variables["SHARED_LIB_SUFFIX"] = "." + exts["shared_library"]
+
+ # Copy additional generator configuration data from VS, which is shared
+ # by the Windows Ninja generator.
+ import gyp.generator.msvs as msvs_generator
+
+ generator_additional_non_configuration_keys = getattr(
+ msvs_generator, "generator_additional_non_configuration_keys", []
+ )
+ generator_additional_path_sections = getattr(
+ msvs_generator, "generator_additional_path_sections", []
+ )
+
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+ else:
+ operating_system = flavor
+ if flavor == "android":
+ operating_system = "linux" # Keep this legacy behavior for now.
+ default_variables.setdefault("OS", operating_system)
+ default_variables.setdefault("SHARED_LIB_SUFFIX", ".so")
+ default_variables.setdefault(
+ "SHARED_LIB_DIR", os.path.join("$!PRODUCT_DIR", "lib")
+ )
+ default_variables.setdefault("LIB_DIR", os.path.join("$!PRODUCT_DIR", "obj"))
+
+
+def ComputeOutputDir(params):
+ """Returns the path from the toplevel_dir to the build output directory."""
+ # generator_dir: relative path from pwd to where make puts build files.
+    # Makes migrating from make to ninja easier; ninja doesn't put anything here.
+ generator_dir = os.path.relpath(params["options"].generator_output or ".")
+
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = params.get("generator_flags", {}).get("output_dir", "out")
+
+ # Relative path from source root to our output files. e.g. "out"
+ return os.path.normpath(os.path.join(generator_dir, output_dir))
+
+
+def CalculateGeneratorInputInfo(params):
+ """Called by __init__ to initialize generator values based on params."""
+ # E.g. "out/gypfiles"
+ toplevel = params["options"].toplevel_dir
+ qualified_out_dir = os.path.normpath(
+ os.path.join(toplevel, ComputeOutputDir(params), "gypfiles")
+ )
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ "toplevel": toplevel,
+ "qualified_out_dir": qualified_out_dir,
+ }
+
+
+def OpenOutput(path, mode="w"):
+ """Open |path| for writing, creating directories if necessary."""
+ gyp.common.EnsureDirExists(path)
+ return open(path, mode)
+
+
+def CommandWithWrapper(cmd, wrappers, prog):
+ wrapper = wrappers.get(cmd, "")
+ if wrapper:
+ return wrapper + " " + prog
+ return prog
+
+
+def GetDefaultConcurrentLinks():
+ """Returns a best-guess for a number of concurrent links."""
+ pool_size = int(os.environ.get("GYP_LINK_CONCURRENCY", 0))
+ if pool_size:
+ return pool_size
+
+ if sys.platform in ("win32", "cygwin"):
+ import ctypes
+
+ class MEMORYSTATUSEX(ctypes.Structure):
+ _fields_ = [
+ ("dwLength", ctypes.c_ulong),
+ ("dwMemoryLoad", ctypes.c_ulong),
+ ("ullTotalPhys", ctypes.c_ulonglong),
+ ("ullAvailPhys", ctypes.c_ulonglong),
+ ("ullTotalPageFile", ctypes.c_ulonglong),
+ ("ullAvailPageFile", ctypes.c_ulonglong),
+ ("ullTotalVirtual", ctypes.c_ulonglong),
+ ("ullAvailVirtual", ctypes.c_ulonglong),
+ ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
+ ]
+
+ stat = MEMORYSTATUSEX()
+ stat.dwLength = ctypes.sizeof(stat)
+ ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+
+ # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
+ # on a 64 GiB machine.
+ mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GiB
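+        # E.g. 64 GiB of physical RAM // 5 GiB per link -> 12 concurrent links.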
+ hard_cap = max(1, int(os.environ.get("GYP_LINK_CONCURRENCY_MAX", 2 ** 32)))
+ return min(mem_limit, hard_cap)
+ elif sys.platform.startswith("linux"):
+ if os.path.exists("/proc/meminfo"):
+ with open("/proc/meminfo") as meminfo:
+ memtotal_re = re.compile(r"^MemTotal:\s*(\d*)\s*kB")
+ for line in meminfo:
+ match = memtotal_re.match(line)
+ if not match:
+ continue
+                    # Allow 8 GiB per link on Linux because Gold is quite
+                    # memory hungry.
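+                    # (MemTotal is in kB: e.g. ~16 GiB = 16777216 kB -> 2 links.)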
+ return max(1, int(match.group(1)) // (8 * (2 ** 20)))
+ return 1
+ elif sys.platform == "darwin":
+ try:
+ avail_bytes = int(subprocess.check_output(["sysctl", "-n", "hw.memsize"]))
+ # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
+ # 4GB per ld process allows for some more bloat.
+ return max(1, avail_bytes // (4 * (2 ** 30))) # total / 4GB
+ except subprocess.CalledProcessError:
+ return 1
+ else:
+ # TODO(scottmg): Implement this for other platforms.
+ return 1
+
+
+def _GetWinLinkRuleNameSuffix(embed_manifest):
+ """Returns the suffix used to select an appropriate linking rule depending on
+ whether the manifest embedding is enabled."""
+ return "_embed" if embed_manifest else ""
+
+
+def _AddWinLinkRules(master_ninja, embed_manifest):
+ """Adds link rules for Windows platform to |master_ninja|."""
+
+ def FullLinkCommand(ldcmd, out, binary_type):
+ resource_name = {"exe": "1", "dll": "2"}[binary_type]
+ return (
+ "%(python)s gyp-win-tool link-with-manifests $arch %(embed)s "
+ '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" '
+ "$manifests"
+ % {
+ "python": sys.executable,
+ "out": out,
+ "ldcmd": ldcmd,
+ "resname": resource_name,
+ "embed": embed_manifest,
+ }
+ )
+
+ rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
+ use_separate_mspdbsrv = int(os.environ.get("GYP_USE_SEPARATE_MSPDBSRV", "0")) != 0
+ dlldesc = "LINK%s(DLL) $binary" % rule_name_suffix.upper()
+ dllcmd = (
+ "%s gyp-win-tool link-wrapper $arch %s "
+ "$ld /nologo $implibflag /DLL /OUT:$binary "
+ "@$binary.rsp" % (sys.executable, use_separate_mspdbsrv)
+ )
+ dllcmd = FullLinkCommand(dllcmd, "$binary", "dll")
+ master_ninja.rule(
+ "solink" + rule_name_suffix,
+ description=dlldesc,
+ command=dllcmd,
+ rspfile="$binary.rsp",
+ rspfile_content="$libs $in_newline $ldflags",
+ restat=True,
+ pool="link_pool",
+ )
+ master_ninja.rule(
+ "solink_module" + rule_name_suffix,
+ description=dlldesc,
+ command=dllcmd,
+ rspfile="$binary.rsp",
+ rspfile_content="$libs $in_newline $ldflags",
+ restat=True,
+ pool="link_pool",
+ )
+ # Note that ldflags goes at the end so that it has the option of
+ # overriding default settings earlier in the command line.
+ exe_cmd = (
+ "%s gyp-win-tool link-wrapper $arch %s "
+ "$ld /nologo /OUT:$binary @$binary.rsp"
+ % (sys.executable, use_separate_mspdbsrv)
+ )
+ exe_cmd = FullLinkCommand(exe_cmd, "$binary", "exe")
+ master_ninja.rule(
+ "link" + rule_name_suffix,
+ description="LINK%s $binary" % rule_name_suffix.upper(),
+ command=exe_cmd,
+ rspfile="$binary.rsp",
+ rspfile_content="$in_newline $libs $ldflags",
+ pool="link_pool",
+ )
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name):
+ options = params["options"]
+ flavor = gyp.common.GetFlavor(params)
+ generator_flags = params.get("generator_flags", {})
+
+ # build_dir: relative path from source root to our output files.
+ # e.g. "out/Debug"
+ build_dir = os.path.normpath(os.path.join(ComputeOutputDir(params), config_name))
+
+ toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+
+ master_ninja_file = OpenOutput(os.path.join(toplevel_build, "build.ninja"))
+ master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
+
+ # Put build-time support tools in out/{config_name}.
+ gyp.common.CopyTool(flavor, toplevel_build, generator_flags)
+
+ # Grab make settings for CC/CXX.
+    # The rules are:
+    # - Precedence, from lowest to highest: the gcc/g++ defaults, then
+    #   'make_global_settings' in gyp, then the environment variables.
+    # - If neither a 'make_global_settings' entry for CC.host/CXX.host nor a
+    #   'CC_host'/'CXX_host' environment variable is set, cc_host/cxx_host
+    #   fall back to cc/cxx.
+ if flavor == "win":
+ ar = "lib.exe"
+ # cc and cxx must be set to the correct architecture by overriding with one
+ # of cl_x86 or cl_x64 below.
+ cc = "UNSET"
+ cxx = "UNSET"
+ ld = "link.exe"
+ ld_host = "$ld"
+ else:
+ ar = "ar"
+ cc = "cc"
+ cxx = "c++"
+ ld = "$cc"
+ ldxx = "$cxx"
+ ld_host = "$cc_host"
+ ldxx_host = "$cxx_host"
+
+ ar_host = ar
+ cc_host = None
+ cxx_host = None
+ cc_host_global_setting = None
+ cxx_host_global_setting = None
+ clang_cl = None
+ nm = "nm"
+ nm_host = "nm"
+ readelf = "readelf"
+ readelf_host = "readelf"
+
+ build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
+ make_global_settings = data[build_file].get("make_global_settings", [])
+ build_to_root = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
+ wrappers = {}
+ for key, value in make_global_settings:
+ if key == "AR":
+ ar = os.path.join(build_to_root, value)
+ if key == "AR.host":
+ ar_host = os.path.join(build_to_root, value)
+ if key == "CC":
+ cc = os.path.join(build_to_root, value)
+ if cc.endswith("clang-cl"):
+ clang_cl = cc
+ if key == "CXX":
+ cxx = os.path.join(build_to_root, value)
+ if key == "CC.host":
+ cc_host = os.path.join(build_to_root, value)
+ cc_host_global_setting = value
+ if key == "CXX.host":
+ cxx_host = os.path.join(build_to_root, value)
+ cxx_host_global_setting = value
+ if key == "LD":
+ ld = os.path.join(build_to_root, value)
+ if key == "LD.host":
+ ld_host = os.path.join(build_to_root, value)
+ if key == "LDXX":
+ ldxx = os.path.join(build_to_root, value)
+ if key == "LDXX.host":
+ ldxx_host = os.path.join(build_to_root, value)
+ if key == "NM":
+ nm = os.path.join(build_to_root, value)
+ if key == "NM.host":
+ nm_host = os.path.join(build_to_root, value)
+ if key == "READELF":
+ readelf = os.path.join(build_to_root, value)
+ if key == "READELF.host":
+ readelf_host = os.path.join(build_to_root, value)
+ if key.endswith("_wrapper"):
+ wrappers[key[: -len("_wrapper")]] = os.path.join(build_to_root, value)
+
+ # Support wrappers from environment variables too.
+ for key, value in os.environ.items():
+ if key.lower().endswith("_wrapper"):
+ key_prefix = key[: -len("_wrapper")]
+ key_prefix = re.sub(r"\.HOST$", ".host", key_prefix)
+ wrappers[key_prefix] = os.path.join(build_to_root, value)
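+    # E.g. setting a CC_wrapper environment variable makes the
+    # CommandWithWrapper("CC", ...) calls below prefix compiler invocations
+    # with the (build-relative) wrapper path.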
+
+ mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
+ if mac_toolchain_dir:
+ wrappers["LINK"] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir
+
+ if flavor == "win":
+ configs = [
+ target_dicts[qualified_target]["configurations"][config_name]
+ for qualified_target in target_list
+ ]
+ shared_system_includes = None
+ if not generator_flags.get("ninja_use_custom_environment_files", 0):
+ shared_system_includes = gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
+ configs, generator_flags
+ )
+ cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
+ toplevel_build, generator_flags, shared_system_includes, OpenOutput
+ )
+ for arch, path in sorted(cl_paths.items()):
+ if clang_cl:
+ # If we have selected clang-cl, use that instead.
+ path = clang_cl
+ command = CommandWithWrapper(
+ "CC", wrappers, QuoteShellArgument(path, "win")
+ )
+ if clang_cl:
+ # Use clang-cl to cross-compile for x86 or x86_64.
+ command += " -m32" if arch == "x86" else " -m64"
+ master_ninja.variable("cl_" + arch, command)
+
+ cc = GetEnvironFallback(["CC_target", "CC"], cc)
+ master_ninja.variable("cc", CommandWithWrapper("CC", wrappers, cc))
+ cxx = GetEnvironFallback(["CXX_target", "CXX"], cxx)
+ master_ninja.variable("cxx", CommandWithWrapper("CXX", wrappers, cxx))
+
+ if flavor == "win":
+ master_ninja.variable("ld", ld)
+ master_ninja.variable("idl", "midl.exe")
+ master_ninja.variable("ar", ar)
+ master_ninja.variable("rc", "rc.exe")
+ master_ninja.variable("ml_x86", "ml.exe")
+ master_ninja.variable("ml_x64", "ml64.exe")
+ master_ninja.variable("mt", "mt.exe")
+ else:
+ master_ninja.variable("ld", CommandWithWrapper("LINK", wrappers, ld))
+ master_ninja.variable("ldxx", CommandWithWrapper("LINK", wrappers, ldxx))
+ master_ninja.variable("ar", GetEnvironFallback(["AR_target", "AR"], ar))
+ if flavor != "mac":
+        # Mac does not use readelf/nm for .TOC generation, so avoid polluting
+        # the master ninja with extra unused variables.
+ master_ninja.variable("nm", GetEnvironFallback(["NM_target", "NM"], nm))
+ master_ninja.variable(
+ "readelf", GetEnvironFallback(["READELF_target", "READELF"], readelf)
+ )
+
+ if generator_supports_multiple_toolsets:
+ if not cc_host:
+ cc_host = cc
+ if not cxx_host:
+ cxx_host = cxx
+
+ master_ninja.variable("ar_host", GetEnvironFallback(["AR_host"], ar_host))
+ master_ninja.variable("nm_host", GetEnvironFallback(["NM_host"], nm_host))
+ master_ninja.variable(
+ "readelf_host", GetEnvironFallback(["READELF_host"], readelf_host)
+ )
+ cc_host = GetEnvironFallback(["CC_host"], cc_host)
+ cxx_host = GetEnvironFallback(["CXX_host"], cxx_host)
+
+        # The environment variables could be used in 'make_global_settings',
+        # like ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)']; transform them
+        # here.
+ if "$(CC)" in cc_host and cc_host_global_setting:
+ cc_host = cc_host_global_setting.replace("$(CC)", cc)
+ if "$(CXX)" in cxx_host and cxx_host_global_setting:
+ cxx_host = cxx_host_global_setting.replace("$(CXX)", cxx)
+ master_ninja.variable(
+ "cc_host", CommandWithWrapper("CC.host", wrappers, cc_host)
+ )
+ master_ninja.variable(
+ "cxx_host", CommandWithWrapper("CXX.host", wrappers, cxx_host)
+ )
+ if flavor == "win":
+ master_ninja.variable("ld_host", ld_host)
+ else:
+ master_ninja.variable(
+ "ld_host", CommandWithWrapper("LINK", wrappers, ld_host)
+ )
+ master_ninja.variable(
+ "ldxx_host", CommandWithWrapper("LINK", wrappers, ldxx_host)
+ )
+
+ master_ninja.newline()
+
+ master_ninja.pool("link_pool", depth=GetDefaultConcurrentLinks())
+ master_ninja.newline()
+
+ deps = "msvc" if flavor == "win" else "gcc"
+
+ if flavor != "win":
+ master_ninja.rule(
+ "cc",
+ description="CC $out",
+ command=(
+ "$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c "
+ "$cflags_pch_c -c $in -o $out"
+ ),
+ depfile="$out.d",
+ deps=deps,
+ )
+ master_ninja.rule(
+ "cc_s",
+ description="CC $out",
+ command=(
+ "$cc $defines $includes $cflags $cflags_c "
+ "$cflags_pch_c -c $in -o $out"
+ ),
+ )
+ master_ninja.rule(
+ "cxx",
+ description="CXX $out",
+ command=(
+ "$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc "
+ "$cflags_pch_cc -c $in -o $out"
+ ),
+ depfile="$out.d",
+ deps=deps,
+ )
+ else:
+ # TODO(scottmg) Separate pdb names is a test to see if it works around
+ # http://crbug.com/142362. It seems there's a race between the creation of
+ # the .pdb by the precompiled header step for .cc and the compilation of
+        # .c files. This should be handled by mspdbsrv, but compilation still
+        # occasionally fails with
+        #   c1xx : fatal error C1033: cannot open program database
+        # By making the rules target separate pdb files this might be avoided.
+ cc_command = (
+ "ninja -t msvc -e $arch " + "-- "
+ "$cc /nologo /showIncludes /FC "
+ "@$out.rsp /c $in /Fo$out /Fd$pdbname_c "
+ )
+ cxx_command = (
+ "ninja -t msvc -e $arch " + "-- "
+ "$cxx /nologo /showIncludes /FC "
+ "@$out.rsp /c $in /Fo$out /Fd$pdbname_cc "
+ )
+ master_ninja.rule(
+ "cc",
+ description="CC $out",
+ command=cc_command,
+ rspfile="$out.rsp",
+ rspfile_content="$defines $includes $cflags $cflags_c",
+ deps=deps,
+ )
+ master_ninja.rule(
+ "cxx",
+ description="CXX $out",
+ command=cxx_command,
+ rspfile="$out.rsp",
+ rspfile_content="$defines $includes $cflags $cflags_cc",
+ deps=deps,
+ )
+ master_ninja.rule(
+ "idl",
+ description="IDL $in",
+ command=(
+ "%s gyp-win-tool midl-wrapper $arch $outdir "
+ "$tlb $h $dlldata $iid $proxy $in "
+ "$midl_includes $idlflags" % sys.executable
+ ),
+ )
+ master_ninja.rule(
+ "rc",
+ description="RC $in",
+        # Note: $in must be last, otherwise rc.exe complains.
+ command=(
+ "%s gyp-win-tool rc-wrapper "
+ "$arch $rc $defines $resource_includes $rcflags /fo$out $in"
+ % sys.executable
+ ),
+ )
+ master_ninja.rule(
+ "asm",
+ description="ASM $out",
+ command=(
+ "%s gyp-win-tool asm-wrapper "
+ "$arch $asm $defines $includes $asmflags /c /Fo $out $in"
+ % sys.executable
+ ),
+ )
+
+ if flavor not in ("ios", "mac", "win"):
+ master_ninja.rule(
+ "alink",
+ description="AR $out",
+ command="rm -f $out && $ar rcs $arflags $out $in",
+ )
+ master_ninja.rule(
+ "alink_thin",
+ description="AR $out",
+ command="rm -f $out && $ar rcsT $arflags $out $in",
+ )
+
+        # This allows targets that only need to depend on $lib's API to declare
+        # an order-only dependency on $lib.TOC and avoid relinking such
+        # downstream dependencies when $lib changes only in non-public ways.
+        # The resulting string leaves an uninterpolated %(suffix)s which
+        # is used in the final substitution below.
+        mtime_preserving_solink_base = (
+            "if [ ! -e $lib -o ! -e $lib.TOC ]; then "
+            "%(solink)s && %(extract_toc)s > $lib.TOC; else "
+            "%(solink)s && %(extract_toc)s > $lib.tmp && "
+            "if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; "
+            "fi; fi"
+            % {
+                "solink": "$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s",
+                "extract_toc": (
+                    "{ $readelf -d $lib | grep SONAME ; "
+                    "$nm -gD -f p $lib | cut -f1-2 -d' '; }"
+                ),
+            }
+        )
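+        # Net effect: $lib.TOC changes only when the exported interface does,
+        # so downstream links that depend on $lib.TOC (see WriteLinkForArch)
+        # are not re-run after implementation-only changes to $lib; restat=True
+        # on the solink rules lets ninja notice the unchanged TOC.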
+
+        master_ninja.rule(
+            "solink",
+            description="SOLINK $lib",
+            restat=True,
+            command=mtime_preserving_solink_base
+            % {"suffix": "@$link_file_list"},  # noqa: E501
+            rspfile="$link_file_list",
+            rspfile_content=(
+                "-Wl,--whole-archive $in $solibs -Wl," "--no-whole-archive $libs"
+            ),
+            pool="link_pool",
+        )
+        master_ninja.rule(
+            "solink_module",
+            description="SOLINK(module) $lib",
+            restat=True,
+            command=mtime_preserving_solink_base % {"suffix": "@$link_file_list"},
+            rspfile="$link_file_list",
+            rspfile_content="-Wl,--start-group $in $solibs $libs -Wl,--end-group",
+            pool="link_pool",
+        )
+        master_ninja.rule(
+            "link",
+            description="LINK $out",
+            command=(
+                "$ld $ldflags -o $out "
+                "-Wl,--start-group $in $solibs $libs -Wl,--end-group"
+            ),
+            pool="link_pool",
+        )
+ elif flavor == "win":
+ master_ninja.rule(
+ "alink",
+ description="LIB $out",
+ command=(
+ "%s gyp-win-tool link-wrapper $arch False "
+ "$ar /nologo /ignore:4221 /OUT:$out @$out.rsp" % sys.executable
+ ),
+ rspfile="$out.rsp",
+ rspfile_content="$in_newline $libflags",
+ )
+ _AddWinLinkRules(master_ninja, embed_manifest=True)
+ _AddWinLinkRules(master_ninja, embed_manifest=False)
+ else:
+ master_ninja.rule(
+ "objc",
+ description="OBJC $out",
+ command=(
+ "$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc "
+ "$cflags_pch_objc -c $in -o $out"
+ ),
+ depfile="$out.d",
+ deps=deps,
+ )
+ master_ninja.rule(
+ "objcxx",
+ description="OBJCXX $out",
+ command=(
+ "$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc "
+ "$cflags_pch_objcc -c $in -o $out"
+ ),
+ depfile="$out.d",
+ deps=deps,
+ )
+ master_ninja.rule(
+ "alink",
+ description="LIBTOOL-STATIC $out, POSTBUILDS",
+ command="rm -f $out && "
+ "./gyp-mac-tool filter-libtool libtool $libtool_flags "
+ "-static -o $out $in"
+ "$postbuilds",
+ )
+ master_ninja.rule(
+ "lipo",
+ description="LIPO $out, POSTBUILDS",
+ command="rm -f $out && lipo -create $in -output $out$postbuilds",
+ )
+ master_ninja.rule(
+ "solipo",
+ description="SOLIPO $out, POSTBUILDS",
+ command=(
+ "rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&"
+ "%(extract_toc)s > $lib.TOC"
+ % {
+ "extract_toc": "{ otool -l $lib | grep LC_ID_DYLIB -A 5; "
+ "nm -gP $lib | cut -f1-2 -d' ' | grep -v U$$; true; }"
+ }
+ ),
+ )
+
+ # Record the public interface of $lib in $lib.TOC. See the corresponding
+ # comment in the posix section above for details.
+ solink_base = "$ld %(type)s $ldflags -o $lib %(suffix)s"
+ mtime_preserving_solink_base = (
+ "if [ ! -e $lib -o ! -e $lib.TOC ] || "
+ # Always force dependent targets to relink if this library
+ # reexports something. Handling this correctly would require
+ # recursive TOC dumping but this is rare in practice, so punt.
+ "otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then "
+ "%(solink)s && %(extract_toc)s > $lib.TOC; "
+ "else "
+ "%(solink)s && %(extract_toc)s > $lib.tmp && "
+ "if ! cmp -s $lib.tmp $lib.TOC; then "
+ "mv $lib.tmp $lib.TOC ; "
+ "fi; "
+ "fi"
+ % {
+ "solink": solink_base,
+ "extract_toc": "{ otool -l $lib | grep LC_ID_DYLIB -A 5; "
+ "nm -gP $lib | cut -f1-2 -d' ' | grep -v U$$; true; }",
+ }
+ )
+
+ solink_suffix = "@$link_file_list$postbuilds"
+ master_ninja.rule(
+ "solink",
+ description="SOLINK $lib, POSTBUILDS",
+ restat=True,
+ command=mtime_preserving_solink_base
+ % {"suffix": solink_suffix, "type": "-shared"},
+ rspfile="$link_file_list",
+ rspfile_content="$in $solibs $libs",
+ pool="link_pool",
+ )
+ master_ninja.rule(
+ "solink_notoc",
+ description="SOLINK $lib, POSTBUILDS",
+ restat=True,
+ command=solink_base % {"suffix": solink_suffix, "type": "-shared"},
+ rspfile="$link_file_list",
+ rspfile_content="$in $solibs $libs",
+ pool="link_pool",
+ )
+
+ master_ninja.rule(
+ "solink_module",
+ description="SOLINK(module) $lib, POSTBUILDS",
+ restat=True,
+ command=mtime_preserving_solink_base
+ % {"suffix": solink_suffix, "type": "-bundle"},
+ rspfile="$link_file_list",
+ rspfile_content="$in $solibs $libs",
+ pool="link_pool",
+ )
+ master_ninja.rule(
+ "solink_module_notoc",
+ description="SOLINK(module) $lib, POSTBUILDS",
+ restat=True,
+ command=solink_base % {"suffix": solink_suffix, "type": "-bundle"},
+ rspfile="$link_file_list",
+ rspfile_content="$in $solibs $libs",
+ pool="link_pool",
+ )
+
+ master_ninja.rule(
+ "link",
+ description="LINK $out, POSTBUILDS",
+ command=("$ld $ldflags -o $out " "$in $solibs $libs$postbuilds"),
+ pool="link_pool",
+ )
+ master_ninja.rule(
+ "preprocess_infoplist",
+ description="PREPROCESS INFOPLIST $out",
+ command=(
+ "$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && "
+ "plutil -convert xml1 $out $out"
+ ),
+ )
+ master_ninja.rule(
+ "copy_infoplist",
+ description="COPY INFOPLIST $in",
+ command="$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys",
+ )
+ master_ninja.rule(
+ "merge_infoplist",
+ description="MERGE INFOPLISTS $in",
+ command="$env ./gyp-mac-tool merge-info-plist $out $in",
+ )
+ master_ninja.rule(
+ "compile_xcassets",
+ description="COMPILE XCASSETS $in",
+ command="$env ./gyp-mac-tool compile-xcassets $keys $in",
+ )
+ master_ninja.rule(
+ "compile_ios_framework_headers",
+ description="COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in",
+ command="$env ./gyp-mac-tool compile-ios-framework-header-map $out "
+ "$framework $in && $env ./gyp-mac-tool "
+ "copy-ios-framework-headers $framework $copy_headers",
+ )
+ master_ninja.rule(
+ "mac_tool",
+ description="MACTOOL $mactool_cmd $in",
+ command="$env ./gyp-mac-tool $mactool_cmd $in $out $binary",
+ )
+ master_ninja.rule(
+ "package_framework",
+ description="PACKAGE FRAMEWORK $out, POSTBUILDS",
+ command="./gyp-mac-tool package-framework $out $version$postbuilds "
+ "&& touch $out",
+ )
+ master_ninja.rule(
+ "package_ios_framework",
+ description="PACKAGE IOS FRAMEWORK $out, POSTBUILDS",
+ command="./gyp-mac-tool package-ios-framework $out $postbuilds "
+ "&& touch $out",
+ )
+ if flavor == "win":
+ master_ninja.rule(
+ "stamp",
+ description="STAMP $out",
+ command="%s gyp-win-tool stamp $out" % sys.executable,
+ )
+ else:
+ master_ninja.rule(
+ "stamp", description="STAMP $out", command="${postbuilds}touch $out"
+ )
+ if flavor == "win":
+ master_ninja.rule(
+ "copy",
+ description="COPY $in $out",
+ command="%s gyp-win-tool recursive-mirror $in $out" % sys.executable,
+ )
+ elif flavor == "zos":
+ master_ninja.rule(
+ "copy",
+ description="COPY $in $out",
+ command="rm -rf $out && cp -fRP $in $out",
+ )
+ else:
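+ # Prefer a cheap hard link; if that fails (e.g. $in and $out live on
+ # different filesystems), fall back to a full copy.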
+ master_ninja.rule(
+ "copy",
+ description="COPY $in $out",
+ command="ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)",
+ )
+ master_ninja.newline()
+
+ all_targets = set()
+ for build_file in params["build_files"]:
+ for target in gyp.common.AllTargets(
+ target_list, target_dicts, os.path.normpath(build_file)
+ ):
+ all_targets.add(target)
+ all_outputs = set()
+
+ # target_outputs is a map from qualified target name to a Target object.
+ target_outputs = {}
+ # target_short_names is a map from target short name to a list of Target
+ # objects.
+ target_short_names = {}
+
+ # Short names of targets that were skipped because they didn't contain
+ # anything interesting.
+ # NOTE: there may be overlap between this and non_empty_target_names.
+ empty_target_names = set()
+
+ # Set of non-empty short target names.
+ # NOTE: there may be overlap between this and empty_target_names.
+ non_empty_target_names = set()
+
+ for qualified_target in target_list:
+ # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
+ build_file, name, toolset = gyp.common.ParseQualifiedTarget(qualified_target)
+
+ this_make_global_settings = data[build_file].get("make_global_settings", [])
+ assert make_global_settings == this_make_global_settings, (
+ "make_global_settings needs to be the same for all targets. "
+ f"{this_make_global_settings} vs. {make_global_settings}"
+ )
+
+ spec = target_dicts[qualified_target]
+ if flavor == "mac":
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
+
+ # If build_file is a symlink, we must not follow it because there's a chance
+ # it could point to a path above toplevel_dir, and we cannot correctly deal
+ # with that case at the moment.
+ build_file = gyp.common.RelativePath(build_file, options.toplevel_dir, False)
+
+ qualified_target_for_hash = gyp.common.QualifiedTarget(
+ build_file, name, toolset
+ )
+ qualified_target_for_hash = qualified_target_for_hash.encode("utf-8")
+ hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()
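+ # The digest gives this target a stable, unique prefix for the ninja
+ # rules it defines, so identically named rules in different targets
+ # don't collide in the generated files.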
+
+ base_path = os.path.dirname(build_file)
+ obj = "obj"
+ if toolset != "target":
+ obj += "." + toolset
+ output_file = os.path.join(obj, base_path, name + ".ninja")
+
+ ninja_output = StringIO()
+ writer = NinjaWriter(
+ hash_for_rules,
+ target_outputs,
+ base_path,
+ build_dir,
+ ninja_output,
+ toplevel_build,
+ output_file,
+ flavor,
+ toplevel_dir=options.toplevel_dir,
+ )
+
+ target = writer.WriteSpec(spec, config_name, generator_flags)
+
+ if ninja_output.tell() > 0:
+ # Only create files for ninja files that actually have contents.
+ with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
+ ninja_file.write(ninja_output.getvalue())
+ ninja_output.close()
+ master_ninja.subninja(output_file)
+
+ if target:
+ if name != target.FinalOutput() and spec["toolset"] == "target":
+ target_short_names.setdefault(name, []).append(target)
+ target_outputs[qualified_target] = target
+ if qualified_target in all_targets:
+ all_outputs.add(target.FinalOutput())
+ non_empty_target_names.add(name)
+ else:
+ empty_target_names.add(name)
+
+ if target_short_names:
+ # Write a short name to build this target. This benefits both the
+ # "build chrome" case as well as the gyp tests, which expect to be
+ # able to run actions and build libraries by their short name.
+ master_ninja.newline()
+ master_ninja.comment("Short names for targets.")
+ for short_name in sorted(target_short_names):
+ master_ninja.build(
+ short_name,
+ "phony",
+ [x.FinalOutput() for x in target_short_names[short_name]],
+ )
+
+ # Write phony targets for any empty targets that weren't written yet. As
+ # short names are not necessarily unique, only do this for short names
+ # that haven't already been output for another target.
+ empty_target_names = empty_target_names - non_empty_target_names
+ if empty_target_names:
+ master_ninja.newline()
+ master_ninja.comment("Empty targets (output for completeness).")
+ for name in sorted(empty_target_names):
+ master_ninja.build(name, "phony")
+
+ if all_outputs:
+ master_ninja.newline()
+ master_ninja.build("all", "phony", sorted(all_outputs))
+ master_ninja.default(generator_flags.get("default_target", "all"))
+
+ master_ninja_file.close()
+
+
+def PerformBuild(data, configurations, params):
+ options = params["options"]
+ for config in configurations:
+ builddir = os.path.join(options.toplevel_dir, "out", config)
+ arguments = ["ninja", "-C", builddir]
+ print(f"Building [{config}]: {arguments}")
+ subprocess.check_call(arguments)
+
+
+def CallGenerateOutputForConfig(arglist):
+ # Ignore the interrupt signal so that the parent process catches it and
+ # kills all multiprocessing children.
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ (target_list, target_dicts, data, params, config_name) = arglist
+ GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ # Update target_dicts for iOS device builds.
+ target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
+ target_dicts
+ )
+
+ user_config = params.get("generator_flags", {}).get("config", None)
+ if gyp.common.GetFlavor(params) == "win":
+ target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
+ target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
+ target_list, target_dicts, generator_default_variables
+ )
+
+ if user_config:
+ GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
+ else:
+ config_names = target_dicts[target_list[0]]["configurations"]
+ if params["parallel"]:
+ pool = None
+ try:
+ pool = multiprocessing.Pool(len(config_names))
+ arglists = []
+ for config_name in config_names:
+ arglists.append(
+ (target_list, target_dicts, data, params, config_name)
+ )
+ pool.map(CallGenerateOutputForConfig, arglists)
+ except KeyboardInterrupt as e:
+ # Guard against the Pool() call itself being interrupted, in
+ # which case pool would otherwise be unbound here.
+ if pool is not None:
+ pool.terminate()
+ raise e
+ else:
+ for config_name in config_names:
+ GenerateOutputForConfig(
+ target_list, target_dicts, data, params, config_name
+ )
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
new file mode 100644
index 0000000..7d18068
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the ninja.py file. """
+
+import sys
+import unittest
+
+import gyp.generator.ninja as ninja
+
+
+class TestPrefixesAndSuffixes(unittest.TestCase):
+ def test_BinaryNamesWindows(self):
+ # These cannot run on non-Windows as they require a VS installation to
+ # correctly handle variable expansion.
+ if sys.platform.startswith("win"):
+ writer = ninja.NinjaWriter(
+ "foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "win"
+ )
+ spec = {"target_name": "wee"}
+ self.assertTrue(
+ writer.ComputeOutputFileName(spec, "executable").endswith(".exe")
+ )
+ self.assertTrue(
+ writer.ComputeOutputFileName(spec, "shared_library").endswith(".dll")
+ )
+ self.assertTrue(
+ writer.ComputeOutputFileName(spec, "static_library").endswith(".lib")
+ )
+
+ def test_BinaryNamesLinux(self):
+ writer = ninja.NinjaWriter(
+ "foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "linux"
+ )
+ spec = {"target_name": "wee"}
+ self.assertTrue("." not in writer.ComputeOutputFileName(spec, "executable"))
+ self.assertTrue(
+ writer.ComputeOutputFileName(spec, "shared_library").startswith("lib")
+ )
+ self.assertTrue(
+ writer.ComputeOutputFileName(spec, "static_library").startswith("lib")
+ )
+ self.assertTrue(
+ writer.ComputeOutputFileName(spec, "shared_library").endswith(".so")
+ )
+ self.assertTrue(
+ writer.ComputeOutputFileName(spec, "static_library").endswith(".a")
+ )
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
new file mode 100644
index 0000000..2f4d17e
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
@@ -0,0 +1,1394 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import filecmp
+import gyp.common
+import gyp.xcodeproj_file
+import gyp.xcode_ninja
+import errno
+import os
+import sys
+import posixpath
+import re
+import shutil
+import subprocess
+import tempfile
+
+
+# Project files generated by this module will use _intermediate_var as a
+# custom Xcode setting whose value is a DerivedSources-like directory that's
+# project-specific and configuration-specific. The normal choice,
+# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
+# as it is likely that multiple targets within a single project file will want
+# to access the same set of generated files. The other option,
+# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
+# it is not configuration-specific. INTERMEDIATE_DIR is defined as
+# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
+_intermediate_var = "INTERMEDIATE_DIR"
+
+# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
+# targets that share the same BUILT_PRODUCTS_DIR.
+_shared_intermediate_var = "SHARED_INTERMEDIATE_DIR"
+
+_library_search_paths_var = "LIBRARY_SEARCH_PATHS"
+
+generator_default_variables = {
+ "EXECUTABLE_PREFIX": "",
+ "EXECUTABLE_SUFFIX": "",
+ "STATIC_LIB_PREFIX": "lib",
+ "SHARED_LIB_PREFIX": "lib",
+ "STATIC_LIB_SUFFIX": ".a",
+ "SHARED_LIB_SUFFIX": ".dylib",
+ # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
+ # It is specific to each build environment. It is only guaranteed to exist
+ # and be constant within the context of a project, corresponding to a single
+ # input file. Some build environments may allow their intermediate directory
+ # to be shared on a wider scale, but this is not guaranteed.
+ "INTERMEDIATE_DIR": "$(%s)" % _intermediate_var,
+ "OS": "mac",
+ "PRODUCT_DIR": "$(BUILT_PRODUCTS_DIR)",
+ "LIB_DIR": "$(BUILT_PRODUCTS_DIR)",
+ "RULE_INPUT_ROOT": "$(INPUT_FILE_BASE)",
+ "RULE_INPUT_EXT": "$(INPUT_FILE_SUFFIX)",
+ "RULE_INPUT_NAME": "$(INPUT_FILE_NAME)",
+ "RULE_INPUT_PATH": "$(INPUT_FILE_PATH)",
+ "RULE_INPUT_DIRNAME": "$(INPUT_FILE_DIRNAME)",
+ "SHARED_INTERMEDIATE_DIR": "$(%s)" % _shared_intermediate_var,
+ "CONFIGURATION_NAME": "$(CONFIGURATION)",
+}
+
+# The Xcode-specific sections that hold paths.
+generator_additional_path_sections = [
+ "mac_bundle_resources",
+ "mac_framework_headers",
+ "mac_framework_private_headers",
+ # 'mac_framework_dirs', input already handles _dirs endings.
+]
+
+# The Xcode-specific keys that exist on targets and aren't moved down to
+# configurations.
+generator_additional_non_configuration_keys = [
+ "ios_app_extension",
+ "ios_watch_app",
+ "ios_watchkit_extension",
+ "mac_bundle",
+ "mac_bundle_resources",
+ "mac_framework_headers",
+ "mac_framework_private_headers",
+ "mac_xctest_bundle",
+ "mac_xcuitest_bundle",
+ "xcode_create_dependents_test_runner",
+]
+
+# We want to let any rules apply to files that are resources also.
+generator_extra_sources_for_rules = [
+ "mac_bundle_resources",
+ "mac_framework_headers",
+ "mac_framework_private_headers",
+]
+
+generator_filelist_paths = None
+
+# Xcode's standard set of library directories, which don't need to be duplicated
+# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
+xcode_standard_library_dirs = frozenset(
+ ["$(SDKROOT)/usr/lib", "$(SDKROOT)/usr/local/lib"]
+)
+
+
+def CreateXCConfigurationList(configuration_names):
+ xccl = gyp.xcodeproj_file.XCConfigurationList({"buildConfigurations": []})
+ if len(configuration_names) == 0:
+ configuration_names = ["Default"]
+ for configuration_name in configuration_names:
+ xcbc = gyp.xcodeproj_file.XCBuildConfiguration({"name": configuration_name})
+ xccl.AppendProperty("buildConfigurations", xcbc)
+ xccl.SetProperty("defaultConfigurationName", configuration_names[0])
+ return xccl
+
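+ # For illustration only: CreateXCConfigurationList(["Debug", "Release"])
+ # returns an XCConfigurationList containing one XCBuildConfiguration per
+ # name, with "Debug" (the first name) as its defaultConfigurationName.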
+
+class XcodeProject:
+ def __init__(self, gyp_path, path, build_file_dict):
+ self.gyp_path = gyp_path
+ self.path = path
+ self.project = gyp.xcodeproj_file.PBXProject(path=path)
+ projectDirPath = gyp.common.RelativePath(
+ os.path.dirname(os.path.abspath(self.gyp_path)),
+ os.path.dirname(path) or ".",
+ )
+ self.project.SetProperty("projectDirPath", projectDirPath)
+ self.project_file = gyp.xcodeproj_file.XCProjectFile(
+ {"rootObject": self.project}
+ )
+ self.build_file_dict = build_file_dict
+
+ # TODO(mark): add destructor that cleans up self.path if created_dir is
+ # True and things didn't complete successfully. Or do something even
+ # better with "try"?
+ self.created_dir = False
+ try:
+ os.makedirs(self.path)
+ self.created_dir = True
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ def Finalize1(self, xcode_targets, serialize_all_tests):
+ # Collect a list of all of the build configuration names used by the
+ # various targets in the file. It is strongly advised that every target
+ # in a project (even across multiple project files) use the same set of
+ # configuration names.
+ configurations = []
+ for xct in self.project.GetProperty("targets"):
+ xccl = xct.GetProperty("buildConfigurationList")
+ xcbcs = xccl.GetProperty("buildConfigurations")
+ for xcbc in xcbcs:
+ name = xcbc.GetProperty("name")
+ if name not in configurations:
+ configurations.append(name)
+
+ # Replace the XCConfigurationList attached to the PBXProject object with
+ # a new one specifying all of the configuration names used by the various
+ # targets.
+ try:
+ xccl = CreateXCConfigurationList(configurations)
+ self.project.SetProperty("buildConfigurationList", xccl)
+ except Exception:
+ sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
+ raise
+
+ # The need for this setting is explained above where _intermediate_var is
+ # defined. The comments below about wanting to avoid project-wide build
+ # settings apply here too, but this needs to be set on a project-wide basis
+ # so that files relative to the _intermediate_var setting can be displayed
+ # properly in the Xcode UI.
+ #
+ # Note that for configuration-relative files such as anything relative to
+ # _intermediate_var, for the purposes of UI tree view display, Xcode will
+ # only resolve the configuration name once, when the project file is
+ # opened. If the active build configuration is changed, the project file
+ # must be closed and reopened if it is desired for the tree view to update.
+ # This is filed as Apple radar 6588391.
+ xccl.SetBuildSetting(
+ _intermediate_var, "$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)"
+ )
+ xccl.SetBuildSetting(
+ _shared_intermediate_var, "$(SYMROOT)/DerivedSources/$(CONFIGURATION)"
+ )
+
+ # Set user-specified project-wide build settings and config files. This
+ # is intended to be used very sparingly. Really, almost everything should
+ # go into target-specific build settings sections. The project-wide
+ # settings are only intended to be used in cases where Xcode attempts to
+ # resolve variable references in a project context as opposed to a target
+ # context, such as when resolving sourceTree references while building up
+ # the tree view for UI display.
+ # Any values set globally are applied to all configurations, then any
+ # per-configuration values are applied.
+ for xck, xcv in self.build_file_dict.get("xcode_settings", {}).items():
+ xccl.SetBuildSetting(xck, xcv)
+ if "xcode_config_file" in self.build_file_dict:
+ config_ref = self.project.AddOrGetFileInRootGroup(
+ self.build_file_dict["xcode_config_file"]
+ )
+ xccl.SetBaseConfiguration(config_ref)
+ build_file_configurations = self.build_file_dict.get("configurations", {})
+ if build_file_configurations:
+ for config_name in configurations:
+ build_file_configuration_named = build_file_configurations.get(
+ config_name, {}
+ )
+ if build_file_configuration_named:
+ xcc = xccl.ConfigurationNamed(config_name)
+ for xck, xcv in build_file_configuration_named.get(
+ "xcode_settings", {}
+ ).items():
+ xcc.SetBuildSetting(xck, xcv)
+ if "xcode_config_file" in build_file_configuration_named:
+ config_ref = self.project.AddOrGetFileInRootGroup(
+ build_file_configurations[config_name]["xcode_config_file"]
+ )
+ xcc.SetBaseConfiguration(config_ref)
+
+ # Sort the targets based on how they appeared in the input.
+ # TODO(mark): Like a lot of other things here, this assumes internal
+ # knowledge of PBXProject - in this case, of its "targets" property.
+
+ # ordinary_targets are ordinary targets that are already in the project
+ # file. run_test_targets are the targets that run unittests and should be
+ # used for the Run All Tests target. support_targets are the action/rule
+ # targets used by GYP file targets, just kept for the assert check.
+ ordinary_targets = []
+ run_test_targets = []
+ support_targets = []
+
+ # targets is the full list of targets in the project.
+ targets = []
+
+ # Does the project define its own "all" target?
+ has_custom_all = False
+
+ # targets_for_all is the list of ordinary_targets that should be listed
+ # in this project's "All" target. It includes each non-run-test target
+ # that does not have suppress_wildcard set.
+ targets_for_all = []
+
+ for target in self.build_file_dict["targets"]:
+ target_name = target["target_name"]
+ toolset = target["toolset"]
+ qualified_target = gyp.common.QualifiedTarget(
+ self.gyp_path, target_name, toolset
+ )
+ xcode_target = xcode_targets[qualified_target]
+ # Make sure that the target being added to the sorted list is already in
+ # the unsorted list.
+ assert xcode_target in self.project._properties["targets"]
+ targets.append(xcode_target)
+ ordinary_targets.append(xcode_target)
+ if xcode_target.support_target:
+ support_targets.append(xcode_target.support_target)
+ targets.append(xcode_target.support_target)
+
+ if not int(target.get("suppress_wildcard", False)):
+ targets_for_all.append(xcode_target)
+
+ if target_name.lower() == "all":
+ has_custom_all = True
+
+ # If this target has a 'run_as' attribute, add its target to the
+ # targets, and add it to the test targets.
+ if target.get("run_as"):
+ # Make a target to run something. It should have one
+ # dependency, the parent xcode target.
+ xccl = CreateXCConfigurationList(configurations)
+ run_target = gyp.xcodeproj_file.PBXAggregateTarget(
+ {
+ "name": "Run " + target_name,
+ "productName": xcode_target.GetProperty("productName"),
+ "buildConfigurationList": xccl,
+ },
+ parent=self.project,
+ )
+ run_target.AddDependency(xcode_target)
+
+ command = target["run_as"]
+ script = ""
+ if command.get("working_directory"):
+ script = (
+ script
+ + 'cd "%s"\n'
+ % gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ command.get("working_directory")
+ )
+ )
+
+ if command.get("environment"):
+ script = (
+ script
+ + "\n".join(
+ [
+ 'export %s="%s"'
+ % (
+ key,
+ gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ val
+ ),
+ )
+ for (key, val) in command.get("environment").items()
+ ]
+ )
+ + "\n"
+ )
+
+ # Some tests end up using sockets, files on disk, etc. and can get
+ # confused if more than one test runs at a time. The generator
+ # flag 'xcode_serialize_all_test_runs' controls whether all test
+ # runs are forced to be serial. It defaults to True. To get serial
+ # runs, this little bit of Python does the same as the Linux flock
+ # utility to make sure only one test runs at a time.
+ command_prefix = ""
+ if serialize_all_tests:
+ command_prefix = """python -c "import fcntl, subprocess, sys
+file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
+fcntl.flock(file.fileno(), fcntl.LOCK_EX)
+sys.exit(subprocess.call(sys.argv[1:]))" """
+
+ # If we were unable to exec for some reason, we want to exit
+ # with an error, and fix up variable references to be shell
+ # syntax instead of Xcode syntax.
+ script = (
+ script
+ + "exec "
+ + command_prefix
+ + "%s\nexit 1\n"
+ % gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ gyp.common.EncodePOSIXShellList(command.get("action"))
+ )
+ )
+
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase(
+ {"shellScript": script, "showEnvVarsInLog": 0}
+ )
+ run_target.AppendProperty("buildPhases", ssbp)
+
+ # Add the run target to the project file.
+ targets.append(run_target)
+ run_test_targets.append(run_target)
+ xcode_target.test_runner = run_target
+
+ # Make sure that the list of targets being replaced is the same length as
+ # the one replacing it, but allow for the added test runner targets.
+ assert len(self.project._properties["targets"]) == len(ordinary_targets) + len(
+ support_targets
+ )
+
+ self.project._properties["targets"] = targets
+
+ # Get rid of unnecessary levels of depth in groups like the Source group.
+ self.project.RootGroupsTakeOverOnlyChildren(True)
+
+ # Sort the groups nicely. Do this after sorting the targets, because the
+ # Products group is sorted based on the order of the targets.
+ self.project.SortGroups()
+
+ # Create an "All" target if there's more than one target in this project
+ # file and the project didn't define its own "All" target. Put a generated
+ # "All" target first so that people opening up the project for the first
+ # time will build everything by default.
+ if len(targets_for_all) > 1 and not has_custom_all:
+ xccl = CreateXCConfigurationList(configurations)
+ all_target = gyp.xcodeproj_file.PBXAggregateTarget(
+ {"buildConfigurationList": xccl, "name": "All"}, parent=self.project
+ )
+
+ for target in targets_for_all:
+ all_target.AddDependency(target)
+
+ # TODO(mark): This is evil because it relies on internal knowledge of
+ # PBXProject._properties. It's important to get the "All" target first,
+ # though.
+ self.project._properties["targets"].insert(0, all_target)
+
+ # The same, but for run_test_targets.
+ if len(run_test_targets) > 1:
+ xccl = CreateXCConfigurationList(configurations)
+ run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
+ {"buildConfigurationList": xccl, "name": "Run All Tests"},
+ parent=self.project,
+ )
+ for run_test_target in run_test_targets:
+ run_all_tests_target.AddDependency(run_test_target)
+
+ # Insert after the "All" target, which must exist if there is more than
+ # one run_test_target.
+ self.project._properties["targets"].insert(1, run_all_tests_target)
+
+ def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
+ # Finalize2 needs to happen in a separate step because the process of
+ # updating references to other projects depends on the ordering of targets
+ # within remote project files. Finalize1 is responsible for sorting duty,
+ # and once all project files are sorted, Finalize2 can come in and update
+ # these references.
+
+ # To support making a "test runner" target that will run all the tests
+ # that are direct dependents of any given target, we look for
+ # xcode_create_dependents_test_runner being set on an Aggregate target,
+ # and generate a second target that will run the tests runners found under
+ # the marked target.
+ for bf_tgt in self.build_file_dict["targets"]:
+ if int(bf_tgt.get("xcode_create_dependents_test_runner", 0)):
+ tgt_name = bf_tgt["target_name"]
+ toolset = bf_tgt["toolset"]
+ qualified_target = gyp.common.QualifiedTarget(
+ self.gyp_path, tgt_name, toolset
+ )
+ xcode_target = xcode_targets[qualified_target]
+ if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
+ # Collect all the run test targets.
+ all_run_tests = []
+ pbxtds = xcode_target.GetProperty("dependencies")
+ for pbxtd in pbxtds:
+ pbxcip = pbxtd.GetProperty("targetProxy")
+ dependency_xct = pbxcip.GetProperty("remoteGlobalIDString")
+ if hasattr(dependency_xct, "test_runner"):
+ all_run_tests.append(dependency_xct.test_runner)
+
+ # Directly depend on all the runners as they depend on the target
+ # that builds them.
+ if len(all_run_tests) > 0:
+ run_all_target = gyp.xcodeproj_file.PBXAggregateTarget(
+ {
+ "name": "Run %s Tests" % tgt_name,
+ "productName": tgt_name,
+ },
+ parent=self.project,
+ )
+ for run_test_target in all_run_tests:
+ run_all_target.AddDependency(run_test_target)
+
+ # Insert the test runner after the related target.
+ idx = self.project._properties["targets"].index(xcode_target)
+ self.project._properties["targets"].insert(
+ idx + 1, run_all_target
+ )
+
+ # Update all references to other projects, to make sure that the lists of
+ # remote products are complete. Otherwise, Xcode will fill them in when
+ # it opens the project file, which will result in unnecessary diffs.
+ # TODO(mark): This is evil because it relies on internal knowledge of
+ # PBXProject._other_pbxprojects.
+ for other_pbxproject in self.project._other_pbxprojects.keys():
+ self.project.AddOrGetProjectReference(other_pbxproject)
+
+ self.project.SortRemoteProductReferences()
+
+ # Give everything an ID.
+ self.project_file.ComputeIDs()
+
+ # Make sure that no two objects in the project file have the same ID. If
+ # multiple objects wind up with the same ID, upon loading the file, Xcode
+ # will only recognize one object (the last one in the file?) and the
+ # results are unpredictable.
+ self.project_file.EnsureNoIDCollisions()
+
+ def Write(self):
+ # Write the project file to a temporary location first. Xcode watches for
+ # changes to the project file and presents a UI sheet offering to reload
+ # the project when it does change. However, in some cases, especially when
+ # multiple projects are open or when Xcode is busy, things don't work so
+ # seamlessly. Sometimes, Xcode is able to detect that a project file has
+ # changed but can't unload it because something else is referencing it.
+ # To mitigate this problem, and to avoid even having Xcode present the UI
+ # sheet when an open project is rewritten for inconsequential changes, the
+ # project file is written to a temporary file in the xcodeproj directory
+ # first. The new temporary file is then compared to the existing project
+ # file, if any. If they differ, the new file replaces the old; otherwise,
+ # the new project file is simply deleted. Xcode properly detects a file
+ # being renamed over an open project file as a change and so it remains
+ # able to present the "project file changed" sheet under this system.
+ # Writing to a temporary file first also avoids the possible problem of
+ # Xcode rereading an incomplete project file.
+ (output_fd, new_pbxproj_path) = tempfile.mkstemp(
+ suffix=".tmp", prefix="project.pbxproj.gyp.", dir=self.path
+ )
+
+ try:
+ output_file = os.fdopen(output_fd, "w")
+
+ self.project_file.Print(output_file)
+ output_file.close()
+
+ pbxproj_path = os.path.join(self.path, "project.pbxproj")
+
+ same = False
+ try:
+ same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ if same:
+ # The new file is identical to the old one, just get rid of the new
+ # one.
+ os.unlink(new_pbxproj_path)
+ else:
+ # The new file is different from the old one, or there is no old one.
+ # Rename the new file to the permanent name.
+ #
+ # tempfile.mkstemp uses an overly restrictive mode, resulting in a
+ # file that can only be read by the owner, regardless of the umask.
+ # There's no reason to not respect the umask here, which means that
+ # an extra hoop is required to fetch it and reset the new file's mode.
+ #
+ # No way to get the umask without setting a new one? Set a safe one
+ # and then set it back to the old value.
+ umask = os.umask(0o77)
+ os.umask(umask)
+
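+ # For example, with a umask of 0o022 the mode below works out to
+ # 0o666 & ~0o022 == 0o644 (rw-r--r--).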
+ os.chmod(new_pbxproj_path, 0o666 & ~umask)
+ os.rename(new_pbxproj_path, pbxproj_path)
+
+ except Exception:
+ # Don't leave turds behind. In fact, if this code was responsible for
+ # creating the xcodeproj directory, get rid of that too.
+ os.unlink(new_pbxproj_path)
+ if self.created_dir:
+ shutil.rmtree(self.path, True)
+ raise
+
+
+def AddSourceToTarget(source, type, pbxp, xct):
+ # TODO(mark): Perhaps source_extensions and library_extensions can be made a
+ # little bit fancier.
+ source_extensions = ["c", "cc", "cpp", "cxx", "m", "mm", "s", "swift"]
+
+ # .o is conceptually more of a "source" than a "library," but Xcode thinks
+ # of "sources" as things to compile and "libraries" (or "frameworks") as
+ # things to link with. Adding an object file to an Xcode target's frameworks
+ # phase works properly.
+ library_extensions = ["a", "dylib", "framework", "o"]
+
+ basename = posixpath.basename(source)
+ (root, ext) = posixpath.splitext(basename)
+ if ext:
+ ext = ext[1:].lower()
+
+ if ext in source_extensions and type != "none":
+ xct.SourcesPhase().AddFile(source)
+ elif ext in library_extensions and type != "none":
+ xct.FrameworksPhase().AddFile(source)
+ else:
+ # Files that aren't added to a sources or frameworks build phase can still
+ # go into the project file, just not as part of a build phase.
+ pbxp.AddOrGetFileInRootGroup(source)
+
+
+def AddResourceToTarget(resource, pbxp, xct):
+ # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
+ # where it's used.
+ xct.ResourcesPhase().AddFile(resource)
+
+
+def AddHeaderToTarget(header, pbxp, xct, is_public):
+ # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
+ # where it's used.
+ settings = "{ATTRIBUTES = (%s, ); }" % ("Private", "Public")[is_public]
+ xct.HeadersPhase().AddFile(header, settings)
+
+
+_xcode_variable_re = re.compile(r"(\$\((.*?)\))")
+
+
+def ExpandXcodeVariables(string, expansions):
+ """Expands Xcode-style $(VARIABLES) in string per the expansions dict.
+
+ In some rare cases, it is appropriate to expand Xcode variables when a
+ project file is generated. For any substring $(VAR) in string, if VAR is a
+ key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
+ Any $(VAR) substring in string for which VAR is not a key in the expansions
+ dict will remain in the returned string.
+ """
+
+ matches = _xcode_variable_re.findall(string)
+ # findall() returns a (possibly empty) list, never None.
+ if not matches:
+ return string
+
+ matches.reverse()
+ for match in matches:
+ (to_replace, variable) = match
+ if variable not in expansions:
+ continue
+
+ replacement = expansions[variable]
+ string = re.sub(re.escape(to_replace), replacement, string)
+
+ return string
+
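+ # For illustration only (hypothetical values):
+ #   ExpandXcodeVariables("$(INPUT_FILE_BASE).cc", {"INPUT_FILE_BASE": "foo"})
+ # returns "foo.cc"; an unknown variable such as $(SRCROOT) would be left
+ # untouched in the result.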
+
+_xcode_define_re = re.compile(r"([\\\"\' ])")
+
+
+def EscapeXcodeDefine(s):
+ """We must escape the defines that we give to XCode so that it knows not to
+ split on spaces and to respect backslash and quote literals. However, we
+ must not quote the define, or Xcode will incorrectly interpret variables
+ especially $(inherited)."""
+ return re.sub(_xcode_define_re, r"\\\1", s)
+
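+ # For illustration only: EscapeXcodeDefine('FOO="a b"') returns
+ # 'FOO=\"a\ b\"': each quote and space is prefixed with a backslash
+ # while the define itself stays unquoted.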
+
+def PerformBuild(data, configurations, params):
+ options = params["options"]
+
+ for build_file, build_file_dict in data.items():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != ".gyp":
+ continue
+ xcodeproj_path = build_file_root + options.suffix + ".xcodeproj"
+ if options.generator_output:
+ xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
+
+ for config in configurations:
+ arguments = ["xcodebuild", "-project", xcodeproj_path]
+ arguments += ["-configuration", config]
+ print(f"Building [{config}]: {arguments}")
+ subprocess.check_call(arguments)
+
+
+def CalculateGeneratorInputInfo(params):
+ toplevel = params["options"].toplevel_dir
+ if params.get("flavor") == "ninja":
+ generator_dir = os.path.relpath(params["options"].generator_output or ".")
+ output_dir = params.get("generator_flags", {}).get("output_dir", "out")
+ output_dir = os.path.normpath(os.path.join(generator_dir, output_dir))
+ qualified_out_dir = os.path.normpath(
+ os.path.join(toplevel, output_dir, "gypfiles-xcode-ninja")
+ )
+ else:
+ output_dir = os.path.normpath(os.path.join(toplevel, "xcodebuild"))
+ qualified_out_dir = os.path.normpath(
+ os.path.join(toplevel, output_dir, "gypfiles")
+ )
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ "toplevel": toplevel,
+ "qualified_out_dir": qualified_out_dir,
+ }
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ # Optionally configure each spec to use ninja as the external builder.
+ ninja_wrapper = params.get("flavor") == "ninja"
+ if ninja_wrapper:
+ (target_list, target_dicts, data) = gyp.xcode_ninja.CreateWrapper(
+ target_list, target_dicts, data, params
+ )
+
+ options = params["options"]
+ generator_flags = params.get("generator_flags", {})
+ parallel_builds = generator_flags.get("xcode_parallel_builds", True)
+ serialize_all_tests = generator_flags.get("xcode_serialize_all_test_runs", True)
+ upgrade_check_project_version = generator_flags.get(
+ "xcode_upgrade_check_project_version", None
+ )
+
+ # Format upgrade_check_project_version with leading zeros as needed.
+ if upgrade_check_project_version:
+ upgrade_check_project_version = str(upgrade_check_project_version)
+ while len(upgrade_check_project_version) < 4:
+ upgrade_check_project_version = "0" + upgrade_check_project_version
+
+ skip_excluded_files = not generator_flags.get("xcode_list_excluded_files", True)
+ xcode_projects = {}
+ for build_file, build_file_dict in data.items():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != ".gyp":
+ continue
+ xcodeproj_path = build_file_root + options.suffix + ".xcodeproj"
+ if options.generator_output:
+ xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
+ xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
+ xcode_projects[build_file] = xcp
+ pbxp = xcp.project
+
+ # Set project-level attributes from multiple options
+ project_attributes = {}
+ if parallel_builds:
+ project_attributes["BuildIndependentTargetsInParallel"] = "YES"
+ if upgrade_check_project_version:
+ project_attributes["LastUpgradeCheck"] = upgrade_check_project_version
+ project_attributes[
+ "LastTestingUpgradeCheck"
+ ] = upgrade_check_project_version
+ project_attributes["LastSwiftUpdateCheck"] = upgrade_check_project_version
+ pbxp.SetProperty("attributes", project_attributes)
+
+ # Add gyp/gypi files to project
+ if not generator_flags.get("standalone"):
+ main_group = pbxp.GetProperty("mainGroup")
+ build_group = gyp.xcodeproj_file.PBXGroup({"name": "Build"})
+ main_group.AppendChild(build_group)
+ for included_file in build_file_dict["included_files"]:
+ build_group.AddOrGetFileByPath(included_file, False)
+
+ xcode_targets = {}
+ xcode_target_to_target_dict = {}
+ for qualified_target in target_list:
+ [build_file, target_name, toolset] = gyp.common.ParseQualifiedTarget(
+ qualified_target
+ )
+
+ spec = target_dicts[qualified_target]
+ if spec["toolset"] != "target":
+ raise Exception(
+ "Multiple toolsets not supported in xcode build (target %s)"
+ % qualified_target
+ )
+ configuration_names = [spec["default_configuration"]]
+ for configuration_name in sorted(spec["configurations"].keys()):
+ if configuration_name not in configuration_names:
+ configuration_names.append(configuration_name)
+ xcp = xcode_projects[build_file]
+ pbxp = xcp.project
+
+ # Set up the configurations for the target according to the list of names
+ # supplied.
+ xccl = CreateXCConfigurationList(configuration_names)
+
+ # Create an XCTarget subclass object for the target. The type with
+ # "+bundle" appended will be used if the target has "mac_bundle" set.
+ # loadable_modules not in a mac_bundle are mapped to
+ # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
+ # to create a single-file mh_bundle.
+ _types = {
+ "executable": "com.apple.product-type.tool",
+ "loadable_module": "com.googlecode.gyp.xcode.bundle",
+ "shared_library": "com.apple.product-type.library.dynamic",
+ "static_library": "com.apple.product-type.library.static",
+ "mac_kernel_extension": "com.apple.product-type.kernel-extension",
+ "executable+bundle": "com.apple.product-type.application",
+ "loadable_module+bundle": "com.apple.product-type.bundle",
+ "loadable_module+xctest": "com.apple.product-type.bundle.unit-test",
+ "loadable_module+xcuitest": "com.apple.product-type.bundle.ui-testing",
+ "shared_library+bundle": "com.apple.product-type.framework",
+ "executable+extension+bundle": "com.apple.product-type.app-extension",
+ "executable+watch+extension+bundle":
+ "com.apple.product-type.watchkit-extension",
+ "executable+watch+bundle": "com.apple.product-type.application.watchapp",
+ "mac_kernel_extension+bundle": "com.apple.product-type.kernel-extension",
+ }
+
+ target_properties = {
+ "buildConfigurationList": xccl,
+ "name": target_name,
+ }
+
+ type = spec["type"]
+ is_xctest = int(spec.get("mac_xctest_bundle", 0))
+ is_xcuitest = int(spec.get("mac_xcuitest_bundle", 0))
+ is_bundle = int(spec.get("mac_bundle", 0)) or is_xctest
+ is_app_extension = int(spec.get("ios_app_extension", 0))
+ is_watchkit_extension = int(spec.get("ios_watchkit_extension", 0))
+ is_watch_app = int(spec.get("ios_watch_app", 0))
+ if type != "none":
+ type_bundle_key = type
+ if is_xcuitest:
+ type_bundle_key += "+xcuitest"
+ assert type == "loadable_module", (
+ "mac_xcuitest_bundle targets must have type loadable_module "
+ "(target %s)" % target_name
+ )
+ elif is_xctest:
+ type_bundle_key += "+xctest"
+ assert type == "loadable_module", (
+ "mac_xctest_bundle targets must have type loadable_module "
+ "(target %s)" % target_name
+ )
+ elif is_app_extension:
+ assert is_bundle, (
+ "ios_app_extension flag requires mac_bundle "
+ "(target %s)" % target_name
+ )
+ type_bundle_key += "+extension+bundle"
+ elif is_watchkit_extension:
+ assert is_bundle, (
+ "ios_watchkit_extension flag requires mac_bundle "
+ "(target %s)" % target_name
+ )
+ type_bundle_key += "+watch+extension+bundle"
+ elif is_watch_app:
+ assert is_bundle, (
+ "ios_watch_app flag requires mac_bundle "
+ "(target %s)" % target_name
+ )
+ type_bundle_key += "+watch+bundle"
+ elif is_bundle:
+ type_bundle_key += "+bundle"
+
+ xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
+ try:
+ target_properties["productType"] = _types[type_bundle_key]
+ except KeyError as e:
+ gyp.common.ExceptionAppend(
+ e,
+ "-- unknown product type while " "writing target %s" % target_name,
+ )
+ raise
+ else:
+ xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
+ assert not is_bundle, (
+ 'mac_bundle targets cannot have type none (target "%s")' % target_name
+ )
+ assert not is_xcuitest, (
+ 'mac_xcuitest_bundle targets cannot have type none (target "%s")'
+ % target_name
+ )
+ assert not is_xctest, (
+ 'mac_xctest_bundle targets cannot have type none (target "%s")'
+ % target_name
+ )
+
+ target_product_name = spec.get("product_name")
+ if target_product_name is not None:
+ target_properties["productName"] = target_product_name
+
+ xct = xctarget_type(
+ target_properties,
+ parent=pbxp,
+ force_outdir=spec.get("product_dir"),
+ force_prefix=spec.get("product_prefix"),
+ force_extension=spec.get("product_extension"),
+ )
+ pbxp.AppendProperty("targets", xct)
+ xcode_targets[qualified_target] = xct
+ xcode_target_to_target_dict[xct] = spec
+
+ spec_actions = spec.get("actions", [])
+ spec_rules = spec.get("rules", [])
+
+ # Xcode has some "issues" with checking dependencies for the "Compile
+ # sources" step with any source files/headers generated by actions/rules.
+ # To work around this, if a target is building anything directly (not
+ # type "none"), then a second target is used to run the GYP actions/rules
+ # and is made a dependency of this target. This way the work is done
+ # before the dependency checks for what should be recompiled.
+ support_xct = None
+ # The Xcode "issues" don't affect xcode-ninja builds, since the dependency
+ # logic all happens in ninja. Don't bother creating the extra targets in
+ # that case.
+ if type != "none" and (spec_actions or spec_rules) and not ninja_wrapper:
+ support_xccl = CreateXCConfigurationList(configuration_names)
+ support_target_suffix = generator_flags.get(
+ "support_target_suffix", " Support"
+ )
+ support_target_properties = {
+ "buildConfigurationList": support_xccl,
+ "name": target_name + support_target_suffix,
+ }
+ if target_product_name:
+ support_target_properties["productName"] = (
+ target_product_name + " Support"
+ )
+ support_xct = gyp.xcodeproj_file.PBXAggregateTarget(
+ support_target_properties, parent=pbxp
+ )
+ pbxp.AppendProperty("targets", support_xct)
+ xct.AddDependency(support_xct)
+ # Hang the support target off the main target so it can be tested/found
+ # by the generator during Finalize.
+ xct.support_target = support_xct
+
+ prebuild_index = 0
+
+ # Add custom shell script phases for "actions" sections.
+ for action in spec_actions:
+ # There's no need to write anything into the script to ensure that the
+ # output directories already exist, because Xcode will look at the
+ # declared outputs and automatically ensure that they exist for us.
+
+ # Do we have a message to print when this action runs?
+ message = action.get("message")
+ if message:
+ message = "echo note: " + gyp.common.EncodePOSIXShellArgument(message)
+ else:
+ message = ""
+
+ # Turn the list into a string that can be passed to a shell.
+ action_string = gyp.common.EncodePOSIXShellList(action["action"])
+
+ # Convert Xcode-type variable references to sh-compatible environment
+ # variable references.
+ message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
+ action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ action_string
+ )
+
+ script = ""
+ # Include the optional message
+ if message_sh:
+ script += message_sh + "\n"
+ # Be sure the script runs in exec, and that if exec fails, the script
+ # exits signalling an error.
+ script += "exec " + action_string_sh + "\nexit 1\n"
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase(
+ {
+ "inputPaths": action["inputs"],
+ "name": 'Action "' + action["action_name"] + '"',
+ "outputPaths": action["outputs"],
+ "shellScript": script,
+ "showEnvVarsInLog": 0,
+ }
+ )
+
+ if support_xct:
+ support_xct.AppendProperty("buildPhases", ssbp)
+ else:
+ # TODO(mark): this assumes too much knowledge of the internals of
+ # xcodeproj_file; some of these smarts should move into xcodeproj_file
+ # itself.
+ xct._properties["buildPhases"].insert(prebuild_index, ssbp)
+ prebuild_index = prebuild_index + 1
+
+ # TODO(mark): Should verify that at most one of these is specified.
+ if int(action.get("process_outputs_as_sources", False)):
+ for output in action["outputs"]:
+ AddSourceToTarget(output, type, pbxp, xct)
+
+ if int(action.get("process_outputs_as_mac_bundle_resources", False)):
+ for output in action["outputs"]:
+ AddResourceToTarget(output, pbxp, xct)
+
+ # tgt_mac_bundle_resources holds the list of bundle resources so
+ # the rule processing can check against it.
+ if is_bundle:
+ tgt_mac_bundle_resources = spec.get("mac_bundle_resources", [])
+ else:
+ tgt_mac_bundle_resources = []
+
+ # Add custom shell script phases driving "make" for "rules" sections.
+ #
+ # Xcode's built-in rule support is almost powerful enough to use directly,
+ # but there are a few significant deficiencies that render them unusable.
+ # There are workarounds for some of its inadequacies, but in aggregate,
+ # the workarounds added complexity to the generator, and some workarounds
+ # actually require input files to be crafted more carefully than I'd like.
+ # Consequently, until Xcode rules are made more capable, "rules" input
+ # sections will be handled in Xcode output by shell script build phases
+ # performed prior to the compilation phase.
+ #
+ # The following problems with Xcode rules were found. The numbers are
+ # Apple radar IDs. I hope that these shortcomings are addressed, I really
+ # liked having the rules handled directly in Xcode during the period that
+ # I was prototyping this.
+ #
+ # 6588600 Xcode compiles custom script rule outputs too soon, compilation
+ # fails. This occurs when rule outputs from distinct inputs are
+ # interdependent. The only workaround is to put rules and their
+ # inputs in a separate target from the one that compiles the rule
+ # outputs. This requires input file cooperation and it means that
+ # process_outputs_as_sources is unusable.
+ # 6584932 Need to declare that custom rule outputs should be excluded from
+ # compilation. A possible workaround is to lie to Xcode about a
+ # rule's output, giving it a dummy file it doesn't know how to
+ # compile. The rule action script would need to touch the dummy.
+ # 6584839 I need a way to declare additional inputs to a custom rule.
+ # A possible workaround is a shell script phase prior to
+ # compilation that touches a rule's primary input files if any
+ # would-be additional inputs are newer than the output. Modifying
+ # the source tree - even just modification times - feels dirty.
+ # 6564240 Xcode "custom script" build rules always dump all environment
+ # variables. This is a low-priority problem and is not a
+ # show-stopper.
+ rules_by_ext = {}
+ for rule in spec_rules:
+ rules_by_ext[rule["extension"]] = rule
+
+ # First, some definitions:
+ #
+ # A "rule source" is a file that was listed in a target's "sources"
+ # list and will have a rule applied to it on the basis of matching the
+ # rule's "extensions" attribute. Rule sources are direct inputs to
+ # rules.
+ #
+ # Rule definitions may specify additional inputs in their "inputs"
+ # attribute. These additional inputs are used for dependency tracking
+ # purposes.
+ #
+ # A "concrete output" is a rule output with input-dependent variables
+ # resolved. For example, given a rule with:
+ # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
+ # if the target's "sources" list contained "one.ext" and "two.ext",
+ # the "concrete output" for rule input "two.ext" would be "two.cc". If
+ # a rule specifies multiple outputs, each input file that the rule is
+ # applied to will have the same number of concrete outputs.
+ #
+ # If any concrete outputs are outdated or missing relative to their
+ # corresponding rule_source or to any specified additional input, the
+ # rule action must be performed to generate the concrete outputs.
+
+ # concrete_outputs_by_rule_source will have an item at the same index
+ # as the rule['rule_sources'] that it corresponds to. Each item is a
+ # list of all of the concrete outputs for the rule_source.
+ concrete_outputs_by_rule_source = []
+
+ # concrete_outputs_all is a flat list of all concrete outputs that this
+ # rule is able to produce, given the known set of input files
+ # (rule_sources) that apply to it.
+ concrete_outputs_all = []
+
+ # messages & actions are keyed by the same indices as rule['rule_sources']
+ # and concrete_outputs_by_rule_source. They contain the message and
+ # action to perform after resolving input-dependent variables. The
+ # message is optional, in which case None is stored for each rule source.
+ messages = []
+ actions = []
+
+ for rule_source in rule.get("rule_sources", []):
+ rule_source_dirname, rule_source_basename = posixpath.split(rule_source)
+ (rule_source_root, rule_source_ext) = posixpath.splitext(
+ rule_source_basename
+ )
+
+ # These are the same variable names that Xcode uses for its own native
+ # rule support. Because Xcode's rule engine is not being used, they
+ # need to be expanded as they are written to the makefile.
+ rule_input_dict = {
+ "INPUT_FILE_BASE": rule_source_root,
+ "INPUT_FILE_SUFFIX": rule_source_ext,
+ "INPUT_FILE_NAME": rule_source_basename,
+ "INPUT_FILE_PATH": rule_source,
+ "INPUT_FILE_DIRNAME": rule_source_dirname,
+ }
+
+ concrete_outputs_for_this_rule_source = []
+ for output in rule.get("outputs", []):
+ # Fortunately, Xcode and make both use $(VAR) format for their
+ # variables, so the expansion is the only transformation necessary.
+ # Any remaining $(VAR)-type variables in the string can be given
+ # directly to make, which will pick up the correct settings from
+ # what Xcode puts into the environment.
+ concrete_output = ExpandXcodeVariables(output, rule_input_dict)
+ concrete_outputs_for_this_rule_source.append(concrete_output)
+
+ # Add all concrete outputs to the project.
+ pbxp.AddOrGetFileInRootGroup(concrete_output)
+
+ concrete_outputs_by_rule_source.append(
+ concrete_outputs_for_this_rule_source
+ )
+ concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
+
+ # TODO(mark): Should verify that at most one of these is specified.
+ if int(rule.get("process_outputs_as_sources", False)):
+ for output in concrete_outputs_for_this_rule_source:
+ AddSourceToTarget(output, type, pbxp, xct)
+
+ # If the file came from the mac_bundle_resources list or if the rule
+ # is marked to process outputs as bundle resource, do so.
+ was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
+ if was_mac_bundle_resource or int(
+ rule.get("process_outputs_as_mac_bundle_resources", False)
+ ):
+ for output in concrete_outputs_for_this_rule_source:
+ AddResourceToTarget(output, pbxp, xct)
+
+ # Do we have a message to print when this rule runs?
+ message = rule.get("message")
+ if message:
+ message = gyp.common.EncodePOSIXShellArgument(message)
+ message = ExpandXcodeVariables(message, rule_input_dict)
+ messages.append(message)
+
+ # Turn the list into a string that can be passed to a shell.
+ action_string = gyp.common.EncodePOSIXShellList(rule["action"])
+
+ action = ExpandXcodeVariables(action_string, rule_input_dict)
+ actions.append(action)
+
+ if len(concrete_outputs_all) > 0:
+ # TODO(mark): There's a possibility for collision here. Consider
+ # target "t" rule "A_r" and target "t_A" rule "r".
+ makefile_name = "%s.make" % re.sub(
+ "[^a-zA-Z0-9_]", "_", "{}_{}".format(target_name, rule["rule_name"])
+ )
+ makefile_path = os.path.join(
+ xcode_projects[build_file].path, makefile_name
+ )
+ # TODO(mark): try/close? Write to a temporary file and swap it only
+ # if it's got changes?
+ makefile = open(makefile_path, "w")
+
+ # make will build the first target in the makefile by default. By
+ # convention, it's called "all". List all (or at least one)
+ # concrete output for each rule source as a prerequisite of the "all"
+ # target.
+ makefile.write("all: \\\n")
+ for concrete_output_index, concrete_output_by_rule_source in enumerate(
+ concrete_outputs_by_rule_source
+ ):
+ # Only list the first (index [0]) concrete output of each input
+ # in the "all" target. Otherwise, a parallel make (-j > 1) would
+ # attempt to process each input multiple times simultaneously.
+ # Otherwise, "all" could just contain the entire list of
+ # concrete_outputs_all.
+ concrete_output = concrete_output_by_rule_source[0]
+ if (
+ concrete_output_index
+ == len(concrete_outputs_by_rule_source) - 1
+ ):
+ eol = ""
+ else:
+ eol = " \\"
+ makefile.write(f" {concrete_output}{eol}\n")
+
+ for (rule_source, concrete_outputs, message, action) in zip(
+ rule["rule_sources"],
+ concrete_outputs_by_rule_source,
+ messages,
+ actions,
+ ):
+ makefile.write("\n")
+
+ # Add a rule that declares it can build each concrete output of a
+ # rule source. Collect the names of the directories that are
+ # required.
+ concrete_output_dirs = []
+ for concrete_output_index, concrete_output in enumerate(
+ concrete_outputs
+ ):
+ if concrete_output_index == 0:
+ bol = ""
+ else:
+ bol = " "
+ makefile.write(f"{bol}{concrete_output} \\\n")
+
+ concrete_output_dir = posixpath.dirname(concrete_output)
+ if (
+ concrete_output_dir
+ and concrete_output_dir not in concrete_output_dirs
+ ):
+ concrete_output_dirs.append(concrete_output_dir)
+
+ makefile.write(" : \\\n")
+
+ # The prerequisites for this rule are the rule source itself and
+ # the set of additional rule inputs, if any.
+ prerequisites = [rule_source]
+ prerequisites.extend(rule.get("inputs", []))
+ for prerequisite_index, prerequisite in enumerate(prerequisites):
+ if prerequisite_index == len(prerequisites) - 1:
+ eol = ""
+ else:
+ eol = " \\"
+ makefile.write(f" {prerequisite}{eol}\n")
+
+ # Make sure that output directories exist before executing the rule
+ # action.
+ if len(concrete_output_dirs) > 0:
+ makefile.write(
+ '\t@mkdir -p "%s"\n' % '" "'.join(concrete_output_dirs)
+ )
+
+ # The rule message and action have already had
+ # the necessary variable substitutions performed.
+ if message:
+ # Mark it with note: so Xcode picks it up in build output.
+ makefile.write("\t@echo note: %s\n" % message)
+ makefile.write("\t%s\n" % action)
+
+ makefile.close()
+
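To make the emitted file concrete: for a hypothetical target "t" with a rule named "lex", one rule source parser.l, and a single declared output, the code above would produce a t_lex.make of roughly this shape (recipe lines are tab-indented; the flex command stands in for the expanded rule action and is invented for illustration):

    all: \
        $(SHARED_INTERMEDIATE_DIR)/parser.c

    $(SHARED_INTERMEDIATE_DIR)/parser.c \
     : \
        parser.l
    	@mkdir -p "$(SHARED_INTERMEDIATE_DIR)"
    	@echo note: Lexing parser.l
    	flex -o "$(SHARED_INTERMEDIATE_DIR)/parser.c" parser.l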
+ # It might be nice to ensure that needed output directories exist
+ # here rather than in each target in the Makefile, but that wouldn't
+ # work if there ever was a concrete output that had an input-dependent
+ # variable anywhere other than in the leaf position.
+
+ # Don't declare any inputPaths or outputPaths. If they're present,
+ # Xcode will provide a slight optimization by only running the script
+ # phase if any output is missing or outdated relative to any input.
+ # Unfortunately, it will also assume that all outputs are touched by
+ # the script, and if the outputs serve as files in a compilation
+ # phase, they will be unconditionally rebuilt. Since make might not
+ # rebuild everything that could be declared here as an output, this
+ # extra compilation activity is unnecessary. With inputPaths and
+ # outputPaths not supplied, make will always be called, but it knows
+ # enough to not do anything when everything is up-to-date.
+
+ # To help speed things up, pass -j COUNT to make so it does some work
+ # in parallel. Don't use ncpus because Xcode will build ncpus targets
+ # in parallel and if each target happens to have a rules step, there
+ # would be ncpus^2 things going. With a machine that has 2 quad-core
+ # Xeons, a build can quickly run out of processes based on
+ # scheduling/other tasks, and randomly failing builds are no good.
+ script = (
+ """JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
+if [ "${JOB_COUNT}" -gt 4 ]; then
+ JOB_COUNT=4
+fi
+exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
+exit 1
+"""
+ % makefile_name
+ )
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase(
+ {
+ "name": 'Rule "' + rule["rule_name"] + '"',
+ "shellScript": script,
+ "showEnvVarsInLog": 0,
+ }
+ )
+
+ if support_xct:
+ support_xct.AppendProperty("buildPhases", ssbp)
+ else:
+ # TODO(mark): this assumes too much knowledge of the internals of
+ # xcodeproj_file; some of these smarts should move
+ # into xcodeproj_file itself.
+ xct._properties["buildPhases"].insert(prebuild_index, ssbp)
+ prebuild_index = prebuild_index + 1
+
+ # Extra rule inputs also go into the project file. Concrete outputs were
+ # already added when they were computed.
+ groups = ["inputs", "inputs_excluded"]
+ if skip_excluded_files:
+ groups = [x for x in groups if not x.endswith("_excluded")]
+ for group in groups:
+ for item in rule.get(group, []):
+ pbxp.AddOrGetFileInRootGroup(item)
+
+ # Add "sources".
+ for source in spec.get("sources", []):
+ (source_root, source_extension) = posixpath.splitext(source)
+ if source_extension[1:] not in rules_by_ext:
+ # AddSourceToTarget will add the file to a root group if it's not
+ # already there.
+ AddSourceToTarget(source, type, pbxp, xct)
+ else:
+ pbxp.AddOrGetFileInRootGroup(source)
+
+ # Add "mac_bundle_resources" and "mac_framework_private_headers" if
+ # it's a bundle of any type.
+ if is_bundle:
+ for resource in tgt_mac_bundle_resources:
+ (resource_root, resource_extension) = posixpath.splitext(resource)
+ if resource_extension[1:] not in rules_by_ext:
+ AddResourceToTarget(resource, pbxp, xct)
+ else:
+ pbxp.AddOrGetFileInRootGroup(resource)
+
+ for header in spec.get("mac_framework_private_headers", []):
+ AddHeaderToTarget(header, pbxp, xct, False)
+
+ # Add "mac_framework_headers". These can be valid for both frameworks
+ # and static libraries.
+ if is_bundle or type == "static_library":
+ for header in spec.get("mac_framework_headers", []):
+ AddHeaderToTarget(header, pbxp, xct, True)
+
+ # Add "copies".
+ pbxcp_dict = {}
+ for copy_group in spec.get("copies", []):
+ dest = copy_group["destination"]
+ if dest[0] not in ("/", "$"):
+ # Relative paths are relative to $(SRCROOT).
+ dest = "$(SRCROOT)/" + dest
+
+ code_sign = int(copy_group.get("xcode_code_sign", 0))
+ settings = (None, "{ATTRIBUTES = (CodeSignOnCopy, ); }")[code_sign]
+
+ # Coalesce multiple "copies" sections in the same target with the same
+ # "destination" property into the same PBXCopyFilesBuildPhase, otherwise
+ # they'll wind up with ID collisions.
+ pbxcp = pbxcp_dict.get(dest, None)
+ if pbxcp is None:
+ pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase(
+ {"name": "Copy to " + copy_group["destination"]}, parent=xct
+ )
+ pbxcp.SetDestination(dest)
+
+ # TODO(mark): The usual comment about this knowing too much about
+ # gyp.xcodeproj_file internals applies.
+ xct._properties["buildPhases"].insert(prebuild_index, pbxcp)
+
+ pbxcp_dict[dest] = pbxcp
+
+ for file in copy_group["files"]:
+ pbxcp.AddFile(file, settings)
+
+ # Excluded files can also go into the project file.
+ if not skip_excluded_files:
+ for key in [
+ "sources",
+ "mac_bundle_resources",
+ "mac_framework_headers",
+ "mac_framework_private_headers",
+ ]:
+ excluded_key = key + "_excluded"
+ for item in spec.get(excluded_key, []):
+ pbxp.AddOrGetFileInRootGroup(item)
+
+ # So can "inputs" and "outputs" sections of "actions" groups.
+ groups = ["inputs", "inputs_excluded", "outputs", "outputs_excluded"]
+ if skip_excluded_files:
+ groups = [x for x in groups if not x.endswith("_excluded")]
+ for action in spec.get("actions", []):
+ for group in groups:
+ for item in action.get(group, []):
+ # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
+ # sources.
+ if not item.startswith("$(BUILT_PRODUCTS_DIR)/"):
+ pbxp.AddOrGetFileInRootGroup(item)
+
+ for postbuild in spec.get("postbuilds", []):
+ action_string_sh = gyp.common.EncodePOSIXShellList(postbuild["action"])
+ script = "exec " + action_string_sh + "\nexit 1\n"
+
+ # Make the postbuild step depend on the output of ld or ar from this
+ # target. Apparently putting the script step after the link step isn't
+ # sufficient to ensure proper ordering in all cases. With an input
+ # declared but no outputs, the script step should run every time, as
+ # desired.
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase(
+ {
+ "inputPaths": ["$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)"],
+ "name": 'Postbuild "' + postbuild["postbuild_name"] + '"',
+ "shellScript": script,
+ "showEnvVarsInLog": 0,
+ }
+ )
+ xct.AppendProperty("buildPhases", ssbp)
+
+ # Add dependencies before libraries, because adding a dependency may imply
+ # adding a library. It's preferable to keep dependencies listed first
+ # during a link phase so that they can override symbols that would
+ # otherwise be provided by libraries, which will usually include system
+ # libraries. On some systems, ld is finicky and even requires the
+ # libraries to be ordered in such a way that unresolved symbols in
+ # earlier-listed libraries may only be resolved by later-listed libraries.
+ # The Mac linker doesn't work that way, but other platforms do, and so
+ # their linker invocations need to be constructed in this way. There's
+ # no compelling reason for Xcode's linker invocations to differ.
+
+ if "dependencies" in spec:
+ for dependency in spec["dependencies"]:
+ xct.AddDependency(xcode_targets[dependency])
+ # The support project also gets the dependencies (in case they are
+ # needed for the actions/rules to work).
+ if support_xct:
+ support_xct.AddDependency(xcode_targets[dependency])
+
+ if "libraries" in spec:
+ for library in spec["libraries"]:
+ xct.FrameworksPhase().AddFile(library)
+ # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
+ # I wish Xcode handled this automatically.
+ library_dir = posixpath.dirname(library)
+ if library_dir not in xcode_standard_library_dirs and (
+ not xct.HasBuildSetting(_library_search_paths_var)
+ or library_dir not in xct.GetBuildSetting(_library_search_paths_var)
+ ):
+ xct.AppendBuildSetting(_library_search_paths_var, library_dir)
+
+ for configuration_name in configuration_names:
+ configuration = spec["configurations"][configuration_name]
+ xcbc = xct.ConfigurationNamed(configuration_name)
+ for include_dir in configuration.get("mac_framework_dirs", []):
+ xcbc.AppendBuildSetting("FRAMEWORK_SEARCH_PATHS", include_dir)
+ for include_dir in configuration.get("include_dirs", []):
+ xcbc.AppendBuildSetting("HEADER_SEARCH_PATHS", include_dir)
+ for library_dir in configuration.get("library_dirs", []):
+ if library_dir not in xcode_standard_library_dirs and (
+ not xcbc.HasBuildSetting(_library_search_paths_var)
+ or library_dir
+ not in xcbc.GetBuildSetting(_library_search_paths_var)
+ ):
+ xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
+
+ if "defines" in configuration:
+ for define in configuration["defines"]:
+ set_define = EscapeXcodeDefine(define)
+ xcbc.AppendBuildSetting("GCC_PREPROCESSOR_DEFINITIONS", set_define)
+ if "xcode_settings" in configuration:
+ for xck, xcv in configuration["xcode_settings"].items():
+ xcbc.SetBuildSetting(xck, xcv)
+ if "xcode_config_file" in configuration:
+ config_ref = pbxp.AddOrGetFileInRootGroup(
+ configuration["xcode_config_file"]
+ )
+ xcbc.SetBaseConfiguration(config_ref)
+
+ build_files = []
+ for build_file, build_file_dict in data.items():
+ if build_file.endswith(".gyp"):
+ build_files.append(build_file)
+
+ for build_file in build_files:
+ xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
+
+ for build_file in build_files:
+ xcode_projects[build_file].Finalize2(xcode_targets, xcode_target_to_target_dict)
+
+ for build_file in build_files:
+ xcode_projects[build_file].Write()
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
new file mode 100644
index 0000000..49772d1
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the xcode.py file. """
+
+import gyp.generator.xcode as xcode
+import unittest
+import sys
+
+
+class TestEscapeXcodeDefine(unittest.TestCase):
+ if sys.platform == "darwin":
+
+ def test_InheritedRemainsUnescaped(self):
+ self.assertEqual(xcode.EscapeXcodeDefine("$(inherited)"), "$(inherited)")
+
+ def test_Escaping(self):
+ self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/input.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/input.py
new file mode 100644
index 0000000..d9699a0
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/input.py
@@ -0,0 +1,3130 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import ast
+
+import gyp.common
+import gyp.simple_copy
+import multiprocessing
+import os.path
+import re
+import shlex
+import signal
+import subprocess
+import sys
+import threading
+import traceback
+from distutils.version import StrictVersion
+from gyp.common import GypError
+from gyp.common import OrderedSet
+
+# A list of types that are treated as linkable.
+linkable_types = [
+ "executable",
+ "shared_library",
+ "loadable_module",
+ "mac_kernel_extension",
+ "windows_driver",
+]
+
+# A list of sections that contain links to other targets.
+dependency_sections = ["dependencies", "export_dependent_settings"]
+
+# base_path_sections is a list of sections defined by GYP that contain
+# pathnames. The generators can provide more keys, the two lists are merged
+# into path_sections, but you should call IsPathSection instead of using either
+# list directly.
+base_path_sections = [
+ "destination",
+ "files",
+ "include_dirs",
+ "inputs",
+ "libraries",
+ "outputs",
+ "sources",
+]
+path_sections = set()
+
+# These per-process dictionaries are used to cache build file data when loading
+# in parallel mode.
+per_process_data = {}
+per_process_aux_data = {}
+
+
+def IsPathSection(section):
+ # If section ends in one of the '=+?!' characters, it's applied to a section
+ # without the trailing characters. '/' is notably absent from this list,
+ # because there's no way for a regular expression to be treated as a path.
+ while section and section[-1:] in "=+?!":
+ section = section[:-1]
+
+ if section in path_sections:
+ return True
+
+ # Sections matching the regexp '_(dir|file|path)s?$' are also
+ # considered PathSections. Using manual string matching since that
+ # is much faster than the regexp and this can be called hundreds of
+ # thousands of times so micro performance matters.
+ if "_" in section:
+ tail = section[-6:]
+ if tail[-1] == "s":
+ tail = tail[:-1]
+ if tail[-5:] in ("_file", "_path"):
+ return True
+ return tail[-4:] == "_dir"
+
+ return False
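A minimal sketch of the suffix matching (hypothetical section names; only the suffix logic is exercised here, so path_sections need not be populated):

    assert IsPathSection("include_dirs")    # *_dirs suffix
    assert IsPathSection("my_files")        # *_files suffix
    assert IsPathSection("output_path=")    # '=+?!' merge markers are stripped
    assert not IsPathSection("defines")     # no path-like suffix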
+
+
+# base_non_configuration_keys is a list of key names that belong in the target
+# itself and should not be propagated into its configurations. It is merged
+# with a list that can come from the generator to
+# create non_configuration_keys.
+base_non_configuration_keys = [
+ # Sections that must exist inside targets and not configurations.
+ "actions",
+ "configurations",
+ "copies",
+ "default_configuration",
+ "dependencies",
+ "dependencies_original",
+ "libraries",
+ "postbuilds",
+ "product_dir",
+ "product_extension",
+ "product_name",
+ "product_prefix",
+ "rules",
+ "run_as",
+ "sources",
+ "standalone_static_library",
+ "suppress_wildcard",
+ "target_name",
+ "toolset",
+ "toolsets",
+ "type",
+ # Sections that can be found inside targets or configurations, but that
+ # should not be propagated from targets into their configurations.
+ "variables",
+]
+non_configuration_keys = []
+
+# Keys that do not belong inside a configuration dictionary.
+invalid_configuration_keys = [
+ "actions",
+ "all_dependent_settings",
+ "configurations",
+ "dependencies",
+ "direct_dependent_settings",
+ "libraries",
+ "link_settings",
+ "sources",
+ "standalone_static_library",
+ "target_name",
+ "type",
+]
+
+# Controls whether or not the generator supports multiple toolsets.
+multiple_toolsets = False
+
+# Paths for converting filelist paths to output paths: {
+# toplevel,
+# qualified_out_dir,
+# }
+generator_filelist_paths = None
+
+
+def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
+ """Return a list of all build files included into build_file_path.
+
+ The returned list will contain build_file_path as well as all other files
+ that it included, either directly or indirectly. Note that the list may
+ contain files that were included into a conditional section that evaluated
+ to false and was not merged into build_file_path's dict.
+
+ aux_data is a dict containing a key for each build file or included build
+ file. Those keys provide access to dicts whose "included" keys contain
+ lists of all other files included by the build file.
+
+ included should be left at its default None value by external callers. It
+ is used for recursion.
+
+ The returned list will not contain any duplicate entries. Each build file
+ in the list will be relative to the current directory.
+ """
+
+ if included is None:
+ included = []
+
+ if build_file_path in included:
+ return included
+
+ included.append(build_file_path)
+
+ for included_build_file in aux_data[build_file_path].get("included", []):
+ GetIncludedBuildFiles(included_build_file, aux_data, included)
+
+ return included
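For instance, with a hypothetical aux_data in the documented shape, the traversal flattens the include graph in discovery order:

    aux_data = {
        "a.gyp": {"included": ["common.gypi"]},
        "common.gypi": {},
    }
    GetIncludedBuildFiles("a.gyp", aux_data)
    # -> ["a.gyp", "common.gypi"]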
+
+
+def CheckedEval(file_contents):
+ """Return the eval of a gyp file.
+ The gyp file is restricted to dictionaries and lists only, and
+ repeated keys are not allowed.
+ Note that this is slower than eval() is.
+ """
+
+ syntax_tree = ast.parse(file_contents)
+ assert isinstance(syntax_tree, ast.Module)
+ c1 = syntax_tree.body
+ assert len(c1) == 1
+ c2 = c1[0]
+ assert isinstance(c2, ast.Expr)
+ return CheckNode(c2.value, [])
+
+
+def CheckNode(node, keypath):
+ if isinstance(node, ast.Dict):
+ dict = {}
+ for key, value in zip(node.keys, node.values):
+ assert isinstance(key, ast.Str)
+ key = key.s
+ if key in dict:
+ raise GypError(
+ "Key '"
+ + key
+ + "' repeated at level "
+ + repr(len(keypath) + 1)
+ + " with key path '"
+ + ".".join(keypath)
+ + "'"
+ )
+ kp = list(keypath) # Make a copy of the list for descending this node.
+ kp.append(key)
+ dict[key] = CheckNode(value, kp)
+ return dict
+ elif isinstance(node, ast.List):
+ children = []
+ for index, child in enumerate(node.elts):
+ kp = list(keypath) # Copy list.
+ kp.append(repr(index))
+ children.append(CheckNode(child, kp))
+ return children
+ elif isinstance(node, ast.Str):
+ return node.s
+ else:
+ raise TypeError(
+ "Unknown AST node at key path '" + ".".join(keypath) + "': " + repr(node)
+ )
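A sketch of the restricted evaluation on hypothetical inputs: well-formed content comes back as plain dicts, lists, and strings, while anything else is rejected:

    CheckedEval("{'targets': [{'target_name': 'a'}]}")
    # -> {'targets': [{'target_name': 'a'}]}

    CheckedEval("{'a': '1', 'a': '2'}")  # raises GypError: Key 'a' repeated
    CheckedEval("{'a': 1}")              # raises TypeError: not a string node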
+
+
+def LoadOneBuildFile(build_file_path, data, aux_data, includes, is_target, check):
+ if build_file_path in data:
+ return data[build_file_path]
+
+ if os.path.exists(build_file_path):
+ with open(build_file_path, encoding="utf-8") as build_file:
+ build_file_contents = build_file.read()
+ else:
+ raise GypError(f"{build_file_path} not found (cwd: {os.getcwd()})")
+
+ build_file_data = None
+ try:
+ if check:
+ build_file_data = CheckedEval(build_file_contents)
+ else:
+ build_file_data = eval(build_file_contents, {"__builtins__": {}}, None)
+ except SyntaxError as e:
+ e.filename = build_file_path
+ raise
+ except Exception as e:
+ gyp.common.ExceptionAppend(e, "while reading " + build_file_path)
+ raise
+
+ if type(build_file_data) is not dict:
+ raise GypError("%s does not evaluate to a dictionary." % build_file_path)
+
+ data[build_file_path] = build_file_data
+ aux_data[build_file_path] = {}
+
+ # Scan for includes and merge them in.
+ if "skip_includes" not in build_file_data or not build_file_data["skip_includes"]:
+ try:
+ if is_target:
+ LoadBuildFileIncludesIntoDict(
+ build_file_data, build_file_path, data, aux_data, includes, check
+ )
+ else:
+ LoadBuildFileIncludesIntoDict(
+ build_file_data, build_file_path, data, aux_data, None, check
+ )
+ except Exception as e:
+ gyp.common.ExceptionAppend(
+ e, "while reading includes of " + build_file_path
+ )
+ raise
+
+ return build_file_data
+
+
+def LoadBuildFileIncludesIntoDict(
+ subdict, subdict_path, data, aux_data, includes, check
+):
+ includes_list = []
+ if includes is not None:
+ includes_list.extend(includes)
+ if "includes" in subdict:
+ for include in subdict["includes"]:
+ # "include" is specified relative to subdict_path, so compute the real
+ # path to include by appending the provided "include" to the directory
+ # in which subdict_path resides.
+ relative_include = os.path.normpath(
+ os.path.join(os.path.dirname(subdict_path), include)
+ )
+ includes_list.append(relative_include)
+ # Unhook the includes list, it's no longer needed.
+ del subdict["includes"]
+
+ # Merge in the included files.
+ for include in includes_list:
+ if "included" not in aux_data[subdict_path]:
+ aux_data[subdict_path]["included"] = []
+ aux_data[subdict_path]["included"].append(include)
+
+ gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
+
+ MergeDicts(
+ subdict,
+ LoadOneBuildFile(include, data, aux_data, None, False, check),
+ subdict_path,
+ include,
+ )
+
+ # Recurse into subdictionaries.
+ for k, v in subdict.items():
+ if type(v) is dict:
+ LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, None, check)
+ elif type(v) is list:
+ LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, check)
+
+
+# This recurses into lists so that it can look for dicts.
+def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
+ for item in sublist:
+ if type(item) is dict:
+ LoadBuildFileIncludesIntoDict(
+ item, sublist_path, data, aux_data, None, check
+ )
+ elif type(item) is list:
+ LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
+
+
+# Processes toolsets in all the targets. This recurses into condition entries
+# since they can contain toolsets as well.
+def ProcessToolsetsInDict(data):
+ if "targets" in data:
+ target_list = data["targets"]
+ new_target_list = []
+ for target in target_list:
+ # If this target already has an explicit 'toolset', and no 'toolsets'
+ # list, don't modify it further.
+ if "toolset" in target and "toolsets" not in target:
+ new_target_list.append(target)
+ continue
+ if multiple_toolsets:
+ toolsets = target.get("toolsets", ["target"])
+ else:
+ toolsets = ["target"]
+ # Make sure this 'toolsets' definition is only processed once.
+ if "toolsets" in target:
+ del target["toolsets"]
+ if len(toolsets) > 0:
+ # Optimization: only do copies if more than one toolset is specified.
+ for build in toolsets[1:]:
+ new_target = gyp.simple_copy.deepcopy(target)
+ new_target["toolset"] = build
+ new_target_list.append(new_target)
+ target["toolset"] = toolsets[0]
+ new_target_list.append(target)
+ data["targets"] = new_target_list
+ if "conditions" in data:
+ for condition in data["conditions"]:
+ if type(condition) is list:
+ for condition_dict in condition[1:]:
+ if type(condition_dict) is dict:
+ ProcessToolsetsInDict(condition_dict)
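A sketch of the expansion with a made-up target, assuming multiple_toolsets is True: a target declaring two toolsets is split into one copy per toolset, each with a concrete 'toolset' key:

    data = {"targets": [{"target_name": "a", "toolsets": ["target", "host"]}]}
    ProcessToolsetsInDict(data)
    # data["targets"] ->
    #   [{"target_name": "a", "toolset": "host"},
    #    {"target_name": "a", "toolset": "target"}]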
+
+
+# TODO(mark): I don't love this name. It just means that it's going to load
+# a build file that contains targets and is expected to provide a targets dict
+# that contains the targets...
+def LoadTargetBuildFile(
+ build_file_path,
+ data,
+ aux_data,
+ variables,
+ includes,
+ depth,
+ check,
+ load_dependencies,
+):
+ # If depth is set, predefine the DEPTH variable to be a relative path from
+ # this build file's directory to the directory identified by depth.
+ if depth:
+ # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
+ # temporary measure. This should really be addressed by keeping all paths
+ # in POSIX until actual project generation.
+ d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
+ if d == "":
+ variables["DEPTH"] = "."
+ else:
+ variables["DEPTH"] = d.replace("\\", "/")
+
+ # The 'target_build_files' key is only set when loading target build files in
+ # the non-parallel code path, where LoadTargetBuildFile is called
+ # recursively. In the parallel code path, we don't need to check whether the
+ # |build_file_path| has already been loaded, because the 'scheduled' set in
+ # ParallelState guarantees that we never load the same |build_file_path|
+ # twice.
+ if "target_build_files" in data:
+ if build_file_path in data["target_build_files"]:
+ # Already loaded.
+ return False
+ data["target_build_files"].add(build_file_path)
+
+ gyp.DebugOutput(
+ gyp.DEBUG_INCLUDES, "Loading Target Build File '%s'", build_file_path
+ )
+
+ build_file_data = LoadOneBuildFile(
+ build_file_path, data, aux_data, includes, True, check
+ )
+
+ # Store DEPTH for later use in generators.
+ build_file_data["_DEPTH"] = depth
+
+ # Set up the included_files key indicating which .gyp files contributed to
+ # this target dict.
+ if "included_files" in build_file_data:
+ raise GypError(build_file_path + " must not contain included_files key")
+
+ included = GetIncludedBuildFiles(build_file_path, aux_data)
+ build_file_data["included_files"] = []
+ for included_file in included:
+ # included_file is relative to the current directory, but it needs to
+ # be made relative to build_file_path's directory.
+ included_relative = gyp.common.RelativePath(
+ included_file, os.path.dirname(build_file_path)
+ )
+ build_file_data["included_files"].append(included_relative)
+
+ # Do a first round of toolsets expansion so that conditions can be defined
+ # per toolset.
+ ProcessToolsetsInDict(build_file_data)
+
+ # Apply "pre"/"early" variable expansions and condition evaluations.
+ ProcessVariablesAndConditionsInDict(
+ build_file_data, PHASE_EARLY, variables, build_file_path
+ )
+
+ # Since some toolsets might have been defined conditionally, perform
+ # a second round of toolsets expansion now.
+ ProcessToolsetsInDict(build_file_data)
+
+ # Look at each project's target_defaults dict, and merge settings into
+ # targets.
+ if "target_defaults" in build_file_data:
+ if "targets" not in build_file_data:
+ raise GypError("Unable to find targets in build file %s" % build_file_path)
+
+ index = 0
+ while index < len(build_file_data["targets"]):
+ # This procedure needs to give the impression that target_defaults is
+ # used as defaults, and the individual targets inherit from that.
+ # The individual targets need to be merged into the defaults. Make
+ # a deep copy of the defaults for each target, merge the target dict
+ # as found in the input file into that copy, and then hook up the
+ # copy with the target-specific data merged into it as the replacement
+ # target dict.
+ old_target_dict = build_file_data["targets"][index]
+ new_target_dict = gyp.simple_copy.deepcopy(
+ build_file_data["target_defaults"]
+ )
+ MergeDicts(
+ new_target_dict, old_target_dict, build_file_path, build_file_path
+ )
+ build_file_data["targets"][index] = new_target_dict
+ index += 1
+
+ # No longer needed.
+ del build_file_data["target_defaults"]
+
+ # Look for dependencies. This means that dependency resolution occurs
+ # after "pre" conditionals and variable expansion, but before "post" -
+ # in other words, you can't put a "dependencies" section inside a "post"
+ # conditional within a target.
+
+ dependencies = []
+ if "targets" in build_file_data:
+ for target_dict in build_file_data["targets"]:
+ if "dependencies" not in target_dict:
+ continue
+ for dependency in target_dict["dependencies"]:
+ dependencies.append(
+ gyp.common.ResolveTarget(build_file_path, dependency, None)[0]
+ )
+
+ if load_dependencies:
+ for dependency in dependencies:
+ try:
+ LoadTargetBuildFile(
+ dependency,
+ data,
+ aux_data,
+ variables,
+ includes,
+ depth,
+ check,
+ load_dependencies,
+ )
+ except Exception as e:
+ gyp.common.ExceptionAppend(
+ e, "while loading dependencies of %s" % build_file_path
+ )
+ raise
+ else:
+ return (build_file_path, dependencies)
+
+
+def CallLoadTargetBuildFile(
+ global_flags,
+ build_file_path,
+ variables,
+ includes,
+ depth,
+ check,
+ generator_input_info,
+):
+ """Wrapper around LoadTargetBuildFile for parallel processing.
+
+ This wrapper is used when LoadTargetBuildFile is executed in
+ a worker process.
+ """
+
+ try:
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ # Apply globals so that the worker process behaves the same.
+ for key, value in global_flags.items():
+ globals()[key] = value
+
+ SetGeneratorGlobals(generator_input_info)
+ result = LoadTargetBuildFile(
+ build_file_path,
+ per_process_data,
+ per_process_aux_data,
+ variables,
+ includes,
+ depth,
+ check,
+ False,
+ )
+ if not result:
+ return result
+
+ (build_file_path, dependencies) = result
+
+ # We can safely pop the build_file_data from per_process_data because it
+ # will never be referenced by this process again, so we don't need to keep
+ # it in the cache.
+ build_file_data = per_process_data.pop(build_file_path)
+
+ # This gets serialized and sent back to the main process via a pipe.
+ # It's handled in LoadTargetBuildFileCallback.
+ return (build_file_path, build_file_data, dependencies)
+ except GypError as e:
+ sys.stderr.write("gyp: %s\n" % e)
+ return None
+ except Exception as e:
+ print("Exception:", e, file=sys.stderr)
+ print(traceback.format_exc(), file=sys.stderr)
+ return None
+
+
+class ParallelProcessingError(Exception):
+ pass
+
+
+class ParallelState:
+ """Class to keep track of state when processing input files in parallel.
+
+ If build files are loaded in parallel, use this to keep track of
+ state during farming out and processing parallel jobs. It's stored
+ in a global so that the callback function can have access to it.
+ """
+
+ def __init__(self):
+ # The multiprocessing pool.
+ self.pool = None
+ # The condition variable used to protect this object and notify
+ # the main loop when there might be more data to process.
+ self.condition = None
+ # The "data" dict that was passed to LoadTargetBuildFileParallel
+ self.data = None
+ # The number of parallel calls outstanding; decremented when a response
+ # was received.
+ self.pending = 0
+ # The set of all build files that have been scheduled, so we don't
+ # schedule the same one twice.
+ self.scheduled = set()
+ # A list of dependency build file paths that haven't been scheduled yet.
+ self.dependencies = []
+ # Flag to indicate if there was an error in a child process.
+ self.error = False
+
+ def LoadTargetBuildFileCallback(self, result):
+ """Handle the results of running LoadTargetBuildFile in another process.
+ """
+ self.condition.acquire()
+ if not result:
+ self.error = True
+ self.condition.notify()
+ self.condition.release()
+ return
+ (build_file_path0, build_file_data0, dependencies0) = result
+ self.data[build_file_path0] = build_file_data0
+ self.data["target_build_files"].add(build_file_path0)
+ for new_dependency in dependencies0:
+ if new_dependency not in self.scheduled:
+ self.scheduled.add(new_dependency)
+ self.dependencies.append(new_dependency)
+ self.pending -= 1
+ self.condition.notify()
+ self.condition.release()
+
+
+def LoadTargetBuildFilesParallel(
+ build_files, data, variables, includes, depth, check, generator_input_info
+):
+ parallel_state = ParallelState()
+ parallel_state.condition = threading.Condition()
+ # Make copies of the build_files argument that we can modify while working.
+ parallel_state.dependencies = list(build_files)
+ parallel_state.scheduled = set(build_files)
+ parallel_state.pending = 0
+ parallel_state.data = data
+
+ try:
+ parallel_state.condition.acquire()
+ while parallel_state.dependencies or parallel_state.pending:
+ if parallel_state.error:
+ break
+ if not parallel_state.dependencies:
+ parallel_state.condition.wait()
+ continue
+
+ dependency = parallel_state.dependencies.pop()
+
+ parallel_state.pending += 1
+ global_flags = {
+ "path_sections": globals()["path_sections"],
+ "non_configuration_keys": globals()["non_configuration_keys"],
+ "multiple_toolsets": globals()["multiple_toolsets"],
+ }
+
+ if not parallel_state.pool:
+ parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
+ parallel_state.pool.apply_async(
+ CallLoadTargetBuildFile,
+ args=(
+ global_flags,
+ dependency,
+ variables,
+ includes,
+ depth,
+ check,
+ generator_input_info,
+ ),
+ callback=parallel_state.LoadTargetBuildFileCallback,
+ )
+ except KeyboardInterrupt as e:
+ parallel_state.pool.terminate()
+ raise e
+
+ parallel_state.condition.release()
+
+ parallel_state.pool.close()
+ parallel_state.pool.join()
+ parallel_state.pool = None
+
+ if parallel_state.error:
+ sys.exit(1)
+
+
+# Look for the bracket that matches the first bracket seen in a
+# string, and return the start and end as a tuple. For example, if
+# the input is something like "<(foo <(bar)) blah", then it would
+# return (1, 13), indicating the entire string except for the leading
+# "<" and trailing " blah".
+LBRACKETS = set("{[(")
+BRACKETS = {"}": "{", "]": "[", ")": "("}
+
+
+def FindEnclosingBracketGroup(input_str):
+ stack = []
+ start = -1
+ for index, char in enumerate(input_str):
+ if char in LBRACKETS:
+ stack.append(char)
+ if start == -1:
+ start = index
+ elif char in BRACKETS:
+ if not stack:
+ return (-1, -1)
+ if stack.pop() != BRACKETS[char]:
+ return (-1, -1)
+ if not stack:
+ return (start, index + 1)
+ return (-1, -1)
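The worked example from the comment above, plus a failure case:

    FindEnclosingBracketGroup("<(foo <(bar)) blah")  # -> (1, 13)
    FindEnclosingBracketGroup("no brackets")         # -> (-1, -1)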
+
+
+def IsStrCanonicalInt(string):
+ """Returns True if |string| is in its canonical integer form.
+
+ The canonical form is such that str(int(string)) == string.
+ """
+ if type(string) is str:
+ # This function is called a lot, so for maximum performance it
+ # avoids regexps, which would make the code much shorter but take
+ # roughly twice as long to run.
+ if string:
+ if string == "0":
+ return True
+ if string[0] == "-":
+ string = string[1:]
+ if not string:
+ return False
+ if "1" <= string[0] <= "9":
+ return string.isdigit()
+
+ return False
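A few illustrative inputs:

    IsStrCanonicalInt("10")    # True
    IsStrCanonicalInt("-5")    # True
    IsStrCanonicalInt("010")   # False: str(int("010")) != "010"
    IsStrCanonicalInt("-0")    # False: the canonical form is "0"
    IsStrCanonicalInt(10)      # False: only str instances qualify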
+
+
+# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
+# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
+# In the last case, the inner "<()" is captured in match['content'].
+early_variable_re = re.compile(
+ r"(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)"
+ r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
+ r"\((?P<is_array>\s*\[?)"
+ r"(?P<content>.*?)(\]?)\))"
+)
+
+# This matches the same as early_variable_re, but with '>' instead of '<'.
+late_variable_re = re.compile(
+ r"(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)"
+ r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
+ r"\((?P<is_array>\s*\[?)"
+ r"(?P<content>.*?)(\]?)\))"
+)
+
+# This matches the same as early_variable_re, but with '^' instead of '<'.
+latelate_variable_re = re.compile(
+ r"(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)"
+ r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
+ r"\((?P<is_array>\s*\[?)"
+ r"(?P<content>.*?)(\]?)\))"
+)
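A sketch of how these patterns decompose an expansion (the sample strings are made up; the group names match the comments in ExpandVariables below):

    m = early_variable_re.search("<!pymod_do_main(mymod --opt)")
    m.group("type")            # '<!'  (command variant)
    m.group("command_string")  # 'pymod_do_main'
    m.group("content")         # 'mymod --opt'

    early_variable_re.search("<@(sources)").group("type")  # '<@'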
+
+# Global cache of results from running commands so they don't have to be run
+# more than once.
+cached_command_results = {}
+
+
+def FixupPlatformCommand(cmd):
+ if sys.platform == "win32":
+ if type(cmd) is list:
+ cmd = [re.sub("^cat ", "type ", cmd[0])] + cmd[1:]
+ else:
+ cmd = re.sub("^cat ", "type ", cmd)
+ return cmd
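On win32 this rewrites a leading "cat " to the shell builtin "type" (invented input):

    FixupPlatformCommand("cat header.h footer.h")
    # -> 'type header.h footer.h' on win32; returned unchanged elsewhere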
+
+
+PHASE_EARLY = 0
+PHASE_LATE = 1
+PHASE_LATELATE = 2
+
+
+def ExpandVariables(input, phase, variables, build_file):
+ # Look for the pattern that gets expanded into variables
+ if phase == PHASE_EARLY:
+ variable_re = early_variable_re
+ expansion_symbol = "<"
+ elif phase == PHASE_LATE:
+ variable_re = late_variable_re
+ expansion_symbol = ">"
+ elif phase == PHASE_LATELATE:
+ variable_re = latelate_variable_re
+ expansion_symbol = "^"
+ else:
+ assert False
+
+ input_str = str(input)
+ if IsStrCanonicalInt(input_str):
+ return int(input_str)
+
+ # Do a quick scan to determine if an expensive regex search is warranted.
+ if expansion_symbol not in input_str:
+ return input_str
+
+ # Get the entire list of matches as a list of MatchObject instances.
+ # (using findall here would return strings instead of MatchObjects).
+ matches = list(variable_re.finditer(input_str))
+ if not matches:
+ return input_str
+
+ output = input_str
+ # Reverse the list of matches so that replacements are done right-to-left.
+ # That ensures that earlier replacements won't mess up the string in a
+ # way that causes later calls to find the earlier substituted text instead
+ # of what's intended for replacement.
+ matches.reverse()
+ for match_group in matches:
+ match = match_group.groupdict()
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
+ # match['replace'] is the substring to look for, match['type']
+ # is the character code for the replacement type (< > <! >! <| >| <@
+ # >@ <!@ >!@), match['is_array'] contains a '[' for command
+ # arrays, and match['content'] is the name of the variable (< >)
+ # or command to run (<! >!). match['command_string'] is an optional
+ # command string. Currently, only 'pymod_do_main' is supported.
+
+ # run_command is true if a ! variant is used.
+ run_command = "!" in match["type"]
+ command_string = match["command_string"]
+
+ # file_list is true if a | variant is used.
+ file_list = "|" in match["type"]
+
+ # Capture these now so we can adjust them later.
+ replace_start = match_group.start("replace")
+ replace_end = match_group.end("replace")
+
+ # Find the ending paren, and re-evaluate the contained string.
+ (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
+
+ # Adjust the replacement range to match the entire command
+ # found by FindEnclosingBracketGroup (since the variable_re
+ # probably doesn't match the entire command if it contained
+ # nested variables).
+ replace_end = replace_start + c_end
+
+ # Find the "real" replacement, matching the appropriate closing
+ # paren, and adjust the replacement start and end.
+ replacement = input_str[replace_start:replace_end]
+
+ # Figure out what the contents of the variable parens are.
+ contents_start = replace_start + c_start + 1
+ contents_end = replace_end - 1
+ contents = input_str[contents_start:contents_end]
+
+ # Do filter substitution now for <|().
+ # Admittedly, this is different from the evaluation order in other
+ # contexts. However, since filtration has no chance to run on <|(),
+ # this seems like the only obvious way to give them access to filters.
+ if file_list:
+ processed_variables = gyp.simple_copy.deepcopy(variables)
+ ProcessListFiltersInDict(contents, processed_variables)
+ # Recurse to expand variables in the contents
+ contents = ExpandVariables(contents, phase, processed_variables, build_file)
+ else:
+ # Recurse to expand variables in the contents
+ contents = ExpandVariables(contents, phase, variables, build_file)
+
+ # Strip off leading/trailing whitespace so that variable matches are
+ # simpler below (and because they are rarely needed).
+ contents = contents.strip()
+
+ # expand_to_list is true if an @ variant is used. In that case,
+ # the expansion should result in a list. The caller must then
+ # expect a list in return; not all callers do, because not all
+ # are working in list context. Also, for list
+ # expansions, there can be no other text besides the variable
+ # expansion in the input string.
+ expand_to_list = "@" in match["type"] and input_str == replacement
+
+ if run_command or file_list:
+ # Find the build file's directory, so commands can be run or file lists
+ # generated relative to it.
+ build_file_dir = os.path.dirname(build_file)
+ if build_file_dir == "" and not file_list:
+ # If build_file is just a leaf filename indicating a file in the
+ # current directory, build_file_dir might be an empty string. Set
+ # it to None to signal to subprocess.Popen that it should run the
+ # command in the current directory.
+ build_file_dir = None
+
+ # Support <|(listfile.txt ...) which generates a file
+ # containing items from a gyp list, generated at gyp time.
+ # This works around actions/rules which have more inputs than will
+ # fit on the command line.
+ if file_list:
+ if type(contents) is list:
+ contents_list = contents
+ else:
+ contents_list = contents.split(" ")
+ replacement = contents_list[0]
+ if os.path.isabs(replacement):
+ raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
+
+ if not generator_filelist_paths:
+ path = os.path.join(build_file_dir, replacement)
+ else:
+ if os.path.isabs(build_file_dir):
+ toplevel = generator_filelist_paths["toplevel"]
+ rel_build_file_dir = gyp.common.RelativePath(
+ build_file_dir, toplevel
+ )
+ else:
+ rel_build_file_dir = build_file_dir
+ qualified_out_dir = generator_filelist_paths["qualified_out_dir"]
+ path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
+ gyp.common.EnsureDirExists(path)
+
+ replacement = gyp.common.RelativePath(path, build_file_dir)
+ f = gyp.common.WriteOnDiff(path)
+ for i in contents_list[1:]:
+ f.write("%s\n" % i)
+ f.close()
+
+ elif run_command:
+ use_shell = True
+ if match["is_array"]:
+ contents = eval(contents)
+ use_shell = False
+
+ # Check for a cached value to avoid executing commands, or generating
+ # file lists more than once. The cache key contains the command to be
+ # run as well as the directory to run it from, to account for commands
+ # that depend on their current directory.
+ # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
+ # someone could author a set of GYP files where each time the command
+ # is invoked it produces different output by design. When the need
+ # arises, the syntax should be extended to support no caching off a
+ # command's output so it is run every time.
+ cache_key = (str(contents), build_file_dir)
+ cached_value = cached_command_results.get(cache_key, None)
+ if cached_value is None:
+ gyp.DebugOutput(
+ gyp.DEBUG_VARIABLES,
+ "Executing command '%s' in directory '%s'",
+ contents,
+ build_file_dir,
+ )
+
+ replacement = ""
+
+ if command_string == "pymod_do_main":
+ # <!pymod_do_main(modulename param eters) loads |modulename| as a
+ # python module and then calls that module's DoMain() function,
+ # passing ["param", "eters"] as a single list argument. For modules
+ # that don't load quickly, this can be faster than
+ # <!(python modulename param eters). Do this in |build_file_dir|.
+ oldwd = os.getcwd()  # Remember the cwd; Python has no fchdir.
+ if build_file_dir: # build_file_dir may be None (see above).
+ os.chdir(build_file_dir)
+ sys.path.append(os.getcwd())
+ try:
+
+ parsed_contents = shlex.split(contents)
+ try:
+ py_module = __import__(parsed_contents[0])
+ except ImportError as e:
+ raise GypError(
+ "Error importing pymod_do_main"
+ "module (%s): %s" % (parsed_contents[0], e)
+ )
+ replacement = str(
+ py_module.DoMain(parsed_contents[1:])
+ ).rstrip()
+ finally:
+ sys.path.pop()
+ os.chdir(oldwd)
+ assert replacement is not None
+ elif command_string:
+ raise GypError(
+ "Unknown command string '%s' in '%s'."
+ % (command_string, contents)
+ )
+ else:
+ # Fix up command with platform specific workarounds.
+ contents = FixupPlatformCommand(contents)
+ try:
+ # stderr will be printed no matter what
+ result = subprocess.run(
+ contents,
+ stdout=subprocess.PIPE,
+ shell=use_shell,
+ cwd=build_file_dir,
+ check=False
+ )
+ except Exception as e:
+ raise GypError(
+ "%s while executing command '%s' in %s"
+ % (e, contents, build_file)
+ )
+
+ if result.returncode > 0:
+ raise GypError(
+ "Call to '%s' returned exit status %d while in %s."
+ % (contents, result.returncode, build_file)
+ )
+ replacement = result.stdout.decode("utf-8").rstrip()
+
+ cached_command_results[cache_key] = replacement
+ else:
+ gyp.DebugOutput(
+ gyp.DEBUG_VARIABLES,
+ "Had cache value for command '%s' in directory '%s'",
+ contents,
+ build_file_dir,
+ )
+ replacement = cached_value
+
+ else:
+ if contents not in variables:
+ if contents[-1] in ["!", "/"]:
+ # In order to allow cross-compiles (nacl) to happen more naturally,
+ # we will allow references to >(sources/) etc. to resolve to
+ # an empty list if undefined. This allows actions to:
+ # 'action!': [
+ # '>@(_sources!)',
+ # ],
+ # 'action/': [
+ # '>@(_sources/)',
+ # ],
+ replacement = []
+ else:
+ raise GypError(
+ "Undefined variable " + contents + " in " + build_file
+ )
+ else:
+ replacement = variables[contents]
+
+ if isinstance(replacement, bytes) and not isinstance(replacement, str):
+ replacement = replacement.decode("utf-8") # done on Python 3 only
+ if type(replacement) is list:
+ for index, item in enumerate(replacement):
+ if isinstance(item, bytes) and not isinstance(item, str):
+ # Decode in place; rebinding the loop variable alone
+ # would leave the bytes object in the list.
+ item = replacement[index] = item.decode("utf-8")
+ if not contents[-1] == "/" and type(item) not in (str, int):
+ raise GypError(
+ "Variable "
+ + contents
+ + " must expand to a string or list of strings; "
+ + "list contains a "
+ + item.__class__.__name__
+ )
+ # Run through the list and handle variable expansions in it. Since
+ # the list is guaranteed not to contain dicts, this won't do anything
+ # with conditions sections.
+ ProcessVariablesAndConditionsInList(
+ replacement, phase, variables, build_file
+ )
+ elif type(replacement) not in (str, int):
+ raise GypError(
+ "Variable "
+ + contents
+ + " must expand to a string or list of strings; "
+ + "found a "
+ + replacement.__class__.__name__
+ )
+
+ if expand_to_list:
+ # Expanding in list context. It's guaranteed that there's only one
+ # replacement to do in |input_str| and that it's this replacement. See
+ # above.
+ if type(replacement) is list:
+ # If it's already a list, make a copy.
+ output = replacement[:]
+ else:
+ # Split it the same way sh would split arguments.
+ output = shlex.split(str(replacement))
+ else:
+ # Expanding in string context.
+ encoded_replacement = ""
+ if type(replacement) is list:
+ # When expanding a list into string context, turn the list items
+ # into a string in a way that will work with a subprocess call.
+ #
+ # TODO(mark): This isn't completely correct. This should
+ # call a generator-provided function that observes the
+ # proper list-to-argument quoting rules on a specific
+ # platform instead of just calling the POSIX encoding
+ # routine.
+ encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
+ else:
+ encoded_replacement = replacement
+
+ output = (
+ output[:replace_start] + str(encoded_replacement) + output[replace_end:]
+ )
+ # Prepare for the next match iteration.
+ input_str = output
+
+ if output == input:
+ gyp.DebugOutput(
+ gyp.DEBUG_VARIABLES,
+ "Found only identity matches on %r, avoiding infinite " "recursion.",
+ output,
+ )
+ else:
+ # Look for more matches now that we've replaced some, to deal with
+ # expanding local variables (variables defined in the same
+ # variables block as this one).
+ gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
+ if type(output) is list:
+ if output and type(output[0]) is list:
+ # Leave output alone if it's a list of lists.
+ # We don't want such lists to be stringified.
+ pass
+ else:
+ new_output = []
+ for item in output:
+ new_output.append(
+ ExpandVariables(item, phase, variables, build_file)
+ )
+ output = new_output
+ else:
+ output = ExpandVariables(output, phase, variables, build_file)
+
+ # Convert all strings that are canonically-represented integers into integers.
+ if type(output) is list:
+ for index, outstr in enumerate(output):
+ if IsStrCanonicalInt(outstr):
+ output[index] = int(outstr)
+ elif IsStrCanonicalInt(output):
+ output = int(output)
+
+ return output
+
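Two small string-context sketches with invented variables: a plain substitution, and the canonical-int conversion on the way out:

    ExpandVariables("-I<(depth)/include", PHASE_EARLY, {"depth": ".."}, "a.gyp")
    # -> '-I../include'
    ExpandVariables("<(num)", PHASE_EARLY, {"num": 42}, "a.gyp")
    # -> 42 (an int, per the canonicalization above)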
+
+# The same condition is often evaluated over and over again so it
+# makes sense to cache as much as possible between evaluations.
+cached_conditions_asts = {}
+
+
+def EvalCondition(condition, conditions_key, phase, variables, build_file):
+ """Returns the dict that should be used or None if the result was
+ that nothing should be used."""
+ if type(condition) is not list:
+ raise GypError(conditions_key + " must be a list")
+ if len(condition) < 2:
+ # It's possible that condition[0] won't work in which case this
+ # attempt will raise its own IndexError. That's probably fine.
+ raise GypError(
+ conditions_key
+ + " "
+ + condition[0]
+ + " must be at least length 2, not "
+ + str(len(condition))
+ )
+
+ i = 0
+ result = None
+ while i < len(condition):
+ cond_expr = condition[i]
+ true_dict = condition[i + 1]
+ if type(true_dict) is not dict:
+ raise GypError(
+ "{} {} must be followed by a dictionary, not {}".format(
+ conditions_key, cond_expr, type(true_dict)
+ )
+ )
+ if len(condition) > i + 2 and type(condition[i + 2]) is dict:
+ false_dict = condition[i + 2]
+ i = i + 3
+ if i != len(condition):
+ raise GypError(
+ "{} {} has {} unexpected trailing items".format(
+ conditions_key, cond_expr, len(condition) - i
+ )
+ )
+ else:
+ false_dict = None
+ i = i + 2
+ if result is None:
+ result = EvalSingleCondition(
+ cond_expr, true_dict, false_dict, phase, variables, build_file
+ )
+
+ return result
+
+
+def EvalSingleCondition(cond_expr, true_dict, false_dict, phase, variables, build_file):
+ """Returns true_dict if cond_expr evaluates to true, and false_dict
+ otherwise."""
+ # Do expansions on the condition itself. Since the condition can naturally
+ # contain variable references without needing to resort to GYP expansion
+ # syntax, this is of dubious value for variables, but someone might want to
+ # use a command expansion directly inside a condition.
+ cond_expr_expanded = ExpandVariables(cond_expr, phase, variables, build_file)
+ if type(cond_expr_expanded) not in (str, int):
+ raise ValueError(
+ "Variable expansion in this context permits str and int "
+ + "only, found "
+ + cond_expr_expanded.__class__.__name__
+ )
+
+ try:
+ if cond_expr_expanded in cached_conditions_asts:
+ ast_code = cached_conditions_asts[cond_expr_expanded]
+ else:
+ ast_code = compile(cond_expr_expanded, "<string>", "eval")
+ cached_conditions_asts[cond_expr_expanded] = ast_code
+ env = {"__builtins__": {}, "v": StrictVersion}
+ if eval(ast_code, env, variables):
+ return true_dict
+ return false_dict
+ except SyntaxError as e:
+ syntax_error = SyntaxError(
+ "%s while evaluating condition '%s' in %s "
+ "at character %d." % (str(e.args[0]), e.text, build_file, e.offset),
+ e.filename,
+ e.lineno,
+ e.offset,
+ e.text,
+ )
+ raise syntax_error
+ except NameError as e:
+ gyp.common.ExceptionAppend(
+ e,
+ f"while evaluating condition '{cond_expr_expanded}' in {build_file}",
+ )
+ raise GypError(e)
+
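Stripped to its essentials, the path above compiles the condition once, caches the code object, and evals it with the variables dict as locals (hypothetical variable values):

    variables = {"OS": "mac", "chromium_code": 1}
    code = compile('OS=="mac" and chromium_code==1', "<string>", "eval")
    eval(code, {"__builtins__": {}, "v": StrictVersion}, variables)  # -> True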
+
+def ProcessConditionsInDict(the_dict, phase, variables, build_file):
+ # Process a 'conditions' or 'target_conditions' section in the_dict,
+ # depending on phase.
+ # early -> conditions
+ # late -> target_conditions
+ # latelate -> no conditions
+ #
+ # Each item in a conditions list consists of cond_expr, a string expression
+ # evaluated as the condition, and true_dict, a dict that will be merged into
+ # the_dict if cond_expr evaluates to true. Optionally, a third item,
+ # false_dict, may be present. false_dict is merged into the_dict if
+ # cond_expr evaluates to false.
+ #
+ # Any dict merged into the_dict will be recursively processed for nested
+ # conditionals and other expansions, also according to phase, immediately
+ # prior to being merged.
+
+ if phase == PHASE_EARLY:
+ conditions_key = "conditions"
+ elif phase == PHASE_LATE:
+ conditions_key = "target_conditions"
+ elif phase == PHASE_LATELATE:
+ return
+ else:
+ assert False
+
+ if conditions_key not in the_dict:
+ return
+
+ conditions_list = the_dict[conditions_key]
+ # Unhook the conditions list, it's no longer needed.
+ del the_dict[conditions_key]
+
+ for condition in conditions_list:
+ merge_dict = EvalCondition(
+ condition, conditions_key, phase, variables, build_file
+ )
+
+ if merge_dict is not None:
+ # Expand variables and nested conditionals in the merge_dict before
+ # merging it.
+ ProcessVariablesAndConditionsInDict(
+ merge_dict, phase, variables, build_file
+ )
+
+ MergeDicts(the_dict, merge_dict, build_file, build_file)
+
+
+def LoadAutomaticVariablesFromDict(variables, the_dict):
+ # Any keys with plain string values in the_dict become automatic variables.
+ # The variable name is the key name with a "_" character prepended.
+ for key, value in the_dict.items():
+ if type(value) in (str, int, list):
+ variables["_" + key] = value
+
+
+def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
+ # Any keys in the_dict's "variables" dict, if it has one, becomes a
+ # variable. The variable name is the key name in the "variables" dict.
+ # Variables that end with the % character are set only if they are unset in
+ # the variables dict. the_dict_key is the name of the key that accesses
+ # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
+ # (it could be a list or it could be parentless because it is a root dict),
+ # the_dict_key will be None.
+ for key, value in the_dict.get("variables", {}).items():
+ if type(value) not in (str, int, list):
+ continue
+
+ if key.endswith("%"):
+ variable_name = key[:-1]
+ if variable_name in variables:
+ # If the variable is already set, don't set it.
+ continue
+ if the_dict_key == "variables" and variable_name in the_dict:
+ # If the variable is set without a % in the_dict, and the_dict is a
+ # variables dict (making |variables| a variables sub-dict of a
+ # variables dict), use the_dict's definition.
+ value = the_dict[variable_name]
+ else:
+ variable_name = key
+
+ variables[variable_name] = value
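A sketch of the '%' default semantics with invented names: a '%' key only takes effect when the variable is not already set:

    variables = {"use_goma": 0}
    LoadVariablesFromVariablesDict(
        variables, {"variables": {"use_goma%": 1, "extra_flag%": "-g"}}, None
    )
    # variables -> {"use_goma": 0, "extra_flag": "-g"}
    # use_goma keeps its existing value; extra_flag picks up the default.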
+
+
+def ProcessVariablesAndConditionsInDict(
+ the_dict, phase, variables_in, build_file, the_dict_key=None
+):
+ """Handle all variable and command expansion and conditional evaluation.
+
+ This function is the public entry point for all variable expansions and
+ conditional evaluations. The variables_in dictionary will not be modified
+ by this function.
+ """
+
+ # Make a copy of the variables_in dict that can be modified during the
+ # loading of automatics and the loading of the variables dict.
+ variables = variables_in.copy()
+ LoadAutomaticVariablesFromDict(variables, the_dict)
+
+ if "variables" in the_dict:
+ # Make sure all the local variables are added to the variables
+ # list before we process them so that you can reference one
+ # variable from another. They will be fully expanded by recursion
+ # in ExpandVariables.
+ for key, value in the_dict["variables"].items():
+ variables[key] = value
+
+ # Handle the associated variables dict first, so that any variable
+ # references within can be resolved prior to using them as variables.
+ # Pass a copy of the variables dict to avoid having it be tainted.
+ # Otherwise, it would have extra automatics added for everything that
+ # should just be an ordinary variable in this scope.
+ ProcessVariablesAndConditionsInDict(
+ the_dict["variables"], phase, variables, build_file, "variables"
+ )
+
+ LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+ for key, value in the_dict.items():
+ # Skip "variables", which was already processed if present.
+ if key != "variables" and type(value) is str:
+ expanded = ExpandVariables(value, phase, variables, build_file)
+ if type(expanded) not in (str, int):
+ raise ValueError(
+ "Variable expansion in this context permits str and int "
+ + "only, found "
+ + expanded.__class__.__name__
+ + " for "
+ + key
+ )
+ the_dict[key] = expanded
+
+ # Variable expansion may have resulted in changes to automatics. Reload.
+ # TODO(mark): Optimization: only reload if no changes were made.
+ variables = variables_in.copy()
+ LoadAutomaticVariablesFromDict(variables, the_dict)
+ LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+ # Process conditions in this dict. This is done after variable expansion
+ # so that conditions may take advantage of expanded variables. For example,
+ # if the_dict contains:
+ # {'type': '<(library_type)',
+ # 'conditions': [['_type=="static_library"', { ... }]]},
+ # _type, as used in the condition, will only be set to the value of
+ # library_type if variable expansion is performed before condition
+ # processing. However, condition processing should occur prior to recursion
+ # so that variables (both automatic and "variables" dict type) may be
+ # adjusted by conditions sections, merged into the_dict, and have the
+ # intended impact on contained dicts.
+ #
+ # This arrangement means that a "conditions" section containing a "variables"
+ # section will only have those variables effective in subdicts, not in
+ # the_dict. The workaround is to put a "conditions" section within a
+ # "variables" section. For example:
+ # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
+ # 'defines': ['<(define)'],
+ # 'my_subdict': {'defines': ['<(define)']}},
+ # will not result in "IS_MAC" being appended to the "defines" list in the
+ # current scope but would result in it being appended to the "defines" list
+ # within "my_subdict". By comparison:
+ # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
+ # 'defines': ['<(define)'],
+ # 'my_subdict': {'defines': ['<(define)']}},
+ # will append "IS_MAC" to both "defines" lists.
+
+ # Evaluate conditions sections, allowing variable expansions within them
+ # as well as nested conditionals. This will process a 'conditions' or
+ # 'target_conditions' section, perform appropriate merging and recursive
+ # conditional and variable processing, and then remove the conditions section
+ # from the_dict if it is present.
+ ProcessConditionsInDict(the_dict, phase, variables, build_file)
+
+ # Conditional processing may have resulted in changes to automatics or the
+ # variables dict. Reload.
+ variables = variables_in.copy()
+ LoadAutomaticVariablesFromDict(variables, the_dict)
+ LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+ # Recurse into child dicts, or process child lists which may result in
+ # further recursion into descendant dicts.
+ for key, value in the_dict.items():
+ # Skip "variables" and string values, which were already processed if
+ # present.
+ if key == "variables" or type(value) is str:
+ continue
+ if type(value) is dict:
+ # Pass a copy of the variables dict so that subdicts can't influence
+ # parents.
+ ProcessVariablesAndConditionsInDict(
+ value, phase, variables, build_file, key
+ )
+ elif type(value) is list:
+ # The list itself can't influence the variables dict, and
+ # ProcessVariablesAndConditionsInList will make copies of the variables
+ # dict if it needs to pass it to something that can influence it. No
+ # copy is necessary here.
+ ProcessVariablesAndConditionsInList(value, phase, variables, build_file)
+ elif type(value) is not int:
+ raise TypeError("Unknown type " + value.__class__.__name__ + " for " + key)
+
+
+def ProcessVariablesAndConditionsInList(the_list, phase, variables, build_file):
+ # Iterate using an index so that new values can be assigned into the_list.
+ index = 0
+ while index < len(the_list):
+ item = the_list[index]
+ if type(item) is dict:
+ # Make a copy of the variables dict so that it won't influence anything
+ # outside of its own scope.
+ ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
+ elif type(item) is list:
+ ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
+ elif type(item) is str:
+ expanded = ExpandVariables(item, phase, variables, build_file)
+ if type(expanded) in (str, int):
+ the_list[index] = expanded
+ elif type(expanded) is list:
+ the_list[index : index + 1] = expanded
+ index += len(expanded)
+
+ # index now identifies the next item to examine. Continue right now
+ # without falling into the index increment below.
+ continue
+ else:
+ raise ValueError(
+ "Variable expansion in this context permits strings and "
+ + "lists only, found "
+ + expanded.__class__.__name__
+ + " at "
+ + index
+ )
+ elif type(item) is not int:
+ raise TypeError(
+ "Unknown type " + item.__class__.__name__ + " at index " + index
+ )
+ index = index + 1
+
+
+def BuildTargetsDict(data):
+ """Builds a dict mapping fully-qualified target names to their target dicts.
+
+ |data| is a dict mapping loaded build files by pathname relative to the
+ current directory. Values in |data| are build file contents. For each
+ |data| value with a "targets" key, the value of the "targets" key is taken
+ as a list containing target dicts. Each target's fully-qualified name is
+ constructed from the pathname of the build file (|data| key) and its
+ "target_name" property. These fully-qualified names are used as the keys
+ in the returned dict. These keys provide access to the target dicts,
+ the dicts in the "targets" lists.
+ """
+
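+ # Illustrative sketch (hypothetical names): a build file "chrome/app.gyp"
+ # defining a target "app" with toolset "target" contributes the key
+ # "chrome/app.gyp:app#target" to the returned dict.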
+ targets = {}
+ for build_file in data["target_build_files"]:
+ for target in data[build_file].get("targets", []):
+ target_name = gyp.common.QualifiedTarget(
+ build_file, target["target_name"], target["toolset"]
+ )
+ if target_name in targets:
+ raise GypError("Duplicate target definitions for " + target_name)
+ targets[target_name] = target
+
+ return targets
+
+
+def QualifyDependencies(targets):
+ """Make dependency links fully-qualified relative to the current directory.
+
+ |targets| is a dict mapping fully-qualified target names to their target
+ dicts. For each target in this dict, keys known to contain dependency
+ links are examined, and any dependencies referenced will be rewritten
+ so that they are fully-qualified and relative to the current directory.
+ All rewritten dependencies are suitable for use as keys to |targets| or a
+ similar dict.
+ """
+
+ all_dependency_sections = [
+ dep + op for dep in dependency_sections for op in ("", "!", "/")
+ ]
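+ # For example, if dependency_sections contains "dependencies", this
+ # produces "dependencies", "dependencies!", and "dependencies/", covering
+ # the plain list plus its exclusion ("!") and regex ("/") variants.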
+
+ for target, target_dict in targets.items():
+ target_build_file = gyp.common.BuildFile(target)
+ toolset = target_dict["toolset"]
+ for dependency_key in all_dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ for index, dep in enumerate(dependencies):
+ dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
+ target_build_file, dep, toolset
+ )
+ if not multiple_toolsets:
+ # Ignore toolset specification in the dependency if it is specified.
+ dep_toolset = toolset
+ dependency = gyp.common.QualifiedTarget(
+ dep_file, dep_target, dep_toolset
+ )
+ dependencies[index] = dependency
+
+ # Make sure anything appearing in a list other than "dependencies" also
+ # appears in the "dependencies" list.
+ if (
+ dependency_key != "dependencies"
+ and dependency not in target_dict["dependencies"]
+ ):
+ raise GypError(
+ "Found "
+ + dependency
+ + " in "
+ + dependency_key
+ + " of "
+ + target
+ + ", but not in dependencies"
+ )
+
+
+def ExpandWildcardDependencies(targets, data):
+ """Expands dependencies specified as build_file:*.
+
+ For each target in |targets|, examines sections containing links to other
+ targets. If any such section contains a link of the form build_file:*, it
+ is taken as a wildcard link, and is expanded to list each target in
+ build_file. The |data| dict provides access to build file dicts.
+
+ Any target that does not wish to be included by wildcard can provide an
+ optional "suppress_wildcard" key in its target dict. When present and
+ true, a wildcard dependency link will not include such targets.
+
+ All dependency names, including the keys to |targets| and the values in each
+ dependency list, must be qualified when this function is called.
+ """
+
+ for target, target_dict in targets.items():
+ target_build_file = gyp.common.BuildFile(target)
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+
+ # Loop this way instead of "for dependency in" or "for index in range"
+ # because the dependencies list will be modified within the loop body.
+ index = 0
+ while index < len(dependencies):
+ (
+ dependency_build_file,
+ dependency_target,
+ dependency_toolset,
+ ) = gyp.common.ParseQualifiedTarget(dependencies[index])
+ if dependency_target != "*" and dependency_toolset != "*":
+ # Not a wildcard. Keep it moving.
+ index = index + 1
+ continue
+
+ if dependency_build_file == target_build_file:
+ # It's an error for a target to depend on all other targets in
+ # the same file, because a target cannot depend on itself.
+ raise GypError(
+ "Found wildcard in "
+ + dependency_key
+ + " of "
+ + target
+ + " referring to same build file"
+ )
+
+ # Take the wildcard out and adjust the index so that the next
+ # dependency in the list will be processed the next time through the
+ # loop.
+ del dependencies[index]
+ index = index - 1
+
+ # Loop through the targets in the other build file, adding them to
+ # this target's list of dependencies in place of the removed
+ # wildcard.
+ dependency_target_dicts = data[dependency_build_file]["targets"]
+ for dependency_target_dict in dependency_target_dicts:
+ if int(dependency_target_dict.get("suppress_wildcard", False)):
+ continue
+ dependency_target_name = dependency_target_dict["target_name"]
+ if (
+ dependency_target != "*"
+ and dependency_target != dependency_target_name
+ ):
+ continue
+ dependency_target_toolset = dependency_target_dict["toolset"]
+ if (
+ dependency_toolset != "*"
+ and dependency_toolset != dependency_target_toolset
+ ):
+ continue
+ dependency = gyp.common.QualifiedTarget(
+ dependency_build_file,
+ dependency_target_name,
+ dependency_target_toolset,
+ )
+ index = index + 1
+ dependencies.insert(index, dependency)
+
+ index = index + 1
+
+
+def Unify(items):
+ """Removes duplicate elements from items, keeping the first element."""
+ seen = {}
+ return [seen.setdefault(e, e) for e in items if e not in seen]
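+ # e.g. Unify(["a", "b", "a", "c"]) returns ["a", "b", "c"]; the first
+ # occurrence of each element wins.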
+
+
+def RemoveDuplicateDependencies(targets):
+ """Makes sure every dependency appears only once in all targets's dependency
+ lists."""
+ for target_name, target_dict in targets.items():
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ if dependencies:
+ target_dict[dependency_key] = Unify(dependencies)
+
+
+def Filter(items, item):
+ """Removes item from items."""
+ return [e for e in items if e != item]
+
+
+def RemoveSelfDependencies(targets):
+ """Remove self dependencies from targets that have the prune_self_dependency
+ variable set."""
+ for target_name, target_dict in targets.items():
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ if dependencies:
+ for t in dependencies:
+ if t == target_name:
+ if (
+ targets[t]
+ .get("variables", {})
+ .get("prune_self_dependency", 0)
+ ):
+ target_dict[dependency_key] = Filter(
+ dependencies, target_name
+ )
+
+
+def RemoveLinkDependenciesFromNoneTargets(targets):
+ """Remove dependencies having the 'link_dependency' attribute from the 'none'
+ targets."""
+ for target_name, target_dict in targets.items():
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ if dependencies:
+ for t in dependencies:
+ if target_dict.get("type", None) == "none":
+ if targets[t].get("variables", {}).get("link_dependency", 0):
+ target_dict[dependency_key] = Filter(
+ target_dict[dependency_key], t
+ )
+
+
+class DependencyGraphNode:
+ """
+
+ Attributes:
+ ref: A reference to an object that this DependencyGraphNode represents.
+ dependencies: List of DependencyGraphNodes on which this one depends.
+ dependents: List of DependencyGraphNodes that depend on this one.
+ """
+
+ class CircularException(GypError):
+ pass
+
+ def __init__(self, ref):
+ self.ref = ref
+ self.dependencies = []
+ self.dependents = []
+
+ def __repr__(self):
+ return "<DependencyGraphNode: %r>" % self.ref
+
+ def FlattenToList(self):
+ # flat_list is the sorted list of dependencies - actually, the list items
+ # are the "ref" attributes of DependencyGraphNodes. Every target will
+ # appear in flat_list after all of its dependencies, and before all of its
+ # dependents.
+ flat_list = OrderedSet()
+
+ def ExtractNodeRef(node):
+ """Extracts the object that the node represents from the given node."""
+ return node.ref
+
+ # in_degree_zeros is the list of DependencyGraphNodes that have no
+ # dependencies not in flat_list. Initially, it is a copy of the children
+ # of this node, because when the graph was built, nodes with no
+ # dependencies were made implicit dependents of the root node.
+ in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef)
+
+ while in_degree_zeros:
+ # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
+ # can be appended to flat_list. Take these nodes out of in_degree_zeros
+ # as work progresses, so that the next node to process from the list can
+ # always be accessed at a consistent position.
+ node = in_degree_zeros.pop()
+ flat_list.add(node.ref)
+
+ # Look at dependents of the node just added to flat_list. Some of them
+ # may now belong in in_degree_zeros.
+ for node_dependent in sorted(node.dependents, key=ExtractNodeRef):
+ is_in_degree_zero = True
+ # TODO: We want to check through the
+ # node_dependent.dependencies list but if it's long and we
+ # always start at the beginning, then we get O(n^2) behaviour.
+ for node_dependent_dependency in sorted(
+ node_dependent.dependencies, key=ExtractNodeRef
+ ):
+ if node_dependent_dependency.ref not in flat_list:
+ # The dependent has one or more dependencies not in flat_list.
+ # There will be more chances to add it to flat_list
+ # when examining it again as a dependent of those other
+ # dependencies, provided that there are no cycles.
+ is_in_degree_zero = False
+ break
+
+ if is_in_degree_zero:
+ # All of the dependent's dependencies are already in flat_list. Add
+ # it to in_degree_zeros where it will be processed in a future
+ # iteration of the outer loop.
+ in_degree_zeros += [node_dependent]
+
+ return list(flat_list)
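+ # Illustrative example (hypothetical targets): if A depends on B and B
+ # depends on C, FlattenToList on the root yields ["C", "B", "A"]: each
+ # target appears only after all of its dependencies.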
+
+ def FindCycles(self):
+ """
+ Returns a list of cycles in the graph, where each cycle is its own list.
+ """
+ results = []
+ visited = set()
+
+ def Visit(node, path):
+ for child in node.dependents:
+ if child in path:
+ results.append([child] + path[: path.index(child) + 1])
+ elif child not in visited:
+ visited.add(child)
+ Visit(child, [child] + path)
+
+ visited.add(self)
+ Visit(self, [self])
+
+ return results
+
+ def DirectDependencies(self, dependencies=None):
+ """Returns a list of just direct dependencies."""
+ if dependencies is None:
+ dependencies = []
+
+ for dependency in self.dependencies:
+ # Check for None, corresponding to the root node.
+ if dependency.ref and dependency.ref not in dependencies:
+ dependencies.append(dependency.ref)
+
+ return dependencies
+
+ def _AddImportedDependencies(self, targets, dependencies=None):
+ """Given a list of direct dependencies, adds indirect dependencies that
+ other dependencies have declared to export their settings.
+
+ This method does not operate on self. Rather, it operates on the list
+ of dependencies in the |dependencies| argument. For each dependency in
+ that list, if any declares that it exports the settings of one of its
+ own dependencies, those dependencies whose settings are "passed through"
+ are added to the list. As new items are added to the list, they too will
+ be processed, so it is possible to import settings through multiple levels
+ of dependencies.
+
+ This method is not terribly useful on its own; it depends on being
+ "primed" with a list of direct dependencies such as one provided by
+ DirectDependencies. DirectAndImportedDependencies is intended to be the
+ public entry point.
+ """
+
+ if dependencies is None:
+ dependencies = []
+
+ index = 0
+ while index < len(dependencies):
+ dependency = dependencies[index]
+ dependency_dict = targets[dependency]
+ # Add any dependencies whose settings should be imported to the list
+ # if not already present. Newly-added items will be checked for
+ # their own imports when the list iteration reaches them.
+ # Rather than simply appending new items, insert them after the
+ # dependency that exported them. This is done to more closely match
+ # the depth-first method used by DeepDependencies.
+ add_index = 1
+ for imported_dependency in dependency_dict.get(
+ "export_dependent_settings", []
+ ):
+ if imported_dependency not in dependencies:
+ dependencies.insert(index + add_index, imported_dependency)
+ add_index = add_index + 1
+ index = index + 1
+
+ return dependencies
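+ # Illustrative example (hypothetical targets): if |dependencies| is [B]
+ # and B's dict contains 'export_dependent_settings': [C], the returned
+ # list is [B, C]; C would itself be scanned for further exports.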
+
+ def DirectAndImportedDependencies(self, targets, dependencies=None):
+ """Returns a list of a target's direct dependencies and all indirect
+ dependencies that a dependency has advertised settings should be exported
+ through the dependency for.
+ """
+
+ dependencies = self.DirectDependencies(dependencies)
+ return self._AddImportedDependencies(targets, dependencies)
+
+ def DeepDependencies(self, dependencies=None):
+ """Returns an OrderedSet of all of a target's dependencies, recursively."""
+ if dependencies is None:
+ # Using an OrderedSet to get ordered output and fast "is it
+ # already added" checks.
+ dependencies = OrderedSet()
+
+ for dependency in self.dependencies:
+ # Check for None, corresponding to the root node.
+ if dependency.ref is None:
+ continue
+ if dependency.ref not in dependencies:
+ dependency.DeepDependencies(dependencies)
+ dependencies.add(dependency.ref)
+
+ return dependencies
+
+ def _LinkDependenciesInternal(
+ self, targets, include_shared_libraries, dependencies=None, initial=True
+ ):
+ """Returns an OrderedSet of dependency targets that are linked
+ into this target.
+
+ This function has a split personality, depending on the setting of
+ |initial|. Outside callers should always leave |initial| at its default
+ setting.
+
+ When adding a target to the list of dependencies, this function will
+ recurse into itself with |initial| set to False, to collect dependencies
+ that are linked into the linkable target for which the list is being built.
+
+ If |include_shared_libraries| is False, the resulting dependencies will not
+ include shared_library targets that are linked into this target.
+ """
+ if dependencies is None:
+ # Using an OrderedSet to get ordered output and fast "is it
+ # already added" checks.
+ dependencies = OrderedSet()
+
+ # Check for None, corresponding to the root node.
+ if self.ref is None:
+ return dependencies
+
+ # It's kind of sucky that |targets| has to be passed into this function,
+ # but that's presently the easiest way to access the target dicts so that
+ # this function can find target types.
+
+ if "target_name" not in targets[self.ref]:
+ raise GypError("Missing 'target_name' field in target.")
+
+ if "type" not in targets[self.ref]:
+ raise GypError(
+ "Missing 'type' field in target %s" % targets[self.ref]["target_name"]
+ )
+
+ target_type = targets[self.ref]["type"]
+
+ is_linkable = target_type in linkable_types
+
+ if initial and not is_linkable:
+ # If this is the first target being examined and it's not linkable,
+ # return an empty list of link dependencies, because the link
+ # dependencies are intended to apply to the target itself (initial is
+ # True) and this target won't be linked.
+ return dependencies
+
+ # Don't traverse 'none' targets if explicitly excluded.
+ if target_type == "none" and not targets[self.ref].get(
+ "dependencies_traverse", True
+ ):
+ dependencies.add(self.ref)
+ return dependencies
+
+ # Executables, mac kernel extensions, windows drivers and loadable modules
+ # are already fully and finally linked. Nothing else can be a link
+ # dependency of them, there can only be dependencies in the sense that a
+ # dependent target might run an executable or load the loadable_module.
+ if not initial and target_type in (
+ "executable",
+ "loadable_module",
+ "mac_kernel_extension",
+ "windows_driver",
+ ):
+ return dependencies
+
+ # Shared libraries are already fully linked. They should only be included
+ # in |dependencies| when adjusting static library dependencies (in order to
+ # link against the shared_library's import lib), but should not be included
+ # in |dependencies| when propagating link_settings.
+ # The |include_shared_libraries| flag controls which of these two cases we
+ # are handling.
+ if (
+ not initial
+ and target_type == "shared_library"
+ and not include_shared_libraries
+ ):
+ return dependencies
+
+ # The target is linkable, add it to the list of link dependencies.
+ if self.ref not in dependencies:
+ dependencies.add(self.ref)
+ if initial or not is_linkable:
+ # If this is a subsequent target and it's linkable, don't look any
+ # further for linkable dependencies, as they'll already be linked into
+ # this linkable target. Always look at dependencies of the initial
+ # target, and always look at dependencies of non-linkables.
+ for dependency in self.dependencies:
+ dependency._LinkDependenciesInternal(
+ targets, include_shared_libraries, dependencies, False
+ )
+
+ return dependencies
+
+ def DependenciesForLinkSettings(self, targets):
+ """
+ Returns a list of dependency targets whose link_settings should be merged
+ into this target.
+ """
+
+ # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
+ # link_settings are propagated. So for now, we will allow it, unless the
+ # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
+ # False. Once chrome is fixed, we can remove this flag.
+ include_shared_libraries = targets[self.ref].get(
+ "allow_sharedlib_linksettings_propagation", True
+ )
+ return self._LinkDependenciesInternal(targets, include_shared_libraries)
+
+ def DependenciesToLinkAgainst(self, targets):
+ """
+ Returns a list of dependency targets that are linked into this target.
+ """
+ return self._LinkDependenciesInternal(targets, True)
+
+
+def BuildDependencyList(targets):
+ # Create a DependencyGraphNode for each target. Put it into a dict for easy
+ # access.
+ dependency_nodes = {}
+ for target, spec in targets.items():
+ if target not in dependency_nodes:
+ dependency_nodes[target] = DependencyGraphNode(target)
+
+ # Set up the dependency links. Targets that have no dependencies are treated
+ # as dependent on root_node.
+ root_node = DependencyGraphNode(None)
+ for target, spec in targets.items():
+ target_node = dependency_nodes[target]
+ dependencies = spec.get("dependencies")
+ if not dependencies:
+ target_node.dependencies = [root_node]
+ root_node.dependents.append(target_node)
+ else:
+ for dependency in dependencies:
+ dependency_node = dependency_nodes.get(dependency)
+ if not dependency_node:
+ raise GypError(
+ "Dependency '%s' not found while "
+ "trying to load target %s" % (dependency, target)
+ )
+ target_node.dependencies.append(dependency_node)
+ dependency_node.dependents.append(target_node)
+
+ flat_list = root_node.FlattenToList()
+
+ # If there's anything left unvisited, there must be a circular dependency
+ # (cycle).
+ if len(flat_list) != len(targets):
+ if not root_node.dependents:
+ # If all targets have dependencies, add the first target as a dependent
+ # of root_node so that the cycle can be discovered from root_node.
+ target = next(iter(targets))
+ target_node = dependency_nodes[target]
+ target_node.dependencies.append(root_node)
+ root_node.dependents.append(target_node)
+
+ cycles = []
+ for cycle in root_node.FindCycles():
+ paths = [node.ref for node in cycle]
+ cycles.append("Cycle: %s" % " -> ".join(paths))
+ raise DependencyGraphNode.CircularException(
+ "Cycles in dependency graph detected:\n" + "\n".join(cycles)
+ )
+
+ return [dependency_nodes, flat_list]
+
+
+def VerifyNoGYPFileCircularDependencies(targets):
+ # Create a DependencyGraphNode for each gyp file containing a target. Put
+ # it into a dict for easy access.
+ dependency_nodes = {}
+ for target in targets:
+ build_file = gyp.common.BuildFile(target)
+ if build_file not in dependency_nodes:
+ dependency_nodes[build_file] = DependencyGraphNode(build_file)
+
+ # Set up the dependency links.
+ for target, spec in targets.items():
+ build_file = gyp.common.BuildFile(target)
+ build_file_node = dependency_nodes[build_file]
+ target_dependencies = spec.get("dependencies", [])
+ for dependency in target_dependencies:
+ try:
+ dependency_build_file = gyp.common.BuildFile(dependency)
+ except GypError as e:
+ gyp.common.ExceptionAppend(
+ e, "while computing dependencies of .gyp file %s" % build_file
+ )
+ raise
+
+ if dependency_build_file == build_file:
+ # A .gyp file is allowed to refer back to itself.
+ continue
+ dependency_node = dependency_nodes.get(dependency_build_file)
+ if not dependency_node:
+ raise GypError("Dependency '%s' not found" % dependency_build_file)
+ if dependency_node not in build_file_node.dependencies:
+ build_file_node.dependencies.append(dependency_node)
+ dependency_node.dependents.append(build_file_node)
+
+ # Files that have no dependencies are treated as dependent on root_node.
+ root_node = DependencyGraphNode(None)
+ for build_file_node in dependency_nodes.values():
+ if len(build_file_node.dependencies) == 0:
+ build_file_node.dependencies.append(root_node)
+ root_node.dependents.append(build_file_node)
+
+ flat_list = root_node.FlattenToList()
+
+ # If there's anything left unvisited, there must be a circular dependency
+ # (cycle).
+ if len(flat_list) != len(dependency_nodes):
+ if not root_node.dependents:
+ # If all files have dependencies, add the first file as a dependent
+ # of root_node so that the cycle can be discovered from root_node.
+ file_node = next(iter(dependency_nodes.values()))
+ file_node.dependencies.append(root_node)
+ root_node.dependents.append(file_node)
+ cycles = []
+ for cycle in root_node.FindCycles():
+ paths = [node.ref for node in cycle]
+ cycles.append("Cycle: %s" % " -> ".join(paths))
+ raise DependencyGraphNode.CircularException(
+ "Cycles in .gyp file dependency graph detected:\n" + "\n".join(cycles)
+ )
+
+
+def DoDependentSettings(key, flat_list, targets, dependency_nodes):
+ # key should be one of all_dependent_settings, direct_dependent_settings,
+ # or link_settings.
+
+ for target in flat_list:
+ target_dict = targets[target]
+ build_file = gyp.common.BuildFile(target)
+
+ if key == "all_dependent_settings":
+ dependencies = dependency_nodes[target].DeepDependencies()
+ elif key == "direct_dependent_settings":
+ dependencies = dependency_nodes[target].DirectAndImportedDependencies(
+ targets
+ )
+ elif key == "link_settings":
+ dependencies = dependency_nodes[target].DependenciesForLinkSettings(targets)
+ else:
+ raise GypError(
+ "DoDependentSettings doesn't know how to determine "
+ "dependencies for " + key
+ )
+
+ for dependency in dependencies:
+ dependency_dict = targets[dependency]
+ if key not in dependency_dict:
+ continue
+ dependency_build_file = gyp.common.BuildFile(dependency)
+ MergeDicts(
+ target_dict, dependency_dict[key], build_file, dependency_build_file
+ )
+
+
+def AdjustStaticLibraryDependencies(
+ flat_list, targets, dependency_nodes, sort_dependencies
+):
+ # Recompute target "dependencies" properties. For each static library
+ # target, remove "dependencies" entries referring to other static libraries,
+ # unless the dependency has the "hard_dependency" attribute set. For each
+ # linkable target, add a "dependencies" entry referring to all of the
+ # target's computed list of link dependencies (including static libraries)
+ # if no such entry is already present.
+ for target in flat_list:
+ target_dict = targets[target]
+ target_type = target_dict["type"]
+
+ if target_type == "static_library":
+ if "dependencies" not in target_dict:
+ continue
+
+ target_dict["dependencies_original"] = target_dict.get("dependencies", [])[
+ :
+ ]
+
+ # A static library should not depend on another static library unless
+ # the dependency relationship is "hard," which should only be done when
+ # a dependent relies on some side effect other than just the build
+ # product, like a rule or action output. Further, if a target has a
+ # non-hard dependency, but that dependency exports a hard dependency,
+ # the non-hard dependency can safely be removed, but the exported hard
+ # dependency must be added to the target to keep the same dependency
+ # ordering.
+ dependencies = dependency_nodes[target].DirectAndImportedDependencies(
+ targets
+ )
+ index = 0
+ while index < len(dependencies):
+ dependency = dependencies[index]
+ dependency_dict = targets[dependency]
+
+ # Remove every non-hard static library dependency and remove every
+ # non-static library dependency that isn't a direct dependency.
+ if (
+ dependency_dict["type"] == "static_library"
+ and not dependency_dict.get("hard_dependency", False)
+ ) or (
+ dependency_dict["type"] != "static_library"
+ and dependency not in target_dict["dependencies"]
+ ):
+ # Take the dependency out of the list, and don't increment index
+ # because the next dependency to analyze will shift into the index
+ # formerly occupied by the one being removed.
+ del dependencies[index]
+ else:
+ index = index + 1
+
+ # Update the dependencies. If the dependencies list is empty, it's not
+ # needed, so unhook it.
+ if len(dependencies) > 0:
+ target_dict["dependencies"] = dependencies
+ else:
+ del target_dict["dependencies"]
+
+ elif target_type in linkable_types:
+ # Get a list of dependency targets that should be linked into this
+ # target. Add them to the dependencies list if they're not already
+ # present.
+
+ link_dependencies = dependency_nodes[target].DependenciesToLinkAgainst(
+ targets
+ )
+ for dependency in link_dependencies:
+ if dependency == target:
+ continue
+ if "dependencies" not in target_dict:
+ target_dict["dependencies"] = []
+ if dependency not in target_dict["dependencies"]:
+ target_dict["dependencies"].append(dependency)
+ # Sort the dependencies list in the order from dependents to dependencies.
+ # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
+ # Note: flat_list is already sorted in the order from dependencies to
+ # dependents.
+ if sort_dependencies and "dependencies" in target_dict:
+ target_dict["dependencies"] = [
+ dep
+ for dep in reversed(flat_list)
+ if dep in target_dict["dependencies"]
+ ]
+
+
+# Initialize this here to speed up MakePathRelative.
+exception_re = re.compile(r"""["']?[-/$<>^]""")
+
+
+def MakePathRelative(to_file, fro_file, item):
+ # If item is a relative path, it's relative to the build file dict that it's
+ # coming from. Fix it up to make it relative to the build file dict that
+ # it's going into.
+ # Exception: any |item| that begins with these special characters is
+ # returned without modification.
+ # / Used when a path is already absolute (shortcut optimization;
+ # such paths would be returned as absolute anyway)
+ # $ Used for build environment variables
+ # - Used for some build environment flags (such as -lapr-1 in a
+ # "libraries" section)
+ # < Used for our own variable and command expansions (see ExpandVariables)
+ # > Used for our own variable and command expansions (see ExpandVariables)
+ # ^ Used for our own variable and command expansions (see ExpandVariables)
+ #
+ # "/' Used when a value is quoted. If these are present, then we
+ # check the second character instead.
+ #
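+ # Illustrative example (hypothetical paths): an item "src/x.cc" merged
+ # from "dir2/b.gyp" into "dir1/a.gyp" is rewritten to "../dir2/src/x.cc",
+ # rebasing it from fro_file's directory onto to_file's directory.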
+ if to_file == fro_file or exception_re.match(item):
+ return item
+ else:
+ # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
+ # temporary measure. This should really be addressed by keeping all paths
+ # in POSIX until actual project generation.
+ ret = os.path.normpath(
+ os.path.join(
+ gyp.common.RelativePath(
+ os.path.dirname(fro_file), os.path.dirname(to_file)
+ ),
+ item,
+ )
+ ).replace("\\", "/")
+ if item.endswith("/"):
+ ret += "/"
+ return ret
+
+
+def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
+ # The Python documentation recommends that objects which do not support
+ # hashing set __hash__ to None. Python library objects follow this rule.
+ def is_hashable(val):
+ return val.__hash__
+
+ # If x is hashable, returns whether x is in s. Else returns whether x is in items.
+ def is_in_set_or_list(x, s, items):
+ if is_hashable(x):
+ return x in s
+ return x in items
+
+ prepend_index = 0
+
+ # Make membership testing of hashables in |to| (in particular, strings)
+ # faster.
+ hashable_to_set = {x for x in to if is_hashable(x)}
+ for item in fro:
+ singleton = False
+ if type(item) in (str, int):
+ # The cheap and easy case.
+ if is_paths:
+ to_item = MakePathRelative(to_file, fro_file, item)
+ else:
+ to_item = item
+
+ if not (type(item) is str and item.startswith("-")):
+ # Any string that doesn't begin with a "-" is a singleton - it can
+ # only appear once in a list, which is enforced by the list merge
+ # append or prepend below.
+ singleton = True
+ elif type(item) is dict:
+ # Make a copy of the dictionary, continuing to look for paths to fix.
+ # The other intelligent aspects of merge processing won't apply because
+ # item is being merged into an empty dict.
+ to_item = {}
+ MergeDicts(to_item, item, to_file, fro_file)
+ elif type(item) is list:
+ # Recurse, making a copy of the list. If the list contains any
+ # descendant dicts, path fixing will occur. Note that here, custom
+ # values for is_paths and append are dropped; those are only to be
+ # applied to |to| and |fro|, not sublists of |fro|. append shouldn't
+ # matter anyway because the new |to_item| list is empty.
+ to_item = []
+ MergeLists(to_item, item, to_file, fro_file)
+ else:
+ raise TypeError(
+ "Attempt to merge list item of unsupported type "
+ + item.__class__.__name__
+ )
+
+ if append:
+ # If appending a singleton that's already in the list, don't append.
+ # This ensures that the earliest occurrence of the item will stay put.
+ if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
+ to.append(to_item)
+ if is_hashable(to_item):
+ hashable_to_set.add(to_item)
+ else:
+ # If prepending a singleton that's already in the list, remove the
+ # existing instance and proceed with the prepend. This ensures that the
+ # item appears at the earliest possible position in the list.
+ while singleton and to_item in to:
+ to.remove(to_item)
+
+ # Don't just insert everything at index 0. That would prepend the new
+ # items to the list in reverse order, which would be an unwelcome
+ # surprise.
+ to.insert(prepend_index, to_item)
+ if is_hashable(to_item):
+ hashable_to_set.add(to_item)
+ prepend_index = prepend_index + 1
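+ # Illustrative example (hypothetical lists): with to=["a", "b"] and
+ # fro=["b", "c"], append=True leaves to == ["a", "b", "c"] (the singleton
+ # "b" keeps its earliest position), while append=False leaves
+ # to == ["b", "c", "a"] (the prepend re-hoists "b" to the front).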
+
+
+def MergeDicts(to, fro, to_file, fro_file):
+ # I wanted to name the parameter "from" but it's a Python keyword...
+ for k, v in fro.items():
+ # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
+ # copy semantics. Something else may want to merge from the |fro| dict
+ # later, and having the same dict ref pointed to twice in the tree isn't
+ # what anyone wants considering that the dicts may subsequently be
+ # modified.
+ if k in to:
+ bad_merge = False
+ if type(v) in (str, int):
+ if type(to[k]) not in (str, int):
+ bad_merge = True
+ elif not isinstance(v, type(to[k])):
+ bad_merge = True
+
+ if bad_merge:
+ raise TypeError(
+ "Attempt to merge dict value of type "
+ + v.__class__.__name__
+ + " into incompatible type "
+ + to[k].__class__.__name__
+ + " for key "
+ + k
+ )
+ if type(v) in (str, int):
+ # Overwrite the existing value, if any. Cheap and easy.
+ is_path = IsPathSection(k)
+ if is_path:
+ to[k] = MakePathRelative(to_file, fro_file, v)
+ else:
+ to[k] = v
+ elif type(v) is dict:
+ # Recurse, guaranteeing copies will be made of objects that require it.
+ if k not in to:
+ to[k] = {}
+ MergeDicts(to[k], v, to_file, fro_file)
+ elif type(v) is list:
+ # Lists in dicts can be merged with different policies, depending on
+ # how the key in the "from" dict (k, the from-key) is written.
+ #
+ # If the from-key ends with:   ...the to-list has this action applied
+ #                              when receiving the from-list:
+ #   =                          replace
+ #   +                          prepend
+ #   ?                          set, only if the to-list does not yet exist
+ #   (none)                     append
+ #
+ # This logic is list-specific, but since it relies on the associated
+ # dict key, it's checked in this dict-oriented function.
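+ # Illustrative example (hypothetical key): if |to| already holds
+ # 'defines': ['A'], merging 'defines=': ['B'] yields ['B'],
+ # 'defines+': ['B'] yields ['B', 'A'], 'defines?': ['B'] keeps ['A'],
+ # and a plain 'defines': ['B'] yields ['A', 'B'].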
+ ext = k[-1]
+ append = True
+ if ext == "=":
+ list_base = k[:-1]
+ lists_incompatible = [list_base, list_base + "?"]
+ to[list_base] = []
+ elif ext == "+":
+ list_base = k[:-1]
+ lists_incompatible = [list_base + "=", list_base + "?"]
+ append = False
+ elif ext == "?":
+ list_base = k[:-1]
+ lists_incompatible = [list_base, list_base + "=", list_base + "+"]
+ else:
+ list_base = k
+ lists_incompatible = [list_base + "=", list_base + "?"]
+
+ # Some combinations of merge policies appearing together are meaningless.
+ # It's stupid to replace and append simultaneously, for example. Append
+ # and prepend are the only policies that can coexist.
+ for list_incompatible in lists_incompatible:
+ if list_incompatible in fro:
+ raise GypError(
+ "Incompatible list policies " + k + " and " + list_incompatible
+ )
+
+ if list_base in to:
+ if ext == "?":
+ # If the key ends in "?", the list will only be merged if it doesn't
+ # already exist.
+ continue
+ elif type(to[list_base]) is not list:
+ # This may not have been checked above if merging in a list with an
+ # extension character.
+ raise TypeError(
+ "Attempt to merge dict value of type "
+ + v.__class__.__name__
+ + " into incompatible type "
+ + to[list_base].__class__.__name__
+ + " for key "
+ + list_base
+ + "("
+ + k
+ + ")"
+ )
+ else:
+ to[list_base] = []
+
+ # Call MergeLists, which will make copies of objects that require it.
+ # MergeLists can recurse back into MergeDicts, although only to make
+ # copies of dicts (with paths fixed). There will be no subsequent dict
+ # "merging" once entering a list, because lists are always replaced,
+ # appended to, or prepended to.
+ is_paths = IsPathSection(list_base)
+ MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
+ else:
+ raise TypeError(
+ "Attempt to merge dict value of unsupported type "
+ + v.__class__.__name__
+ + " for key "
+ + k
+ )
+
+
+def MergeConfigWithInheritance(
+ new_configuration_dict, build_file, target_dict, configuration, visited
+):
+ # Skip if previously visited.
+ if configuration in visited:
+ return
+
+ # Look at this configuration.
+ configuration_dict = target_dict["configurations"][configuration]
+
+ # Merge in parents.
+ for parent in configuration_dict.get("inherit_from", []):
+ MergeConfigWithInheritance(
+ new_configuration_dict,
+ build_file,
+ target_dict,
+ parent,
+ visited + [configuration],
+ )
+
+ # Merge it into the new config.
+ MergeDicts(new_configuration_dict, configuration_dict, build_file, build_file)
+
+ # Drop abstract.
+ if "abstract" in new_configuration_dict:
+ del new_configuration_dict["abstract"]
+
+
+def SetUpConfigurations(target, target_dict):
+ # key_suffixes is a list of key suffixes that might appear on key names.
+ # These suffixes are handled in conditional evaluations (for =, +, and ?)
+ # and rules/exclude processing (for ! and /). Keys with these suffixes
+ # should be treated the same as keys without.
+ key_suffixes = ["=", "+", "?", "!", "/"]
+
+ build_file = gyp.common.BuildFile(target)
+
+ # Provide a single configuration by default if none exists.
+ # TODO(mark): Signal an error if default_configurations exists but
+ # configurations does not.
+ if "configurations" not in target_dict:
+ target_dict["configurations"] = {"Default": {}}
+ if "default_configuration" not in target_dict:
+ concrete = [
+ i
+ for (i, config) in target_dict["configurations"].items()
+ if not config.get("abstract")
+ ]
+ target_dict["default_configuration"] = sorted(concrete)[0]
+
+ merged_configurations = {}
+ configs = target_dict["configurations"]
+ for (configuration, old_configuration_dict) in configs.items():
+ # Skip abstract configurations (saves work only).
+ if old_configuration_dict.get("abstract"):
+ continue
+ # Configurations inherit (most) settings from the enclosing target scope.
+ # Get the inheritance relationship right by making a copy of the target
+ # dict.
+ new_configuration_dict = {}
+ for (key, target_val) in target_dict.items():
+ key_ext = key[-1:]
+ if key_ext in key_suffixes:
+ key_base = key[:-1]
+ else:
+ key_base = key
+ if key_base not in non_configuration_keys:
+ new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
+
+ # Merge in configuration (with all its parents first).
+ MergeConfigWithInheritance(
+ new_configuration_dict, build_file, target_dict, configuration, []
+ )
+
+ merged_configurations[configuration] = new_configuration_dict
+
+ # Put the new configurations back into the target dict as a configuration.
+ for configuration in merged_configurations.keys():
+ target_dict["configurations"][configuration] = merged_configurations[
+ configuration
+ ]
+
+ # Now drop all the abstract ones.
+ configs = target_dict["configurations"]
+ target_dict["configurations"] = {
+ k: v for k, v in configs.items() if not v.get("abstract")
+ }
+
+ # Now that all of the target's configurations have been built, go through
+ # the target dict's keys and remove everything that's been moved into a
+ # "configurations" section.
+ delete_keys = []
+ for key in target_dict:
+ key_ext = key[-1:]
+ if key_ext in key_suffixes:
+ key_base = key[:-1]
+ else:
+ key_base = key
+ if key_base not in non_configuration_keys:
+ delete_keys.append(key)
+ for key in delete_keys:
+ del target_dict[key]
+
+ # Check the configurations to see if they contain invalid keys.
+ for configuration in target_dict["configurations"].keys():
+ configuration_dict = target_dict["configurations"][configuration]
+ for key in configuration_dict.keys():
+ if key in invalid_configuration_keys:
+ raise GypError(
+ "%s not allowed in the %s configuration, found in "
+ "target %s" % (key, configuration, target)
+ )
+
+
+def ProcessListFiltersInDict(name, the_dict):
+ """Process regular expression and exclusion-based filters on lists.
+
+ An exclusion list is in a dict key named with a trailing "!", like
+ "sources!". Every item in such a list is removed from the associated
+ main list, which in this example would be "sources". Removed items are
+ placed into a "sources_excluded" list in the dict.
+
+ Regular expression (regex) filters are contained in dict keys named with a
+ trailing "/", such as "sources/" to operate on the "sources" list. Regex
+ filters in a dict take the form:
+ 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
+ ['include', '_mac\\.cc$'] ],
+ The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
+ _win.cc. The second filter then includes all files ending in _mac.cc that
+ are now or were once in the "sources" list. Items matching an "exclude"
+ filter are subject to the same processing as would occur if they were listed
+ by name in an exclusion list (ending in "!"). Items matching an "include"
+ filter are brought back into the main list if previously excluded by an
+ exclusion list or exclusion regex filter. Subsequent matching "exclude"
+ patterns can still cause items to be excluded after matching an "include".
+ """
+
+ # Look through the dictionary for any lists whose keys end in "!" or "/".
+ # These are lists that will be treated as exclude lists and regular
+ # expression-based exclude/include lists. Collect the lists that are
+ # needed first, looking for the lists that they operate on, and assemble
+ # them into |lists|. This is done in a separate loop up front, because
+ # the _included and _excluded keys need to be added to the_dict, and that
+ # can't be done while iterating through it.
+
+ lists = []
+ del_lists = []
+ for key, value in the_dict.items():
+ operation = key[-1]
+ if operation != "!" and operation != "/":
+ continue
+
+ if type(value) is not list:
+ raise ValueError(
+ name + " key " + key + " must be list, not " + value.__class__.__name__
+ )
+
+ list_key = key[:-1]
+ if list_key not in the_dict:
+ # This happens when there's a list like "sources!" but no corresponding
+ # "sources" list. Since there's nothing for it to operate on, queue up
+ # the "sources!" list for deletion now.
+ del_lists.append(key)
+ continue
+
+ if type(the_dict[list_key]) is not list:
+ value = the_dict[list_key]
+ raise ValueError(
+ name
+ + " key "
+ + list_key
+ + " must be list, not "
+ + value.__class__.__name__
+ + " when applying "
+ + {"!": "exclusion", "/": "regex"}[operation]
+ )
+
+ if list_key not in lists:
+ lists.append(list_key)
+
+ # Delete the lists that are known to be unneeded at this point.
+ for del_list in del_lists:
+ del the_dict[del_list]
+
+ for list_key in lists:
+ the_list = the_dict[list_key]
+
+ # Initialize the list_actions list, which is parallel to the_list. Each
+ # item in list_actions identifies whether the corresponding item in
+ # the_list should be excluded, unconditionally preserved (included), or
+ # whether no exclusion or inclusion has been applied. Items for which
+ # no exclusion or inclusion has been applied (yet) have value -1, items
+ # excluded have value 0, and items included have value 1. Includes and
+ # excludes override previous actions. All items in list_actions are
+ # initialized to -1 because no excludes or includes have been processed
+ # yet.
+ list_actions = [-1] * len(the_list)
+
+ exclude_key = list_key + "!"
+ if exclude_key in the_dict:
+ for exclude_item in the_dict[exclude_key]:
+ for index, list_item in enumerate(the_list):
+ if exclude_item == list_item:
+ # This item matches the exclude_item, so set its action to 0
+ # (exclude).
+ list_actions[index] = 0
+
+ # The "whatever!" list is no longer needed, dump it.
+ del the_dict[exclude_key]
+
+ regex_key = list_key + "/"
+ if regex_key in the_dict:
+ for regex_item in the_dict[regex_key]:
+ [action, pattern] = regex_item
+ pattern_re = re.compile(pattern)
+
+ if action == "exclude":
+ # This item matches an exclude regex, so set its value to 0 (exclude).
+ action_value = 0
+ elif action == "include":
+ # This item matches an include regex, so set its value to 1 (include).
+ action_value = 1
+ else:
+ # This is an action that doesn't make any sense.
+ raise ValueError(
+ "Unrecognized action "
+ + action
+ + " in "
+ + name
+ + " key "
+ + regex_key
+ )
+
+ for index, list_item in enumerate(the_list):
+ if list_actions[index] == action_value:
+ # Even if the regex matches, nothing will change so continue
+ # (regex searches are expensive).
+ continue
+ if pattern_re.search(list_item):
+ # Regular expression match.
+ list_actions[index] = action_value
+
+ # The "whatever/" list is no longer needed, dump it.
+ del the_dict[regex_key]
+
+ # Add excluded items to the excluded list.
+ #
+ # Note that exclude_key ("sources!") is different from excluded_key
+ # ("sources_excluded"). The exclude_key list is input and it was already
+ # processed and deleted; the excluded_key list is output and it's about
+ # to be created.
+ excluded_key = list_key + "_excluded"
+ if excluded_key in the_dict:
+ raise GypError(
+ name + " key " + excluded_key + " must not be present prior "
+ " to applying exclusion/regex filters for " + list_key
+ )
+
+ excluded_list = []
+
+ # Go backwards through the list_actions list so that as items are deleted,
+ # the indices of items that haven't been seen yet don't shift. That means
+ # that things need to be prepended to excluded_list to maintain them in the
+ # same order that they existed in the_list.
+ for index in range(len(list_actions) - 1, -1, -1):
+ if list_actions[index] == 0:
+ # Dump anything with action 0 (exclude). Keep anything with action 1
+ # (include) or -1 (no include or exclude seen for the item).
+ excluded_list.insert(0, the_list[index])
+ del the_list[index]
+
+ # If anything was excluded, put the excluded list into the_dict at
+ # excluded_key.
+ if len(excluded_list) > 0:
+ the_dict[excluded_key] = excluded_list
+
+ # Now recurse into subdicts and lists that may contain dicts.
+ for key, value in the_dict.items():
+ if type(value) is dict:
+ ProcessListFiltersInDict(key, value)
+ elif type(value) is list:
+ ProcessListFiltersInList(key, value)
+
+
+def ProcessListFiltersInList(name, the_list):
+ for item in the_list:
+ if type(item) is dict:
+ ProcessListFiltersInDict(name, item)
+ elif type(item) is list:
+ ProcessListFiltersInList(name, item)
+
+
+def ValidateTargetType(target, target_dict):
+ """Ensures the 'type' field on the target is one of the known types.
+
+ Arguments:
+ target: string, name of target.
+ target_dict: dict, target spec.
+
+ Raises an exception on error.
+ """
+ VALID_TARGET_TYPES = (
+ "executable",
+ "loadable_module",
+ "static_library",
+ "shared_library",
+ "mac_kernel_extension",
+ "none",
+ "windows_driver",
+ )
+ target_type = target_dict.get("type", None)
+ if target_type not in VALID_TARGET_TYPES:
+ raise GypError(
+ "Target %s has an invalid target type '%s'. "
+ "Must be one of %s." % (target, target_type, "/".join(VALID_TARGET_TYPES))
+ )
+ if (
+ target_dict.get("standalone_static_library", 0)
+ and target_type != "static_library"
+ ):
+ raise GypError(
+ "Target %s has type %s but standalone_static_library flag is"
+ " only valid for static_library type." % (target, target_type)
+ )
+
+
+def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
+ """Ensures that the rules sections in target_dict are valid and consistent,
+ and determines which sources they apply to.
+
+ Arguments:
+ target: string, name of target.
+ target_dict: dict, target spec containing "rules" and "sources" lists.
+ extra_sources_for_rules: a list of keys to scan for rule matches in
+ addition to 'sources'.
+ """
+
+ # Dicts to map between values found in rules' 'rule_name' and 'extension'
+ # keys and the rule dicts themselves.
+ rule_names = {}
+ rule_extensions = {}
+
+ rules = target_dict.get("rules", [])
+ for rule in rules:
+ # Make sure that there's no conflict among rule names and extensions.
+ rule_name = rule["rule_name"]
+ if rule_name in rule_names:
+ raise GypError(
+ f"rule {rule_name} exists in duplicate, target {target}"
+ )
+ rule_names[rule_name] = rule
+
+ rule_extension = rule["extension"]
+ if rule_extension.startswith("."):
+ rule_extension = rule_extension[1:]
+ if rule_extension in rule_extensions:
+ raise GypError(
+ (
+ "extension %s associated with multiple rules, "
+ + "target %s rules %s and %s"
+ )
+ % (
+ rule_extension,
+ target,
+ rule_extensions[rule_extension]["rule_name"],
+ rule_name,
+ )
+ )
+ rule_extensions[rule_extension] = rule
+
+ # Make sure rule_sources isn't already there. It's going to be
+ # created below if needed.
+ if "rule_sources" in rule:
+ raise GypError(
+ "rule_sources must not exist in input, target %s rule %s"
+ % (target, rule_name)
+ )
+
+ rule_sources = []
+ source_keys = ["sources"]
+ source_keys.extend(extra_sources_for_rules)
+ for source_key in source_keys:
+ for source in target_dict.get(source_key, []):
+ (source_root, source_extension) = os.path.splitext(source)
+ if source_extension.startswith("."):
+ source_extension = source_extension[1:]
+ if source_extension == rule_extension:
+ rule_sources.append(source)
+
+ if len(rule_sources) > 0:
+ rule["rule_sources"] = rule_sources
+
+
+def ValidateRunAsInTarget(target, target_dict, build_file):
+ target_name = target_dict.get("target_name")
+ run_as = target_dict.get("run_as")
+ if not run_as:
+ return
+ if type(run_as) is not dict:
+ raise GypError(
+ "The 'run_as' in target %s from file %s should be a "
+ "dictionary." % (target_name, build_file)
+ )
+ action = run_as.get("action")
+ if not action:
+ raise GypError(
+ "The 'run_as' in target %s from file %s must have an "
+ "'action' section." % (target_name, build_file)
+ )
+ if type(action) is not list:
+ raise GypError(
+ "The 'action' for 'run_as' in target %s from file %s "
+ "must be a list." % (target_name, build_file)
+ )
+ working_directory = run_as.get("working_directory")
+ if working_directory and type(working_directory) is not str:
+ raise GypError(
+ "The 'working_directory' for 'run_as' in target %s "
+ "in file %s should be a string." % (target_name, build_file)
+ )
+ environment = run_as.get("environment")
+ if environment and type(environment) is not dict:
+ raise GypError(
+ "The 'environment' for 'run_as' in target %s "
+ "in file %s should be a dictionary." % (target_name, build_file)
+ )
+
+
+def ValidateActionsInTarget(target, target_dict, build_file):
+ """Validates the inputs to the actions in a target."""
+ target_name = target_dict.get("target_name")
+ actions = target_dict.get("actions", [])
+ for action in actions:
+ action_name = action.get("action_name")
+ if not action_name:
+ raise GypError(
+ "Anonymous action in target %s. "
+ "An action must have an 'action_name' field." % target_name
+ )
+ inputs = action.get("inputs", None)
+ if inputs is None:
+ raise GypError("Action in target %s has no inputs." % target_name)
+ action_command = action.get("action")
+ if action_command and not action_command[0]:
+ raise GypError("Empty action as command in target %s." % target_name)
+
+
+def TurnIntIntoStrInDict(the_dict):
+ """Given dict the_dict, recursively converts all integers into strings.
+ """
+ # Iterate over a snapshot of the items so that integer keys can be deleted
+ # and reinserted as strings without invalidating the iterator.
+ for k, v in list(the_dict.items()):
+ if type(v) is int:
+ v = str(v)
+ the_dict[k] = v
+ elif type(v) is dict:
+ TurnIntIntoStrInDict(v)
+ elif type(v) is list:
+ TurnIntIntoStrInList(v)
+
+ if type(k) is int:
+ del the_dict[k]
+ the_dict[str(k)] = v
+
+
+def TurnIntIntoStrInList(the_list):
+ """Given list the_list, recursively converts all integers into strings.
+ """
+ for index, item in enumerate(the_list):
+ if type(item) is int:
+ the_list[index] = str(item)
+ elif type(item) is dict:
+ TurnIntIntoStrInDict(item)
+ elif type(item) is list:
+ TurnIntIntoStrInList(item)
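+ # e.g. TurnIntIntoStrInDict({1: [2, {"a": 3}]}) leaves the dict as
+ # {"1": ["2", {"a": "3"}]}: keys and values are converted recursively.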
+
+
+def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, data):
+ """Return only the targets that are deep dependencies of |root_targets|."""
+ qualified_root_targets = []
+ for target in root_targets:
+ target = target.strip()
+ qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
+ if not qualified_targets:
+ raise GypError("Could not find target %s" % target)
+ qualified_root_targets.extend(qualified_targets)
+
+ wanted_targets = {}
+ for target in qualified_root_targets:
+ wanted_targets[target] = targets[target]
+ for dependency in dependency_nodes[target].DeepDependencies():
+ wanted_targets[dependency] = targets[dependency]
+
+ wanted_flat_list = [t for t in flat_list if t in wanted_targets]
+
+ # Prune unwanted targets from each build_file's data dict.
+ for build_file in data["target_build_files"]:
+ if "targets" not in data[build_file]:
+ continue
+ new_targets = []
+ for target in data[build_file]["targets"]:
+ qualified_name = gyp.common.QualifiedTarget(
+ build_file, target["target_name"], target["toolset"]
+ )
+ if qualified_name in wanted_targets:
+ new_targets.append(target)
+ data[build_file]["targets"] = new_targets
+
+ return wanted_targets, wanted_flat_list
+
+
+def VerifyNoCollidingTargets(targets):
+ """Verify that no two targets in the same directory share the same name.
+
+ Arguments:
+ targets: A list of targets in the form 'path/to/file.gyp:target_name'.
+ """
+ # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
+ used = {}
+ for target in targets:
+ # Separate out 'path/to/file.gyp, 'target_name' from
+ # 'path/to/file.gyp:target_name'.
+ path, name = target.rsplit(":", 1)
+ # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
+ subdir, gyp = os.path.split(path)
+ # Use '.' for the current directory '', so that the error messages make
+ # more sense.
+ if not subdir:
+ subdir = "."
+ # Prepare a key like 'path/to:target_name'.
+ key = subdir + ":" + name
+ if key in used:
+ # Complain if this target is already used.
+ raise GypError(
+ 'Duplicate target name "%s" in directory "%s" used both '
+ 'in "%s" and "%s".' % (name, subdir, gyp, used[key])
+ )
+ used[key] = gyp
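+ # Illustrative example (hypothetical targets): "foo/a.gyp:core" and
+ # "foo/b.gyp:core" collide, since both reduce to the key "foo:core" even
+ # though they are defined in different .gyp files.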
+
+
+def SetGeneratorGlobals(generator_input_info):
+ # Set up path_sections and non_configuration_keys with the default data plus
+ # the generator-specific data.
+ global path_sections
+ path_sections = set(base_path_sections)
+ path_sections.update(generator_input_info["path_sections"])
+
+ global non_configuration_keys
+ non_configuration_keys = base_non_configuration_keys[:]
+ non_configuration_keys.extend(generator_input_info["non_configuration_keys"])
+
+ global multiple_toolsets
+ multiple_toolsets = generator_input_info["generator_supports_multiple_toolsets"]
+
+ global generator_filelist_paths
+ generator_filelist_paths = generator_input_info["generator_filelist_paths"]
+
+
+def Load(
+ build_files,
+ variables,
+ includes,
+ depth,
+ generator_input_info,
+ check,
+ circular_check,
+ parallel,
+ root_targets,
+):
+ SetGeneratorGlobals(generator_input_info)
+ # A generator can have other lists (in addition to sources) be processed
+ # for rules.
+ extra_sources_for_rules = generator_input_info["extra_sources_for_rules"]
+
+ # Load build files. This loads every target-containing build file into
+ # the |data| dictionary such that the keys to |data| are build file names,
+ # and the values are the entire build file contents after "early" or "pre"
+ # processing has been done and includes have been resolved.
+ # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
+ # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
+ # track of the keys corresponding to "target" files.
+ data = {"target_build_files": set()}
+ # Normalize paths everywhere. This is important because paths will be
+ # used as keys to the data dict and for references between input files.
+ build_files = set(map(os.path.normpath, build_files))
+ if parallel:
+ LoadTargetBuildFilesParallel(
+ build_files, data, variables, includes, depth, check, generator_input_info
+ )
+ else:
+ aux_data = {}
+ for build_file in build_files:
+ try:
+ LoadTargetBuildFile(
+ build_file, data, aux_data, variables, includes, depth, check, True
+ )
+ except Exception as e:
+ gyp.common.ExceptionAppend(e, "while trying to load %s" % build_file)
+ raise
+
+ # Build a dict to access each target's subdict by qualified name.
+ targets = BuildTargetsDict(data)
+
+ # Fully qualify all dependency links.
+ QualifyDependencies(targets)
+
+ # Remove self-dependencies from targets that have 'prune_self_dependencies'
+ # set to 1.
+ RemoveSelfDependencies(targets)
+
+ # Expand dependencies specified as build_file:*.
+ ExpandWildcardDependencies(targets, data)
+
+ # Remove all dependencies marked as 'link_dependency' from the targets of
+ # type 'none'.
+ RemoveLinkDependenciesFromNoneTargets(targets)
+
+ # Apply exclude (!) and regex (/) list filters only for dependency_sections.
+ for target_name, target_dict in targets.items():
+ tmp_dict = {}
+ for key_base in dependency_sections:
+ for op in ("", "!", "/"):
+ key = key_base + op
+ if key in target_dict:
+ tmp_dict[key] = target_dict[key]
+ del target_dict[key]
+ ProcessListFiltersInDict(target_name, tmp_dict)
+ # Write the results back to |target_dict|.
+ for key in tmp_dict:
+ target_dict[key] = tmp_dict[key]
+
+ # Make sure every dependency appears at most once.
+ RemoveDuplicateDependencies(targets)
+
+ if circular_check:
+ # Make sure that any targets in a.gyp don't contain dependencies in other
+ # .gyp files that further depend on a.gyp.
+ VerifyNoGYPFileCircularDependencies(targets)
+
+ [dependency_nodes, flat_list] = BuildDependencyList(targets)
+
+ if root_targets:
+ # Remove, from |targets| and |flat_list|, the targets that are not deep
+ # dependencies of the targets specified in |root_targets|.
+ targets, flat_list = PruneUnwantedTargets(
+ targets, flat_list, dependency_nodes, root_targets, data
+ )
+
+ # Check that no two targets in the same directory have the same name.
+ VerifyNoCollidingTargets(flat_list)
+
+ # Handle dependent settings of various types.
+ for settings_type in [
+ "all_dependent_settings",
+ "direct_dependent_settings",
+ "link_settings",
+ ]:
+ DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
+
+ # Take out the dependent settings now that they've been published to all
+ # of the targets that require them.
+ for target in flat_list:
+ if settings_type in targets[target]:
+ del targets[target][settings_type]
+
+ # Make sure static libraries don't declare dependencies on other static
+ # libraries, but that linkables depend on all unlinked static libraries
+ # that they need so that their link steps will be correct.
+ gii = generator_input_info
+ if gii["generator_wants_static_library_dependencies_adjusted"]:
+ AdjustStaticLibraryDependencies(
+ flat_list,
+ targets,
+ dependency_nodes,
+ gii["generator_wants_sorted_dependencies"],
+ )
+
+ # Apply "post"/"late"/"target" variable expansions and condition evaluations.
+ for target in flat_list:
+ target_dict = targets[target]
+ build_file = gyp.common.BuildFile(target)
+ ProcessVariablesAndConditionsInDict(
+ target_dict, PHASE_LATE, variables, build_file
+ )
+
+ # Move everything that can go into a "configurations" section into one.
+ for target in flat_list:
+ target_dict = targets[target]
+ SetUpConfigurations(target, target_dict)
+
+ # Apply exclude (!) and regex (/) list filters.
+ for target in flat_list:
+ target_dict = targets[target]
+ ProcessListFiltersInDict(target, target_dict)
+
+ # Apply "latelate" variable expansions and condition evaluations.
+ for target in flat_list:
+ target_dict = targets[target]
+ build_file = gyp.common.BuildFile(target)
+ ProcessVariablesAndConditionsInDict(
+ target_dict, PHASE_LATELATE, variables, build_file
+ )
+
+ # Make sure that the rules make sense, and build up rule_sources lists as
+ # needed. Not all generators will need to use the rule_sources lists, but
+ # some may, and it seems best to build the list in a common spot.
+ # Also validate actions and run_as elements in targets.
+ for target in flat_list:
+ target_dict = targets[target]
+ build_file = gyp.common.BuildFile(target)
+ ValidateTargetType(target, target_dict)
+ ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
+ ValidateRunAsInTarget(target, target_dict, build_file)
+ ValidateActionsInTarget(target, target_dict, build_file)
+
+ # Generators might not expect ints. Turn them into strs.
+ TurnIntIntoStrInDict(data)
+
+ # TODO(mark): Return |data| for now because the generator needs a list of
+ # build files that came in. In the future, maybe it should just accept
+ # a list, and not the whole data dict.
+ return [flat_list, targets, data]
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/input_test.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
new file mode 100755
index 0000000..a18f72e
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python3
+
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the input.py file."""
+
+import gyp.input
+import unittest
+
+
+class TestFindCycles(unittest.TestCase):
+ def setUp(self):
+ self.nodes = {}
+ for x in ("a", "b", "c", "d", "e"):
+ self.nodes[x] = gyp.input.DependencyGraphNode(x)
+
+ def _create_dependency(self, dependent, dependency):
+ dependent.dependencies.append(dependency)
+ dependency.dependents.append(dependent)
+
+ def test_no_cycle_empty_graph(self):
+ for label, node in self.nodes.items():
+ self.assertEqual([], node.FindCycles())
+
+ def test_no_cycle_line(self):
+ self._create_dependency(self.nodes["a"], self.nodes["b"])
+ self._create_dependency(self.nodes["b"], self.nodes["c"])
+ self._create_dependency(self.nodes["c"], self.nodes["d"])
+
+ for label, node in self.nodes.items():
+ self.assertEqual([], node.FindCycles())
+
+ def test_no_cycle_dag(self):
+ self._create_dependency(self.nodes["a"], self.nodes["b"])
+ self._create_dependency(self.nodes["a"], self.nodes["c"])
+ self._create_dependency(self.nodes["b"], self.nodes["c"])
+
+ for label, node in self.nodes.items():
+ self.assertEqual([], node.FindCycles())
+
+ def test_cycle_self_reference(self):
+ self._create_dependency(self.nodes["a"], self.nodes["a"])
+
+ self.assertEqual(
+ [[self.nodes["a"], self.nodes["a"]]], self.nodes["a"].FindCycles()
+ )
+
+ def test_cycle_two_nodes(self):
+ self._create_dependency(self.nodes["a"], self.nodes["b"])
+ self._create_dependency(self.nodes["b"], self.nodes["a"])
+
+ self.assertEqual(
+ [[self.nodes["a"], self.nodes["b"], self.nodes["a"]]],
+ self.nodes["a"].FindCycles(),
+ )
+ self.assertEqual(
+ [[self.nodes["b"], self.nodes["a"], self.nodes["b"]]],
+ self.nodes["b"].FindCycles(),
+ )
+
+ def test_two_cycles(self):
+ self._create_dependency(self.nodes["a"], self.nodes["b"])
+ self._create_dependency(self.nodes["b"], self.nodes["a"])
+
+ self._create_dependency(self.nodes["b"], self.nodes["c"])
+ self._create_dependency(self.nodes["c"], self.nodes["b"])
+
+ cycles = self.nodes["a"].FindCycles()
+ self.assertTrue([self.nodes["a"], self.nodes["b"], self.nodes["a"]] in cycles)
+ self.assertTrue([self.nodes["b"], self.nodes["c"], self.nodes["b"]] in cycles)
+ self.assertEqual(2, len(cycles))
+
+ def test_big_cycle(self):
+ self._create_dependency(self.nodes["a"], self.nodes["b"])
+ self._create_dependency(self.nodes["b"], self.nodes["c"])
+ self._create_dependency(self.nodes["c"], self.nodes["d"])
+ self._create_dependency(self.nodes["d"], self.nodes["e"])
+ self._create_dependency(self.nodes["e"], self.nodes["a"])
+
+ self.assertEqual(
+ [
+ [
+ self.nodes["a"],
+ self.nodes["b"],
+ self.nodes["c"],
+ self.nodes["d"],
+ self.nodes["e"],
+ self.nodes["a"],
+ ]
+ ],
+ self.nodes["a"].FindCycles(),
+ )
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
new file mode 100755
index 0000000..59647c9
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
@@ -0,0 +1,771 @@
+#!/usr/bin/env python3
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions to perform Xcode-style build steps.
+
+These functions are executed via gyp-mac-tool when using the Makefile generator.
+"""
+
+
+import fcntl
+import fnmatch
+import glob
+import json
+import os
+import plistlib
+import re
+import shutil
+import struct
+import subprocess
+import sys
+import tempfile
+
+
+def main(args):
+ executor = MacTool()
+ exit_code = executor.Dispatch(args)
+ if exit_code is not None:
+ sys.exit(exit_code)
+
+
+class MacTool:
+ """This class performs all the Mac tooling steps. The methods can either be
+ executed directly, or dispatched from an argument list."""
+
+ def Dispatch(self, args):
+ """Dispatches a string command to a method."""
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ method = "Exec%s" % self._CommandifyName(args[0])
+ return getattr(self, method)(*args[1:])
+
+ def _CommandifyName(self, name_string):
+ """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
+ return name_string.title().replace("-", "")
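+
+ # Illustrative example (not from the original source):
+ # Dispatch(["copy-info-plist", src, dest, "True"]) maps the command through
+ # _CommandifyName ("copy-info-plist" -> "CopyInfoPlist") and so calls
+ # ExecCopyInfoPlist(src, dest, "True").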
+
+ def ExecCopyBundleResource(self, source, dest, convert_to_binary):
+ """Copies a resource file to the bundle/Resources directory, performing any
+ necessary compilation on each resource."""
+ convert_to_binary = convert_to_binary == "True"
+ extension = os.path.splitext(source)[1].lower()
+ if os.path.isdir(source):
+ # Copy tree.
+ # TODO(thakis): This copies file attributes like mtime, while the
+ # single-file branch below doesn't. This should probably be changed to
+ # be consistent with the single-file branch.
+ if os.path.exists(dest):
+ shutil.rmtree(dest)
+ shutil.copytree(source, dest)
+ elif extension == ".xib":
+ return self._CopyXIBFile(source, dest)
+ elif extension == ".storyboard":
+ return self._CopyXIBFile(source, dest)
+ elif extension == ".strings" and not convert_to_binary:
+ self._CopyStringsFile(source, dest)
+ else:
+ if os.path.exists(dest):
+ os.unlink(dest)
+ shutil.copy(source, dest)
+
+ if convert_to_binary and extension in (".plist", ".strings"):
+ self._ConvertToBinary(dest)
+
+ def _CopyXIBFile(self, source, dest):
+ """Compiles a XIB file with ibtool into a binary plist in the bundle."""
+
+ # ibtool sometimes crashes with relative paths. See crbug.com/314728.
+ base = os.path.dirname(os.path.realpath(__file__))
+ if os.path.relpath(source):
+ source = os.path.join(base, source)
+ if os.path.relpath(dest):
+ dest = os.path.join(base, dest)
+
+ args = ["xcrun", "ibtool", "--errors", "--warnings", "--notices"]
+
+ if os.environ["XCODE_VERSION_ACTUAL"] > "0700":
+ args.extend(["--auto-activate-custom-fonts"])
+ if "IPHONEOS_DEPLOYMENT_TARGET" in os.environ:
+ args.extend(
+ [
+ "--target-device",
+ "iphone",
+ "--target-device",
+ "ipad",
+ "--minimum-deployment-target",
+ os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
+ ]
+ )
+ else:
+ args.extend(
+ [
+ "--target-device",
+ "mac",
+ "--minimum-deployment-target",
+ os.environ["MACOSX_DEPLOYMENT_TARGET"],
+ ]
+ )
+
+ args.extend(
+ ["--output-format", "human-readable-text", "--compile", dest, source]
+ )
+
+ ibtool_section_re = re.compile(r"/\*.*\*/")
+ ibtool_re = re.compile(r".*note:.*is clipping its content")
+ try:
+ # Decode to str so the str regexes above can match individual lines.
+ stdout = subprocess.check_output(args).decode("utf-8")
+ except subprocess.CalledProcessError as e:
+ print(e.output)
+ raise
+ current_section_header = None
+ for line in stdout.splitlines():
+ if ibtool_section_re.match(line):
+ current_section_header = line
+ elif not ibtool_re.match(line):
+ if current_section_header:
+ print(current_section_header)
+ current_section_header = None
+ print(line)
+ return 0
+
+ def _ConvertToBinary(self, dest):
+ subprocess.check_call(
+ ["xcrun", "plutil", "-convert", "binary1", "-o", dest, dest]
+ )
+
+ def _CopyStringsFile(self, source, dest):
+ """Copies a .strings file using iconv to reconvert the input into UTF-16."""
+ input_code = self._DetectInputEncoding(source) or "UTF-8"
+
+ # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
+ # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
+ # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
+ # semicolon in dictionary.
+ # on invalid files. Do the same kind of validation.
+ import CoreFoundation
+
+ with open(source, "rb") as in_file:
+ s = in_file.read()
+ d = CoreFoundation.CFDataCreate(None, s, len(s))
+ _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
+ if error:
+ return
+
+ with open(dest, "wb") as fp:
+ fp.write(s.decode(input_code).encode("UTF-16"))
+
+ def _DetectInputEncoding(self, file_name):
+ """Reads the first few bytes from file_name and tries to guess the text
+ encoding. Returns None as a guess if it can't detect it."""
+ with open(file_name, "rb") as fp:
+ try:
+ header = fp.read(3)
+ except Exception:
+ return None
+ if header.startswith(b"\xFE\xFF"):
+ return "UTF-16"
+ elif header.startswith(b"\xFF\xFE"):
+ return "UTF-16"
+ elif header.startswith(b"\xEF\xBB\xBF"):
+ return "UTF-8"
+ else:
+ return None
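+
+ # Illustrative behavior (not from the original source): a file beginning
+ # with the bytes FF FE or FE FF (UTF-16 BOMs) is reported as "UTF-16",
+ # EF BB BF as "UTF-8", and anything else as None, in which case
+ # _CopyStringsFile falls back to UTF-8.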
+
+ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
+ """Copies the |source| Info.plist to the destination directory |dest|."""
+ # Read the source Info.plist into memory.
+ with open(source) as fd:
+ lines = fd.read()
+
+ # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
+ # plistlib.readPlistFromString does not exist on Python 3; use loads.
+ plist = plistlib.loads(lines.encode("utf-8"))
+ if keys:
+ plist.update(json.loads(keys[0]))
+ lines = plistlib.dumps(plist).decode("utf-8")
+
+ # Go through all the environment variables and replace them as variables in
+ # the file.
+ IDENT_RE = re.compile(r"[_/\s]")
+ for key in os.environ:
+ if key.startswith("_"):
+ continue
+ evar = "${%s}" % key
+ evalue = os.environ[key]
+ lines = lines.replace(evar, evalue)
+
+ # Xcode supports various suffixes on environment variables, which are
+ # all undocumented. :rfc1034identifier is used in the standard project
+ # template these days, and :identifier was used earlier. They are used to
+ # convert non-url characters into things that look like valid urls --
+ # except that the replacement character for :identifier, '_' isn't valid
+ # in a URL either -- oops, hence :rfc1034identifier was born.
+ evar = "${%s:identifier}" % key
+ evalue = IDENT_RE.sub("_", os.environ[key])
+ lines = lines.replace(evar, evalue)
+
+ evar = "${%s:rfc1034identifier}" % key
+ evalue = IDENT_RE.sub("-", os.environ[key])
+ lines = lines.replace(evar, evalue)
+
+ # Remove any keys with values that haven't been replaced.
+ lines = lines.splitlines()
+ for i in range(len(lines)):
+ if lines[i].strip().startswith("<string>${"):
+ lines[i] = None
+ lines[i - 1] = None
+ lines = "\n".join(line for line in lines if line is not None)
+
+ # Write out the file with variables replaced.
+ with open(dest, "w") as fd:
+ fd.write(lines)
+
+ # Write out the PkgInfo file now that the Info.plist file has been
+ # "compiled".
+ self._WritePkgInfo(dest)
+
+ if convert_to_binary == "True":
+ self._ConvertToBinary(dest)
+
+ def _WritePkgInfo(self, info_plist):
+ """This writes the PkgInfo file from the data stored in Info.plist."""
+ # plistlib.readPlist was removed in Python 3.9; plistlib.load also
+ # transparently handles binary plists.
+ with open(info_plist, "rb") as fp:
+ plist = plistlib.load(fp)
+ if not plist:
+ return
+
+ # Only create PkgInfo for executable types.
+ package_type = plist["CFBundlePackageType"]
+ if package_type != "APPL":
+ return
+
+ # The format of PkgInfo is eight characters, representing the bundle type
+ # and bundle signature, each four characters. If that is missing, four
+ # '?' characters are used instead.
+ signature_code = plist.get("CFBundleSignature", "????")
+ if len(signature_code) != 4: # Wrong length resets everything, too.
+ signature_code = "?" * 4
+
+ dest = os.path.join(os.path.dirname(info_plist), "PkgInfo")
+ with open(dest, "w") as fp:
+ fp.write(f"{package_type}{signature_code}")
+
+ def ExecFlock(self, lockfile, *cmd_list):
+ """Emulates the most basic behavior of Linux's flock(1)."""
+ # Rely on exception handling to report errors.
+ fd = os.open(lockfile, os.O_RDONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
+ fcntl.flock(fd, fcntl.LOCK_EX)
+ return subprocess.call(cmd_list)
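+
+ # Illustrative invocation (assuming the gyp-mac-tool wrapper mentioned in
+ # the module docstring): "gyp-mac-tool flock /tmp/build.lock ./slow-step.sh"
+ # holds an exclusive lock on /tmp/build.lock while ./slow-step.sh runs.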
+
+ def ExecFilterLibtool(self, *cmd_list):
+ """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
+ symbols'."""
+ libtool_re = re.compile(
+ r"^.*libtool: (?:for architecture: \S* )?" r"file: .* has no symbols$"
+ )
+ libtool_re5 = re.compile(
+ r"^.*libtool: warning for library: "
+ + r".* the table of contents is empty "
+ + r"\(no object file members in the library define global symbols\)$"
+ )
+ env = os.environ.copy()
+ # Ref:
+ # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
+ # The problem with this flag is that it resets the file mtime on the file to
+ # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
+ env["ZERO_AR_DATE"] = "1"
+ libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
+ err = libtoolout.communicate()[1].decode("utf-8")
+ for line in err.splitlines():
+ if not libtool_re.match(line) and not libtool_re5.match(line):
+ print(line, file=sys.stderr)
+ # Unconditionally touch the output .a file on the command line if present
+ # and the command succeeded. A bit hacky.
+ if not libtoolout.returncode:
+ for i in range(len(cmd_list) - 1):
+ if cmd_list[i] == "-o" and cmd_list[i + 1].endswith(".a"):
+ os.utime(cmd_list[i + 1], None)
+ break
+ return libtoolout.returncode
+
+ def ExecPackageIosFramework(self, framework):
+ # Find the name of the binary based on the part before the ".framework".
+ binary = os.path.basename(framework).split(".")[0]
+ module_path = os.path.join(framework, "Modules")
+ if not os.path.exists(module_path):
+ os.mkdir(module_path)
+ module_template = (
+ "framework module %s {\n"
+ ' umbrella header "%s.h"\n'
+ "\n"
+ " export *\n"
+ " module * { export * }\n"
+ "}\n" % (binary, binary)
+ )
+
+ with open(os.path.join(module_path, "module.modulemap"), "w") as module_file:
+ module_file.write(module_template)
+
+ def ExecPackageFramework(self, framework, version):
+ """Takes a path to Something.framework and the Current version of that and
+ sets up all the symlinks."""
+ # Find the name of the binary based on the part before the ".framework".
+ binary = os.path.basename(framework).split(".")[0]
+
+ CURRENT = "Current"
+ RESOURCES = "Resources"
+ VERSIONS = "Versions"
+
+ if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
+ # Binary-less frameworks don't seem to contain symlinks (see e.g.
+ # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
+ return
+
+ # Move into the framework directory to set the symlinks correctly.
+ pwd = os.getcwd()
+ os.chdir(framework)
+
+ # Set up the Current version.
+ self._Relink(version, os.path.join(VERSIONS, CURRENT))
+
+ # Set up the root symlinks.
+ self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
+ self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
+
+ # Back to where we were before!
+ os.chdir(pwd)
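+
+ # Illustrative result (not from the original source): after
+ # ExecPackageFramework("Foo.framework", "A"), the framework contains
+ #   Versions/Current -> A
+ #   Foo -> Versions/Current/Foo
+ #   Resources -> Versions/Current/Resources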
+
+ def _Relink(self, dest, link):
+ """Creates a symlink to |dest| named |link|. If |link| already exists,
+ it is overwritten."""
+ if os.path.lexists(link):
+ os.remove(link)
+ os.symlink(dest, link)
+
+ def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
+ framework_name = os.path.basename(framework).split(".")[0]
+ all_headers = [os.path.abspath(header) for header in all_headers]
+ filelist = {}
+ for header in all_headers:
+ filename = os.path.basename(header)
+ filelist[filename] = header
+ filelist[os.path.join(framework_name, filename)] = header
+ WriteHmap(out, filelist)
+
+ def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
+ header_path = os.path.join(framework, "Headers")
+ if not os.path.exists(header_path):
+ os.makedirs(header_path)
+ for header in copy_headers:
+ shutil.copy(header, os.path.join(header_path, os.path.basename(header)))
+
+ def ExecCompileXcassets(self, keys, *inputs):
+ """Compiles multiple .xcassets files into a single .car file.
+
+ This invokes 'actool' to compile all the input .xcassets files. The
+ |keys| argument is a JSON-encoded dictionary of extra arguments to
+ pass to 'actool' when the asset catalogs contain an application icon
+ or a launch image.
+
+ Note that 'actool' does not create the Assets.car file if the asset
+ catalogs do not contain any imagesets.
+ """
+ command_line = [
+ "xcrun",
+ "actool",
+ "--output-format",
+ "human-readable-text",
+ "--compress-pngs",
+ "--notices",
+ "--warnings",
+ "--errors",
+ ]
+ is_iphone_target = "IPHONEOS_DEPLOYMENT_TARGET" in os.environ
+ if is_iphone_target:
+ platform = os.environ["CONFIGURATION"].split("-")[-1]
+ if platform not in ("iphoneos", "iphonesimulator"):
+ platform = "iphonesimulator"
+ command_line.extend(
+ [
+ "--platform",
+ platform,
+ "--target-device",
+ "iphone",
+ "--target-device",
+ "ipad",
+ "--minimum-deployment-target",
+ os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
+ "--compile",
+ os.path.abspath(os.environ["CONTENTS_FOLDER_PATH"]),
+ ]
+ )
+ else:
+ command_line.extend(
+ [
+ "--platform",
+ "macosx",
+ "--target-device",
+ "mac",
+ "--minimum-deployment-target",
+ os.environ["MACOSX_DEPLOYMENT_TARGET"],
+ "--compile",
+ os.path.abspath(os.environ["UNLOCALIZED_RESOURCES_FOLDER_PATH"]),
+ ]
+ )
+ if keys:
+ keys = json.loads(keys)
+ for key, value in keys.items():
+ arg_name = "--" + key
+ if isinstance(value, bool):
+ if value:
+ command_line.append(arg_name)
+ elif isinstance(value, list):
+ for v in value:
+ command_line.append(arg_name)
+ command_line.append(str(v))
+ else:
+ command_line.append(arg_name)
+ command_line.append(str(value))
+ # Note: actool crashes if inputs path are relative, so use os.path.abspath
+ # to get absolute path name for inputs.
+ command_line.extend(map(os.path.abspath, inputs))
+ subprocess.check_call(command_line)
+
+ def ExecMergeInfoPlist(self, output, *inputs):
+ """Merge multiple .plist files into a single .plist file."""
+ merged_plist = {}
+ for path in inputs:
+ plist = self._LoadPlistMaybeBinary(path)
+ self._MergePlist(merged_plist, plist)
+ with open(output, "wb") as fp:
+ plistlib.dump(merged_plist, fp)
+
+ def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
+ """Code sign a bundle.
+
+ This function tries to code sign an iOS bundle, following the same
+ algorithm as Xcode:
+ 1. pick the provisioning profile that best matches the bundle identifier,
+ and copy it into the bundle as embedded.mobileprovision,
+ 2. copy Entitlements.plist from user or SDK next to the bundle,
+ 3. code sign the bundle.
+ """
+ substitutions, overrides = self._InstallProvisioningProfile(
+ provisioning, self._GetCFBundleIdentifier()
+ )
+ entitlements_path = self._InstallEntitlements(
+ entitlements, substitutions, overrides
+ )
+
+ args = ["codesign", "--force", "--sign", key]
+ if preserve == "True":
+ args.extend(["--deep", "--preserve-metadata=identifier,entitlements"])
+ else:
+ args.extend(["--entitlements", entitlements_path])
+ args.extend(["--timestamp=none", path])
+ subprocess.check_call(args)
+
+ def _InstallProvisioningProfile(self, profile, bundle_identifier):
+ """Installs embedded.mobileprovision into the bundle.
+
+ Args:
+ profile: string, optional, short name of the .mobileprovision file
+ to use; if empty or if the file is missing, the best installed
+ profile will be used
+ bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+
+ Returns:
+ A tuple containing two dictionaries: the variable substitutions and the
+ values to override when generating the entitlements file.
+ """
+ source_path, provisioning_data, team_id = self._FindProvisioningProfile(
+ profile, bundle_identifier
+ )
+ target_path = os.path.join(
+ os.environ["BUILT_PRODUCTS_DIR"],
+ os.environ["CONTENTS_FOLDER_PATH"],
+ "embedded.mobileprovision",
+ )
+ shutil.copy2(source_path, target_path)
+ substitutions = self._GetSubstitutions(bundle_identifier, team_id + ".")
+ return substitutions, provisioning_data["Entitlements"]
+
+ def _FindProvisioningProfile(self, profile, bundle_identifier):
+ """Finds the .mobileprovision file to use for signing the bundle.
+
+ Checks all the installed provisioning profiles (or, if the user specified
+ the PROVISIONING_PROFILE variable, only that one) and selects the most
+ specific profile that corresponds to the bundle identifier.
+
+ Args:
+ profile: string, optional, short name of the .mobileprovision file
+ to use; if empty or if the file is missing, the best installed
+ profile will be used
+ bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+
+ Returns:
+ A tuple of the path to the selected provisioning profile, the data of
+ the embedded plist in the provisioning profile and the team identifier
+ to use for code signing.
+
+ Raises:
+ SystemExit: if no .mobileprovision can be used to sign the bundle.
+ """
+ profiles_dir = os.path.join(
+ os.environ["HOME"], "Library", "MobileDevice", "Provisioning Profiles"
+ )
+ if not os.path.isdir(profiles_dir):
+ print(
+ "cannot find mobile provisioning for %s" % (bundle_identifier),
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ provisioning_profiles = None
+ if profile:
+ profile_path = os.path.join(profiles_dir, profile + ".mobileprovision")
+ if os.path.exists(profile_path):
+ provisioning_profiles = [profile_path]
+ if not provisioning_profiles:
+ provisioning_profiles = glob.glob(
+ os.path.join(profiles_dir, "*.mobileprovision")
+ )
+ valid_provisioning_profiles = {}
+ for profile_path in provisioning_profiles:
+ profile_data = self._LoadProvisioningProfile(profile_path)
+ app_id_pattern = profile_data.get("Entitlements", {}).get(
+ "application-identifier", ""
+ )
+ for team_identifier in profile_data.get("TeamIdentifier", []):
+ app_id = f"{team_identifier}.{bundle_identifier}"
+ if fnmatch.fnmatch(app_id, app_id_pattern):
+ valid_provisioning_profiles[app_id_pattern] = (
+ profile_path,
+ profile_data,
+ team_identifier,
+ )
+ if not valid_provisioning_profiles:
+ print(
+ "cannot find mobile provisioning for %s" % (bundle_identifier),
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ # If the user has multiple provisioning profiles installed that can be
+ # used for ${bundle_identifier}, pick the most specific one (i.e. the
+ # provisioning profile whose pattern is the longest).
+ selected_key = max(valid_provisioning_profiles, key=len)
+ return valid_provisioning_profiles[selected_key]
+
+ def _LoadProvisioningProfile(self, profile_path):
+ """Extracts the plist embedded in a provisioning profile.
+
+ Args:
+ profile_path: string, path to the .mobileprovision file
+
+ Returns:
+ Content of the plist embedded in the provisioning profile as a dictionary.
+ """
+ with tempfile.NamedTemporaryFile() as temp:
+ subprocess.check_call(
+ ["security", "cms", "-D", "-i", profile_path, "-o", temp.name]
+ )
+ return self._LoadPlistMaybeBinary(temp.name)
+
+ def _MergePlist(self, merged_plist, plist):
+ """Merge |plist| into |merged_plist|."""
+ for key, value in plist.items():
+ if isinstance(value, dict):
+ merged_value = merged_plist.get(key, {})
+ if isinstance(merged_value, dict):
+ self._MergePlist(merged_value, value)
+ merged_plist[key] = merged_value
+ else:
+ merged_plist[key] = value
+ else:
+ merged_plist[key] = value
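+
+ # Illustrative example (not from the original source): merging
+ # {"A": {"x": 1}} into {"A": {"y": 2}, "B": 3} yields
+ # {"A": {"y": 2, "x": 1}, "B": 3}; nested dicts merge recursively, while
+ # any other value type is overwritten.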
+
+ def _LoadPlistMaybeBinary(self, plist_path):
+ """Loads into a memory a plist possibly encoded in binary format.
+
+ This is a wrapper around plistlib.load that converts a temporary copy
+ of the plist to the XML format via plutil if the file can't be parsed
+ directly (assuming that it is in the binary format).
+
+ Args:
+ plist_path: string, path to a plist file, in XML or binary format
+
+ Returns:
+ Content of the plist as a dictionary.
+ """
+ try:
+ # First, try to read the file directly and, if an exception is
+ # raised, convert a temporary copy to XML and load that copy.
+ with open(plist_path, "rb") as fp:
+ return plistlib.load(fp)
+ except Exception:
+ pass
+ with tempfile.NamedTemporaryFile() as temp:
+ shutil.copy2(plist_path, temp.name)
+ subprocess.check_call(["plutil", "-convert", "xml1", temp.name])
+ with open(temp.name, "rb") as fp:
+ return plistlib.load(fp)
+
+ def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
+ """Constructs a dictionary of variable substitutions for Entitlements.plist.
+
+ Args:
+ bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+ app_identifier_prefix: string, value for AppIdentifierPrefix
+
+ Returns:
+ Dictionary of substitutions to apply when generating Entitlements.plist.
+ """
+ return {
+ "CFBundleIdentifier": bundle_identifier,
+ "AppIdentifierPrefix": app_identifier_prefix,
+ }
+
+ def _GetCFBundleIdentifier(self):
+ """Extracts CFBundleIdentifier value from Info.plist in the bundle.
+
+ Returns:
+ Value of CFBundleIdentifier in the Info.plist located in the bundle.
+ """
+ info_plist_path = os.path.join(
+ os.environ["TARGET_BUILD_DIR"], os.environ["INFOPLIST_PATH"]
+ )
+ info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
+ return info_plist_data["CFBundleIdentifier"]
+
+ def _InstallEntitlements(self, entitlements, substitutions, overrides):
+ """Generates and install the ${BundleName}.xcent entitlements file.
+
+ Expands variables "$(variable)" pattern in the source entitlements file,
+ add extra entitlements defined in the .mobileprovision file and the copy
+ the generated plist to "${BundlePath}.xcent".
+
+ Args:
+ entitlements: string, optional, path to the Entitlements.plist template
+ to use, defaults to "${SDKROOT}/Entitlements.plist"
+ substitutions: dictionary, variable substitutions
+ overrides: dictionary, values to add to the entitlements
+
+ Returns:
+ Path to the generated entitlements file.
+ """
+ source_path = entitlements
+ target_path = os.path.join(
+ os.environ["BUILT_PRODUCTS_DIR"], os.environ["PRODUCT_NAME"] + ".xcent"
+ )
+ if not source_path:
+ source_path = os.path.join(os.environ["SDKROOT"], "Entitlements.plist")
+ shutil.copy2(source_path, target_path)
+ data = self._LoadPlistMaybeBinary(target_path)
+ data = self._ExpandVariables(data, substitutions)
+ if overrides:
+ for key in overrides:
+ if key not in data:
+ data[key] = overrides[key]
+ with open(target_path, "wb") as fp:
+ plistlib.dump(data, fp)
+ return target_path
+
+ def _ExpandVariables(self, data, substitutions):
+ """Expands variables "$(variable)" in data.
+
+ Args:
+ data: object, can be either string, list or dictionary
+ substitutions: dictionary, variable substitutions to perform
+
+ Returns:
+ Copy of data where each reference to "$(variable)" has been replaced
+ by the corresponding value found in substitutions, or left intact if
+ the key was not found.
+ """
+ if isinstance(data, str):
+ for key, value in substitutions.items():
+ data = data.replace("$(%s)" % key, value)
+ return data
+ if isinstance(data, list):
+ return [self._ExpandVariables(v, substitutions) for v in data]
+ if isinstance(data, dict):
+ return {k: self._ExpandVariables(data[k], substitutions) for k in data}
+ return data
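+
+ # Illustrative example (not from the original source):
+ #   _ExpandVariables("$(AppIdentifierPrefix)$(CFBundleIdentifier)",
+ #                    {"AppIdentifierPrefix": "ABC123.",
+ #                     "CFBundleIdentifier": "com.example.app"})
+ # returns "ABC123.com.example.app"; unknown "$(...)" references are left
+ # intact.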
+
+
+def NextGreaterPowerOf2(x):
+ return 2 ** x.bit_length()
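+
+# Note (illustrative, not from the original source): this returns the smallest
+# power of two strictly greater than x, e.g. NextGreaterPowerOf2(4) == 8.
+# WriteHmap below uses it to size the bucket array, so capacity always exceeds
+# the entry count and the linear-probing loop is guaranteed an empty slot.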
+
+
+def WriteHmap(output_name, filelist):
+ """Generates a header map based on |filelist|.
+
+ Per Mark Mentovai:
+ A header map is structured essentially as a hash table, keyed by names used
+ in #includes, and providing pathnames to the actual files.
+
+ The implementation below and the comment above comes from inspecting:
+ http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
+ while also looking at the implementation in clang in:
+ https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
+ """
+ magic = 1751998832
+ version = 1
+ _reserved = 0
+ count = len(filelist)
+ capacity = NextGreaterPowerOf2(count)
+ strings_offset = 24 + (12 * capacity)
+ max_value_length = max(len(value) for value in filelist.values())
+
+ out = open(output_name, "wb")
+ out.write(
+ struct.pack(
+ "<LHHLLLL",
+ magic,
+ version,
+ _reserved,
+ strings_offset,
+ count,
+ capacity,
+ max_value_length,
+ )
+ )
+
+ # Create empty hashmap buckets.
+ buckets = [None] * capacity
+ for file, path in filelist.items():
+ key = 0
+ for c in file:
+ key += ord(c.lower()) * 13
+
+ # Fill next empty bucket.
+ while buckets[key & capacity - 1] is not None:
+ key = key + 1
+ buckets[key & capacity - 1] = (file, path)
+
+ next_offset = 1
+ for bucket in buckets:
+ if bucket is None:
+ out.write(struct.pack("<LLL", 0, 0, 0))
+ else:
+ (file, path) = bucket
+ key_offset = next_offset
+ prefix_offset = key_offset + len(file) + 1
+ suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
+ next_offset = suffix_offset + len(os.path.basename(path)) + 1
+ out.write(struct.pack("<LLL", key_offset, prefix_offset, suffix_offset))
+
+ # Pad byte since next offset starts at 1.
+ out.write(struct.pack("<x"))
+
+ for bucket in buckets:
+ if bucket is not None:
+ (file, path) = bucket
+ out.write(struct.pack("<%ds" % len(file), file))
+ out.write(struct.pack("<s", "\0"))
+ base = os.path.dirname(path) + os.sep
+ out.write(struct.pack("<%ds" % len(base), base))
+ out.write(struct.pack("<s", "\0"))
+ path = os.path.basename(path)
+ out.write(struct.pack("<%ds" % len(path), path))
+ out.write(struct.pack("<s", "\0"))
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
new file mode 100644
index 0000000..5b9c271
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
@@ -0,0 +1,1271 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This module helps emulate Visual Studio 2008 behavior on top of other
+build systems, primarily ninja.
+"""
+
+import collections
+import os
+import re
+import subprocess
+import sys
+
+from gyp.common import OrderedSet
+import gyp.MSVSUtil
+import gyp.MSVSVersion
+
+windows_quoter_regex = re.compile(r'(\\*)"')
+
+
+def QuoteForRspFile(arg, quote_cmd=True):
+ """Quote a command line argument so that it appears as one argument when
+ processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
+ Windows programs)."""
+ # See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
+ # threads. These are actually the quoting rules for CommandLineToArgvW, not
+ # for the shell, because the shell does no interpretation on Windows. This
+ # works more or less because most programs (including the compiler, etc.)
+ # use that function to handle command line arguments.
+
+ # Use a heuristic to try to find args that are paths, and normalize them
+ if arg.find("/") > 0 or arg.count("/") > 1:
+ arg = os.path.normpath(arg)
+
+ # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
+ # preceding it, and results in n backslashes + the quote. So we substitute
+ # in 2* what we match, +1 more, plus the quote.
+ if quote_cmd:
+ arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
+
+ # %'s also need to be doubled otherwise they're interpreted as batch
+ # positional arguments. Also make sure to escape the % so that they're
+ # passed literally through escaping so they can be singled to just the
+ # original %. Otherwise, trying to pass the literal representation that
+ # looks like an environment variable to the shell (e.g. %PATH%) would fail.
+ arg = arg.replace("%", "%%")
+
+ # These commands are used in rsp files, so no escaping for the shell (via ^)
+ # is necessary.
+
+ # As a workaround for programs that don't use CommandLineToArgvW, gyp
+ # supports msvs_quote_cmd=0, which simply disables all quoting.
+ if quote_cmd:
+ # Finally, wrap the whole thing in quotes so that the above quote rule
+ # applies and whitespace isn't a word break.
+ return f'"{arg}"'
+
+ return arg
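+
+# Illustrative examples (not from the original source):
+#   QuoteForRspFile('a b')      -> '"a b"'
+#   QuoteForRspFile('say "hi"') -> '"say \\"hi\\""'  (2n+1 backslash rule)
+#   QuoteForRspFile('100%')     -> '"100%%"'         (doubled for cmd.exe)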
+
+
+def EncodeRspFileList(args, quote_cmd):
+ """Process a list of arguments using QuoteCmdExeArgument."""
+ # Note that the first argument is assumed to be the command. Don't add
+ # quotes around it because then built-ins like 'echo', etc. won't work.
+ # Take care to normpath only the path in the case of 'call ../x.bat' because
+ # otherwise the whole thing is incorrectly interpreted as a path and not
+ # normalized correctly.
+ if not args:
+ return ""
+ if args[0].startswith("call "):
+ call, program = args[0].split(" ", 1)
+ program = call + " " + os.path.normpath(program)
+ else:
+ program = os.path.normpath(args[0])
+ return (program + " "
+ + " ".join(QuoteForRspFile(arg, quote_cmd) for arg in args[1:]))
+
+
+def _GenericRetrieve(root, default, path):
+ """Given a list of dictionary keys |path| and a tree of dicts |root|, find
+ value at path, or return |default| if any of the path doesn't exist."""
+ if not root:
+ return default
+ if not path:
+ return root
+ return _GenericRetrieve(root.get(path[0]), default, path[1:])
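+
+# Illustrative examples (not from the original source):
+#   _GenericRetrieve({"a": {"b": 1}}, 0, ("a", "b")) -> 1
+#   _GenericRetrieve({"a": {"b": 1}}, 0, ("a", "c")) -> 0 (the default)
+# Note that a falsy value stored at the path ("" or []) also yields the
+# default, because the lookup tests 'not root' rather than 'root is None'.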
+
+
+def _AddPrefix(element, prefix):
+ """Add |prefix| to |element| or each subelement if element is iterable."""
+ if element is None:
+ return element
+ # Note, not Iterable because we don't want to handle strings like that.
+ if isinstance(element, (list, tuple)):
+ return [prefix + e for e in element]
+ else:
+ return prefix + element
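+
+# Illustrative examples (not from the original source):
+#   _AddPrefix(["foo", "bar"], "/I") -> ["/Ifoo", "/Ibar"]
+#   _AddPrefix("foo", "/I")          -> "/Ifoo"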
+
+
+def _DoRemapping(element, map):
+ """If |element| then remap it through |map|. If |element| is iterable then
+ each item will be remapped. Any elements not found will be removed."""
+ if map is not None and element is not None:
+ if not callable(map):
+ map = map.get # Assume it's a dict, otherwise a callable to do the remap.
+ if isinstance(element, (list, tuple)):
+ element = list(filter(None, [map(elem) for elem in element]))
+ else:
+ element = map(element)
+ return element
+
+
+def _AppendOrReturn(append, element):
+ """If |append| is None, simply return |element|. If |append| is not None,
+ then add |element| to it, adding each item in |element| if it's a list or
+ tuple."""
+ if append is not None and element is not None:
+ if isinstance(element, (list, tuple)):
+ append.extend(element)
+ else:
+ append.append(element)
+ else:
+ return element
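+
+# Illustrative example (not from the original source): _AppendOrReturn(None,
+# ["a", "b"]) returns ["a", "b"], while with flags = ["x"],
+# _AppendOrReturn(flags, ["a", "b"]) extends flags in place to
+# ["x", "a", "b"] and returns None. The cl()/ld()/lib() wrappers in
+# MsvsSettings rely on this to accumulate flags.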
+
+
+def _FindDirectXInstallation():
+ """Try to find an installation location for the DirectX SDK. Check for the
+ standard environment variable, and if that doesn't exist, try to find
+ via the registry. May return None if not found in either location."""
+ # Return previously calculated value, if there is one
+ if hasattr(_FindDirectXInstallation, "dxsdk_dir"):
+ return _FindDirectXInstallation.dxsdk_dir
+
+ dxsdk_dir = os.environ.get("DXSDK_DIR")
+ if not dxsdk_dir:
+ # Setup params to pass to and attempt to launch reg.exe.
+ cmd = ["reg.exe", "query", r"HKLM\Software\Microsoft\DirectX", "/s"]
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout = p.communicate()[0].decode("utf-8")
+ for line in stdout.splitlines():
+ if "InstallPath" in line:
+ dxsdk_dir = line.split(" ")[3] + "\\"
+
+ # Cache return value
+ _FindDirectXInstallation.dxsdk_dir = dxsdk_dir
+ return dxsdk_dir
+
+
+def GetGlobalVSMacroEnv(vs_version):
+ """Get a dict of variables mapping internal VS macro names to their gyp
+ equivalents. Returns all variables that are independent of the target."""
+ env = {}
+ # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
+ # Visual Studio is actually installed.
+ if vs_version.Path():
+ env["$(VSInstallDir)"] = vs_version.Path()
+ env["$(VCInstallDir)"] = os.path.join(vs_version.Path(), "VC") + "\\"
+ # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
+ # set. This happens when the SDK is sync'd via src-internal, rather than
+ # by typical end-user installation of the SDK. If it's not set, we don't
+ # want to leave the unexpanded variable in the path, so simply strip it.
+ dxsdk_dir = _FindDirectXInstallation()
+ env["$(DXSDK_DIR)"] = dxsdk_dir if dxsdk_dir else ""
+ # Try to find an installation location for the Windows DDK by checking
+ # the WDK_DIR environment variable, may be None.
+ env["$(WDK_DIR)"] = os.environ.get("WDK_DIR", "")
+ return env
+
+
+def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
+ """Finds msvs_system_include_dirs that are common to all targets, removes
+ them from all targets, and returns an OrderedSet containing them."""
+ all_system_includes = OrderedSet(configs[0].get("msvs_system_include_dirs", []))
+ for config in configs[1:]:
+ system_includes = config.get("msvs_system_include_dirs", [])
+ all_system_includes = all_system_includes & OrderedSet(system_includes)
+ if not all_system_includes:
+ return None
+ # Expand macros in all_system_includes.
+ env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
+ expanded_system_includes = OrderedSet(
+ [ExpandMacros(include, env) for include in all_system_includes]
+ )
+ if any(["$" in include for include in expanded_system_includes]):
+ # Some path relies on target-specific variables, bail.
+ return None
+
+ # Remove system includes shared by all targets from the targets.
+ for config in configs:
+ includes = config.get("msvs_system_include_dirs", [])
+ if includes: # Don't insert a msvs_system_include_dirs key if not needed.
+ # This must check the unexpanded includes list:
+ new_includes = [i for i in includes if i not in all_system_includes]
+ config["msvs_system_include_dirs"] = new_includes
+ return expanded_system_includes
+
+
+class MsvsSettings:
+ """A class that understands the gyp 'msvs_...' values (especially the
+ msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
+ class helps map those settings to command line options."""
+
+ def __init__(self, spec, generator_flags):
+ self.spec = spec
+ self.vs_version = GetVSVersion(generator_flags)
+
+ supported_fields = [
+ ("msvs_configuration_attributes", dict),
+ ("msvs_settings", dict),
+ ("msvs_system_include_dirs", list),
+ ("msvs_disabled_warnings", list),
+ ("msvs_precompiled_header", str),
+ ("msvs_precompiled_source", str),
+ ("msvs_configuration_platform", str),
+ ("msvs_target_platform", str),
+ ]
+ configs = spec["configurations"]
+ for field, default in supported_fields:
+ setattr(self, field, {})
+ for configname, config in configs.items():
+ getattr(self, field)[configname] = config.get(field, default())
+
+ self.msvs_cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."])
+
+ unsupported_fields = [
+ "msvs_prebuild",
+ "msvs_postbuild",
+ ]
+ unsupported = []
+ for field in unsupported_fields:
+ for config in configs.values():
+ if field in config:
+ unsupported += [
+ "{} not supported (target {}).".format(
+ field, spec["target_name"]
+ )
+ ]
+ if unsupported:
+ raise Exception("\n".join(unsupported))
+
+ def GetExtension(self):
+ """Returns the extension for the target, with no leading dot.
+
+ Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
+ the target type.
+ """
+ ext = self.spec.get("product_extension", None)
+ if ext:
+ return ext
+ return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec["type"], "")
+
+ def GetVSMacroEnv(self, base_to_build=None, config=None):
+ """Get a dict of variables mapping internal VS macro names to their gyp
+ equivalents."""
+ target_arch = self.GetArch(config)
+ if target_arch == "x86":
+ target_platform = "Win32"
+ else:
+ target_platform = target_arch
+ target_name = self.spec.get("product_prefix", "") + self.spec.get(
+ "product_name", self.spec["target_name"]
+ )
+ target_dir = base_to_build + "\\" if base_to_build else ""
+ target_ext = "." + self.GetExtension()
+ target_file_name = target_name + target_ext
+
+ replacements = {
+ "$(InputName)": "${root}",
+ "$(InputPath)": "${source}",
+ "$(IntDir)": "$!INTERMEDIATE_DIR",
+ "$(OutDir)\\": target_dir,
+ "$(PlatformName)": target_platform,
+ "$(ProjectDir)\\": "",
+ "$(ProjectName)": self.spec["target_name"],
+ "$(TargetDir)\\": target_dir,
+ "$(TargetExt)": target_ext,
+ "$(TargetFileName)": target_file_name,
+ "$(TargetName)": target_name,
+ "$(TargetPath)": os.path.join(target_dir, target_file_name),
+ }
+ replacements.update(GetGlobalVSMacroEnv(self.vs_version))
+ return replacements
+
+ def ConvertVSMacros(self, s, base_to_build=None, config=None):
+ """Convert from VS macro names to something equivalent."""
+ env = self.GetVSMacroEnv(base_to_build, config=config)
+ return ExpandMacros(s, env)
+
+ def AdjustLibraries(self, libraries):
+ """Strip -l from library if it's specified with that."""
+ libs = [lib[2:] if lib.startswith("-l") else lib for lib in libraries]
+ return [
+ lib if lib.lower().endswith((".lib", ".obj")) else lib + ".lib"
+ for lib in libs
+ ]
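+
+ # Illustrative example (not from the original source):
+ #   AdjustLibraries(["-lws2_32", "kernel32.lib", "foo.obj"])
+ # returns ["ws2_32.lib", "kernel32.lib", "foo.obj"].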
+
+ def _GetAndMunge(self, field, path, default, prefix, append, map):
+ """Retrieve a value from |field| at |path| or return |default|. If
+ |append| is specified, and the item is found, it will be appended to that
+ object instead of returned. If |map| is specified, results will be
+ remapped through |map| before being returned or appended."""
+ result = _GenericRetrieve(field, default, path)
+ result = _DoRemapping(result, map)
+ result = _AddPrefix(result, prefix)
+ return _AppendOrReturn(append, result)
+
+ class _GetWrapper:
+ def __init__(self, parent, field, base_path, append=None):
+ self.parent = parent
+ self.field = field
+ self.base_path = [base_path]
+ self.append = append
+
+ def __call__(self, name, map=None, prefix="", default=None):
+ return self.parent._GetAndMunge(
+ self.field,
+ self.base_path + [name],
+ default=default,
+ prefix=prefix,
+ append=self.append,
+ map=map,
+ )
+
+ def GetArch(self, config):
+ """Get architecture based on msvs_configuration_platform and
+ msvs_target_platform. Returns either 'x86' or 'x64'."""
+ configuration_platform = self.msvs_configuration_platform.get(config, "")
+ platform = self.msvs_target_platform.get(config, "")
+ if not platform: # If no specific override, use the configuration's.
+ platform = configuration_platform
+ # Map from platform to architecture.
+ return {"Win32": "x86", "x64": "x64", "ARM64": "arm64"}.get(platform, "x86")
+
+ def _TargetConfig(self, config):
+ """Returns the target-specific configuration."""
+ # There are two levels of architecture/platform specification in VS. The
+ # first level is global to the configuration (this is what we consider
+ # "the" config at the gyp level, which will be something like 'Debug' or
+ # 'Release'); VS2015 and later use only this level.
+ if int(self.vs_version.short_name) >= 2015:
+ return config
+ # Earlier versions also have a second, target-specific configuration that
+ # overrides the global one. |config| is remapped here to take the local
+ # target-specific overrides to the global configuration into account.
+ arch = self.GetArch(config)
+ if arch == "x64" and not config.endswith("_x64"):
+ config += "_x64"
+ if arch == "x86" and config.endswith("_x64"):
+ config = config.rsplit("_", 1)[0]
+ return config
+
+ def _Setting(self, path, config, default=None, prefix="", append=None, map=None):
+ """_GetAndMunge for msvs_settings."""
+ return self._GetAndMunge(
+ self.msvs_settings[config], path, default, prefix, append, map
+ )
+
+ def _ConfigAttrib(
+ self, path, config, default=None, prefix="", append=None, map=None
+ ):
+ """_GetAndMunge for msvs_configuration_attributes."""
+ return self._GetAndMunge(
+ self.msvs_configuration_attributes[config],
+ path,
+ default,
+ prefix,
+ append,
+ map,
+ )
+
+ def AdjustIncludeDirs(self, include_dirs, config):
+ """Updates include_dirs to expand VS specific paths, and adds the system
+ include dirs used for platform SDK and similar."""
+ config = self._TargetConfig(config)
+ includes = include_dirs + self.msvs_system_include_dirs[config]
+ includes.extend(
+ self._Setting(
+ ("VCCLCompilerTool", "AdditionalIncludeDirectories"), config, default=[]
+ )
+ )
+ return [self.ConvertVSMacros(p, config=config) for p in includes]
+
+ def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
+ """Updates midl_include_dirs to expand VS specific paths, and adds the
+ system include dirs used for platform SDK and similar."""
+ config = self._TargetConfig(config)
+ includes = midl_include_dirs + self.msvs_system_include_dirs[config]
+ includes.extend(
+ self._Setting(
+ ("VCMIDLTool", "AdditionalIncludeDirectories"), config, default=[]
+ )
+ )
+ return [self.ConvertVSMacros(p, config=config) for p in includes]
+
+ def GetComputedDefines(self, config):
+ """Returns the set of defines that are injected to the defines list based
+ on other VS settings."""
+ config = self._TargetConfig(config)
+ defines = []
+ if self._ConfigAttrib(["CharacterSet"], config) == "1":
+ defines.extend(("_UNICODE", "UNICODE"))
+ if self._ConfigAttrib(["CharacterSet"], config) == "2":
+ defines.append("_MBCS")
+ defines.extend(
+ self._Setting(
+ ("VCCLCompilerTool", "PreprocessorDefinitions"), config, default=[]
+ )
+ )
+ return defines
+
+ def GetCompilerPdbName(self, config, expand_special):
+ """Get the pdb file name that should be used for compiler invocations, or
+ None if there's no explicit name specified."""
+ config = self._TargetConfig(config)
+ pdbname = self._Setting(("VCCLCompilerTool", "ProgramDataBaseFileName"), config)
+ if pdbname:
+ pdbname = expand_special(self.ConvertVSMacros(pdbname))
+ return pdbname
+
+ def GetMapFileName(self, config, expand_special):
+ """Gets the explicitly overridden map file name for a target or returns None
+ if it's not set."""
+ config = self._TargetConfig(config)
+ map_file = self._Setting(("VCLinkerTool", "MapFileName"), config)
+ if map_file:
+ map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
+ return map_file
+
+ def GetOutputName(self, config, expand_special):
+ """Gets the explicitly overridden output name for a target or returns None
+ if it's not overridden."""
+ config = self._TargetConfig(config)
+ type = self.spec["type"]
+ root = "VCLibrarianTool" if type == "static_library" else "VCLinkerTool"
+ # TODO(scottmg): Handle OutputDirectory without OutputFile.
+ output_file = self._Setting((root, "OutputFile"), config)
+ if output_file:
+ output_file = expand_special(
+ self.ConvertVSMacros(output_file, config=config)
+ )
+ return output_file
+
+ def GetPDBName(self, config, expand_special, default):
+ """Gets the explicitly overridden pdb name for a target or returns
+ default if it's not overridden, or if no pdb will be generated."""
+ config = self._TargetConfig(config)
+ output_file = self._Setting(("VCLinkerTool", "ProgramDatabaseFile"), config)
+ generate_debug_info = self._Setting(
+ ("VCLinkerTool", "GenerateDebugInformation"), config
+ )
+ if generate_debug_info == "true":
+ if output_file:
+ return expand_special(self.ConvertVSMacros(output_file, config=config))
+ else:
+ return default
+ else:
+ return None
+
+ def GetNoImportLibrary(self, config):
+ """If NoImportLibrary: true, ninja will not expect the output to include
+ an import library."""
+ config = self._TargetConfig(config)
+ noimplib = self._Setting(("NoImportLibrary",), config)
+ return noimplib == "true"
+
+ def GetAsmflags(self, config):
+ """Returns the flags that need to be added to ml invocations."""
+ config = self._TargetConfig(config)
+ asmflags = []
+ safeseh = self._Setting(("MASM", "UseSafeExceptionHandlers"), config)
+ if safeseh == "true":
+ asmflags.append("/safeseh")
+ return asmflags
+
+ def GetCflags(self, config):
+ """Returns the flags that need to be added to .c and .cc compilations."""
+ config = self._TargetConfig(config)
+ cflags = []
+ cflags.extend(["/wd" + w for w in self.msvs_disabled_warnings[config]])
+ cl = self._GetWrapper(
+ self, self.msvs_settings[config], "VCCLCompilerTool", append=cflags
+ )
+ cl(
+ "Optimization",
+ map={"0": "d", "1": "1", "2": "2", "3": "x"},
+ prefix="/O",
+ default="2",
+ )
+ cl("InlineFunctionExpansion", prefix="/Ob")
+ cl("DisableSpecificWarnings", prefix="/wd")
+ cl("StringPooling", map={"true": "/GF"})
+ cl("EnableFiberSafeOptimizations", map={"true": "/GT"})
+ cl("OmitFramePointers", map={"false": "-", "true": ""}, prefix="/Oy")
+ cl("EnableIntrinsicFunctions", map={"false": "-", "true": ""}, prefix="/Oi")
+ cl("FavorSizeOrSpeed", map={"1": "t", "2": "s"}, prefix="/O")
+ cl(
+ "FloatingPointModel",
+ map={"0": "precise", "1": "strict", "2": "fast"},
+ prefix="/fp:",
+ default="0",
+ )
+ cl("CompileAsManaged", map={"false": "", "true": "/clr"})
+ cl("WholeProgramOptimization", map={"true": "/GL"})
+ cl("WarningLevel", prefix="/W")
+ cl("WarnAsError", map={"true": "/WX"})
+ cl(
+ "CallingConvention",
+ map={"0": "d", "1": "r", "2": "z", "3": "v"},
+ prefix="/G",
+ )
+ cl("DebugInformationFormat", map={"1": "7", "3": "i", "4": "I"}, prefix="/Z")
+ cl("RuntimeTypeInfo", map={"true": "/GR", "false": "/GR-"})
+ cl("EnableFunctionLevelLinking", map={"true": "/Gy", "false": "/Gy-"})
+ cl("MinimalRebuild", map={"true": "/Gm"})
+ cl("BufferSecurityCheck", map={"true": "/GS", "false": "/GS-"})
+ cl("BasicRuntimeChecks", map={"1": "s", "2": "u", "3": "1"}, prefix="/RTC")
+ cl(
+ "RuntimeLibrary",
+ map={"0": "T", "1": "Td", "2": "D", "3": "Dd"},
+ prefix="/M",
+ )
+ cl("ExceptionHandling", map={"1": "sc", "2": "a"}, prefix="/EH")
+ cl("DefaultCharIsUnsigned", map={"true": "/J"})
+ cl(
+ "TreatWChar_tAsBuiltInType",
+ map={"false": "-", "true": ""},
+ prefix="/Zc:wchar_t",
+ )
+ cl("EnablePREfast", map={"true": "/analyze"})
+ cl("AdditionalOptions", prefix="")
+ cl(
+ "EnableEnhancedInstructionSet",
+ map={"1": "SSE", "2": "SSE2", "3": "AVX", "4": "IA32", "5": "AVX2"},
+ prefix="/arch:",
+ )
+ cflags.extend(
+ [
+ "/FI" + f
+ for f in self._Setting(
+ ("VCCLCompilerTool", "ForcedIncludeFiles"), config, default=[]
+ )
+ ]
+ )
+ if float(self.vs_version.project_version) >= 12.0:
+ # New flag introduced in VS2013 (project version 12.0) Forces writes to
+ # the program database (PDB) to be serialized through MSPDBSRV.EXE.
+ # https://msdn.microsoft.com/en-us/library/dn502518.aspx
+ cflags.append("/FS")
+ # ninja handles parallelism by itself, don't have the compiler do it too.
+ cflags = [x for x in cflags if not x.startswith("/MP")]
+ return cflags
+
+ def _GetPchFlags(self, config, extension):
+ """Get the flags to be added to the cflags for precompiled header support."""
+ config = self._TargetConfig(config)
+ # The PCH is only built once by a particular source file. Usage of PCH must
+ # only be for the same language (i.e. C vs. C++), so only include the pch
+ # flags when the language matches.
+ if self.msvs_precompiled_header[config]:
+ source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
+ if _LanguageMatchesForPch(source_ext, extension):
+ pch = self.msvs_precompiled_header[config]
+ pchbase = os.path.split(pch)[1]
+ return ["/Yu" + pch, "/FI" + pch, "/Fp${pchprefix}." + pchbase + ".pch"]
+ return []
+
+ def GetCflagsC(self, config):
+ """Returns the flags that need to be added to .c compilations."""
+ config = self._TargetConfig(config)
+ return self._GetPchFlags(config, ".c")
+
+ def GetCflagsCC(self, config):
+ """Returns the flags that need to be added to .cc compilations."""
+ config = self._TargetConfig(config)
+ return ["/TP"] + self._GetPchFlags(config, ".cc")
+
+ def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
+ """Get and normalize the list of paths in AdditionalLibraryDirectories
+ setting."""
+ config = self._TargetConfig(config)
+ libpaths = self._Setting(
+ (root, "AdditionalLibraryDirectories"), config, default=[]
+ )
+ libpaths = [
+ os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
+ for p in libpaths
+ ]
+ return ['/LIBPATH:"' + p + '"' for p in libpaths]
+
+ def GetLibFlags(self, config, gyp_to_build_path):
+ """Returns the flags that need to be added to lib commands."""
+ config = self._TargetConfig(config)
+ libflags = []
+ lib = self._GetWrapper(
+ self, self.msvs_settings[config], "VCLibrarianTool", append=libflags
+ )
+ libflags.extend(
+ self._GetAdditionalLibraryDirectories(
+ "VCLibrarianTool", config, gyp_to_build_path
+ )
+ )
+ lib("LinkTimeCodeGeneration", map={"true": "/LTCG"})
+ lib(
+ "TargetMachine",
+ map={"1": "X86", "17": "X64", "3": "ARM"},
+ prefix="/MACHINE:",
+ )
+ lib("AdditionalOptions")
+ return libflags
+
+ def GetDefFile(self, gyp_to_build_path):
+ """Returns the .def file from sources, if any. Otherwise returns None."""
+ spec = self.spec
+ if spec["type"] in ("shared_library", "loadable_module", "executable"):
+ def_files = [
+ s for s in spec.get("sources", []) if s.lower().endswith(".def")
+ ]
+ if len(def_files) == 1:
+ return gyp_to_build_path(def_files[0])
+ elif len(def_files) > 1:
+ raise Exception("Multiple .def files")
+ return None
+
+ def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
+ """.def files get implicitly converted to a ModuleDefinitionFile for the
+ linker in the VS generator. Emulate that behaviour here."""
+ def_file = self.GetDefFile(gyp_to_build_path)
+ if def_file:
+ ldflags.append('/DEF:"%s"' % def_file)
+
+ def GetPGDName(self, config, expand_special):
+ """Gets the explicitly overridden pgd name for a target or returns None
+ if it's not overridden."""
+ config = self._TargetConfig(config)
+ output_file = self._Setting(("VCLinkerTool", "ProfileGuidedDatabase"), config)
+ if output_file:
+ output_file = expand_special(
+ self.ConvertVSMacros(output_file, config=config)
+ )
+ return output_file
+
+ def GetLdflags(
+ self,
+ config,
+ gyp_to_build_path,
+ expand_special,
+ manifest_base_name,
+ output_name,
+ is_executable,
+ build_dir,
+ ):
+ """Returns the flags that need to be added to link commands, and the
+ manifest files."""
+ config = self._TargetConfig(config)
+ ldflags = []
+ ld = self._GetWrapper(
+ self, self.msvs_settings[config], "VCLinkerTool", append=ldflags
+ )
+ self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
+ ld("GenerateDebugInformation", map={"true": "/DEBUG"})
+ # TODO: These 'map' values come from the machineTypeOption enum, which
+ # does not yet have an official value for ARM64 in VS2017. Verify the
+ # ARM64 value once machineTypeOption is updated.
+ ld(
+ "TargetMachine",
+ map={"1": "X86", "17": "X64", "3": "ARM", "18": "ARM64"},
+ prefix="/MACHINE:",
+ )
+ ldflags.extend(
+ self._GetAdditionalLibraryDirectories(
+ "VCLinkerTool", config, gyp_to_build_path
+ )
+ )
+ ld("DelayLoadDLLs", prefix="/DELAYLOAD:")
+ ld("TreatLinkerWarningAsErrors", prefix="/WX", map={"true": "", "false": ":NO"})
+ out = self.GetOutputName(config, expand_special)
+ if out:
+ ldflags.append("/OUT:" + out)
+ pdb = self.GetPDBName(config, expand_special, output_name + ".pdb")
+ if pdb:
+ ldflags.append("/PDB:" + pdb)
+ pgd = self.GetPGDName(config, expand_special)
+ if pgd:
+ ldflags.append("/PGD:" + pgd)
+ map_file = self.GetMapFileName(config, expand_special)
+ ld("GenerateMapFile", map={"true": "/MAP:" + map_file if map_file else "/MAP"})
+ ld("MapExports", map={"true": "/MAPINFO:EXPORTS"})
+ ld("AdditionalOptions", prefix="")
+
+ minimum_required_version = self._Setting(
+ ("VCLinkerTool", "MinimumRequiredVersion"), config, default=""
+ )
+ if minimum_required_version:
+ minimum_required_version = "," + minimum_required_version
+ ld(
+ "SubSystem",
+ map={
+ "1": "CONSOLE%s" % minimum_required_version,
+ "2": "WINDOWS%s" % minimum_required_version,
+ },
+ prefix="/SUBSYSTEM:",
+ )
+
+ stack_reserve_size = self._Setting(
+ ("VCLinkerTool", "StackReserveSize"), config, default=""
+ )
+ if stack_reserve_size:
+ stack_commit_size = self._Setting(
+ ("VCLinkerTool", "StackCommitSize"), config, default=""
+ )
+ if stack_commit_size:
+ stack_commit_size = "," + stack_commit_size
+ ldflags.append(f"/STACK:{stack_reserve_size}{stack_commit_size}")
+
+ ld("TerminalServerAware", map={"1": ":NO", "2": ""}, prefix="/TSAWARE")
+ ld("LinkIncremental", map={"1": ":NO", "2": ""}, prefix="/INCREMENTAL")
+ ld("BaseAddress", prefix="/BASE:")
+ ld("FixedBaseAddress", map={"1": ":NO", "2": ""}, prefix="/FIXED")
+ ld("RandomizedBaseAddress", map={"1": ":NO", "2": ""}, prefix="/DYNAMICBASE")
+ ld("DataExecutionPrevention", map={"1": ":NO", "2": ""}, prefix="/NXCOMPAT")
+ ld("OptimizeReferences", map={"1": "NOREF", "2": "REF"}, prefix="/OPT:")
+ ld("ForceSymbolReferences", prefix="/INCLUDE:")
+ ld("EnableCOMDATFolding", map={"1": "NOICF", "2": "ICF"}, prefix="/OPT:")
+ ld(
+ "LinkTimeCodeGeneration",
+ map={"1": "", "2": ":PGINSTRUMENT", "3": ":PGOPTIMIZE", "4": ":PGUPDATE"},
+ prefix="/LTCG",
+ )
+ ld("IgnoreDefaultLibraryNames", prefix="/NODEFAULTLIB:")
+ ld("ResourceOnlyDLL", map={"true": "/NOENTRY"})
+ ld("EntryPointSymbol", prefix="/ENTRY:")
+ ld("Profile", map={"true": "/PROFILE"})
+ ld("LargeAddressAware", map={"1": ":NO", "2": ""}, prefix="/LARGEADDRESSAWARE")
+ # TODO(scottmg): This should sort of be somewhere else (not really a flag).
+ ld("AdditionalDependencies", prefix="")
+
+ if self.GetArch(config) == "x86":
+ safeseh_default = "true"
+ else:
+ safeseh_default = None
+ ld(
+ "ImageHasSafeExceptionHandlers",
+ map={"false": ":NO", "true": ""},
+ prefix="/SAFESEH",
+ default=safeseh_default,
+ )
+
+ # If the base address is not specifically controlled, DYNAMICBASE should
+ # be on by default.
+ if not any("DYNAMICBASE" in flag or flag == "/FIXED" for flag in ldflags):
+ ldflags.append("/DYNAMICBASE")
+
+ # If the NXCOMPAT flag has not been specified, default to on. Despite the
+ # documentation that says this only defaults to on when the subsystem is
+ # Vista or greater (which applies to the linker), the IDE defaults it on
+ # unless it's explicitly off.
+ if not any("NXCOMPAT" in flag for flag in ldflags):
+ ldflags.append("/NXCOMPAT")
+
+ have_def_file = any(flag.startswith("/DEF:") for flag in ldflags)
+ (
+ manifest_flags,
+ intermediate_manifest,
+ manifest_files,
+ ) = self._GetLdManifestFlags(
+ config,
+ manifest_base_name,
+ gyp_to_build_path,
+ is_executable and not have_def_file,
+ build_dir,
+ )
+ ldflags.extend(manifest_flags)
+ return ldflags, intermediate_manifest, manifest_files
+
+ def _GetLdManifestFlags(
+ self, config, name, gyp_to_build_path, allow_isolation, build_dir
+ ):
+ """Returns a 3-tuple:
+ - the set of flags that need to be added to the link to generate
+ a default manifest
+ - the intermediate manifest that the linker will generate, which should be
+ used to assert that it doesn't add anything to the merged one.
+ - the list of all the manifest files to be merged by the manifest tool and
+ included into the link."""
+ generate_manifest = self._Setting(
+ ("VCLinkerTool", "GenerateManifest"), config, default="true"
+ )
+ if generate_manifest != "true":
+ # This means not only that the linker should not generate the intermediate
+ # manifest but also that the manifest tool should do nothing even when
+ # additional manifests are specified.
+ return ["/MANIFEST:NO"], [], []
+
+ output_name = name + ".intermediate.manifest"
+ flags = [
+ "/MANIFEST",
+ "/ManifestFile:" + output_name,
+ ]
+
+ # Instead of using the MANIFESTUAC flags, we generate a .manifest to
+ # include into the list of manifests. This allows us to avoid the need to
+ # do two passes during linking. The /MANIFEST flag and /ManifestFile are
+ # still used, and the intermediate manifest is used to assert that the
+ # final manifest we get from merging all the additional manifest files
+ # (plus the one we generate here) isn't modified by merging the
+ # intermediate into it.
+
+ # Always NO, because we generate a manifest file that has what we want.
+ flags.append("/MANIFESTUAC:NO")
+
+ config = self._TargetConfig(config)
+ enable_uac = self._Setting(
+ ("VCLinkerTool", "EnableUAC"), config, default="true"
+ )
+ manifest_files = []
+ generated_manifest_outer = (
+ "<?xml version='1.0' encoding='UTF-8' standalone='yes'?>"
+ "<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>"
+ "%s</assembly>"
+ )
+ if enable_uac == "true":
+ execution_level = self._Setting(
+ ("VCLinkerTool", "UACExecutionLevel"), config, default="0"
+ )
+ execution_level_map = {
+ "0": "asInvoker",
+ "1": "highestAvailable",
+ "2": "requireAdministrator",
+ }
+
+ ui_access = self._Setting(
+ ("VCLinkerTool", "UACUIAccess"), config, default="false"
+ )
+
+ inner = """
+<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level='{}' uiAccess='{}' />
+ </requestedPrivileges>
+ </security>
+</trustInfo>""".format(
+ execution_level_map[execution_level],
+ ui_access,
+ )
+ else:
+ inner = ""
+
+ generated_manifest_contents = generated_manifest_outer % inner
+ generated_name = name + ".generated.manifest"
+ # Need to join with the build_dir here as we're writing it during
+ # generation time, but we return the un-joined version because the build
+ # will occur in that directory. We only write the file if the contents
+ # have changed so that simply regenerating the project files doesn't
+ # cause a relink.
+ build_dir_generated_name = os.path.join(build_dir, generated_name)
+ gyp.common.EnsureDirExists(build_dir_generated_name)
+ f = gyp.common.WriteOnDiff(build_dir_generated_name)
+ f.write(generated_manifest_contents)
+ f.close()
+ manifest_files = [generated_name]
+
+ if allow_isolation:
+ flags.append("/ALLOWISOLATION")
+
+ manifest_files += self._GetAdditionalManifestFiles(config, gyp_to_build_path)
+ return flags, output_name, manifest_files
+
+ def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
+ """Gets additional manifest files that are added to the default one
+ generated by the linker."""
+ files = self._Setting(
+ ("VCManifestTool", "AdditionalManifestFiles"), config, default=[]
+ )
+ if isinstance(files, str):
+ files = files.split(";")
+ return [
+ os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
+ for f in files
+ ]
+
+ def IsUseLibraryDependencyInputs(self, config):
+ """Returns whether the target should be linked via Use Library Dependency
+ Inputs (using component .objs of a given .lib)."""
+ config = self._TargetConfig(config)
+ uldi = self._Setting(("VCLinkerTool", "UseLibraryDependencyInputs"), config)
+ return uldi == "true"
+
+ def IsEmbedManifest(self, config):
+ """Returns whether manifest should be linked into binary."""
+ config = self._TargetConfig(config)
+ embed = self._Setting(
+ ("VCManifestTool", "EmbedManifest"), config, default="true"
+ )
+ return embed == "true"
+
+ def IsLinkIncremental(self, config):
+ """Returns whether the target should be linked incrementally."""
+ config = self._TargetConfig(config)
+ link_inc = self._Setting(("VCLinkerTool", "LinkIncremental"), config)
+ return link_inc != "1"
+
+ def GetRcflags(self, config, gyp_to_ninja_path):
+ """Returns the flags that need to be added to invocations of the resource
+ compiler."""
+ config = self._TargetConfig(config)
+ rcflags = []
+ rc = self._GetWrapper(
+ self, self.msvs_settings[config], "VCResourceCompilerTool", append=rcflags
+ )
+ rc("AdditionalIncludeDirectories", map=gyp_to_ninja_path, prefix="/I")
+ rcflags.append("/I" + gyp_to_ninja_path("."))
+ rc("PreprocessorDefinitions", prefix="/d")
+ # /l arg must be in hex without leading '0x'
+ rc("Culture", prefix="/l", map=lambda x: hex(int(x))[2:])
+ return rcflags
+
+ def BuildCygwinBashCommandLine(self, args, path_to_base):
+ """Build a command line that runs args via cygwin bash. We assume that all
+ incoming paths are in Windows normpath'd form, so they need to be
+ converted to posix style for the part of the command line that's passed to
+ bash. We also have to do some Visual Studio macro emulation here because
+ various rules use magic VS names for things. Also note that rules that
+ contain ninja variables cannot be fixed here (for example ${source}), so
+ the outer generator needs to make sure that the paths that are written out
+ are in posix style, if the command line will be used here."""
+ cygwin_dir = os.path.normpath(
+ os.path.join(path_to_base, self.msvs_cygwin_dirs[0])
+ )
+ cd = ("cd %s" % path_to_base).replace("\\", "/")
+ args = [a.replace("\\", "/").replace('"', '\\"') for a in args]
+ args = ["'%s'" % a.replace("'", "'\\''") for a in args]
+ bash_cmd = " ".join(args)
+ cmd = (
+ 'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir
+ + f'bash -c "{cd} ; {bash_cmd}"'
+ )
+ return cmd
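+ # Illustrative sketch (not upstream documentation): for args ["echo", "hi"]
+ # and path_to_base "..\\..", with <cygwin_dir> standing in for the resolved
+ # msvs_cygwin_dirs entry, the returned command is roughly:
+ #   call "<cygwin_dir>\setup_env.bat" && set CYGWIN=nontsec && bash -c "cd ../.. ; 'echo' 'hi'"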
+
+ RuleShellFlags = collections.namedtuple("RuleShellFlags", ["cygwin", "quote"])
+
+ def GetRuleShellFlags(self, rule):
+ """Return RuleShellFlags about how the given rule should be run. This
+ includes whether it should run under cygwin (msvs_cygwin_shell), and
+ whether the commands should be quoted (msvs_quote_cmd)."""
+ # If the variable is unset or set to 1, we use cygwin.
+ cygwin = int(rule.get("msvs_cygwin_shell",
+ self.spec.get("msvs_cygwin_shell", 1))) != 0
+ # Default to quoting. There are only a few special instances where the
+ # target command uses non-standard command-line parsing and handles quotes
+ # and quote escaping differently.
+ quote_cmd = int(rule.get("msvs_quote_cmd", 1))
+ assert quote_cmd != 0 or cygwin != 1, \
+ "msvs_quote_cmd=0 only applicable for msvs_cygwin_shell=0"
+ return MsvsSettings.RuleShellFlags(cygwin, quote_cmd)
+
+ def _HasExplicitRuleForExtension(self, spec, extension):
+ """Determine if there's an explicit rule for a particular extension."""
+ for rule in spec.get("rules", []):
+ if rule["extension"] == extension:
+ return True
+ return False
+
+ def _HasExplicitIdlActions(self, spec):
+ """Determine if an action should not run midl for .idl files."""
+ return any(
+ action.get("explicit_idl_action", 0) for action in spec.get("actions", [])
+ )
+
+ def HasExplicitIdlRulesOrActions(self, spec):
+ """Determine if there's an explicit rule or action for idl files. When
+ there isn't we need to generate implicit rules to build MIDL .idl files."""
+ return self._HasExplicitRuleForExtension(
+ spec, "idl"
+ ) or self._HasExplicitIdlActions(spec)
+
+ def HasExplicitAsmRules(self, spec):
+ """Determine if there's an explicit rule for asm files. When there isn't we
+ need to generate implicit rules to assemble .asm files."""
+ return self._HasExplicitRuleForExtension(spec, "asm")
+
+ def GetIdlBuildData(self, source, config):
+ """Determine the implicit outputs for an idl file. Returns output
+ directory, outputs, and variables and flags that are required."""
+ config = self._TargetConfig(config)
+ midl_get = self._GetWrapper(self, self.msvs_settings[config], "VCMIDLTool")
+
+ def midl(name, default=None):
+ return self.ConvertVSMacros(midl_get(name, default=default), config=config)
+
+ tlb = midl("TypeLibraryName", default="${root}.tlb")
+ header = midl("HeaderFileName", default="${root}.h")
+ dlldata = midl("DLLDataFileName", default="dlldata.c")
+ iid = midl("InterfaceIdentifierFileName", default="${root}_i.c")
+ proxy = midl("ProxyFileName", default="${root}_p.c")
+ # Note that .tlb is not included in the outputs, as it is not always
+ # generated (it depends on the content of the input idl file).
+ outdir = midl("OutputDirectory", default="")
+ output = [header, dlldata, iid, proxy]
+ variables = [
+ ("tlb", tlb),
+ ("h", header),
+ ("dlldata", dlldata),
+ ("iid", iid),
+ ("proxy", proxy),
+ ]
+ # TODO(scottmg): Are there configuration settings to set these flags?
+ target_platform = self.GetArch(config)
+ if target_platform == "x86":
+ target_platform = "win32"
+ flags = ["/char", "signed", "/env", target_platform, "/Oicf"]
+ return outdir, output, variables, flags
+
+
+def _LanguageMatchesForPch(source_ext, pch_source_ext):
+ c_exts = (".c",)
+ cc_exts = (".cc", ".cxx", ".cpp")
+ return (source_ext in c_exts and pch_source_ext in c_exts) or (
+ source_ext in cc_exts and pch_source_ext in cc_exts
+ )
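+# Illustration of _LanguageMatchesForPch (added for clarity, not upstream):
+#   _LanguageMatchesForPch(".cc", ".cpp") -> True   (both C++)
+#   _LanguageMatchesForPch(".c", ".cpp")  -> False  (C vs. C++)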
+
+
+class PrecompiledHeader:
+ """Helper to generate dependencies and build rules to handle generation of
+ precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
+ """
+
+ def __init__(
+ self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext
+ ):
+ self.settings = settings
+ self.config = config
+ pch_source = self.settings.msvs_precompiled_source[self.config]
+ self.pch_source = gyp_to_build_path(pch_source)
+ filename, _ = os.path.splitext(pch_source)
+ self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
+
+ def _PchHeader(self):
+ """Get the header that will appear in an #include line for all source
+ files."""
+ return self.settings.msvs_precompiled_header[self.config]
+
+ def GetObjDependencies(self, sources, objs, arch):
+ """Given a list of sources files and the corresponding object files,
+ returns a list of the pch files that should be depended upon. The
+ additional wrapping in the return value is for interface compatibility
+ with make.py on Mac, and xcode_emulation.py."""
+ assert arch is None
+ if not self._PchHeader():
+ return []
+ pch_ext = os.path.splitext(self.pch_source)[1]
+ for source in sources:
+ if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
+ return [(None, None, self.output_obj)]
+ return []
+
+ def GetPchBuildCommands(self, arch):
+ """Not used on Windows as there are no additional build steps required
+ (instead, existing steps are modified in GetFlagsModifications below)."""
+ return []
+
+ def GetFlagsModifications(
+ self, input, output, implicit, command, cflags_c, cflags_cc, expand_special
+ ):
+ """Get the modified cflags and implicit dependencies that should be used
+ for the pch compilation step."""
+ if input == self.pch_source:
+ pch_output = ["/Yc" + self._PchHeader()]
+ if command == "cxx":
+ return (
+ [("cflags_cc", map(expand_special, cflags_cc + pch_output))],
+ self.output_obj,
+ [],
+ )
+ elif command == "cc":
+ return (
+ [("cflags_c", map(expand_special, cflags_c + pch_output))],
+ self.output_obj,
+ [],
+ )
+ return [], output, implicit
+
+
+vs_version = None
+
+
+def GetVSVersion(generator_flags):
+ global vs_version
+ if not vs_version:
+ vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
+ generator_flags.get("msvs_version", "auto"), allow_fallback=False
+ )
+ return vs_version
+
+
+def _GetVsvarsSetupArgs(generator_flags, arch):
+ vs = GetVSVersion(generator_flags)
+ return vs.SetupScript()
+
+
+def ExpandMacros(string, expansions):
+ """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
+ for the canonical way to retrieve a suitable dict."""
+ if "$" in string:
+ for old, new in expansions.items():
+ assert "$(" not in new, new
+ string = string.replace(old, new)
+ return string
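+ # Hedged example (values illustrative): ExpandMacros performs plain string
+ # replacement over the expansions dict, e.g.
+ #   ExpandMacros("$(OutDir)/foo.dll", {"$(OutDir)": "out/Release"})
+ #   -> "out/Release/foo.dll"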
+
+
+def _ExtractImportantEnvironment(output_of_set):
+ """Extracts environment variables required for the toolchain to run from
+ a textual dump output by the cmd.exe 'set' command."""
+ envvars_to_save = (
+ "goma_.*", # TODO(scottmg): This is ugly, but needed for goma.
+ "include",
+ "lib",
+ "libpath",
+ "path",
+ "pathext",
+ "systemroot",
+ "temp",
+ "tmp",
+ )
+ env = {}
+ # This occasionally happens and leads to misleading SYSTEMROOT error messages
+ # if not caught here.
+ if output_of_set.count("=") == 0:
+ raise Exception("Invalid output_of_set. Value is:\n%s" % output_of_set)
+ for line in output_of_set.splitlines():
+ for envvar in envvars_to_save:
+ if re.match(envvar + "=", line.lower()):
+ var, setting = line.split("=", 1)
+ if envvar == "path":
+ # Our own rules (for running gyp-win-tool) and other actions in
+ # Chromium rely on python being in the path. Add the path to this
+ # python here so that if it's not in the path when ninja is run
+ # later, python will still be found.
+ setting = os.path.dirname(sys.executable) + os.pathsep + setting
+ env[var.upper()] = setting
+ break
+ for required in ("SYSTEMROOT", "TEMP", "TMP"):
+ if required not in env:
+ raise Exception(
+ 'Environment variable "%s" '
+ "required to be set to valid path" % required
+ )
+ return env
+
+
+def _FormatAsEnvironmentBlock(envvar_dict):
+ """Format as an 'environment block' directly suitable for CreateProcess.
+ Briefly this is a list of key=value\0, terminated by an additional \0. See
+ CreateProcess documentation for more details."""
+ block = ""
+ nul = "\0"
+ for key, value in envvar_dict.items():
+ block += key + "=" + value + nul
+ block += nul
+ return block
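+ # Hedged example (values illustrative):
+ #   _FormatAsEnvironmentBlock({"PATH": "C:\\bin"}) -> "PATH=C:\\bin\0\0"
+ # Each key=value pair is NUL-terminated, and one extra NUL ends the block.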
+
+
+def _ExtractCLPath(output_of_where):
+ """Gets the path to cl.exe based on the output of calling the environment
+ setup batch file, followed by the equivalent of `where`."""
+ # Take the first line, as that's the first found in the PATH.
+ for line in output_of_where.strip().splitlines():
+ if line.startswith("LOC:"):
+ return line[len("LOC:") :].strip()
+
+
+def GenerateEnvironmentFiles(
+ toplevel_build_dir, generator_flags, system_includes, open_out
+):
+ """It's not sufficient to have the absolute path to the compiler, linker,
+ etc. on Windows, as those tools rely on .dlls being in the PATH. We also
+ need to support both x86 and x64 compilers within the same build (to support
+ msvs_target_platform hackery). Different architectures require a different
+ compiler binary, and different supporting environment variables (INCLUDE,
+ LIB, LIBPATH). So, we extract the environment here, wrap all invocations
+ of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
+ sets up the environment, and then we do not prefix the compiler with
+ an absolute path, instead preferring something like "cl.exe" in the rule
+ which will then run whichever the environment setup has put in the path.
+ If this procedure for generating environment files does not meet your
+ requirements (e.g. for custom toolchains), you can pass
+ "-G ninja_use_custom_environment_files" to gyp to suppress file generation
+ and use custom environment files that you have prepared yourself."""
+ archs = ("x86", "x64")
+ if generator_flags.get("ninja_use_custom_environment_files", 0):
+ cl_paths = {}
+ for arch in archs:
+ cl_paths[arch] = "cl.exe"
+ return cl_paths
+ vs = GetVSVersion(generator_flags)
+ cl_paths = {}
+ for arch in archs:
+ # Extract environment variables for subprocesses.
+ args = vs.SetupScript(arch)
+ args.extend(("&&", "set"))
+ popen = subprocess.Popen(
+ args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+ )
+ variables = popen.communicate()[0].decode("utf-8")
+ if popen.returncode != 0:
+ raise Exception('"%s" failed with error %d' % (args, popen.returncode))
+ env = _ExtractImportantEnvironment(variables)
+
+ # Inject system includes from gyp files into INCLUDE.
+ if system_includes:
+ system_includes = system_includes | OrderedSet(
+ env.get("INCLUDE", "").split(";")
+ )
+ env["INCLUDE"] = ";".join(system_includes)
+
+ env_block = _FormatAsEnvironmentBlock(env)
+ f = open_out(os.path.join(toplevel_build_dir, "environment." + arch), "w")
+ f.write(env_block)
+ f.close()
+
+ # Find cl.exe location for this architecture.
+ args = vs.SetupScript(arch)
+ args.extend(
+ ("&&", "for", "%i", "in", "(cl.exe)", "do", "@echo", "LOC:%~$PATH:i")
+ )
+ popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
+ output = popen.communicate()[0].decode("utf-8")
+ cl_paths[arch] = _ExtractCLPath(output)
+ return cl_paths
+
+
+def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
+ """Emulate behavior of msvs_error_on_missing_sources present in the msvs
+ generator: Check that all regular source files, i.e. not created at run time,
+ exist on disk. Missing files cause needless recompilation when building via
+ VS, and we want this check to match for people/bots that build using ninja,
+ so they're not surprised when the VS build fails."""
+ if int(generator_flags.get("msvs_error_on_missing_sources", 0)):
+ no_specials = filter(lambda x: "$" not in x, sources)
+ relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
+ missing = [x for x in relative if not os.path.exists(x)]
+ if missing:
+ # They'll look like out\Release\..\..\stuff\things.cc, so normalize the
+ # path for a slightly less crazy looking output.
+ cleaned_up = [os.path.normpath(x) for x in missing]
+ raise Exception("Missing input files:\n%s" % "\n".join(cleaned_up))
+
+
+# Sets some values in default_variables, which are required for many
+# generators, run on Windows.
+def CalculateCommonVariables(default_variables, params):
+ generator_flags = params.get("generator_flags", {})
+
+ # Set a variable so conditions can be based on msvs_version.
+ msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
+ default_variables["MSVS_VERSION"] = msvs_version.ShortName()
+
+ # To determine processor word size on Windows, in addition to checking
+ # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
+ # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
+ # contains the actual word size of the system when running thru WOW64).
+ if "64" in os.environ.get("PROCESSOR_ARCHITECTURE", "") or "64" in os.environ.get(
+ "PROCESSOR_ARCHITEW6432", ""
+ ):
+ default_variables["MSVS_OS_BITS"] = 64
+ else:
+ default_variables["MSVS_OS_BITS"] = 32
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
new file mode 100644
index 0000000..0e3e86c
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
@@ -0,0 +1,174 @@
+# This file comes from
+# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
+# Do not edit! Edit the upstream one instead.
+
+"""Python module for generating .ninja files.
+
+Note that this is emphatically not a required piece of Ninja; it's
+just a helpful utility for build-file-generation systems that already
+use Python.
+"""
+
+import textwrap
+
+
+def escape_path(word):
+ return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")
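+# e.g. escape_path("foo bar:baz") -> "foo$ bar$:baz" (spaces and colons are
+# escaped with '$' so ninja does not treat them as separators).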
+
+
+class Writer:
+ def __init__(self, output, width=78):
+ self.output = output
+ self.width = width
+
+ def newline(self):
+ self.output.write("\n")
+
+ def comment(self, text):
+ for line in textwrap.wrap(text, self.width - 2):
+ self.output.write("# " + line + "\n")
+
+ def variable(self, key, value, indent=0):
+ if value is None:
+ return
+ if isinstance(value, list):
+ value = " ".join(filter(None, value)) # Filter out empty strings.
+ self._line(f"{key} = {value}", indent)
+
+ def pool(self, name, depth):
+ self._line("pool %s" % name)
+ self.variable("depth", depth, indent=1)
+
+ def rule(
+ self,
+ name,
+ command,
+ description=None,
+ depfile=None,
+ generator=False,
+ pool=None,
+ restat=False,
+ rspfile=None,
+ rspfile_content=None,
+ deps=None,
+ ):
+ self._line("rule %s" % name)
+ self.variable("command", command, indent=1)
+ if description:
+ self.variable("description", description, indent=1)
+ if depfile:
+ self.variable("depfile", depfile, indent=1)
+ if generator:
+ self.variable("generator", "1", indent=1)
+ if pool:
+ self.variable("pool", pool, indent=1)
+ if restat:
+ self.variable("restat", "1", indent=1)
+ if rspfile:
+ self.variable("rspfile", rspfile, indent=1)
+ if rspfile_content:
+ self.variable("rspfile_content", rspfile_content, indent=1)
+ if deps:
+ self.variable("deps", deps, indent=1)
+
+ def build(
+ self, outputs, rule, inputs=None, implicit=None, order_only=None, variables=None
+ ):
+ outputs = self._as_list(outputs)
+ all_inputs = self._as_list(inputs)[:]
+ out_outputs = list(map(escape_path, outputs))
+ all_inputs = list(map(escape_path, all_inputs))
+
+ if implicit:
+ implicit = map(escape_path, self._as_list(implicit))
+ all_inputs.append("|")
+ all_inputs.extend(implicit)
+ if order_only:
+ order_only = map(escape_path, self._as_list(order_only))
+ all_inputs.append("||")
+ all_inputs.extend(order_only)
+
+ self._line(
+ "build {}: {}".format(" ".join(out_outputs), " ".join([rule] + all_inputs))
+ )
+
+ if variables:
+ if isinstance(variables, dict):
+ iterator = iter(variables.items())
+ else:
+ iterator = iter(variables)
+
+ for key, val in iterator:
+ self.variable(key, val, indent=1)
+
+ return outputs
+
+ def include(self, path):
+ self._line("include %s" % path)
+
+ def subninja(self, path):
+ self._line("subninja %s" % path)
+
+ def default(self, paths):
+ self._line("default %s" % " ".join(self._as_list(paths)))
+
+ def _count_dollars_before_index(self, s, i):
+ """Returns the number of '$' characters right in front of s[i]."""
+ dollar_count = 0
+ dollar_index = i - 1
+ while dollar_index > 0 and s[dollar_index] == "$":
+ dollar_count += 1
+ dollar_index -= 1
+ return dollar_count
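+ # Illustration (added comment): for s = "a$$ b" and i = 3,
+ # _count_dollars_before_index returns 2, since s[1:3] == "$$".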
+
+ def _line(self, text, indent=0):
+ """Write 'text' word-wrapped at self.width characters."""
+ leading_space = " " * indent
+ while len(leading_space) + len(text) > self.width:
+ # The text is too wide; wrap if possible.
+
+ # Find the rightmost space that would obey our width constraint and
+ # that's not an escaped space.
+ available_space = self.width - len(leading_space) - len(" $")
+ space = available_space
+ while True:
+ space = text.rfind(" ", 0, space)
+ if space < 0 or self._count_dollars_before_index(text, space) % 2 == 0:
+ break
+
+ if space < 0:
+ # No such space; just use the first unescaped space we can find.
+ space = available_space - 1
+ while True:
+ space = text.find(" ", space + 1)
+ if (
+ space < 0
+ or self._count_dollars_before_index(text, space) % 2 == 0
+ ):
+ break
+ if space < 0:
+ # Give up on breaking.
+ break
+
+ self.output.write(leading_space + text[0:space] + " $\n")
+ text = text[space + 1 :]
+
+ # Subsequent lines are continuations, so indent them.
+ leading_space = " " * (indent + 2)
+
+ self.output.write(leading_space + text + "\n")
+
+ def _as_list(self, input):
+ if input is None:
+ return []
+ if isinstance(input, list):
+ return input
+ return [input]
+
+
+def escape(string):
+ """Escape a string such that it can be embedded into a Ninja file without
+ further interpretation."""
+ assert "\n" not in string, "Ninja syntax does not allow newlines"
+ # We only have one special metacharacter: '$'.
+ return string.replace("$", "$$")
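+# e.g. escape("$out") -> "$$out"; a literal '$' must be doubled so ninja does
+# not expand it as a variable reference.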
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
new file mode 100644
index 0000000..729cec0
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
@@ -0,0 +1,61 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A clone of the default copy.deepcopy that doesn't handle cyclic
+structures or complex types except for dicts and lists. This is
+because gyp copies such large structures that even the small copy overhead
+ends up taking seconds in a project the size of Chromium."""
+
+
+class Error(Exception):
+ pass
+
+
+__all__ = ["Error", "deepcopy"]
+
+
+def deepcopy(x):
+ """Deep copy operation on gyp objects such as strings, ints, dicts
+ and lists. More than twice as fast as copy.deepcopy but much less
+ generic."""
+
+ try:
+ return _deepcopy_dispatch[type(x)](x)
+ except KeyError:
+ raise Error(
+ "Unsupported type %s for deepcopy. Use copy.deepcopy "
+ "or expand simple_copy support." % type(x)
+ )
+
+
+_deepcopy_dispatch = d = {}
+
+
+def _deepcopy_atomic(x):
+ return x
+
+
+types = bool, float, int, str, type, type(None)
+
+for x in types:
+ d[x] = _deepcopy_atomic
+
+
+def _deepcopy_list(x):
+ return [deepcopy(a) for a in x]
+
+
+d[list] = _deepcopy_list
+
+
+def _deepcopy_dict(x):
+ y = {}
+ for key, value in x.items():
+ y[deepcopy(key)] = deepcopy(value)
+ return y
+
+
+d[dict] = _deepcopy_dict
+
+del d
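+# Hedged usage sketch (illustrative, not part of the module): dicts and lists
+# copy recursively, while unsupported types raise Error.
+#   deepcopy({"a": [1, 2]})  -> {"a": [1, 2]} (new dict, new inner list)
+#   deepcopy((1, 2))         -> raises Error (tuples are not in the dispatch)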
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
new file mode 100755
index 0000000..638eee4
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
@@ -0,0 +1,374 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for Windows builds.
+
+These functions are executed via gyp-win-tool when using the ninja generator.
+"""
+
+
+import os
+import re
+import shutil
+import subprocess
+import stat
+import string
+import sys
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+
+# A regex matching an argument corresponding to the output filename passed to
+# link.exe.
+_LINK_EXE_OUT_ARG = re.compile("/OUT:(?P<out>.+)$", re.IGNORECASE)
+
+
+def main(args):
+ executor = WinTool()
+ exit_code = executor.Dispatch(args)
+ if exit_code is not None:
+ sys.exit(exit_code)
+
+
+class WinTool:
+ """This class performs all the Windows tooling steps. The methods can either
+ be executed directly, or dispatched from an argument list."""
+
+ def _UseSeparateMspdbsrv(self, env, args):
+ """Allows to use a unique instance of mspdbsrv.exe per linker instead of a
+ shared one."""
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ if args[0] != "link.exe":
+ return
+
+ # Use the output filename passed to the linker to generate an endpoint name
+ # for mspdbsrv.exe.
+ endpoint_name = None
+ for arg in args:
+ m = _LINK_EXE_OUT_ARG.match(arg)
+ if m:
+ endpoint_name = re.sub(
+ r"\W+", "", "%s_%d" % (m.group("out"), os.getpid())
+ )
+ break
+
+ if endpoint_name is None:
+ return
+
+ # Adds the appropriate environment variable. This will be read by link.exe
+ # to know which instance of mspdbsrv.exe it should connect to (if it's
+ # not set then the default endpoint is used).
+ env["_MSPDBSRV_ENDPOINT_"] = endpoint_name
+
+ def Dispatch(self, args):
+ """Dispatches a string command to a method."""
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ method = "Exec%s" % self._CommandifyName(args[0])
+ return getattr(self, method)(*args[1:])
+
+ def _CommandifyName(self, name_string):
+ """Transforms a tool name like recursive-mirror to RecursiveMirror."""
+ return name_string.title().replace("-", "")
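+ # e.g. _CommandifyName("recursive-mirror") -> "RecursiveMirror", so the
+ # dispatcher above resolves it to the ExecRecursiveMirror method.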
+
+ def _GetEnv(self, arch):
+ """Gets the saved environment from a file for a given architecture."""
+ # The environment is saved as an "environment block" (see CreateProcess
+ # and msvs_emulation for details). We convert to a dict here.
+ # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
+ pairs = open(arch).read()[:-2].split("\0")
+ kvs = [item.split("=", 1) for item in pairs]
+ return dict(kvs)
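+ # Hedged example (values illustrative): a file containing
+ #   "PATH=C:\\bin\0TMP=C:\\t\0\0"
+ # parses to {"PATH": "C:\\bin", "TMP": "C:\\t"}.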
+
+ def ExecStamp(self, path):
+ """Simple stamp command."""
+ open(path, "w").close()
+
+ def ExecRecursiveMirror(self, source, dest):
+ """Emulation of rm -rf out && cp -af in out."""
+ if os.path.exists(dest):
+ if os.path.isdir(dest):
+
+ def _on_error(fn, path, excinfo):
+ # The operation failed, possibly because the file is set to
+ # read-only. If that's why, make it writable and try the op again.
+ if not os.access(path, os.W_OK):
+ os.chmod(path, stat.S_IWRITE)
+ fn(path)
+
+ shutil.rmtree(dest, onerror=_on_error)
+ else:
+ if not os.access(dest, os.W_OK):
+ # Attempt to make the file writable before deleting it.
+ os.chmod(dest, stat.S_IWRITE)
+ os.unlink(dest)
+
+ if os.path.isdir(source):
+ shutil.copytree(source, dest)
+ else:
+ shutil.copy2(source, dest)
+
+ def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
+ """Filter diagnostic output from link that looks like:
+ ' Creating library ui.dll.lib and object ui.dll.exp'
+ This happens when there are exports from the dll or exe.
+ """
+ env = self._GetEnv(arch)
+ if use_separate_mspdbsrv == "True":
+ self._UseSeparateMspdbsrv(env, args)
+ if sys.platform == "win32":
+ args = list(args) # *args is a tuple by default, which is read-only.
+ args[0] = args[0].replace("/", "\\")
+ # https://docs.python.org/2/library/subprocess.html:
+ # "On Unix with shell=True [...] if args is a sequence, the first item
+ # specifies the command string, and any additional items will be treated as
+ # additional arguments to the shell itself. That is to say, Popen does the
+ # equivalent of:
+ # Popen(['/bin/sh', '-c', args[0], args[1], ...])"
+ # For that reason, and since going through the shell doesn't seem necessary
+ # on non-Windows, don't do that there.
+ link = subprocess.Popen(
+ args,
+ shell=sys.platform == "win32",
+ env=env,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ )
+ out = link.communicate()[0].decode("utf-8")
+ for line in out.splitlines():
+ if (
+ not line.startswith(" Creating library ")
+ and not line.startswith("Generating code")
+ and not line.startswith("Finished generating code")
+ ):
+ print(line)
+ return link.returncode
+
+ def ExecLinkWithManifests(
+ self,
+ arch,
+ embed_manifest,
+ out,
+ ldcmd,
+ resname,
+ mt,
+ rc,
+ intermediate_manifest,
+ *manifests
+ ):
+ """A wrapper for handling creating a manifest resource and then executing
+ a link command."""
+ # The 'normal' way to do manifests is to have link generate a manifest
+ # based on gathering dependencies from the object files, then merge that
+ # manifest with other manifests supplied as sources, convert the merged
+ # manifest to a resource, and then *relink*, including the compiled
+ # version of the manifest resource. This breaks incremental linking, and
+ # is generally overly complicated. Instead, we merge all the manifests
+ # provided (along with one that includes what would normally be in the
+ # linker-generated one, see msvs_emulation.py), and include that into the
+ # first and only link. We still tell link to generate a manifest, but we
+ # only use that to assert that our simpler process did not miss anything.
+ variables = {
+ "python": sys.executable,
+ "arch": arch,
+ "out": out,
+ "ldcmd": ldcmd,
+ "resname": resname,
+ "mt": mt,
+ "rc": rc,
+ "intermediate_manifest": intermediate_manifest,
+ "manifests": " ".join(manifests),
+ }
+ add_to_ld = ""
+ if manifests:
+ subprocess.check_call(
+ "%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo "
+ "-manifest %(manifests)s -out:%(out)s.manifest" % variables
+ )
+ if embed_manifest == "True":
+ subprocess.check_call(
+ "%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest"
+ " %(out)s.manifest.rc %(resname)s" % variables
+ )
+ subprocess.check_call(
+ "%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s "
+ "%(out)s.manifest.rc" % variables
+ )
+ add_to_ld = " %(out)s.manifest.res" % variables
+ subprocess.check_call(ldcmd + add_to_ld)
+
+ # Run mt.exe on the theoretically complete manifest we generated, merging
+ # it with the one the linker generated to confirm that the linker
+ # generated one does not add anything. This is strictly unnecessary for
+ # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
+ # used in a #pragma comment.
+ if manifests:
+ # Merge the intermediate one with ours to .assert.manifest, then check
+ # that .assert.manifest is identical to ours.
+ subprocess.check_call(
+ "%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo "
+ "-manifest %(out)s.manifest %(intermediate_manifest)s "
+ "-out:%(out)s.assert.manifest" % variables
+ )
+ assert_manifest = "%(out)s.assert.manifest" % variables
+ our_manifest = "%(out)s.manifest" % variables
+ # Load and normalize the manifests. mt.exe sometimes removes whitespace,
+ # and sometimes doesn't, unfortunately.
+ with open(our_manifest) as our_f:
+ with open(assert_manifest) as assert_f:
+ translator = str.maketrans('', '', string.whitespace)
+ our_data = our_f.read().translate(translator)
+ assert_data = assert_f.read().translate(translator)
+ if our_data != assert_data:
+ os.unlink(out)
+
+ def dump(filename):
+ print(filename, file=sys.stderr)
+ print("-----", file=sys.stderr)
+ with open(filename) as f:
+ print(f.read(), file=sys.stderr)
+ print("-----", file=sys.stderr)
+
+ dump(intermediate_manifest)
+ dump(our_manifest)
+ dump(assert_manifest)
+ sys.stderr.write(
+ 'Linker generated manifest "%s" added to final manifest "%s" '
+ '(result in "%s"). '
+ "Were /MANIFEST switches used in #pragma statements? "
+ % (intermediate_manifest, our_manifest, assert_manifest)
+ )
+ return 1
+
+ def ExecManifestWrapper(self, arch, *args):
+ """Run manifest tool with environment set. Strip out undesirable warning
+ (some XML blocks are recognized by the OS loader, but not the manifest
+ tool)."""
+ env = self._GetEnv(arch)
+ popen = subprocess.Popen(
+ args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+ )
+ out = popen.communicate()[0].decode("utf-8")
+ for line in out.splitlines():
+ if line and "manifest authoring warning 81010002" not in line:
+ print(line)
+ return popen.returncode
+
+ def ExecManifestToRc(self, arch, *args):
+ """Creates a resource file pointing a SxS assembly manifest.
+ |args| is tuple containing path to resource file, path to manifest file
+ and resource name which can be "1" (for executables) or "2" (for DLLs)."""
+ manifest_path, resource_path, resource_name = args
+ with open(resource_path, "w") as output:
+ output.write(
+ '#include <windows.h>\n%s RT_MANIFEST "%s"'
+ % (resource_name, os.path.abspath(manifest_path).replace("\\", "/"))
+ )
+
+ def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, *flags):
+ """Filter noisy filenames output from MIDL compile step that isn't
+ quietable via command line flags.
+ """
+ args = (
+ ["midl", "/nologo"]
+ + list(flags)
+ + [
+ "/out",
+ outdir,
+ "/tlb",
+ tlb,
+ "/h",
+ h,
+ "/dlldata",
+ dlldata,
+ "/iid",
+ iid,
+ "/proxy",
+ proxy,
+ idl,
+ ]
+ )
+ env = self._GetEnv(arch)
+ popen = subprocess.Popen(
+ args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+ )
+ out = popen.communicate()[0].decode("utf-8")
+ # Filter junk out of stdout, and write filtered versions. Output we want
+ # to filter is pairs of lines that look like this:
+ # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
+ # objidl.idl
+ lines = out.splitlines()
+ prefixes = ("Processing ", "64 bit Processing ")
+ processing = {os.path.basename(x) for x in lines if x.startswith(prefixes)}
+ for line in lines:
+ if not line.startswith(prefixes) and line not in processing:
+ print(line)
+ return popen.returncode
+
+ def ExecAsmWrapper(self, arch, *args):
+ """Filter logo banner from invocations of asm.exe."""
+ env = self._GetEnv(arch)
+ popen = subprocess.Popen(
+ args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+ )
+ out = popen.communicate()[0].decode("utf-8")
+ for line in out.splitlines():
+ if (
+ not line.startswith("Copyright (C) Microsoft Corporation")
+ and not line.startswith("Microsoft (R) Macro Assembler")
+ and not line.startswith(" Assembling: ")
+ and line
+ ):
+ print(line)
+ return popen.returncode
+
+ def ExecRcWrapper(self, arch, *args):
+ """Filter logo banner from invocations of rc.exe. Older versions of RC
+ don't support the /nologo flag."""
+ env = self._GetEnv(arch)
+ popen = subprocess.Popen(
+ args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+ )
+ out = popen.communicate()[0].decode("utf-8")
+ for line in out.splitlines():
+ if (
+ not line.startswith("Microsoft (R) Windows (R) Resource Compiler")
+ and not line.startswith("Copyright (C) Microsoft Corporation")
+ and line
+ ):
+ print(line)
+ return popen.returncode
+
+ def ExecActionWrapper(self, arch, rspfile, *dir):
+ """Runs an action command line from a response file using the environment
+ for |arch|. If |dir| is supplied, use that as the working directory."""
+ env = self._GetEnv(arch)
+ # TODO(scottmg): This is a temporary hack to get some specific variables
+ # through to actions that are set after gyp-time. http://crbug.com/333738.
+ for k, v in os.environ.items():
+ if k not in env:
+ env[k] = v
+ args = open(rspfile).read()
+ dir = dir[0] if dir else None
+ return subprocess.call(args, shell=True, env=env, cwd=dir)
+
+ def ExecClCompile(self, project_dir, selected_files):
+ """Executed by msvs-ninja projects when the 'ClCompile' target is used to
+ build selected C/C++ files."""
+ project_dir = os.path.relpath(project_dir, BASE_DIR)
+ selected_files = selected_files.split(";")
+ ninja_targets = [
+ os.path.join(project_dir, filename) + "^^" for filename in selected_files
+ ]
+ cmd = ["ninja.exe"]
+ cmd.extend(ninja_targets)
+ return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
new file mode 100644
index 0000000..a75d8ee
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
@@ -0,0 +1,1939 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This module contains classes that help to emulate xcodebuild behavior on top of
+other build systems, such as make and ninja.
+"""
+
+
+import copy
+import gyp.common
+import os
+import os.path
+import re
+import shlex
+import subprocess
+import sys
+from gyp.common import GypError
+
+# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
+# "xcodebuild" is called too quickly (it has been found to return an incorrect
+# version number).
+XCODE_VERSION_CACHE = None
+
+# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance
+# corresponding to the installed version of Xcode.
+XCODE_ARCHS_DEFAULT_CACHE = None
+
+
+def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
+ """Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable,
+ and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
+ mapping = {"$(ARCHS_STANDARD)": archs}
+ if archs_including_64_bit:
+ mapping["$(ARCHS_STANDARD_INCLUDING_64_BIT)"] = archs_including_64_bit
+ return mapping
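+ # e.g. XcodeArchsVariableMapping(["i386", "x86_64"]) returns
+ #   {"$(ARCHS_STANDARD)": ["i386", "x86_64"]}
+ # and passing archs_including_64_bit adds the
+ # $(ARCHS_STANDARD_INCLUDING_64_BIT) key as well.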
+
+
+class XcodeArchsDefault:
+ """A class to resolve ARCHS variable from xcode_settings, resolving Xcode
+ macros and implementing filtering by VALID_ARCHS. The expansion of macros
+ depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
+ on the version of Xcode.
+ """
+
+ # Match variable like $(ARCHS_STANDARD).
+ variable_pattern = re.compile(r"\$\([a-zA-Z_][a-zA-Z0-9_]*\)$")
+
+ def __init__(self, default, mac, iphonesimulator, iphoneos):
+ self._default = (default,)
+ self._archs = {"mac": mac, "ios": iphoneos, "iossim": iphonesimulator}
+
+ def _VariableMapping(self, sdkroot):
+ """Returns the dictionary of variable mapping depending on the SDKROOT."""
+ sdkroot = sdkroot.lower()
+ if "iphoneos" in sdkroot:
+ return self._archs["ios"]
+ elif "iphonesimulator" in sdkroot:
+ return self._archs["iossim"]
+ else:
+ return self._archs["mac"]
+
+ def _ExpandArchs(self, archs, sdkroot):
+ """Expands variables references in ARCHS, and remove duplicates."""
+ variable_mapping = self._VariableMapping(sdkroot)
+ expanded_archs = []
+ for arch in archs:
+ if self.variable_pattern.match(arch):
+ variable = arch
+ try:
+ variable_expansion = variable_mapping[variable]
+ for arch in variable_expansion:
+ if arch not in expanded_archs:
+ expanded_archs.append(arch)
+ except KeyError:
+ print('Warning: Ignoring unsupported variable "%s".' % variable)
+ elif arch not in expanded_archs:
+ expanded_archs.append(arch)
+ return expanded_archs
+
+ def ActiveArchs(self, archs, valid_archs, sdkroot):
+ """Expands variables references in ARCHS, and filter by VALID_ARCHS if it
+ is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
+ values present in VALID_ARCHS are kept)."""
+ expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or "")
+ if valid_archs:
+ filtered_archs = []
+ for arch in expanded_archs:
+ if arch in valid_archs:
+ filtered_archs.append(arch)
+ expanded_archs = filtered_archs
+ return expanded_archs
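+ # Hedged example (mappings illustrative): with a mac mapping of
+ # {"$(ARCHS_STANDARD)": ["x86_64"]},
+ #   ActiveArchs(["$(ARCHS_STANDARD)", "i386"], ["x86_64"], "macosx")
+ # expands to ["x86_64", "i386"] and then filters to ["x86_64"].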
+
+
+def GetXcodeArchsDefault():
+ """Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
+ installed version of Xcode. The default values used by Xcode for ARCHS
+ and the expansion of the variables depends on the version of Xcode used.
+
+ Versions prior to Xcode 5.0, and Xcode 5.1 and later, use $(ARCHS_STANDARD)
+ if ARCHS is unset, while Xcode 5.0 to 5.0.2 use
+ $(ARCHS_STANDARD_INCLUDING_64_BIT). That variable was added in Xcode 5.0
+ and deprecated in Xcode 5.1.
+
+ For the "macosx" SDKROOT, all versions starting with Xcode 5.0 include the
+ 64-bit architecture as part of $(ARCHS_STANDARD) and default to building
+ only it.
+
+ For the "iphoneos" and "iphonesimulator" SDKROOTs, 64-bit architectures are
+ part of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1,
+ they are also part of $(ARCHS_STANDARD).
+
+ All these rules are encoded in the construction of the |XcodeArchsDefault|
+ object used for the detected version of Xcode. The object is cached for
+ performance reasons."""
+ global XCODE_ARCHS_DEFAULT_CACHE
+ if XCODE_ARCHS_DEFAULT_CACHE:
+ return XCODE_ARCHS_DEFAULT_CACHE
+ xcode_version, _ = XcodeVersion()
+ if xcode_version < "0500":
+ XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
+ "$(ARCHS_STANDARD)",
+ XcodeArchsVariableMapping(["i386"]),
+ XcodeArchsVariableMapping(["i386"]),
+ XcodeArchsVariableMapping(["armv7"]),
+ )
+ elif xcode_version < "0510":
+ XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
+ "$(ARCHS_STANDARD_INCLUDING_64_BIT)",
+ XcodeArchsVariableMapping(["x86_64"], ["x86_64"]),
+ XcodeArchsVariableMapping(["i386"], ["i386", "x86_64"]),
+ XcodeArchsVariableMapping(
+ ["armv7", "armv7s"], ["armv7", "armv7s", "arm64"]
+ ),
+ )
+ else:
+ XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
+ "$(ARCHS_STANDARD)",
+ XcodeArchsVariableMapping(["x86_64"], ["x86_64"]),
+ XcodeArchsVariableMapping(["i386", "x86_64"], ["i386", "x86_64"]),
+ XcodeArchsVariableMapping(
+ ["armv7", "armv7s", "arm64"], ["armv7", "armv7s", "arm64"]
+ ),
+ )
+ return XCODE_ARCHS_DEFAULT_CACHE
+
+
+class XcodeSettings:
+ """A class that understands the gyp 'xcode_settings' object."""
+
+ # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
+ # at class-level for efficiency.
+ _sdk_path_cache = {}
+ _platform_path_cache = {}
+ _sdk_root_cache = {}
+
+ # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
+ # cached at class-level for efficiency.
+ _plist_cache = {}
+
+ # Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so
+ # cached at class-level for efficiency.
+ _codesigning_key_cache = {}
+
+ def __init__(self, spec):
+ self.spec = spec
+
+ self.isIOS = False
+ self.mac_toolchain_dir = None
+ self.header_map_path = None
+
+ # Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
+ # This means self.xcode_settings[config] always contains all settings
+ # for that config -- the per-target settings as well. Settings that are
+ # the same for all configs are implicitly per-target settings.
+ self.xcode_settings = {}
+ configs = spec["configurations"]
+ for configname, config in configs.items():
+ self.xcode_settings[configname] = config.get("xcode_settings", {})
+ self._ConvertConditionalKeys(configname)
+ if self.xcode_settings[configname].get("IPHONEOS_DEPLOYMENT_TARGET", None):
+ self.isIOS = True
+
+ # This is only non-None temporarily during the execution of some methods.
+ self.configname = None
+
+ # Used by _AdjustLibrary to match .a and .dylib entries in libraries.
+ self.library_re = re.compile(r"^lib([^/]+)\.(a|dylib)$")
+
+ def _ConvertConditionalKeys(self, configname):
+ """Converts or warns on conditional keys. Xcode supports conditional keys,
+ such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
+ with some keys converted, while the rest trigger a warning."""
+ settings = self.xcode_settings[configname]
+ conditional_keys = [key for key in settings if key.endswith("]")]
+ for key in conditional_keys:
+ # If you need more, speak up at http://crbug.com/122592
+ if key.endswith("[sdk=iphoneos*]"):
+ if configname.endswith("iphoneos"):
+ new_key = key.split("[")[0]
+ settings[new_key] = settings[key]
+ else:
+ print(
+ "Warning: Conditional keys not implemented, ignoring:",
+ " ".join(conditional_keys),
+ )
+ del settings[key]
+
+ def _Settings(self):
+ assert self.configname
+ return self.xcode_settings[self.configname]
+
+ def _Test(self, test_key, cond_key, default):
+ return self._Settings().get(test_key, default) == cond_key
+
+ def _Appendf(self, lst, test_key, format_str, default=None):
+ if test_key in self._Settings():
+ lst.append(format_str % str(self._Settings()[test_key]))
+ elif default:
+ lst.append(format_str % str(default))
+
+ def _WarnUnimplemented(self, test_key):
+ if test_key in self._Settings():
+ print('Warning: Ignoring not yet implemented key "%s".' % test_key)
+
+ def IsBinaryOutputFormat(self, configname):
+ default = "binary" if self.isIOS else "xml"
+ format = self.xcode_settings[configname].get("INFOPLIST_OUTPUT_FORMAT", default)
+ return format == "binary"
+
+ def IsIosFramework(self):
+ return self.spec["type"] == "shared_library" and self._IsBundle() and self.isIOS
+
+ def _IsBundle(self):
+ return (
+ int(self.spec.get("mac_bundle", 0)) != 0
+ or self._IsXCTest()
+ or self._IsXCUiTest()
+ )
+
+ def _IsXCTest(self):
+ return int(self.spec.get("mac_xctest_bundle", 0)) != 0
+
+ def _IsXCUiTest(self):
+ return int(self.spec.get("mac_xcuitest_bundle", 0)) != 0
+
+ def _IsIosAppExtension(self):
+ return int(self.spec.get("ios_app_extension", 0)) != 0
+
+ def _IsIosWatchKitExtension(self):
+ return int(self.spec.get("ios_watchkit_extension", 0)) != 0
+
+ def _IsIosWatchApp(self):
+ return int(self.spec.get("ios_watch_app", 0)) != 0
+
+ def GetFrameworkVersion(self):
+ """Returns the framework version of the current target. Only valid for
+ bundles."""
+ assert self._IsBundle()
+ return self.GetPerTargetSetting("FRAMEWORK_VERSION", default="A")
+
+ def GetWrapperExtension(self):
+ """Returns the bundle extension (.app, .framework, .plugin, etc). Only
+ valid for bundles."""
+ assert self._IsBundle()
+ if self.spec["type"] in ("loadable_module", "shared_library"):
+ default_wrapper_extension = {
+ "loadable_module": "bundle",
+ "shared_library": "framework",
+ }[self.spec["type"]]
+ wrapper_extension = self.GetPerTargetSetting(
+ "WRAPPER_EXTENSION", default=default_wrapper_extension
+ )
+ return "." + self.spec.get("product_extension", wrapper_extension)
+ elif self.spec["type"] == "executable":
+ if self._IsIosAppExtension() or self._IsIosWatchKitExtension():
+ return "." + self.spec.get("product_extension", "appex")
+ else:
+ return "." + self.spec.get("product_extension", "app")
+ else:
+ assert False, "Don't know extension for '{}', target '{}'".format(
+ self.spec["type"],
+ self.spec["target_name"],
+ )
+
+ def GetProductName(self):
+ """Returns PRODUCT_NAME."""
+ return self.spec.get("product_name", self.spec["target_name"])
+
+ def GetFullProductName(self):
+ """Returns FULL_PRODUCT_NAME."""
+ if self._IsBundle():
+ return self.GetWrapperName()
+ else:
+ return self._GetStandaloneBinaryPath()
+
+ def GetWrapperName(self):
+ """Returns the directory name of the bundle represented by this target.
+ Only valid for bundles."""
+ assert self._IsBundle()
+ return self.GetProductName() + self.GetWrapperExtension()
+
+ def GetBundleContentsFolderPath(self):
+ """Returns the qualified path to the bundle's contents folder. E.g.
+ Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
+ if self.isIOS:
+ return self.GetWrapperName()
+ assert self._IsBundle()
+ if self.spec["type"] == "shared_library":
+ return os.path.join(
+ self.GetWrapperName(), "Versions", self.GetFrameworkVersion()
+ )
+ else:
+ # loadable_modules have a 'Contents' folder like executables.
+ return os.path.join(self.GetWrapperName(), "Contents")
+
+ def GetBundleResourceFolder(self):
+ """Returns the qualified path to the bundle's resource folder. E.g.
+ Chromium.app/Contents/Resources. Only valid for bundles."""
+ assert self._IsBundle()
+ if self.isIOS:
+ return self.GetBundleContentsFolderPath()
+ return os.path.join(self.GetBundleContentsFolderPath(), "Resources")
+
+ def GetBundleExecutableFolderPath(self):
+ """Returns the qualified path to the bundle's executables folder. E.g.
+ Chromium.app/Contents/MacOS. Only valid for bundles."""
+ assert self._IsBundle()
+ if self.spec["type"] in ("shared_library") or self.isIOS:
+ return self.GetBundleContentsFolderPath()
+ elif self.spec["type"] in ("executable", "loadable_module"):
+ return os.path.join(self.GetBundleContentsFolderPath(), "MacOS")
+
+ def GetBundleJavaFolderPath(self):
+ """Returns the qualified path to the bundle's Java resource folder.
+ E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleResourceFolder(), "Java")
+
+ def GetBundleFrameworksFolderPath(self):
+ """Returns the qualified path to the bundle's frameworks folder. E.g,
+ Chromium.app/Contents/Frameworks. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), "Frameworks")
+
+ def GetBundleSharedFrameworksFolderPath(self):
+ """Returns the qualified path to the bundle's frameworks folder. E.g,
+ Chromium.app/Contents/SharedFrameworks. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), "SharedFrameworks")
+
+ def GetBundleSharedSupportFolderPath(self):
+ """Returns the qualified path to the bundle's shared support folder. E.g,
+ Chromium.app/Contents/SharedSupport. Only valid for bundles."""
+ assert self._IsBundle()
+ if self.spec["type"] == "shared_library":
+ return self.GetBundleResourceFolder()
+ else:
+ return os.path.join(self.GetBundleContentsFolderPath(), "SharedSupport")
+
+ def GetBundlePlugInsFolderPath(self):
+ """Returns the qualified path to the bundle's plugins folder. E.g,
+ Chromium.app/Contents/PlugIns. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), "PlugIns")
+
+ def GetBundleXPCServicesFolderPath(self):
+ """Returns the qualified path to the bundle's XPC services folder. E.g,
+ Chromium.app/Contents/XPCServices. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), "XPCServices")
+
+ def GetBundlePlistPath(self):
+ """Returns the qualified path to the bundle's plist file. E.g.
+ Chromium.app/Contents/Info.plist. Only valid for bundles."""
+ assert self._IsBundle()
+ if (
+ self.spec["type"] in ("executable", "loadable_module")
+ or self.IsIosFramework()
+ ):
+ return os.path.join(self.GetBundleContentsFolderPath(), "Info.plist")
+ else:
+ return os.path.join(
+ self.GetBundleContentsFolderPath(), "Resources", "Info.plist"
+ )
+
+ def GetProductType(self):
+ """Returns the PRODUCT_TYPE of this target."""
+ if self._IsIosAppExtension():
+ assert self._IsBundle(), (
+ "ios_app_extension flag requires mac_bundle "
+ "(target %s)" % self.spec["target_name"]
+ )
+ return "com.apple.product-type.app-extension"
+ if self._IsIosWatchKitExtension():
+ assert self._IsBundle(), (
+ "ios_watchkit_extension flag requires "
+ "mac_bundle (target %s)" % self.spec["target_name"]
+ )
+ return "com.apple.product-type.watchkit-extension"
+ if self._IsIosWatchApp():
+ assert self._IsBundle(), (
+ "ios_watch_app flag requires mac_bundle "
+ "(target %s)" % self.spec["target_name"]
+ )
+ return "com.apple.product-type.application.watchapp"
+ if self._IsXCUiTest():
+ assert self._IsBundle(), (
+ "mac_xcuitest_bundle flag requires mac_bundle "
+ "(target %s)" % self.spec["target_name"]
+ )
+ return "com.apple.product-type.bundle.ui-testing"
+ if self._IsBundle():
+ return {
+ "executable": "com.apple.product-type.application",
+ "loadable_module": "com.apple.product-type.bundle",
+ "shared_library": "com.apple.product-type.framework",
+ }[self.spec["type"]]
+ else:
+ return {
+ "executable": "com.apple.product-type.tool",
+ "loadable_module": "com.apple.product-type.library.dynamic",
+ "shared_library": "com.apple.product-type.library.dynamic",
+ "static_library": "com.apple.product-type.library.static",
+ }[self.spec["type"]]
+
+ def GetMachOType(self):
+ """Returns the MACH_O_TYPE of this target."""
+ # Weird, but matches Xcode.
+ if not self._IsBundle() and self.spec["type"] == "executable":
+ return ""
+ return {
+ "executable": "mh_execute",
+ "static_library": "staticlib",
+ "shared_library": "mh_dylib",
+ "loadable_module": "mh_bundle",
+ }[self.spec["type"]]
+
+ def _GetBundleBinaryPath(self):
+ """Returns the name of the bundle binary of by this target.
+ E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(
+ self.GetBundleExecutableFolderPath(), self.GetExecutableName()
+ )
+
+ def _GetStandaloneExecutableSuffix(self):
+ if "product_extension" in self.spec:
+ return "." + self.spec["product_extension"]
+ return {
+ "executable": "",
+ "static_library": ".a",
+ "shared_library": ".dylib",
+ "loadable_module": ".so",
+ }[self.spec["type"]]
+
+ def _GetStandaloneExecutablePrefix(self):
+ return self.spec.get(
+ "product_prefix",
+ {
+ "executable": "",
+ "static_library": "lib",
+ "shared_library": "lib",
+ # Non-bundled loadable_modules are called foo.so for some reason
+ # (that is, .so and no prefix) with the xcode build -- match that.
+ "loadable_module": "",
+ }[self.spec["type"]],
+ )
+
+ def _GetStandaloneBinaryPath(self):
+ """Returns the name of the non-bundle binary represented by this target.
+ E.g. hello_world. Only valid for non-bundles."""
+ assert not self._IsBundle()
+ assert self.spec["type"] in (
+ "executable",
+ "shared_library",
+ "static_library",
+ "loadable_module",
+ ), ("Unexpected type %s" % self.spec["type"])
+ target = self.spec["target_name"]
+ if self.spec["type"] == "static_library":
+ if target[:3] == "lib":
+ target = target[3:]
+ elif self.spec["type"] in ("loadable_module", "shared_library"):
+ if target[:3] == "lib":
+ target = target[3:]
+
+ target_prefix = self._GetStandaloneExecutablePrefix()
+ target = self.spec.get("product_name", target)
+ target_ext = self._GetStandaloneExecutableSuffix()
+ return target_prefix + target + target_ext
+
+ def GetExecutableName(self):
+ """Returns the executable name of the bundle represented by this target.
+ E.g. Chromium."""
+ if self._IsBundle():
+ return self.spec.get("product_name", self.spec["target_name"])
+ else:
+ return self._GetStandaloneBinaryPath()
+
+ def GetExecutablePath(self):
+ """Returns the qualified path to the primary executable of the bundle
+ represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium."""
+ if self._IsBundle():
+ return self._GetBundleBinaryPath()
+ else:
+ return self._GetStandaloneBinaryPath()
+
+ def GetActiveArchs(self, configname):
+ """Returns the architectures this target should be built for."""
+ config_settings = self.xcode_settings[configname]
+ xcode_archs_default = GetXcodeArchsDefault()
+ return xcode_archs_default.ActiveArchs(
+ config_settings.get("ARCHS"),
+ config_settings.get("VALID_ARCHS"),
+ config_settings.get("SDKROOT"),
+ )
+
+ def _GetSdkVersionInfoItem(self, sdk, infoitem):
+ # xcodebuild requires Xcode and can't run on Command Line Tools-only
+ # systems from 10.7 onward.
+ # Since the CLT has no SDK paths anyway, returning None is the
+ # most sensible route and should still do the right thing.
+ try:
+ return GetStdoutQuiet(["xcrun", "--sdk", sdk, infoitem])
+ except GypError:
+ pass
+
+ def _SdkRoot(self, configname):
+ if configname is None:
+ configname = self.configname
+ return self.GetPerConfigSetting("SDKROOT", configname, default="")
+
+ def _XcodePlatformPath(self, configname=None):
+ sdk_root = self._SdkRoot(configname)
+ if sdk_root not in XcodeSettings._platform_path_cache:
+ platform_path = self._GetSdkVersionInfoItem(
+ sdk_root, "--show-sdk-platform-path"
+ )
+ XcodeSettings._platform_path_cache[sdk_root] = platform_path
+ return XcodeSettings._platform_path_cache[sdk_root]
+
+ def _SdkPath(self, configname=None):
+ sdk_root = self._SdkRoot(configname)
+ if sdk_root.startswith("/"):
+ return sdk_root
+ return self._XcodeSdkPath(sdk_root)
+
+ def _XcodeSdkPath(self, sdk_root):
+ if sdk_root not in XcodeSettings._sdk_path_cache:
+ sdk_path = self._GetSdkVersionInfoItem(sdk_root, "--show-sdk-path")
+ XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
+ if sdk_root:
+ XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
+ return XcodeSettings._sdk_path_cache[sdk_root]
+
+ def _AppendPlatformVersionMinFlags(self, lst):
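+ # e.g. MACOSX_DEPLOYMENT_TARGET=10.9 appends -mmacosx-version-min=10.9.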
+ self._Appendf(lst, "MACOSX_DEPLOYMENT_TARGET", "-mmacosx-version-min=%s")
+ if "IPHONEOS_DEPLOYMENT_TARGET" in self._Settings():
+ # TODO: Implement this better?
+ sdk_path_basename = os.path.basename(self._SdkPath())
+ if sdk_path_basename.lower().startswith("iphonesimulator"):
+ self._Appendf(
+ lst, "IPHONEOS_DEPLOYMENT_TARGET", "-mios-simulator-version-min=%s"
+ )
+ else:
+ self._Appendf(
+ lst, "IPHONEOS_DEPLOYMENT_TARGET", "-miphoneos-version-min=%s"
+ )
+
+ def GetCflags(self, configname, arch=None):
+ """Returns flags that need to be added to .c, .cc, .m, and .mm
+ compilations."""
+ # These functions (this one and the similar ones below) do not offer
+ # complete emulation of all xcode_settings keys. They're implemented on
+ # demand.
+
+ self.configname = configname
+ cflags = []
+
+ sdk_root = self._SdkPath()
+ if "SDKROOT" in self._Settings() and sdk_root:
+ cflags.append("-isysroot %s" % sdk_root)
+
+ if self.header_map_path:
+ cflags.append("-I%s" % self.header_map_path)
+
+ if self._Test("CLANG_WARN_CONSTANT_CONVERSION", "YES", default="NO"):
+ cflags.append("-Wconstant-conversion")
+
+ if self._Test("GCC_CHAR_IS_UNSIGNED_CHAR", "YES", default="NO"):
+ cflags.append("-funsigned-char")
+
+ if self._Test("GCC_CW_ASM_SYNTAX", "YES", default="YES"):
+ cflags.append("-fasm-blocks")
+
+ if "GCC_DYNAMIC_NO_PIC" in self._Settings():
+ if self._Settings()["GCC_DYNAMIC_NO_PIC"] == "YES":
+ cflags.append("-mdynamic-no-pic")
+ else:
+ pass
+ # TODO: In this case, it depends on the target. Xcode passes
+ # -mdynamic-no-pic by default for executables and possibly static libs,
+ # according to mento.
+
+ if self._Test("GCC_ENABLE_PASCAL_STRINGS", "YES", default="YES"):
+ cflags.append("-mpascal-strings")
+
+ self._Appendf(cflags, "GCC_OPTIMIZATION_LEVEL", "-O%s", default="s")
+
+ if self._Test("GCC_GENERATE_DEBUGGING_SYMBOLS", "YES", default="YES"):
+ dbg_format = self._Settings().get("DEBUG_INFORMATION_FORMAT", "dwarf")
+ if dbg_format == "dwarf":
+ cflags.append("-gdwarf-2")
+ elif dbg_format == "stabs":
+ raise NotImplementedError("stabs debug format is not supported yet.")
+ elif dbg_format == "dwarf-with-dsym":
+ cflags.append("-gdwarf-2")
+ else:
+ raise NotImplementedError("Unknown debug format %s" % dbg_format)
+
+ if self._Settings().get("GCC_STRICT_ALIASING") == "YES":
+ cflags.append("-fstrict-aliasing")
+ elif self._Settings().get("GCC_STRICT_ALIASING") == "NO":
+ cflags.append("-fno-strict-aliasing")
+
+ if self._Test("GCC_SYMBOLS_PRIVATE_EXTERN", "YES", default="NO"):
+ cflags.append("-fvisibility=hidden")
+
+ if self._Test("GCC_TREAT_WARNINGS_AS_ERRORS", "YES", default="NO"):
+ cflags.append("-Werror")
+
+ if self._Test("GCC_WARN_ABOUT_MISSING_NEWLINE", "YES", default="NO"):
+ cflags.append("-Wnewline-eof")
+
+ # In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or
+ # llvm-gcc. It also requires a fairly recent libtool, and
+ # if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the
+ # path to the libLTO.dylib that matches the used clang.
+ if self._Test("LLVM_LTO", "YES", default="NO"):
+ cflags.append("-flto")
+
+ self._AppendPlatformVersionMinFlags(cflags)
+
+ # TODO:
+ if self._Test("COPY_PHASE_STRIP", "YES", default="NO"):
+ self._WarnUnimplemented("COPY_PHASE_STRIP")
+ self._WarnUnimplemented("GCC_DEBUGGING_SYMBOLS")
+ self._WarnUnimplemented("GCC_ENABLE_OBJC_EXCEPTIONS")
+
+ # TODO: This is exported correctly, but assigning to it is not supported.
+ self._WarnUnimplemented("MACH_O_TYPE")
+ self._WarnUnimplemented("PRODUCT_TYPE")
+
+ # If GYP_CROSSCOMPILE (--cross-compiling), disable architecture-specific
+ # additions and assume these will be provided as required via CC_host,
+ # CXX_host, CC_target and CXX_target.
+ if not gyp.common.CrossCompileRequested():
+ if arch is not None:
+ archs = [arch]
+ else:
+ assert self.configname
+ archs = self.GetActiveArchs(self.configname)
+ if len(archs) != 1:
+ # TODO: Supporting fat binaries will be annoying.
+ self._WarnUnimplemented("ARCHS")
+ archs = ["i386"]
+ cflags.append("-arch " + archs[0])
+
+ if archs[0] in ("i386", "x86_64"):
+ if self._Test("GCC_ENABLE_SSE3_EXTENSIONS", "YES", default="NO"):
+ cflags.append("-msse3")
+ if self._Test(
+ "GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS", "YES", default="NO"
+ ):
+ cflags.append("-mssse3") # Note 3rd 's'.
+ if self._Test("GCC_ENABLE_SSE41_EXTENSIONS", "YES", default="NO"):
+ cflags.append("-msse4.1")
+ if self._Test("GCC_ENABLE_SSE42_EXTENSIONS", "YES", default="NO"):
+ cflags.append("-msse4.2")
+
+ cflags += self._Settings().get("WARNING_CFLAGS", [])
+
+ if self._IsXCTest():
+ platform_root = self._XcodePlatformPath(configname)
+ if platform_root:
+ cflags.append("-F" + platform_root + "/Developer/Library/Frameworks/")
+
+ if sdk_root:
+ framework_root = sdk_root
+ else:
+ framework_root = ""
+ config = self.spec["configurations"][self.configname]
+ framework_dirs = config.get("mac_framework_dirs", [])
+ for directory in framework_dirs:
+ cflags.append("-F" + directory.replace("$(SDKROOT)", framework_root))
+
+ self.configname = None
+ return cflags
+
+ def GetCflagsC(self, configname):
+ """Returns flags that need to be added to .c, and .m compilations."""
+ self.configname = configname
+ cflags_c = []
+ if self._Settings().get("GCC_C_LANGUAGE_STANDARD", "") == "ansi":
+ cflags_c.append("-ansi")
+ else:
+ self._Appendf(cflags_c, "GCC_C_LANGUAGE_STANDARD", "-std=%s")
+ cflags_c += self._Settings().get("OTHER_CFLAGS", [])
+ self.configname = None
+ return cflags_c
+
+ def GetCflagsCC(self, configname):
+ """Returns flags that need to be added to .cc, and .mm compilations."""
+ self.configname = configname
+ cflags_cc = []
+
+ clang_cxx_language_standard = self._Settings().get(
+ "CLANG_CXX_LANGUAGE_STANDARD"
+ )
+ # Note: Don't map c++0x to c++11 so that c++0x can be used with older
+ # clangs that don't understand c++11 yet (like Xcode 4.2's).
+ if clang_cxx_language_standard:
+ cflags_cc.append("-std=%s" % clang_cxx_language_standard)
+
+ self._Appendf(cflags_cc, "CLANG_CXX_LIBRARY", "-stdlib=%s")
+
+ if self._Test("GCC_ENABLE_CPP_RTTI", "NO", default="YES"):
+ cflags_cc.append("-fno-rtti")
+ if self._Test("GCC_ENABLE_CPP_EXCEPTIONS", "NO", default="YES"):
+ cflags_cc.append("-fno-exceptions")
+ if self._Test("GCC_INLINES_ARE_PRIVATE_EXTERN", "YES", default="NO"):
+ cflags_cc.append("-fvisibility-inlines-hidden")
+ if self._Test("GCC_THREADSAFE_STATICS", "NO", default="YES"):
+ cflags_cc.append("-fno-threadsafe-statics")
+ # Note: This flag is a no-op for clang, it only has an effect for gcc.
+ if self._Test("GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO", "NO", default="YES"):
+ cflags_cc.append("-Wno-invalid-offsetof")
+
+ other_ccflags = []
+
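+ # Xcode's $(inherited) in OTHER_CPLUSPLUSFLAGS expands to OTHER_CFLAGS;
+ # the loop below emulates that.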
+ for flag in self._Settings().get("OTHER_CPLUSPLUSFLAGS", ["$(inherited)"]):
+ # TODO: More general variable expansion. Missing in many other places too.
+ if flag in ("$inherited", "$(inherited)", "${inherited}"):
+ flag = "$OTHER_CFLAGS"
+ if flag in ("$OTHER_CFLAGS", "$(OTHER_CFLAGS)", "${OTHER_CFLAGS}"):
+ other_ccflags += self._Settings().get("OTHER_CFLAGS", [])
+ else:
+ other_ccflags.append(flag)
+ cflags_cc += other_ccflags
+
+ self.configname = None
+ return cflags_cc
+
+ def _AddObjectiveCGarbageCollectionFlags(self, flags):
+ gc_policy = self._Settings().get("GCC_ENABLE_OBJC_GC", "unsupported")
+ if gc_policy == "supported":
+ flags.append("-fobjc-gc")
+ elif gc_policy == "required":
+ flags.append("-fobjc-gc-only")
+
+ def _AddObjectiveCARCFlags(self, flags):
+ if self._Test("CLANG_ENABLE_OBJC_ARC", "YES", default="NO"):
+ flags.append("-fobjc-arc")
+
+ def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
+ if self._Test(
+ "CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS", "YES", default="NO"
+ ):
+ flags.append("-Wobjc-missing-property-synthesis")
+
+ def GetCflagsObjC(self, configname):
+ """Returns flags that need to be added to .m compilations."""
+ self.configname = configname
+ cflags_objc = []
+ self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
+ self._AddObjectiveCARCFlags(cflags_objc)
+ self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
+ self.configname = None
+ return cflags_objc
+
+ def GetCflagsObjCC(self, configname):
+ """Returns flags that need to be added to .mm compilations."""
+ self.configname = configname
+ cflags_objcc = []
+ self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
+ self._AddObjectiveCARCFlags(cflags_objcc)
+ self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
+ if self._Test("GCC_OBJC_CALL_CXX_CDTORS", "YES", default="NO"):
+ cflags_objcc.append("-fobjc-call-cxx-cdtors")
+ self.configname = None
+ return cflags_objcc
+
+ def GetInstallNameBase(self):
+ """Return DYLIB_INSTALL_NAME_BASE for this target."""
+ # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
+ if self.spec["type"] != "shared_library" and (
+ self.spec["type"] != "loadable_module" or self._IsBundle()
+ ):
+ return None
+ install_base = self.GetPerTargetSetting(
+ "DYLIB_INSTALL_NAME_BASE",
+ default="/Library/Frameworks" if self._IsBundle() else "/usr/local/lib",
+ )
+ return install_base
+
+ def _StandardizePath(self, path):
+ """Do :standardizepath processing for path."""
+ # I'm not quite sure what :standardizepath does. Just call normpath(),
+ # but don't let @executable_path/../foo collapse to foo.
+ if "/" in path:
+ prefix, rest = "", path
+ if path.startswith("@"):
+ prefix, rest = path.split("/", 1)
+ rest = os.path.normpath(rest) # :standardizepath
+ path = os.path.join(prefix, rest)
+ return path
+
+ def GetInstallName(self):
+ """Return LD_DYLIB_INSTALL_NAME for this target."""
+ # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
+ if self.spec["type"] != "shared_library" and (
+ self.spec["type"] != "loadable_module" or self._IsBundle()
+ ):
+ return None
+
+ default_install_name = (
+ "$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)"
+ )
+ install_name = self.GetPerTargetSetting(
+ "LD_DYLIB_INSTALL_NAME", default=default_install_name
+ )
+
+ # Hardcode support for the variables used in chromium for now, to
+ # unblock people using the make build.
+ if "$" in install_name:
+ assert install_name in (
+ "$(DYLIB_INSTALL_NAME_BASE:standardizepath)/"
+ "$(WRAPPER_NAME)/$(PRODUCT_NAME)",
+ default_install_name,
+ ), (
+ "Variables in LD_DYLIB_INSTALL_NAME are not generally supported "
+ "yet in target '%s' (got '%s')"
+ % (self.spec["target_name"], install_name)
+ )
+
+ install_name = install_name.replace(
+ "$(DYLIB_INSTALL_NAME_BASE:standardizepath)",
+ self._StandardizePath(self.GetInstallNameBase()),
+ )
+ if self._IsBundle():
+ # These are only valid for bundles, hence the |if|.
+ install_name = install_name.replace(
+ "$(WRAPPER_NAME)", self.GetWrapperName()
+ )
+ install_name = install_name.replace(
+ "$(PRODUCT_NAME)", self.GetProductName()
+ )
+ else:
+ assert "$(WRAPPER_NAME)" not in install_name
+ assert "$(PRODUCT_NAME)" not in install_name
+
+ install_name = install_name.replace(
+ "$(EXECUTABLE_PATH)", self.GetExecutablePath()
+ )
+ return install_name
+
+ def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
+ """Checks if ldflag contains a filename and if so remaps it from
+ gyp-directory-relative to build-directory-relative."""
+ # This list is expanded on demand.
+ # They get matched as:
+ # -exported_symbols_list file
+ # -Wl,exported_symbols_list file
+ # -Wl,exported_symbols_list,file
+ LINKER_FILE = r"(\S+)"
+ WORD = r"\S+"
+ linker_flags = [
+ ["-exported_symbols_list", LINKER_FILE], # Needed for NaCl.
+ ["-unexported_symbols_list", LINKER_FILE],
+ ["-reexported_symbols_list", LINKER_FILE],
+ ["-sectcreate", WORD, WORD, LINKER_FILE], # Needed for remoting.
+ ]
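+ # e.g. "-Wl,-exported_symbols_list,foo/sym.list" gets "foo/sym.list"
+ # remapped to be relative to the build directory.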
+ for flag_pattern in linker_flags:
+ regex = re.compile("(?:-Wl,)?" + "[ ,]".join(flag_pattern))
+ m = regex.match(ldflag)
+ if m:
+ ldflag = (
+ ldflag[: m.start(1)]
+ + gyp_to_build_path(m.group(1))
+ + ldflag[m.end(1) :]
+ )
+ # Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
+ # TODO(thakis): Update ffmpeg.gyp):
+ if ldflag.startswith("-L"):
+ ldflag = "-L" + gyp_to_build_path(ldflag[len("-L") :])
+ return ldflag
+
+ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
+ """Returns flags that need to be passed to the linker.
+
+ Args:
+ configname: The name of the configuration to get ld flags for.
+ product_dir: The directory where products such as static and dynamic
+ libraries are placed. This is added to the library search path.
+ gyp_to_build_path: A function that converts paths relative to the
+ current gyp file to paths relative to the build directory.
+ """
+ self.configname = configname
+ ldflags = []
+
+ # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
+ # can contain entries that depend on this. Explicitly absolutify these.
+ for ldflag in self._Settings().get("OTHER_LDFLAGS", []):
+ ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
+
+ if self._Test("DEAD_CODE_STRIPPING", "YES", default="NO"):
+ ldflags.append("-Wl,-dead_strip")
+
+ if self._Test("PREBINDING", "YES", default="NO"):
+ ldflags.append("-Wl,-prebind")
+
+ self._Appendf(
+ ldflags, "DYLIB_COMPATIBILITY_VERSION", "-compatibility_version %s"
+ )
+ self._Appendf(ldflags, "DYLIB_CURRENT_VERSION", "-current_version %s")
+
+ self._AppendPlatformVersionMinFlags(ldflags)
+
+ if "SDKROOT" in self._Settings() and self._SdkPath():
+ ldflags.append("-isysroot " + self._SdkPath())
+
+ for library_path in self._Settings().get("LIBRARY_SEARCH_PATHS", []):
+ ldflags.append("-L" + gyp_to_build_path(library_path))
+
+ if "ORDER_FILE" in self._Settings():
+ ldflags.append(
+ "-Wl,-order_file "
+ + "-Wl,"
+ + gyp_to_build_path(self._Settings()["ORDER_FILE"])
+ )
+
+ if not gyp.common.CrossCompileRequested():
+ if arch is not None:
+ archs = [arch]
+ else:
+ assert self.configname
+ archs = self.GetActiveArchs(self.configname)
+ if len(archs) != 1:
+ # TODO: Supporting fat binaries will be annoying.
+ self._WarnUnimplemented("ARCHS")
+ archs = ["i386"]
+ ldflags.append("-arch " + archs[0])
+
+ # Xcode adds the product directory by default.
+ # Rewrite -L. to -L./ to work around http://www.openradar.me/25313838
+ ldflags.append("-L" + (product_dir if product_dir != "." else "./"))
+
+ install_name = self.GetInstallName()
+ if install_name and self.spec["type"] != "loadable_module":
+ ldflags.append("-install_name " + install_name.replace(" ", r"\ "))
+
+ for rpath in self._Settings().get("LD_RUNPATH_SEARCH_PATHS", []):
+ ldflags.append("-Wl,-rpath," + rpath)
+
+ sdk_root = self._SdkPath()
+ if not sdk_root:
+ sdk_root = ""
+ config = self.spec["configurations"][self.configname]
+ framework_dirs = config.get("mac_framework_dirs", [])
+ for directory in framework_dirs:
+ ldflags.append("-F" + directory.replace("$(SDKROOT)", sdk_root))
+
+ if self._IsXCTest():
+ platform_root = self._XcodePlatformPath(configname)
+ if sdk_root and platform_root:
+ ldflags.append("-F" + platform_root + "/Developer/Library/Frameworks/")
+ ldflags.append("-framework XCTest")
+
+ is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
+ if sdk_root and is_extension:
+ # Adds the link flags for extensions. These flags are common for all
+ # extensions and provide the loader and main function.
+ # They reflect the compilation options used by Xcode to compile
+ # extensions.
+ xcode_version, _ = XcodeVersion()
+ if xcode_version < "0900":
+ ldflags.append("-lpkstart")
+ ldflags.append(
+ sdk_root
+ + "/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit"
+ )
+ else:
+ ldflags.append("-e _NSExtensionMain")
+ ldflags.append("-fapplication-extension")
+
+ self._Appendf(ldflags, "CLANG_CXX_LIBRARY", "-stdlib=%s")
+
+ self.configname = None
+ return ldflags
+
+ def GetLibtoolflags(self, configname):
+ """Returns flags that need to be passed to the static linker.
+
+ Args:
+ configname: The name of the configuration to get ld flags for.
+ """
+ self.configname = configname
+ libtoolflags = []
+
+ for libtoolflag in self._Settings().get("OTHER_LDFLAGS", []):
+ libtoolflags.append(libtoolflag)
+ # TODO(thakis): ARCHS?
+
+ self.configname = None
+ return libtoolflags
+
+ def GetPerTargetSettings(self):
+ """Gets a list of all the per-target settings. This will only fetch keys
+ whose values are the same across all configurations."""
+ first_pass = True
+ result = {}
+ for configname in sorted(self.xcode_settings.keys()):
+ if first_pass:
+ result = dict(self.xcode_settings[configname])
+ first_pass = False
+ else:
+ for key, value in self.xcode_settings[configname].items():
+ if key not in result:
+ continue
+ elif result[key] != value:
+ del result[key]
+ return result
+
+ def GetPerConfigSetting(self, setting, configname, default=None):
+ if configname in self.xcode_settings:
+ return self.xcode_settings[configname].get(setting, default)
+ else:
+ return self.GetPerTargetSetting(setting, default)
+
+ def GetPerTargetSetting(self, setting, default=None):
+ """Tries to get xcode_settings.setting from spec. Assumes that the setting
+ has the same value in all configurations and throws otherwise."""
+ is_first_pass = True
+ result = None
+ for configname in sorted(self.xcode_settings.keys()):
+ if is_first_pass:
+ result = self.xcode_settings[configname].get(setting, None)
+ is_first_pass = False
+ else:
+ assert result == self.xcode_settings[configname].get(setting, None), (
+ "Expected per-target setting for '%s', got per-config setting "
+ "(target %s)" % (setting, self.spec["target_name"])
+ )
+ if result is None:
+ return default
+ return result
+
+ def _GetStripPostbuilds(self, configname, output_binary, quiet):
+ """Returns a list of shell commands that contain the shell commands
+ necessary to strip this target's binary. These should be run as postbuilds
+ before the actual postbuilds run."""
+ self.configname = configname
+
+ result = []
+ if self._Test("DEPLOYMENT_POSTPROCESSING", "YES", default="NO") and self._Test(
+ "STRIP_INSTALLED_PRODUCT", "YES", default="NO"
+ ):
+
+ default_strip_style = "debugging"
+ if (
+ self.spec["type"] == "loadable_module" or self._IsIosAppExtension()
+ ) and self._IsBundle():
+ default_strip_style = "non-global"
+ elif self.spec["type"] == "executable":
+ default_strip_style = "all"
+
+ strip_style = self._Settings().get("STRIP_STYLE", default_strip_style)
+ strip_flags = {"all": "", "non-global": "-x", "debugging": "-S"}[
+ strip_style
+ ]
+
+ explicit_strip_flags = self._Settings().get("STRIPFLAGS", "")
+ if explicit_strip_flags:
+ strip_flags += " " + _NormalizeEnvVarReferences(explicit_strip_flags)
+
+ if not quiet:
+ result.append("echo STRIP\\(%s\\)" % self.spec["target_name"])
+ result.append(f"strip {strip_flags} {output_binary}")
+
+ self.configname = None
+ return result
+
+ def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
+ """Returns a list of shell commands that contain the shell commands
+ necessary to massage this target's debug information. These should be run
+ as postbuilds before the actual postbuilds run."""
+ self.configname = configname
+
+ # For static libraries, no dSYMs are created.
+ result = []
+ if (
+ self._Test("GCC_GENERATE_DEBUGGING_SYMBOLS", "YES", default="YES")
+ and self._Test(
+ "DEBUG_INFORMATION_FORMAT", "dwarf-with-dsym", default="dwarf"
+ )
+ and self.spec["type"] != "static_library"
+ ):
+ if not quiet:
+ result.append("echo DSYMUTIL\\(%s\\)" % self.spec["target_name"])
+ result.append("dsymutil {} -o {}".format(output_binary, output + ".dSYM"))
+
+ self.configname = None
+ return result
+
+ def _GetTargetPostbuilds(self, configname, output, output_binary, quiet=False):
+ """Returns a list of shell commands that contain the shell commands
+ to run as postbuilds for this target, before the actual postbuilds."""
+ # dSYMs need to build before stripping happens.
+ return self._GetDebugInfoPostbuilds(
+ configname, output, output_binary, quiet
+ ) + self._GetStripPostbuilds(configname, output_binary, quiet)
+
+ def _GetIOSPostbuilds(self, configname, output_binary):
+ """Return a shell command to codesign the iOS output binary so it can
+ be deployed to a device. This should be run as the very last step of the
+ build."""
+ if not (
+ self.isIOS
+ and (self.spec["type"] == "executable" or self._IsXCTest())
+ or self.IsIosFramework()
+ ):
+ return []
+
+ postbuilds = []
+ product_name = self.GetFullProductName()
+ settings = self.xcode_settings[configname]
+
+ # Xcode expects XCTests to be copied into the TEST_HOST dir.
+ if self._IsXCTest():
+ source = os.path.join("${BUILT_PRODUCTS_DIR}", product_name)
+ test_host = os.path.dirname(settings.get("TEST_HOST"))
+ xctest_destination = os.path.join(test_host, "PlugIns", product_name)
+ postbuilds.extend([f"ditto {source} {xctest_destination}"])
+
+ key = self._GetIOSCodeSignIdentityKey(settings)
+ if not key:
+ return postbuilds
+
+ # Warn for any unimplemented signing xcode keys.
+ unimpl = ["OTHER_CODE_SIGN_FLAGS"]
+ unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
+ if unimpl:
+ print(
+ "Warning: Some codesign keys not implemented, ignoring: %s"
+ % ", ".join(sorted(unimpl))
+ )
+
+ if self._IsXCTest():
+ # For device xctests, Xcode copies two extra frameworks into $TEST_HOST.
+ test_host = os.path.dirname(settings.get("TEST_HOST"))
+ frameworks_dir = os.path.join(test_host, "Frameworks")
+ platform_root = self._XcodePlatformPath(configname)
+ frameworks = [
+ "Developer/Library/PrivateFrameworks/IDEBundleInjection.framework",
+ "Developer/Library/Frameworks/XCTest.framework",
+ ]
+ for framework in frameworks:
+ source = os.path.join(platform_root, framework)
+ destination = os.path.join(frameworks_dir, os.path.basename(framework))
+ postbuilds.extend([f"ditto {source} {destination}"])
+
+ # Then re-sign everything with 'preserve=True'
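+ # (|destination| below still refers to the last framework copied above,
+ # i.e. XCTest.framework.)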
+ postbuilds.extend(
+ [
+ '%s code-sign-bundle "%s" "%s" "%s" "%s" %s'
+ % (
+ os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"),
+ key,
+ settings.get("CODE_SIGN_ENTITLEMENTS", ""),
+ settings.get("PROVISIONING_PROFILE", ""),
+ destination,
+ True,
+ )
+ ]
+ )
+ plugin_dir = os.path.join(test_host, "PlugIns")
+ targets = [os.path.join(plugin_dir, product_name), test_host]
+ for target in targets:
+ postbuilds.extend(
+ [
+ '%s code-sign-bundle "%s" "%s" "%s" "%s" %s'
+ % (
+ os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"),
+ key,
+ settings.get("CODE_SIGN_ENTITLEMENTS", ""),
+ settings.get("PROVISIONING_PROFILE", ""),
+ target,
+ True,
+ )
+ ]
+ )
+
+ postbuilds.extend(
+ [
+ '%s code-sign-bundle "%s" "%s" "%s" "%s" %s'
+ % (
+ os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"),
+ key,
+ settings.get("CODE_SIGN_ENTITLEMENTS", ""),
+ settings.get("PROVISIONING_PROFILE", ""),
+ os.path.join("${BUILT_PRODUCTS_DIR}", product_name),
+ False,
+ )
+ ]
+ )
+ return postbuilds
+
+ def _GetIOSCodeSignIdentityKey(self, settings):
+ identity = settings.get("CODE_SIGN_IDENTITY")
+ if not identity:
+ return None
+ if identity not in XcodeSettings._codesigning_key_cache:
+ output = subprocess.check_output(
+ ["security", "find-identity", "-p", "codesigning", "-v"]
+ )
+ for line in output.splitlines():
+ if identity in line:
+ fingerprint = line.split()[1]
+ cache = XcodeSettings._codesigning_key_cache
+ assert identity not in cache or fingerprint == cache[identity], (
+ "Multiple codesigning fingerprints for identity: %s" % identity
+ )
+ XcodeSettings._codesigning_key_cache[identity] = fingerprint
+ return XcodeSettings._codesigning_key_cache.get(identity, "")
+
+ def AddImplicitPostbuilds(
+ self, configname, output, output_binary, postbuilds=None, quiet=False
+ ):
+ """Returns a list of shell commands that should run before and after
+ |postbuilds|."""
+ assert output_binary is not None
+ # Use None instead of a mutable default argument.
+ postbuilds = postbuilds or []
+ pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
+ post = self._GetIOSPostbuilds(configname, output_binary)
+ return pre + postbuilds + post
+
+ def _AdjustLibrary(self, library, config_name=None):
+ if library.endswith(".framework"):
+ l_flag = "-framework " + os.path.splitext(os.path.basename(library))[0]
+ else:
+ m = self.library_re.match(library)
+ if m:
+ l_flag = "-l" + m.group(1)
+ else:
+ l_flag = library
+
+ sdk_root = self._SdkPath(config_name)
+ if not sdk_root:
+ sdk_root = ""
+ # Xcode 7 started shipping with ".tbd" (text based stubs) files instead of
+ # ".dylib" without providing real support for them. What it does, for
+ # "/usr/lib" libraries, is emit "-L/usr/lib -lname", which depends on the
+ # library order and causes collisions when building Chrome.
+ #
+ # Instead, substitute ".dylib" with ".tbd" in the generated project when the
+ # following conditions are both true:
+ # - the library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib",
+ # - the ".dylib" file does not exist but a ".tbd" file does.
+ library = l_flag.replace("$(SDKROOT)", sdk_root)
+ if l_flag.startswith("$(SDKROOT)"):
+ basename, ext = os.path.splitext(library)
+ if ext == ".dylib" and not os.path.exists(library):
+ tbd_library = basename + ".tbd"
+ if os.path.exists(tbd_library):
+ library = tbd_library
+ return library
+
+ def AdjustLibraries(self, libraries, config_name=None):
+ """Transforms entries like 'Cocoa.framework' in libraries into entries like
+ '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
+ """
+ libraries = [self._AdjustLibrary(library, config_name) for library in libraries]
+ return libraries
+
+ def _BuildMachineOSBuild(self):
+ return GetStdout(["sw_vers", "-buildVersion"])
+
+ def _XcodeIOSDeviceFamily(self, configname):
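+ # TARGETED_DEVICE_FAMILY is a comma-separated string, e.g. "1,2" for
+ # iPhone and iPad.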
+ family = self.xcode_settings[configname].get("TARGETED_DEVICE_FAMILY", "1")
+ return [int(x) for x in family.split(",")]
+
+ def GetExtraPlistItems(self, configname=None):
+ """Returns a dictionary with extra items to insert into Info.plist."""
+ if configname not in XcodeSettings._plist_cache:
+ cache = {}
+ cache["BuildMachineOSBuild"] = self._BuildMachineOSBuild()
+
+ xcode_version, xcode_build = XcodeVersion()
+ cache["DTXcode"] = xcode_version
+ cache["DTXcodeBuild"] = xcode_build
+ compiler = self.xcode_settings[configname].get("GCC_VERSION")
+ if compiler is not None:
+ cache["DTCompiler"] = compiler
+
+ sdk_root = self._SdkRoot(configname)
+ if not sdk_root:
+ sdk_root = self._DefaultSdkRoot()
+ sdk_version = self._GetSdkVersionInfoItem(sdk_root, "--show-sdk-version")
+ cache["DTSDKName"] = sdk_root + (sdk_version or "")
+ if xcode_version >= "0720":
+ cache["DTSDKBuild"] = self._GetSdkVersionInfoItem(
+ sdk_root, "--show-sdk-build-version"
+ )
+ elif xcode_version >= "0430":
+ cache["DTSDKBuild"] = sdk_version
+ else:
+ cache["DTSDKBuild"] = cache["BuildMachineOSBuild"]
+
+ if self.isIOS:
+ cache["MinimumOSVersion"] = self.xcode_settings[configname].get(
+ "IPHONEOS_DEPLOYMENT_TARGET"
+ )
+ cache["DTPlatformName"] = sdk_root
+ cache["DTPlatformVersion"] = sdk_version
+
+ if configname.endswith("iphoneos"):
+ cache["CFBundleSupportedPlatforms"] = ["iPhoneOS"]
+ cache["DTPlatformBuild"] = cache["DTSDKBuild"]
+ else:
+ cache["CFBundleSupportedPlatforms"] = ["iPhoneSimulator"]
+ # This is weird, but Xcode sets DTPlatformBuild to an empty field
+ # for simulator builds.
+ cache["DTPlatformBuild"] = ""
+ XcodeSettings._plist_cache[configname] = cache
+
+ # Include extra plist items that are per-target, not per global
+ # XcodeSettings.
+ items = dict(XcodeSettings._plist_cache[configname])
+ if self.isIOS:
+ items["UIDeviceFamily"] = self._XcodeIOSDeviceFamily(configname)
+ return items
+
+ def _DefaultSdkRoot(self):
+ """Returns the default SDKROOT to use.
+
+ Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
+ project, then the environment variable was empty. Starting with this
+ version, Xcode uses the name of the newest SDK installed.
+ """
+ xcode_version, _ = XcodeVersion()
+ if xcode_version < "0500":
+ return ""
+ default_sdk_path = self._XcodeSdkPath("")
+ default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
+ if default_sdk_root:
+ return default_sdk_root
+ try:
+ all_sdks = GetStdout(["xcodebuild", "-showsdks"])
+ except GypError:
+ # If xcodebuild fails, there will be no valid SDKs
+ return ""
+ for line in all_sdks.splitlines():
+ items = line.split()
+ if len(items) >= 3 and items[-2] == "-sdk":
+ sdk_root = items[-1]
+ sdk_path = self._XcodeSdkPath(sdk_root)
+ if sdk_path == default_sdk_path:
+ return sdk_root
+ return ""
+
+
+class MacPrefixHeader:
+ """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
+
+ This feature consists of several pieces:
+ * If GCC_PREFIX_HEADER is present, all compilations in that project get an
+ additional |-include path_to_prefix_header| cflag.
+ * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
+ instead compiled, and all other compilations in the project get an
+ additional |-include path_to_compiled_header| instead.
+ * Compiled prefix headers have the extension gch. There is one gch file for
+ every language used in the project (c, cc, m, mm), since gch files for
+ different languages aren't compatible.
+ * gch files themselves are built with the target's normal cflags, but they
+ obviously don't get the |-include| flag. Instead, they need a -x flag that
+ describes their language.
+ * All o files in the target need to depend on the gch file, to make sure
+ it's built before any o file is built.
+
+ This class helps with some of these tasks, but it needs help from the build
+ system for writing dependencies to the gch files, for writing build commands
+ for the gch files, and for figuring out the location of the gch files.
+ """
+
+ def __init__(
+ self, xcode_settings, gyp_path_to_build_path, gyp_path_to_build_output
+ ):
+ """If xcode_settings is None, all methods on this class are no-ops.
+
+ Args:
+ gyp_path_to_build_path: A function that takes a gyp-relative path,
+ and returns a path relative to the build directory.
+ gyp_path_to_build_output: A function that takes a gyp-relative path and
+ a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
+ to where the output of precompiling that path for that language
+ should be placed (without the trailing '.gch').
+ """
+ # This doesn't support per-configuration prefix headers. Good enough
+ # for now.
+ self.header = None
+ self.compile_headers = False
+ if xcode_settings:
+ self.header = xcode_settings.GetPerTargetSetting("GCC_PREFIX_HEADER")
+ self.compile_headers = (
+ xcode_settings.GetPerTargetSetting(
+ "GCC_PRECOMPILE_PREFIX_HEADER", default="NO"
+ )
+ != "NO"
+ )
+ self.compiled_headers = {}
+ if self.header:
+ if self.compile_headers:
+ for lang in ["c", "cc", "m", "mm"]:
+ self.compiled_headers[lang] = gyp_path_to_build_output(
+ self.header, lang
+ )
+ self.header = gyp_path_to_build_path(self.header)
+
+ def _CompiledHeader(self, lang, arch):
+ assert self.compile_headers
+ h = self.compiled_headers[lang]
+ if arch:
+ h += "." + arch
+ return h
+
+ def GetInclude(self, lang, arch=None):
+ """Gets the cflags to include the prefix header for language |lang|."""
+ if self.compile_headers and lang in self.compiled_headers:
+ return "-include %s" % self._CompiledHeader(lang, arch)
+ elif self.header:
+ return "-include %s" % self.header
+ else:
+ return ""
+
+ def _Gch(self, lang, arch):
+ """Returns the actual file name of the prefix header for language |lang|."""
+ assert self.compile_headers
+ return self._CompiledHeader(lang, arch) + ".gch"
+
+ def GetObjDependencies(self, sources, objs, arch=None):
+ """Given a list of source files and the corresponding object files, returns
+ a list of (source, object, gch) tuples, where |gch| is the build-directory
+ relative path to the gch file each object file depends on. |sources[i]|
+ has to be the source file belonging to |objs[i]|."""
+ if not self.header or not self.compile_headers:
+ return []
+
+ result = []
+ for source, obj in zip(sources, objs):
+ ext = os.path.splitext(source)[1]
+ lang = {
+ ".c": "c",
+ ".cpp": "cc",
+ ".cc": "cc",
+ ".cxx": "cc",
+ ".m": "m",
+ ".mm": "mm",
+ }.get(ext, None)
+ if lang:
+ result.append((source, obj, self._Gch(lang, arch)))
+ return result
+
+ def GetPchBuildCommands(self, arch=None):
+ """Returns [(path_to_gch, language_flag, language, header)].
+ |path_to_gch| and |header| are relative to the build directory.
+ """
+ if not self.header or not self.compile_headers:
+ return []
+ return [
+ (self._Gch("c", arch), "-x c-header", "c", self.header),
+ (self._Gch("cc", arch), "-x c++-header", "cc", self.header),
+ (self._Gch("m", arch), "-x objective-c-header", "m", self.header),
+ (self._Gch("mm", arch), "-x objective-c++-header", "mm", self.header),
+ ]
+
+
+def XcodeVersion():
+ """Returns a tuple of version and build version of installed Xcode."""
+ # `xcodebuild -version` output looks like
+ # Xcode 4.6.3
+ # Build version 4H1503
+ # or like
+ # Xcode 3.2.6
+ # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
+ # BuildVersion: 10M2518
+ # Convert that to ('0463', '4H1503') or ('0326', '10M2518').
+ global XCODE_VERSION_CACHE
+ if XCODE_VERSION_CACHE:
+ return XCODE_VERSION_CACHE
+ version = ""
+ build = ""
+ try:
+ version_list = GetStdoutQuiet(["xcodebuild", "-version"]).splitlines()
+ # In some circumstances xcodebuild exits 0 but doesn't return
+ # the right results; for example, a user on 10.7 or 10.8 with
+ # a bogus path set via xcode-select.
+ # In that case this may be a CLT-only install, so fall back to
+ # checking that version.
+ if len(version_list) < 2:
+ raise GypError("xcodebuild returned unexpected results")
+ version = version_list[0].split()[-1] # Last word on first line
+ build = version_list[-1].split()[-1] # Last word on last line
+ except GypError: # Xcode not installed, so look for the Xcode Command Line Tools
+ version = CLTVersion() # macOS Catalina returns 11.0.0.0.1.1567737322
+ if not version:
+ raise GypError("No Xcode or CLT version detected!")
+ # Be careful to convert "4.2.3" to "0423" and "11.0.0" to "1100":
+ version = version.split(".")[:3] # Just major, minor, micro
+ version[0] = version[0].zfill(2) # Add a leading zero if major is one digit
+ version = ("".join(version) + "00")[:4] # Limit to exactly four characters
+ XCODE_VERSION_CACHE = (version, build)
+ return XCODE_VERSION_CACHE
+
+
+# This function ported from the logic in Homebrew's CLT version check
+def CLTVersion():
+ """Returns the version of command-line tools from pkgutil."""
+ # pkgutil output looks like
+ # package-id: com.apple.pkg.CLTools_Executables
+ # version: 5.0.1.0.1.1382131676
+ # volume: /
+ # location: /
+ # install-time: 1382544035
+ # groups: com.apple.FindSystemFiles.pkg-group
+ # com.apple.DevToolsBoth.pkg-group
+ # com.apple.DevToolsNonRelocatableShared.pkg-group
+ STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
+ FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
+ MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
+
+ regex = re.compile("version: (?P<version>.+)")
+ for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
+ try:
+ output = GetStdout(["/usr/sbin/pkgutil", "--pkg-info", key])
+ return re.search(regex, output).groupdict()["version"]
+ except GypError:
+ continue
+
+ regex = re.compile(r'Command Line Tools for Xcode\s+(?P<version>\S+)')
+ try:
+ output = GetStdout(["/usr/sbin/softwareupdate", "--history"])
+ return re.search(regex, output).groupdict()["version"]
+ except GypError:
+ return None
+
+
+def GetStdoutQuiet(cmdlist):
+ """Returns the content of standard output returned by invoking |cmdlist|.
+ Ignores stderr.
+ Raises |GypError| if the command returns with a non-zero return code."""
+ job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out = job.communicate()[0].decode("utf-8")
+ if job.returncode != 0:
+ raise GypError("Error %d running %s" % (job.returncode, cmdlist[0]))
+ return out.rstrip("\n")
+
+
+def GetStdout(cmdlist):
+ """Returns the content of standard output returned by invoking |cmdlist|.
+ Raises |GypError| if the command returns with a non-zero return code."""
+ job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
+ out = job.communicate()[0].decode("utf-8")
+ if job.returncode != 0:
+ sys.stderr.write(out + "\n")
+ raise GypError("Error %d running %s" % (job.returncode, cmdlist[0]))
+ return out.rstrip("\n")
+
+
+def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
+ """Merges the global xcode_settings dictionary into each configuration of the
+ target represented by spec. For keys that are both in the global and the local
+ xcode_settings dict, the local key gets precedence.
+ """
+ # The xcode generator special-cases global xcode_settings and does something
+ # that amounts to merging in the global xcode_settings into each local
+ # xcode_settings dict.
+ global_xcode_settings = global_dict.get("xcode_settings", {})
+ for config in spec["configurations"].values():
+ if "xcode_settings" in config:
+ new_settings = global_xcode_settings.copy()
+ new_settings.update(config["xcode_settings"])
+ config["xcode_settings"] = new_settings
+
+
+def IsMacBundle(flavor, spec):
+ """Returns if |spec| should be treated as a bundle.
+
+ Bundles are directories with a certain subdirectory structure, instead of
+ just a single file. Bundle rules do not produce a binary but also package
+ resources into that directory."""
+ is_mac_bundle = (
+ int(spec.get("mac_xctest_bundle", 0)) != 0
+ or int(spec.get("mac_xcuitest_bundle", 0)) != 0
+ or (int(spec.get("mac_bundle", 0)) != 0 and flavor == "mac")
+ )
+
+ if is_mac_bundle:
+ assert spec["type"] != "none", (
+ 'mac_bundle targets cannot have type none (target "%s")'
+ % spec["target_name"]
+ )
+ return is_mac_bundle
+
+
+def GetMacBundleResources(product_dir, xcode_settings, resources):
+ """Yields (output, resource) pairs for every resource in |resources|.
+ Only call this for mac bundle targets.
+
+ Args:
+ product_dir: Path to the directory containing the output bundle,
+ relative to the build directory.
+ xcode_settings: The XcodeSettings of the current target.
+ resources: A list of bundle resources, relative to the build directory.
+ """
+ dest = os.path.join(product_dir, xcode_settings.GetBundleResourceFolder())
+ for res in resources:
+ output = dest
+
+ # The make generator doesn't support it, so forbid it everywhere
+ # to keep the generators more interchangeable.
+ assert " " not in res, "Spaces in resource filenames not supported (%s)" % res
+
+ # Split into (path,file).
+ res_parts = os.path.split(res)
+
+ # Now split the path into (prefix,maybe.lproj).
+ lproj_parts = os.path.split(res_parts[0])
+ # If the resource lives in a .lproj bundle, add that to the destination.
+ if lproj_parts[1].endswith(".lproj"):
+ output = os.path.join(output, lproj_parts[1])
+
+ output = os.path.join(output, res_parts[1])
+ # Compiled XIB files are referred to by .nib.
+ if output.endswith(".xib"):
+ output = os.path.splitext(output)[0] + ".nib"
+ # Compiled storyboard files are referred to by .storyboardc.
+ if output.endswith(".storyboard"):
+ output = os.path.splitext(output)[0] + ".storyboardc"
+
+ yield output, res
+
+
+def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
+ """Returns (info_plist, dest_plist, defines, extra_env), where:
+ * |info_plist| is the source plist path, relative to the
+ build directory,
+ * |dest_plist| is the destination plist path, relative to the
+ build directory,
+ * |defines| is a list of preprocessor defines (empty if the plist
+ shouldn't be preprocessed),
+ * |extra_env| is a dict of env variables that should be exported when
+ invoking |mac_tool copy-info-plist|.
+
+ Only call this for mac bundle targets.
+
+ Args:
+ product_dir: Path to the directory containing the output bundle,
+ relative to the build directory.
+ xcode_settings: The XcodeSettings of the current target.
+ gyp_path_to_build_path: A function that converts paths relative to the
+ current gyp file to paths relative to the build directory.
+ """
+ info_plist = xcode_settings.GetPerTargetSetting("INFOPLIST_FILE")
+ if not info_plist:
+ return None, None, [], {}
+
+ # The make generator doesn't support it, so forbid it everywhere
+ # to keep the generators more interchangeable.
+ assert " " not in info_plist, (
+ "Spaces in Info.plist filenames not supported (%s)" % info_plist
+ )
+
+ info_plist = gyp_path_to_build_path(info_plist)
+
+ # If explicitly set to preprocess the plist, invoke the C preprocessor and
+ # specify any defines as -D flags.
+ if (
+ xcode_settings.GetPerTargetSetting("INFOPLIST_PREPROCESS", default="NO")
+ == "YES"
+ ):
+ # Create an intermediate file based on the path.
+ defines = shlex.split(
+ xcode_settings.GetPerTargetSetting(
+ "INFOPLIST_PREPROCESSOR_DEFINITIONS", default=""
+ )
+ )
+ else:
+ defines = []
+
+ dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
+ extra_env = xcode_settings.GetPerTargetSettings()
+
+ return info_plist, dest_plist, defines, extra_env
+
+
+def _GetXcodeEnv(
+ xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None
+):
+ """Return the environment variables that Xcode would set. See
+ http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
+ for a full list.
+
+ Args:
+ xcode_settings: An XcodeSettings object. If this is None, this function
+ returns an empty dict.
+ built_products_dir: Absolute path to the built products dir.
+ srcroot: Absolute path to the source root.
+ configuration: The build configuration name.
+ additional_settings: An optional dict with more values to add to the
+ result.
+ """
+
+ if not xcode_settings:
+ return {}
+
+ # This function is considered a friend of XcodeSettings, so let it reach into
+ # its implementation details.
+ spec = xcode_settings.spec
+
+ # These are filled in on an as-needed basis.
+ env = {
+ "BUILT_FRAMEWORKS_DIR": built_products_dir,
+ "BUILT_PRODUCTS_DIR": built_products_dir,
+ "CONFIGURATION": configuration,
+ "PRODUCT_NAME": xcode_settings.GetProductName(),
+ # For FULL_PRODUCT_NAME see:
+ # /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec # noqa: E501
+ "SRCROOT": srcroot,
+ "SOURCE_ROOT": "${SRCROOT}",
+ # This is not true for static libraries, but currently the env is only
+ # written for bundles:
+ "TARGET_BUILD_DIR": built_products_dir,
+ "TEMP_DIR": "${TMPDIR}",
+ "XCODE_VERSION_ACTUAL": XcodeVersion()[0],
+ }
+ if xcode_settings.GetPerConfigSetting("SDKROOT", configuration):
+ env["SDKROOT"] = xcode_settings._SdkPath(configuration)
+ else:
+ env["SDKROOT"] = ""
+
+ if xcode_settings.mac_toolchain_dir:
+ env["DEVELOPER_DIR"] = xcode_settings.mac_toolchain_dir
+
+ if spec["type"] in (
+ "executable",
+ "static_library",
+ "shared_library",
+ "loadable_module",
+ ):
+ env["EXECUTABLE_NAME"] = xcode_settings.GetExecutableName()
+ env["EXECUTABLE_PATH"] = xcode_settings.GetExecutablePath()
+ env["FULL_PRODUCT_NAME"] = xcode_settings.GetFullProductName()
+ mach_o_type = xcode_settings.GetMachOType()
+ if mach_o_type:
+ env["MACH_O_TYPE"] = mach_o_type
+ env["PRODUCT_TYPE"] = xcode_settings.GetProductType()
+ if xcode_settings._IsBundle():
+ # xcodeproj_file.py sets the same Xcode subfolder value for this as for
+ # FRAMEWORKS_FOLDER_PATH so Xcode builds will actually use FFP's value.
+ env["BUILT_FRAMEWORKS_DIR"] = os.path.join(
+ built_products_dir + os.sep + xcode_settings.GetBundleFrameworksFolderPath()
+ )
+ env["CONTENTS_FOLDER_PATH"] = xcode_settings.GetBundleContentsFolderPath()
+ env["EXECUTABLE_FOLDER_PATH"] = xcode_settings.GetBundleExecutableFolderPath()
+ env[
+ "UNLOCALIZED_RESOURCES_FOLDER_PATH"
+ ] = xcode_settings.GetBundleResourceFolder()
+ env["JAVA_FOLDER_PATH"] = xcode_settings.GetBundleJavaFolderPath()
+ env["FRAMEWORKS_FOLDER_PATH"] = xcode_settings.GetBundleFrameworksFolderPath()
+ env[
+ "SHARED_FRAMEWORKS_FOLDER_PATH"
+ ] = xcode_settings.GetBundleSharedFrameworksFolderPath()
+ env[
+ "SHARED_SUPPORT_FOLDER_PATH"
+ ] = xcode_settings.GetBundleSharedSupportFolderPath()
+ env["PLUGINS_FOLDER_PATH"] = xcode_settings.GetBundlePlugInsFolderPath()
+ env["XPCSERVICES_FOLDER_PATH"] = xcode_settings.GetBundleXPCServicesFolderPath()
+ env["INFOPLIST_PATH"] = xcode_settings.GetBundlePlistPath()
+ env["WRAPPER_NAME"] = xcode_settings.GetWrapperName()
+
+ install_name = xcode_settings.GetInstallName()
+ if install_name:
+ env["LD_DYLIB_INSTALL_NAME"] = install_name
+ install_name_base = xcode_settings.GetInstallNameBase()
+ if install_name_base:
+ env["DYLIB_INSTALL_NAME_BASE"] = install_name_base
+ xcode_version, _ = XcodeVersion()
+ if xcode_version >= "0500" and not env.get("SDKROOT"):
+ sdk_root = xcode_settings._SdkRoot(configuration)
+ if not sdk_root:
+ sdk_root = xcode_settings._XcodeSdkPath("")
+ if sdk_root is None:
+ sdk_root = ""
+ env["SDKROOT"] = sdk_root
+
+ if not additional_settings:
+ additional_settings = {}
+ else:
+ # Flatten lists to strings.
+ for k in additional_settings:
+ if not isinstance(additional_settings[k], str):
+ additional_settings[k] = " ".join(additional_settings[k])
+ additional_settings.update(env)
+
+ for k in additional_settings:
+ additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])
+
+ return additional_settings
+
+
+def _NormalizeEnvVarReferences(s):
+ """Takes a string containing variable references in the form ${FOO}, $(FOO),
+ or $FOO, and returns a string with all variable references in the form ${FOO}.
+ """
+ # $FOO -> ${FOO}
+ s = re.sub(r"\$([a-zA-Z_][a-zA-Z0-9_]*)", r"${\1}", s)
+
+ # $(FOO) -> ${FOO}
+ matches = re.findall(r"(\$\(([a-zA-Z0-9\-_]+)\))", s)
+ for match in matches:
+ to_replace, variable = match
+ assert "$(" not in match, "$($(FOO)) variables not supported: " + to_replace
+ s = s.replace(to_replace, "${" + variable + "}")
+
+ return s
+
+
+def ExpandEnvVars(string, expansions):
+ """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
+ expansions list. If the variable expands to something that references
+ another variable, this variable is expanded as well if it's in expansions --
+ until no variables present in expansions are left."""
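+ # e.g. with expansions [("FOO", "bar")], "$(FOO)/baz" expands to "bar/baz".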
+ for k, v in reversed(expansions):
+ string = string.replace("${" + k + "}", v)
+ string = string.replace("$(" + k + ")", v)
+ string = string.replace("$" + k, v)
+ return string
+
+
+def _TopologicallySortedEnvVarKeys(env):
+ """Takes a dict |env| whose values are strings that can refer to other keys,
+ for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
+ env such that key2 is after key1 in L if env[key2] refers to env[key1].
+
+ Throws an Exception in case of dependency cycles.
+ """
+ # Since environment variables can refer to other variables, the evaluation
+ # order is important. Below is the logic to compute the dependency graph
+ # and sort it.
+ regex = re.compile(r"\$\{([a-zA-Z0-9\-_]+)\}")
+
+ def GetEdges(node):
+ # Use a definition of edges such that user_of_variable -> used_variable.
+ # This happens to be easier in this case, since a variable's
+ # definition contains all variables it references in a single string.
+ # We can then reverse the result of the topological sort at the end.
+ # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
+ matches = {v for v in regex.findall(env[node]) if v in env}
+ for dependee in matches:
+ assert "${" not in dependee, "Nested variables not supported: " + dependee
+ return matches
+
+ try:
+ # Topologically sort, and then reverse, because we used an edge definition
+ # that's inverted from the expected result of this function (see comment
+ # above).
+ order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
+ order.reverse()
+ return order
+ except gyp.common.CycleError as e:
+ raise GypError(
+ "Xcode environment variables are cyclically dependent: " + str(e.nodes)
+ )
+
+
+def GetSortedXcodeEnv(
+ xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None
+):
+ env = _GetXcodeEnv(
+ xcode_settings, built_products_dir, srcroot, configuration, additional_settings
+ )
+ return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
+
+
+def GetSpecPostbuildCommands(spec, quiet=False):
+ """Returns the list of postbuilds explicitly defined on |spec|, in a form
+ executable by a shell."""
+ postbuilds = []
+ for postbuild in spec.get("postbuilds", []):
+ if not quiet:
+ postbuilds.append(
+ "echo POSTBUILD\\(%s\\) %s"
+ % (spec["target_name"], postbuild["postbuild_name"])
+ )
+ postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild["action"]))
+ return postbuilds
+
+
+def _HasIOSTarget(targets):
+ """Returns true if any target contains the iOS specific key
+ IPHONEOS_DEPLOYMENT_TARGET."""
+ for target_dict in targets.values():
+ for config in target_dict["configurations"].values():
+ if config.get("xcode_settings", {}).get("IPHONEOS_DEPLOYMENT_TARGET"):
+ return True
+ return False
+
+
+def _AddIOSDeviceConfigurations(targets):
+ """Clone all targets and append -iphoneos to the name. Configure these targets
+ to build for iOS devices and use correct architectures for those builds."""
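+ # e.g. a "Debug" configuration is cloned into "Debug-iphoneos" (with
+ # SDKROOT "iphoneos"), while "Debug-iphonesimulator" aliases the original
+ # configuration with SDKROOT "iphonesimulator".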
+ for target_dict in targets.values():
+ toolset = target_dict["toolset"]
+ configs = target_dict["configurations"]
+ for config_name, simulator_config_dict in dict(configs).items():
+ iphoneos_config_dict = copy.deepcopy(simulator_config_dict)
+ configs[config_name + "-iphoneos"] = iphoneos_config_dict
+ configs[config_name + "-iphonesimulator"] = simulator_config_dict
+ if toolset == "target":
+ simulator_config_dict["xcode_settings"]["SDKROOT"] = "iphonesimulator"
+ iphoneos_config_dict["xcode_settings"]["SDKROOT"] = "iphoneos"
+ return targets
+
+
+def CloneConfigurationForDeviceAndEmulator(target_dicts):
+ """If |target_dicts| contains any iOS targets, automatically create -iphoneos
+ targets for iOS device builds."""
+ if _HasIOSTarget(target_dicts):
+ return _AddIOSDeviceConfigurations(target_dicts)
+ return target_dicts
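+
+
+# A minimal sketch, with hypothetical target data, of what the cloning above
+# produces; it is illustrative and not used by gyp itself.
+def _ExampleCloneShape():
+ targets = {"a.gyp:app#target": {"toolset": "target", "configurations": {
+ "Debug": {"xcode_settings": {"IPHONEOS_DEPLOYMENT_TARGET": "12.0"}}}}}
+ targets = CloneConfigurationForDeviceAndEmulator(targets)
+ # The original Debug entry remains, aliased as Debug-iphonesimulator with
+ # SDKROOT=iphonesimulator, while a deep copy becomes Debug-iphoneos with
+ # SDKROOT=iphoneos.
+ return sorted(targets["a.gyp:app#target"]["configurations"])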
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
new file mode 100644
index 0000000..bb74eac
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
@@ -0,0 +1,302 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Xcode-ninja wrapper project file generator.
+
+This updates the data structures passed to the Xcode gyp generator to build
+with ninja instead. The Xcode project itself is transformed into a list of
+executable targets, each with a build step to build with ninja, and a target
+ with every source and resource file. This appears to sidestep some of the
+ major performance headaches experienced when using complex projects with a
+ large number of targets within Xcode.
+"""
+
+import errno
+import gyp.generator.ninja
+import os
+import re
+import xml.sax.saxutils
+
+
+def _WriteWorkspace(main_gyp, sources_gyp, params):
+ """ Create a workspace to wrap main and sources gyp paths. """
+ (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
+ workspace_path = build_file_root + ".xcworkspace"
+ options = params["options"]
+ if options.generator_output:
+ workspace_path = os.path.join(options.generator_output, workspace_path)
+ try:
+ os.makedirs(workspace_path)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ output_string = (
+ '<?xml version="1.0" encoding="UTF-8"?>\n' + '<Workspace version = "1.0">\n'
+ )
+ for gyp_name in [main_gyp, sources_gyp]:
+ name = os.path.splitext(os.path.basename(gyp_name))[0] + ".xcodeproj"
+ name = xml.sax.saxutils.quoteattr("group:" + name)
+ output_string += " <FileRef location = %s></FileRef>\n" % name
+ output_string += "</Workspace>\n"
+
+ workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
+
+ try:
+ with open(workspace_file) as input_file:
+ input_string = input_file.read()
+ if input_string == output_string:
+ return
+ except OSError:
+ # Ignore errors if the file doesn't exist.
+ pass
+
+ with open(workspace_file, "w") as output_file:
+ output_file.write(output_string)
+
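+
+# For reference, the workspace written above has this shape (with hypothetical
+# project names):
+#
+# <?xml version="1.0" encoding="UTF-8"?>
+# <Workspace version = "1.0">
+# <FileRef location = "group:main.ninja.xcodeproj"></FileRef>
+# <FileRef location = "group:sources_for_indexing.xcodeproj"></FileRef>
+# </Workspace>
+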
+
+def _TargetFromSpec(old_spec, params):
+ """ Create fake target for xcode-ninja wrapper. """
+ # Determine ninja top level build dir (e.g. /path/to/out).
+ ninja_toplevel = None
+ jobs = 0
+ if params:
+ options = params["options"]
+ ninja_toplevel = os.path.join(
+ options.toplevel_dir, gyp.generator.ninja.ComputeOutputDir(params)
+ )
+ jobs = params.get("generator_flags", {}).get("xcode_ninja_jobs", 0)
+
+ target_name = old_spec.get("target_name")
+ product_name = old_spec.get("product_name", target_name)
+ product_extension = old_spec.get("product_extension")
+
+ ninja_target = {}
+ ninja_target["target_name"] = target_name
+ ninja_target["product_name"] = product_name
+ if product_extension:
+ ninja_target["product_extension"] = product_extension
+ ninja_target["toolset"] = old_spec.get("toolset")
+ ninja_target["default_configuration"] = old_spec.get("default_configuration")
+ ninja_target["configurations"] = {}
+
+ # Tell Xcode to look in |ninja_toplevel| for build products.
+ new_xcode_settings = {}
+ if ninja_toplevel:
+ new_xcode_settings["CONFIGURATION_BUILD_DIR"] = (
+ "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
+ )
+
+ if "configurations" in old_spec:
+ for config in old_spec["configurations"]:
+ old_xcode_settings = old_spec["configurations"][config].get(
+ "xcode_settings", {}
+ )
+ if "IPHONEOS_DEPLOYMENT_TARGET" in old_xcode_settings:
+ new_xcode_settings["CODE_SIGNING_REQUIRED"] = "NO"
+ new_xcode_settings["IPHONEOS_DEPLOYMENT_TARGET"] = old_xcode_settings[
+ "IPHONEOS_DEPLOYMENT_TARGET"
+ ]
+ for key in ["BUNDLE_LOADER", "TEST_HOST"]:
+ if key in old_xcode_settings:
+ new_xcode_settings[key] = old_xcode_settings[key]
+
+ ninja_target["configurations"][config] = {}
+ ninja_target["configurations"][config][
+ "xcode_settings"
+ ] = new_xcode_settings
+
+ ninja_target["mac_bundle"] = old_spec.get("mac_bundle", 0)
+ ninja_target["mac_xctest_bundle"] = old_spec.get("mac_xctest_bundle", 0)
+ ninja_target["ios_app_extension"] = old_spec.get("ios_app_extension", 0)
+ ninja_target["ios_watchkit_extension"] = old_spec.get("ios_watchkit_extension", 0)
+ ninja_target["ios_watchkit_app"] = old_spec.get("ios_watchkit_app", 0)
+ ninja_target["type"] = old_spec["type"]
+ if ninja_toplevel:
+ ninja_target["actions"] = [
+ {
+ "action_name": "Compile and copy %s via ninja" % target_name,
+ "inputs": [],
+ "outputs": [],
+ "action": [
+ "env",
+ "PATH=%s" % os.environ["PATH"],
+ "ninja",
+ "-C",
+ new_xcode_settings["CONFIGURATION_BUILD_DIR"],
+ target_name,
+ ],
+ "message": "Compile and copy %s via ninja" % target_name,
+ },
+ ]
+ if jobs > 0:
+ ninja_target["actions"][0]["action"].extend(("-j", jobs))
+ return ninja_target
+
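+
+# A small sketch with made-up values: for a spec such as
+# {"target_name": "base", "toolset": "target", "type": "executable",
+# "configurations": {"Debug": {}}} and ninja output under /path/to/out, the
+# fake target carries a single action running
+# env PATH=... ninja -C /path/to/out/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) base
+# so building the Xcode target simply shells out to ninja.
+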
+
+def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
+ """Limit targets for Xcode wrapper.
+
+ Xcode sometimes performs poorly with too many targets, so only include
+ proper executable targets, with filters to customize.
+ Arguments:
+ target_extras: Regular expression; any target whose name matches is always included.
+ executable_target_pattern: Regular expression limiting executable targets.
+ spec: Specifications for target.
+ """
+ target_name = spec.get("target_name")
+ # Always include targets matching target_extras.
+ if target_extras is not None and re.search(target_extras, target_name):
+ return True
+
+ # Otherwise include only executable targets and XCTest bundles.
+ if int(spec.get("mac_xctest_bundle", 0)) != 0 or (
+ spec.get("type", "") == "executable"
+ and spec.get("product_extension", "") != "bundle"
+ ):
+
+ # If there is a filter and the target does not match, exclude the target.
+ if executable_target_pattern is not None:
+ if not re.search(executable_target_pattern, target_name):
+ return False
+ return True
+ return False
+
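+
+# Illustrative calls with made-up specs: {"target_name": "app", "type":
+# "executable"} passes with both filters unset; a static_library spec is only
+# included when target_extras matches its name, so target_extras=r"_run$"
+# would admit a target named "foo_run" regardless of its type.
+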
+
+def CreateWrapper(target_list, target_dicts, data, params):
+ """Initialize targets for the ninja wrapper.
+
+ This sets up the necessary variables in the targets to generate Xcode projects
+ that use ninja as an external builder.
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ data: Dict of flattened build files keyed on gyp path.
+ params: Dict of global options for gyp.
+ """
+ orig_gyp = params["build_files"][0]
+ for gyp_name, gyp_dict in data.items():
+ if gyp_name == orig_gyp:
+ depth = gyp_dict["_DEPTH"]
+
+ # Check for a custom main gyp name, otherwise derive the default from the
+ # original build file by inserting .ninja before the .gyp extension.
+ generator_flags = params.get("generator_flags", {})
+ main_gyp = generator_flags.get("xcode_ninja_main_gyp", None)
+ if main_gyp is None:
+ (build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
+ main_gyp = build_file_root + ".ninja" + build_file_ext
+
+ # Create new |target_list|, |target_dicts| and |data| data structures.
+ new_target_list = []
+ new_target_dicts = {}
+ new_data = {}
+
+ # Set base keys needed for |data|.
+ new_data[main_gyp] = {}
+ new_data[main_gyp]["included_files"] = []
+ new_data[main_gyp]["targets"] = []
+ new_data[main_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {})
+
+ # Normally the xcode-ninja generator includes only valid executable targets.
+ # If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
+ # executable targets that match the pattern. (Default all)
+ executable_target_pattern = generator_flags.get(
+ "xcode_ninja_executable_target_pattern", None
+ )
+
+ # For including other non-executable targets, add the matching target name
+ # to the |xcode_ninja_target_pattern| regular expression. (Default none)
+ target_extras = generator_flags.get("xcode_ninja_target_pattern", None)
+
+ for old_qualified_target in target_list:
+ spec = target_dicts[old_qualified_target]
+ if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
+ # Add to new_target_list.
+ target_name = spec.get("target_name")
+ new_target_name = f"{main_gyp}:{target_name}#target"
+ new_target_list.append(new_target_name)
+
+ # Add to new_target_dicts.
+ new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
+
+ # Add to new_data.
+ for old_target in data[old_qualified_target.split(":")[0]]["targets"]:
+ if old_target["target_name"] == target_name:
+ new_data_target = {}
+ new_data_target["target_name"] = old_target["target_name"]
+ new_data_target["toolset"] = old_target["toolset"]
+ new_data[main_gyp]["targets"].append(new_data_target)
+
+ # Create sources target.
+ sources_target_name = "sources_for_indexing"
+ sources_target = _TargetFromSpec(
+ {
+ "target_name": sources_target_name,
+ "toolset": "target",
+ "default_configuration": "Default",
+ "mac_bundle": "0",
+ "type": "executable",
+ },
+ None,
+ )
+
+ # Tell Xcode to look everywhere for headers.
+ sources_target["configurations"] = {"Default": {"include_dirs": [depth]}}
+
+ # Put excluded files into the sources target so they can be opened in Xcode.
+ skip_excluded_files = not generator_flags.get(
+ "xcode_ninja_list_excluded_files", True
+ )
+
+ sources = []
+ for target, target_dict in target_dicts.items():
+ base = os.path.dirname(target)
+ files = target_dict.get("sources", []) + target_dict.get(
+ "mac_bundle_resources", []
+ )
+
+ if not skip_excluded_files:
+ files.extend(
+ target_dict.get("sources_excluded", [])
+ + target_dict.get("mac_bundle_resources_excluded", [])
+ )
+
+ for action in target_dict.get("actions", []):
+ files.extend(action.get("inputs", []))
+
+ if not skip_excluded_files:
+ files.extend(action.get("inputs_excluded", []))
+
+ # Remove files starting with $. These are mostly intermediate files for the
+ # build system.
+ files = [file for file in files if not file.startswith("$")]
+
+ # Make sources relative to root build file.
+ relative_path = os.path.dirname(main_gyp)
+ sources += [
+ os.path.relpath(os.path.join(base, file), relative_path) for file in files
+ ]
+
+ sources_target["sources"] = sorted(set(sources))
+
+ # Put the sources target in its own gyp.
+ sources_gyp = os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
+ fully_qualified_target_name = f"{sources_gyp}:{sources_target_name}#target"
+
+ # Add to new_target_list, new_target_dicts and new_data.
+ new_target_list.append(fully_qualified_target_name)
+ new_target_dicts[fully_qualified_target_name] = sources_target
+ new_data_target = {}
+ new_data_target["target_name"] = sources_target["target_name"]
+ new_data_target["_DEPTH"] = depth
+ new_data_target["toolset"] = "target"
+ new_data[sources_gyp] = {}
+ new_data[sources_gyp]["targets"] = []
+ new_data[sources_gyp]["included_files"] = []
+ new_data[sources_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {})
+ new_data[sources_gyp]["targets"].append(new_data_target)
+
+ # Write workspace to file.
+ _WriteWorkspace(main_gyp, sources_gyp, params)
+ return (new_target_list, new_target_dicts, new_data)
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
new file mode 100644
index 0000000..0e941eb
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
@@ -0,0 +1,3197 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Xcode project file generator.
+
+This module is both an Xcode project file generator and a documentation of the
+Xcode project file format. Knowledge of the project file format was gained
+based on extensive experience with Xcode, and by making changes to projects in
+Xcode.app and observing the resultant changes in the associated project files.
+
+XCODE PROJECT FILES
+
+The generator targets the file format as written by Xcode 3.2 (specifically,
+3.2.6), but past experience has taught that the format has not changed
+significantly in the past several years, and future versions of Xcode are able
+to read older project files.
+
+Xcode project files are "bundled": the project "file" from an end-user's
+perspective is actually a directory with an ".xcodeproj" extension. The
+project file from this module's perspective is actually a file inside this
+directory, always named "project.pbxproj". This file contains a complete
+description of the project and is all that is needed to use the xcodeproj.
+Other files contained in the xcodeproj directory are simply used to store
+per-user settings, such as the state of various UI elements in the Xcode
+application.
+
+The project.pbxproj file is a property list, stored in a format almost
+identical to the NeXTstep property list format. The file is able to carry
+Unicode data, and is encoded in UTF-8. The root element in the property list
+is a dictionary that contains several properties of minimal interest, and two
+properties of immense interest. The most important property is a dictionary
+named "objects". The entire structure of the project is represented by the
+children of this property. The objects dictionary is keyed by unique 96-bit
+values represented by 24 uppercase hexadecimal characters. Each value in the
+objects dictionary is itself a dictionary, describing an individual object.
+
+Each object in the dictionary is a member of a class, which is identified by
+the "isa" property of each object. A variety of classes are represented in a
+project file. Objects can refer to other objects by ID, using the 24-character
+hexadecimal object key. A project's objects form a tree, with a root object
+of class PBXProject at the root. As an example, the PBXProject object serves
+as parent to an XCConfigurationList object defining the build configurations
+used in the project, a PBXGroup object serving as a container for all files
+referenced in the project, and a list of target objects, each of which defines
+a target in the project. There are several different types of target object,
+such as PBXNativeTarget and PBXAggregateTarget. In this module, this
+relationship is expressed by having each target type derive from an abstract
+base named XCTarget.
+
+The project.pbxproj file's root dictionary also contains a property, sibling to
+the "objects" dictionary, named "rootObject". The value of rootObject is a
+24-character object key referring to the root PBXProject object in the
+objects dictionary.
+
+In Xcode, every file used as input to a target or produced as a final product
+of a target must appear somewhere in the hierarchy rooted at the PBXGroup
+object referenced by the PBXProject's mainGroup property. A PBXGroup is
+generally represented as a folder in the Xcode application. PBXGroups can
+contain other PBXGroups as well as PBXFileReferences, which are pointers to
+actual files.
+
+Each XCTarget contains a list of build phases, represented in this module by
+the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations
+are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the
+"Compile Sources" and "Link Binary With Libraries" phases displayed in the
+Xcode application. Files used as input to these phases (for example, source
+files in the former case and libraries and frameworks in the latter) are
+represented by PBXBuildFile objects, referenced by elements of "files" lists
+ in XCTarget objects. Each PBXBuildFile object refers to a PBXFileReference
+ object as a "weak" reference: it does not "own" the PBXFileReference, which is
+owned by the root object's mainGroup or a descendant group. In most cases, the
+layer of indirection between an XCBuildPhase and a PBXFileReference via a
+PBXBuildFile appears extraneous, but there's actually one reason for this:
+file-specific compiler flags are added to the PBXBuildFile object so as to
+allow a single file to be a member of multiple targets while having distinct
+ compiler flags for each. These flags can be modified in the Xcode application
+in the "Build" tab of a File Info window.
+
+When a project is open in the Xcode application, Xcode will rewrite it. As
+such, this module is careful to adhere to the formatting used by Xcode, to
+avoid insignificant changes appearing in the file when it is used in the
+Xcode application. This will keep version control repositories happy, and
+makes it possible to compare a project file used in Xcode to one generated by
+this module to determine if any significant changes were made in the
+application.
+
+Xcode has its own way of assigning 24-character identifiers to each object,
+ which is not duplicated here. Because the identifier is only generated
+once, when an object is created, and is then left unchanged, there is no need
+to attempt to duplicate Xcode's behavior in this area. The generator is free
+to select any identifier, even at random, to refer to the objects it creates,
+and Xcode will retain those identifiers and use them when subsequently
+rewriting the project file. However, the generator would choose new random
+identifiers each time the project files are generated, leading to difficulties
+comparing "used" project files to "pristine" ones produced by this module,
+and causing the appearance of changes as every object identifier is changed
+when updated projects are checked in to a version control repository. To
+mitigate this problem, this module chooses identifiers in a more deterministic
+way, by hashing a description of each object as well as its parent and ancestor
+objects. This strategy should result in minimal "shift" in IDs as successive
+generations of project files are produced.
+
+THIS MODULE
+
+This module introduces several classes, all derived from the XCObject class.
+Nearly all of the "brains" are built into the XCObject class, which understands
+how to create and modify objects, maintain the proper tree structure, compute
+identifiers, and print objects. For the most part, classes derived from
+XCObject need only provide a _schema class object, a dictionary that
+expresses what properties objects of the class may contain.
+
+Given this structure, it's possible to build a minimal project file by creating
+objects of the appropriate types and making the proper connections:
+
+ config_list = XCConfigurationList()
+ group = PBXGroup()
+ project = PBXProject({'buildConfigurationList': config_list,
+ 'mainGroup': group})
+
+With the project object set up, it can be added to an XCProjectFile object.
+XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject
+subclass that does not actually correspond to a class type found in a project
+file. Rather, it is used to represent the project file's root dictionary.
+Printing an XCProjectFile will print the entire project file, including the
+full "objects" dictionary.
+
+ project_file = XCProjectFile({'rootObject': project})
+ project_file.ComputeIDs()
+ project_file.Print()
+
+Xcode project files are always encoded in UTF-8. This module accepts str
+objects (the only string type under Python 3) and encodes them as UTF-8 when
+a project file is output. Obviously, if you're just using ASCII, you won't
+encounter difficulties because ASCII is a UTF-8 subset.
+"""
+
+import gyp.common
+from functools import cmp_to_key
+import hashlib
+from operator import attrgetter
+import posixpath
+import re
+import struct
+import sys
+
+
+def cmp(x, y):
+ # Python 3 removed the built-in cmp(); recreate the classic three-way compare.
+ return (x > y) - (x < y)
+
+
+# See XCObject._EncodeString. This pattern is used to determine when a string
+# can be printed unquoted. Strings that match this pattern may be printed
+# unquoted. Strings that do not match must be quoted and may be further
+# transformed to be properly encoded. Note that this expression matches the
+# characters listed with "+", for 1 or more occurrences: if a string is empty,
+# it must not match this pattern, because it needs to be encoded as "".
+_unquoted = re.compile("^[A-Za-z0-9$./_]+$")
+
+# Strings that match this pattern are quoted regardless of what _unquoted says.
+# Oddly, Xcode will quote any string with a run of three or more underscores.
+_quoted = re.compile("___")
+
+# This pattern should match any character that needs to be escaped by
+# XCObject._EncodeString. See that function.
+_escaped = re.compile('[\\\\"]|[\x00-\x1f]')
+
+
+# Used by SourceTreeAndPathFromPath
+_path_leading_variable = re.compile(r"^\$\((.*?)\)(/(.*))?$")
+
+
+def SourceTreeAndPathFromPath(input_path):
+ """Given input_path, returns a tuple with sourceTree and path values.
+
+ Examples:
+ input_path (source_tree, output_path)
+ '$(VAR)/path' ('VAR', 'path')
+ '$(VAR)' ('VAR', None)
+ 'path' (None, 'path')
+ """
+
+ source_group_match = _path_leading_variable.match(input_path)
+ if source_group_match:
+ source_tree = source_group_match.group(1)
+ output_path = source_group_match.group(3) # This may be None.
+ else:
+ source_tree = None
+ output_path = input_path
+
+ return (source_tree, output_path)
+
+
+def ConvertVariablesToShellSyntax(input_string):
+ return re.sub(r"\$\((.*?)\)", "${\\1}", input_string)
+
+
+class XCObject:
+ """The abstract base of all class types used in Xcode project files.
+
+ Class variables:
+ _schema: A dictionary defining the properties of this class. The keys to
+ _schema are string property keys as used in project files. Values
+ are a list of four or five elements:
+ [ is_list, property_type, is_strong, is_required, default ]
+ is_list: True if the property described is a list, as opposed
+ to a single element.
+ property_type: The type to use as the value of the property,
+ or if is_list is True, the type to use for each
+ element of the value's list. property_type must
+ be an XCObject subclass, or one of the built-in
+ types str, int, or dict.
+ is_strong: If property_type is an XCObject subclass, is_strong
+ is True to assert that this class "owns," or serves
+ as parent, to the property value (or, if is_list is
+ True, values). is_strong must be False if
+ property_type is not an XCObject subclass.
+ is_required: True if the property is required for the class.
+ Note that is_required being True does not preclude
+ an empty string ("", in the case of property_type
+ str) or list ([], in the case of is_list True) from
+ being set for the property.
+ default: Optional. If is_required is True, default may be set
+ to provide a default value for objects that do not supply
+ their own value. If is_required is True and default
+ is not provided, users of the class must supply their own
+ value for the property.
+ Note that although the values of the array are expressed in
+ boolean terms, subclasses provide values as integers to conserve
+ horizontal space.
+ _should_print_single_line: False in XCObject. Subclasses whose objects
+ should be written to the project file in the
+ alternate single-line format, such as
+ PBXFileReference and PBXBuildFile, should
+ set this to True.
+ _encode_transforms: Used by _EncodeString to encode unprintable characters.
+ The index into this list is the ordinal of the
+ character to transform; each value is a string
+ used to represent the character in the output. XCObject
+ provides an _encode_transforms list suitable for most
+ XCObject subclasses.
+ _alternate_encode_transforms: Provided for subclasses that wish to use
+ the alternate encoding rules. Xcode seems
+ to use these rules when printing objects in
+ single-line format. Subclasses that desire
+ this behavior should set _encode_transforms
+ to _alternate_encode_transforms.
+ _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs
+ to construct this object's ID. Most classes that need custom
+ hashing behavior should do it by overriding Hashables,
+ but in some cases an object's parent may wish to push a
+ hashable value into its child, and it can do so by appending
+ to _hashables.
+ Attributes:
+ id: The object's identifier, a 24-character uppercase hexadecimal string.
+ Usually, objects being created should not set id until the entire
+ project file structure is built. At that point, ComputeIDs() should
+ be called on the root object to assign deterministic values for id to
+ each object in the tree.
+ parent: The object's parent. This is set by a parent XCObject when a child
+ object is added to it.
+ _properties: The object's property dictionary. An object's properties are
+ described by its class' _schema variable.
+ """
+
+ _schema = {}
+ _should_print_single_line = False
+
+ # See _EncodeString.
+ _encode_transforms = []
+ i = 0
+ while i < ord(" "):
+ _encode_transforms.append("\\U%04x" % i)
+ i = i + 1
+ _encode_transforms[7] = "\\a"
+ _encode_transforms[8] = "\\b"
+ _encode_transforms[9] = "\\t"
+ _encode_transforms[10] = "\\n"
+ _encode_transforms[11] = "\\v"
+ _encode_transforms[12] = "\\f"
+ _encode_transforms[13] = "\\n"
+
+ _alternate_encode_transforms = list(_encode_transforms)
+ _alternate_encode_transforms[9] = chr(9)
+ _alternate_encode_transforms[10] = chr(10)
+ _alternate_encode_transforms[11] = chr(11)
+
+ def __init__(self, properties=None, id=None, parent=None):
+ self.id = id
+ self.parent = parent
+ self._properties = {}
+ self._hashables = []
+ self._SetDefaultsFromSchema()
+ self.UpdateProperties(properties)
+
+ def __repr__(self):
+ try:
+ name = self.Name()
+ except NotImplementedError:
+ return f"<{self.__class__.__name__} at 0x{id(self):x}>"
+ return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>"
+
+ def Copy(self):
+ """Make a copy of this object.
+
+ The new object will have its own copy of lists and dicts. Any XCObject
+ objects owned by this object (marked "strong") will be copied in the
+ new object, even those found in lists. If this object has any weak
+ references to other XCObjects, the same references are added to the new
+ object without making a copy.
+ """
+
+ that = self.__class__(id=self.id, parent=self.parent)
+ for key, value in self._properties.items():
+ is_strong = self._schema[key][2]
+
+ if isinstance(value, XCObject):
+ if is_strong:
+ new_value = value.Copy()
+ new_value.parent = that
+ that._properties[key] = new_value
+ else:
+ that._properties[key] = value
+ elif isinstance(value, (str, int)):
+ that._properties[key] = value
+ elif isinstance(value, list):
+ if is_strong:
+ # If is_strong is True, each element is an XCObject, so it's safe to
+ # call Copy.
+ that._properties[key] = []
+ for item in value:
+ new_item = item.Copy()
+ new_item.parent = that
+ that._properties[key].append(new_item)
+ else:
+ that._properties[key] = value[:]
+ elif isinstance(value, dict):
+ # dicts are never strong.
+ if is_strong:
+ raise TypeError(
+ "Strong dict for key " + key + " in " + self.__class__.__name__
+ )
+ else:
+ that._properties[key] = value.copy()
+ else:
+ raise TypeError(
+ "Unexpected type "
+ + value.__class__.__name__
+ + " for key "
+ + key
+ + " in "
+ + self.__class__.__name__
+ )
+
+ return that
+
+ def Name(self):
+ """Return the name corresponding to an object.
+
+ Not all objects necessarily need to be nameable, and not all that do have
+ a "name" property. Override as needed.
+ """
+
+ # If the schema indicates that "name" is required, try to access the
+ # property even if it doesn't exist. This will result in a KeyError
+ # being raised for the property that should be present, which seems more
+ # appropriate than NotImplementedError in this case.
+ if "name" in self._properties or (
+ "name" in self._schema and self._schema["name"][3]
+ ):
+ return self._properties["name"]
+
+ raise NotImplementedError(self.__class__.__name__ + " must implement Name")
+
+ def Comment(self):
+ """Return a comment string for the object.
+
+ Most objects just use their name as the comment, but PBXProject uses
+ different values.
+
+ The returned comment is not escaped and does not have any comment marker
+ strings applied to it.
+ """
+
+ return self.Name()
+
+ def Hashables(self):
+ hashables = [self.__class__.__name__]
+
+ name = self.Name()
+ if name is not None:
+ hashables.append(name)
+
+ hashables.extend(self._hashables)
+
+ return hashables
+
+ def HashablesForChild(self):
+ return None
+
+ def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None):
+ """Set "id" properties deterministically.
+
+ An object's "id" property is set based on a hash of its class type and
+ name, as well as the class type and name of all ancestor objects. As
+ such, it is only advisable to call ComputeIDs once an entire project file
+ tree is built.
+
+ If recursive is True, recurse into all descendant objects and update their
+ hashes.
+
+ If overwrite is True, any existing value set in the "id" property will be
+ replaced.
+ """
+
+ def _HashUpdate(hash, data):
+ """Update hash with data's length and contents.
+
+ If the hash were updated only with the value of data, it would be
+ possible for clowns to induce collisions by manipulating the names of
+ their objects. By adding the length, it's exceedingly less likely that
+ ID collisions will be encountered, intentionally or not.
+ """
+
+ hash.update(struct.pack(">i", len(data)))
+ if isinstance(data, str):
+ data = data.encode("utf-8")
+ hash.update(data)
+
+ if seed_hash is None:
+ seed_hash = hashlib.sha1()
+
+ hash = seed_hash.copy()
+
+ hashables = self.Hashables()
+ assert len(hashables) > 0
+ for hashable in hashables:
+ _HashUpdate(hash, hashable)
+
+ if recursive:
+ hashables_for_child = self.HashablesForChild()
+ if hashables_for_child is None:
+ child_hash = hash
+ else:
+ assert len(hashables_for_child) > 0
+ child_hash = seed_hash.copy()
+ for hashable in hashables_for_child:
+ _HashUpdate(child_hash, hashable)
+
+ for child in self.Children():
+ child.ComputeIDs(recursive, overwrite, child_hash)
+
+ if overwrite or self.id is None:
+ # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is
+ # 160 bits. Instead of throwing out 64 bits of the digest, xor them
+ # into the portion that gets used.
+ assert hash.digest_size % 4 == 0
+ digest_int_count = hash.digest_size // 4
+ digest_ints = struct.unpack(">" + "I" * digest_int_count, hash.digest())
+ id_ints = [0, 0, 0]
+ for index in range(0, digest_int_count):
+ id_ints[index % 3] ^= digest_ints[index]
+ self.id = "%08X%08X%08X" % tuple(id_ints)
+
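+ # A worked sketch of the fold in ComputeIDs above: a hypothetical 160-bit
+ # digest splits into five 32-bit ints d0..d4, and id_ints becomes
+ # [d0 ^ d3, d1 ^ d4, d2], so every digest bit still influences the final
+ # "%08X%08X%08X" identifier.
+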
+ def EnsureNoIDCollisions(self):
+ """Verifies that no two objects have the same ID. Checks all descendants.
+ """
+
+ ids = {}
+ descendants = self.Descendants()
+ for descendant in descendants:
+ if descendant.id in ids:
+ other = ids[descendant.id]
+ raise KeyError(
+ 'Duplicate ID %s, objects "%s" and "%s" in "%s"'
+ % (
+ descendant.id,
+ str(descendant._properties),
+ str(other._properties),
+ self._properties["rootObject"].Name(),
+ )
+ )
+ ids[descendant.id] = descendant
+
+ def Children(self):
+ """Returns a list of all of this object's owned (strong) children."""
+
+ children = []
+ for property, attributes in self._schema.items():
+ (is_list, property_type, is_strong) = attributes[0:3]
+ if is_strong and property in self._properties:
+ if not is_list:
+ children.append(self._properties[property])
+ else:
+ children.extend(self._properties[property])
+ return children
+
+ def Descendants(self):
+ """Returns a list of all of this object's descendants, including this
+ object.
+ """
+
+ children = self.Children()
+ descendants = [self]
+ for child in children:
+ descendants.extend(child.Descendants())
+ return descendants
+
+ def PBXProjectAncestor(self):
+ # The base case for recursion is defined at PBXProject.PBXProjectAncestor.
+ if self.parent:
+ return self.parent.PBXProjectAncestor()
+ return None
+
+ def _EncodeComment(self, comment):
+ """Encodes a comment to be placed in the project file output, mimicking
+ Xcode behavior.
+ """
+
+ # This mimics Xcode behavior by wrapping the comment in "/*" and "*/". If
+ # the string already contains a "*/", it is turned into "(*)/". This keeps
+ # the file writer from outputting something that would be treated as the
+ # end of a comment in the middle of something intended to be entirely a
+ # comment.
+
+ return "/* " + comment.replace("*/", "(*)/") + " */"
+
+ def _EncodeTransform(self, match):
+ # This function works closely with _EncodeString. It will only be called
+ # by re.sub with match.group(0) containing a character matched by the
+ # _escaped expression.
+ char = match.group(0)
+
+ # Backslashes (\) and quotation marks (") are always replaced with a
+ # backslash-escaped version of the same. Everything else gets its
+ # replacement from the class' _encode_transforms array.
+ if char == "\\":
+ return "\\\\"
+ if char == '"':
+ return '\\"'
+ return self._encode_transforms[ord(char)]
+
+ def _EncodeString(self, value):
+ """Encodes a string to be placed in the project file output, mimicking
+ Xcode behavior.
+ """
+
+ # Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
+ # $ (dollar sign), . (period), and _ (underscore) is present. Also use
+ # quotation marks to represent empty strings.
+ #
+ # Escape " (double-quote) and \ (backslash) by preceding them with a
+ # backslash.
+ #
+ # Some characters below the printable ASCII range are encoded specially:
+ # 7 ^G BEL is encoded as "\a"
+ # 8 ^H BS is encoded as "\b"
+ # 11 ^K VT is encoded as "\v"
+ # 12 ^L NP is encoded as "\f"
+ # 127 ^? DEL is passed through as-is without escaping
+ # - In PBXFileReference and PBXBuildFile objects:
+ # 9 ^I HT is passed through as-is without escaping
+ # 10 ^J NL is passed through as-is without escaping
+ # 13 ^M CR is passed through as-is without escaping
+ # - In other objects:
+ # 9 ^I HT is encoded as "\t"
+ # 10 ^J NL is encoded as "\n"
+ # 13 ^M CR is encoded as "\n" rendering it indistinguishable from
+ # 10 ^J NL
+ # All other characters within the ASCII control character range (0 through
+ # 31 inclusive) are encoded as "\Uxxxx", where xxxx is the Unicode code point
+ # in hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e".
+ # Characters above the ASCII range are passed through to the output encoded
+ # as UTF-8 without any escaping. These mappings are contained in the
+ # class' _encode_transforms list.
+
+ if _unquoted.search(value) and not _quoted.search(value):
+ return value
+
+ return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
+
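+ # A few illustrative encodings with hypothetical inputs: 'Source.File.m'
+ # stays unquoted; 'has space' becomes '"has space"'; 'tri___underscore' is
+ # quoted purely because of the three-underscore rule; and the empty string
+ # becomes '""' since it cannot match _unquoted.
+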
+ def _XCPrint(self, file, tabs, line):
+ file.write("\t" * tabs + line)
+
+ def _XCPrintableValue(self, tabs, value, flatten_list=False):
+ """Returns a representation of value that may be printed in a project file,
+ mimicking Xcode's behavior.
+
+ _XCPrintableValue can handle str and int values, XCObjects (which are
+ made printable by returning their id property), and list and dict objects
+ composed of any of the above types. When printing a list or dict, and
+ _should_print_single_line is False, the tabs parameter is used to determine
+ how much to indent the lines corresponding to the items in the list or
+ dict.
+
+ If flatten_list is True, single-element lists will be transformed into
+ strings.
+ """
+
+ printable = ""
+ comment = None
+
+ if self._should_print_single_line:
+ sep = " "
+ element_tabs = ""
+ end_tabs = ""
+ else:
+ sep = "\n"
+ element_tabs = "\t" * (tabs + 1)
+ end_tabs = "\t" * tabs
+
+ if isinstance(value, XCObject):
+ printable += value.id
+ comment = value.Comment()
+ elif isinstance(value, str):
+ printable += self._EncodeString(value)
+ elif isinstance(value, int):
+ printable += str(value)
+ elif isinstance(value, list):
+ if flatten_list and len(value) <= 1:
+ if len(value) == 0:
+ printable += self._EncodeString("")
+ else:
+ printable += self._EncodeString(value[0])
+ else:
+ printable = "(" + sep
+ for item in value:
+ printable += (
+ element_tabs
+ + self._XCPrintableValue(tabs + 1, item, flatten_list)
+ + ","
+ + sep
+ )
+ printable += end_tabs + ")"
+ elif isinstance(value, dict):
+ printable = "{" + sep
+ for item_key, item_value in sorted(value.items()):
+ printable += (
+ element_tabs
+ + self._XCPrintableValue(tabs + 1, item_key, flatten_list)
+ + " = "
+ + self._XCPrintableValue(tabs + 1, item_value, flatten_list)
+ + ";"
+ + sep
+ )
+ printable += end_tabs + "}"
+ else:
+ raise TypeError("Can't make " + value.__class__.__name__ + " printable")
+
+ if comment:
+ printable += " " + self._EncodeComment(comment)
+
+ return printable
+
+ def _XCKVPrint(self, file, tabs, key, value):
+ """Prints a key and value, members of an XCObject's _properties dictionary,
+ to file.
+
+ tabs is an int identifying the indentation level. If the class'
+ _should_print_single_line variable is True, tabs is ignored and the
+ key-value pair will be followed by a space instead of a newline.
+ """
+
+ if self._should_print_single_line:
+ printable = ""
+ after_kv = " "
+ else:
+ printable = "\t" * tabs
+ after_kv = "\n"
+
+ # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy
+ # objects without comments. Sometimes it prints them with comments, but
+ # the majority of the time, it doesn't. To avoid unnecessary changes to
+ # the project file after Xcode opens it, don't write comments for
+ # remoteGlobalIDString. This is a sucky hack and it would certainly be
+ # cleaner to extend the schema to indicate whether or not a comment should
+ # be printed, but since this is the only case where the problem occurs and
+ # Xcode itself can't seem to make up its mind, the hack will suffice.
+ #
+ # Also see PBXContainerItemProxy._schema['remoteGlobalIDString'].
+ if key == "remoteGlobalIDString" and isinstance(self, PBXContainerItemProxy):
+ value_to_print = value.id
+ else:
+ value_to_print = value
+
+ # PBXBuildFile's settings property is represented in the output as a dict,
+ # but a hack here has it represented as a string. Arrange to strip off the
+ # quotes so that it shows up in the output as expected.
+ if key == "settings" and isinstance(self, PBXBuildFile):
+ strip_value_quotes = True
+ else:
+ strip_value_quotes = False
+
+ # In another one-off, let's set flatten_list on buildSettings properties
+ # of XCBuildConfiguration objects, because that's how Xcode treats them.
+ if key == "buildSettings" and isinstance(self, XCBuildConfiguration):
+ flatten_list = True
+ else:
+ flatten_list = False
+
+ try:
+ printable_key = self._XCPrintableValue(tabs, key, flatten_list)
+ printable_value = self._XCPrintableValue(tabs, value_to_print, flatten_list)
+ if (
+ strip_value_quotes
+ and len(printable_value) > 1
+ and printable_value[0] == '"'
+ and printable_value[-1] == '"'
+ ):
+ printable_value = printable_value[1:-1]
+ printable += printable_key + " = " + printable_value + ";" + after_kv
+ except TypeError as e:
+ gyp.common.ExceptionAppend(e, 'while printing key "%s"' % key)
+ raise
+
+ self._XCPrint(file, 0, printable)
+
+ def Print(self, file=sys.stdout):
+ """Prints a reprentation of this object to file, adhering to Xcode output
+ formatting.
+ """
+
+ self.VerifyHasRequiredProperties()
+
+ if self._should_print_single_line:
+ # When printing an object in a single line, Xcode doesn't put any space
+ # between the beginning of a dictionary (or presumably a list) and the
+ # first contained item, so you wind up with snippets like
+ # ...CDEF = {isa = PBXFileReference; fileRef = 0123...
+ # If it were me, I would have put a space in there after the opening
+ # curly, but I guess this is just another one of those inconsistencies
+ # between how Xcode prints PBXFileReference and PBXBuildFile objects as
+ # compared to other objects. Mimic Xcode's behavior here by using an
+ # empty string for sep.
+ sep = ""
+ end_tabs = 0
+ else:
+ sep = "\n"
+ end_tabs = 2
+
+ # Start the object. For example, '\t\tPBXProject = {\n'.
+ self._XCPrint(file, 2, self._XCPrintableValue(2, self) + " = {" + sep)
+
+ # "isa" isn't in the _properties dictionary, it's an intrinsic property
+ # of the class which the object belongs to. Xcode always outputs "isa"
+ # as the first element of an object dictionary.
+ self._XCKVPrint(file, 3, "isa", self.__class__.__name__)
+
+ # The remaining elements of an object dictionary are sorted alphabetically.
+ for property, value in sorted(self._properties.items()):
+ self._XCKVPrint(file, 3, property, value)
+
+ # End the object.
+ self._XCPrint(file, end_tabs, "};\n")
+
+ def UpdateProperties(self, properties, do_copy=False):
+ """Merge the supplied properties into the _properties dictionary.
+
+ The input properties must adhere to the class schema or a KeyError or
+ TypeError exception will be raised. If adding an object of an XCObject
+ subclass and the schema indicates a strong relationship, the object's
+ parent will be set to this object.
+
+ If do_copy is True, then lists, dicts, strong-owned XCObjects, and
+ strong-owned XCObjects in lists will be copied instead of having their
+ references added.
+ """
+
+ if properties is None:
+ return
+
+ for property, value in properties.items():
+ # Make sure the property is in the schema.
+ if property not in self._schema:
+ raise KeyError(property + " not in " + self.__class__.__name__)
+
+ # Make sure the property conforms to the schema.
+ (is_list, property_type, is_strong) = self._schema[property][0:3]
+ if is_list:
+ if value.__class__ != list:
+ raise TypeError(
+ property
+ + " of "
+ + self.__class__.__name__
+ + " must be list, not "
+ + value.__class__.__name__
+ )
+ for item in value:
+ if not isinstance(item, property_type) and not (
+ isinstance(item, str) and property_type == str
+ ):
+ # The extra str check is a holdover from Python 2, which accepted
+ # unicode values for str-typed properties.
+ raise TypeError(
+ "item of "
+ + property
+ + " of "
+ + self.__class__.__name__
+ + " must be "
+ + property_type.__name__
+ + ", not "
+ + item.__class__.__name__
+ )
+ elif not isinstance(value, property_type) and not (
+ isinstance(value, str) and property_type == str
+ ):
+ # The extra str check is a holdover from Python 2, which accepted
+ # unicode values for str-typed properties.
+ raise TypeError(
+ property
+ + " of "
+ + self.__class__.__name__
+ + " must be "
+ + property_type.__name__
+ + ", not "
+ + value.__class__.__name__
+ )
+
+ # Checks passed, perform the assignment.
+ if do_copy:
+ if isinstance(value, XCObject):
+ if is_strong:
+ self._properties[property] = value.Copy()
+ else:
+ self._properties[property] = value
+ elif isinstance(value, (str, int)):
+ self._properties[property] = value
+ elif isinstance(value, list):
+ if is_strong:
+ # If is_strong is True, each element is an XCObject,
+ # so it's safe to call Copy.
+ self._properties[property] = []
+ for item in value:
+ self._properties[property].append(item.Copy())
+ else:
+ self._properties[property] = value[:]
+ elif isinstance(value, dict):
+ self._properties[property] = value.copy()
+ else:
+ raise TypeError(
+ "Don't know how to copy a "
+ + value.__class__.__name__
+ + " object for "
+ + property
+ + " in "
+ + self.__class__.__name__
+ )
+ else:
+ self._properties[property] = value
+
+ # Set up the child's back-reference to this object. Don't use |value|
+ # any more because it may not be right if do_copy is true.
+ if is_strong:
+ if not is_list:
+ self._properties[property].parent = self
+ else:
+ for item in self._properties[property]:
+ item.parent = self
+
+ def HasProperty(self, key):
+ return key in self._properties
+
+ def GetProperty(self, key):
+ return self._properties[key]
+
+ def SetProperty(self, key, value):
+ self.UpdateProperties({key: value})
+
+ def DelProperty(self, key):
+ if key in self._properties:
+ del self._properties[key]
+
+ def AppendProperty(self, key, value):
+ # TODO(mark): Support ExtendProperty too (and make this call that)?
+
+ # Schema validation.
+ if key not in self._schema:
+ raise KeyError(key + " not in " + self.__class__.__name__)
+
+ (is_list, property_type, is_strong) = self._schema[key][0:3]
+ if not is_list:
+ raise TypeError(key + " of " + self.__class__.__name__ + " must be list")
+ if not isinstance(value, property_type):
+ raise TypeError(
+ "item of "
+ + key
+ + " of "
+ + self.__class__.__name__
+ + " must be "
+ + property_type.__name__
+ + ", not "
+ + value.__class__.__name__
+ )
+
+ # If the property doesn't exist yet, create a new empty list to receive the
+ # item.
+ self._properties[key] = self._properties.get(key, [])
+
+ # Set up the ownership link.
+ if is_strong:
+ value.parent = self
+
+ # Store the item.
+ self._properties[key].append(value)
+
+ def VerifyHasRequiredProperties(self):
+ """Ensure that all properties identified as required by the schema are
+ set.
+ """
+
+ # TODO(mark): A stronger verification mechanism is needed. Some
+ # subclasses need to perform validation beyond what the schema can enforce.
+ for property, attributes in self._schema.items():
+ (is_list, property_type, is_strong, is_required) = attributes[0:4]
+ if is_required and property not in self._properties:
+ raise KeyError(self.__class__.__name__ + " requires " + property)
+
+ def _SetDefaultsFromSchema(self):
+ """Assign object default values according to the schema. This will not
+ overwrite properties that have already been set."""
+
+ defaults = {}
+ for property, attributes in self._schema.items():
+ (is_list, property_type, is_strong, is_required) = attributes[0:4]
+ if (
+ is_required
+ and len(attributes) >= 5
+ and property not in self._properties
+ ):
+ default = attributes[4]
+
+ defaults[property] = default
+
+ if len(defaults) > 0:
+ # Use do_copy=True so that each new object gets its own copy of strong
+ # objects, lists, and dicts.
+ self.UpdateProperties(defaults, do_copy=True)
+
+
+class XCHierarchicalElement(XCObject):
+ """Abstract base for PBXGroup and PBXFileReference. Not represented in a
+ project file."""
+
+ # TODO(mark): Do name and path belong here? Probably so.
+ # If path is set and name is not, name may have a default value. Name will
+ # be set to the basename of path, if the basename of path is different from
+ # the full value of path. If path is already just a leaf name, name will
+ # not be set.
+ _schema = XCObject._schema.copy()
+ _schema.update(
+ {
+ "comments": [0, str, 0, 0],
+ "fileEncoding": [0, str, 0, 0],
+ "includeInIndex": [0, int, 0, 0],
+ "indentWidth": [0, int, 0, 0],
+ "lineEnding": [0, int, 0, 0],
+ "sourceTree": [0, str, 0, 1, "<group>"],
+ "tabWidth": [0, int, 0, 0],
+ "usesTabs": [0, int, 0, 0],
+ "wrapsLines": [0, int, 0, 0],
+ }
+ )
+
+ def __init__(self, properties=None, id=None, parent=None):
+ # super
+ XCObject.__init__(self, properties, id, parent)
+ if "path" in self._properties and "name" not in self._properties:
+ path = self._properties["path"]
+ name = posixpath.basename(path)
+ if name != "" and path != name:
+ self.SetProperty("name", name)
+
+ if "path" in self._properties and (
+ "sourceTree" not in self._properties
+ or self._properties["sourceTree"] == "<group>"
+ ):
+ # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take
+ # the variable out and make the path be relative to that variable by
+ # assigning the variable name as the sourceTree.
+ (source_tree, path) = SourceTreeAndPathFromPath(self._properties["path"])
+ if source_tree is not None:
+ self._properties["sourceTree"] = source_tree
+ if path is not None:
+ self._properties["path"] = path
+ if (
+ source_tree is not None
+ and path is None
+ and "name" not in self._properties
+ ):
+ # The path was of the form "$(SDKROOT)" with no path following it.
+ # This object is now relative to that variable, so it has no path
+ # attribute of its own. It does, however, keep a name.
+ del self._properties["path"]
+ self._properties["name"] = source_tree
+
+ def Name(self):
+ if "name" in self._properties:
+ return self._properties["name"]
+ elif "path" in self._properties:
+ return self._properties["path"]
+ else:
+ # This happens in the case of the root PBXGroup.
+ return None
+
+ def Hashables(self):
+ """Custom hashables for XCHierarchicalElements.
+
+ XCHierarchicalElements are special. Generally, their hashes shouldn't
+ change if the paths don't change. The normal XCObject implementation of
+ Hashables adds a hashable for each object, which means that if
+ the hierarchical structure changes (possibly due to changes caused when
+ TakeOverOnlyChild runs and encounters slight changes in the hierarchy),
+ the hashes will change. For example, if a project file initially contains
+ a/b/f1 and the groups a and b are collapsed into a single group a/b, f1 will
+ have a single parent a/b. If someone later adds a/f2 to the project file, a/b
+ can no longer be collapsed, and f1 winds up with parent b and grandparent a.
+ That would be sufficient to change f1's hash.
+
+ To counteract this problem, hashables for all XCHierarchicalElements except
+ for the main group (which has neither a name nor a path) are taken to be
+ just the set of path components. Because hashables are inherited from
+ parents, this provides assurance that a/b/f1 has the same set of hashables
+ whether its parent is b or a/b.
+
+ The main group is a special case. As it is permitted to have no name or
+ path, it is permitted to use the standard XCObject hash mechanism. This
+ is not considered a problem because there can be only one main group.
+ """
+
+ if self == self.PBXProjectAncestor()._properties["mainGroup"]:
+ # super
+ return XCObject.Hashables(self)
+
+ hashables = []
+
+ # Put the name in first, ensuring that if TakeOverOnlyChild collapses
+ # children into a top-level group like "Source", the name always goes
+ # into the list of hashables without interfering with path components.
+ if "name" in self._properties:
+ # Make it less likely for people to manipulate hashes by following the
+ # pattern of always pushing an object type value onto the list first.
+ hashables.append(self.__class__.__name__ + ".name")
+ hashables.append(self._properties["name"])
+
+ # NOTE: This still has the problem that if an absolute path is encountered,
+ # including paths with a sourceTree, they'll still inherit their parents'
+ # hashables, even though the paths aren't relative to their parents. This
+ # is not expected to be much of a problem in practice.
+ path = self.PathFromSourceTreeAndPath()
+ if path is not None:
+ components = path.split(posixpath.sep)
+ for component in components:
+ hashables.append(self.__class__.__name__ + ".path")
+ hashables.append(component)
+
+ hashables.extend(self._hashables)
+
+ return hashables
+
+ def Compare(self, other):
+ # Allow comparison of these types. PBXGroup has the highest sort rank;
+ # PBXVariantGroup is treated as equal to PBXFileReference.
+ valid_class_types = {
+ PBXFileReference: "file",
+ PBXGroup: "group",
+ PBXVariantGroup: "file",
+ }
+ self_type = valid_class_types[self.__class__]
+ other_type = valid_class_types[other.__class__]
+
+ if self_type == other_type:
+ # If the two objects are of the same sort rank, compare their names.
+ return cmp(self.Name(), other.Name())
+
+ # Otherwise, sort groups before everything else.
+ if self_type == "group":
+ return -1
+ return 1
+
+ def CompareRootGroup(self, other):
+ # This function should be used only to compare direct children of the
+ # containing PBXProject's mainGroup. These groups should appear in the
+ # listed order.
+ # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the
+ # generator should have a way of influencing this list rather than having
+ # to hardcode for the generator here.
+ order = [
+ "Source",
+ "Intermediates",
+ "Projects",
+ "Frameworks",
+ "Products",
+ "Build",
+ ]
+
+ # If the groups aren't in the listed order, do a name comparison.
+ # Otherwise, groups in the listed order should come before those that
+ # aren't.
+ self_name = self.Name()
+ other_name = other.Name()
+ self_in = isinstance(self, PBXGroup) and self_name in order
+ other_in = isinstance(other, PBXGroup) and other_name in order
+ if not self_in and not other_in:
+ return self.Compare(other)
+ if self_name in order and other_name not in order:
+ return -1
+ if other_name in order and self_name not in order:
+ return 1
+
+ # If both groups are in the listed order, go by the defined order.
+ self_index = order.index(self_name)
+ other_index = order.index(other_name)
+ if self_index < other_index:
+ return -1
+ if self_index > other_index:
+ return 1
+ return 0
+
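+ # These three-way comparators predate key-based sorting; a caller ordering a
+ # group's children would typically wrap them with the cmp_to_key imported at
+ # the top of this module, e.g.
+ # children.sort(key=cmp_to_key(lambda a, b: a.Compare(b))).
+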
+ def PathFromSourceTreeAndPath(self):
+ # Turn the object's sourceTree and path properties into a single flat
+ # string of a form comparable to the path parameter. If there's a
+ # sourceTree property other than "<group>", wrap it in $(...) for the
+ # comparison.
+ components = []
+ if self._properties["sourceTree"] != "<group>":
+ components.append("$(" + self._properties["sourceTree"] + ")")
+ if "path" in self._properties:
+ components.append(self._properties["path"])
+
+ if len(components) > 0:
+ return posixpath.join(*components)
+
+ return None
+
+ def FullPath(self):
+ # Returns a full path to self relative to the project file, or relative
+ # to some other source tree. Start with self, and walk up the chain of
+ # parents prepending their paths, if any, until no more parents are
+ # available (project-relative path) or until a path relative to some
+ # source tree is found.
+ xche = self
+ path = None
+ while isinstance(xche, XCHierarchicalElement) and (
+ path is None or (not path.startswith("/") and not path.startswith("$"))
+ ):
+ this_path = xche.PathFromSourceTreeAndPath()
+ if this_path is not None and path is not None:
+ path = posixpath.join(this_path, path)
+ elif this_path is not None:
+ path = this_path
+ xche = xche.parent
+
+ return path
+
+
+class PBXGroup(XCHierarchicalElement):
+ """
+ Attributes:
+ _children_by_path: Maps pathnames of children of this PBXGroup to the
+ actual child XCHierarchicalElement objects.
+ _variant_children_by_name_and_path: Maps (name, path) tuples of
+ PBXVariantGroup children to the actual child PBXVariantGroup objects.
+ """
+
+ _schema = XCHierarchicalElement._schema.copy()
+ _schema.update(
+ {
+ "children": [1, XCHierarchicalElement, 1, 1, []],
+ "name": [0, str, 0, 0],
+ "path": [0, str, 0, 0],
+ }
+ )
+
+ def __init__(self, properties=None, id=None, parent=None):
+ # super
+ XCHierarchicalElement.__init__(self, properties, id, parent)
+ self._children_by_path = {}
+ self._variant_children_by_name_and_path = {}
+ for child in self._properties.get("children", []):
+ self._AddChildToDicts(child)
+
+ def Hashables(self):
+ # super
+ hashables = XCHierarchicalElement.Hashables(self)
+
+ # It is not sufficient to just rely on name and parent to build a unique
+ # hashable: a node could have two child PBXGroups sharing a common name.
+ # To add entropy, the hashable is enhanced with the names of all its
+ # children.
+ for child in self._properties.get("children", []):
+ child_name = child.Name()
+ if child_name is not None:
+ hashables.append(child_name)
+
+ return hashables
+
+ def HashablesForChild(self):
+ # To avoid a circular reference the hashables used to compute a child id do
+ # not include the child names.
+ return XCHierarchicalElement.Hashables(self)
+
+ def _AddChildToDicts(self, child):
+ # Sets up this PBXGroup object's dicts to reference the child properly.
+ child_path = child.PathFromSourceTreeAndPath()
+ if child_path:
+ if child_path in self._children_by_path:
+ raise ValueError("Found multiple children with path " + child_path)
+ self._children_by_path[child_path] = child
+
+ if isinstance(child, PBXVariantGroup):
+ child_name = child._properties.get("name", None)
+ key = (child_name, child_path)
+ if key in self._variant_children_by_name_and_path:
+ raise ValueError(
+ "Found multiple PBXVariantGroup children with "
+ + "name "
+ + str(child_name)
+ + " and path "
+ + str(child_path)
+ )
+ self._variant_children_by_name_and_path[key] = child
+
+ def AppendChild(self, child):
+ # Callers should use this instead of calling
+ # AppendProperty('children', child) directly because this function
+ # maintains the group's dicts.
+ self.AppendProperty("children", child)
+ self._AddChildToDicts(child)
+
+ def GetChildByName(self, name):
+ # This is not currently optimized with a dict as GetChildByPath is because
+ # it has few callers. Most callers probably want GetChildByPath. This
+ # function is only useful to get children that have names but no paths,
+ # which is rare. The children of the main group ("Source", "Products",
+ # etc.) are pretty much the only case where this is likely to come up.
+ #
+ # TODO(mark): Maybe this should raise an error if more than one child is
+ # present with the same name.
+ if "children" not in self._properties:
+ return None
+
+ for child in self._properties["children"]:
+ if child.Name() == name:
+ return child
+
+ return None
+
+ def GetChildByPath(self, path):
+ if not path:
+ return None
+
+ if path in self._children_by_path:
+ return self._children_by_path[path]
+
+ return None
+
+ def GetChildByRemoteObject(self, remote_object):
+ # This method is a little bit esoteric. Given a remote_object, which
+ # should be a PBXFileReference in another project file, this method will
+ # return this group's PBXReferenceProxy object serving as a local proxy
+ # for the remote PBXFileReference.
+ #
+ # This function might benefit from the same dict optimization that
+ # GetChildByPath uses for some workloads, but profiling shows that it's
+ # not currently a problem.
+ if "children" not in self._properties:
+ return None
+
+ for child in self._properties["children"]:
+ if not isinstance(child, PBXReferenceProxy):
+ continue
+
+ container_proxy = child._properties["remoteRef"]
+ if container_proxy._properties["remoteGlobalIDString"] == remote_object:
+ return child
+
+ return None
+
+ def AddOrGetFileByPath(self, path, hierarchical):
+ """Returns an existing or new file reference corresponding to path.
+
+ If hierarchical is True, this method will create or use the necessary
+ hierarchical group structure corresponding to path. Otherwise, it will
+ look in and create an item in the current group only.
+
+ If an existing matching reference is found, it is returned, otherwise, a
+ new one will be created, added to the correct group, and returned.
+
+ If path identifies a directory by virtue of carrying a trailing slash,
+ this method returns a PBXFileReference of "folder" type. If path
+ identifies a variant, by virtue of it identifying a file inside a directory
+ with an ".lproj" extension, this method returns a PBXVariantGroup
+ containing the variant named by path, and possibly other variants. For
+ all other paths, a "normal" PBXFileReference will be returned.
+ """
+
+ # Adding or getting a directory? Directories end with a trailing slash.
+ is_dir = False
+ if path.endswith("/"):
+ is_dir = True
+ path = posixpath.normpath(path)
+ if is_dir:
+ path = path + "/"
+
+ # Adding or getting a variant? Variants are files inside directories
+ # with an ".lproj" extension. Xcode uses variants for localization. For
+ # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named
+ # MainMenu.nib inside path/to, and give it a variant named Language. In
+ # this example, grandparent would be set to path/to and parent_root would
+ # be set to Language.
+ variant_name = None
+ parent = posixpath.dirname(path)
+ grandparent = posixpath.dirname(parent)
+ parent_basename = posixpath.basename(parent)
+ (parent_root, parent_ext) = posixpath.splitext(parent_basename)
+ if parent_ext == ".lproj":
+ variant_name = parent_root
+ if grandparent == "":
+ grandparent = None
+
+ # Putting a directory inside a variant group is not currently supported.
+ assert not is_dir or variant_name is None
+
+ path_split = path.split(posixpath.sep)
+ if (
+ len(path_split) == 1
+ or ((is_dir or variant_name is not None) and len(path_split) == 2)
+ or not hierarchical
+ ):
+ # The PBXFileReference or PBXVariantGroup will be added to or gotten from
+ # this PBXGroup, no recursion necessary.
+ if variant_name is None:
+ # Add or get a PBXFileReference.
+ file_ref = self.GetChildByPath(path)
+ if file_ref is not None:
+ assert file_ref.__class__ == PBXFileReference
+ else:
+ file_ref = PBXFileReference({"path": path})
+ self.AppendChild(file_ref)
+ else:
+ # Add or get a PBXVariantGroup. The variant group name is the same
+ # as the basename (MainMenu.nib in the example above). grandparent
+ # specifies the path to the variant group itself, and path_split[-2:]
+ # is the path of the specific variant relative to its group.
+ variant_group_name = posixpath.basename(path)
+ variant_group_ref = self.AddOrGetVariantGroupByNameAndPath(
+ variant_group_name, grandparent
+ )
+ variant_path = posixpath.sep.join(path_split[-2:])
+ variant_ref = variant_group_ref.GetChildByPath(variant_path)
+ if variant_ref is not None:
+ assert variant_ref.__class__ == PBXFileReference
+ else:
+ variant_ref = PBXFileReference(
+ {"name": variant_name, "path": variant_path}
+ )
+ variant_group_ref.AppendChild(variant_ref)
+ # The caller is interested in the variant group, not the specific
+ # variant file.
+ file_ref = variant_group_ref
+ return file_ref
+ else:
+ # Hierarchical recursion. Add or get a PBXGroup corresponding to the
+ # outermost path component, and then recurse into it, chopping off that
+ # path component.
+ next_dir = path_split[0]
+ group_ref = self.GetChildByPath(next_dir)
+ if group_ref is not None:
+ assert group_ref.__class__ == PBXGroup
+ else:
+ group_ref = PBXGroup({"path": next_dir})
+ self.AppendChild(group_ref)
+ return group_ref.AddOrGetFileByPath(
+ posixpath.sep.join(path_split[1:]), hierarchical
+ )
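+
+ # A usage sketch (paths are illustrative): given a PBXGroup g,
+ #   g.AddOrGetFileByPath("a/b/fr.lproj/MainMenu.nib", True)
+ # creates (or reuses) PBXGroups for "a" and "b" and returns a
+ # PBXVariantGroup named "MainMenu.nib" holding a PBXFileReference
+ # variant named "fr" with path "fr.lproj/MainMenu.nib". A second call
+ # with "a/b/de.lproj/MainMenu.nib" returns the same PBXVariantGroup
+ # with a "de" variant added.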
+
+ def AddOrGetVariantGroupByNameAndPath(self, name, path):
+ """Returns an existing or new PBXVariantGroup for name and path.
+
+ If a PBXVariantGroup identified by the name and path arguments is already
+ present as a child of this object, it is returned. Otherwise, a new
+ PBXVariantGroup with the correct properties is created, added as a child,
+ and returned.
+
+ This method will generally be called by AddOrGetFileByPath, which knows
+ when to create a variant group based on the structure of the pathnames
+ passed to it.
+ """
+
+ key = (name, path)
+ if key in self._variant_children_by_name_and_path:
+ variant_group_ref = self._variant_children_by_name_and_path[key]
+ assert variant_group_ref.__class__ == PBXVariantGroup
+ return variant_group_ref
+
+ variant_group_properties = {"name": name}
+ if path is not None:
+ variant_group_properties["path"] = path
+ variant_group_ref = PBXVariantGroup(variant_group_properties)
+ self.AppendChild(variant_group_ref)
+
+ return variant_group_ref
+
+ def TakeOverOnlyChild(self, recurse=False):
+ """If this PBXGroup has only one child and it's also a PBXGroup, take
+ it over by making all of its children this object's children.
+
+ This function will continue to take over only-children as long as those
+ children are themselves PBXGroups. If there are three PBXGroups
+ representing a, b, and c, with c inside b and b inside a, and a and b
+ have no other children, this will result in a taking over both b and c,
+ forming a PBXGroup for a/b/c.
+
+ If recurse is True, this function will recurse into children and ask them
+ to collapse themselves by taking over only children as well. Assuming
+ an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
+ (d1, d2, and f are files, the rest are groups), recursion will result in
+ a group for a/b/c containing a group for d3/e.
+ """
+
+ # At this stage, check that child class types are PBXGroup exactly,
+ # instead of using isinstance. The only subclass of PBXGroup,
+ # PBXVariantGroup, should not participate in reparenting in the same way:
+ # reparenting by merging different object types would be wrong.
+ while (
+ len(self._properties["children"]) == 1
+ and self._properties["children"][0].__class__ == PBXGroup
+ ):
+ # Loop to take over the innermost only-child group possible.
+
+ child = self._properties["children"][0]
+
+ # Assume the child's properties, including its children. Save a copy
+ # of this object's old properties, because they'll still be needed.
+ # This object retains its existing id and parent attributes.
+ old_properties = self._properties
+ self._properties = child._properties
+ self._children_by_path = child._children_by_path
+
+ if (
+ "sourceTree" not in self._properties
+ or self._properties["sourceTree"] == "<group>"
+ ):
+ # The child was relative to its parent. Fix up the path. Note that
+ # children with a sourceTree other than "<group>" are not relative to
+ # their parents, so no path fix-up is needed in that case.
+ if "path" in old_properties:
+ if "path" in self._properties:
+ # Both the original parent and child have paths set.
+ self._properties["path"] = posixpath.join(
+ old_properties["path"], self._properties["path"]
+ )
+ else:
+ # Only the original parent has a path, use it.
+ self._properties["path"] = old_properties["path"]
+ if "sourceTree" in old_properties:
+ # The original parent had a sourceTree set, use it.
+ self._properties["sourceTree"] = old_properties["sourceTree"]
+
+ # If the original parent had a name set, keep using it. If the original
+ # parent didn't have a name but the child did, let the child's name
+ # live on. If the name attribute seems unnecessary now, get rid of it.
+ if "name" in old_properties and old_properties["name"] not in (
+ None,
+ self.Name(),
+ ):
+ self._properties["name"] = old_properties["name"]
+ if (
+ "name" in self._properties
+ and "path" in self._properties
+ and self._properties["name"] == self._properties["path"]
+ ):
+ del self._properties["name"]
+
+ # Notify all children of their new parent.
+ for child in self._properties["children"]:
+ child.parent = self
+
+ # If asked to recurse, recurse.
+ if recurse:
+ for child in self._properties["children"]:
+ if child.__class__ == PBXGroup:
+ child.TakeOverOnlyChild(recurse)
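+
+ # For example (a sketch): a group with path "a" whose only child is a
+ # group "b" whose only child is in turn a group "c" collapses into a
+ # single group with path "a/b/c"; with recurse=True, nested only-child
+ # groups deeper in the tree collapse the same way.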
+
+ def SortGroup(self):
+ self._properties["children"] = sorted(
+ self._properties["children"], key=cmp_to_key(lambda x, y: x.Compare(y))
+ )
+
+ # Recurse.
+ for child in self._properties["children"]:
+ if isinstance(child, PBXGroup):
+ child.SortGroup()
+
+
+class XCFileLikeElement(XCHierarchicalElement):
+ # Abstract base for objects that can be used as the fileRef property of
+ # PBXBuildFile.
+
+ def PathHashables(self):
+ # A PBXBuildFile that refers to this object will call this method to
+ # obtain additional hashables specific to this XCFileLikeElement. Don't
+ # just use this object's hashables; they're not specific and unique enough
+ # on their own (without access to the parent hashables). Instead, provide
+ # hashables that identify this object by path by getting its hashables as
+ # well as the hashables of ancestor XCHierarchicalElement objects.
+
+ hashables = []
+ xche = self
+ while isinstance(xche, XCHierarchicalElement):
+ xche_hashables = xche.Hashables()
+ for index, xche_hashable in enumerate(xche_hashables):
+ hashables.insert(index, xche_hashable)
+ xche = xche.parent
+ return hashables
+
+
+class XCContainerPortal(XCObject):
+ # Abstract base for objects that can be used as the containerPortal property
+ # of PBXContainerItemProxy.
+ pass
+
+
+class XCRemoteObject(XCObject):
+ # Abstract base for objects that can be used as the remoteGlobalIDString
+ # property of PBXContainerItemProxy.
+ pass
+
+
+class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
+ _schema = XCFileLikeElement._schema.copy()
+ _schema.update(
+ {
+ "explicitFileType": [0, str, 0, 0],
+ "lastKnownFileType": [0, str, 0, 0],
+ "name": [0, str, 0, 0],
+ "path": [0, str, 0, 1],
+ }
+ )
+
+ # Weird output rules for PBXFileReference.
+ _should_print_single_line = True
+ # super
+ _encode_transforms = XCFileLikeElement._alternate_encode_transforms
+
+ def __init__(self, properties=None, id=None, parent=None):
+ # super
+ XCFileLikeElement.__init__(self, properties, id, parent)
+ if "path" in self._properties and self._properties["path"].endswith("/"):
+ self._properties["path"] = self._properties["path"][:-1]
+ is_dir = True
+ else:
+ is_dir = False
+
+ if (
+ "path" in self._properties
+ and "lastKnownFileType" not in self._properties
+ and "explicitFileType" not in self._properties
+ ):
+ # TODO(mark): This is the replacement for a replacement for a quick hack.
+ # It is no longer incredibly sucky, but this list needs to be extended.
+ extension_map = {
+ "a": "archive.ar",
+ "app": "wrapper.application",
+ "bdic": "file",
+ "bundle": "wrapper.cfbundle",
+ "c": "sourcecode.c.c",
+ "cc": "sourcecode.cpp.cpp",
+ "cpp": "sourcecode.cpp.cpp",
+ "css": "text.css",
+ "cxx": "sourcecode.cpp.cpp",
+ "dart": "sourcecode",
+ "dylib": "compiled.mach-o.dylib",
+ "framework": "wrapper.framework",
+ "gyp": "sourcecode",
+ "gypi": "sourcecode",
+ "h": "sourcecode.c.h",
+ "hxx": "sourcecode.cpp.h",
+ "icns": "image.icns",
+ "java": "sourcecode.java",
+ "js": "sourcecode.javascript",
+ "kext": "wrapper.kext",
+ "m": "sourcecode.c.objc",
+ "mm": "sourcecode.cpp.objcpp",
+ "nib": "wrapper.nib",
+ "o": "compiled.mach-o.objfile",
+ "pdf": "image.pdf",
+ "pl": "text.script.perl",
+ "plist": "text.plist.xml",
+ "pm": "text.script.perl",
+ "png": "image.png",
+ "py": "text.script.python",
+ "r": "sourcecode.rez",
+ "rez": "sourcecode.rez",
+ "s": "sourcecode.asm",
+ "storyboard": "file.storyboard",
+ "strings": "text.plist.strings",
+ "swift": "sourcecode.swift",
+ "ttf": "file",
+ "xcassets": "folder.assetcatalog",
+ "xcconfig": "text.xcconfig",
+ "xcdatamodel": "wrapper.xcdatamodel",
+ "xcdatamodeld": "wrapper.xcdatamodeld",
+ "xib": "file.xib",
+ "y": "sourcecode.yacc",
+ }
+
+ prop_map = {
+ "dart": "explicitFileType",
+ "gyp": "explicitFileType",
+ "gypi": "explicitFileType",
+ }
+
+ if is_dir:
+ file_type = "folder"
+ prop_name = "lastKnownFileType"
+ else:
+ basename = posixpath.basename(self._properties["path"])
+ (root, ext) = posixpath.splitext(basename)
+ # Check the map using a lowercase extension.
+ # TODO(mark): Maybe it should try with the original case first and fall
+ # back to lowercase, in case there are any instances where case
+ # matters. There currently aren't.
+ if ext != "":
+ ext = ext[1:].lower()
+
+ # TODO(mark): "text" is the default value, but "file" is appropriate
+ # for unrecognized files not containing text. Xcode seems to choose
+ # based on content.
+ file_type = extension_map.get(ext, "text")
+ prop_name = prop_map.get(ext, "lastKnownFileType")
+
+ self._properties[prop_name] = file_type
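+
+ # For example (a sketch; paths are illustrative):
+ #   PBXFileReference({"path": "foo.cc"})   # lastKnownFileType
+ #                                          # "sourcecode.cpp.cpp"
+ #   PBXFileReference({"path": "foo.gyp"})  # explicitFileType "sourcecode"
+ #   PBXFileReference({"path": "bar/"})     # lastKnownFileType "folder"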
+
+
+class PBXVariantGroup(PBXGroup, XCFileLikeElement):
+ """PBXVariantGroup is used by Xcode to represent localizations."""
+
+ # No additions to the schema relative to PBXGroup.
+ pass
+
+
+# PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined below
+# because it uses PBXContainerItemProxy, defined below.
+
+
+class XCBuildConfiguration(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update(
+ {
+ "baseConfigurationReference": [0, PBXFileReference, 0, 0],
+ "buildSettings": [0, dict, 0, 1, {}],
+ "name": [0, str, 0, 1],
+ }
+ )
+
+ def HasBuildSetting(self, key):
+ return key in self._properties["buildSettings"]
+
+ def GetBuildSetting(self, key):
+ return self._properties["buildSettings"][key]
+
+ def SetBuildSetting(self, key, value):
+ # TODO(mark): If a list, copy?
+ self._properties["buildSettings"][key] = value
+
+ def AppendBuildSetting(self, key, value):
+ if key not in self._properties["buildSettings"]:
+ self._properties["buildSettings"][key] = []
+ self._properties["buildSettings"][key].append(value)
+
+ def DelBuildSetting(self, key):
+ if key in self._properties["buildSettings"]:
+ del self._properties["buildSettings"][key]
+
+ def SetBaseConfiguration(self, value):
+ self._properties["baseConfigurationReference"] = value
+
+
+class XCConfigurationList(XCObject):
+ # _configs is the default list of configurations.
+ _configs = [
+ XCBuildConfiguration({"name": "Debug"}),
+ XCBuildConfiguration({"name": "Release"}),
+ ]
+
+ _schema = XCObject._schema.copy()
+ _schema.update(
+ {
+ "buildConfigurations": [1, XCBuildConfiguration, 1, 1, _configs],
+ "defaultConfigurationIsVisible": [0, int, 0, 1, 1],
+ "defaultConfigurationName": [0, str, 0, 1, "Release"],
+ }
+ )
+
+ def Name(self):
+ return (
+ "Build configuration list for "
+ + self.parent.__class__.__name__
+ + ' "'
+ + self.parent.Name()
+ + '"'
+ )
+
+ def ConfigurationNamed(self, name):
+ """Convenience accessor to obtain an XCBuildConfiguration by name."""
+ for configuration in self._properties["buildConfigurations"]:
+ if configuration._properties["name"] == name:
+ return configuration
+
+ raise KeyError(name)
+
+ def DefaultConfiguration(self):
+ """Convenience accessor to obtain the default XCBuildConfiguration."""
+ return self.ConfigurationNamed(self._properties["defaultConfigurationName"])
+
+ def HasBuildSetting(self, key):
+ """Determines the state of a build setting in all XCBuildConfiguration
+ child objects.
+
+ If all child objects have key in their build settings, and the value is the
+ same in all child objects, returns 1.
+
+ If no child objects have the key in their build settings, returns 0.
+
+ If some, but not all, child objects have the key in their build settings,
+ or if any children have different values for the key, returns -1.
+ """
+
+ has = None
+ value = None
+ for configuration in self._properties["buildConfigurations"]:
+ configuration_has = configuration.HasBuildSetting(key)
+ if has is None:
+ has = configuration_has
+ elif has != configuration_has:
+ return -1
+
+ if configuration_has:
+ configuration_value = configuration.GetBuildSetting(key)
+ if value is None:
+ value = configuration_value
+ elif value != configuration_value:
+ return -1
+
+ if not has:
+ return 0
+
+ return 1
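+
+ # Sketch of the tri-state result (illustrative values): if both Debug
+ # and Release set "OPT" to "0", HasBuildSetting("OPT") returns 1; if
+ # only one of them sets it, or their values differ, it returns -1; if
+ # neither sets it, it returns 0.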
+
+ def GetBuildSetting(self, key):
+ """Gets the build setting for key.
+
+ All child XCBuildConfiguration objects must have the same value set for
+ the setting, or a ValueError will be raised.
+ """
+
+ # TODO(mark): This is wrong for build settings that are lists. The list
+ # contents should be compared (and a list copy returned?)
+
+ value = None
+ for configuration in self._properties["buildConfigurations"]:
+ configuration_value = configuration.GetBuildSetting(key)
+ if value is None:
+ value = configuration_value
+ else:
+ if value != configuration_value:
+ raise ValueError("Variant values for " + key)
+
+ return value
+
+ def SetBuildSetting(self, key, value):
+ """Sets the build setting for key to value in all child
+ XCBuildConfiguration objects.
+ """
+
+ for configuration in self._properties["buildConfigurations"]:
+ configuration.SetBuildSetting(key, value)
+
+ def AppendBuildSetting(self, key, value):
+ """Appends value to the build setting for key, which is treated as a list,
+ in all child XCBuildConfiguration objects.
+ """
+
+ for configuration in self._properties["buildConfigurations"]:
+ configuration.AppendBuildSetting(key, value)
+
+ def DelBuildSetting(self, key):
+ """Deletes the build setting key from all child XCBuildConfiguration
+ objects.
+ """
+
+ for configuration in self._properties["buildConfigurations"]:
+ configuration.DelBuildSetting(key)
+
+ def SetBaseConfiguration(self, value):
+ """Sets the build configuration in all child XCBuildConfiguration objects.
+ """
+
+ for configuration in self._properties["buildConfigurations"]:
+ configuration.SetBaseConfiguration(value)
+
+
+class PBXBuildFile(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update(
+ {
+ "fileRef": [0, XCFileLikeElement, 0, 1],
+ "settings": [0, str, 0, 0], # hack, it's a dict
+ }
+ )
+
+ # Weird output rules for PBXBuildFile.
+ _should_print_single_line = True
+ _encode_transforms = XCObject._alternate_encode_transforms
+
+ def Name(self):
+ # Example: "main.cc in Sources"
+ return self._properties["fileRef"].Name() + " in " + self.parent.Name()
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # It is not sufficient to just rely on Name() to get the
+ # XCFileLikeElement's name, because that is not a complete pathname.
+ # PathHashables returns hashables unique enough that no two
+ # PBXBuildFiles should wind up with the same set of hashables, unless
+ # someone adds the same file multiple times to the same target. That
+ # would be considered invalid anyway.
+ hashables.extend(self._properties["fileRef"].PathHashables())
+
+ return hashables
+
+
+class XCBuildPhase(XCObject):
+ """Abstract base for build phase classes. Not represented in a project
+ file.
+
+ Attributes:
+ _files_by_path: A dict mapping each path of a child in the files list
+ (keys) to the corresponding PBXBuildFile children (values).
+ _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
+ to the corresponding PBXBuildFile children (values).
+ """
+
+ # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
+ # actually have a "files" list. XCBuildPhase should not have "files" but
+ # another abstract subclass of it should provide this, and concrete build
+ # phase types that do have "files" lists should be derived from that new
+ # abstract subclass. XCBuildPhase should only provide buildActionMask and
+ # runOnlyForDeploymentPostprocessing, and not files or the various
+ # file-related methods and attributes.
+
+ _schema = XCObject._schema.copy()
+ _schema.update(
+ {
+ "buildActionMask": [0, int, 0, 1, 0x7FFFFFFF],
+ "files": [1, PBXBuildFile, 1, 1, []],
+ "runOnlyForDeploymentPostprocessing": [0, int, 0, 1, 0],
+ }
+ )
+
+ def __init__(self, properties=None, id=None, parent=None):
+ # super
+ XCObject.__init__(self, properties, id, parent)
+
+ self._files_by_path = {}
+ self._files_by_xcfilelikeelement = {}
+ for pbxbuildfile in self._properties.get("files", []):
+ self._AddBuildFileToDicts(pbxbuildfile)
+
+ def FileGroup(self, path):
+ # Subclasses must override this by returning a two-element tuple. The
+ # first item in the tuple should be the PBXGroup to which "path" should be
+ # added, either as a child or deeper descendant. The second item should
+ # be a boolean indicating whether files should be added into hierarchical
+ # groups or one single flat group.
+ raise NotImplementedError(self.__class__.__name__ + " must implement FileGroup")
+
+ def _AddPathToDict(self, pbxbuildfile, path):
+ """Adds path to the dict tracking paths belonging to this build phase.
+
+ If the path is already a member of this build phase, raises an exception.
+ """
+
+ if path in self._files_by_path:
+ raise ValueError("Found multiple build files with path " + path)
+ self._files_by_path[path] = pbxbuildfile
+
+ def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
+ """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.
+
+ If path is specified, then it is the path that is being added to the
+ phase, and pbxbuildfile must contain either a PBXFileReference directly
+ referencing that path, or it must contain a PBXVariantGroup that itself
+ contains a PBXFileReference referencing the path.
+
+ If path is not specified, either the PBXFileReference's path or the paths
+ of all children of the PBXVariantGroup are taken as being added to the
+ phase.
+
+ If the path is already present in the phase, raises an exception.
+
+ If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
+ is already present in the phase, referenced by a different PBXBuildFile
+ object, raises an exception. This does not raise an exception when
+ a PBXFileReference or PBXVariantGroup reappears and is referenced by the
+ same PBXBuildFile that has already introduced them, because in the case
+ of PBXVariantGroup objects, they may correspond to multiple paths that are
+ not all added simultaneously. When this situation occurs, the path needs
+ to be added to _files_by_path, but nothing needs to change in
+ _files_by_xcfilelikeelement, and the caller should have avoided adding
+ the PBXBuildFile if it is already present in the list of children.
+ """
+
+ xcfilelikeelement = pbxbuildfile._properties["fileRef"]
+
+ paths = []
+ if path is not None:
+ # It's best when the caller provides the path.
+ if isinstance(xcfilelikeelement, PBXVariantGroup):
+ paths.append(path)
+ else:
+ # If the caller didn't provide a path, there can be either multiple
+ # paths (PBXVariantGroup) or one.
+ if isinstance(xcfilelikeelement, PBXVariantGroup):
+ for variant in xcfilelikeelement._properties["children"]:
+ paths.append(variant.FullPath())
+ else:
+ paths.append(xcfilelikeelement.FullPath())
+
+ # Add the paths first, because if something's going to raise, the
+ # messages provided by _AddPathToDict are more useful owing to its
+ # having access to a real pathname and not just an object's Name().
+ for a_path in paths:
+ self._AddPathToDict(pbxbuildfile, a_path)
+
+ # If another PBXBuildFile references this XCFileLikeElement, there's a
+ # problem.
+ if (
+ xcfilelikeelement in self._files_by_xcfilelikeelement
+ and self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile
+ ):
+ raise ValueError(
+ "Found multiple build files for " + xcfilelikeelement.Name()
+ )
+ self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
+
+ def AppendBuildFile(self, pbxbuildfile, path=None):
+ # Callers should use this instead of calling
+ # AppendProperty('files', pbxbuildfile) directly because this function
+ # maintains the object's dicts. Better yet, callers can just call AddFile
+ # with a pathname and not worry about building their own PBXBuildFile
+ # objects.
+ self.AppendProperty("files", pbxbuildfile)
+ self._AddBuildFileToDicts(pbxbuildfile, path)
+
+ def AddFile(self, path, settings=None):
+ (file_group, hierarchical) = self.FileGroup(path)
+ file_ref = file_group.AddOrGetFileByPath(path, hierarchical)
+
+ if file_ref in self._files_by_xcfilelikeelement and isinstance(
+ file_ref, PBXVariantGroup
+ ):
+ # There's already a PBXBuildFile in this phase corresponding to the
+ # PBXVariantGroup. path just provides a new variant that belongs to
+ # the group. Add the path to the dict.
+ pbxbuildfile = self._files_by_xcfilelikeelement[file_ref]
+ self._AddBuildFileToDicts(pbxbuildfile, path)
+ else:
+ # Add a new PBXBuildFile to get file_ref into the phase.
+ if settings is None:
+ pbxbuildfile = PBXBuildFile({"fileRef": file_ref})
+ else:
+ pbxbuildfile = PBXBuildFile({"fileRef": file_ref, "settings": settings})
+ self.AppendBuildFile(pbxbuildfile, path)
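+
+ # A usage sketch (paths are illustrative): for a concrete phase such as
+ # a PBXResourcesBuildPhase attached to a project,
+ #   phase.AddFile("res/fr.lproj/MainMenu.nib")
+ # creates one PBXBuildFile for the variant group; a later
+ #   phase.AddFile("res/de.lproj/MainMenu.nib")
+ # only records the new path against that same PBXBuildFile.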
+
+
+class PBXHeadersBuildPhase(XCBuildPhase):
+ # No additions to the schema relative to XCBuildPhase.
+
+ def Name(self):
+ return "Headers"
+
+ def FileGroup(self, path):
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+
+
+class PBXResourcesBuildPhase(XCBuildPhase):
+ # No additions to the schema relative to XCBuildPhase.
+
+ def Name(self):
+ return "Resources"
+
+ def FileGroup(self, path):
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+
+
+class PBXSourcesBuildPhase(XCBuildPhase):
+ # No additions to the schema relative to XCBuildPhase.
+
+ def Name(self):
+ return "Sources"
+
+ def FileGroup(self, path):
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+
+
+class PBXFrameworksBuildPhase(XCBuildPhase):
+ # No additions to the schema relative to XCBuildPhase.
+
+ def Name(self):
+ return "Frameworks"
+
+ def FileGroup(self, path):
+ (root, ext) = posixpath.splitext(path)
+ if ext != "":
+ ext = ext[1:].lower()
+ if ext == "o":
+ # .o files are added to Xcode Frameworks phases, but conceptually aren't
+ # frameworks, they're more like sources or intermediates. Redirect them
+ # to show up in one of those other groups.
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+ else:
+ return (self.PBXProjectAncestor().FrameworksGroup(), False)
+
+
+class PBXShellScriptBuildPhase(XCBuildPhase):
+ _schema = XCBuildPhase._schema.copy()
+ _schema.update(
+ {
+ "inputPaths": [1, str, 0, 1, []],
+ "name": [0, str, 0, 0],
+ "outputPaths": [1, str, 0, 1, []],
+ "shellPath": [0, str, 0, 1, "/bin/sh"],
+ "shellScript": [0, str, 0, 1],
+ "showEnvVarsInLog": [0, int, 0, 0],
+ }
+ )
+
+ def Name(self):
+ if "name" in self._properties:
+ return self._properties["name"]
+
+ return "ShellScript"
+
+
+class PBXCopyFilesBuildPhase(XCBuildPhase):
+ _schema = XCBuildPhase._schema.copy()
+ _schema.update(
+ {
+ "dstPath": [0, str, 0, 1],
+ "dstSubfolderSpec": [0, int, 0, 1],
+ "name": [0, str, 0, 0],
+ }
+ )
+
+ # path_tree_re matches "$(DIR)/path", "$(DIR)/$(DIR2)/path" or just "$(DIR)".
+ # Match group 1 is "DIR"; group 3 is "path", "$(DIR2)", "$(DIR2)/path",
+ # or None. If group 3 is "path", group 4 will be None; otherwise group 4
+ # is "DIR2" and group 6 is "path".
+ path_tree_re = re.compile(r"^\$\((.*?)\)(/(\$\((.*?)\)(/(.*)|)|(.*)|)|)$")
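+
+ # For example (a sketch): matching
+ # "$(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH)/a.bundle" yields group 1
+ # "BUILT_PRODUCTS_DIR", group 3 "$(PLUGINS_FOLDER_PATH)/a.bundle",
+ # group 4 "PLUGINS_FOLDER_PATH", and group 6 "a.bundle". Matching
+ # "$(SRCROOT)/sub" yields group 1 "SRCROOT", group 3 "sub", and group 4
+ # None.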
+
+ # path_tree_{first,second}_to_subfolder map names of Xcode variables to the
+ # associated dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase
+ # object.
+ path_tree_first_to_subfolder = {
+ # Types that can be chosen via the Xcode UI.
+ "BUILT_PRODUCTS_DIR": 16, # Products Directory
+ "BUILT_FRAMEWORKS_DIR": 10, # Not an official Xcode macro.
+ # Existed before support for the
+ # names below was added. Maps to
+ # "Frameworks".
+ }
+
+ path_tree_second_to_subfolder = {
+ "WRAPPER_NAME": 1, # Wrapper
+ # Although Xcode's friendly name is "Executables", the destination
+ # is demonstrably the value of the build setting
+ # EXECUTABLE_FOLDER_PATH not EXECUTABLES_FOLDER_PATH.
+ "EXECUTABLE_FOLDER_PATH": 6, # Executables.
+ "UNLOCALIZED_RESOURCES_FOLDER_PATH": 7, # Resources
+ "JAVA_FOLDER_PATH": 15, # Java Resources
+ "FRAMEWORKS_FOLDER_PATH": 10, # Frameworks
+ "SHARED_FRAMEWORKS_FOLDER_PATH": 11, # Shared Frameworks
+ "SHARED_SUPPORT_FOLDER_PATH": 12, # Shared Support
+ "PLUGINS_FOLDER_PATH": 13, # PlugIns
+ # For XPC Services, Xcode sets both dstPath and dstSubfolderSpec.
+ # Note that it re-uses the BUILT_PRODUCTS_DIR value for
+ # dstSubfolderSpec. dstPath is set below.
+ "XPCSERVICES_FOLDER_PATH": 16, # XPC Services.
+ }
+
+ def Name(self):
+ if "name" in self._properties:
+ return self._properties["name"]
+
+ return "CopyFiles"
+
+ def FileGroup(self, path):
+ return self.PBXProjectAncestor().RootGroupForPath(path)
+
+ def SetDestination(self, path):
+ """Set the dstSubfolderSpec and dstPath properties from path.
+
+ path may be specified in the same notation used for XCHierarchicalElements,
+ specifically, "$(DIR)/path".
+ """
+
+ path_tree_match = self.path_tree_re.search(path)
+ if path_tree_match:
+ path_tree = path_tree_match.group(1)
+ if path_tree in self.path_tree_first_to_subfolder:
+ subfolder = self.path_tree_first_to_subfolder[path_tree]
+ relative_path = path_tree_match.group(3)
+ if relative_path is None:
+ relative_path = ""
+
+ if subfolder == 16 and path_tree_match.group(4) is not None:
+ # BUILT_PRODUCTS_DIR (16) is the first element in a path whose
+ # second element is possibly one of the variable names in
+ # path_tree_second_to_subfolder. Xcode sets the values of all these
+ # variables to relative paths so .gyp files must prefix them with
+ # BUILT_PRODUCTS_DIR, e.g.
+ # $(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH). Then
+ # xcode_emulation.py can export these variables with the same values
+ # as Xcode yet make & ninja files can determine the absolute path
+ # to the target. Xcode uses the dstSubfolderSpec value set here
+ # to determine the full path.
+ #
+ # An alternative of xcode_emulation.py setting the values to
+ # absolute paths when exporting these variables has been
+ # ruled out because then the values would be different
+ # depending on the build tool.
+ #
+ # Another alternative is to invent new names for the variables used
+ # to match to the subfolder indices in the second table. .gyp files
+ # then will not need to prepend $(BUILT_PRODUCTS_DIR) because
+ # xcode_emulation.py can set the values of those variables to
+ # the absolute paths when exporting. This is possibly the thinking
+ # behind BUILT_FRAMEWORKS_DIR which is used in exactly this manner.
+ #
+ # Requiring the BUILT_PRODUCTS_DIR prefix has been chosen because the
+ # same notation could be used to specify destinations in .gyp files
+ # that pre-date this addition to GYP; however, those files would only
+ # work with the Xcode generator. The previous version of
+ # xcode_emulation.py did not export these variables, so such files will
+ # get the benefit of the Xcode UI showing the proper destination name
+ # simply by regenerating the projects with this version of GYP.
+ path_tree = path_tree_match.group(4)
+ relative_path = path_tree_match.group(6)
+ separator = "/"
+
+ if path_tree in self.path_tree_second_to_subfolder:
+ subfolder = self.path_tree_second_to_subfolder[path_tree]
+ if relative_path is None:
+ relative_path = ""
+ separator = ""
+ if path_tree == "XPCSERVICES_FOLDER_PATH":
+ relative_path = (
+ "$(CONTENTS_FOLDER_PATH)/XPCServices"
+ + separator
+ + relative_path
+ )
+ else:
+ # subfolder = 16 from above
+ # The second element of the path is an unrecognized variable.
+ # Include it and any remaining elements in relative_path.
+ relative_path = path_tree_match.group(3)
+
+ else:
+ # The path starts with an unrecognized Xcode variable
+ # name like $(SRCROOT). Xcode will still handle this
+ # as an "absolute path" that starts with the variable.
+ subfolder = 0
+ relative_path = path
+ elif path.startswith("/"):
+ # Special case. Absolute paths are in dstSubfolderSpec 0.
+ subfolder = 0
+ relative_path = path[1:]
+ else:
+ raise ValueError(
+ f"Can't use path {path} in a {self.__class__.__name__}"
+ )
+
+ self._properties["dstPath"] = relative_path
+ self._properties["dstSubfolderSpec"] = subfolder
+
+
+class PBXBuildRule(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update(
+ {
+ "compilerSpec": [0, str, 0, 1],
+ "filePatterns": [0, str, 0, 0],
+ "fileType": [0, str, 0, 1],
+ "isEditable": [0, int, 0, 1, 1],
+ "outputFiles": [1, str, 0, 1, []],
+ "script": [0, str, 0, 0],
+ }
+ )
+
+ def Name(self):
+ # Not very inspired, but it's what Xcode uses.
+ return self.__class__.__name__
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # Use the hashables of the weak objects that this object refers to.
+ hashables.append(self._properties["fileType"])
+ if "filePatterns" in self._properties:
+ hashables.append(self._properties["filePatterns"])
+ return hashables
+
+
+class PBXContainerItemProxy(XCObject):
+ # When referencing an item in this project file, containerPortal is the
+ # PBXProject root object of this project file. When referencing an item in
+ # another project file, containerPortal is a PBXFileReference identifying
+ # the other project file.
+ #
+ # When serving as a proxy to an XCTarget (in this project file or another),
+ # proxyType is 1. When serving as a proxy to a PBXFileReference (in another
+ # project file), proxyType is 2. Type 2 is used for references to the
+ # products of the other project file's targets.
+ #
+ # Xcode is weird about remoteGlobalIDString. Usually, it's printed without
+ # a comment, indicating that it's tracked internally simply as a string, but
+ # sometimes it's printed with a comment (usually when the object is initially
+ # created), indicating that it's tracked as a project file object at least
+ # sometimes. This module always tracks it as an object, but contains a hack
+ # to prevent it from printing the comment in the project file output. See
+ # _XCKVPrint.
+ _schema = XCObject._schema.copy()
+ _schema.update(
+ {
+ "containerPortal": [0, XCContainerPortal, 0, 1],
+ "proxyType": [0, int, 0, 1],
+ "remoteGlobalIDString": [0, XCRemoteObject, 0, 1],
+ "remoteInfo": [0, str, 0, 1],
+ }
+ )
+
+ def __repr__(self):
+ props = self._properties
+ name = "{}.gyp:{}".format(props["containerPortal"].Name(), props["remoteInfo"])
+ return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>"
+
+ def Name(self):
+ # Admittedly not the best name, but it's what Xcode uses.
+ return self.__class__.__name__
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # Use the hashables of the weak objects that this object refers to.
+ hashables.extend(self._properties["containerPortal"].Hashables())
+ hashables.extend(self._properties["remoteGlobalIDString"].Hashables())
+ return hashables
+
+
+class PBXTargetDependency(XCObject):
+ # The "target" property accepts an XCTarget object, and obviously not
+ # NoneType. But XCTarget is defined below, so it can't be put into the
+ # schema yet. The definition of PBXTargetDependency can't be moved below
+ # XCTarget because XCTarget's own schema references PBXTargetDependency.
+ # Python doesn't deal well with this circular relationship, and doesn't have
+ # a real way to do forward declarations. To work around, the type of
+ # the "target" property is reset below, after XCTarget is defined.
+ #
+ # At least one of "name" and "target" is required.
+ _schema = XCObject._schema.copy()
+ _schema.update(
+ {
+ "name": [0, str, 0, 0],
+ "target": [0, None.__class__, 0, 0],
+ "targetProxy": [0, PBXContainerItemProxy, 1, 1],
+ }
+ )
+
+ def __repr__(self):
+ name = self._properties.get("name") or self._properties["target"].Name()
+ return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>"
+
+ def Name(self):
+ # Admittedly not the best name, but it's what Xcode uses.
+ return self.__class__.__name__
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # Use the hashables of the weak objects that this object refers to.
+ hashables.extend(self._properties["targetProxy"].Hashables())
+ return hashables
+
+
+class PBXReferenceProxy(XCFileLikeElement):
+ _schema = XCFileLikeElement._schema.copy()
+ _schema.update(
+ {
+ "fileType": [0, str, 0, 1],
+ "path": [0, str, 0, 1],
+ "remoteRef": [0, PBXContainerItemProxy, 1, 1],
+ }
+ )
+
+
+class XCTarget(XCRemoteObject):
+ # An XCTarget is really just an XCObject; the XCRemoteObject thing is
+ # just to allow an XCTarget to be used in the remoteGlobalIDString
+ # property of PBXContainerItemProxy.
+ #
+ # Setting a "name" property at instantiation may also affect "productName",
+ # which may in turn affect the "PRODUCT_NAME" build setting in children of
+ # "buildConfigurationList". See __init__ below.
+ _schema = XCRemoteObject._schema.copy()
+ _schema.update(
+ {
+ "buildConfigurationList": [
+ 0,
+ XCConfigurationList,
+ 1,
+ 1,
+ XCConfigurationList(),
+ ],
+ "buildPhases": [1, XCBuildPhase, 1, 1, []],
+ "dependencies": [1, PBXTargetDependency, 1, 1, []],
+ "name": [0, str, 0, 1],
+ "productName": [0, str, 0, 1],
+ }
+ )
+
+ def __init__(
+ self,
+ properties=None,
+ id=None,
+ parent=None,
+ force_outdir=None,
+ force_prefix=None,
+ force_extension=None,
+ ):
+ # super
+ XCRemoteObject.__init__(self, properties, id, parent)
+
+ # Set up additional defaults not expressed in the schema. If a "name"
+ # property was supplied, set "productName" if it is not present. Also set
+ # the "PRODUCT_NAME" build setting in each configuration, but only if
+ # the setting is not present in any build configuration.
+ if "name" in self._properties:
+ if "productName" not in self._properties:
+ self.SetProperty("productName", self._properties["name"])
+
+ if "productName" in self._properties:
+ if "buildConfigurationList" in self._properties:
+ configs = self._properties["buildConfigurationList"]
+ if configs.HasBuildSetting("PRODUCT_NAME") == 0:
+ configs.SetBuildSetting(
+ "PRODUCT_NAME", self._properties["productName"]
+ )
+
+ def AddDependency(self, other):
+ pbxproject = self.PBXProjectAncestor()
+ other_pbxproject = other.PBXProjectAncestor()
+ if pbxproject == other_pbxproject:
+ # Add a dependency to another target in the same project file.
+ container = PBXContainerItemProxy(
+ {
+ "containerPortal": pbxproject,
+ "proxyType": 1,
+ "remoteGlobalIDString": other,
+ "remoteInfo": other.Name(),
+ }
+ )
+ dependency = PBXTargetDependency(
+ {"target": other, "targetProxy": container}
+ )
+ self.AppendProperty("dependencies", dependency)
+ else:
+ # Add a dependency to a target in a different project file.
+ other_project_ref = pbxproject.AddOrGetProjectReference(other_pbxproject)[1]
+ container = PBXContainerItemProxy(
+ {
+ "containerPortal": other_project_ref,
+ "proxyType": 1,
+ "remoteGlobalIDString": other,
+ "remoteInfo": other.Name(),
+ }
+ )
+ dependency = PBXTargetDependency(
+ {"name": other.Name(), "targetProxy": container}
+ )
+ self.AppendProperty("dependencies", dependency)
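+
+ # A usage sketch (target names are illustrative): app.AddDependency(lib)
+ # builds a PBXContainerItemProxy and a PBXTargetDependency; when lib
+ # lives in another project file, the proxy's containerPortal is the
+ # ProjectRef PBXFileReference returned by AddOrGetProjectReference.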
+
+ # Proxy all of these through to the build configuration list.
+
+ def ConfigurationNamed(self, name):
+ return self._properties["buildConfigurationList"].ConfigurationNamed(name)
+
+ def DefaultConfiguration(self):
+ return self._properties["buildConfigurationList"].DefaultConfiguration()
+
+ def HasBuildSetting(self, key):
+ return self._properties["buildConfigurationList"].HasBuildSetting(key)
+
+ def GetBuildSetting(self, key):
+ return self._properties["buildConfigurationList"].GetBuildSetting(key)
+
+ def SetBuildSetting(self, key, value):
+ return self._properties["buildConfigurationList"].SetBuildSetting(key, value)
+
+ def AppendBuildSetting(self, key, value):
+ return self._properties["buildConfigurationList"].AppendBuildSetting(key, value)
+
+ def DelBuildSetting(self, key):
+ return self._properties["buildConfigurationList"].DelBuildSetting(key)
+
+
+# Redefine the type of the "target" property. See PBXTargetDependency._schema
+# above.
+PBXTargetDependency._schema["target"][1] = XCTarget
+
+
+class PBXNativeTarget(XCTarget):
+ # buildPhases is overridden in the schema to be able to set defaults.
+ #
+ # NOTE: Contrary to most objects, it is advisable to set parent when
+ # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject
+ # object. A parent reference is required for a PBXNativeTarget during
+ # construction to be able to set up the target defaults for productReference,
+ # because a PBXBuildFile object must be created for the target and it must
+ # be added to the PBXProject's mainGroup hierarchy.
+ _schema = XCTarget._schema.copy()
+ _schema.update(
+ {
+ "buildPhases": [
+ 1,
+ XCBuildPhase,
+ 1,
+ 1,
+ [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()],
+ ],
+ "buildRules": [1, PBXBuildRule, 1, 1, []],
+ "productReference": [0, PBXFileReference, 0, 1],
+ "productType": [0, str, 0, 1],
+ }
+ )
+
+ # Mapping from Xcode product-types to settings. The settings are:
+ # filetype: used for explicitFileType in the project file
+ # prefix: the prefix for the file name
+ # suffix: the suffix for the file name
+ _product_filetypes = {
+ "com.apple.product-type.application": ["wrapper.application", "", ".app"],
+ "com.apple.product-type.application.watchapp": [
+ "wrapper.application",
+ "",
+ ".app",
+ ],
+ "com.apple.product-type.watchkit-extension": [
+ "wrapper.app-extension",
+ "",
+ ".appex",
+ ],
+ "com.apple.product-type.app-extension": ["wrapper.app-extension", "", ".appex"],
+ "com.apple.product-type.bundle": ["wrapper.cfbundle", "", ".bundle"],
+ "com.apple.product-type.framework": ["wrapper.framework", "", ".framework"],
+ "com.apple.product-type.library.dynamic": [
+ "compiled.mach-o.dylib",
+ "lib",
+ ".dylib",
+ ],
+ "com.apple.product-type.library.static": ["archive.ar", "lib", ".a"],
+ "com.apple.product-type.tool": ["compiled.mach-o.executable", "", ""],
+ "com.apple.product-type.bundle.unit-test": ["wrapper.cfbundle", "", ".xctest"],
+ "com.apple.product-type.bundle.ui-testing": ["wrapper.cfbundle", "", ".xctest"],
+ "com.googlecode.gyp.xcode.bundle": ["compiled.mach-o.dylib", "", ".so"],
+ "com.apple.product-type.kernel-extension": ["wrapper.kext", "", ".kext"],
+ }
+
+ def __init__(
+ self,
+ properties=None,
+ id=None,
+ parent=None,
+ force_outdir=None,
+ force_prefix=None,
+ force_extension=None,
+ ):
+ # super
+ XCTarget.__init__(self, properties, id, parent)
+
+ if (
+ "productName" in self._properties
+ and "productType" in self._properties
+ and "productReference" not in self._properties
+ and self._properties["productType"] in self._product_filetypes
+ ):
+ products_group = None
+ pbxproject = self.PBXProjectAncestor()
+ if pbxproject is not None:
+ products_group = pbxproject.ProductsGroup()
+
+ if products_group is not None:
+ (filetype, prefix, suffix) = self._product_filetypes[
+ self._properties["productType"]
+ ]
+ # Xcode does not have a distinct type for loadable modules that are
+ # pure BSD targets (not in a bundle wrapper). GYP allows such modules
+ # to be specified by setting a target type to loadable_module without
+ # having mac_bundle set. These are mapped to the pseudo-product type
+ # com.googlecode.gyp.xcode.bundle.
+ #
+ # By picking up this special type and converting it to a dynamic
+ # library (com.apple.product-type.library.dynamic) with fix-ups,
+ # single-file loadable modules can be produced.
+ #
+ # MACH_O_TYPE is changed to mh_bundle to produce the proper file type
+ # (as opposed to mh_dylib). In order for linking to succeed,
+ # DYLIB_CURRENT_VERSION and DYLIB_COMPATIBILITY_VERSION must be
+ # cleared. They are meaningless for type mh_bundle.
+ #
+ # Finally, the .so extension is forcibly applied over the default
+ # (.dylib), unless another forced extension is already selected.
+ # .dylib is plainly wrong, and .bundle is used by loadable_modules in
+ # bundle wrappers (com.apple.product-type.bundle). .so seems an odd
+ # choice because it's used as the extension on many other systems that
+ # don't distinguish between linkable shared libraries and non-linkable
+ # loadable modules, but there's precedent: Python loadable modules on
+ # Mac OS X use an .so extension.
+ if self._properties["productType"] == "com.googlecode.gyp.xcode.bundle":
+ self._properties[
+ "productType"
+ ] = "com.apple.product-type.library.dynamic"
+ self.SetBuildSetting("MACH_O_TYPE", "mh_bundle")
+ self.SetBuildSetting("DYLIB_CURRENT_VERSION", "")
+ self.SetBuildSetting("DYLIB_COMPATIBILITY_VERSION", "")
+ if force_extension is None:
+ force_extension = suffix[1:]
+
+ if (
+ self._properties["productType"]
+ == "com.apple.product-type-bundle.unit.test"
+ or self._properties["productType"]
+ == "com.apple.product-type-bundle.ui-testing"
+ ):
+ if force_extension is None:
+ force_extension = suffix[1:]
+
+ if force_extension is not None:
+ # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
+ # Extension override.
+ suffix = "." + force_extension
+ if filetype.startswith("wrapper."):
+ self.SetBuildSetting("WRAPPER_EXTENSION", force_extension)
+ else:
+ self.SetBuildSetting("EXECUTABLE_EXTENSION", force_extension)
+
+ if filetype.startswith("compiled.mach-o.executable"):
+ product_name = self._properties["productName"]
+ product_name += suffix
+ suffix = ""
+ self.SetProperty("productName", product_name)
+ self.SetBuildSetting("PRODUCT_NAME", product_name)
+
+ # Xcode handles most prefixes based on the target type, however there
+ # are exceptions. If a "BSD Dynamic Library" target is added in the
+ # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that
+ # behavior.
+ if force_prefix is not None:
+ prefix = force_prefix
+ if filetype.startswith("wrapper."):
+ self.SetBuildSetting("WRAPPER_PREFIX", prefix)
+ else:
+ self.SetBuildSetting("EXECUTABLE_PREFIX", prefix)
+
+ if force_outdir is not None:
+ self.SetBuildSetting("TARGET_BUILD_DIR", force_outdir)
+
+ # TODO(tvl): Remove the below hack.
+ # http://code.google.com/p/gyp/issues/detail?id=122
+
+ # Some targets include the prefix in the target_name. These targets
+ # really should just add a product_name setting that doesn't include
+ # the prefix. For example:
+ # target_name = 'libevent', product_name = 'event'
+ # This check cleans up for them.
+ product_name = self._properties["productName"]
+ prefix_len = len(prefix)
+ if prefix_len and (product_name[:prefix_len] == prefix):
+ product_name = product_name[prefix_len:]
+ self.SetProperty("productName", product_name)
+ self.SetBuildSetting("PRODUCT_NAME", product_name)
+
+ ref_props = {
+ "explicitFileType": filetype,
+ "includeInIndex": 0,
+ "path": prefix + product_name + suffix,
+ "sourceTree": "BUILT_PRODUCTS_DIR",
+ }
+ file_ref = PBXFileReference(ref_props)
+ products_group.AppendChild(file_ref)
+ self.SetProperty("productReference", file_ref)
+
+ def GetBuildPhaseByType(self, type):
+ if "buildPhases" not in self._properties:
+ return None
+
+ the_phase = None
+ for phase in self._properties["buildPhases"]:
+ if isinstance(phase, type):
+ # Some phases may be present in multiples in a well-formed project file,
+ # but phases like PBXSourcesBuildPhase may only be present singly. This
+ # function is intended as an aid to the singular accessors below
+ # (HeadersPhase, SourcesPhase, and the like), so loop over the entire
+ # list of phases and assert if more than one of the desired type is
+ # found.
+ assert the_phase is None
+ the_phase = phase
+
+ return the_phase
+
+ def HeadersPhase(self):
+ headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase)
+ if headers_phase is None:
+ headers_phase = PBXHeadersBuildPhase()
+
+ # The headers phase should come before the resources, sources, and
+ # frameworks phases, if any.
+ insert_at = len(self._properties["buildPhases"])
+ for index, phase in enumerate(self._properties["buildPhases"]):
+ if (
+ isinstance(phase, PBXResourcesBuildPhase)
+ or isinstance(phase, PBXSourcesBuildPhase)
+ or isinstance(phase, PBXFrameworksBuildPhase)
+ ):
+ insert_at = index
+ break
+
+ self._properties["buildPhases"].insert(insert_at, headers_phase)
+ headers_phase.parent = self
+
+ return headers_phase
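+
+ # Ordering sketch (illustrative): for a target whose buildPhases list is
+ # [PBXSourcesBuildPhase, PBXFrameworksBuildPhase], a newly created
+ # headers phase is inserted at index 0, ahead of both.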
+
+ def ResourcesPhase(self):
+ resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase)
+ if resources_phase is None:
+ resources_phase = PBXResourcesBuildPhase()
+
+ # The resources phase should come before the sources and frameworks
+ # phases, if any.
+ insert_at = len(self._properties["buildPhases"])
+ for index, phase in enumerate(self._properties["buildPhases"]):
+ if isinstance(phase, PBXSourcesBuildPhase) or isinstance(
+ phase, PBXFrameworksBuildPhase
+ ):
+ insert_at = index
+ break
+
+ self._properties["buildPhases"].insert(insert_at, resources_phase)
+ resources_phase.parent = self
+
+ return resources_phase
+
+ def SourcesPhase(self):
+ sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase)
+ if sources_phase is None:
+ sources_phase = PBXSourcesBuildPhase()
+ self.AppendProperty("buildPhases", sources_phase)
+
+ return sources_phase
+
+ def FrameworksPhase(self):
+ frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase)
+ if frameworks_phase is None:
+ frameworks_phase = PBXFrameworksBuildPhase()
+ self.AppendProperty("buildPhases", frameworks_phase)
+
+ return frameworks_phase
+
+ def AddDependency(self, other):
+ # super
+ XCTarget.AddDependency(self, other)
+
+ static_library_type = "com.apple.product-type.library.static"
+ shared_library_type = "com.apple.product-type.library.dynamic"
+ framework_type = "com.apple.product-type.framework"
+ if (
+ isinstance(other, PBXNativeTarget)
+ and "productType" in self._properties
+ and self._properties["productType"] != static_library_type
+ and "productType" in other._properties
+ and (
+ other._properties["productType"] == static_library_type
+ or (
+ (
+ other._properties["productType"] == shared_library_type
+ or other._properties["productType"] == framework_type
+ )
+ and (
+ (not other.HasBuildSetting("MACH_O_TYPE"))
+ or other.GetBuildSetting("MACH_O_TYPE") != "mh_bundle"
+ )
+ )
+ )
+ ):
+
+ file_ref = other.GetProperty("productReference")
+
+ pbxproject = self.PBXProjectAncestor()
+ other_pbxproject = other.PBXProjectAncestor()
+ if pbxproject != other_pbxproject:
+ other_project_product_group = pbxproject.AddOrGetProjectReference(
+ other_pbxproject
+ )[0]
+ file_ref = other_project_product_group.GetChildByRemoteObject(file_ref)
+
+ self.FrameworksPhase().AppendProperty(
+ "files", PBXBuildFile({"fileRef": file_ref})
+ )
+
+
+class PBXAggregateTarget(XCTarget):
+ pass
+
+
+class PBXProject(XCContainerPortal):
+ # A PBXProject is really just an XCObject, the XCContainerPortal thing is
+ # just to allow PBXProject to be used in the containerPortal property of
+ # PBXContainerItemProxy.
+ """
+
+ Attributes:
+ path: "sample.xcodeproj". TODO(mark) Document me!
+ _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each
+ value is a reference to the dict in the
+ projectReferences list associated with the keyed
+ PBXProject.
+ """
+
+ _schema = XCContainerPortal._schema.copy()
+ _schema.update(
+ {
+ "attributes": [0, dict, 0, 0],
+ "buildConfigurationList": [
+ 0,
+ XCConfigurationList,
+ 1,
+ 1,
+ XCConfigurationList(),
+ ],
+ "compatibilityVersion": [0, str, 0, 1, "Xcode 3.2"],
+ "hasScannedForEncodings": [0, int, 0, 1, 1],
+ "mainGroup": [0, PBXGroup, 1, 1, PBXGroup()],
+ "projectDirPath": [0, str, 0, 1, ""],
+ "projectReferences": [1, dict, 0, 0],
+ "projectRoot": [0, str, 0, 1, ""],
+ "targets": [1, XCTarget, 1, 1, []],
+ }
+ )
+
+ def __init__(self, properties=None, id=None, parent=None, path=None):
+ self.path = path
+ self._other_pbxprojects = {}
+ # super
+ return XCContainerPortal.__init__(self, properties, id, parent)
+
+ def Name(self):
+ name = self.path
+ if name[-10:] == ".xcodeproj":
+ name = name[:-10]
+ return posixpath.basename(name)
+
+ def Path(self):
+ return self.path
+
+ def Comment(self):
+ return "Project object"
+
+ def Children(self):
+ # super
+ children = XCContainerPortal.Children(self)
+
+ # Add children that the schema doesn't know about. Maybe there's a more
+ # elegant way around this, but this is the only case where we need to own
+ # objects in a dictionary (that is itself in a list), and three lines for
+ # a one-off isn't that big a deal.
+ if "projectReferences" in self._properties:
+ for reference in self._properties["projectReferences"]:
+ children.append(reference["ProductGroup"])
+
+ return children
+
+ def PBXProjectAncestor(self):
+ return self
+
+ def _GroupByName(self, name):
+ if "mainGroup" not in self._properties:
+ self.SetProperty("mainGroup", PBXGroup())
+
+ main_group = self._properties["mainGroup"]
+ group = main_group.GetChildByName(name)
+ if group is None:
+ group = PBXGroup({"name": name})
+ main_group.AppendChild(group)
+
+ return group
+
+ # SourceGroup and ProductsGroup are created by default in Xcode's own
+ # templates.
+ def SourceGroup(self):
+ return self._GroupByName("Source")
+
+ def ProductsGroup(self):
+ return self._GroupByName("Products")
+
+ # IntermediatesGroup is used to collect source-like files that are generated
+ # by rules or script phases and are placed in intermediate directories such
+ # as DerivedSources.
+ def IntermediatesGroup(self):
+ return self._GroupByName("Intermediates")
+
+ # FrameworksGroup and ProjectsGroup are top-level groups used to collect
+ # frameworks and projects.
+ def FrameworksGroup(self):
+ return self._GroupByName("Frameworks")
+
+ def ProjectsGroup(self):
+ return self._GroupByName("Projects")
+
+ def RootGroupForPath(self, path):
+ """Returns a PBXGroup child of this object to which path should be added.
+
+ This method is intended to choose between SourceGroup and
+ IntermediatesGroup on the basis of whether path is present in a source
+ directory or an intermediates directory. For the purposes of this
+ determination, any path located within a derived file directory such as
+ PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
+ directory.
+
+ The returned value is a two-element tuple. The first element is the
+ PBXGroup, and the second element specifies whether that group should be
+ organized hierarchically (True) or as a single flat list (False).
+ """
+
+ # TODO(mark): make this a class variable and bind to self on call?
+ # Also, this list is nowhere near exhaustive.
+ # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by
+ # gyp.generator.xcode. There should probably be some way for that module
+ # to push the names in, rather than having to hard-code them here.
+ source_tree_groups = {
+ "DERIVED_FILE_DIR": (self.IntermediatesGroup, True),
+ "INTERMEDIATE_DIR": (self.IntermediatesGroup, True),
+ "PROJECT_DERIVED_FILE_DIR": (self.IntermediatesGroup, True),
+ "SHARED_INTERMEDIATE_DIR": (self.IntermediatesGroup, True),
+ }
+
+ (source_tree, path) = SourceTreeAndPathFromPath(path)
+ if source_tree is not None and source_tree in source_tree_groups:
+ (group_func, hierarchical) = source_tree_groups[source_tree]
+ group = group_func()
+ return (group, hierarchical)
+
+ # TODO(mark): make additional choices based on file extension.
+
+ return (self.SourceGroup(), True)
+
+ def AddOrGetFileInRootGroup(self, path):
+ """Returns a PBXFileReference corresponding to path in the correct group
+ according to RootGroupForPath's heuristics.
+
+ If an existing PBXFileReference for path exists, it will be returned.
+ Otherwise, one will be created and returned.
+ """
+
+ (group, hierarchical) = self.RootGroupForPath(path)
+ return group.AddOrGetFileByPath(path, hierarchical)
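+
+ # For example (a sketch; the path is illustrative):
+ #   project.AddOrGetFileInRootGroup("$(INTERMEDIATE_DIR)/generated.cc")
+ # files the reference under the "Intermediates" group hierarchically,
+ # while a plain relative path lands under the "Source" group.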
+
+ def RootGroupsTakeOverOnlyChildren(self, recurse=False):
+ """Calls TakeOverOnlyChild for all groups in the main group."""
+
+ for group in self._properties["mainGroup"]._properties["children"]:
+ if isinstance(group, PBXGroup):
+ group.TakeOverOnlyChild(recurse)
+
+ def SortGroups(self):
+ # Sort the children of the mainGroup (like "Source" and "Products")
+ # according to their defined order.
+ self._properties["mainGroup"]._properties["children"] = sorted(
+ self._properties["mainGroup"]._properties["children"],
+ key=cmp_to_key(lambda x, y: x.CompareRootGroup(y)),
+ )
+
+ # Sort everything else by putting group before files, and going
+ # alphabetically by name within sections of groups and files. SortGroup
+ # is recursive.
+ for group in self._properties["mainGroup"]._properties["children"]:
+ if not isinstance(group, PBXGroup):
+ continue
+
+ if group.Name() == "Products":
+ # The Products group is a special case. Instead of sorting
+ # alphabetically, sort things in the order of the targets that
+ # produce the products. To do this, just build up a new list of
+ # products based on the targets.
+ products = []
+ for target in self._properties["targets"]:
+ if not isinstance(target, PBXNativeTarget):
+ continue
+ product = target._properties["productReference"]
+ # Make sure that the product is already in the products group.
+ assert product in group._properties["children"]
+ products.append(product)
+
+ # Make sure that this process doesn't miss anything that was already
+ # in the products group.
+ assert len(products) == len(group._properties["children"])
+ group._properties["children"] = products
+ else:
+ group.SortGroup()
+
+ def AddOrGetProjectReference(self, other_pbxproject):
+ """Add a reference to another project file (via PBXProject object) to this
+ one.
+
+ Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in
+ this project file that contains a PBXReferenceProxy object for each
+ product of each PBXNativeTarget in the other project file. ProjectRef is
+ a PBXFileReference to the other project file.
+
+ If this project file already references the other project file, the
+ existing ProductGroup and ProjectRef are returned. The ProductGroup will
+ still be updated if necessary.
+ """
+
+ if "projectReferences" not in self._properties:
+ self._properties["projectReferences"] = []
+
+ product_group = None
+ project_ref = None
+
+ if other_pbxproject not in self._other_pbxprojects:
+ # This project file isn't yet linked to the other one. Establish the
+ # link.
+ product_group = PBXGroup({"name": "Products"})
+
+ # ProductGroup is strong.
+ product_group.parent = self
+
+ # There's nothing unique about this PBXGroup, and if left alone, it will
+ # wind up with the same set of hashables as all other PBXGroup objects
+ # owned by the projectReferences list. Add the hashables of the
+ # remote PBXProject that it's related to.
+ product_group._hashables.extend(other_pbxproject.Hashables())
+
+ # The other project reports its path as relative to the same directory
+ # that this project's path is relative to. The other project's path
+ # is not necessarily already relative to this project. Figure out the
+ # pathname that this project needs to use to refer to the other one.
+ this_path = posixpath.dirname(self.Path())
+ projectDirPath = self.GetProperty("projectDirPath")
+ if projectDirPath:
+ if posixpath.isabs(projectDirPath[0]):
+ this_path = projectDirPath
+ else:
+ this_path = posixpath.join(this_path, projectDirPath)
+ other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
+
+ # ProjectRef is weak (it's owned by the mainGroup hierarchy).
+ project_ref = PBXFileReference(
+ {
+ "lastKnownFileType": "wrapper.pb-project",
+ "path": other_path,
+ "sourceTree": "SOURCE_ROOT",
+ }
+ )
+ self.ProjectsGroup().AppendChild(project_ref)
+
+ ref_dict = {"ProductGroup": product_group, "ProjectRef": project_ref}
+ self._other_pbxprojects[other_pbxproject] = ref_dict
+ self.AppendProperty("projectReferences", ref_dict)
+
+ # Xcode seems to sort this list case-insensitively.
+ self._properties["projectReferences"] = sorted(
+ self._properties["projectReferences"],
+ key=lambda x: x["ProjectRef"].Name().lower()
+ )
+ else:
+ # The link already exists. Pull out the relevant data.
+ project_ref_dict = self._other_pbxprojects[other_pbxproject]
+ product_group = project_ref_dict["ProductGroup"]
+ project_ref = project_ref_dict["ProjectRef"]
+
+ self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
+
+ inherit_unique_symroot = self._AllSymrootsUnique(other_pbxproject, False)
+ targets = other_pbxproject.GetProperty("targets")
+ if all(self._AllSymrootsUnique(t, inherit_unique_symroot) for t in targets):
+ dir_path = project_ref._properties["path"]
+ product_group._hashables.extend(dir_path)
+
+ return [product_group, project_ref]
+
+ def _AllSymrootsUnique(self, target, inherit_unique_symroot):
+ # Returns True if all configurations have a unique 'SYMROOT' attribute.
+ # inherit_unique_symroot decides whether a configuration that doesn't
+ # define an explicit value for 'SYMROOT' is assumed to inherit a unique
+ # 'SYMROOT' attribute from its parent.
+ symroots = self._DefinedSymroots(target)
+ for s in self._DefinedSymroots(target):
+ if (
+ s is not None
+ and not self._IsUniqueSymrootForTarget(s)
+ or s is None
+ and not inherit_unique_symroot
+ ):
+ return False
+ return True if symroots else inherit_unique_symroot
+
+ def _DefinedSymroots(self, target):
+ # Returns all values for the 'SYMROOT' attribute defined in all
+ # configurations for this target. If any configuration doesn't define the
+ # 'SYMROOT' attribute, None is added to the returned set. If no
+ # configuration defines the 'SYMROOT' attribute, an empty set is
+ # returned.
+ config_list = target.GetProperty("buildConfigurationList")
+ symroots = set()
+ for config in config_list.GetProperty("buildConfigurations"):
+ setting = config.GetProperty("buildSettings")
+ if "SYMROOT" in setting:
+ symroots.add(setting["SYMROOT"])
+ else:
+ symroots.add(None)
+ if len(symroots) == 1 and None in symroots:
+ return set()
+ return symroots
+
+ def _IsUniqueSymrootForTarget(self, symroot):
+ # This method returns True if all configurations in target contain a
+ # 'SYMROOT' attribute that is unique for the given target. A value is
+ # unique if the Xcode macro '$SRCROOT' appears in it in any form.
+ uniquifier = ["$SRCROOT", "$(SRCROOT)"]
+ if any(x in symroot for x in uniquifier):
+ return True
+ return False
+
+ def _SetUpProductReferences(self, other_pbxproject, product_group, project_ref):
+ # TODO(mark): This only adds references to products in other_pbxproject
+ # when they don't exist in this pbxproject. Perhaps it should also
+ # remove references from this pbxproject that are no longer present in
+ # other_pbxproject. Perhaps it should update various properties if they
+ # change.
+ for target in other_pbxproject._properties["targets"]:
+ if not isinstance(target, PBXNativeTarget):
+ continue
+
+ other_fileref = target._properties["productReference"]
+ if product_group.GetChildByRemoteObject(other_fileref) is None:
+ # Xcode sets remoteInfo to the name of the target and not the name
+ # of its product, despite this proxy being a reference to the product.
+ container_item = PBXContainerItemProxy(
+ {
+ "containerPortal": project_ref,
+ "proxyType": 2,
+ "remoteGlobalIDString": other_fileref,
+ "remoteInfo": target.Name(),
+ }
+ )
+ # TODO(mark): Does sourceTree get copied straight over from the other
+ # project? Can the other project ever have lastKnownFileType here
+ # instead of explicitFileType? (Use it if so?) Can path ever be
+ # unset? (I don't think so.) Can other_fileref have name set, and
+ # does it impact the PBXReferenceProxy if so? These are the questions
+ # that perhaps will be answered one day.
+ reference_proxy = PBXReferenceProxy(
+ {
+ "fileType": other_fileref._properties["explicitFileType"],
+ "path": other_fileref._properties["path"],
+ "sourceTree": other_fileref._properties["sourceTree"],
+ "remoteRef": container_item,
+ }
+ )
+
+ product_group.AppendChild(reference_proxy)
+
+ def SortRemoteProductReferences(self):
+ # For each remote project file, sort the associated ProductGroup in the
+ # same order that the targets are sorted in the remote project file. This
+ # is the sort order used by Xcode.
+
+ def CompareProducts(x, y, remote_products):
+ # x and y are PBXReferenceProxy objects. Go through their associated
+ # PBXContainerItemProxy to get the remote PBXFileReference, which will be
+ # present in the remote_products list.
+ x_remote = x._properties["remoteRef"]._properties["remoteGlobalIDString"]
+ y_remote = y._properties["remoteRef"]._properties["remoteGlobalIDString"]
+ x_index = remote_products.index(x_remote)
+ y_index = remote_products.index(y_remote)
+
+ # Use the order of each remote PBXFileReference in remote_products to
+ # determine the sort order.
+ return cmp(x_index, y_index)
+
+ for other_pbxproject, ref_dict in self._other_pbxprojects.items():
+ # Build up a list of products in the remote project file, ordered the
+ # same as the targets that produce them.
+ remote_products = []
+ for target in other_pbxproject._properties["targets"]:
+ if not isinstance(target, PBXNativeTarget):
+ continue
+ remote_products.append(target._properties["productReference"])
+
+ # Sort the PBXReferenceProxy children according to the list of remote
+ # products.
+ product_group = ref_dict["ProductGroup"]
+ product_group._properties["children"] = sorted(
+ product_group._properties["children"],
+ key=cmp_to_key(
+ lambda x, y, rp=remote_products: CompareProducts(x, y, rp)),
+ )
+
+
+class XCProjectFile(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update(
+ {
+ "archiveVersion": [0, int, 0, 1, 1],
+ "classes": [0, dict, 0, 1, {}],
+ "objectVersion": [0, int, 0, 1, 46],
+ "rootObject": [0, PBXProject, 1, 1],
+ }
+ )
+
+ def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
+ # Although XCProjectFile is implemented here as an XCObject, it's not a
+ # proper object in the Xcode sense, and it certainly doesn't have its own
+ # ID. Pass through an attempt to update IDs to the real root object.
+ if recursive:
+ self._properties["rootObject"].ComputeIDs(recursive, overwrite, hash)
+
+ def Print(self, file=sys.stdout):
+ self.VerifyHasRequiredProperties()
+
+ # Add the special "objects" property, which will be caught and handled
+ # separately during printing. This structure allows a fairly standard
+ # loop to do the normal printing.
+ self._properties["objects"] = {}
+ self._XCPrint(file, 0, "// !$*UTF8*$!\n")
+ if self._should_print_single_line:
+ self._XCPrint(file, 0, "{ ")
+ else:
+ self._XCPrint(file, 0, "{\n")
+ for property, value in sorted(
+ self._properties.items()
+ ):
+ if property == "objects":
+ self._PrintObjects(file)
+ else:
+ self._XCKVPrint(file, 1, property, value)
+ self._XCPrint(file, 0, "}\n")
+ del self._properties["objects"]
+
+ def _PrintObjects(self, file):
+ if self._should_print_single_line:
+ self._XCPrint(file, 0, "objects = {")
+ else:
+ self._XCPrint(file, 1, "objects = {\n")
+
+ objects_by_class = {}
+ for object in self.Descendants():
+ if object == self:
+ continue
+ class_name = object.__class__.__name__
+ if class_name not in objects_by_class:
+ objects_by_class[class_name] = []
+ objects_by_class[class_name].append(object)
+
+ for class_name in sorted(objects_by_class):
+ self._XCPrint(file, 0, "\n")
+ self._XCPrint(file, 0, "/* Begin " + class_name + " section */\n")
+ for object in sorted(
+ objects_by_class[class_name], key=attrgetter("id")
+ ):
+ object.Print(file)
+ self._XCPrint(file, 0, "/* End " + class_name + " section */\n")
+
+ if self._should_print_single_line:
+ self._XCPrint(file, 0, "}; ")
+ else:
+ self._XCPrint(file, 1, "};\n")
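A minimal sketch of how a generator drives the classes above, mirroring what
gyp.generator.xcode in this same vendored tree does: link one project to
another and serialize the result. The project paths are illustrative only.

    import sys
    import gyp.xcodeproj_file as xf  # assumes pylib/ is on sys.path

    main_project = xf.PBXProject(path="app.xcodeproj")
    dep_project = xf.PBXProject(path="lib.xcodeproj")

    # AddOrGetProjectReference is idempotent: a second call returns the
    # existing [ProductGroup, ProjectRef] pair instead of relinking.
    product_group, project_ref = main_project.AddOrGetProjectReference(dep_project)

    main_project.SortGroups()
    project_file = xf.XCProjectFile({"rootObject": main_project})
    project_file.ComputeIDs()  # assign stable object IDs before printing
    project_file.Print(sys.stdout)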
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
new file mode 100644
index 0000000..5301963
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
@@ -0,0 +1,65 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Applies a fix to CR LF TAB handling in xml.dom.
+
+Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
+Working around this: http://bugs.python.org/issue5752
+TODO(bradnelson): Consider dropping this when we drop XP support.
+"""
+
+
+import xml.dom.minidom
+
+
+def _Replacement_write_data(writer, data, is_attrib=False):
+ """Writes datachars to writer."""
+ data = data.replace("&", "&amp;").replace("<", "&lt;")
+ data = data.replace('"', "&quot;").replace(">", "&gt;")
+ if is_attrib:
+ data = data.replace("\r", "&#xD;").replace("\n", "&#xA;").replace("\t", "&#x9;")
+ writer.write(data)
+
+
+def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
+ # indent = current indentation
+ # addindent = indentation to add to higher levels
+ # newl = newline string
+ writer.write(indent + "<" + self.tagName)
+
+ attrs = self._get_attributes()
+ a_names = sorted(attrs.keys())
+
+ for a_name in a_names:
+ writer.write(' %s="' % a_name)
+ _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
+ writer.write('"')
+ if self.childNodes:
+ writer.write(">%s" % newl)
+ for node in self.childNodes:
+ node.writexml(writer, indent + addindent, addindent, newl)
+ writer.write(f"{indent}</{self.tagName}>{newl}")
+ else:
+ writer.write("/>%s" % newl)
+
+
+class XmlFix:
+ """Object to manage temporary patching of xml.dom.minidom."""
+
+ def __init__(self):
+ # Preserve current xml.dom.minidom functions.
+ self.write_data = xml.dom.minidom._write_data
+ self.writexml = xml.dom.minidom.Element.writexml
+ # Inject replacement versions of a function and a method.
+ xml.dom.minidom._write_data = _Replacement_write_data
+ xml.dom.minidom.Element.writexml = _Replacement_writexml
+
+ def Cleanup(self):
+ if self.write_data:
+ xml.dom.minidom._write_data = self.write_data
+ xml.dom.minidom.Element.writexml = self.writexml
+ self.write_data = None
+
+ def __del__(self):
+ self.Cleanup()
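A short usage sketch for XmlFix: patch minidom for the duration of a write,
then restore the original functions. The document and attribute values below
are made up for illustration.

    import xml.dom.minidom
    from gyp.xml_fix import XmlFix  # assumes pylib/ is on sys.path

    fix = XmlFix()
    try:
        doc = xml.dom.minidom.parseString('<Tool Name="VCCLCompilerTool"/>')
        # With the patch active, CR/LF/TAB in attribute values survive as
        # character references instead of being collapsed to spaces.
        doc.documentElement.setAttribute("CommandLine", "echo a\r\necho b")
        print(doc.documentElement.toxml())
    finally:
        fix.Cleanup()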
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pyproject.toml b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pyproject.toml
new file mode 100644
index 0000000..d8a5451
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/pyproject.toml
@@ -0,0 +1,41 @@
+[build-system]
+requires = ["setuptools>=61.0"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "gyp-next"
+version = "0.14.0"
+authors = [
+ { name="Node.js contributors", email="ryzokuken@disroot.org" },
+]
+description = "A fork of the GYP build system for use in Node.js projects"
+readme = "README.md"
+license = { file="LICENSE" }
+requires-python = ">=3.6"
+classifiers = [
+ "Development Status :: 3 - Alpha",
+ "Environment :: Console",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: BSD License",
+ "Natural Language :: English",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+]
+
+[project.optional-dependencies]
+dev = ["flake8", "pytest"]
+
+[project.scripts]
+gyp = "gyp:script_main"
+
+[project.urls]
+"Homepage" = "https://github.com/nodejs/gyp-next"
+
+[tool.setuptools]
+package-dir = {"" = "pylib"}
+packages = ["gyp", "gyp.generator"]
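The [project.scripts] table maps the gyp command to gyp:script_main. The
console-script shim an installer generates behaves roughly like the sketch
below (not the literal generated file):

    import sys
    from gyp import script_main  # "gyp" is found under package-dir "pylib"

    if __name__ == "__main__":
        sys.exit(script_main())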
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/test_gyp.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/test_gyp.py
new file mode 100755
index 0000000..b7bb956
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/test_gyp.py
@@ -0,0 +1,261 @@
+#!/usr/bin/env python3
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""gyptest.py -- test runner for GYP tests."""
+
+
+import argparse
+import os
+import platform
+import subprocess
+import sys
+import time
+
+
+def is_test_name(f):
+ return f.startswith("gyptest") and f.endswith(".py")
+
+
+def find_all_gyptest_files(directory):
+ result = []
+ for root, dirs, files in os.walk(directory):
+ result.extend([os.path.join(root, f) for f in files if is_test_name(f)])
+ result.sort()
+ return result
+
+
+def main(argv=None):
+ if argv is None:
+ argv = sys.argv
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-a", "--all", action="store_true", help="run all tests")
+ parser.add_argument("-C", "--chdir", action="store", help="change to directory")
+ parser.add_argument(
+ "-f",
+ "--format",
+ action="store",
+ default="",
+ help="run tests with the specified formats",
+ )
+ parser.add_argument(
+ "-G",
+ "--gyp_option",
+ action="append",
+ default=[],
+ help="Add -G options to the gyp command line",
+ )
+ parser.add_argument(
+ "-l", "--list", action="store_true", help="list available tests and exit"
+ )
+ parser.add_argument(
+ "-n",
+ "--no-exec",
+ action="store_true",
+ help="no execute, just print the command line",
+ )
+ parser.add_argument(
+ "--path", action="append", default=[], help="additional $PATH directory"
+ )
+ parser.add_argument(
+ "-q",
+ "--quiet",
+ action="store_true",
+ help="quiet, don't print anything unless there are failures",
+ )
+ parser.add_argument(
+ "-v",
+ "--verbose",
+ action="store_true",
+ help="print configuration info and test results.",
+ )
+ parser.add_argument("tests", nargs="*")
+ args = parser.parse_args(argv[1:])
+
+ if args.chdir:
+ os.chdir(args.chdir)
+
+ if args.path:
+ extra_path = [os.path.abspath(p) for p in args.path]
+ extra_path = os.pathsep.join(extra_path)
+ os.environ["PATH"] = extra_path + os.pathsep + os.environ["PATH"]
+
+ if not args.tests:
+ if not args.all:
+ sys.stderr.write("Specify -a to get all tests.\n")
+ return 1
+ args.tests = ["test"]
+
+ tests = []
+ for arg in args.tests:
+ if os.path.isdir(arg):
+ tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
+ else:
+ if not is_test_name(os.path.basename(arg)):
+ print(arg, "is not a valid gyp test name.", file=sys.stderr)
+ sys.exit(1)
+ tests.append(arg)
+
+ if args.list:
+ for test in tests:
+ print(test)
+ sys.exit(0)
+
+ os.environ["PYTHONPATH"] = os.path.abspath("test/lib")
+
+ if args.verbose:
+ print_configuration_info()
+
+ if args.gyp_option and not args.quiet:
+ print("Extra Gyp options: %s\n" % args.gyp_option)
+
+ if args.format:
+ format_list = args.format.split(",")
+ else:
+ format_list = {
+ "aix5": ["make"],
+ "os400": ["make"],
+ "freebsd7": ["make"],
+ "freebsd8": ["make"],
+ "openbsd5": ["make"],
+ "cygwin": ["msvs"],
+ "win32": ["msvs", "ninja"],
+ "linux": ["make", "ninja"],
+ "linux2": ["make", "ninja"],
+ "linux3": ["make", "ninja"],
+ # TODO: Re-enable xcode-ninja.
+ # https://bugs.chromium.org/p/gyp/issues/detail?id=530
+ # 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'],
+ "darwin": ["make", "ninja", "xcode"],
+ }[sys.platform]
+
+ gyp_options = []
+ for option in args.gyp_option:
+ gyp_options += ["-G", option]
+
+ runner = Runner(format_list, tests, gyp_options, args.verbose)
+ runner.run()
+
+ if not args.quiet:
+ runner.print_results()
+
+ return 1 if runner.failures else 0
+
+
+def print_configuration_info():
+ print("Test configuration:")
+ if sys.platform == "darwin":
+ sys.path.append(os.path.abspath("test/lib"))
+ import TestMac
+
+ print(f" Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}")
+ print(f" Xcode {TestMac.Xcode.Version()}")
+ elif sys.platform == "win32":
+ sys.path.append(os.path.abspath("pylib"))
+ import gyp.MSVSVersion
+
+ print(" Win %s %s\n" % platform.win32_ver()[0:2])
+ print(" MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description())
+ elif sys.platform in ("linux", "linux2"):
+ print(" Linux %s" % " ".join(platform.linux_distribution()))
+ print(f" Python {platform.python_version()}")
+ print(f" PYTHONPATH={os.environ['PYTHONPATH']}")
+ print()
+
+
+class Runner:
+ def __init__(self, formats, tests, gyp_options, verbose):
+ self.formats = formats
+ self.tests = tests
+ self.verbose = verbose
+ self.gyp_options = gyp_options
+ self.failures = []
+ self.num_tests = len(formats) * len(tests)
+ num_digits = len(str(self.num_tests))
+ self.fmt_str = "[%%%dd/%%%dd] (%%s) %%s" % (num_digits, num_digits)
+ self.isatty = sys.stdout.isatty() and not self.verbose
+ self.env = os.environ.copy()
+ self.hpos = 0
+
+ def run(self):
+ run_start = time.time()
+
+ i = 1
+ for fmt in self.formats:
+ for test in self.tests:
+ self.run_test(test, fmt, i)
+ i += 1
+
+ if self.isatty:
+ self.erase_current_line()
+
+ self.took = time.time() - run_start
+
+ def run_test(self, test, fmt, i):
+ if self.isatty:
+ self.erase_current_line()
+
+ msg = self.fmt_str % (i, self.num_tests, fmt, test)
+ self.print_(msg)
+
+ start = time.time()
+ cmd = [sys.executable, test] + self.gyp_options
+ self.env["TESTGYP_FORMAT"] = fmt
+ proc = subprocess.Popen(
+ cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=self.env
+ )
+ proc.wait()
+ took = time.time() - start
+
+ stdout = proc.stdout.read().decode("utf8")
+ if proc.returncode == 2:
+ res = "skipped"
+ elif proc.returncode:
+ res = "failed"
+ self.failures.append(f"({test}) {fmt}")
+ else:
+ res = "passed"
+ res_msg = f" {res} {took:.3f}s"
+ self.print_(res_msg)
+
+ if stdout and not stdout.endswith(("PASSED\n", "NO RESULT\n")):
+ print()
+ print("\n".join(f" {line}" for line in stdout.splitlines()))
+ elif not self.isatty:
+ print()
+
+ def print_(self, msg):
+ print(msg, end="")
+ index = msg.rfind("\n")
+ if index == -1:
+ self.hpos += len(msg)
+ else:
+ self.hpos = len(msg) - index
+ sys.stdout.flush()
+
+ def erase_current_line(self):
+ print("\b" * self.hpos + " " * self.hpos + "\b" * self.hpos, end="")
+ sys.stdout.flush()
+ self.hpos = 0
+
+ def print_results(self):
+ num_failures = len(self.failures)
+ if num_failures:
+ print()
+ if num_failures == 1:
+ print("Failed the following test:")
+ else:
+ print("Failed the following %d tests:" % num_failures)
+ print("\t" + "\n\t".join(sorted(self.failures)))
+ print()
+ print(
+ "Ran %d tests in %.3fs, %d failed."
+ % (self.num_tests, self.took, num_failures)
+ )
+ print()
+
+
+if __name__ == "__main__":
+ sys.exit(main())
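Because main() accepts an argv list, the runner can also be invoked
programmatically; the snippet below is the rough equivalent of running
"test_gyp.py -f ninja <test>". The test path is illustrative and assumes
test_gyp.py is importable, e.g. from the gyp checkout root:

    import sys
    from test_gyp import main

    sys.exit(main(["test_gyp.py", "-f", "ninja",
                   "test/hello/gyptest-all.py"]))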
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/README b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/README
new file mode 100644
index 0000000..84a73d1
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/README
@@ -0,0 +1,15 @@
+pretty_vcproj:
+ Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2]
+
+ The key/value pairs are used to resolve vsprops names.
+
+ For example, if I want to diff the base.vcproj project:
+
+ pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > original.txt
+ pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
+
+ And you can use your favorite diff tool to see the changes.
+
+ Note: In the case of base.vcproj, the original vcproj is one level up from the generated one.
+ I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt
+ before you perform the diff.
\ No newline at end of file
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/Xcode/README b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/Xcode/README
new file mode 100644
index 0000000..2492a2c
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/Xcode/README
@@ -0,0 +1,5 @@
+The Specifications directory contains syntax formatters for Xcode 3. These do not appear to be supported on Xcode 4 yet. To use them with Xcode 3, please install both the gyp.pbfilespec and gyp.xclangspec files in
+
+~/Library/Application Support/Developer/Shared/Xcode/Specifications/
+
+and restart Xcode.
\ No newline at end of file
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec
new file mode 100644
index 0000000..85e2e26
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec
@@ -0,0 +1,27 @@
+/*
+ gyp.pbfilespec
+ GYP source file spec for Xcode 3
+
+ There is not much documentation available regarding the format
+ of .pbfilespec files. As a starting point, see for instance the
+ outdated documentation at:
+ http://maxao.free.fr/xcode-plugin-interface/specifications.html
+ and the files in:
+ /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
+
+ Place this file in directory:
+ ~/Library/Application Support/Developer/Shared/Xcode/Specifications/
+*/
+
+(
+ {
+ Identifier = sourcecode.gyp;
+ BasedOn = sourcecode;
+ Name = "GYP Files";
+ Extensions = ("gyp", "gypi");
+ MIMETypes = ("text/gyp");
+ Language = "xcode.lang.gyp";
+ IsTextFile = YES;
+ IsSourceFile = YES;
+ }
+)
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec
new file mode 100644
index 0000000..3b3506d
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec
@@ -0,0 +1,226 @@
+/*
+ Copyright (c) 2011 Google Inc. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+
+ gyp.xclangspec
+ GYP language specification for Xcode 3
+
+ There is not much documentation available regarding the format
+ of .xclangspec files. As a starting point, see for instance the
+ outdated documentation at:
+ http://maxao.free.fr/xcode-plugin-interface/specifications.html
+ and the files in:
+ /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
+
+ Place this file in directory:
+ ~/Library/Application Support/Developer/Shared/Xcode/Specifications/
+*/
+
+(
+
+ {
+ Identifier = "xcode.lang.gyp.keyword";
+ Syntax = {
+ Words = (
+ "and",
+ "or",
+ "<!",
+ "<",
+ );
+ Type = "xcode.syntax.keyword";
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.target.declarator";
+ Syntax = {
+ Words = (
+ "'target_name'",
+ );
+ Type = "xcode.syntax.identifier.type";
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.string.singlequote";
+ Syntax = {
+ IncludeRules = (
+ "xcode.lang.string",
+ "xcode.lang.gyp.keyword",
+ "xcode.lang.number",
+ );
+ Start = "'";
+ End = "'";
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.comma";
+ Syntax = {
+ Words = ( ",", );
+
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp";
+ Description = "GYP Coloring";
+ BasedOn = "xcode.lang.simpleColoring";
+ IncludeInMenu = YES;
+ Name = "GYP";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer.toplevel";
+ IncludeRules = (
+ "xcode.lang.gyp.dictionary",
+ );
+ Type = "xcode.syntax.plain";
+ };
+ },
+
+ // The following rule returns tokens to the other rules
+ {
+ Identifier = "xcode.lang.gyp.lexer";
+ Syntax = {
+ IncludeRules = (
+ "xcode.lang.gyp.comment",
+ "xcode.lang.string",
+ 'xcode.lang.gyp.targetname.declarator',
+ "xcode.lang.gyp.string.singlequote",
+ "xcode.lang.number",
+ "xcode.lang.gyp.comma",
+ );
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.lexer.toplevel";
+ Syntax = {
+ IncludeRules = (
+ "xcode.lang.gyp.comment",
+ );
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.assignment";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Rules = (
+ "xcode.lang.gyp.assignment.lhs",
+ ":",
+ "xcode.lang.gyp.assignment.rhs",
+ );
+ };
+
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.target.declaration";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Rules = (
+ "xcode.lang.gyp.target.declarator",
+ ":",
+ "xcode.lang.gyp.target.name",
+ );
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.target.name";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Rules = (
+ "xcode.lang.gyp.string.singlequote",
+ );
+ Type = "xcode.syntax.definition.function";
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.assignment.lhs";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Rules = (
+ "xcode.lang.gyp.string.singlequote",
+ );
+ Type = "xcode.syntax.identifier.type";
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.assignment.rhs";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Rules = (
+ "xcode.lang.gyp.string.singlequote?",
+ "xcode.lang.gyp.array?",
+ "xcode.lang.gyp.dictionary?",
+ "xcode.lang.number?",
+ );
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.dictionary";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Start = "{";
+ End = "}";
+ Foldable = YES;
+ Recursive = YES;
+ IncludeRules = (
+ "xcode.lang.gyp.target.declaration",
+ "xcode.lang.gyp.assignment",
+ );
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.array";
+ Syntax = {
+ Tokenizer = "xcode.lang.gyp.lexer";
+ Start = "[";
+ End = "]";
+ Foldable = YES;
+ Recursive = YES;
+ IncludeRules = (
+ "xcode.lang.gyp.array",
+ "xcode.lang.gyp.dictionary",
+ "xcode.lang.gyp.string.singlequote",
+ );
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.todo.mark";
+ Syntax = {
+ StartChars = "T";
+ Match = (
+ "^\(TODO\(.*\):[ \t]+.*\)$", // include "TODO: " in the markers list
+ );
+ // This is the order of captures. All of the match strings above need the same order.
+ CaptureTypes = (
+ "xcode.syntax.mark"
+ );
+ Type = "xcode.syntax.comment";
+ };
+ },
+
+ {
+ Identifier = "xcode.lang.gyp.comment";
+ BasedOn = "xcode.lang.comment"; // for text macros
+ Syntax = {
+ Start = "#";
+ End = "\n";
+ IncludeRules = (
+ "xcode.lang.url",
+ "xcode.lang.url.mail",
+ "xcode.lang.comment.mark",
+ "xcode.lang.gyp.todo.mark",
+ );
+ Type = "xcode.syntax.comment";
+ };
+ },
+)
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/README b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/README
new file mode 100644
index 0000000..eeef39f
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/README
@@ -0,0 +1,12 @@
+How to install gyp-mode for emacs:
+
+Add the following to your ~/.emacs (replace ... with the path to your gyp
+checkout).
+
+(setq load-path (cons ".../tools/emacs" load-path))
+(require 'gyp)
+
+Restart emacs (or eval-region the added lines) and you should be all set.
+
+Please note that ert, which is included in Emacs 24 and available separately
+from https://github.com/ohler/ert, is required for running the tests.
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el
new file mode 100644
index 0000000..07afc58
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el
@@ -0,0 +1,63 @@
+;;; gyp-tests.el - unit tests for gyp-mode.
+
+;; Copyright (c) 2012 Google Inc. All rights reserved.
+;; Use of this source code is governed by a BSD-style license that can be
+;; found in the LICENSE file.
+
+;; The recommended way to run these tests is to run them from the command-line,
+;; with the run-unit-tests.sh script.
+
+(require 'cl)
+(require 'ert)
+(require 'gyp)
+
+(defconst samples (directory-files "testdata" t ".gyp$")
+ "List of golden samples to check")
+
+(defun fontify (filename)
+ (with-temp-buffer
+ (insert-file-contents-literally filename)
+ (gyp-mode)
+ (font-lock-fontify-buffer)
+ (buffer-string)))
+
+(defun read-golden-sample (filename)
+ (with-temp-buffer
+ (insert-file-contents-literally (concat filename ".fontified"))
+ (read (current-buffer))))
+
+(defun equivalent-face (face)
+ "For the purposes of face comparison, we're not interested in the
+ differences between certain faces. For example, the difference between
+ font-lock-comment-delimiter-face and font-lock-comment-face."
+ (cl-case face
+ ((font-lock-comment-delimiter-face) font-lock-comment-face)
+ (t face)))
+
+(defun text-face-properties (s)
+ "Extract the text properties from s"
+ (let ((result (list t)))
+ (dotimes (i (length s))
+ (setq result (cons (equivalent-face (get-text-property i 'face s))
+ result)))
+ (nreverse result)))
+
+(ert-deftest test-golden-samples ()
+ "Check that fontification produces the same results as the golden samples"
+ (dolist (sample samples)
+ (let ((golden (read-golden-sample sample))
+ (fontified (fontify sample)))
+ (should (equal golden fontified))
+ (should (equal (text-face-properties golden)
+ (text-face-properties fontified))))))
+
+(defun create-golden-sample (filename)
+ "Create a golden sample by fontifying filename and writing out the printable
+ representation of the fontified buffer (with text properties) to
+ FILENAME.fontified"
+ (with-temp-file (concat filename ".fontified")
+ (print (fontify filename) (current-buffer))))
+
+(defun create-golden-samples ()
+ "Recreate the golden samples"
+ (dolist (sample samples) (create-golden-sample sample)))
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/gyp.el b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/gyp.el
new file mode 100644
index 0000000..042ff3a
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/gyp.el
@@ -0,0 +1,275 @@
+;;; gyp.el - font-lock-mode support for gyp files.
+
+;; Copyright (c) 2012 Google Inc. All rights reserved.
+;; Use of this source code is governed by a BSD-style license that can be
+;; found in the LICENSE file.
+
+;; Put this somewhere in your load-path and
+;; (require 'gyp)
+
+(require 'python)
+(require 'cl)
+
+(when (string-match "python-mode.el" (symbol-file 'python-mode 'defun))
+ (error (concat "python-mode must be loaded from python.el (bundled with "
+ "recent emacsen), not from the older and less maintained "
+ "python-mode.el")))
+
+(defadvice python-indent-calculate-levels (after gyp-outdent-closing-parens
+ activate)
+ "De-indent closing parens, braces, and brackets in gyp-mode."
+ (when (and (eq major-mode 'gyp-mode)
+ (string-match "^ *[])}][],)}]* *$"
+ (buffer-substring-no-properties
+ (line-beginning-position) (line-end-position))))
+ (setf (first python-indent-levels)
+ (- (first python-indent-levels) python-continuation-offset))))
+
+(defadvice python-indent-guess-indent-offset (around
+ gyp-indent-guess-indent-offset
+ activate)
+ "Guess correct indent offset in gyp-mode."
+ (or (and (not (eq major-mode 'gyp-mode))
+ ad-do-it)
+ (save-excursion
+ (save-restriction
+ (widen)
+ (goto-char (point-min))
+ ;; Find first line ending with an opening brace that is not a comment.
+ (or (and (re-search-forward "\\(^[[{]$\\|^.*[^#].*[[{]$\\)")
+ (forward-line)
+ (/= (current-indentation) 0)
+ (set (make-local-variable 'python-indent-offset)
+ (current-indentation))
+ (set (make-local-variable 'python-continuation-offset)
+ (current-indentation)))
+ (message "Can't guess gyp indent offset, using default: %s"
+ python-continuation-offset))))))
+
+(define-derived-mode gyp-mode python-mode "Gyp"
+ "Major mode for editing .gyp files. See http://code.google.com/p/gyp/"
+ ;; gyp-parse-history is a stack of (POSITION . PARSE-STATE) tuples,
+ ;; with greater positions at the top of the stack. PARSE-STATE
+ ;; is a list of section symbols (see gyp-section-name and gyp-parse-to)
+ ;; with most nested section symbol at the front of the list.
+ (set (make-local-variable 'gyp-parse-history) '((1 . (list))))
+ (gyp-add-font-lock-keywords))
+
+(defun gyp-set-indentation ()
+ "Hook function to configure python indentation to suit gyp mode."
+ (set (make-local-variable 'python-indent-offset) 2)
+ (set (make-local-variable 'python-continuation-offset) 2)
+ (set (make-local-variable 'python-indent-guess-indent-offset) t)
+ (python-indent-guess-indent-offset))
+
+(add-hook 'gyp-mode-hook 'gyp-set-indentation)
+
+(add-to-list 'auto-mode-alist '("\\.gyp\\'" . gyp-mode))
+(add-to-list 'auto-mode-alist '("\\.gypi\\'" . gyp-mode))
+(add-to-list 'auto-mode-alist '("/\\.gclient\\'" . gyp-mode))
+
+;;; Font-lock support
+
+(defconst gyp-dependencies-regexp
+ (regexp-opt (list "dependencies" "export_dependent_settings"))
+ "Regular expression to introduce 'dependencies' section")
+
+(defconst gyp-sources-regexp
+ (regexp-opt (list "action" "files" "include_dirs" "includes" "inputs"
+ "libraries" "outputs" "sources"))
+ "Regular expression to introduce 'sources' sections")
+
+(defconst gyp-conditions-regexp
+ (regexp-opt (list "conditions" "target_conditions"))
+ "Regular expression to introduce conditions sections")
+
+(defconst gyp-variables-regexp
+ "^variables"
+ "Regular expression to introduce variables sections")
+
+(defconst gyp-defines-regexp
+ "^defines"
+ "Regular expression to introduce 'defines' sections")
+
+(defconst gyp-targets-regexp
+ "^targets"
+ "Regular expression to introduce 'targets' sections")
+
+(defun gyp-section-name (section)
+ "Map the sections we are interested in from SECTION to symbol.
+
+ SECTION is a string from the buffer that introduces a section. The result is
+ a symbol representing the kind of section.
+
+ This allows us to treat (for the purposes of font-lock) several different
+ section names as the same kind of section. For example, a 'sources section
+ can be introduced by the 'sources', 'inputs', 'outputs' keyword.
+
+ 'other is the default section kind when a more specific match is not made."
+ (cond ((string-match-p gyp-dependencies-regexp section) 'dependencies)
+ ((string-match-p gyp-sources-regexp section) 'sources)
+ ((string-match-p gyp-variables-regexp section) 'variables)
+ ((string-match-p gyp-conditions-regexp section) 'conditions)
+ ((string-match-p gyp-targets-regexp section) 'targets)
+ ((string-match-p gyp-defines-regexp section) 'defines)
+ (t 'other)))
+
+(defun gyp-invalidate-parse-states-after (target-point)
+ "Erase any parse information after target-point."
+ (while (> (caar gyp-parse-history) target-point)
+ (setq gyp-parse-history (cdr gyp-parse-history))))
+
+(defun gyp-parse-point ()
+ "The point of the last parse state added by gyp-parse-to."
+ (caar gyp-parse-history))
+
+(defun gyp-parse-sections ()
+ "A list of section symbols holding at the last parse state point."
+ (cdar gyp-parse-history))
+
+(defun gyp-inside-dictionary-p ()
+ "Predicate returning true if the parser is inside a dictionary."
+ (not (eq (cadar gyp-parse-history) 'list)))
+
+(defun gyp-add-parse-history (point sections)
+ "Add parse state SECTIONS to the parse history at POINT so that parsing can be
+ resumed instantly."
+ (while (>= (caar gyp-parse-history) point)
+ (setq gyp-parse-history (cdr gyp-parse-history)))
+ (setq gyp-parse-history (cons (cons point sections) gyp-parse-history)))
+
+(defun gyp-parse-to (target-point)
+ "Parses from (point) to TARGET-POINT adding the parse state information to
+ gyp-parse-history. Parsing stops if TARGET-POINT is reached or if a
+ string literal has been parsed. Returns nil if no further parsing can be
+ done, otherwise returns the position of the start of a parsed string, leaving
+ the point at the end of the string."
+ (let ((parsing t)
+ string-start)
+ (while parsing
+ (setq string-start nil)
+ ;; Parse up to a character that starts a sexp, or if the nesting
+ ;; level decreases.
+ (let ((state (parse-partial-sexp (gyp-parse-point)
+ target-point
+ -1
+ t))
+ (sections (gyp-parse-sections)))
+ (if (= (nth 0 state) -1)
+ (setq sections (cdr sections)) ; pop out a level
+ (cond ((looking-at-p "['\"]") ; a string
+ (setq string-start (point))
+ (goto-char (scan-sexps (point) 1))
+ (if (gyp-inside-dictionary-p)
+ ;; Look for sections inside a dictionary
+ (let ((section (gyp-section-name
+ (buffer-substring-no-properties
+ (+ 1 string-start)
+ (- (point) 1)))))
+ (setq sections (cons section (cdr sections)))))
+ ;; Stop after the string so it can be fontified.
+ (setq target-point (point)))
+ ((looking-at-p "{")
+ ;; Inside a dictionary. Increase nesting.
+ (forward-char 1)
+ (setq sections (cons 'unknown sections)))
+ ((looking-at-p "\\[")
+ ;; Inside a list. Increase nesting
+ (forward-char 1)
+ (setq sections (cons 'list sections)))
+ ((not (eobp))
+ ;; other
+ (forward-char 1))))
+ (gyp-add-parse-history (point) sections)
+ (setq parsing (< (point) target-point))))
+ string-start))
+
+(defun gyp-section-at-point ()
+ "Transform the last parse state, which is a list of nested sections and return
+ the section symbol that should be used to determine font-lock information for
+ the string. Can return nil indicating the string should not have any attached
+ section."
+ (let ((sections (gyp-parse-sections)))
+ (cond
+ ((eq (car sections) 'conditions)
+ ;; conditions can occur in a variables section, but we still want to
+ ;; highlight it as a keyword.
+ nil)
+ ((and (eq (car sections) 'list)
+ (eq (cadr sections) 'list))
+ ;; conditions and sources can have items in [[ ]]
+ (caddr sections))
+ (t (cadr sections)))))
+
+(defun gyp-section-match (limit)
+ "Parse from (point) to LIMIT returning by means of match data what was
+ matched. The group of the match indicates what style font-lock should apply.
+ See also `gyp-add-font-lock-keywords'."
+ (gyp-invalidate-parse-states-after (point))
+ (let ((group nil)
+ (string-start t))
+ (while (and (< (point) limit)
+ (not group)
+ string-start)
+ (setq string-start (gyp-parse-to limit))
+ (if string-start
+ (setq group (cl-case (gyp-section-at-point)
+ ('dependencies 1)
+ ('variables 2)
+ ('conditions 2)
+ ('sources 3)
+ ('defines 4)
+ (nil nil)))))
+ (if group
+ (progn
+ ;; Set the match data to indicate to the font-lock mechanism the
+ ;; highlighting to be performed.
+ (set-match-data (append (list string-start (point))
+ (make-list (* (1- group) 2) nil)
+ (list (1+ string-start) (1- (point)))))
+ t))))
+
+;;; Please see http://code.google.com/p/gyp/wiki/GypLanguageSpecification for
+;;; canonical list of keywords.
+(defun gyp-add-font-lock-keywords ()
+ "Add gyp-mode keywords to font-lock mechanism."
+ ;; TODO(jknotten): Move all the keyword highlighting into gyp-section-match
+ ;; so that we can do the font-locking in a single font-lock pass.
+ (font-lock-add-keywords
+ nil
+ (list
+ ;; Top-level keywords
+ (list (concat "['\"]\\("
+ (regexp-opt (list "action" "action_name" "actions" "cflags"
+ "cflags_cc" "conditions" "configurations"
+ "copies" "defines" "dependencies" "destination"
+ "direct_dependent_settings"
+ "export_dependent_settings" "extension" "files"
+ "include_dirs" "includes" "inputs" "ldflags" "libraries"
+ "link_settings" "mac_bundle" "message"
+ "msvs_external_rule" "outputs" "product_name"
+ "process_outputs_as_sources" "rules" "rule_name"
+ "sources" "suppress_wildcard"
+ "target_conditions" "target_defaults"
+ "target_defines" "target_name" "toolsets"
+ "targets" "type" "variables" "xcode_settings"))
+ "[!/+=]?\\)") 1 'font-lock-keyword-face t)
+ ;; Type of target
+ (list (concat "['\"]\\("
+ (regexp-opt (list "loadable_module" "static_library"
+ "shared_library" "executable" "none"))
+ "\\)") 1 'font-lock-type-face t)
+ (list "\\(?:target\\|action\\)_name['\"]\\s-*:\\s-*['\"]\\([^ '\"]*\\)" 1
+ 'font-lock-function-name-face t)
+ (list 'gyp-section-match
+ (list 1 'font-lock-function-name-face t t) ; dependencies
+ (list 2 'font-lock-variable-name-face t t) ; variables, conditions
+ (list 3 'font-lock-constant-face t t) ; sources
+ (list 4 'font-lock-preprocessor-face t t)) ; preprocessor
+ ;; Variable expansion
+ (list "<@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
+ ;; Command expansion
+ (list "<!@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
+ )))
+
+(provide 'gyp)
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/run-unit-tests.sh b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/run-unit-tests.sh
new file mode 100755
index 0000000..6e62b9b
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/run-unit-tests.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+emacs --no-site-file --no-init-file --batch \
+ --load ert.el --load gyp.el --load gyp-tests.el \
+ -f ert-run-tests-batch-and-exit
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp
new file mode 100644
index 0000000..29300fe
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp
@@ -0,0 +1,1105 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'chromium_code': 1,
+ # Override to dynamically link the PulseAudio library.
+ 'use_pulseaudio%': 0,
+ # Override to dynamically link the cras (ChromeOS audio) library.
+ 'use_cras%': 0,
+ },
+ 'targets': [
+ {
+ 'target_name': 'media',
+ 'type': '<(component)',
+ 'dependencies': [
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
+ '../build/temp_gyp/googleurl.gyp:googleurl',
+ '../crypto/crypto.gyp:crypto',
+ '../third_party/openmax/openmax.gyp:il',
+ '../ui/ui.gyp:ui',
+ ],
+ 'defines': [
+ 'MEDIA_IMPLEMENTATION',
+ ],
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'audio/android/audio_manager_android.cc',
+ 'audio/android/audio_manager_android.h',
+ 'audio/android/audio_track_output_android.cc',
+ 'audio/android/audio_track_output_android.h',
+ 'audio/android/opensles_input.cc',
+ 'audio/android/opensles_input.h',
+ 'audio/android/opensles_output.cc',
+ 'audio/android/opensles_output.h',
+ 'audio/async_socket_io_handler.h',
+ 'audio/async_socket_io_handler_posix.cc',
+ 'audio/async_socket_io_handler_win.cc',
+ 'audio/audio_buffers_state.cc',
+ 'audio/audio_buffers_state.h',
+ 'audio/audio_io.h',
+ 'audio/audio_input_controller.cc',
+ 'audio/audio_input_controller.h',
+ 'audio/audio_input_stream_impl.cc',
+ 'audio/audio_input_stream_impl.h',
+ 'audio/audio_device_name.cc',
+ 'audio/audio_device_name.h',
+ 'audio/audio_manager.cc',
+ 'audio/audio_manager.h',
+ 'audio/audio_manager_base.cc',
+ 'audio/audio_manager_base.h',
+ 'audio/audio_output_controller.cc',
+ 'audio/audio_output_controller.h',
+ 'audio/audio_output_dispatcher.cc',
+ 'audio/audio_output_dispatcher.h',
+ 'audio/audio_output_dispatcher_impl.cc',
+ 'audio/audio_output_dispatcher_impl.h',
+ 'audio/audio_output_mixer.cc',
+ 'audio/audio_output_mixer.h',
+ 'audio/audio_output_proxy.cc',
+ 'audio/audio_output_proxy.h',
+ 'audio/audio_parameters.cc',
+ 'audio/audio_parameters.h',
+ 'audio/audio_util.cc',
+ 'audio/audio_util.h',
+ 'audio/cross_process_notification.cc',
+ 'audio/cross_process_notification.h',
+ 'audio/cross_process_notification_win.cc',
+ 'audio/cross_process_notification_posix.cc',
+ 'audio/fake_audio_input_stream.cc',
+ 'audio/fake_audio_input_stream.h',
+ 'audio/fake_audio_output_stream.cc',
+ 'audio/fake_audio_output_stream.h',
+ 'audio/linux/audio_manager_linux.cc',
+ 'audio/linux/audio_manager_linux.h',
+ 'audio/linux/alsa_input.cc',
+ 'audio/linux/alsa_input.h',
+ 'audio/linux/alsa_output.cc',
+ 'audio/linux/alsa_output.h',
+ 'audio/linux/alsa_util.cc',
+ 'audio/linux/alsa_util.h',
+ 'audio/linux/alsa_wrapper.cc',
+ 'audio/linux/alsa_wrapper.h',
+ 'audio/linux/cras_output.cc',
+ 'audio/linux/cras_output.h',
+ 'audio/openbsd/audio_manager_openbsd.cc',
+ 'audio/openbsd/audio_manager_openbsd.h',
+ 'audio/mac/audio_input_mac.cc',
+ 'audio/mac/audio_input_mac.h',
+ 'audio/mac/audio_low_latency_input_mac.cc',
+ 'audio/mac/audio_low_latency_input_mac.h',
+ 'audio/mac/audio_low_latency_output_mac.cc',
+ 'audio/mac/audio_low_latency_output_mac.h',
+ 'audio/mac/audio_manager_mac.cc',
+ 'audio/mac/audio_manager_mac.h',
+ 'audio/mac/audio_output_mac.cc',
+ 'audio/mac/audio_output_mac.h',
+ 'audio/null_audio_sink.cc',
+ 'audio/null_audio_sink.h',
+ 'audio/pulse/pulse_output.cc',
+ 'audio/pulse/pulse_output.h',
+ 'audio/sample_rates.cc',
+ 'audio/sample_rates.h',
+ 'audio/simple_sources.cc',
+ 'audio/simple_sources.h',
+ 'audio/win/audio_low_latency_input_win.cc',
+ 'audio/win/audio_low_latency_input_win.h',
+ 'audio/win/audio_low_latency_output_win.cc',
+ 'audio/win/audio_low_latency_output_win.h',
+ 'audio/win/audio_manager_win.cc',
+ 'audio/win/audio_manager_win.h',
+ 'audio/win/avrt_wrapper_win.cc',
+ 'audio/win/avrt_wrapper_win.h',
+ 'audio/win/device_enumeration_win.cc',
+ 'audio/win/device_enumeration_win.h',
+ 'audio/win/wavein_input_win.cc',
+ 'audio/win/wavein_input_win.h',
+ 'audio/win/waveout_output_win.cc',
+ 'audio/win/waveout_output_win.h',
+ 'base/android/media_jni_registrar.cc',
+ 'base/android/media_jni_registrar.h',
+ 'base/audio_decoder.cc',
+ 'base/audio_decoder.h',
+ 'base/audio_decoder_config.cc',
+ 'base/audio_decoder_config.h',
+ 'base/audio_renderer.h',
+ 'base/audio_renderer_mixer.cc',
+ 'base/audio_renderer_mixer.h',
+ 'base/audio_renderer_mixer_input.cc',
+ 'base/audio_renderer_mixer_input.h',
+ 'base/bitstream_buffer.h',
+ 'base/buffers.cc',
+ 'base/buffers.h',
+ 'base/byte_queue.cc',
+ 'base/byte_queue.h',
+ 'base/channel_layout.cc',
+ 'base/channel_layout.h',
+ 'base/clock.cc',
+ 'base/clock.h',
+ 'base/composite_filter.cc',
+ 'base/composite_filter.h',
+ 'base/data_buffer.cc',
+ 'base/data_buffer.h',
+ 'base/data_source.cc',
+ 'base/data_source.h',
+ 'base/decoder_buffer.cc',
+ 'base/decoder_buffer.h',
+ 'base/decrypt_config.cc',
+ 'base/decrypt_config.h',
+ 'base/decryptor.h',
+ 'base/decryptor_client.h',
+ 'base/demuxer.cc',
+ 'base/demuxer.h',
+ 'base/demuxer_stream.cc',
+ 'base/demuxer_stream.h',
+ 'base/djb2.cc',
+ 'base/djb2.h',
+ 'base/filter_collection.cc',
+ 'base/filter_collection.h',
+ 'base/filter_host.h',
+ 'base/filters.cc',
+ 'base/filters.h',
+ 'base/h264_bitstream_converter.cc',
+ 'base/h264_bitstream_converter.h',
+ 'base/media.h',
+ 'base/media_android.cc',
+ 'base/media_export.h',
+ 'base/media_log.cc',
+ 'base/media_log.h',
+ 'base/media_log_event.h',
+ 'base/media_posix.cc',
+ 'base/media_switches.cc',
+ 'base/media_switches.h',
+ 'base/media_win.cc',
+ 'base/message_loop_factory.cc',
+ 'base/message_loop_factory.h',
+ 'base/pipeline.cc',
+ 'base/pipeline.h',
+ 'base/pipeline_status.cc',
+ 'base/pipeline_status.h',
+ 'base/ranges.cc',
+ 'base/ranges.h',
+ 'base/seekable_buffer.cc',
+ 'base/seekable_buffer.h',
+ 'base/state_matrix.cc',
+ 'base/state_matrix.h',
+ 'base/stream_parser.cc',
+ 'base/stream_parser.h',
+ 'base/stream_parser_buffer.cc',
+ 'base/stream_parser_buffer.h',
+ 'base/video_decoder.cc',
+ 'base/video_decoder.h',
+ 'base/video_decoder_config.cc',
+ 'base/video_decoder_config.h',
+ 'base/video_frame.cc',
+ 'base/video_frame.h',
+ 'base/video_renderer.h',
+ 'base/video_util.cc',
+ 'base/video_util.h',
+ 'crypto/aes_decryptor.cc',
+ 'crypto/aes_decryptor.h',
+ 'ffmpeg/ffmpeg_common.cc',
+ 'ffmpeg/ffmpeg_common.h',
+ 'ffmpeg/file_protocol.cc',
+ 'ffmpeg/file_protocol.h',
+ 'filters/audio_file_reader.cc',
+ 'filters/audio_file_reader.h',
+ 'filters/audio_renderer_algorithm.cc',
+ 'filters/audio_renderer_algorithm.h',
+ 'filters/audio_renderer_impl.cc',
+ 'filters/audio_renderer_impl.h',
+ 'filters/bitstream_converter.cc',
+ 'filters/bitstream_converter.h',
+ 'filters/chunk_demuxer.cc',
+ 'filters/chunk_demuxer.h',
+ 'filters/chunk_demuxer_client.h',
+ 'filters/dummy_demuxer.cc',
+ 'filters/dummy_demuxer.h',
+ 'filters/ffmpeg_audio_decoder.cc',
+ 'filters/ffmpeg_audio_decoder.h',
+ 'filters/ffmpeg_demuxer.cc',
+ 'filters/ffmpeg_demuxer.h',
+ 'filters/ffmpeg_h264_bitstream_converter.cc',
+ 'filters/ffmpeg_h264_bitstream_converter.h',
+ 'filters/ffmpeg_glue.cc',
+ 'filters/ffmpeg_glue.h',
+ 'filters/ffmpeg_video_decoder.cc',
+ 'filters/ffmpeg_video_decoder.h',
+ 'filters/file_data_source.cc',
+ 'filters/file_data_source.h',
+ 'filters/gpu_video_decoder.cc',
+ 'filters/gpu_video_decoder.h',
+ 'filters/in_memory_url_protocol.cc',
+ 'filters/in_memory_url_protocol.h',
+ 'filters/source_buffer_stream.cc',
+ 'filters/source_buffer_stream.h',
+ 'filters/video_frame_generator.cc',
+ 'filters/video_frame_generator.h',
+ 'filters/video_renderer_base.cc',
+ 'filters/video_renderer_base.h',
+ 'video/capture/fake_video_capture_device.cc',
+ 'video/capture/fake_video_capture_device.h',
+ 'video/capture/linux/video_capture_device_linux.cc',
+ 'video/capture/linux/video_capture_device_linux.h',
+ 'video/capture/mac/video_capture_device_mac.h',
+ 'video/capture/mac/video_capture_device_mac.mm',
+ 'video/capture/mac/video_capture_device_qtkit_mac.h',
+ 'video/capture/mac/video_capture_device_qtkit_mac.mm',
+ 'video/capture/video_capture.h',
+ 'video/capture/video_capture_device.h',
+ 'video/capture/video_capture_device_dummy.cc',
+ 'video/capture/video_capture_device_dummy.h',
+ 'video/capture/video_capture_proxy.cc',
+ 'video/capture/video_capture_proxy.h',
+ 'video/capture/video_capture_types.h',
+ 'video/capture/win/filter_base_win.cc',
+ 'video/capture/win/filter_base_win.h',
+ 'video/capture/win/pin_base_win.cc',
+ 'video/capture/win/pin_base_win.h',
+ 'video/capture/win/sink_filter_observer_win.h',
+ 'video/capture/win/sink_filter_win.cc',
+ 'video/capture/win/sink_filter_win.h',
+ 'video/capture/win/sink_input_pin_win.cc',
+ 'video/capture/win/sink_input_pin_win.h',
+ 'video/capture/win/video_capture_device_win.cc',
+ 'video/capture/win/video_capture_device_win.h',
+ 'video/picture.cc',
+ 'video/picture.h',
+ 'video/video_decode_accelerator.cc',
+ 'video/video_decode_accelerator.h',
+ 'webm/webm_constants.h',
+ 'webm/webm_cluster_parser.cc',
+ 'webm/webm_cluster_parser.h',
+ 'webm/webm_content_encodings.cc',
+ 'webm/webm_content_encodings.h',
+ 'webm/webm_content_encodings_client.cc',
+ 'webm/webm_content_encodings_client.h',
+ 'webm/webm_info_parser.cc',
+ 'webm/webm_info_parser.h',
+ 'webm/webm_parser.cc',
+ 'webm/webm_parser.h',
+ 'webm/webm_stream_parser.cc',
+ 'webm/webm_stream_parser.h',
+ 'webm/webm_tracks_parser.cc',
+ 'webm/webm_tracks_parser.h',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '..',
+ ],
+ },
+ 'conditions': [
+ # Android doesn't use ffmpeg, so make the dependency conditional
+ # and exclude the sources which depend on ffmpeg.
+ ['OS != "android"', {
+ 'dependencies': [
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ }],
+ ['OS == "android"', {
+ 'sources!': [
+ 'base/media_posix.cc',
+ 'ffmpeg/ffmpeg_common.cc',
+ 'ffmpeg/ffmpeg_common.h',
+ 'ffmpeg/file_protocol.cc',
+ 'ffmpeg/file_protocol.h',
+ 'filters/audio_file_reader.cc',
+ 'filters/audio_file_reader.h',
+ 'filters/bitstream_converter.cc',
+ 'filters/bitstream_converter.h',
+ 'filters/chunk_demuxer.cc',
+ 'filters/chunk_demuxer.h',
+ 'filters/chunk_demuxer_client.h',
+ 'filters/ffmpeg_audio_decoder.cc',
+ 'filters/ffmpeg_audio_decoder.h',
+ 'filters/ffmpeg_demuxer.cc',
+ 'filters/ffmpeg_demuxer.h',
+ 'filters/ffmpeg_h264_bitstream_converter.cc',
+ 'filters/ffmpeg_h264_bitstream_converter.h',
+ 'filters/ffmpeg_glue.cc',
+ 'filters/ffmpeg_glue.h',
+ 'filters/ffmpeg_video_decoder.cc',
+ 'filters/ffmpeg_video_decoder.h',
+ 'filters/gpu_video_decoder.cc',
+ 'filters/gpu_video_decoder.h',
+ 'webm/webm_cluster_parser.cc',
+ 'webm/webm_cluster_parser.h',
+ 'webm/webm_stream_parser.cc',
+ 'webm/webm_stream_parser.h',
+ ],
+ }],
+ # The 'android' condition below was added temporarily and should be
+ # removed downstream, because there is no Java environment set up in
+ # upstream yet.
+ ['OS == "android"', {
+ 'sources!':[
+ 'audio/android/audio_track_output_android.cc',
+ ],
+ 'sources':[
+ 'audio/android/audio_track_output_stub_android.cc',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-lOpenSLES',
+ ],
+ },
+ }],
+ ['OS=="linux" or OS=="freebsd" or OS=="solaris"', {
+ 'link_settings': {
+ 'libraries': [
+ '-lasound',
+ ],
+ },
+ }],
+ ['OS=="openbsd"', {
+ 'sources/': [ ['exclude', '/alsa_' ],
+ ['exclude', '/audio_manager_linux' ] ],
+ 'link_settings': {
+ 'libraries': [
+ ],
+ },
+ }],
+ ['OS!="openbsd"', {
+ 'sources!': [
+ 'audio/openbsd/audio_manager_openbsd.cc',
+ 'audio/openbsd/audio_manager_openbsd.h',
+ ],
+ }],
+ ['OS=="linux"', {
+ 'variables': {
+ 'conditions': [
+ ['sysroot!=""', {
+ 'pkg-config': '../build/linux/pkg-config-wrapper "<(sysroot)" "<(target_arch)"',
+ }, {
+ 'pkg-config': 'pkg-config'
+ }],
+ ],
+ },
+ 'conditions': [
+ ['use_cras == 1', {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags libcras)',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '<!@(<(pkg-config) --libs libcras)',
+ ],
+ },
+ 'defines': [
+ 'USE_CRAS',
+ ],
+ }, { # else: use_cras == 0
+ 'sources!': [
+ 'audio/linux/cras_output.cc',
+ 'audio/linux/cras_output.h',
+ ],
+ }],
+ ],
+ }],
+ ['os_posix == 1', {
+ 'conditions': [
+ ['use_pulseaudio == 1', {
+ 'cflags': [
+ '<!@(pkg-config --cflags libpulse)',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '<!@(pkg-config --libs-only-l libpulse)',
+ ],
+ },
+ 'defines': [
+ 'USE_PULSEAUDIO',
+ ],
+ }, { # else: use_pulseaudio == 0
+ 'sources!': [
+ 'audio/pulse/pulse_output.cc',
+ 'audio/pulse/pulse_output.h',
+ ],
+ }],
+ ],
+ }],
+ ['os_posix == 1 and OS != "android"', {
+ # Video capture isn't supported in Android yet.
+ 'sources!': [
+ 'video/capture/video_capture_device_dummy.cc',
+ 'video/capture/video_capture_device_dummy.h',
+ ],
+ }],
+ ['OS=="mac"', {
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/AudioUnit.framework',
+ '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework',
+ '$(SDKROOT)/System/Library/Frameworks/CoreAudio.framework',
+ '$(SDKROOT)/System/Library/Frameworks/CoreVideo.framework',
+ '$(SDKROOT)/System/Library/Frameworks/QTKit.framework',
+ ],
+ },
+ }],
+ ['OS=="win"', {
+ 'sources!': [
+ 'audio/pulse/pulse_output.cc',
+ 'audio/pulse/pulse_output.h',
+ 'video/capture/video_capture_device_dummy.cc',
+ 'video/capture/video_capture_device_dummy.h',
+ ],
+ }],
+ ['proprietary_codecs==1 or branding=="Chrome"', {
+ 'sources': [
+ 'mp4/avc.cc',
+ 'mp4/avc.h',
+ 'mp4/box_definitions.cc',
+ 'mp4/box_definitions.h',
+ 'mp4/box_reader.cc',
+ 'mp4/box_reader.h',
+ 'mp4/cenc.cc',
+ 'mp4/cenc.h',
+ 'mp4/mp4_stream_parser.cc',
+ 'mp4/mp4_stream_parser.h',
+ 'mp4/offset_byte_queue.cc',
+ 'mp4/offset_byte_queue.h',
+ 'mp4/track_run_iterator.cc',
+ 'mp4/track_run_iterator.h',
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'yuv_convert',
+ 'type': 'static_library',
+ 'include_dirs': [
+ '..',
+ ],
+ 'conditions': [
+ ['order_profiling != 0', {
+ 'target_conditions' : [
+ ['_toolset=="target"', {
+ 'cflags!': [ '-finstrument-functions' ],
+ }],
+ ],
+ }],
+ [ 'target_arch == "ia32" or target_arch == "x64"', {
+ 'dependencies': [
+ 'yuv_convert_simd_x86',
+ ],
+ }],
+ [ 'target_arch == "arm"', {
+ 'dependencies': [
+ 'yuv_convert_simd_arm',
+ ],
+ }],
+ ],
+ 'sources': [
+ 'base/yuv_convert.cc',
+ 'base/yuv_convert.h',
+ ],
+ },
+ {
+ 'target_name': 'yuv_convert_simd_x86',
+ 'type': 'static_library',
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'base/simd/convert_rgb_to_yuv_c.cc',
+ 'base/simd/convert_rgb_to_yuv_sse2.cc',
+ 'base/simd/convert_rgb_to_yuv_ssse3.asm',
+ 'base/simd/convert_rgb_to_yuv_ssse3.cc',
+ 'base/simd/convert_rgb_to_yuv_ssse3.inc',
+ 'base/simd/convert_yuv_to_rgb_c.cc',
+ 'base/simd/convert_yuv_to_rgb_x86.cc',
+ 'base/simd/convert_yuv_to_rgb_mmx.asm',
+ 'base/simd/convert_yuv_to_rgb_mmx.inc',
+ 'base/simd/convert_yuv_to_rgb_sse.asm',
+ 'base/simd/filter_yuv.h',
+ 'base/simd/filter_yuv_c.cc',
+ 'base/simd/filter_yuv_mmx.cc',
+ 'base/simd/filter_yuv_sse2.cc',
+ 'base/simd/linear_scale_yuv_to_rgb_mmx.asm',
+ 'base/simd/linear_scale_yuv_to_rgb_mmx.inc',
+ 'base/simd/linear_scale_yuv_to_rgb_sse.asm',
+ 'base/simd/scale_yuv_to_rgb_mmx.asm',
+ 'base/simd/scale_yuv_to_rgb_mmx.inc',
+ 'base/simd/scale_yuv_to_rgb_sse.asm',
+ 'base/simd/yuv_to_rgb_table.cc',
+ 'base/simd/yuv_to_rgb_table.h',
+ ],
+ 'conditions': [
+ ['order_profiling != 0', {
+ 'target_conditions' : [
+ ['_toolset=="target"', {
+ 'cflags!': [ '-finstrument-functions' ],
+ }],
+ ],
+ }],
+ [ 'target_arch == "x64"', {
+ # Source files optimized for X64 systems.
+ 'sources': [
+ 'base/simd/linear_scale_yuv_to_rgb_mmx_x64.asm',
+ 'base/simd/scale_yuv_to_rgb_sse2_x64.asm',
+ ],
+ }],
+ [ 'os_posix == 1 and OS != "mac" and OS != "android"', {
+ 'cflags': [
+ '-msse2',
+ ],
+ }],
+ [ 'OS == "mac"', {
+ 'configurations': {
+ 'Debug': {
+ 'xcode_settings': {
+ # gcc on the mac builds horribly unoptimized sse code in debug
+ # mode. Since this is rarely going to be debugged, run with full
+ # optimizations in Debug as well as Release.
+ 'GCC_OPTIMIZATION_LEVEL': '3', # -O3
+ },
+ },
+ },
+ }],
+ [ 'OS=="win"', {
+ 'variables': {
+ 'yasm_flags': [
+ '-DWIN32',
+ '-DMSVC',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ },
+ }],
+ [ 'OS=="mac"', {
+ 'variables': {
+ 'yasm_flags': [
+ '-DPREFIX',
+ '-DMACHO',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ },
+ }],
+ [ 'os_posix==1 and OS!="mac"', {
+ 'variables': {
+ 'conditions': [
+ [ 'target_arch=="ia32"', {
+ 'yasm_flags': [
+ '-DX86_32',
+ '-DELF',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ }, {
+ 'yasm_flags': [
+ '-DARCH_X86_64',
+ '-DELF',
+ '-DPIC',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ }],
+ ],
+ },
+ }],
+ ],
+ 'variables': {
+ 'yasm_output_path': '<(SHARED_INTERMEDIATE_DIR)/media',
+ },
+ 'msvs_2010_disable_uldi_when_referenced': 1,
+ 'includes': [
+ '../third_party/yasm/yasm_compile.gypi',
+ ],
+ },
+ {
+ 'target_name': 'yuv_convert_simd_arm',
+ 'type': 'static_library',
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'base/simd/convert_rgb_to_yuv_c.cc',
+ 'base/simd/convert_rgb_to_yuv.h',
+ 'base/simd/convert_yuv_to_rgb_c.cc',
+ 'base/simd/convert_yuv_to_rgb.h',
+ 'base/simd/filter_yuv.h',
+ 'base/simd/filter_yuv_c.cc',
+ 'base/simd/yuv_to_rgb_table.cc',
+ 'base/simd/yuv_to_rgb_table.h',
+ ],
+ },
+ {
+ 'target_name': 'media_unittests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'media_test_support',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../base/base.gyp:base_i18n',
+ '../base/base.gyp:test_support_base',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ '../ui/ui.gyp:ui',
+ ],
+ 'sources': [
+ 'audio/async_socket_io_handler_unittest.cc',
+ 'audio/audio_input_controller_unittest.cc',
+ 'audio/audio_input_device_unittest.cc',
+ 'audio/audio_input_unittest.cc',
+ 'audio/audio_input_volume_unittest.cc',
+ 'audio/audio_low_latency_input_output_unittest.cc',
+ 'audio/audio_output_controller_unittest.cc',
+ 'audio/audio_output_proxy_unittest.cc',
+ 'audio/audio_parameters_unittest.cc',
+ 'audio/audio_util_unittest.cc',
+ 'audio/cross_process_notification_unittest.cc',
+ 'audio/linux/alsa_output_unittest.cc',
+ 'audio/mac/audio_low_latency_input_mac_unittest.cc',
+ 'audio/mac/audio_output_mac_unittest.cc',
+ 'audio/simple_sources_unittest.cc',
+ 'audio/win/audio_low_latency_input_win_unittest.cc',
+ 'audio/win/audio_low_latency_output_win_unittest.cc',
+ 'audio/win/audio_output_win_unittest.cc',
+ 'base/audio_renderer_mixer_unittest.cc',
+ 'base/audio_renderer_mixer_input_unittest.cc',
+ 'base/buffers_unittest.cc',
+ 'base/clock_unittest.cc',
+ 'base/composite_filter_unittest.cc',
+ 'base/data_buffer_unittest.cc',
+ 'base/decoder_buffer_unittest.cc',
+ 'base/djb2_unittest.cc',
+ 'base/fake_audio_render_callback.cc',
+ 'base/fake_audio_render_callback.h',
+ 'base/filter_collection_unittest.cc',
+ 'base/h264_bitstream_converter_unittest.cc',
+ 'base/pipeline_unittest.cc',
+ 'base/ranges_unittest.cc',
+ 'base/run_all_unittests.cc',
+ 'base/seekable_buffer_unittest.cc',
+ 'base/state_matrix_unittest.cc',
+ 'base/test_data_util.cc',
+ 'base/test_data_util.h',
+ 'base/video_frame_unittest.cc',
+ 'base/video_util_unittest.cc',
+ 'base/yuv_convert_unittest.cc',
+ 'crypto/aes_decryptor_unittest.cc',
+ 'ffmpeg/ffmpeg_common_unittest.cc',
+ 'filters/audio_renderer_algorithm_unittest.cc',
+ 'filters/audio_renderer_impl_unittest.cc',
+ 'filters/bitstream_converter_unittest.cc',
+ 'filters/chunk_demuxer_unittest.cc',
+ 'filters/ffmpeg_audio_decoder_unittest.cc',
+ 'filters/ffmpeg_decoder_unittest.h',
+ 'filters/ffmpeg_demuxer_unittest.cc',
+ 'filters/ffmpeg_glue_unittest.cc',
+ 'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
+ 'filters/ffmpeg_video_decoder_unittest.cc',
+ 'filters/file_data_source_unittest.cc',
+ 'filters/pipeline_integration_test.cc',
+ 'filters/pipeline_integration_test_base.cc',
+ 'filters/source_buffer_stream_unittest.cc',
+ 'filters/video_renderer_base_unittest.cc',
+ 'video/capture/video_capture_device_unittest.cc',
+ 'webm/cluster_builder.cc',
+ 'webm/cluster_builder.h',
+ 'webm/webm_cluster_parser_unittest.cc',
+ 'webm/webm_content_encodings_client_unittest.cc',
+ 'webm/webm_parser_unittest.cc',
+ ],
+ 'conditions': [
+ ['os_posix==1 and OS!="mac"', {
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ['OS != "android"', {
+ 'dependencies': [
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ }],
+ ['OS == "android"', {
+ 'sources!': [
+ 'audio/audio_input_volume_unittest.cc',
+ 'base/test_data_util.cc',
+ 'base/test_data_util.h',
+ 'ffmpeg/ffmpeg_common_unittest.cc',
+ 'filters/ffmpeg_audio_decoder_unittest.cc',
+ 'filters/bitstream_converter_unittest.cc',
+ 'filters/chunk_demuxer_unittest.cc',
+ 'filters/ffmpeg_demuxer_unittest.cc',
+ 'filters/ffmpeg_glue_unittest.cc',
+ 'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
+ 'filters/ffmpeg_video_decoder_unittest.cc',
+ 'filters/pipeline_integration_test.cc',
+ 'filters/pipeline_integration_test_base.cc',
+ 'mp4/mp4_stream_parser_unittest.cc',
+ 'webm/webm_cluster_parser_unittest.cc',
+ ],
+ }],
+ ['OS == "linux"', {
+ 'conditions': [
+ ['use_cras == 1', {
+ 'sources': [
+ 'audio/linux/cras_output_unittest.cc',
+ ],
+ 'defines': [
+ 'USE_CRAS',
+ ],
+ }],
+ ],
+ }],
+ [ 'target_arch=="ia32" or target_arch=="x64"', {
+ 'sources': [
+ 'base/simd/convert_rgb_to_yuv_unittest.cc',
+ ],
+ }],
+ ['proprietary_codecs==1 or branding=="Chrome"', {
+ 'sources': [
+ 'mp4/avc_unittest.cc',
+ 'mp4/box_reader_unittest.cc',
+ 'mp4/mp4_stream_parser_unittest.cc',
+ 'mp4/offset_byte_queue_unittest.cc',
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'media_test_support',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ ],
+ 'sources': [
+ 'audio/test_audio_input_controller_factory.cc',
+ 'audio/test_audio_input_controller_factory.h',
+ 'base/mock_callback.cc',
+ 'base/mock_callback.h',
+ 'base/mock_data_source_host.cc',
+ 'base/mock_data_source_host.h',
+ 'base/mock_demuxer_host.cc',
+ 'base/mock_demuxer_host.h',
+ 'base/mock_filter_host.cc',
+ 'base/mock_filter_host.h',
+ 'base/mock_filters.cc',
+ 'base/mock_filters.h',
+ ],
+ },
+ {
+ 'target_name': 'scaler_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../skia/skia.gyp:skia',
+ ],
+ 'sources': [
+ 'tools/scaler_bench/scaler_bench.cc',
+ ],
+ },
+ {
+ 'target_name': 'qt_faststart',
+ 'type': 'executable',
+ 'sources': [
+ 'tools/qt_faststart/qt_faststart.c'
+ ],
+ },
+ {
+ 'target_name': 'seek_tester',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ ],
+ 'sources': [
+ 'tools/seek_tester/seek_tester.cc',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="win"', {
+ 'targets': [
+ {
+ 'target_name': 'player_wtl',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
+ '../ui/ui.gyp:ui',
+ ],
+ 'include_dirs': [
+ '<(DEPTH)/third_party/wtl/include',
+ ],
+ 'sources': [
+ 'tools/player_wtl/list.h',
+ 'tools/player_wtl/mainfrm.h',
+ 'tools/player_wtl/movie.cc',
+ 'tools/player_wtl/movie.h',
+ 'tools/player_wtl/player_wtl.cc',
+ 'tools/player_wtl/player_wtl.rc',
+ 'tools/player_wtl/props.h',
+ 'tools/player_wtl/seek.h',
+ 'tools/player_wtl/resource.h',
+ 'tools/player_wtl/view.h',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '2', # Set /SUBSYSTEM:WINDOWS
+ },
+ },
+ 'defines': [
+ '_CRT_SECURE_NO_WARNINGS=1',
+ ],
+ },
+ ],
+ }],
+ ['OS == "win" or toolkit_uses_gtk == 1', {
+ 'targets': [
+ {
+ 'target_name': 'shader_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../ui/gl/gl.gyp:gl',
+ ],
+ 'sources': [
+ 'tools/shader_bench/shader_bench.cc',
+ 'tools/shader_bench/cpu_color_painter.cc',
+ 'tools/shader_bench/cpu_color_painter.h',
+ 'tools/shader_bench/gpu_color_painter.cc',
+ 'tools/shader_bench/gpu_color_painter.h',
+ 'tools/shader_bench/gpu_painter.cc',
+ 'tools/shader_bench/gpu_painter.h',
+ 'tools/shader_bench/painter.cc',
+ 'tools/shader_bench/painter.h',
+ 'tools/shader_bench/window.cc',
+ 'tools/shader_bench/window.h',
+ ],
+ 'conditions': [
+ ['toolkit_uses_gtk == 1', {
+ 'dependencies': [
+ '../build/linux/system.gyp:gtk',
+ ],
+ 'sources': [
+ 'tools/shader_bench/window_linux.cc',
+ ],
+ }],
+ ['OS=="win"', {
+ 'dependencies': [
+ '../third_party/angle/src/build_angle.gyp:libEGL',
+ '../third_party/angle/src/build_angle.gyp:libGLESv2',
+ ],
+ 'sources': [
+ 'tools/shader_bench/window_win.cc',
+ ],
+ }],
+ ],
+ },
+ ],
+ }],
+ ['OS == "linux" and target_arch != "arm"', {
+ 'targets': [
+ {
+ 'target_name': 'tile_render_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ '../base/base.gyp:base',
+ '../ui/gl/gl.gyp:gl',
+ ],
+ 'libraries': [
+ '-lGL',
+ '-ldl',
+ ],
+ 'sources': [
+ 'tools/tile_render_bench/tile_render_bench.cc',
+ ],
+ },
+ ],
+ }],
+ ['os_posix == 1 and OS != "mac" and OS != "android"', {
+ 'targets': [
+ {
+ 'target_name': 'player_x11',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../ui/gl/gl.gyp:gl',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-ldl',
+ '-lX11',
+ '-lXrender',
+ '-lXext',
+ ],
+ },
+ 'sources': [
+ 'tools/player_x11/data_source_logger.cc',
+ 'tools/player_x11/data_source_logger.h',
+ 'tools/player_x11/gl_video_renderer.cc',
+ 'tools/player_x11/gl_video_renderer.h',
+ 'tools/player_x11/player_x11.cc',
+ 'tools/player_x11/x11_video_renderer.cc',
+ 'tools/player_x11/x11_video_renderer.h',
+ ],
+ },
+ ],
+ }],
+ ['OS == "android"', {
+ 'targets': [
+ {
+ 'target_name': 'player_android',
+ 'type': 'static_library',
+ 'sources': [
+ 'base/android/media_player_bridge.cc',
+ 'base/android/media_player_bridge.h',
+ ],
+ 'dependencies': [
+ '../base/base.gyp:base',
+ ],
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)/media',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'generate-jni-headers',
+ 'inputs': [
+ '../base/android/jni_generator/jni_generator.py',
+ 'base/android/java/src/org/chromium/media/MediaPlayerListener.java',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/media/jni/media_player_listener_jni.h',
+ ],
+ 'action': [
+ 'python',
+ '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+ '-o',
+ '<@(_inputs)',
+ '<@(_outputs)',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'media_java',
+ 'type': 'none',
+ 'dependencies': [ '../base/base.gyp:base_java' ],
+ 'variables': {
+ 'package_name': 'media',
+ 'java_in_dir': 'base/android/java',
+ },
+ 'includes': [ '../build/java.gypi' ],
+ },
+
+ ],
+  }, { # OS != "android"
+ # Android does not use ffmpeg, so disable the targets which require it.
+ 'targets': [
+ {
+ 'target_name': 'ffmpeg_unittests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'media_test_support',
+ '../base/base.gyp:base',
+ '../base/base.gyp:base_i18n',
+ '../base/base.gyp:test_support_base',
+ '../base/base.gyp:test_support_perf',
+ '../testing/gtest.gyp:gtest',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'ffmpeg/ffmpeg_unittest.cc',
+ ],
+ 'conditions': [
+ ['toolkit_uses_gtk == 1', {
+ 'dependencies': [
+ # Needed for the following #include chain:
+ # base/run_all_unittests.cc
+ # ../base/test_suite.h
+ # gtk/gtk.h
+ '../build/linux/system.gyp:gtk',
+ ],
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'ffmpeg_regression_tests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'media_test_support',
+ '../base/base.gyp:test_support_base',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'base/test_data_util.cc',
+ 'base/run_all_unittests.cc',
+ 'ffmpeg/ffmpeg_regression_tests.cc',
+ 'filters/pipeline_integration_test_base.cc',
+ ],
+ 'conditions': [
+ ['os_posix==1 and OS!="mac"', {
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'ffmpeg_tests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'test/ffmpeg_tests/ffmpeg_tests.cc',
+ ],
+ },
+ {
+ 'target_name': 'media_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'tools/media_bench/media_bench.cc',
+ ],
+ },
+ ],
+ }]
+ ],
+}
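
The media.gyp file reproduced above is test data for the Emacs major mode shipped under gyp/tools/emacs: it exercises fontification of gyp comments, strings, and keywords such as 'targets' and 'conditions'. Because a .gyp file is a single Python dict literal, its structure can also be inspected without gyp's full loader. Below is a minimal sketch, assuming a local copy of the file saved as media.gyp; gyp's own CheckedEval in pylib/gyp/input.py parses the same way before going on to expand 'includes', variables, and 'conditions' blocks, all of which this sketch skips.

#!/usr/bin/env python3
# Minimal sketch (not gyp's real loader): list the targets declared in a
# .gyp file. "media.gyp" is a hypothetical local copy of the test data above.
import ast

def load_gyp(path):
    with open(path) as f:
        tree = ast.parse(f.read())       # the tokenizer drops '#' comments
    expr = tree.body[0]                  # the single top-level dict literal
    return ast.literal_eval(expr.value)  # literals only, no arbitrary eval()

if __name__ == "__main__":
    data = load_gyp("media.gyp")
    for target in data.get("targets", []):
        print(target["target_name"], target.get("type", "(default)"))

Run against the file above, this prints one line per target, e.g. "media <(component)" and "yuv_convert static_library" (the raw '<(component)' string, since variable expansion is not performed here). The media.gyp.fontified file introduced next stores the expected fontification of that buffer as a printed Emacs propertized string, #("..." START END (face FACE) ...), where each START END PLIST triplet assigns a font-lock face to a character range of the string.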
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp.fontified b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp.fontified
new file mode 100644
index 0000000..962b7b2
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp.fontified
@@ -0,0 +1,1107 @@
+
+#("# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'chromium_code': 1,
+ # Override to dynamically link the PulseAudio library.
+ 'use_pulseaudio%': 0,
+ # Override to dynamically link the cras (ChromeOS audio) library.
+ 'use_cras%': 0,
+ },
+ 'targets': [
+ {
+ 'target_name': 'media',
+ 'type': '<(component)',
+ 'dependencies': [
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
+ '../build/temp_gyp/googleurl.gyp:googleurl',
+ '../crypto/crypto.gyp:crypto',
+ '../third_party/openmax/openmax.gyp:il',
+ '../ui/ui.gyp:ui',
+ ],
+ 'defines': [
+ 'MEDIA_IMPLEMENTATION',
+ ],
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'audio/android/audio_manager_android.cc',
+ 'audio/android/audio_manager_android.h',
+ 'audio/android/audio_track_output_android.cc',
+ 'audio/android/audio_track_output_android.h',
+ 'audio/android/opensles_input.cc',
+ 'audio/android/opensles_input.h',
+ 'audio/android/opensles_output.cc',
+ 'audio/android/opensles_output.h',
+ 'audio/async_socket_io_handler.h',
+ 'audio/async_socket_io_handler_posix.cc',
+ 'audio/async_socket_io_handler_win.cc',
+ 'audio/audio_buffers_state.cc',
+ 'audio/audio_buffers_state.h',
+ 'audio/audio_io.h',
+ 'audio/audio_input_controller.cc',
+ 'audio/audio_input_controller.h',
+ 'audio/audio_input_stream_impl.cc',
+ 'audio/audio_input_stream_impl.h',
+ 'audio/audio_device_name.cc',
+ 'audio/audio_device_name.h',
+ 'audio/audio_manager.cc',
+ 'audio/audio_manager.h',
+ 'audio/audio_manager_base.cc',
+ 'audio/audio_manager_base.h',
+ 'audio/audio_output_controller.cc',
+ 'audio/audio_output_controller.h',
+ 'audio/audio_output_dispatcher.cc',
+ 'audio/audio_output_dispatcher.h',
+ 'audio/audio_output_dispatcher_impl.cc',
+ 'audio/audio_output_dispatcher_impl.h',
+ 'audio/audio_output_mixer.cc',
+ 'audio/audio_output_mixer.h',
+ 'audio/audio_output_proxy.cc',
+ 'audio/audio_output_proxy.h',
+ 'audio/audio_parameters.cc',
+ 'audio/audio_parameters.h',
+ 'audio/audio_util.cc',
+ 'audio/audio_util.h',
+ 'audio/cross_process_notification.cc',
+ 'audio/cross_process_notification.h',
+ 'audio/cross_process_notification_win.cc',
+ 'audio/cross_process_notification_posix.cc',
+ 'audio/fake_audio_input_stream.cc',
+ 'audio/fake_audio_input_stream.h',
+ 'audio/fake_audio_output_stream.cc',
+ 'audio/fake_audio_output_stream.h',
+ 'audio/linux/audio_manager_linux.cc',
+ 'audio/linux/audio_manager_linux.h',
+ 'audio/linux/alsa_input.cc',
+ 'audio/linux/alsa_input.h',
+ 'audio/linux/alsa_output.cc',
+ 'audio/linux/alsa_output.h',
+ 'audio/linux/alsa_util.cc',
+ 'audio/linux/alsa_util.h',
+ 'audio/linux/alsa_wrapper.cc',
+ 'audio/linux/alsa_wrapper.h',
+ 'audio/linux/cras_output.cc',
+ 'audio/linux/cras_output.h',
+ 'audio/openbsd/audio_manager_openbsd.cc',
+ 'audio/openbsd/audio_manager_openbsd.h',
+ 'audio/mac/audio_input_mac.cc',
+ 'audio/mac/audio_input_mac.h',
+ 'audio/mac/audio_low_latency_input_mac.cc',
+ 'audio/mac/audio_low_latency_input_mac.h',
+ 'audio/mac/audio_low_latency_output_mac.cc',
+ 'audio/mac/audio_low_latency_output_mac.h',
+ 'audio/mac/audio_manager_mac.cc',
+ 'audio/mac/audio_manager_mac.h',
+ 'audio/mac/audio_output_mac.cc',
+ 'audio/mac/audio_output_mac.h',
+ 'audio/null_audio_sink.cc',
+ 'audio/null_audio_sink.h',
+ 'audio/pulse/pulse_output.cc',
+ 'audio/pulse/pulse_output.h',
+ 'audio/sample_rates.cc',
+ 'audio/sample_rates.h',
+ 'audio/simple_sources.cc',
+ 'audio/simple_sources.h',
+ 'audio/win/audio_low_latency_input_win.cc',
+ 'audio/win/audio_low_latency_input_win.h',
+ 'audio/win/audio_low_latency_output_win.cc',
+ 'audio/win/audio_low_latency_output_win.h',
+ 'audio/win/audio_manager_win.cc',
+ 'audio/win/audio_manager_win.h',
+ 'audio/win/avrt_wrapper_win.cc',
+ 'audio/win/avrt_wrapper_win.h',
+ 'audio/win/device_enumeration_win.cc',
+ 'audio/win/device_enumeration_win.h',
+ 'audio/win/wavein_input_win.cc',
+ 'audio/win/wavein_input_win.h',
+ 'audio/win/waveout_output_win.cc',
+ 'audio/win/waveout_output_win.h',
+ 'base/android/media_jni_registrar.cc',
+ 'base/android/media_jni_registrar.h',
+ 'base/audio_decoder.cc',
+ 'base/audio_decoder.h',
+ 'base/audio_decoder_config.cc',
+ 'base/audio_decoder_config.h',
+ 'base/audio_renderer.h',
+ 'base/audio_renderer_mixer.cc',
+ 'base/audio_renderer_mixer.h',
+ 'base/audio_renderer_mixer_input.cc',
+ 'base/audio_renderer_mixer_input.h',
+ 'base/bitstream_buffer.h',
+ 'base/buffers.cc',
+ 'base/buffers.h',
+ 'base/byte_queue.cc',
+ 'base/byte_queue.h',
+ 'base/channel_layout.cc',
+ 'base/channel_layout.h',
+ 'base/clock.cc',
+ 'base/clock.h',
+ 'base/composite_filter.cc',
+ 'base/composite_filter.h',
+ 'base/data_buffer.cc',
+ 'base/data_buffer.h',
+ 'base/data_source.cc',
+ 'base/data_source.h',
+ 'base/decoder_buffer.cc',
+ 'base/decoder_buffer.h',
+ 'base/decrypt_config.cc',
+ 'base/decrypt_config.h',
+ 'base/decryptor.h',
+ 'base/decryptor_client.h',
+ 'base/demuxer.cc',
+ 'base/demuxer.h',
+ 'base/demuxer_stream.cc',
+ 'base/demuxer_stream.h',
+ 'base/djb2.cc',
+ 'base/djb2.h',
+ 'base/filter_collection.cc',
+ 'base/filter_collection.h',
+ 'base/filter_host.h',
+ 'base/filters.cc',
+ 'base/filters.h',
+ 'base/h264_bitstream_converter.cc',
+ 'base/h264_bitstream_converter.h',
+ 'base/media.h',
+ 'base/media_android.cc',
+ 'base/media_export.h',
+ 'base/media_log.cc',
+ 'base/media_log.h',
+ 'base/media_log_event.h',
+ 'base/media_posix.cc',
+ 'base/media_switches.cc',
+ 'base/media_switches.h',
+ 'base/media_win.cc',
+ 'base/message_loop_factory.cc',
+ 'base/message_loop_factory.h',
+ 'base/pipeline.cc',
+ 'base/pipeline.h',
+ 'base/pipeline_status.cc',
+ 'base/pipeline_status.h',
+ 'base/ranges.cc',
+ 'base/ranges.h',
+ 'base/seekable_buffer.cc',
+ 'base/seekable_buffer.h',
+ 'base/state_matrix.cc',
+ 'base/state_matrix.h',
+ 'base/stream_parser.cc',
+ 'base/stream_parser.h',
+ 'base/stream_parser_buffer.cc',
+ 'base/stream_parser_buffer.h',
+ 'base/video_decoder.cc',
+ 'base/video_decoder.h',
+ 'base/video_decoder_config.cc',
+ 'base/video_decoder_config.h',
+ 'base/video_frame.cc',
+ 'base/video_frame.h',
+ 'base/video_renderer.h',
+ 'base/video_util.cc',
+ 'base/video_util.h',
+ 'crypto/aes_decryptor.cc',
+ 'crypto/aes_decryptor.h',
+ 'ffmpeg/ffmpeg_common.cc',
+ 'ffmpeg/ffmpeg_common.h',
+ 'ffmpeg/file_protocol.cc',
+ 'ffmpeg/file_protocol.h',
+ 'filters/audio_file_reader.cc',
+ 'filters/audio_file_reader.h',
+ 'filters/audio_renderer_algorithm.cc',
+ 'filters/audio_renderer_algorithm.h',
+ 'filters/audio_renderer_impl.cc',
+ 'filters/audio_renderer_impl.h',
+ 'filters/bitstream_converter.cc',
+ 'filters/bitstream_converter.h',
+ 'filters/chunk_demuxer.cc',
+ 'filters/chunk_demuxer.h',
+ 'filters/chunk_demuxer_client.h',
+ 'filters/dummy_demuxer.cc',
+ 'filters/dummy_demuxer.h',
+ 'filters/ffmpeg_audio_decoder.cc',
+ 'filters/ffmpeg_audio_decoder.h',
+ 'filters/ffmpeg_demuxer.cc',
+ 'filters/ffmpeg_demuxer.h',
+ 'filters/ffmpeg_h264_bitstream_converter.cc',
+ 'filters/ffmpeg_h264_bitstream_converter.h',
+ 'filters/ffmpeg_glue.cc',
+ 'filters/ffmpeg_glue.h',
+ 'filters/ffmpeg_video_decoder.cc',
+ 'filters/ffmpeg_video_decoder.h',
+ 'filters/file_data_source.cc',
+ 'filters/file_data_source.h',
+ 'filters/gpu_video_decoder.cc',
+ 'filters/gpu_video_decoder.h',
+ 'filters/in_memory_url_protocol.cc',
+ 'filters/in_memory_url_protocol.h',
+ 'filters/source_buffer_stream.cc',
+ 'filters/source_buffer_stream.h',
+ 'filters/video_frame_generator.cc',
+ 'filters/video_frame_generator.h',
+ 'filters/video_renderer_base.cc',
+ 'filters/video_renderer_base.h',
+ 'video/capture/fake_video_capture_device.cc',
+ 'video/capture/fake_video_capture_device.h',
+ 'video/capture/linux/video_capture_device_linux.cc',
+ 'video/capture/linux/video_capture_device_linux.h',
+ 'video/capture/mac/video_capture_device_mac.h',
+ 'video/capture/mac/video_capture_device_mac.mm',
+ 'video/capture/mac/video_capture_device_qtkit_mac.h',
+ 'video/capture/mac/video_capture_device_qtkit_mac.mm',
+ 'video/capture/video_capture.h',
+ 'video/capture/video_capture_device.h',
+ 'video/capture/video_capture_device_dummy.cc',
+ 'video/capture/video_capture_device_dummy.h',
+ 'video/capture/video_capture_proxy.cc',
+ 'video/capture/video_capture_proxy.h',
+ 'video/capture/video_capture_types.h',
+ 'video/capture/win/filter_base_win.cc',
+ 'video/capture/win/filter_base_win.h',
+ 'video/capture/win/pin_base_win.cc',
+ 'video/capture/win/pin_base_win.h',
+ 'video/capture/win/sink_filter_observer_win.h',
+ 'video/capture/win/sink_filter_win.cc',
+ 'video/capture/win/sink_filter_win.h',
+ 'video/capture/win/sink_input_pin_win.cc',
+ 'video/capture/win/sink_input_pin_win.h',
+ 'video/capture/win/video_capture_device_win.cc',
+ 'video/capture/win/video_capture_device_win.h',
+ 'video/picture.cc',
+ 'video/picture.h',
+ 'video/video_decode_accelerator.cc',
+ 'video/video_decode_accelerator.h',
+ 'webm/webm_constants.h',
+ 'webm/webm_cluster_parser.cc',
+ 'webm/webm_cluster_parser.h',
+ 'webm/webm_content_encodings.cc',
+ 'webm/webm_content_encodings.h',
+ 'webm/webm_content_encodings_client.cc',
+ 'webm/webm_content_encodings_client.h',
+ 'webm/webm_info_parser.cc',
+ 'webm/webm_info_parser.h',
+ 'webm/webm_parser.cc',
+ 'webm/webm_parser.h',
+ 'webm/webm_stream_parser.cc',
+ 'webm/webm_stream_parser.h',
+ 'webm/webm_tracks_parser.cc',
+ 'webm/webm_tracks_parser.h',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '..',
+ ],
+ },
+ 'conditions': [
+ # Android doesn't use ffmpeg, so make the dependency conditional
+ # and exclude the sources which depend on ffmpeg.
+ ['OS != \"android\"', {
+ 'dependencies': [
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ }],
+ ['OS == \"android\"', {
+ 'sources!': [
+ 'base/media_posix.cc',
+ 'ffmpeg/ffmpeg_common.cc',
+ 'ffmpeg/ffmpeg_common.h',
+ 'ffmpeg/file_protocol.cc',
+ 'ffmpeg/file_protocol.h',
+ 'filters/audio_file_reader.cc',
+ 'filters/audio_file_reader.h',
+ 'filters/bitstream_converter.cc',
+ 'filters/bitstream_converter.h',
+ 'filters/chunk_demuxer.cc',
+ 'filters/chunk_demuxer.h',
+ 'filters/chunk_demuxer_client.h',
+ 'filters/ffmpeg_audio_decoder.cc',
+ 'filters/ffmpeg_audio_decoder.h',
+ 'filters/ffmpeg_demuxer.cc',
+ 'filters/ffmpeg_demuxer.h',
+ 'filters/ffmpeg_h264_bitstream_converter.cc',
+ 'filters/ffmpeg_h264_bitstream_converter.h',
+ 'filters/ffmpeg_glue.cc',
+ 'filters/ffmpeg_glue.h',
+ 'filters/ffmpeg_video_decoder.cc',
+ 'filters/ffmpeg_video_decoder.h',
+ 'filters/gpu_video_decoder.cc',
+ 'filters/gpu_video_decoder.h',
+ 'webm/webm_cluster_parser.cc',
+ 'webm/webm_cluster_parser.h',
+ 'webm/webm_stream_parser.cc',
+ 'webm/webm_stream_parser.h',
+ ],
+ }],
+      # The 'android' condition below was added temporarily and should be
+      # removed downstream, because there is no Java environment set up
+      # upstream yet.
+ ['OS == \"android\"', {
+ 'sources!':[
+ 'audio/android/audio_track_output_android.cc',
+ ],
+ 'sources':[
+ 'audio/android/audio_track_output_stub_android.cc',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-lOpenSLES',
+ ],
+ },
+ }],
+ ['OS==\"linux\" or OS==\"freebsd\" or OS==\"solaris\"', {
+ 'link_settings': {
+ 'libraries': [
+ '-lasound',
+ ],
+ },
+ }],
+ ['OS==\"openbsd\"', {
+ 'sources/': [ ['exclude', '/alsa_' ],
+ ['exclude', '/audio_manager_linux' ] ],
+ 'link_settings': {
+ 'libraries': [
+ ],
+ },
+ }],
+ ['OS!=\"openbsd\"', {
+ 'sources!': [
+ 'audio/openbsd/audio_manager_openbsd.cc',
+ 'audio/openbsd/audio_manager_openbsd.h',
+ ],
+ }],
+ ['OS==\"linux\"', {
+ 'variables': {
+ 'conditions': [
+ ['sysroot!=\"\"', {
+ 'pkg-config': '../build/linux/pkg-config-wrapper \"<(sysroot)\" \"<(target_arch)\"',
+ }, {
+ 'pkg-config': 'pkg-config'
+ }],
+ ],
+ },
+ 'conditions': [
+ ['use_cras == 1', {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags libcras)',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '<!@(<(pkg-config) --libs libcras)',
+ ],
+ },
+ 'defines': [
+ 'USE_CRAS',
+ ],
+ }, { # else: use_cras == 0
+ 'sources!': [
+ 'audio/linux/cras_output.cc',
+ 'audio/linux/cras_output.h',
+ ],
+ }],
+ ],
+ }],
+ ['os_posix == 1', {
+ 'conditions': [
+ ['use_pulseaudio == 1', {
+ 'cflags': [
+ '<!@(pkg-config --cflags libpulse)',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '<!@(pkg-config --libs-only-l libpulse)',
+ ],
+ },
+ 'defines': [
+ 'USE_PULSEAUDIO',
+ ],
+ }, { # else: use_pulseaudio == 0
+ 'sources!': [
+ 'audio/pulse/pulse_output.cc',
+ 'audio/pulse/pulse_output.h',
+ ],
+ }],
+ ],
+ }],
+ ['os_posix == 1 and OS != \"android\"', {
+ # Video capture isn't supported in Android yet.
+ 'sources!': [
+ 'video/capture/video_capture_device_dummy.cc',
+ 'video/capture/video_capture_device_dummy.h',
+ ],
+ }],
+ ['OS==\"mac\"', {
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/AudioUnit.framework',
+ '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework',
+ '$(SDKROOT)/System/Library/Frameworks/CoreAudio.framework',
+ '$(SDKROOT)/System/Library/Frameworks/CoreVideo.framework',
+ '$(SDKROOT)/System/Library/Frameworks/QTKit.framework',
+ ],
+ },
+ }],
+ ['OS==\"win\"', {
+ 'sources!': [
+ 'audio/pulse/pulse_output.cc',
+ 'audio/pulse/pulse_output.h',
+ 'video/capture/video_capture_device_dummy.cc',
+ 'video/capture/video_capture_device_dummy.h',
+ ],
+ }],
+ ['proprietary_codecs==1 or branding==\"Chrome\"', {
+ 'sources': [
+ 'mp4/avc.cc',
+ 'mp4/avc.h',
+ 'mp4/box_definitions.cc',
+ 'mp4/box_definitions.h',
+ 'mp4/box_reader.cc',
+ 'mp4/box_reader.h',
+ 'mp4/cenc.cc',
+ 'mp4/cenc.h',
+ 'mp4/mp4_stream_parser.cc',
+ 'mp4/mp4_stream_parser.h',
+ 'mp4/offset_byte_queue.cc',
+ 'mp4/offset_byte_queue.h',
+ 'mp4/track_run_iterator.cc',
+ 'mp4/track_run_iterator.h',
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'yuv_convert',
+ 'type': 'static_library',
+ 'include_dirs': [
+ '..',
+ ],
+ 'conditions': [
+ ['order_profiling != 0', {
+ 'target_conditions' : [
+ ['_toolset==\"target\"', {
+ 'cflags!': [ '-finstrument-functions' ],
+ }],
+ ],
+ }],
+ [ 'target_arch == \"ia32\" or target_arch == \"x64\"', {
+ 'dependencies': [
+ 'yuv_convert_simd_x86',
+ ],
+ }],
+ [ 'target_arch == \"arm\"', {
+ 'dependencies': [
+ 'yuv_convert_simd_arm',
+ ],
+ }],
+ ],
+ 'sources': [
+ 'base/yuv_convert.cc',
+ 'base/yuv_convert.h',
+ ],
+ },
+ {
+ 'target_name': 'yuv_convert_simd_x86',
+ 'type': 'static_library',
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'base/simd/convert_rgb_to_yuv_c.cc',
+ 'base/simd/convert_rgb_to_yuv_sse2.cc',
+ 'base/simd/convert_rgb_to_yuv_ssse3.asm',
+ 'base/simd/convert_rgb_to_yuv_ssse3.cc',
+ 'base/simd/convert_rgb_to_yuv_ssse3.inc',
+ 'base/simd/convert_yuv_to_rgb_c.cc',
+ 'base/simd/convert_yuv_to_rgb_x86.cc',
+ 'base/simd/convert_yuv_to_rgb_mmx.asm',
+ 'base/simd/convert_yuv_to_rgb_mmx.inc',
+ 'base/simd/convert_yuv_to_rgb_sse.asm',
+ 'base/simd/filter_yuv.h',
+ 'base/simd/filter_yuv_c.cc',
+ 'base/simd/filter_yuv_mmx.cc',
+ 'base/simd/filter_yuv_sse2.cc',
+ 'base/simd/linear_scale_yuv_to_rgb_mmx.asm',
+ 'base/simd/linear_scale_yuv_to_rgb_mmx.inc',
+ 'base/simd/linear_scale_yuv_to_rgb_sse.asm',
+ 'base/simd/scale_yuv_to_rgb_mmx.asm',
+ 'base/simd/scale_yuv_to_rgb_mmx.inc',
+ 'base/simd/scale_yuv_to_rgb_sse.asm',
+ 'base/simd/yuv_to_rgb_table.cc',
+ 'base/simd/yuv_to_rgb_table.h',
+ ],
+ 'conditions': [
+ ['order_profiling != 0', {
+ 'target_conditions' : [
+ ['_toolset==\"target\"', {
+ 'cflags!': [ '-finstrument-functions' ],
+ }],
+ ],
+ }],
+ [ 'target_arch == \"x64\"', {
+ # Source files optimized for X64 systems.
+ 'sources': [
+ 'base/simd/linear_scale_yuv_to_rgb_mmx_x64.asm',
+ 'base/simd/scale_yuv_to_rgb_sse2_x64.asm',
+ ],
+ }],
+ [ 'os_posix == 1 and OS != \"mac\" and OS != \"android\"', {
+ 'cflags': [
+ '-msse2',
+ ],
+ }],
+ [ 'OS == \"mac\"', {
+ 'configurations': {
+ 'Debug': {
+ 'xcode_settings': {
+ # gcc on the mac builds horribly unoptimized sse code in debug
+ # mode. Since this is rarely going to be debugged, run with full
+ # optimizations in Debug as well as Release.
+ 'GCC_OPTIMIZATION_LEVEL': '3', # -O3
+ },
+ },
+ },
+ }],
+ [ 'OS==\"win\"', {
+ 'variables': {
+ 'yasm_flags': [
+ '-DWIN32',
+ '-DMSVC',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ },
+ }],
+ [ 'OS==\"mac\"', {
+ 'variables': {
+ 'yasm_flags': [
+ '-DPREFIX',
+ '-DMACHO',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ },
+ }],
+ [ 'os_posix==1 and OS!=\"mac\"', {
+ 'variables': {
+ 'conditions': [
+ [ 'target_arch==\"ia32\"', {
+ 'yasm_flags': [
+ '-DX86_32',
+ '-DELF',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ }, {
+ 'yasm_flags': [
+ '-DARCH_X86_64',
+ '-DELF',
+ '-DPIC',
+ '-DCHROMIUM',
+ '-Isimd',
+ ],
+ }],
+ ],
+ },
+ }],
+ ],
+ 'variables': {
+ 'yasm_output_path': '<(SHARED_INTERMEDIATE_DIR)/media',
+ },
+ 'msvs_2010_disable_uldi_when_referenced': 1,
+ 'includes': [
+ '../third_party/yasm/yasm_compile.gypi',
+ ],
+ },
+ {
+ 'target_name': 'yuv_convert_simd_arm',
+ 'type': 'static_library',
+ 'include_dirs': [
+ '..',
+ ],
+ 'sources': [
+ 'base/simd/convert_rgb_to_yuv_c.cc',
+ 'base/simd/convert_rgb_to_yuv.h',
+ 'base/simd/convert_yuv_to_rgb_c.cc',
+ 'base/simd/convert_yuv_to_rgb.h',
+ 'base/simd/filter_yuv.h',
+ 'base/simd/filter_yuv_c.cc',
+ 'base/simd/yuv_to_rgb_table.cc',
+ 'base/simd/yuv_to_rgb_table.h',
+ ],
+ },
+ {
+ 'target_name': 'media_unittests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'media_test_support',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../base/base.gyp:base_i18n',
+ '../base/base.gyp:test_support_base',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ '../ui/ui.gyp:ui',
+ ],
+ 'sources': [
+ 'audio/async_socket_io_handler_unittest.cc',
+ 'audio/audio_input_controller_unittest.cc',
+ 'audio/audio_input_device_unittest.cc',
+ 'audio/audio_input_unittest.cc',
+ 'audio/audio_input_volume_unittest.cc',
+ 'audio/audio_low_latency_input_output_unittest.cc',
+ 'audio/audio_output_controller_unittest.cc',
+ 'audio/audio_output_proxy_unittest.cc',
+ 'audio/audio_parameters_unittest.cc',
+ 'audio/audio_util_unittest.cc',
+ 'audio/cross_process_notification_unittest.cc',
+ 'audio/linux/alsa_output_unittest.cc',
+ 'audio/mac/audio_low_latency_input_mac_unittest.cc',
+ 'audio/mac/audio_output_mac_unittest.cc',
+ 'audio/simple_sources_unittest.cc',
+ 'audio/win/audio_low_latency_input_win_unittest.cc',
+ 'audio/win/audio_low_latency_output_win_unittest.cc',
+ 'audio/win/audio_output_win_unittest.cc',
+ 'base/audio_renderer_mixer_unittest.cc',
+ 'base/audio_renderer_mixer_input_unittest.cc',
+ 'base/buffers_unittest.cc',
+ 'base/clock_unittest.cc',
+ 'base/composite_filter_unittest.cc',
+ 'base/data_buffer_unittest.cc',
+ 'base/decoder_buffer_unittest.cc',
+ 'base/djb2_unittest.cc',
+ 'base/fake_audio_render_callback.cc',
+ 'base/fake_audio_render_callback.h',
+ 'base/filter_collection_unittest.cc',
+ 'base/h264_bitstream_converter_unittest.cc',
+ 'base/pipeline_unittest.cc',
+ 'base/ranges_unittest.cc',
+ 'base/run_all_unittests.cc',
+ 'base/seekable_buffer_unittest.cc',
+ 'base/state_matrix_unittest.cc',
+ 'base/test_data_util.cc',
+ 'base/test_data_util.h',
+ 'base/video_frame_unittest.cc',
+ 'base/video_util_unittest.cc',
+ 'base/yuv_convert_unittest.cc',
+ 'crypto/aes_decryptor_unittest.cc',
+ 'ffmpeg/ffmpeg_common_unittest.cc',
+ 'filters/audio_renderer_algorithm_unittest.cc',
+ 'filters/audio_renderer_impl_unittest.cc',
+ 'filters/bitstream_converter_unittest.cc',
+ 'filters/chunk_demuxer_unittest.cc',
+ 'filters/ffmpeg_audio_decoder_unittest.cc',
+ 'filters/ffmpeg_decoder_unittest.h',
+ 'filters/ffmpeg_demuxer_unittest.cc',
+ 'filters/ffmpeg_glue_unittest.cc',
+ 'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
+ 'filters/ffmpeg_video_decoder_unittest.cc',
+ 'filters/file_data_source_unittest.cc',
+ 'filters/pipeline_integration_test.cc',
+ 'filters/pipeline_integration_test_base.cc',
+ 'filters/source_buffer_stream_unittest.cc',
+ 'filters/video_renderer_base_unittest.cc',
+ 'video/capture/video_capture_device_unittest.cc',
+ 'webm/cluster_builder.cc',
+ 'webm/cluster_builder.h',
+ 'webm/webm_cluster_parser_unittest.cc',
+ 'webm/webm_content_encodings_client_unittest.cc',
+ 'webm/webm_parser_unittest.cc',
+ ],
+ 'conditions': [
+ ['os_posix==1 and OS!=\"mac\"', {
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ['OS != \"android\"', {
+ 'dependencies': [
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ }],
+ ['OS == \"android\"', {
+ 'sources!': [
+ 'audio/audio_input_volume_unittest.cc',
+ 'base/test_data_util.cc',
+ 'base/test_data_util.h',
+ 'ffmpeg/ffmpeg_common_unittest.cc',
+ 'filters/ffmpeg_audio_decoder_unittest.cc',
+ 'filters/bitstream_converter_unittest.cc',
+ 'filters/chunk_demuxer_unittest.cc',
+ 'filters/ffmpeg_demuxer_unittest.cc',
+ 'filters/ffmpeg_glue_unittest.cc',
+ 'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
+ 'filters/ffmpeg_video_decoder_unittest.cc',
+ 'filters/pipeline_integration_test.cc',
+ 'filters/pipeline_integration_test_base.cc',
+ 'mp4/mp4_stream_parser_unittest.cc',
+ 'webm/webm_cluster_parser_unittest.cc',
+ ],
+ }],
+ ['OS == \"linux\"', {
+ 'conditions': [
+ ['use_cras == 1', {
+ 'sources': [
+ 'audio/linux/cras_output_unittest.cc',
+ ],
+ 'defines': [
+ 'USE_CRAS',
+ ],
+ }],
+ ],
+ }],
+ [ 'target_arch==\"ia32\" or target_arch==\"x64\"', {
+ 'sources': [
+ 'base/simd/convert_rgb_to_yuv_unittest.cc',
+ ],
+ }],
+ ['proprietary_codecs==1 or branding==\"Chrome\"', {
+ 'sources': [
+ 'mp4/avc_unittest.cc',
+ 'mp4/box_reader_unittest.cc',
+ 'mp4/mp4_stream_parser_unittest.cc',
+ 'mp4/offset_byte_queue_unittest.cc',
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'media_test_support',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ ],
+ 'sources': [
+ 'audio/test_audio_input_controller_factory.cc',
+ 'audio/test_audio_input_controller_factory.h',
+ 'base/mock_callback.cc',
+ 'base/mock_callback.h',
+ 'base/mock_data_source_host.cc',
+ 'base/mock_data_source_host.h',
+ 'base/mock_demuxer_host.cc',
+ 'base/mock_demuxer_host.h',
+ 'base/mock_filter_host.cc',
+ 'base/mock_filter_host.h',
+ 'base/mock_filters.cc',
+ 'base/mock_filters.h',
+ ],
+ },
+ {
+ 'target_name': 'scaler_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../skia/skia.gyp:skia',
+ ],
+ 'sources': [
+ 'tools/scaler_bench/scaler_bench.cc',
+ ],
+ },
+ {
+ 'target_name': 'qt_faststart',
+ 'type': 'executable',
+ 'sources': [
+ 'tools/qt_faststart/qt_faststart.c'
+ ],
+ },
+ {
+ 'target_name': 'seek_tester',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ ],
+ 'sources': [
+ 'tools/seek_tester/seek_tester.cc',
+ ],
+ },
+ ],
+ 'conditions': [
+ ['OS==\"win\"', {
+ 'targets': [
+ {
+ 'target_name': 'player_wtl',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
+ '../ui/ui.gyp:ui',
+ ],
+ 'include_dirs': [
+ '<(DEPTH)/third_party/wtl/include',
+ ],
+ 'sources': [
+ 'tools/player_wtl/list.h',
+ 'tools/player_wtl/mainfrm.h',
+ 'tools/player_wtl/movie.cc',
+ 'tools/player_wtl/movie.h',
+ 'tools/player_wtl/player_wtl.cc',
+ 'tools/player_wtl/player_wtl.rc',
+ 'tools/player_wtl/props.h',
+ 'tools/player_wtl/seek.h',
+ 'tools/player_wtl/resource.h',
+ 'tools/player_wtl/view.h',
+ ],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'SubSystem': '2', # Set /SUBSYSTEM:WINDOWS
+ },
+ },
+ 'defines': [
+ '_CRT_SECURE_NO_WARNINGS=1',
+ ],
+ },
+ ],
+ }],
+ ['OS == \"win\" or toolkit_uses_gtk == 1', {
+ 'targets': [
+ {
+ 'target_name': 'shader_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../ui/gl/gl.gyp:gl',
+ ],
+ 'sources': [
+ 'tools/shader_bench/shader_bench.cc',
+ 'tools/shader_bench/cpu_color_painter.cc',
+ 'tools/shader_bench/cpu_color_painter.h',
+ 'tools/shader_bench/gpu_color_painter.cc',
+ 'tools/shader_bench/gpu_color_painter.h',
+ 'tools/shader_bench/gpu_painter.cc',
+ 'tools/shader_bench/gpu_painter.h',
+ 'tools/shader_bench/painter.cc',
+ 'tools/shader_bench/painter.h',
+ 'tools/shader_bench/window.cc',
+ 'tools/shader_bench/window.h',
+ ],
+ 'conditions': [
+ ['toolkit_uses_gtk == 1', {
+ 'dependencies': [
+ '../build/linux/system.gyp:gtk',
+ ],
+ 'sources': [
+ 'tools/shader_bench/window_linux.cc',
+ ],
+ }],
+ ['OS==\"win\"', {
+ 'dependencies': [
+ '../third_party/angle/src/build_angle.gyp:libEGL',
+ '../third_party/angle/src/build_angle.gyp:libGLESv2',
+ ],
+ 'sources': [
+ 'tools/shader_bench/window_win.cc',
+ ],
+ }],
+ ],
+ },
+ ],
+ }],
+ ['OS == \"linux\" and target_arch != \"arm\"', {
+ 'targets': [
+ {
+ 'target_name': 'tile_render_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ '../base/base.gyp:base',
+ '../ui/gl/gl.gyp:gl',
+ ],
+ 'libraries': [
+ '-lGL',
+ '-ldl',
+ ],
+ 'sources': [
+ 'tools/tile_render_bench/tile_render_bench.cc',
+ ],
+ },
+ ],
+ }],
+ ['os_posix == 1 and OS != \"mac\" and OS != \"android\"', {
+ 'targets': [
+ {
+ 'target_name': 'player_x11',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'yuv_convert',
+ '../base/base.gyp:base',
+ '../ui/gl/gl.gyp:gl',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-ldl',
+ '-lX11',
+ '-lXrender',
+ '-lXext',
+ ],
+ },
+ 'sources': [
+ 'tools/player_x11/data_source_logger.cc',
+ 'tools/player_x11/data_source_logger.h',
+ 'tools/player_x11/gl_video_renderer.cc',
+ 'tools/player_x11/gl_video_renderer.h',
+ 'tools/player_x11/player_x11.cc',
+ 'tools/player_x11/x11_video_renderer.cc',
+ 'tools/player_x11/x11_video_renderer.h',
+ ],
+ },
+ ],
+ }],
+ ['OS == \"android\"', {
+ 'targets': [
+ {
+ 'target_name': 'player_android',
+ 'type': 'static_library',
+ 'sources': [
+ 'base/android/media_player_bridge.cc',
+ 'base/android/media_player_bridge.h',
+ ],
+ 'dependencies': [
+ '../base/base.gyp:base',
+ ],
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)/media',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'generate-jni-headers',
+ 'inputs': [
+ '../base/android/jni_generator/jni_generator.py',
+ 'base/android/java/src/org/chromium/media/MediaPlayerListener.java',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/media/jni/media_player_listener_jni.h',
+ ],
+ 'action': [
+ 'python',
+ '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+ '-o',
+ '<@(_inputs)',
+ '<@(_outputs)',
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'media_java',
+ 'type': 'none',
+ 'dependencies': [ '../base/base.gyp:base_java' ],
+ 'variables': {
+ 'package_name': 'media',
+ 'java_in_dir': 'base/android/java',
+ },
+ 'includes': [ '../build/java.gypi' ],
+ },
+
+ ],
+  }, { # OS != \"android\"
+ # Android does not use ffmpeg, so disable the targets which require it.
+ 'targets': [
+ {
+ 'target_name': 'ffmpeg_unittests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'media_test_support',
+ '../base/base.gyp:base',
+ '../base/base.gyp:base_i18n',
+ '../base/base.gyp:test_support_base',
+ '../base/base.gyp:test_support_perf',
+ '../testing/gtest.gyp:gtest',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'ffmpeg/ffmpeg_unittest.cc',
+ ],
+ 'conditions': [
+ ['toolkit_uses_gtk == 1', {
+ 'dependencies': [
+ # Needed for the following #include chain:
+ # base/run_all_unittests.cc
+ # ../base/test_suite.h
+ # gtk/gtk.h
+ '../build/linux/system.gyp:gtk',
+ ],
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'ffmpeg_regression_tests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ 'media_test_support',
+ '../base/base.gyp:test_support_base',
+ '../testing/gmock.gyp:gmock',
+ '../testing/gtest.gyp:gtest',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'base/test_data_util.cc',
+ 'base/run_all_unittests.cc',
+ 'ffmpeg/ffmpeg_regression_tests.cc',
+ 'filters/pipeline_integration_test_base.cc',
+ ],
+ 'conditions': [
+ ['os_posix==1 and OS!=\"mac\"', {
+ 'conditions': [
+ ['linux_use_tcmalloc==1', {
+ 'dependencies': [
+ '../base/allocator/allocator.gyp:allocator',
+ ],
+ }],
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'ffmpeg_tests',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'test/ffmpeg_tests/ffmpeg_tests.cc',
+ ],
+ },
+ {
+ 'target_name': 'media_bench',
+ 'type': 'executable',
+ 'dependencies': [
+ 'media',
+ '../base/base.gyp:base',
+ '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
+ ],
+ 'sources': [
+ 'tools/media_bench/media_bench.cc',
+ ],
+ },
+ ],
+ }]
+ ],
+}
+" 0 64 (face font-lock-comment-face) 64 137 (face font-lock-comment-face) 137 166 (face font-lock-comment-face) 166 171 nil 171 172 (face font-lock-string-face) 172 181 (face font-lock-keyword-face) 181 182 (face font-lock-string-face) 182 190 nil 190 191 (face font-lock-string-face) 191 204 (face font-lock-variable-name-face) 204 205 (face font-lock-string-face) 205 214 nil 214 269 (face font-lock-comment-face) 269 273 nil 273 274 (face font-lock-string-face) 274 289 (face font-lock-variable-name-face) 289 290 (face font-lock-string-face) 290 299 nil 299 365 (face font-lock-comment-face) 365 369 nil 369 370 (face font-lock-string-face) 370 379 (face font-lock-variable-name-face) 379 380 (face font-lock-string-face) 380 392 nil 392 393 (face font-lock-string-face) 393 400 (face font-lock-keyword-face) 400 401 (face font-lock-string-face) 401 417 nil 417 418 (face font-lock-string-face) 418 429 (face font-lock-keyword-face) 429 430 (face font-lock-string-face) 430 432 nil 432 433 (face font-lock-string-face) 433 438 (face font-lock-function-name-face) 438 439 (face font-lock-string-face) 439 447 nil 447 448 (face font-lock-string-face) 448 452 (face font-lock-keyword-face) 452 453 (face font-lock-string-face) 453 455 nil 455 458 (face font-lock-string-face) 458 467 (face font-lock-variable-name-face) 467 469 (face font-lock-string-face) 469 477 nil 477 478 (face font-lock-string-face) 478 490 (face font-lock-keyword-face) 490 491 (face font-lock-string-face) 491 503 nil 503 504 (face font-lock-string-face) 504 515 (face font-lock-function-name-face) 515 516 (face font-lock-string-face) 516 526 nil 526 527 (face font-lock-string-face) 527 548 (face font-lock-function-name-face) 548 549 (face font-lock-string-face) 549 559 nil 559 560 (face font-lock-string-face) 560 643 (face font-lock-function-name-face) 643 644 (face font-lock-string-face) 644 654 nil 654 655 (face font-lock-string-face) 655 696 (face font-lock-function-name-face) 696 697 (face font-lock-string-face) 697 707 nil 707 708 (face font-lock-string-face) 708 735 (face font-lock-function-name-face) 735 736 (face font-lock-string-face) 736 746 nil 746 747 (face font-lock-string-face) 747 784 (face font-lock-function-name-face) 784 785 (face font-lock-string-face) 785 795 nil 795 796 (face font-lock-string-face) 796 811 (face font-lock-function-name-face) 811 812 (face font-lock-string-face) 812 829 nil 829 830 (face font-lock-string-face) 830 837 (face font-lock-keyword-face) 837 838 (face font-lock-string-face) 838 850 nil 850 851 (face font-lock-string-face) 851 871 (face font-lock-preprocessor-face) 871 872 (face font-lock-string-face) 872 889 nil 889 890 (face font-lock-string-face) 890 902 (face font-lock-keyword-face) 902 903 (face font-lock-string-face) 903 915 nil 915 916 (face font-lock-string-face) 916 918 (face font-lock-constant-face) 918 919 (face font-lock-string-face) 919 936 nil 936 937 (face font-lock-string-face) 937 944 (face font-lock-keyword-face) 944 945 (face font-lock-string-face) 945 957 nil 957 958 (face font-lock-string-face) 958 996 (face font-lock-constant-face) 996 997 (face font-lock-string-face) 997 1007 nil 1007 1008 (face font-lock-string-face) 1008 1045 (face font-lock-constant-face) 1045 1046 (face font-lock-string-face) 1046 1056 nil 1056 1057 (face font-lock-string-face) 1057 1100 (face font-lock-constant-face) 1100 1101 (face font-lock-string-face) 1101 1111 nil 1111 1112 (face font-lock-string-face) 1112 1154 (face font-lock-constant-face) 1154 1155 (face font-lock-string-face) 1155 1165 
22797 22798 (face font-lock-string-face) 22798 22808 nil 22808 22809 (face font-lock-string-face) 22809 22853 (face font-lock-constant-face) 22853 22854 (face font-lock-string-face) 22854 22864 nil 22864 22865 (face font-lock-string-face) 22865 22900 (face font-lock-constant-face) 22900 22901 (face font-lock-string-face) 22901 22911 nil 22911 22912 (face font-lock-string-face) 22912 22961 (face font-lock-constant-face) 22961 22962 (face font-lock-string-face) 22962 22972 nil 22972 22973 (face font-lock-string-face) 22973 23011 (face font-lock-constant-face) 23011 23012 (face font-lock-string-face) 23012 23022 nil 23022 23023 (face font-lock-string-face) 23023 23055 (face font-lock-constant-face) 23055 23056 (face font-lock-string-face) 23056 23066 nil 23066 23067 (face font-lock-string-face) 23067 23116 (face font-lock-constant-face) 23116 23117 (face font-lock-string-face) 23117 23127 nil 23127 23128 (face font-lock-string-face) 23128 23178 (face font-lock-constant-face) 23178 23179 (face font-lock-string-face) 23179 23189 nil 23189 23190 (face font-lock-string-face) 23190 23228 (face font-lock-constant-face) 23228 23229 (face font-lock-string-face) 23229 23239 nil 23239 23240 (face font-lock-string-face) 23240 23277 (face font-lock-constant-face) 23277 23278 (face font-lock-string-face) 23278 23288 nil 23288 23289 (face font-lock-string-face) 23289 23332 (face font-lock-constant-face) 23332 23333 (face font-lock-string-face) 23333 23343 nil 23343 23344 (face font-lock-string-face) 23344 23368 (face font-lock-constant-face) 23368 23369 (face font-lock-string-face) 23369 23379 nil 23379 23380 (face font-lock-string-face) 23380 23402 (face font-lock-constant-face) 23402 23403 (face font-lock-string-face) 23403 23413 nil 23413 23414 (face font-lock-string-face) 23414 23447 (face font-lock-constant-face) 23447 23448 (face font-lock-string-face) 23448 23458 nil 23458 23459 (face font-lock-string-face) 23459 23487 (face font-lock-constant-face) 23487 23488 (face font-lock-string-face) 23488 23498 nil 23498 23499 (face font-lock-string-face) 23499 23530 (face font-lock-constant-face) 23530 23531 (face font-lock-string-face) 23531 23541 nil 23541 23542 (face font-lock-string-face) 23542 23563 (face font-lock-constant-face) 23563 23564 (face font-lock-string-face) 23564 23574 nil 23574 23575 (face font-lock-string-face) 23575 23609 (face font-lock-constant-face) 23609 23610 (face font-lock-string-face) 23610 23620 nil 23620 23621 (face font-lock-string-face) 23621 23654 (face font-lock-constant-face) 23654 23655 (face font-lock-string-face) 23655 23665 nil 23665 23666 (face font-lock-string-face) 23666 23700 (face font-lock-constant-face) 23700 23701 (face font-lock-string-face) 23701 23711 nil 23711 23712 (face font-lock-string-face) 23712 23753 (face font-lock-constant-face) 23753 23754 (face font-lock-string-face) 23754 23764 nil 23764 23765 (face font-lock-string-face) 23765 23790 (face font-lock-constant-face) 23790 23791 (face font-lock-string-face) 23791 23801 nil 23801 23802 (face font-lock-string-face) 23802 23825 (face font-lock-constant-face) 23825 23826 (face font-lock-string-face) 23826 23836 nil 23836 23837 (face font-lock-string-face) 23837 23862 (face font-lock-constant-face) 23862 23863 (face font-lock-string-face) 23863 23873 nil 23873 23874 (face font-lock-string-face) 23874 23906 (face font-lock-constant-face) 23906 23907 (face font-lock-string-face) 23907 23917 nil 23917 23918 (face font-lock-string-face) 23918 23947 (face font-lock-constant-face) 23947 23948 (face 
font-lock-string-face) 23948 23958 nil 23958 23959 (face font-lock-string-face) 23959 23981 (face font-lock-constant-face) 23981 23982 (face font-lock-string-face) 23982 23992 nil 23992 23993 (face font-lock-string-face) 23993 24014 (face font-lock-constant-face) 24014 24015 (face font-lock-string-face) 24015 24025 nil 24025 24026 (face font-lock-string-face) 24026 24054 (face font-lock-constant-face) 24054 24055 (face font-lock-string-face) 24055 24065 nil 24065 24066 (face font-lock-string-face) 24066 24093 (face font-lock-constant-face) 24093 24094 (face font-lock-string-face) 24094 24104 nil 24104 24105 (face font-lock-string-face) 24105 24133 (face font-lock-constant-face) 24133 24134 (face font-lock-string-face) 24134 24144 nil 24144 24145 (face font-lock-string-face) 24145 24177 (face font-lock-constant-face) 24177 24178 (face font-lock-string-face) 24178 24188 nil 24188 24189 (face font-lock-string-face) 24189 24221 (face font-lock-constant-face) 24221 24222 (face font-lock-string-face) 24222 24232 nil 24232 24233 (face font-lock-string-face) 24233 24277 (face font-lock-constant-face) 24277 24278 (face font-lock-string-face) 24278 24288 nil 24288 24289 (face font-lock-string-face) 24289 24328 (face font-lock-constant-face) 24328 24329 (face font-lock-string-face) 24329 24339 nil 24339 24340 (face font-lock-string-face) 24340 24379 (face font-lock-constant-face) 24379 24380 (face font-lock-string-face) 24380 24390 nil 24390 24391 (face font-lock-string-face) 24391 24424 (face font-lock-constant-face) 24424 24425 (face font-lock-string-face) 24425 24435 nil 24435 24436 (face font-lock-string-face) 24436 24476 (face font-lock-constant-face) 24476 24477 (face font-lock-string-face) 24477 24487 nil 24487 24488 (face font-lock-string-face) 24488 24521 (face font-lock-constant-face) 24521 24522 (face font-lock-string-face) 24522 24532 nil 24532 24533 (face font-lock-string-face) 24533 24567 (face font-lock-constant-face) 24567 24568 (face font-lock-string-face) 24568 24578 nil 24578 24579 (face font-lock-string-face) 24579 24610 (face font-lock-constant-face) 24610 24611 (face font-lock-string-face) 24611 24621 nil 24621 24622 (face font-lock-string-face) 24622 24673 (face font-lock-constant-face) 24673 24674 (face font-lock-string-face) 24674 24684 nil 24684 24685 (face font-lock-string-face) 24685 24725 (face font-lock-constant-face) 24725 24726 (face font-lock-string-face) 24726 24736 nil 24736 24737 (face font-lock-string-face) 24737 24773 (face font-lock-constant-face) 24773 24774 (face font-lock-string-face) 24774 24784 nil 24784 24785 (face font-lock-string-face) 24785 24821 (face font-lock-constant-face) 24821 24822 (face font-lock-string-face) 24822 24832 nil 24832 24833 (face font-lock-string-face) 24833 24874 (face font-lock-constant-face) 24874 24875 (face font-lock-string-face) 24875 24885 nil 24885 24886 (face font-lock-string-face) 24886 24926 (face font-lock-constant-face) 24926 24927 (face font-lock-string-face) 24927 24937 nil 24937 24938 (face font-lock-string-face) 24938 24977 (face font-lock-constant-face) 24977 24978 (face font-lock-string-face) 24978 24988 nil 24988 24989 (face font-lock-string-face) 24989 25035 (face font-lock-constant-face) 25035 25036 (face font-lock-string-face) 25036 25046 nil 25046 25047 (face font-lock-string-face) 25047 25070 (face font-lock-constant-face) 25070 25071 (face font-lock-string-face) 25071 25081 nil 25081 25082 (face font-lock-string-face) 25082 25104 (face font-lock-constant-face) 25104 25105 (face font-lock-string-face) 25105 
25115 nil 25115 25116 (face font-lock-string-face) 25116 25152 (face font-lock-constant-face) 25152 25153 (face font-lock-string-face) 25153 25163 nil 25163 25164 (face font-lock-string-face) 25164 25210 (face font-lock-constant-face) 25210 25211 (face font-lock-string-face) 25211 25221 nil 25221 25222 (face font-lock-string-face) 25222 25250 (face font-lock-constant-face) 25250 25251 (face font-lock-string-face) 25251 25268 nil 25268 25269 (face font-lock-string-face) 25269 25279 (face font-lock-keyword-face) 25279 25280 (face font-lock-string-face) 25280 25293 nil 25293 25294 (face font-lock-string-face) 25294 25319 (face font-lock-variable-name-face) 25319 25320 (face font-lock-string-face) 25320 25334 nil 25334 25335 (face font-lock-string-face) 25335 25345 (face font-lock-keyword-face) 25345 25346 (face font-lock-string-face) 25346 25363 nil 25363 25364 (face font-lock-string-face) 25364 25385 (face font-lock-variable-name-face) 25385 25386 (face font-lock-string-face) 25386 25404 nil 25404 25405 (face font-lock-string-face) 25405 25417 (face font-lock-keyword-face) 25417 25418 (face font-lock-string-face) 25418 25438 nil 25438 25439 (face font-lock-string-face) 25439 25480 (face font-lock-function-name-face) 25480 25481 (face font-lock-string-face) 25481 25550 nil 25550 25551 (face font-lock-string-face) 25551 25566 (face font-lock-variable-name-face) 25566 25567 (face font-lock-string-face) 25567 25581 nil 25581 25582 (face font-lock-string-face) 25582 25594 (face font-lock-keyword-face) 25594 25595 (face font-lock-string-face) 25595 25611 nil 25611 25612 (face font-lock-string-face) 25612 25651 (face font-lock-function-name-face) 25651 25652 (face font-lock-string-face) 25652 25688 nil 25688 25689 (face font-lock-string-face) 25689 25704 (face font-lock-variable-name-face) 25704 25705 (face font-lock-string-face) 25705 25719 nil 25719 25720 (face font-lock-string-face) 25720 25728 (face font-lock-keyword-face) 25728 25729 (face font-lock-string-face) 25729 25745 nil 25745 25746 (face font-lock-string-face) 25746 25782 (face font-lock-constant-face) 25782 25783 (face font-lock-string-face) 25783 25797 nil 25797 25798 (face font-lock-string-face) 25798 25820 (face font-lock-constant-face) 25820 25821 (face font-lock-string-face) 25821 25835 nil 25835 25836 (face font-lock-string-face) 25836 25857 (face font-lock-constant-face) 25857 25858 (face font-lock-string-face) 25858 25872 nil 25872 25873 (face font-lock-string-face) 25873 25905 (face font-lock-constant-face) 25905 25906 (face font-lock-string-face) 25906 25920 nil 25920 25921 (face font-lock-string-face) 25921 25961 (face font-lock-constant-face) 25961 25962 (face font-lock-string-face) 25962 25976 nil 25976 25977 (face font-lock-string-face) 25977 26016 (face font-lock-constant-face) 26016 26017 (face font-lock-string-face) 26017 26031 nil 26031 26032 (face font-lock-string-face) 26032 26065 (face font-lock-constant-face) 26065 26066 (face font-lock-string-face) 26066 26080 nil 26080 26081 (face font-lock-string-face) 26081 26115 (face font-lock-constant-face) 26115 26116 (face font-lock-string-face) 26116 26130 nil 26130 26131 (face font-lock-string-face) 26131 26162 (face font-lock-constant-face) 26162 26163 (face font-lock-string-face) 26163 26177 nil 26177 26178 (face font-lock-string-face) 26178 26229 (face font-lock-constant-face) 26229 26230 (face font-lock-string-face) 26230 26244 nil 26244 26245 (face font-lock-string-face) 26245 26285 (face font-lock-constant-face) 26285 26286 (face font-lock-string-face) 26286 
26300 nil 26300 26301 (face font-lock-string-face) 26301 26337 (face font-lock-constant-face) 26337 26338 (face font-lock-string-face) 26338 26352 nil 26352 26353 (face font-lock-string-face) 26353 26394 (face font-lock-constant-face) 26394 26395 (face font-lock-string-face) 26395 26409 nil 26409 26410 (face font-lock-string-face) 26410 26443 (face font-lock-constant-face) 26443 26444 (face font-lock-string-face) 26444 26458 nil 26458 26459 (face font-lock-string-face) 26459 26495 (face font-lock-constant-face) 26495 26496 (face font-lock-string-face) 26496 26532 nil 26532 26533 (face font-lock-string-face) 26533 26546 (face font-lock-variable-name-face) 26546 26547 (face font-lock-string-face) 26547 26561 nil 26561 26562 (face font-lock-string-face) 26562 26572 (face font-lock-keyword-face) 26572 26573 (face font-lock-string-face) 26573 26590 nil 26590 26591 (face font-lock-string-face) 26591 26604 (face font-lock-variable-name-face) 26604 26605 (face font-lock-string-face) 26605 26623 nil 26623 26624 (face font-lock-string-face) 26624 26631 (face font-lock-keyword-face) 26631 26632 (face font-lock-string-face) 26632 26652 nil 26652 26653 (face font-lock-string-face) 26653 26688 (face font-lock-constant-face) 26688 26689 (face font-lock-string-face) 26689 26722 nil 26722 26723 (face font-lock-string-face) 26723 26730 (face font-lock-keyword-face) 26730 26731 (face font-lock-string-face) 26731 26751 nil 26751 26752 (face font-lock-string-face) 26752 26760 (face font-lock-preprocessor-face) 26760 26761 (face font-lock-string-face) 26761 26831 nil 26831 26832 (face font-lock-string-face) 26832 26873 (face font-lock-variable-name-face) 26873 26874 (face font-lock-string-face) 26874 26888 nil 26888 26889 (face font-lock-string-face) 26889 26896 (face font-lock-keyword-face) 26896 26897 (face font-lock-string-face) 26897 26913 nil 26913 26914 (face font-lock-string-face) 26914 26954 (face font-lock-constant-face) 26954 26955 (face font-lock-string-face) 26955 26991 nil 26991 26992 (face font-lock-string-face) 26992 27035 (face font-lock-variable-name-face) 27035 27036 (face font-lock-string-face) 27036 27050 nil 27050 27051 (face font-lock-string-face) 27051 27058 (face font-lock-keyword-face) 27058 27059 (face font-lock-string-face) 27059 27075 nil 27075 27076 (face font-lock-string-face) 27076 27095 (face font-lock-constant-face) 27095 27096 (face font-lock-string-face) 27096 27110 nil 27110 27111 (face font-lock-string-face) 27111 27137 (face font-lock-constant-face) 27137 27138 (face font-lock-string-face) 27138 27152 nil 27152 27153 (face font-lock-string-face) 27153 27186 (face font-lock-constant-face) 27186 27187 (face font-lock-string-face) 27187 27201 nil 27201 27202 (face font-lock-string-face) 27202 27235 (face font-lock-constant-face) 27235 27236 (face font-lock-string-face) 27236 27291 nil 27291 27292 (face font-lock-string-face) 27292 27303 (face font-lock-keyword-face) 27303 27304 (face font-lock-string-face) 27304 27306 nil 27306 27307 (face font-lock-string-face) 27307 27325 (face font-lock-function-name-face) 27325 27326 (face font-lock-string-face) 27326 27334 nil 27334 27335 (face font-lock-string-face) 27335 27339 (face font-lock-keyword-face) 27339 27340 (face font-lock-string-face) 27340 27342 nil 27342 27343 (face font-lock-string-face) 27343 27357 (face font-lock-type-face) 27357 27358 (face font-lock-string-face) 27358 27366 nil 27366 27367 (face font-lock-string-face) 27367 27379 (face font-lock-keyword-face) 27379 27380 (face font-lock-string-face) 27380 27392 nil 
27392 27393 (face font-lock-string-face) 27393 27398 (face font-lock-function-name-face) 27398 27399 (face font-lock-string-face) 27399 27409 nil 27409 27410 (face font-lock-string-face) 27410 27431 (face font-lock-function-name-face) 27431 27432 (face font-lock-string-face) 27432 27442 nil 27442 27443 (face font-lock-string-face) 27443 27469 (face font-lock-function-name-face) 27469 27470 (face font-lock-string-face) 27470 27480 nil 27480 27481 (face font-lock-string-face) 27481 27507 (face font-lock-function-name-face) 27507 27508 (face font-lock-string-face) 27508 27525 nil 27525 27526 (face font-lock-string-face) 27526 27533 (face font-lock-keyword-face) 27533 27534 (face font-lock-string-face) 27534 27546 nil 27546 27547 (face font-lock-string-face) 27547 27591 (face font-lock-constant-face) 27591 27592 (face font-lock-string-face) 27592 27602 nil 27602 27603 (face font-lock-string-face) 27603 27646 (face font-lock-constant-face) 27646 27647 (face font-lock-string-face) 27647 27657 nil 27657 27658 (face font-lock-string-face) 27658 27679 (face font-lock-constant-face) 27679 27680 (face font-lock-string-face) 27680 27690 nil 27690 27691 (face font-lock-string-face) 27691 27711 (face font-lock-constant-face) 27711 27712 (face font-lock-string-face) 27712 27722 nil 27722 27723 (face font-lock-string-face) 27723 27752 (face font-lock-constant-face) 27752 27753 (face font-lock-string-face) 27753 27763 nil 27763 27764 (face font-lock-string-face) 27764 27792 (face font-lock-constant-face) 27792 27793 (face font-lock-string-face) 27793 27803 nil 27803 27804 (face font-lock-string-face) 27804 27829 (face font-lock-constant-face) 27829 27830 (face font-lock-string-face) 27830 27840 nil 27840 27841 (face font-lock-string-face) 27841 27865 (face font-lock-constant-face) 27865 27866 (face font-lock-string-face) 27866 27876 nil 27876 27877 (face font-lock-string-face) 27877 27901 (face font-lock-constant-face) 27901 27902 (face font-lock-string-face) 27902 27912 nil 27912 27913 (face font-lock-string-face) 27913 27936 (face font-lock-constant-face) 27936 27937 (face font-lock-string-face) 27937 27947 nil 27947 27948 (face font-lock-string-face) 27948 27968 (face font-lock-constant-face) 27968 27969 (face font-lock-string-face) 27969 27979 nil 27979 27980 (face font-lock-string-face) 27980 27999 (face font-lock-constant-face) 27999 28000 (face font-lock-string-face) 28000 28030 nil 28030 28031 (face font-lock-string-face) 28031 28042 (face font-lock-keyword-face) 28042 28043 (face font-lock-string-face) 28043 28045 nil 28045 28046 (face font-lock-string-face) 28046 28058 (face font-lock-function-name-face) 28058 28059 (face font-lock-string-face) 28059 28067 nil 28067 28068 (face font-lock-string-face) 28068 28072 (face font-lock-keyword-face) 28072 28073 (face font-lock-string-face) 28073 28075 nil 28075 28076 (face font-lock-string-face) 28076 28086 (face font-lock-type-face) 28086 28087 (face font-lock-string-face) 28087 28095 nil 28095 28096 (face font-lock-string-face) 28096 28108 (face font-lock-keyword-face) 28108 28109 (face font-lock-string-face) 28109 28121 nil 28121 28122 (face font-lock-string-face) 28122 28127 (face font-lock-function-name-face) 28127 28128 (face font-lock-string-face) 28128 28138 nil 28138 28139 (face font-lock-string-face) 28139 28150 (face font-lock-function-name-face) 28150 28151 (face font-lock-string-face) 28151 28161 nil 28161 28162 (face font-lock-string-face) 28162 28183 (face font-lock-function-name-face) 28183 28184 (face font-lock-string-face) 28184 28194 
nil 28194 28195 (face font-lock-string-face) 28195 28216 (face font-lock-function-name-face) 28216 28217 (face font-lock-string-face) 28217 28234 nil 28234 28235 (face font-lock-string-face) 28235 28242 (face font-lock-keyword-face) 28242 28243 (face font-lock-string-face) 28243 28255 nil 28255 28256 (face font-lock-string-face) 28256 28290 (face font-lock-constant-face) 28290 28291 (face font-lock-string-face) 28291 28321 nil 28321 28322 (face font-lock-string-face) 28322 28333 (face font-lock-keyword-face) 28333 28334 (face font-lock-string-face) 28334 28336 nil 28336 28337 (face font-lock-string-face) 28337 28349 (face font-lock-function-name-face) 28349 28350 (face font-lock-string-face) 28350 28358 nil 28358 28359 (face font-lock-string-face) 28359 28363 (face font-lock-keyword-face) 28363 28364 (face font-lock-string-face) 28364 28366 nil 28366 28367 (face font-lock-string-face) 28367 28377 (face font-lock-type-face) 28377 28378 (face font-lock-string-face) 28378 28386 nil 28386 28387 (face font-lock-string-face) 28387 28394 (face font-lock-keyword-face) 28394 28395 (face font-lock-string-face) 28395 28407 nil 28407 28408 (face font-lock-string-face) 28408 28441 (face font-lock-constant-face) 28441 28442 (face font-lock-string-face) 28442 28471 nil 28471 28472 (face font-lock-string-face) 28472 28483 (face font-lock-keyword-face) 28483 28484 (face font-lock-string-face) 28484 28486 nil 28486 28487 (face font-lock-string-face) 28487 28498 (face font-lock-function-name-face) 28498 28499 (face font-lock-string-face) 28499 28507 nil 28507 28508 (face font-lock-string-face) 28508 28512 (face font-lock-keyword-face) 28512 28513 (face font-lock-string-face) 28513 28515 nil 28515 28516 (face font-lock-string-face) 28516 28526 (face font-lock-type-face) 28526 28527 (face font-lock-string-face) 28527 28535 nil 28535 28536 (face font-lock-string-face) 28536 28548 (face font-lock-keyword-face) 28548 28549 (face font-lock-string-face) 28549 28561 nil 28561 28562 (face font-lock-string-face) 28562 28567 (face font-lock-function-name-face) 28567 28568 (face font-lock-string-face) 28568 28578 nil 28578 28579 (face font-lock-string-face) 28579 28600 (face font-lock-function-name-face) 28600 28601 (face font-lock-string-face) 28601 28618 nil 28618 28619 (face font-lock-string-face) 28619 28626 (face font-lock-keyword-face) 28626 28627 (face font-lock-string-face) 28627 28639 nil 28639 28640 (face font-lock-string-face) 28640 28672 (face font-lock-constant-face) 28672 28673 (face font-lock-string-face) 28673 28698 nil 28698 28699 (face font-lock-string-face) 28699 28709 (face font-lock-keyword-face) 28709 28710 (face font-lock-string-face) 28710 28719 nil 28719 28720 (face font-lock-string-face) 28720 28729 (face font-lock-variable-name-face) 28729 28730 (face font-lock-string-face) 28730 28740 nil 28740 28741 (face font-lock-string-face) 28741 28748 (face font-lock-keyword-face) 28748 28749 (face font-lock-string-face) 28749 28773 nil 28773 28774 (face font-lock-string-face) 28774 28785 (face font-lock-keyword-face) 28785 28786 (face font-lock-string-face) 28786 28788 nil 28788 28789 (face font-lock-string-face) 28789 28799 (face font-lock-function-name-face) 28799 28800 (face font-lock-string-face) 28800 28812 nil 28812 28813 (face font-lock-string-face) 28813 28817 (face font-lock-keyword-face) 28817 28818 (face font-lock-string-face) 28818 28820 nil 28820 28821 (face font-lock-string-face) 28821 28831 (face font-lock-type-face) 28831 28832 (face font-lock-string-face) 28832 28844 nil 28844 28845 
(face font-lock-string-face) 28845 28857 (face font-lock-keyword-face) 28857 28858 (face font-lock-string-face) 28858 28874 nil 28874 28875 (face font-lock-string-face) 28875 28880 (face font-lock-function-name-face) 28880 28881 (face font-lock-string-face) 28881 28895 nil 28895 28896 (face font-lock-string-face) 28896 28907 (face font-lock-function-name-face) 28907 28908 (face font-lock-string-face) 28908 28922 nil 28922 28923 (face font-lock-string-face) 28923 28944 (face font-lock-function-name-face) 28944 28945 (face font-lock-string-face) 28945 28959 nil 28959 28960 (face font-lock-string-face) 28960 29043 (face font-lock-function-name-face) 29043 29044 (face font-lock-string-face) 29044 29058 nil 29058 29059 (face font-lock-string-face) 29059 29074 (face font-lock-function-name-face) 29074 29075 (face font-lock-string-face) 29075 29100 nil 29100 29101 (face font-lock-string-face) 29101 29113 (face font-lock-keyword-face) 29113 29114 (face font-lock-string-face) 29114 29130 nil 29130 29131 (face font-lock-string-face) 29131 29133 (face font-lock-constant-face) 29133 29138 (face font-lock-variable-name-face) 29138 29163 (face font-lock-constant-face) 29163 29164 (face font-lock-string-face) 29164 29189 nil 29189 29190 (face font-lock-string-face) 29190 29197 (face font-lock-keyword-face) 29197 29198 (face font-lock-string-face) 29198 29214 nil 29214 29215 (face font-lock-string-face) 29215 29238 (face font-lock-constant-face) 29238 29239 (face font-lock-string-face) 29239 29253 nil 29253 29254 (face font-lock-string-face) 29254 29280 (face font-lock-constant-face) 29280 29281 (face font-lock-string-face) 29281 29295 nil 29295 29296 (face font-lock-string-face) 29296 29321 (face font-lock-constant-face) 29321 29322 (face font-lock-string-face) 29322 29336 nil 29336 29337 (face font-lock-string-face) 29337 29361 (face font-lock-constant-face) 29361 29362 (face font-lock-string-face) 29362 29376 nil 29376 29377 (face font-lock-string-face) 29377 29407 (face font-lock-constant-face) 29407 29408 (face font-lock-string-face) 29408 29422 nil 29422 29423 (face font-lock-string-face) 29423 29453 (face font-lock-constant-face) 29453 29454 (face font-lock-string-face) 29454 29468 nil 29468 29469 (face font-lock-string-face) 29469 29493 (face font-lock-constant-face) 29493 29494 (face font-lock-string-face) 29494 29508 nil 29508 29509 (face font-lock-string-face) 29509 29532 (face font-lock-constant-face) 29532 29533 (face font-lock-string-face) 29533 29547 nil 29547 29548 (face font-lock-string-face) 29548 29575 (face font-lock-constant-face) 29575 29576 (face font-lock-string-face) 29576 29590 nil 29590 29591 (face font-lock-string-face) 29591 29614 (face font-lock-constant-face) 29614 29615 (face font-lock-string-face) 29615 29640 nil 29640 29655 (face font-lock-string-face) 29655 29671 nil 29671 29685 (face font-lock-string-face) 29685 29703 nil 29703 29714 (face font-lock-string-face) 29714 29716 nil 29716 29719 (face font-lock-string-face) 29719 29729 nil 29729 29754 (face font-lock-comment-face) 29754 29792 nil 29792 29793 (face font-lock-string-face) 29793 29800 (face font-lock-keyword-face) 29800 29801 (face font-lock-string-face) 29801 29817 nil 29817 29818 (face font-lock-string-face) 29818 29843 (face font-lock-preprocessor-face) 29843 29844 (face font-lock-string-face) 29844 29892 nil 29892 29893 (face font-lock-string-face) 29893 29929 (face font-lock-variable-name-face) 29929 29930 (face font-lock-string-face) 29930 29940 nil 29940 29941 (face font-lock-string-face) 29941 29948 
(face font-lock-keyword-face) 29948 29949 (face font-lock-string-face) 29949 29973 nil 29973 29974 (face font-lock-string-face) 29974 29985 (face font-lock-keyword-face) 29985 29986 (face font-lock-string-face) 29986 29988 nil 29988 29989 (face font-lock-string-face) 29989 30001 (face font-lock-function-name-face) 30001 30002 (face font-lock-string-face) 30002 30014 nil 30014 30015 (face font-lock-string-face) 30015 30019 (face font-lock-keyword-face) 30019 30020 (face font-lock-string-face) 30020 30022 nil 30022 30023 (face font-lock-string-face) 30023 30033 (face font-lock-type-face) 30033 30034 (face font-lock-string-face) 30034 30046 nil 30046 30047 (face font-lock-string-face) 30047 30059 (face font-lock-keyword-face) 30059 30060 (face font-lock-string-face) 30060 30076 nil 30076 30077 (face font-lock-string-face) 30077 30082 (face font-lock-function-name-face) 30082 30083 (face font-lock-string-face) 30083 30097 nil 30097 30098 (face font-lock-string-face) 30098 30109 (face font-lock-function-name-face) 30109 30110 (face font-lock-string-face) 30110 30124 nil 30124 30125 (face font-lock-string-face) 30125 30146 (face font-lock-function-name-face) 30146 30147 (face font-lock-string-face) 30147 30161 nil 30161 30162 (face font-lock-string-face) 30162 30180 (face font-lock-function-name-face) 30180 30181 (face font-lock-string-face) 30181 30206 nil 30206 30207 (face font-lock-string-face) 30207 30214 (face font-lock-keyword-face) 30214 30215 (face font-lock-string-face) 30215 30231 nil 30231 30232 (face font-lock-string-face) 30232 30266 (face font-lock-constant-face) 30266 30267 (face font-lock-string-face) 30267 30281 nil 30281 30282 (face font-lock-string-face) 30282 30321 (face font-lock-constant-face) 30321 30322 (face font-lock-string-face) 30322 30336 nil 30336 30337 (face font-lock-string-face) 30337 30375 (face font-lock-constant-face) 30375 30376 (face font-lock-string-face) 30376 30390 nil 30390 30391 (face font-lock-string-face) 30391 30430 (face font-lock-constant-face) 30430 30431 (face font-lock-string-face) 30431 30445 nil 30445 30446 (face font-lock-string-face) 30446 30484 (face font-lock-constant-face) 30484 30485 (face font-lock-string-face) 30485 30499 nil 30499 30500 (face font-lock-string-face) 30500 30533 (face font-lock-constant-face) 30533 30534 (face font-lock-string-face) 30534 30548 nil 30548 30549 (face font-lock-string-face) 30549 30581 (face font-lock-constant-face) 30581 30582 (face font-lock-string-face) 30582 30596 nil 30596 30597 (face font-lock-string-face) 30597 30626 (face font-lock-constant-face) 30626 30627 (face font-lock-string-face) 30627 30641 nil 30641 30642 (face font-lock-string-face) 30642 30670 (face font-lock-constant-face) 30670 30671 (face font-lock-string-face) 30671 30685 nil 30685 30686 (face font-lock-string-face) 30686 30714 (face font-lock-constant-face) 30714 30715 (face font-lock-string-face) 30715 30729 nil 30729 30730 (face font-lock-string-face) 30730 30757 (face font-lock-constant-face) 30757 30758 (face font-lock-string-face) 30758 30783 nil 30783 30784 (face font-lock-string-face) 30784 30794 (face font-lock-keyword-face) 30794 30795 (face font-lock-string-face) 30795 30812 nil 30812 30813 (face font-lock-string-face) 30813 30834 (face font-lock-variable-name-face) 30834 30835 (face font-lock-string-face) 30835 30853 nil 30853 30854 (face font-lock-string-face) 30854 30866 (face font-lock-keyword-face) 30866 30867 (face font-lock-string-face) 30867 30887 nil 30887 30888 (face font-lock-string-face) 30888 30917 (face 
font-lock-function-name-face) 30917 30918 (face font-lock-string-face) 30918 30951 nil 30951 30952 (face font-lock-string-face) 30952 30959 (face font-lock-keyword-face) 30959 30960 (face font-lock-string-face) 30960 30980 nil 30980 30981 (face font-lock-string-face) 30981 31015 (face font-lock-constant-face) 31015 31016 (face font-lock-string-face) 31016 31064 nil 31064 31065 (face font-lock-string-face) 31065 31074 (face font-lock-variable-name-face) 31074 31075 (face font-lock-string-face) 31075 31093 nil 31093 31094 (face font-lock-string-face) 31094 31106 (face font-lock-keyword-face) 31106 31107 (face font-lock-string-face) 31107 31127 nil 31127 31128 (face font-lock-string-face) 31128 31175 (face font-lock-function-name-face) 31175 31176 (face font-lock-string-face) 31176 31194 nil 31194 31195 (face font-lock-string-face) 31195 31245 (face font-lock-function-name-face) 31245 31246 (face font-lock-string-face) 31246 31279 nil 31279 31280 (face font-lock-string-face) 31280 31287 (face font-lock-keyword-face) 31287 31288 (face font-lock-string-face) 31288 31308 nil 31308 31309 (face font-lock-string-face) 31309 31341 (face font-lock-constant-face) 31341 31342 (face font-lock-string-face) 31342 31423 nil 31423 31424 (face font-lock-string-face) 31424 31462 (face font-lock-variable-name-face) 31462 31463 (face font-lock-string-face) 31463 31473 nil 31473 31474 (face font-lock-string-face) 31474 31481 (face font-lock-keyword-face) 31481 31482 (face font-lock-string-face) 31482 31506 nil 31506 31507 (face font-lock-string-face) 31507 31518 (face font-lock-keyword-face) 31518 31519 (face font-lock-string-face) 31519 31521 nil 31521 31522 (face font-lock-string-face) 31522 31539 (face font-lock-function-name-face) 31539 31540 (face font-lock-string-face) 31540 31552 nil 31552 31553 (face font-lock-string-face) 31553 31557 (face font-lock-keyword-face) 31557 31558 (face font-lock-string-face) 31558 31560 nil 31560 31561 (face font-lock-string-face) 31561 31571 (face font-lock-type-face) 31571 31572 (face font-lock-string-face) 31572 31584 nil 31584 31585 (face font-lock-string-face) 31585 31597 (face font-lock-keyword-face) 31597 31598 (face font-lock-string-face) 31598 31614 nil 31614 31615 (face font-lock-string-face) 31615 31636 (face font-lock-function-name-face) 31636 31637 (face font-lock-string-face) 31637 31651 nil 31651 31652 (face font-lock-string-face) 31652 31670 (face font-lock-function-name-face) 31670 31671 (face font-lock-string-face) 31671 31696 nil 31696 31697 (face font-lock-string-face) 31697 31706 (face font-lock-keyword-face) 31706 31707 (face font-lock-string-face) 31707 31723 nil 31723 31724 (face font-lock-string-face) 31724 31728 (face font-lock-constant-face) 31728 31729 (face font-lock-string-face) 31729 31743 nil 31743 31744 (face font-lock-string-face) 31744 31748 (face font-lock-constant-face) 31748 31749 (face font-lock-string-face) 31749 31774 nil 31774 31775 (face font-lock-string-face) 31775 31782 (face font-lock-keyword-face) 31782 31783 (face font-lock-string-face) 31783 31799 nil 31799 31800 (face font-lock-string-face) 31800 31844 (face font-lock-constant-face) 31844 31845 (face font-lock-string-face) 31845 31893 nil 31893 31894 (face font-lock-string-face) 31894 31943 (face font-lock-variable-name-face) 31943 31944 (face font-lock-string-face) 31944 31954 nil 31954 31955 (face font-lock-string-face) 31955 31962 (face font-lock-keyword-face) 31962 31963 (face font-lock-string-face) 31963 31987 nil 31987 31988 (face font-lock-string-face) 31988 31999 
(face font-lock-keyword-face) 31999 32000 (face font-lock-string-face) 32000 32002 nil 32002 32003 (face font-lock-string-face) 32003 32013 (face font-lock-function-name-face) 32013 32014 (face font-lock-string-face) 32014 32026 nil 32026 32027 (face font-lock-string-face) 32027 32031 (face font-lock-keyword-face) 32031 32032 (face font-lock-string-face) 32032 32034 nil 32034 32035 (face font-lock-string-face) 32035 32045 (face font-lock-type-face) 32045 32046 (face font-lock-string-face) 32046 32058 nil 32058 32059 (face font-lock-string-face) 32059 32071 (face font-lock-keyword-face) 32071 32072 (face font-lock-string-face) 32072 32088 nil 32088 32089 (face font-lock-string-face) 32089 32094 (face font-lock-function-name-face) 32094 32095 (face font-lock-string-face) 32095 32109 nil 32109 32110 (face font-lock-string-face) 32110 32121 (face font-lock-function-name-face) 32121 32122 (face font-lock-string-face) 32122 32136 nil 32136 32137 (face font-lock-string-face) 32137 32158 (face font-lock-function-name-face) 32158 32159 (face font-lock-string-face) 32159 32173 nil 32173 32174 (face font-lock-string-face) 32174 32192 (face font-lock-function-name-face) 32192 32193 (face font-lock-string-face) 32193 32218 nil 32218 32219 (face font-lock-string-face) 32219 32232 (face font-lock-keyword-face) 32232 32233 (face font-lock-string-face) 32233 32249 nil 32249 32250 (face font-lock-string-face) 32250 32259 (face font-lock-keyword-face) 32259 32260 (face font-lock-string-face) 32260 32278 nil 32278 32279 (face font-lock-string-face) 32279 32283 (face font-lock-constant-face) 32283 32284 (face font-lock-string-face) 32284 32300 nil 32300 32301 (face font-lock-string-face) 32301 32306 (face font-lock-constant-face) 32306 32307 (face font-lock-string-face) 32307 32323 nil 32323 32324 (face font-lock-string-face) 32324 32333 (face font-lock-constant-face) 32333 32334 (face font-lock-string-face) 32334 32350 nil 32350 32351 (face font-lock-string-face) 32351 32357 (face font-lock-constant-face) 32357 32358 (face font-lock-string-face) 32358 32398 nil 32398 32399 (face font-lock-string-face) 32399 32406 (face font-lock-keyword-face) 32406 32407 (face font-lock-string-face) 32407 32423 nil 32423 32424 (face font-lock-string-face) 32424 32462 (face font-lock-constant-face) 32462 32463 (face font-lock-string-face) 32463 32477 nil 32477 32478 (face font-lock-string-face) 32478 32515 (face font-lock-constant-face) 32515 32516 (face font-lock-string-face) 32516 32530 nil 32530 32531 (face font-lock-string-face) 32531 32568 (face font-lock-constant-face) 32568 32569 (face font-lock-string-face) 32569 32583 nil 32583 32584 (face font-lock-string-face) 32584 32620 (face font-lock-constant-face) 32620 32621 (face font-lock-string-face) 32621 32635 nil 32635 32636 (face font-lock-string-face) 32636 32666 (face font-lock-constant-face) 32666 32667 (face font-lock-string-face) 32667 32681 nil 32681 32682 (face font-lock-string-face) 32682 32720 (face font-lock-constant-face) 32720 32721 (face font-lock-string-face) 32721 32735 nil 32735 32736 (face font-lock-string-face) 32736 32773 (face font-lock-constant-face) 32773 32774 (face font-lock-string-face) 32774 32822 nil 32822 32823 (face font-lock-string-face) 32823 32838 (face font-lock-variable-name-face) 32838 32839 (face font-lock-string-face) 32839 32849 nil 32849 32850 (face font-lock-string-face) 32850 32857 (face font-lock-keyword-face) 32857 32858 (face font-lock-string-face) 32858 32882 nil 32882 32883 (face font-lock-string-face) 32883 32894 (face 
font-lock-keyword-face) 32894 32895 (face font-lock-string-face) 32895 32897 nil 32897 32898 (face font-lock-string-face) 32898 32912 (face font-lock-function-name-face) 32912 32913 (face font-lock-string-face) 32913 32925 nil 32925 32926 (face font-lock-string-face) 32926 32930 (face font-lock-keyword-face) 32930 32931 (face font-lock-string-face) 32931 32933 nil 32933 32934 (face font-lock-string-face) 32934 32948 (face font-lock-type-face) 32948 32949 (face font-lock-string-face) 32949 32961 nil 32961 32962 (face font-lock-string-face) 32962 32969 (face font-lock-keyword-face) 32969 32970 (face font-lock-string-face) 32970 32986 nil 32986 32987 (face font-lock-string-face) 32987 33022 (face font-lock-constant-face) 33022 33023 (face font-lock-string-face) 33023 33037 nil 33037 33038 (face font-lock-string-face) 33038 33072 (face font-lock-constant-face) 33072 33073 (face font-lock-string-face) 33073 33098 nil 33098 33099 (face font-lock-string-face) 33099 33111 (face font-lock-keyword-face) 33111 33112 (face font-lock-string-face) 33112 33128 nil 33128 33129 (face font-lock-string-face) 33129 33150 (face font-lock-function-name-face) 33150 33151 (face font-lock-string-face) 33151 33176 nil 33176 33177 (face font-lock-string-face) 33177 33189 (face font-lock-keyword-face) 33189 33190 (face font-lock-string-face) 33190 33206 nil 33206 33207 (face font-lock-string-face) 33207 33209 (face font-lock-constant-face) 33209 33232 (face font-lock-variable-name-face) 33232 33239 (face font-lock-constant-face) 33239 33240 (face font-lock-string-face) 33240 33265 nil 33265 33266 (face font-lock-string-face) 33266 33273 (face font-lock-keyword-face) 33273 33274 (face font-lock-string-face) 33274 33306 nil 33306 33307 (face font-lock-string-face) 33307 33318 (face font-lock-keyword-face) 33318 33319 (face font-lock-string-face) 33319 33321 nil 33321 33322 (face font-lock-string-face) 33322 33342 (face font-lock-function-name-face) 33342 33343 (face font-lock-string-face) 33343 33359 nil 33359 33360 (face font-lock-string-face) 33360 33366 (face font-lock-keyword-face) 33366 33367 (face font-lock-string-face) 33367 33387 nil 33387 33388 (face font-lock-string-face) 33388 33434 (face font-lock-constant-face) 33434 33435 (face font-lock-string-face) 33435 33453 nil 33453 33454 (face font-lock-string-face) 33454 33519 (face font-lock-constant-face) 33519 33520 (face font-lock-string-face) 33520 33553 nil 33553 33554 (face font-lock-string-face) 33554 33561 (face font-lock-keyword-face) 33561 33562 (face font-lock-string-face) 33562 33582 nil 33582 33583 (face font-lock-string-face) 33583 33585 (face font-lock-constant-face) 33585 33608 (face font-lock-variable-name-face) 33608 33647 (face font-lock-constant-face) 33647 33648 (face font-lock-string-face) 33648 33681 nil 33681 33682 (face font-lock-string-face) 33682 33688 (face font-lock-keyword-face) 33688 33689 (face font-lock-string-face) 33689 33709 nil 33709 33710 (face font-lock-string-face) 33710 33716 (face font-lock-constant-face) 33716 33717 (face font-lock-string-face) 33717 33735 nil 33735 33736 (face font-lock-string-face) 33736 33738 (face font-lock-constant-face) 33738 33743 (face font-lock-variable-name-face) 33743 33788 (face font-lock-constant-face) 33788 33789 (face font-lock-string-face) 33789 33807 nil 33807 33808 (face font-lock-string-face) 33808 33810 (face font-lock-constant-face) 33810 33811 (face font-lock-string-face) 33811 33829 nil 33829 33830 (face font-lock-string-face) 33830 33833 (face font-lock-constant-face) 33833 33840 
(face font-lock-variable-name-face) 33840 33841 (face font-lock-constant-face) 33841 33842 (face font-lock-string-face) 33842 33860 nil 33860 33861 (face font-lock-string-face) 33861 33864 (face font-lock-constant-face) 33864 33872 (face font-lock-variable-name-face) 33872 33873 (face font-lock-constant-face) 33873 33874 (face font-lock-string-face) 33874 33952 nil 33952 33953 (face font-lock-string-face) 33953 33964 (face font-lock-keyword-face) 33964 33965 (face font-lock-string-face) 33965 33967 nil 33967 33968 (face font-lock-string-face) 33968 33978 (face font-lock-function-name-face) 33978 33979 (face font-lock-string-face) 33979 33991 nil 33991 33992 (face font-lock-string-face) 33992 33996 (face font-lock-keyword-face) 33996 33997 (face font-lock-string-face) 33997 33999 nil 33999 34000 (face font-lock-string-face) 34000 34004 (face font-lock-type-face) 34004 34005 (face font-lock-string-face) 34005 34017 nil 34017 34018 (face font-lock-string-face) 34018 34030 (face font-lock-keyword-face) 34030 34031 (face font-lock-string-face) 34031 34035 nil 34035 34036 (face font-lock-string-face) 34036 34062 (face font-lock-function-name-face) 34062 34063 (face font-lock-string-face) 34063 34077 nil 34077 34078 (face font-lock-string-face) 34078 34087 (face font-lock-keyword-face) 34087 34088 (face font-lock-string-face) 34088 34104 nil 34104 34105 (face font-lock-string-face) 34105 34117 (face font-lock-variable-name-face) 34117 34118 (face font-lock-string-face) 34118 34120 nil 34120 34121 (face font-lock-string-face) 34121 34126 (face font-lock-variable-name-face) 34126 34127 (face font-lock-string-face) 34127 34141 nil 34141 34142 (face font-lock-string-face) 34142 34153 (face font-lock-variable-name-face) 34153 34154 (face font-lock-string-face) 34154 34156 nil 34156 34157 (face font-lock-string-face) 34157 34174 (face font-lock-variable-name-face) 34174 34175 (face font-lock-string-face) 34175 34200 nil 34200 34201 (face font-lock-string-face) 34201 34209 (face font-lock-keyword-face) 34209 34210 (face font-lock-string-face) 34210 34214 nil 34214 34215 (face font-lock-string-face) 34215 34233 (face font-lock-constant-face) 34233 34234 (face font-lock-string-face) 34234 34268 nil 34268 34287 (face font-lock-comment-face) 34287 34293 nil 34293 34365 (face font-lock-comment-face) 34365 34371 nil 34371 34372 (face font-lock-string-face) 34372 34379 (face font-lock-keyword-face) 34379 34380 (face font-lock-string-face) 34380 34404 nil 34404 34405 (face font-lock-string-face) 34405 34416 (face font-lock-keyword-face) 34416 34417 (face font-lock-string-face) 34417 34419 nil 34419 34420 (face font-lock-string-face) 34420 34436 (face font-lock-function-name-face) 34436 34437 (face font-lock-string-face) 34437 34449 nil 34449 34450 (face font-lock-string-face) 34450 34454 (face font-lock-keyword-face) 34454 34455 (face font-lock-string-face) 34455 34457 nil 34457 34458 (face font-lock-string-face) 34458 34468 (face font-lock-type-face) 34468 34469 (face font-lock-string-face) 34469 34481 nil 34481 34482 (face font-lock-string-face) 34482 34494 (face font-lock-keyword-face) 34494 34495 (face font-lock-string-face) 34495 34511 nil 34511 34512 (face font-lock-string-face) 34512 34517 (face font-lock-function-name-face) 34517 34518 (face font-lock-string-face) 34518 34532 nil 34532 34533 (face font-lock-string-face) 34533 34551 (face font-lock-function-name-face) 34551 34552 (face font-lock-string-face) 34552 34566 nil 34566 34567 (face font-lock-string-face) 34567 34588 (face 
font-lock-function-name-face) 34588 34589 (face font-lock-string-face) 34589 34603 nil 34603 34604 (face font-lock-string-face) 34604 34630 (face font-lock-function-name-face) 34630 34631 (face font-lock-string-face) 34631 34645 nil 34645 34646 (face font-lock-string-face) 34646 34680 (face font-lock-function-name-face) 34680 34681 (face font-lock-string-face) 34681 34695 nil 34695 34696 (face font-lock-string-face) 34696 34730 (face font-lock-function-name-face) 34730 34731 (face font-lock-string-face) 34731 34745 nil 34745 34746 (face font-lock-string-face) 34746 34772 (face font-lock-function-name-face) 34772 34773 (face font-lock-string-face) 34773 34787 nil 34787 34788 (face font-lock-string-face) 34788 34827 (face font-lock-function-name-face) 34827 34828 (face font-lock-string-face) 34828 34853 nil 34853 34854 (face font-lock-string-face) 34854 34861 (face font-lock-keyword-face) 34861 34862 (face font-lock-string-face) 34862 34878 nil 34878 34879 (face font-lock-string-face) 34879 34904 (face font-lock-constant-face) 34904 34905 (face font-lock-string-face) 34905 34930 nil 34930 34931 (face font-lock-string-face) 34931 34941 (face font-lock-keyword-face) 34941 34942 (face font-lock-string-face) 34942 34959 nil 34959 34960 (face font-lock-string-face) 34960 34981 (face font-lock-variable-name-face) 34981 34982 (face font-lock-string-face) 34982 35000 nil 35000 35001 (face font-lock-string-face) 35001 35013 (face font-lock-keyword-face) 35013 35014 (face font-lock-string-face) 35014 35034 nil 35034 35077 (face font-lock-comment-face) 35077 35093 nil 35093 35123 (face font-lock-comment-face) 35123 35139 nil 35139 35164 (face font-lock-comment-face) 35164 35180 nil 35180 35194 (face font-lock-comment-face) 35194 35210 nil 35210 35211 (face font-lock-string-face) 35211 35240 (face font-lock-function-name-face) 35240 35241 (face font-lock-string-face) 35241 35274 nil 35274 35275 (face font-lock-string-face) 35275 35285 (face font-lock-keyword-face) 35285 35286 (face font-lock-string-face) 35286 35307 nil 35307 35308 (face font-lock-string-face) 35308 35329 (face font-lock-variable-name-face) 35329 35330 (face font-lock-string-face) 35330 35352 nil 35352 35353 (face font-lock-string-face) 35353 35365 (face font-lock-keyword-face) 35365 35366 (face font-lock-string-face) 35366 35390 nil 35390 35391 (face font-lock-string-face) 35391 35432 (face font-lock-function-name-face) 35432 35433 (face font-lock-string-face) 35433 35553 nil 35553 35554 (face font-lock-string-face) 35554 35565 (face font-lock-keyword-face) 35565 35566 (face font-lock-string-face) 35566 35568 nil 35568 35569 (face font-lock-string-face) 35569 35592 (face font-lock-function-name-face) 35592 35593 (face font-lock-string-face) 35593 35605 nil 35605 35606 (face font-lock-string-face) 35606 35610 (face font-lock-keyword-face) 35610 35611 (face font-lock-string-face) 35611 35613 nil 35613 35614 (face font-lock-string-face) 35614 35624 (face font-lock-type-face) 35624 35625 (face font-lock-string-face) 35625 35637 nil 35637 35638 (face font-lock-string-face) 35638 35650 (face font-lock-keyword-face) 35650 35651 (face font-lock-string-face) 35651 35667 nil 35667 35668 (face font-lock-string-face) 35668 35673 (face font-lock-function-name-face) 35673 35674 (face font-lock-string-face) 35674 35688 nil 35688 35689 (face font-lock-string-face) 35689 35707 (face font-lock-function-name-face) 35707 35708 (face font-lock-string-face) 35708 35722 nil 35722 35723 (face font-lock-string-face) 35723 35757 (face 
font-lock-function-name-face) 35757 35758 (face font-lock-string-face) 35758 35772 nil 35772 35773 (face font-lock-string-face) 35773 35799 (face font-lock-function-name-face) 35799 35800 (face font-lock-string-face) 35800 35814 nil 35814 35815 (face font-lock-string-face) 35815 35841 (face font-lock-function-name-face) 35841 35842 (face font-lock-string-face) 35842 35856 nil 35856 35857 (face font-lock-string-face) 35857 35896 (face font-lock-function-name-face) 35896 35897 (face font-lock-string-face) 35897 35922 nil 35922 35923 (face font-lock-string-face) 35923 35930 (face font-lock-keyword-face) 35930 35931 (face font-lock-string-face) 35931 35947 nil 35947 35948 (face font-lock-string-face) 35948 35970 (face font-lock-constant-face) 35970 35971 (face font-lock-string-face) 35971 35985 nil 35985 35986 (face font-lock-string-face) 35986 36011 (face font-lock-constant-face) 36011 36012 (face font-lock-string-face) 36012 36026 nil 36026 36027 (face font-lock-string-face) 36027 36060 (face font-lock-constant-face) 36060 36061 (face font-lock-string-face) 36061 36075 nil 36075 36076 (face font-lock-string-face) 36076 36117 (face font-lock-constant-face) 36117 36118 (face font-lock-string-face) 36118 36143 nil 36143 36144 (face font-lock-string-face) 36144 36154 (face font-lock-keyword-face) 36154 36155 (face font-lock-string-face) 36155 36172 nil 36172 36173 (face font-lock-string-face) 36173 36198 (face font-lock-variable-name-face) 36198 36199 (face font-lock-string-face) 36199 36217 nil 36217 36218 (face font-lock-string-face) 36218 36228 (face font-lock-keyword-face) 36228 36229 (face font-lock-string-face) 36229 36250 nil 36250 36251 (face font-lock-string-face) 36251 36272 (face font-lock-variable-name-face) 36272 36273 (face font-lock-string-face) 36273 36295 nil 36295 36296 (face font-lock-string-face) 36296 36308 (face font-lock-keyword-face) 36308 36309 (face font-lock-string-face) 36309 36333 nil 36333 36334 (face font-lock-string-face) 36334 36375 (face font-lock-function-name-face) 36375 36376 (face font-lock-string-face) 36376 36496 nil 36496 36497 (face font-lock-string-face) 36497 36508 (face font-lock-keyword-face) 36508 36509 (face font-lock-string-face) 36509 36511 nil 36511 36512 (face font-lock-string-face) 36512 36524 (face font-lock-function-name-face) 36524 36525 (face font-lock-string-face) 36525 36537 nil 36537 36538 (face font-lock-string-face) 36538 36542 (face font-lock-keyword-face) 36542 36543 (face font-lock-string-face) 36543 36545 nil 36545 36546 (face font-lock-string-face) 36546 36556 (face font-lock-type-face) 36556 36557 (face font-lock-string-face) 36557 36569 nil 36569 36570 (face font-lock-string-face) 36570 36582 (face font-lock-keyword-face) 36582 36583 (face font-lock-string-face) 36583 36599 nil 36599 36600 (face font-lock-string-face) 36600 36605 (face font-lock-function-name-face) 36605 36606 (face font-lock-string-face) 36606 36620 nil 36620 36621 (face font-lock-string-face) 36621 36642 (face font-lock-function-name-face) 36642 36643 (face font-lock-string-face) 36643 36657 nil 36657 36658 (face font-lock-string-face) 36658 36697 (face font-lock-function-name-face) 36697 36698 (face font-lock-string-face) 36698 36723 nil 36723 36724 (face font-lock-string-face) 36724 36731 (face font-lock-keyword-face) 36731 36732 (face font-lock-string-face) 36732 36748 nil 36748 36749 (face font-lock-string-face) 36749 36782 (face font-lock-constant-face) 36782 36783 (face font-lock-string-face) 36783 36829 nil 36829 36830 (face font-lock-string-face) 
36830 36841 (face font-lock-keyword-face) 36841 36842 (face font-lock-string-face) 36842 36844 nil 36844 36845 (face font-lock-string-face) 36845 36856 (face font-lock-function-name-face) 36856 36857 (face font-lock-string-face) 36857 36869 nil 36869 36870 (face font-lock-string-face) 36870 36874 (face font-lock-keyword-face) 36874 36875 (face font-lock-string-face) 36875 36877 nil 36877 36878 (face font-lock-string-face) 36878 36888 (face font-lock-type-face) 36888 36889 (face font-lock-string-face) 36889 36901 nil 36901 36902 (face font-lock-string-face) 36902 36914 (face font-lock-keyword-face) 36914 36915 (face font-lock-string-face) 36915 36931 nil 36931 36932 (face font-lock-string-face) 36932 36937 (face font-lock-function-name-face) 36937 36938 (face font-lock-string-face) 36938 36952 nil 36952 36953 (face font-lock-string-face) 36953 36974 (face font-lock-function-name-face) 36974 36975 (face font-lock-string-face) 36975 36989 nil 36989 36990 (face font-lock-string-face) 36990 37029 (face font-lock-function-name-face) 37029 37030 (face font-lock-string-face) 37030 37055 nil 37055 37056 (face font-lock-string-face) 37056 37063 (face font-lock-keyword-face) 37063 37064 (face font-lock-string-face) 37064 37080 nil 37080 37081 (face font-lock-string-face) 37081 37113 (face font-lock-constant-face) 37113 37114 (face font-lock-string-face) 37114 37163 nil)
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/graphviz.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/graphviz.py
new file mode 100755
index 0000000..f19426b
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/graphviz.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Using the JSON dumped by the dump-dependency-json generator,
+generate input suitable for graphviz to render a dependency graph of
+targets."""
+
+
+import collections
+import json
+import sys
+
+
+def ParseTarget(target):
+ target, _, suffix = target.partition("#")
+ filename, _, target = target.partition(":")
+ return filename, target, suffix
+
+
+def LoadEdges(filename, targets):
+ """Load the edges map from the dump file, and filter it to only
+ show targets in |targets| and their depedendents."""
+
+ file = open("dump.json")
+ edges = json.load(file)
+ file.close()
+
+ # Copy out only the edges we're interested in from the full edge list.
+ target_edges = {}
+ to_visit = targets[:]
+ while to_visit:
+ src = to_visit.pop()
+ if src in target_edges:
+ continue
+ target_edges[src] = edges[src]
+ to_visit.extend(edges[src])
+
+ return target_edges
+
+
+def WriteGraph(edges):
+ """Print a graphviz graph to stdout.
+ |edges| is a map of target to a list of other targets it depends on."""
+
+ # Bucket targets by file.
+ files = collections.defaultdict(list)
+ for src, dst in edges.items():
+ build_file, target_name, toolset = ParseTarget(src)
+ files[build_file].append(src)
+
+ print("digraph D {")
+ print(" fontsize=8") # Used by subgraphs.
+ print(" node [fontsize=8]")
+
+ # Output nodes by file. We must first write out each node within
+ # its file grouping before writing out any edges that may refer
+ # to those nodes.
+ for filename, targets in files.items():
+ if len(targets) == 1:
+ # If there's only one node for this file, simplify
+ # the display by making it a box without an internal node.
+ target = targets[0]
+ build_file, target_name, toolset = ParseTarget(target)
+ print(
+ f' "{target}" [shape=box, label="{filename}\\n{target_name}"]'
+ )
+ else:
+ # Group multiple nodes together in a subgraph.
+ print(' subgraph "cluster_%s" {' % filename)
+ print(' label = "%s"' % filename)
+ for target in targets:
+ build_file, target_name, toolset = ParseTarget(target)
+ print(f' "{target}" [label="{target_name}"]')
+ print(" }")
+
+ # Now that we've placed all the nodes within subgraphs, output all
+ # the edges between nodes.
+ for src, dsts in edges.items():
+ for dst in dsts:
+ print(f' "{src}" -> "{dst}"')
+
+ print("}")
+
+
+def main():
+ if len(sys.argv) < 2:
+ print(__doc__, file=sys.stderr)
+ print(file=sys.stderr)
+ print("usage: %s target1 target2..." % (sys.argv[0]), file=sys.stderr)
+ return 1
+
+ edges = LoadEdges("dump.json", sys.argv[1:])
+
+ WriteGraph(edges)
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
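For context, a minimal usage sketch under stated assumptions: the target name below is made up, and dump.json is assumed to have been produced by GYP's dump_dependency_json generator beforehand.

import graphviz  # this tools/graphviz.py on sys.path, not the PyPI package

# "foo/bar.gyp:baz#host" is an illustrative target, not from this patch.
edges = graphviz.LoadEdges("dump.json", ["foo/bar.gyp:baz#host"])
graphviz.WriteGraph(edges)  # emits "digraph D { ... }"; pipe into dot -Tpng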
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/pretty_gyp.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/pretty_gyp.py
new file mode 100755
index 0000000..6eef3a1
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/pretty_gyp.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Pretty-prints the contents of a GYP file."""
+
+
+import re
+import sys
+
+
+# Regex to remove comments when we're counting braces.
+COMMENT_RE = re.compile(r"\s*#.*")
+
+# Regex to remove quoted strings when we're counting braces.
+# It takes into account quoted quotes, and makes sure that the quotes match.
+# NOTE: It does not handle quotes that span more than one line, or
+# cases where an escaped quote is preceded by an escaped backslash.
+QUOTE_RE_STR = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
+QUOTE_RE = re.compile(QUOTE_RE_STR)
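+# e.g. QUOTE_RE.sub("''", "a: '{x}'") == "a: ''" -- the braces are hidden.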
+
+
+def comment_replace(matchobj):
+ return matchobj.group(1) + matchobj.group(2) + "#" * len(matchobj.group(3))
+
+
+def mask_comments(input):
+ """Mask the quoted strings so we skip braces inside quoted strings."""
+ search_re = re.compile(r"(.*?)(#)(.*)")
+ return [search_re.sub(comment_replace, line) for line in input]
+
+
+def quote_replace(matchobj):
+ return "{}{}{}{}".format(
+ matchobj.group(1),
+ matchobj.group(2),
+ "x" * len(matchobj.group(3)),
+ matchobj.group(2),
+ )
+
+
+def mask_quotes(input):
+ """Mask the quoted strings so we skip braces inside quoted strings."""
+ search_re = re.compile(r"(.*?)" + QUOTE_RE_STR)
+ return [search_re.sub(quote_replace, line) for line in input]
+
+
+def do_split(input, masked_input, search_re):
+ output = []
+ mask_output = []
+ for (line, masked_line) in zip(input, masked_input):
+ m = search_re.match(masked_line)
+ while m:
+ split = len(m.group(1))
+ line = line[:split] + r"\n" + line[split:]
+ masked_line = masked_line[:split] + r"\n" + masked_line[split:]
+ m = search_re.match(masked_line)
+ output.extend(line.split(r"\n"))
+ mask_output.extend(masked_line.split(r"\n"))
+ return (output, mask_output)
+
+
+def split_double_braces(input):
+ """Masks out the quotes and comments, and then splits appropriate
+    lines (lines that match the double_*_brace re's above) before
+ indenting them below.
+
+ These are used to split lines which have multiple braces on them, so
+ that the indentation looks prettier when all laid out (e.g. closing
+ braces make a nice diagonal line).
+ """
+ double_open_brace_re = re.compile(r"(.*?[\[\{\(,])(\s*)([\[\{\(])")
+ double_close_brace_re = re.compile(r"(.*?[\]\}\)],?)(\s*)([\]\}\)])")
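+    # e.g. "'targets': [{" is split after the "[" into "'targets': [" and
+    # "{", so each closing brace can later sit on its own indentation step.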
+
+ masked_input = mask_quotes(input)
+ masked_input = mask_comments(masked_input)
+
+ (output, mask_output) = do_split(input, masked_input, double_open_brace_re)
+ (output, mask_output) = do_split(output, mask_output, double_close_brace_re)
+
+ return output
+
+
+def count_braces(line):
+ """keeps track of the number of braces on a given line and returns the result.
+
+ It starts at zero and subtracts for closed braces, and adds for open braces.
+ """
+ open_braces = ["[", "(", "{"]
+ close_braces = ["]", ")", "}"]
+ closing_prefix_re = re.compile(r"[^\s\]\}\)]\s*[\]\}\)]+,?\s*$")
+ cnt = 0
+ stripline = COMMENT_RE.sub(r"", line)
+ stripline = QUOTE_RE.sub(r"''", stripline)
+ for char in stripline:
+ for brace in open_braces:
+ if char == brace:
+ cnt += 1
+ for brace in close_braces:
+ if char == brace:
+ cnt -= 1
+
+ after = False
+ if cnt > 0:
+ after = True
+
+ # This catches the special case of a closing brace having something
+ # other than just whitespace ahead of it -- we don't want to
+ # unindent that until after this line is printed so it stays with
+ # the previous indentation level.
+ if cnt < 0 and closing_prefix_re.match(stripline):
+ after = True
+ return (cnt, after)
+
+
+def prettyprint_input(lines):
+ """Does the main work of indenting the input based on the brace counts."""
+ indent = 0
+ basic_offset = 2
+ for line in lines:
+ if COMMENT_RE.match(line):
+ print(line)
+ else:
+ line = line.strip("\r\n\t ") # Otherwise doesn't strip \r on Unix.
+ if len(line) > 0:
+ (brace_diff, after) = count_braces(line)
+ if brace_diff != 0:
+ if after:
+ print(" " * (basic_offset * indent) + line)
+ indent += brace_diff
+ else:
+ indent += brace_diff
+ print(" " * (basic_offset * indent) + line)
+ else:
+ print(" " * (basic_offset * indent) + line)
+ else:
+ print("")
+
+
+def main():
+ if len(sys.argv) > 1:
+        with open(sys.argv[1]) as f:
+            data = f.read().splitlines()
+ else:
+ data = sys.stdin.read().splitlines()
+ # Split up the double braces.
+ lines = split_double_braces(data)
+
+ # Indent and print the output.
+ prettyprint_input(lines)
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
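A minimal sketch of driving the pretty-printer from Python rather than the command line; the input filename is an illustrative assumption.

import pretty_gyp  # this tools/pretty_gyp.py on sys.path

with open("binding.gyp") as f:  # illustrative input file
    lines = pretty_gyp.split_double_braces(f.read().splitlines())
pretty_gyp.prettyprint_input(lines)  # re-indented GYP goes to stdout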
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/pretty_sln.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/pretty_sln.py
new file mode 100755
index 0000000..6ca0cd1
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/pretty_sln.py
@@ -0,0 +1,183 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints the information in a sln file in a diffable way.
+
+   It first outputs each project in alphabetical order with its
+   dependencies.
+
+ Then it outputs a possible build order.
+"""
+
+
+import os
+import re
+import sys
+import pretty_vcproj
+
+__author__ = "nsylvain (Nicolas Sylvain)"
+
+
+def BuildProject(project, built, projects, deps):
+    # If all dependencies are already built, we can build this project;
+    # otherwise we recurse into each missing dependency first.
+ # This is not infinite-recursion proof.
+ for dep in deps[project]:
+ if dep not in built:
+ BuildProject(dep, built, projects, deps)
+ print(project)
+ built.append(project)
+
+
+def ParseSolution(solution_file):
+ # All projects, their clsid and paths.
+ projects = dict()
+
+ # A list of dependencies associated with a project.
+ dependencies = dict()
+
+    # Regular expressions that match the SLN format.
+ # The first line of a project definition.
+ begin_project = re.compile(
+ r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
+ r'}"\) = "(.*)", "(.*)", "(.*)"$'
+ )
+ # The last line of a project definition.
+ end_project = re.compile("^EndProject$")
+ # The first line of a dependency list.
+ begin_dep = re.compile(r"ProjectSection\(ProjectDependencies\) = postProject$")
+ # The last line of a dependency list.
+ end_dep = re.compile("EndProjectSection$")
+ # A line describing a dependency.
+ dep_line = re.compile(" *({.*}) = ({.*})$")
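+    # e.g. "        {B01CA120-...} = {B01CA120-...}" (dependency GUID twice).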
+
+    current_project = None
+    in_deps = False
+ solution = open(solution_file)
+ for line in solution:
+ results = begin_project.search(line)
+ if results:
+ # Hack to remove icu because the diff is too different.
+ if results.group(1).find("icu") != -1:
+ continue
+ # We remove "_gyp" from the names because it helps to diff them.
+ current_project = results.group(1).replace("_gyp", "")
+ projects[current_project] = [
+ results.group(2).replace("_gyp", ""),
+ results.group(3),
+ results.group(2),
+ ]
+ dependencies[current_project] = []
+ continue
+
+ results = end_project.search(line)
+ if results:
+ current_project = None
+ continue
+
+ results = begin_dep.search(line)
+ if results:
+ in_deps = True
+ continue
+
+ results = end_dep.search(line)
+ if results:
+ in_deps = False
+ continue
+
+ results = dep_line.search(line)
+ if results and in_deps and current_project:
+ dependencies[current_project].append(results.group(1))
+ continue
+
+ # Change all dependencies clsid to name instead.
+ for project in dependencies:
+        # For each dependency in this project:
+ new_dep_array = []
+ for dep in dependencies[project]:
+            # Look for the project name matching this clsid.
+ for project_info in projects:
+ if projects[project_info][1] == dep:
+ new_dep_array.append(project_info)
+ dependencies[project] = sorted(new_dep_array)
+
+ return (projects, dependencies)
+
+
+def PrintDependencies(projects, deps):
+ print("---------------------------------------")
+ print("Dependencies for all projects")
+ print("---------------------------------------")
+ print("-- --")
+
+ for (project, dep_list) in sorted(deps.items()):
+ print("Project : %s" % project)
+ print("Path : %s" % projects[project][0])
+ if dep_list:
+ for dep in dep_list:
+ print(" - %s" % dep)
+ print("")
+
+ print("-- --")
+
+
+def PrintBuildOrder(projects, deps):
+ print("---------------------------------------")
+ print("Build order ")
+ print("---------------------------------------")
+ print("-- --")
+
+ built = []
+ for (project, _) in sorted(deps.items()):
+ if project not in built:
+ BuildProject(project, built, projects, deps)
+
+ print("-- --")
+
+
+def PrintVCProj(projects):
+
+ for project in projects:
+ print("-------------------------------------")
+ print("-------------------------------------")
+ print(project)
+ print(project)
+ print(project)
+ print("-------------------------------------")
+ print("-------------------------------------")
+
+ project_path = os.path.abspath(
+ os.path.join(os.path.dirname(sys.argv[1]), projects[project][2])
+ )
+
+ pretty = pretty_vcproj
+ argv = [
+ "",
+ project_path,
+ "$(SolutionDir)=%s\\" % os.path.dirname(sys.argv[1]),
+ ]
+ argv.extend(sys.argv[3:])
+ pretty.main(argv)
+
+
+def main():
+    # Check that we have at least one parameter: the path to a .sln file.
+ if len(sys.argv) < 2:
+ print('Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0])
+ return 1
+
+ (projects, deps) = ParseSolution(sys.argv[1])
+ PrintDependencies(projects, deps)
+ PrintBuildOrder(projects, deps)
+
+ if "--recursive" in sys.argv:
+ PrintVCProj(projects)
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
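A minimal sketch, assuming pretty_vcproj.py is importable alongside this module and that the (illustrative) .sln path exists.

import pretty_sln  # also imports pretty_vcproj, which must be on sys.path

projects, deps = pretty_sln.ParseSolution(r"c:\path\to\project.sln")
pretty_sln.PrintDependencies(projects, deps)  # alphabetical, diff-friendly
pretty_sln.PrintBuildOrder(projects, deps)    # one feasible build order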
diff --git a/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/pretty_vcproj.py b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/pretty_vcproj.py
new file mode 100755
index 0000000..00d32de
--- /dev/null
+++ b/server/.yarn/unplugged/node-gyp-npm-9.3.1-43540bab9c/node_modules/node-gyp/gyp/tools/pretty_vcproj.py
@@ -0,0 +1,341 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Make the format of a vcproj really pretty.
+
+   This script normalizes and sorts an XML file. It also fetches all the
+   properties inside linked vsprops and includes them explicitly in the vcproj.
+
+ It outputs the resulting xml to stdout.
+"""
+
+
+import os
+import sys
+
+from functools import cmp_to_key
+from xml.dom.minidom import parse
+from xml.dom.minidom import Node
+
+__author__ = "nsylvain (Nicolas Sylvain)"
+ARGUMENTS = None
+REPLACEMENTS = dict()
+
+
+def cmp(x, y):
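+    # Python 2's built-in cmp(): -1, 0 or 1 as x is <, == or > y.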
+ return (x > y) - (x < y)
+
+
+class CmpTuple:
+ """Compare function between 2 tuple."""
+
+ def __call__(self, x, y):
+ return cmp(x[0], y[0])
+
+
+class CmpNode:
+ """Compare function between 2 xml nodes."""
+
+ def __call__(self, x, y):
+ def get_string(node):
+ node_string = "node"
+ node_string += node.nodeName
+ if node.nodeValue:
+ node_string += node.nodeValue
+
+ if node.attributes:
+ # We first sort by name, if present.
+ node_string += node.getAttribute("Name")
+
+ all_nodes = []
+ for (name, value) in node.attributes.items():
+ all_nodes.append((name, value))
+
+                all_nodes.sort(key=cmp_to_key(CmpTuple()))
+ for (name, value) in all_nodes:
+ node_string += name
+ node_string += value
+
+ return node_string
+
+ return cmp(get_string(x), get_string(y))
+
+
+def PrettyPrintNode(node, indent=0):
+ if node.nodeType == Node.TEXT_NODE:
+ if node.data.strip():
+ print("{}{}".format(" " * indent, node.data.strip()))
+ return
+
+ if node.childNodes:
+ node.normalize()
+ # Get the number of attributes
+ attr_count = 0
+ if node.attributes:
+ attr_count = node.attributes.length
+
+ # Print the main tag
+ if attr_count == 0:
+ print("{}<{}>".format(" " * indent, node.nodeName))
+ else:
+ print("{}<{}".format(" " * indent, node.nodeName))
+
+ all_attributes = []
+ for (name, value) in node.attributes.items():
+ all_attributes.append((name, value))
+        all_attributes.sort(key=cmp_to_key(CmpTuple()))
+ for (name, value) in all_attributes:
+ print('{} {}="{}"'.format(" " * indent, name, value))
+ print("%s>" % (" " * indent))
+ if node.nodeValue:
+ print("{} {}".format(" " * indent, node.nodeValue))
+
+ for sub_node in node.childNodes:
+ PrettyPrintNode(sub_node, indent=indent + 2)
+ print("{}</{}>".format(" " * indent, node.nodeName))
+
+
+def FlattenFilter(node):
+ """Returns a list of all the node and sub nodes."""
+ node_list = []
+
+ if node.attributes and node.getAttribute("Name") == "_excluded_files":
+ # We don't add the "_excluded_files" filter.
+ return []
+
+ for current in node.childNodes:
+ if current.nodeName == "Filter":
+ node_list.extend(FlattenFilter(current))
+ else:
+ node_list.append(current)
+
+ return node_list
+
+
+def FixFilenames(filenames, current_directory):
+ new_list = []
+ for filename in filenames:
+ if filename:
+ for key in REPLACEMENTS:
+ filename = filename.replace(key, REPLACEMENTS[key])
+ os.chdir(current_directory)
+ filename = filename.strip("\"' ")
+ if filename.startswith("$"):
+ new_list.append(filename)
+ else:
+ new_list.append(os.path.abspath(filename))
+ return new_list
+
+
+def AbsoluteNode(node):
+ """Makes all the properties we know about in this node absolute."""
+ if node.attributes:
+ for (name, value) in node.attributes.items():
+ if name in [
+ "InheritedPropertySheets",
+ "RelativePath",
+ "AdditionalIncludeDirectories",
+ "IntermediateDirectory",
+ "OutputDirectory",
+ "AdditionalLibraryDirectories",
+ ]:
+ # We want to fix up these paths
+ path_list = value.split(";")
+ new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
+ node.setAttribute(name, ";".join(new_list))
+ if not value:
+ node.removeAttribute(name)
+
+
+def CleanupVcproj(node):
+ """For each sub node, we call recursively this function."""
+ for sub_node in node.childNodes:
+ AbsoluteNode(sub_node)
+ CleanupVcproj(sub_node)
+
+    # Normalize the node, and remove all extraneous whitespace.
+ for sub_node in node.childNodes:
+ if sub_node.nodeType == Node.TEXT_NODE:
+ sub_node.data = sub_node.data.replace("\r", "")
+ sub_node.data = sub_node.data.replace("\n", "")
+ sub_node.data = sub_node.data.rstrip()
+
+    # Sort all the semicolon-separated attribute values, and also
+    # remove the duplicates.
+ if node.attributes:
+ for (name, value) in node.attributes.items():
+ sorted_list = sorted(value.split(";"))
+ unique_list = []
+ for i in sorted_list:
+ if not unique_list.count(i):
+ unique_list.append(i)
+ node.setAttribute(name, ";".join(unique_list))
+ if not value:
+ node.removeAttribute(name)
+
+ if node.childNodes:
+ node.normalize()
+
+ # For each node, take a copy, and remove it from the list.
+ node_array = []
+ while node.childNodes and node.childNodes[0]:
+ # Take a copy of the node and remove it from the list.
+ current = node.childNodes[0]
+ node.removeChild(current)
+
+ # If the child is a filter, we want to append all its children
+ # to this same list.
+ if current.nodeName == "Filter":
+ node_array.extend(FlattenFilter(current))
+ else:
+ node_array.append(current)
+
+ # Sort the list.
+    node_array.sort(key=cmp_to_key(CmpNode()))
+
+ # Insert the nodes in the correct order.
+ for new_node in node_array:
+ # But don't append empty tool node.
+ if new_node.nodeName == "Tool":
+ if new_node.attributes and new_node.attributes.length == 1:
+ # This one was empty.
+ continue
+ if new_node.nodeName == "UserMacro":
+ continue
+ node.appendChild(new_node)
+
+
+def GetConfigurationNodes(vcproj):
+ # TODO(nsylvain): Find a better way to navigate the xml.
+ nodes = []
+ for node in vcproj.childNodes:
+ if node.nodeName == "Configurations":
+ for sub_node in node.childNodes:
+ if sub_node.nodeName == "Configuration":
+ nodes.append(sub_node)
+
+ return nodes
+
+
+def GetChildrenVsprops(filename):
+ dom = parse(filename)
+ if dom.documentElement.attributes:
+ vsprops = dom.documentElement.getAttribute("InheritedPropertySheets")
+ return FixFilenames(vsprops.split(";"), os.path.dirname(filename))
+ return []
+
+
+def SeekToNode(node1, child2):
+ # A text node does not have properties.
+ if child2.nodeType == Node.TEXT_NODE:
+ return None
+
+ # Get the name of the current node.
+ current_name = child2.getAttribute("Name")
+ if not current_name:
+ # There is no name. We don't know how to merge.
+ return None
+
+ # Look through all the nodes to find a match.
+ for sub_node in node1.childNodes:
+ if sub_node.nodeName == child2.nodeName:
+ name = sub_node.getAttribute("Name")
+ if name == current_name:
+ return sub_node
+
+ # No match. We give up.
+ return None
+
+
+def MergeAttributes(node1, node2):
+ # No attributes to merge?
+ if not node2.attributes:
+ return
+
+ for (name, value2) in node2.attributes.items():
+ # Don't merge the 'Name' attribute.
+ if name == "Name":
+ continue
+ value1 = node1.getAttribute(name)
+ if value1:
+ # The attribute exist in the main node. If it's equal, we leave it
+ # untouched, otherwise we concatenate it.
+ if value1 != value2:
+ node1.setAttribute(name, ";".join([value1, value2]))
+ else:
+ # The attribute does not exist in the main node. We append this one.
+ node1.setAttribute(name, value2)
+
+        # If the attribute was a property-sheet reference, remove it, since
+        # it is useless once the sheets have been merged in.
+ if name == "InheritedPropertySheets":
+ node1.removeAttribute(name)
+
+
+def MergeProperties(node1, node2):
+ MergeAttributes(node1, node2)
+ for child2 in node2.childNodes:
+ child1 = SeekToNode(node1, child2)
+ if child1:
+ MergeProperties(child1, child2)
+ else:
+ node1.appendChild(child2.cloneNode(True))
+
+
+def main(argv):
+ """Main function of this vcproj prettifier."""
+ global ARGUMENTS
+ ARGUMENTS = argv
+
+    # Check that we have at least one parameter: the path to a .vcproj file.
+ if len(argv) < 2:
+ print(
+ 'Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
+ "[key2=value2]" % argv[0]
+ )
+ return 1
+
+ # Parse the keys
+ for i in range(2, len(argv)):
+ (key, value) = argv[i].split("=")
+ REPLACEMENTS[key] = value
+
+ # Open the vcproj and parse the xml.
+ dom = parse(argv[1])
+
+ # First thing we need to do is find the Configuration Node and merge them
+ # with the vsprops they include.
+    for configuration_node in GetConfigurationNodes(dom.documentElement):
+ # Get the property sheets associated with this configuration.
+ vsprops = configuration_node.getAttribute("InheritedPropertySheets")
+
+ # Fix the filenames to be absolute.
+ vsprops_list = FixFilenames(
+ vsprops.strip().split(";"), os.path.dirname(argv[1])
+ )
+
+ # Extend the list of vsprops with all vsprops contained in the current
+ # vsprops.
+ for current_vsprops in vsprops_list:
+ vsprops_list.extend(GetChildrenVsprops(current_vsprops))
+
+ # Now that we have all the vsprops, we need to merge them.
+ for current_vsprops in vsprops_list:
+ MergeProperties(configuration_node, parse(current_vsprops).documentElement)
+
+ # Now that everything is merged, we need to cleanup the xml.
+ CleanupVcproj(dom.documentElement)
+
+    # Finally, we use the pretty-printing function to print the vcproj back
+    # to the user.
+ # print dom.toprettyxml(newl="\n")
+ PrettyPrintNode(dom.documentElement)
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv))
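A minimal sketch of calling the prettifier programmatically; the paths and the $(SolutionDir) replacement below are illustrative. main() treats argv[1] as the vcproj and any later key=value pairs as path replacements.

import pretty_vcproj

pretty_vcproj.main([
    "pretty_vcproj.py",               # argv[0], ignored
    "c:\\path\\to\\project.vcproj",   # parsed, merged with its vsprops
    "$(SolutionDir)=c:\\path\\to\\",  # macro replacement applied to paths
])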