Updating versions
Build KX3DEX.radio / Build and Deploy (push): Failing after 50s
parent 2753b32ca8
commit 5b75e1ffd5
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
.yarn/unplugged/@img-sharp-win32-x64-npm-0.33.3-bfdb7161f8/node_modules/@img/sharp-win32-x64/.ready (0 lines; generated, vendored, new normal file)
.yarn/unplugged/@img-sharp-win32-x64-npm-0.33.3-bfdb7161f8/node_modules/@img/sharp-win32-x64/LICENSE (191 lines; generated, vendored, new normal file)
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.

"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.

2. Grant of Copyright License.

Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.

3. Grant of Patent License.

Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.

4. Redistribution.

You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:

You must give any other recipients of the Work or Derivative Works a copy of this License; and
You must cause any modified files to carry prominent notices stating that You changed the files; and
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.

You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.

5. Submission of Contributions.

Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.

6. Trademarks.

This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty.

Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.

8. Limitation of Liability.

In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability.

While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work

To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
.yarn/unplugged/@img-sharp-win32-x64-npm-0.33.3-bfdb7161f8/node_modules/@img/sharp-win32-x64/README.md (60 lines; generated, vendored, new normal file)
# `@img/sharp-win32-x64`

Prebuilt sharp for use with Windows x64.

## Licensing

Copyright 2013 Lovell Fuller and others.

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at [https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0)

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

This software contains third-party libraries used under the terms of the following licences:

| Library        | Used under the terms of |
|----------------|-------------------------|
| aom            | BSD 2-Clause + [Alliance for Open Media Patent License 1.0](https://aomedia.org/license/patent-license/) |
| cairo          | Mozilla Public License 2.0 |
| cgif           | MIT Licence |
| expat          | MIT Licence |
| fontconfig     | [fontconfig Licence](https://gitlab.freedesktop.org/fontconfig/fontconfig/blob/main/COPYING) (BSD-like) |
| freetype       | [freetype Licence](https://git.savannah.gnu.org/cgit/freetype/freetype2.git/tree/docs/FTL.TXT) (BSD-like) |
| fribidi        | LGPLv3 |
| gdk-pixbuf     | LGPLv3 |
| glib           | LGPLv3 |
| harfbuzz       | MIT Licence |
| highway        | Apache-2.0 License, BSD 3-Clause |
| lcms           | MIT Licence |
| libarchive     | BSD 2-Clause |
| libexif        | LGPLv3 |
| libffi         | MIT Licence |
| libheif        | LGPLv3 |
| libimagequant  | [BSD 2-Clause](https://github.com/lovell/libimagequant/blob/main/COPYRIGHT) |
| libnsgif       | MIT Licence |
| libpng         | [libpng License](https://github.com/glennrp/libpng/blob/master/LICENSE) |
| librsvg        | LGPLv3 |
| libspng        | [BSD 2-Clause, libpng License](https://github.com/randy408/libspng/blob/master/LICENSE) |
| libtiff        | [libtiff License](https://gitlab.com/libtiff/libtiff/blob/master/LICENSE.md) (BSD-like) |
| libvips        | LGPLv3 |
| libwebp        | New BSD License |
| libxml2        | MIT Licence |
| mozjpeg        | [zlib License, IJG License, BSD-3-Clause](https://github.com/mozilla/mozjpeg/blob/master/LICENSE.md) |
| pango          | LGPLv3 |
| pixman         | MIT Licence |
| proxy-libintl  | LGPLv3 |
| zlib-ng        | [zlib Licence](https://github.com/zlib-ng/zlib-ng/blob/develop/LICENSE.md) |

Use of libraries under the terms of the LGPLv3 is via the "any later version" clause of the LGPLv2 or LGPLv2.1.

Please report any errors or omissions via https://github.com/lovell/sharp-libvips/issues/new
.yarn/unplugged/@img-sharp-win32-x64-npm-0.33.3-bfdb7161f8/node_modules/@img/sharp-win32-x64/lib/libvips-42.dll (binary, not shown; generated, vendored, new normal file)
.yarn/unplugged/@img-sharp-win32-x64-npm-0.33.3-bfdb7161f8/node_modules/@img/sharp-win32-x64/lib/libvips-cpp.dll (binary, not shown; generated, vendored, new normal file)
.yarn/unplugged/@img-sharp-win32-x64-npm-0.33.3-bfdb7161f8/node_modules/@img/sharp-win32-x64/lib/sharp-win32-x64.node (binary, not shown; generated, vendored, new normal file)
.yarn/unplugged/@img-sharp-win32-x64-npm-0.33.3-bfdb7161f8/node_modules/@img/sharp-win32-x64/package.json (42 lines; generated, vendored, new normal file)
{
  "name": "@img/sharp-win32-x64",
  "version": "0.33.3",
  "description": "Prebuilt sharp for use with Windows x64",
  "author": "Lovell Fuller <npm@lovell.info>",
  "homepage": "https://sharp.pixelplumbing.com",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/lovell/sharp.git",
    "directory": "npm/win32-x64"
  },
  "license": "Apache-2.0 AND LGPL-3.0-or-later",
  "funding": {
    "url": "https://opencollective.com/libvips"
  },
  "preferUnplugged": true,
  "files": [
    "lib",
    "versions.json"
  ],
  "publishConfig": {
    "access": "public"
  },
  "type": "commonjs",
  "exports": {
    "./sharp.node": "./lib/sharp-win32-x64.node",
    "./package": "./package.json",
    "./versions": "./versions.json"
  },
  "engines": {
    "node": "^18.17.0 || ^20.3.0 || >=21.0.0",
    "npm": ">=9.6.5",
    "yarn": ">=3.2.0",
    "pnpm": ">=7.1.0"
  },
  "os": [
    "win32"
  ],
  "cpu": [
    "x64"
  ]
}
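The `exports` map is the interesting part of this manifest: sharp loads the native addon through the `./sharp.node` subpath rather than a hard-coded file path. A minimal sketch of how a consumer could resolve those subpaths on a Windows x64 host (hypothetical usage, not sharp's actual loader code):

```ts
// Sketch: resolving the "exports" subpaths of @img/sharp-win32-x64.
// Hypothetical consumer code; sharp's real loader adds fallbacks and
// error reporting. Requires Node >= 18.17 per the "engines" field.
import { createRequire } from "node:module";

const require = createRequire(import.meta.url);

// "./sharp.node" maps to "./lib/sharp-win32-x64.node" (the native addon).
const addon = require("@img/sharp-win32-x64/sharp.node");
console.log("addon loaded:", typeof addon);

// "./versions" maps to "./versions.json" (the next file in this diff).
const versions = require("@img/sharp-win32-x64/versions");
console.log("bundled libvips:", versions.vips);
```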
.yarn/unplugged/@img-sharp-win32-x64-npm-0.33.3-bfdb7161f8/node_modules/@img/sharp-win32-x64/versions.json (31 lines; generated, vendored, new normal file)
{
  "aom": "3.8.2",
  "archive": "3.7.2",
  "cairo": "1.18.0",
  "cgif": "0.3.2",
  "exif": "0.6.24",
  "expat": "2.6.2",
  "ffi": "3.4.6",
  "fontconfig": "2.15.0",
  "freetype": "2.13.2",
  "fribidi": "1.0.13",
  "gdkpixbuf": "2.42.10",
  "glib": "2.80.0",
  "harfbuzz": "8.3.0",
  "heif": "1.17.6",
  "highway": "1.1.0",
  "imagequant": "2.4.1",
  "lcms": "2.16",
  "mozjpeg": "4.1.5",
  "pango": "1.52.1",
  "pixman": "0.43.4",
  "png": "1.6.43",
  "proxy-libintl": "0.4",
  "rsvg": "2.57.92",
  "spng": "0.7.4",
  "tiff": "4.6.0",
  "vips": "8.15.2",
  "webp": "1.3.2",
  "xml": "2.12.5",
  "zlib-ng": "2.1.6"
}
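This manifest pins the exact upstream library versions compiled into the prebuilt DLLs, which makes it useful for license audits and CVE triage. A small illustrative helper (hypothetical, not shipped with the package) that compares the manifest against an expected pin list:

```ts
// Hypothetical audit sketch: compare the bundled library versions against
// pins we expect. The "expected" entries below are illustrative only.
import { createRequire } from "node:module";

const require = createRequire(import.meta.url);
const bundled: Record<string, string> = require("@img/sharp-win32-x64/versions");

const expected: Record<string, string> = {
  vips: "8.15.2",
  webp: "1.3.2",
};

for (const [lib, want] of Object.entries(expected)) {
  const have = bundled[lib];
  if (have !== want) {
    console.warn(`${lib}: bundled ${have ?? "missing"}, expected ${want}`);
  }
}
```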
.yarn/unplugged/@next-swc-win32-x64-msvc-npm-14.1.4-8a05d41299/node_modules/@next/swc-win32-x64-msvc/.ready (0 lines; generated, vendored, new normal file)
.yarn/unplugged/@next-swc-win32-x64-msvc-npm-14.1.4-8a05d41299/node_modules/@next/swc-win32-x64-msvc/README.md (3 lines; generated, vendored, new normal file)
# `@next/swc-win32-x64-msvc`

This is the **x86_64-pc-windows-msvc** binary for `@next/swc`
.yarn/unplugged/@next-swc-win32-x64-msvc-npm-14.1.4-8a05d41299/node_modules/@next/swc-win32-x64-msvc/next-swc.win32-x64-msvc.node (binary, not shown; generated, vendored, new normal file)
.yarn/unplugged/@next-swc-win32-x64-msvc-npm-14.1.4-8a05d41299/node_modules/@next/swc-win32-x64-msvc/package.json (23 lines; generated, vendored, new normal file)
{
  "name": "@next/swc-win32-x64-msvc",
  "version": "14.1.4",
  "repository": {
    "type": "git",
    "url": "https://github.com/vercel/next.js",
    "directory": "packages/next-swc/crates/napi/npm/win32-x64-msvc"
  },
  "os": [
    "win32"
  ],
  "cpu": [
    "x64"
  ],
  "main": "next-swc.win32-x64-msvc.node",
  "files": [
    "next-swc.win32-x64-msvc.node"
  ],
  "license": "MIT",
  "engines": {
    "node": ">= 10"
  }
}
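Like the sharp package above, this one uses the `os` and `cpu` fields so package managers only install it on matching hosts, and `main` points straight at the `.node` addon. A rough sketch of the platform gate those fields encode (illustrative; Next.js's actual binding loader is more involved):

```ts
// Illustrative mirror of the "os"/"cpu" install gate in consumer code.
// Not Next.js's real loader, which probes several platform packages.
import { createRequire } from "node:module";

const require = createRequire(import.meta.url);

if (process.platform === "win32" && process.arch === "x64") {
  // "main" resolves directly to next-swc.win32-x64-msvc.node.
  const swc = require("@next/swc-win32-x64-msvc");
  console.log("native SWC bindings loaded:", typeof swc);
} else {
  console.log("not win32/x64; a different @next/swc-* binary applies");
}
```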
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/.ready (0 lines; generated, vendored, new normal file)
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/.release-please-manifest.json (3 lines; generated, vendored, new normal file)
{
  ".": "10.1.0"
}
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/CHANGELOG.md (933 lines; generated, vendored, new normal file)
# Changelog
|
||||
|
||||
## [10.1.0](https://github.com/nodejs/node-gyp/compare/v10.0.1...v10.1.0) (2024-03-13)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* improve visual studio detection ([#2957](https://github.com/nodejs/node-gyp/issues/2957)) ([109e3d4](https://github.com/nodejs/node-gyp/commit/109e3d4245504a7b75c99f578e1203c0ef4b518e))
|
||||
|
||||
|
||||
### Core
|
||||
|
||||
* add support for locally installed headers ([#2964](https://github.com/nodejs/node-gyp/issues/2964)) ([3298731](https://github.com/nodejs/node-gyp/commit/329873141f0d3e3787d3c006801431da04e4ed0c))
|
||||
* **deps:** bump actions/setup-python from 4 to 5 ([#2960](https://github.com/nodejs/node-gyp/issues/2960)) ([3f0df7e](https://github.com/nodejs/node-gyp/commit/3f0df7e9334e49e8c7f6fdbbb9e1e6c5a8cca53b))
|
||||
* **deps:** bump google-github-actions/release-please-action ([#2961](https://github.com/nodejs/node-gyp/issues/2961)) ([b1f1808](https://github.com/nodejs/node-gyp/commit/b1f1808bfff0d51e6d3eb696ab6a5b89b7b9630c))
|
||||
* print Python executable path using UTF-8 ([#2995](https://github.com/nodejs/node-gyp/issues/2995)) ([c472912](https://github.com/nodejs/node-gyp/commit/c4729129daa9bb5204246b857826fb391ac961e1))
|
||||
* update supported vs versions ([#2959](https://github.com/nodejs/node-gyp/issues/2959)) ([391cc5b](https://github.com/nodejs/node-gyp/commit/391cc5b9b25cffe0cb2edcba3583414a771b4a15))
|
||||
|
||||
|
||||
### Doc
|
||||
|
||||
* npm is currently v10 ([#2970](https://github.com/nodejs/node-gyp/issues/2970)) ([7705a22](https://github.com/nodejs/node-gyp/commit/7705a22f31a62076e9f8429780a459f4ad71ea4c))
|
||||
* remove outdated Node versions from readme ([#2955](https://github.com/nodejs/node-gyp/issues/2955)) ([ae8478e](https://github.com/nodejs/node-gyp/commit/ae8478ec32d9b2fa71b591ac22cdf867ef2e9a7d))
|
||||
* remove outdated update engines.node reference in 10.0.0 changelog ([b42e796](https://github.com/nodejs/node-gyp/commit/b42e7966177f006f3d1aab1d27885d8372c8ed01))
|
||||
|
||||
|
||||
### Miscellaneous
|
||||
|
||||
* only run release please on push ([cff9ac2](https://github.com/nodejs/node-gyp/commit/cff9ac2c3083769a383e00bc60b91562f03116e3))
|
||||
* upgrade release please action from v2 to v4 ([#2982](https://github.com/nodejs/node-gyp/issues/2982)) ([0035d8e](https://github.com/nodejs/node-gyp/commit/0035d8e9dc98b94f0bc8cd9023a6fa635003703e))
|
||||
|
||||
### [10.0.1](https://www.github.com/nodejs/node-gyp/compare/v10.0.0...v10.0.1) (2023-11-02)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* use local `util` for `findAccessibleSync()` ([b39e681](https://www.github.com/nodejs/node-gyp/commit/b39e6819aa9e2c45107d6e60a4913ca036ebfbfd))
|
||||
|
||||
|
||||
### Miscellaneous
|
||||
|
||||
* add parallel test logging ([7de1f5f](https://www.github.com/nodejs/node-gyp/commit/7de1f5f32d550d26d48fe4f76aed5866744edcba))
|
||||
* lint fixes ([4e0ed99](https://www.github.com/nodejs/node-gyp/commit/4e0ed992566f43abc6e988af091ad07fde04acbf))
|
||||
* use platform specific timeouts in tests ([a68586a](https://www.github.com/nodejs/node-gyp/commit/a68586a67d0af238300662cc062422b42820044d))
|
||||
|
||||
## [10.0.0](https://www.github.com/nodejs/node-gyp/compare/v9.4.0...v10.0.0) (2023-10-28)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* use .npmignore file to limit which files are published (#2921)
|
||||
* the `Gyp` class exported is now created using ECMAScript classes and therefore might have small differences to classes that were previously created with `util.inherits`.
|
||||
* All internal functions have been coverted to return promises and no longer accept callbacks. This is not a breaking change for users but may be breaking to consumers of `node-gyp` if you are requiring internal functions directly.
|
||||
* `node-gyp` now supports node `^16.14.0 || >=18.0.0`
|
||||
|
||||
### Features
|
||||
|
||||
* convert all internal functions to async/await ([355622f](https://www.github.com/nodejs/node-gyp/commit/355622f4aac3bd3056b9e03aac5fa2f42a4b3576))
|
||||
* convert internal classes from util.inherits to classes ([d52997e](https://www.github.com/nodejs/node-gyp/commit/d52997e975b9da6e0cea3d9b99873e9ddc768679))
|
||||
* drop node 14 support ([#2929](https://www.github.com/nodejs/node-gyp/issues/2929)) ([1b3bd34](https://www.github.com/nodejs/node-gyp/commit/1b3bd341b40f384988d03207ce8187e93ba609bc))
|
||||
* drop rimraf dependency ([4a50fe3](https://www.github.com/nodejs/node-gyp/commit/4a50fe31574217c4b2a798fc72b19947a64ceea1))
|
||||
* **gyp:** update gyp to v0.16.1 ([#2923](https://www.github.com/nodejs/node-gyp/issues/2923)) ([707927c](https://www.github.com/nodejs/node-gyp/commit/707927cd579205ef2b4b17e61c1cce24c056b452))
|
||||
* replace npmlog with proc-log ([4a50fe3](https://www.github.com/nodejs/node-gyp/commit/4a50fe31574217c4b2a798fc72b19947a64ceea1))
|
||||
* update engines.node to ^14.17.0 || ^16.13.0 || >=18.0.0 ([4a50fe3](https://www.github.com/nodejs/node-gyp/commit/4a50fe31574217c4b2a798fc72b19947a64ceea1))
|
||||
* use .npmignore file to limit which files are published ([#2921](https://www.github.com/nodejs/node-gyp/issues/2921)) ([864a979](https://www.github.com/nodejs/node-gyp/commit/864a979930cf0ef5ad64bc887b901fa8955d058f))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* create Python symlink only during builds, and clean it up after ([#2721](https://www.github.com/nodejs/node-gyp/issues/2721)) ([0f1f667](https://www.github.com/nodejs/node-gyp/commit/0f1f667b737d21905e283df100a2cb639993562a))
|
||||
* promisify build command ([4a50fe3](https://www.github.com/nodejs/node-gyp/commit/4a50fe31574217c4b2a798fc72b19947a64ceea1))
|
||||
* use fs/promises in favor of fs.promises ([4a50fe3](https://www.github.com/nodejs/node-gyp/commit/4a50fe31574217c4b2a798fc72b19947a64ceea1))
|
||||
|
||||
|
||||
### Tests
|
||||
|
||||
* increase mocha timeout ([#2887](https://www.github.com/nodejs/node-gyp/issues/2887)) ([445c28f](https://www.github.com/nodejs/node-gyp/commit/445c28fabc5fbdf9c3bb3341fb70660a3530f6ad))
|
||||
* update expired certs ([#2908](https://www.github.com/nodejs/node-gyp/issues/2908)) ([5746691](https://www.github.com/nodejs/node-gyp/commit/5746691a36f7b37019d4b8d4e9616aec43d20410))
|
||||
|
||||
|
||||
### Doc
|
||||
|
||||
* Add note about Python symlinks (PR 2362) to CHANGELOG.md for 9.1.0 ([#2783](https://www.github.com/nodejs/node-gyp/issues/2783)) ([b3d41ae](https://www.github.com/nodejs/node-gyp/commit/b3d41aeb737ddd54cc292f363abc561dcc0a614e))
|
||||
* README.md Do not hardcode the supported versions of Python ([#2880](https://www.github.com/nodejs/node-gyp/issues/2880)) ([bb93b94](https://www.github.com/nodejs/node-gyp/commit/bb93b946a9c74934b59164deb52128cf913c97d5))
|
||||
* update applicable GitHub links from master to main ([#2843](https://www.github.com/nodejs/node-gyp/issues/2843)) ([d644ce4](https://www.github.com/nodejs/node-gyp/commit/d644ce48311edf090d0e920ad449e5766c757933))
|
||||
* Update windows installation instructions in README.md ([#2882](https://www.github.com/nodejs/node-gyp/issues/2882)) ([c9caa2e](https://www.github.com/nodejs/node-gyp/commit/c9caa2ecf3c7deae68444ce8fabb32d2dca651cd))
|
||||
|
||||
|
||||
### Core
|
||||
|
||||
* find python checks order changed on windows ([#2872](https://www.github.com/nodejs/node-gyp/issues/2872)) ([b030555](https://www.github.com/nodejs/node-gyp/commit/b030555cdb754d9c23906e7e707115cd077bbf76))
|
||||
* glob@10.3.10 ([#2926](https://www.github.com/nodejs/node-gyp/issues/2926)) ([4bef1ec](https://www.github.com/nodejs/node-gyp/commit/4bef1ecc7554097d92beb397fbe1a546c5227545))
|
||||
* glob@8.0.3 ([4a50fe3](https://www.github.com/nodejs/node-gyp/commit/4a50fe31574217c4b2a798fc72b19947a64ceea1))
|
||||
* make-fetch-happen@13.0.0 ([#2927](https://www.github.com/nodejs/node-gyp/issues/2927)) ([059bb6f](https://www.github.com/nodejs/node-gyp/commit/059bb6fd41bb50955a9efbd97887773d60d53221))
|
||||
* nopt@^7.0.0 ([4a50fe3](https://www.github.com/nodejs/node-gyp/commit/4a50fe31574217c4b2a798fc72b19947a64ceea1))
|
||||
* standard@17.0.0 and fix linting errors ([4a50fe3](https://www.github.com/nodejs/node-gyp/commit/4a50fe31574217c4b2a798fc72b19947a64ceea1))
|
||||
* which@3.0.0 ([4a50fe3](https://www.github.com/nodejs/node-gyp/commit/4a50fe31574217c4b2a798fc72b19947a64ceea1))
|
||||
* which@4.0.0 ([#2928](https://www.github.com/nodejs/node-gyp/issues/2928)) ([e388255](https://www.github.com/nodejs/node-gyp/commit/e38825531403aabeae7abe58e76867f31b832f36))
|
||||
|
||||
|
||||
### Miscellaneous
|
||||
|
||||
* add check engines script to CI ([#2922](https://www.github.com/nodejs/node-gyp/issues/2922)) ([21a7249](https://www.github.com/nodejs/node-gyp/commit/21a7249b40d8f95e7721e450fd18764adb1648a7))
|
||||
* empty commit to add changelog entries from [#2770](https://www.github.com/nodejs/node-gyp/issues/2770) ([4a50fe3](https://www.github.com/nodejs/node-gyp/commit/4a50fe31574217c4b2a798fc72b19947a64ceea1))
|
||||
* GitHub Workflows security hardening ([#2740](https://www.github.com/nodejs/node-gyp/issues/2740)) ([26683e9](https://www.github.com/nodejs/node-gyp/commit/26683e993df038fb94d89f2276f3535e4522d79a))
|
||||
* misc testing fixes ([#2930](https://www.github.com/nodejs/node-gyp/issues/2930)) ([4e493d4](https://www.github.com/nodejs/node-gyp/commit/4e493d4fb262d12ac52c84979071ccc79e666a1a))
|
||||
* run tests after release please PR ([3032e10](https://www.github.com/nodejs/node-gyp/commit/3032e1061cc2b7b49f83c397d385bafddc6b0214))
|
||||
|
||||
## [9.4.0](https://www.github.com/nodejs/node-gyp/compare/v9.3.1...v9.4.0) (2023-06-12)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add support for native windows arm64 build tools ([bb76021](https://www.github.com/nodejs/node-gyp/commit/bb76021d35964d2bb125bc6214286f35ae4e6cad))
|
||||
* Upgrade Python linting from flake8 to ruff ([#2815](https://www.github.com/nodejs/node-gyp/issues/2815)) ([fc0ddc6](https://www.github.com/nodejs/node-gyp/commit/fc0ddc6523c62b10e5ca1257500b3ceac01450a7))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* extract tarball to temp directory on Windows ([#2846](https://www.github.com/nodejs/node-gyp/issues/2846)) ([aaa117c](https://www.github.com/nodejs/node-gyp/commit/aaa117c514430aa2c1e568b95df1b6ed1c1fd3b6))
|
||||
* log statement is for devDir not nodedir ([#2840](https://www.github.com/nodejs/node-gyp/issues/2840)) ([55048f8](https://www.github.com/nodejs/node-gyp/commit/55048f8be5707c295fb0876306aded75638a8b63))
|
||||
|
||||
|
||||
### Miscellaneous
|
||||
|
||||
* get update-gyp.py to work with Python >= v3.5 ([#2826](https://www.github.com/nodejs/node-gyp/issues/2826)) ([337e8e6](https://www.github.com/nodejs/node-gyp/commit/337e8e68209bd2481cbb11dacce61234dc5c9419))
|
||||
|
||||
|
||||
### Doc
|
||||
|
||||
* docs/README.md add advise about deprecated node-sass ([#2828](https://www.github.com/nodejs/node-gyp/issues/2828)) ([6f3c2d3](https://www.github.com/nodejs/node-gyp/commit/6f3c2d3c6c0de0dbf8c7245f34c2e0b3eea53812))
|
||||
* Update README.md ([#2822](https://www.github.com/nodejs/node-gyp/issues/2822)) ([c7927e2](https://www.github.com/nodejs/node-gyp/commit/c7927e228dfde059c93e08c26b54dd8026144583))
|
||||
|
||||
|
||||
### Tests
|
||||
|
||||
* remove deprecated Node.js and Python ([#2868](https://www.github.com/nodejs/node-gyp/issues/2868)) ([a0b3d1c](https://www.github.com/nodejs/node-gyp/commit/a0b3d1c3afed71a74501476fcbc6ee3fface4d13))
|
||||
|
||||
### [9.3.1](https://www.github.com/nodejs/node-gyp/compare/v9.3.0...v9.3.1) (2022-12-16)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* increase node 12 support to ^12.13 ([#2771](https://www.github.com/nodejs/node-gyp/issues/2771)) ([888efb9](https://www.github.com/nodejs/node-gyp/commit/888efb9055857afee6a6b54550722cf9ae3ee323))
|
||||
|
||||
|
||||
### Miscellaneous
|
||||
|
||||
* update python test matrix ([#2774](https://www.github.com/nodejs/node-gyp/issues/2774)) ([38f01fa](https://www.github.com/nodejs/node-gyp/commit/38f01fa57d10fdb3db7697121d957bc2e0e96508))
|
||||
|
||||
## [9.3.0](https://www.github.com/nodejs/node-gyp/compare/v9.2.0...v9.3.0) (2022-10-10)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **gyp:** update gyp to v0.14.0 ([#2749](https://www.github.com/nodejs/node-gyp/issues/2749)) ([713b8dc](https://www.github.com/nodejs/node-gyp/commit/713b8dcdbf44532ca9453a127da266386cc737f8))
|
||||
* remove support for VS2015 in Node.js >=19 ([#2746](https://www.github.com/nodejs/node-gyp/issues/2746)) ([131d1a4](https://www.github.com/nodejs/node-gyp/commit/131d1a463baf034a04154bcda753a8295f112a34))
|
||||
* support IBM Open XL C/C++ on z/OS ([#2743](https://www.github.com/nodejs/node-gyp/issues/2743)) ([7d0c83d](https://www.github.com/nodejs/node-gyp/commit/7d0c83d2a95aca743dff972826d0da26203acfc4))
|
||||
|
||||
## [9.2.0](https://www.github.com/nodejs/node-gyp/compare/v9.1.0...v9.2.0) (2022-10-02)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Add proper support for IBM i ([a26494f](https://www.github.com/nodejs/node-gyp/commit/a26494fbb8883d9ef784503979e115dec3e2791e))
|
||||
* **gyp:** update gyp to v0.13.0 ([3e2a532](https://www.github.com/nodejs/node-gyp/commit/3e2a5324f1c24f3a04bca04cf54fe23d5c4d5e50))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* node.js debugger adds stderr (but exit code is 0) -> shouldn't throw ([#2719](https://www.github.com/nodejs/node-gyp/issues/2719)) ([c379a74](https://www.github.com/nodejs/node-gyp/commit/c379a744c65c7ab07c2c3193d9c7e8f25ae1b05e))
|
||||
|
||||
|
||||
### Core
|
||||
|
||||
* enable support for zoslib on z/OS ([#2600](https://www.github.com/nodejs/node-gyp/issues/2600)) ([83c0a12](https://www.github.com/nodejs/node-gyp/commit/83c0a12bf23b4cbf3125d41f9e2d4201db76c9ae))
|
||||
|
||||
|
||||
### Miscellaneous
|
||||
|
||||
* update dependency - nopt@6.0.0 ([#2707](https://www.github.com/nodejs/node-gyp/issues/2707)) ([8958ecf](https://www.github.com/nodejs/node-gyp/commit/8958ecf2bb719227bbcbf155891c3186ee219a2e))
|
||||
|
||||
## [9.1.0](https://www.github.com/nodejs/node-gyp/compare/v9.0.0...v9.1.0) (2022-07-13)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Update function getSDK() to support Windows 11 SDK ([#2565](https://www.github.com/nodejs/node-gyp/issues/2565)) ([ea8520e](https://www.github.com/nodejs/node-gyp/commit/ea8520e3855374bd15b6d001fe112d58a8d7d737))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* extend tap timeout length to allow for slow CI ([6f74c76](https://www.github.com/nodejs/node-gyp/commit/6f74c762fe3c19bdd20245cb5c02e2dfa65d9451))
|
||||
* new ca & server certs, bundle in .js file and unpack for testing ([147e3d3](https://www.github.com/nodejs/node-gyp/commit/147e3d34f44a97deb7aa507207680cf0f4e662a2))
|
||||
* re-label ([#2689](https://www.github.com/nodejs/node-gyp/issues/2689)) ([f0b7863](https://www.github.com/nodejs/node-gyp/commit/f0b7863dadfa365afc173025ae95351aec79abd9))
|
||||
* typo on readme ([bf81cd4](https://www.github.com/nodejs/node-gyp/commit/bf81cd452b931dd4dfa82762c23dd530a075d992))
|
||||
|
||||
|
||||
### Doc
|
||||
|
||||
* update docs/README.md with latest version number ([62d2815](https://www.github.com/nodejs/node-gyp/commit/62d28151bf8266a34e1bcceeb25b4e6e2ae5ca5d))
|
||||
|
||||
|
||||
### Core
|
||||
|
||||
* update due to rename of primary branch ([ca1f068](https://www.github.com/nodejs/node-gyp/commit/ca1f0681a5567ca8cd51acebccd37a633f19bc6a))
|
||||
* Add Python symlink to path (for non-Windows OSes only) ([#2362](https://github.com/nodejs/node-gyp/pull/2362)) ([b9ddcd5](https://github.com/nodejs/node-gyp/commit/b9ddcd5bbd93b05b03674836b6ebdae2c2e74c8c))
|
||||
|
||||
|
||||
### Tests
|
||||
|
||||
* Try msvs-version: [2016, 2019, 2022] ([#2700](https://www.github.com/nodejs/node-gyp/issues/2700)) ([68b5b5b](https://www.github.com/nodejs/node-gyp/commit/68b5b5be9c94ac20c55e88654ff6f55234d7130a))
|
||||
* Upgrade GitHub Actions ([#2623](https://www.github.com/nodejs/node-gyp/issues/2623)) ([245cd5b](https://www.github.com/nodejs/node-gyp/commit/245cd5bbe4441d4f05e88f2fa20a86425419b6af))
|
||||
* Upgrade GitHub Actions ([#2701](https://www.github.com/nodejs/node-gyp/issues/2701)) ([1c64ca7](https://www.github.com/nodejs/node-gyp/commit/1c64ca7f4702c6eb43ecd16fbd67b5d939041621))
|
||||
|
||||
## [9.0.0](https://www.github.com/nodejs/node-gyp/compare/v8.4.1...v9.0.0) (2022-02-24)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* increase "engines" to "node" : "^12.22 || ^14.13 || >=16" (#2601)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* _ in npm_config_ env variables ([eef4eef](https://www.github.com/nodejs/node-gyp/commit/eef4eefccb13ff6a32db862709ee5b2d4edf7e95))
|
||||
* update make-fetch-happen to a minimum of 10.0.3 ([839e414](https://www.github.com/nodejs/node-gyp/commit/839e414b63790c815a4a370d0feee8f24a94d40f))
|
||||
|
||||
|
||||
### Miscellaneous
|
||||
|
||||
* add minimal SECURITY.md ([#2560](https://www.github.com/nodejs/node-gyp/issues/2560)) ([c2a1850](https://www.github.com/nodejs/node-gyp/commit/c2a185056e2e589b520fbc0bcc59c2935cd07ede))
|
||||
|
||||
|
||||
### Doc
|
||||
|
||||
* Add notes/disclaimers for upgrading the copy of node-gyp that npm uses ([#2585](https://www.github.com/nodejs/node-gyp/issues/2585)) ([faf6d48](https://www.github.com/nodejs/node-gyp/commit/faf6d48f8a77c08a313baf9332358c4b1231c73c))
|
||||
* Rename and update Common-issues.md --> docs/README.md ([#2567](https://www.github.com/nodejs/node-gyp/issues/2567)) ([2ef5fb8](https://www.github.com/nodejs/node-gyp/commit/2ef5fb86277c4d81baffc0b9f642a8d86be1bfa5))
|
||||
* rephrase explanation of which node-gyp is used by npm ([#2587](https://www.github.com/nodejs/node-gyp/issues/2587)) ([a2f2988](https://www.github.com/nodejs/node-gyp/commit/a2f298870692022302fa27a1d42363c4a72df407))
|
||||
* title match content ([#2574](https://www.github.com/nodejs/node-gyp/issues/2574)) ([6e8f93b](https://www.github.com/nodejs/node-gyp/commit/6e8f93be0443f2649d4effa7bc773a9da06a33b4))
|
||||
* Update Python versions ([#2571](https://www.github.com/nodejs/node-gyp/issues/2571)) ([e069f13](https://www.github.com/nodejs/node-gyp/commit/e069f13658a8bfb5fd60f74708cf8be0856d92e3))
|
||||
|
||||
|
||||
### Core
|
||||
|
||||
* add lib.target as path for searching libnode on z/OS ([1d499dd](https://www.github.com/nodejs/node-gyp/commit/1d499dd5606f39de2d34fa822fd0fa5ce17fbd06))
|
||||
* increase "engines" to "node" : "^12.22 || ^14.13 || >=16" ([#2601](https://www.github.com/nodejs/node-gyp/issues/2601)) ([6562f92](https://www.github.com/nodejs/node-gyp/commit/6562f92a6f2e67aeae081ddf5272ff117f1fab07))
|
||||
* make-fetch-happen@10.0.1 ([78f6660](https://www.github.com/nodejs/node-gyp/commit/78f66604e0df480d4f36a8fa4f3618c046a6fbdc))
|
||||
|
||||
### [8.4.1](https://www.github.com/nodejs/node-gyp/compare/v8.4.0...v8.4.1) (2021-11-19)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* windows command missing space ([#2553](https://www.github.com/nodejs/node-gyp/issues/2553)) ([cc37b88](https://www.github.com/nodejs/node-gyp/commit/cc37b880690706d3c5d04d5a68c76c392a0a23ed))
|
||||
|
||||
|
||||
### Doc
|
||||
|
||||
* fix typo in powershell node-gyp update ([787cf7f](https://www.github.com/nodejs/node-gyp/commit/787cf7f8e5ddd5039e02b64ace6b7b15e06fe0a4))
|
||||
|
||||
|
||||
### Core
|
||||
|
||||
* npmlog@6.0.0 ([8083f6b](https://www.github.com/nodejs/node-gyp/commit/8083f6b855bd7f3326af04c5f5269fc28d7f2508))
|
||||
|
||||
## [8.4.0](https://www.github.com/nodejs/node-gyp/compare/v8.3.0...v8.4.0) (2021-11-05)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* build with config.gypi from node headers ([a27dc08](https://www.github.com/nodejs/node-gyp/commit/a27dc08696911c6d81e76cc228697243069103c1))
|
||||
* support vs2022 ([#2533](https://www.github.com/nodejs/node-gyp/issues/2533)) ([5a00387](https://www.github.com/nodejs/node-gyp/commit/5a00387e5f8018264a1822f6c4d5dbf425f21cf6))
|
||||
|
||||
## [8.3.0](https://www.github.com/nodejs/node-gyp/compare/v8.2.0...v8.3.0) (2021-10-11)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **gyp:** update gyp to v0.10.0 ([#2521](https://www.github.com/nodejs/node-gyp/issues/2521)) ([5585792](https://www.github.com/nodejs/node-gyp/commit/5585792922a97f0629f143c560efd74470eae87f))
|
||||
|
||||
|
||||
### Tests
|
||||
|
||||
* Python 3.10 was release on Oct. 4th ([#2504](https://www.github.com/nodejs/node-gyp/issues/2504)) ([0a67dcd](https://www.github.com/nodejs/node-gyp/commit/0a67dcd1307f3560495219253241eafcbf4e2a69))
|
||||
|
||||
|
||||
### Miscellaneous
|
||||
|
||||
* **deps:** bump make-fetch-happen from 8.0.14 to 9.1.0 ([b05b4fe](https://www.github.com/nodejs/node-gyp/commit/b05b4fe9891f718f40edf547e9b50e982826d48a))
|
||||
* refactor the creation of config.gypi file ([f2ad87f](https://www.github.com/nodejs/node-gyp/commit/f2ad87ff65f98ad66daa7225ad59d99b759a2b07))
|
||||
|
||||
## [8.2.0](https://www.github.com/nodejs/node-gyp/compare/v8.1.0...v8.2.0) (2021-08-23)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **gyp:** update gyp to v0.9.6 ([#2481](https://www.github.com/nodejs/node-gyp/issues/2481)) ([ed9a9ed](https://www.github.com/nodejs/node-gyp/commit/ed9a9ed653a17c84afa3c327161992d0da7d0cea))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add error arg back into catch block for older Node.js users ([5cde818](https://www.github.com/nodejs/node-gyp/commit/5cde818aac715477e9e9747966bb6b4c4ed070a8))
|
||||
* change default gyp update message ([#2420](https://www.github.com/nodejs/node-gyp/issues/2420)) ([cfd12ff](https://www.github.com/nodejs/node-gyp/commit/cfd12ff3bb0eb4525173413ef6a94b3cd8398cad))
|
||||
* doc how to update node-gyp independently from npm ([c8c0af7](https://www.github.com/nodejs/node-gyp/commit/c8c0af72e78141a02b5da4cd4d704838333a90bd))
|
||||
* missing spaces ([f0882b1](https://www.github.com/nodejs/node-gyp/commit/f0882b1264b2fa701adbc81a3be0b3cba80e333d))
|
||||
|
||||
|
||||
### Core
|
||||
|
||||
* deep-copy process.config during configure ([#2368](https://www.github.com/nodejs/node-gyp/issues/2368)) ([5f1a06c](https://www.github.com/nodejs/node-gyp/commit/5f1a06c50f3b0c3d292f64948f85a004cfcc5c87))
|
||||
|
||||
|
||||
### Miscellaneous
|
||||
|
||||
* **deps:** bump tar from 6.1.0 to 6.1.2 ([#2474](https://www.github.com/nodejs/node-gyp/issues/2474)) ([ec15a3e](https://www.github.com/nodejs/node-gyp/commit/ec15a3e5012004172713c11eebcc9d852d32d380))
|
||||
* fix typos discovered by codespell ([#2442](https://www.github.com/nodejs/node-gyp/issues/2442)) ([2d0ce55](https://www.github.com/nodejs/node-gyp/commit/2d0ce5595e232a3fc7c562cdf39efb77e2312cc1))
|
||||
* GitHub Actions Test on node: [12.x, 14.x, 16.x] ([#2439](https://www.github.com/nodejs/node-gyp/issues/2439)) ([b7bccdb](https://www.github.com/nodejs/node-gyp/commit/b7bccdb527d93b0bb0ce99713f083ce2985fe85c))
|
||||
|
||||
|
||||
### Doc
|
||||
|
||||
* correct link to "binding.gyp files out in the wild" ([#2483](https://www.github.com/nodejs/node-gyp/issues/2483)) ([660dd7b](https://www.github.com/nodejs/node-gyp/commit/660dd7b2a822c184be8027b300e68be67b366772))
|
||||
* **wiki:** Add a link to the node-midi binding.gyp file. ([b354711](https://www.github.com/nodejs/node-gyp/commit/b3547115f6e356358138310e857c7f1ec627a8a7))
|
||||
* **wiki:** add bcrypt ([e199cfa](https://www.github.com/nodejs/node-gyp/commit/e199cfa8fc6161492d2a6ade2190510d0ebf7c0f))
|
||||
* **wiki:** Add helpful information ([4eda827](https://www.github.com/nodejs/node-gyp/commit/4eda8275c03dae6d2f5c40f3c1dbe930d84b0f2b))
|
||||
* **wiki:** Add node-canvas ([13a9553](https://www.github.com/nodejs/node-gyp/commit/13a955317b39caf98fd1f412d8d3f41599e979fd))
|
||||
* **wiki:** Add node-openvg-canvas and node-openvg. ([61f709e](https://www.github.com/nodejs/node-gyp/commit/61f709ec4d9f256a6467e9ff84430a48eeb629d1))
|
||||
* **wiki:** add one more example ([77f3632](https://www.github.com/nodejs/node-gyp/commit/77f363272930d3d4d24fd3973be22e6237128fcc))
|
||||
* **wiki:** add topcube, node-osmium, and node-osrm ([1a75d2b](https://www.github.com/nodejs/node-gyp/commit/1a75d2bf2f562ba50846893a516e111cfbb50885))
|
||||
* **wiki:** Added details for properly fixing ([3d4d9d5](https://www.github.com/nodejs/node-gyp/commit/3d4d9d52d6b5b49de06bb0bb5b68e2686d2b7ebd))
|
||||
* **wiki:** Added Ghostscript4JS ([bf4bed1](https://www.github.com/nodejs/node-gyp/commit/bf4bed1b96a7d22fba6f97f4552ad09f32ac3737))
|
||||
* **wiki:** added levelup ([1575bce](https://www.github.com/nodejs/node-gyp/commit/1575bce3a53db628bfb023fd6f3258fdf98c3195))
|
||||
* **wiki:** Added nk-mysql (nodamysql) ([5b4f2d0](https://www.github.com/nodejs/node-gyp/commit/5b4f2d0e1d5d3eadfd03aaf9c1668340f76c4bea))
|
||||
* **wiki:** Added nk-xrm-installer .gyp references, including .py scripts for providing complete reference to examples of fetching source via http, extracting, and moving files (as opposed to copying) ([ceb3088](https://www.github.com/nodejs/node-gyp/commit/ceb30885b74f6789374ef52267b84767be93ebe4))
|
||||
* **wiki:** Added tip about resolving frustrating LNK1181 error ([e64798d](https://www.github.com/nodejs/node-gyp/commit/e64798de8cac6031ad598a86d7599e81b4d20b17))
|
||||
* **wiki:** ADDED: Node.js binding to OpenCV ([e2dc777](https://www.github.com/nodejs/node-gyp/commit/e2dc77730b09d7ee8682d7713a7603a2d7aacabd))
|
||||
* **wiki:** Adding link to node-cryptopp's gyp file ([875adbe](https://www.github.com/nodejs/node-gyp/commit/875adbe2a4669fa5f2be0250ffbf98fb55e800fd))
|
||||
* **wiki:** Adding the sharp library to the list ([9dce0e4](https://www.github.com/nodejs/node-gyp/commit/9dce0e41650c3fa973e6135a79632d022c662a1d))
|
||||
* **wiki:** Adds node-fann ([23e3d48](https://www.github.com/nodejs/node-gyp/commit/23e3d485ed894ba7c631e9c062f5e366b50c416c))
|
||||
* **wiki:** Adds node-inotify and v8-profiler ([b6e542f](https://www.github.com/nodejs/node-gyp/commit/b6e542f644dbbfe22b88524ec500696e06ee4af7))
|
||||
* **wiki:** Bumping Python version from 2.3 to 2.7 as per the node-gyp readme ([55ebd6e](https://www.github.com/nodejs/node-gyp/commit/55ebd6ebacde975bf84f7bf4d8c66e64cc7cd0da))
|
||||
* **wiki:** C++ build tools version upgraded ([5b899b7](https://www.github.com/nodejs/node-gyp/commit/5b899b70db729c392ced7c98e8e17590c6499fc3))
|
||||
* **wiki:** change bcrypt url to binding.gyp file ([e11bdd8](https://www.github.com/nodejs/node-gyp/commit/e11bdd84de6144492d3eb327d67cbf2d62da1a76))
|
||||
* **wiki:** Clarification + direct link to VS2010 ([531c724](https://www.github.com/nodejs/node-gyp/commit/531c724561d947b5d870de8d52dd8c3c51c5ec2d))
|
||||
* **wiki:** Correcting the link to node-osmium ([fae7516](https://www.github.com/nodejs/node-gyp/commit/fae7516a1d2829b6e234eaded74fb112ebd79a05))
|
||||
* **wiki:** Created "binding.gyp" files out in the wild (markdown) ([d4fd143](https://www.github.com/nodejs/node-gyp/commit/d4fd14355bbe57f229f082f47bb2b3670868203f))
|
||||
* **wiki:** Created Common issues (markdown) ([a38299e](https://www.github.com/nodejs/node-gyp/commit/a38299ea340ceb0e732c6dc6a1b4760257644839))
|
||||
* **wiki:** Created Error: "pre" versions of node cannot be installed (markdown) ([98bc80d](https://www.github.com/nodejs/node-gyp/commit/98bc80d7a62ba70c881f3c39d94f804322e57852))
|
||||
* **wiki:** Created Linking to OpenSSL (markdown) ([c46d00d](https://www.github.com/nodejs/node-gyp/commit/c46d00d83bac5173dea8bbbb175a1a7de74fdaca))
|
||||
* **wiki:** Created Updating npm's bundled node gyp (markdown) ([e0ac8d1](https://www.github.com/nodejs/node-gyp/commit/e0ac8d15af46aadd1c220599e63199b154a514e6))
|
||||
* **wiki:** Created use of undeclared identifier 'TypedArray' (markdown) ([65ba711](https://www.github.com/nodejs/node-gyp/commit/65ba71139e9b7f64ac823e575ee9dbf17d937ce4))
|
||||
* **wiki:** Created Visual Studio 2010 Setup (markdown) ([5b80e83](https://www.github.com/nodejs/node-gyp/commit/5b80e834c8f79dda9fb2770a876ff3cf649c06f3))
|
||||
* **wiki:** Created Visual studio 2012 setup (markdown) ([becef31](https://www.github.com/nodejs/node-gyp/commit/becef316b6c46a33e783667720ee074a0141d1a5))
|
||||
* **wiki:** Destroyed Visual Studio 2010 Setup (markdown) ([93423b4](https://www.github.com/nodejs/node-gyp/commit/93423b43606de9664aeb79635825f5e9941ec9bc))
|
||||
* **wiki:** Destroyed Visual studio 2012 setup (markdown) ([3601508](https://www.github.com/nodejs/node-gyp/commit/3601508bb10fa05da0ddc7e70d57e4b4dd679657))
|
||||
* **wiki:** Different commands for Windows npm v6 vs. v7 ([0fce46b](https://www.github.com/nodejs/node-gyp/commit/0fce46b53340c85e8091cde347d5ed23a443c82f))
|
||||
* **wiki:** Drop in favor of ([9285ff6](https://www.github.com/nodejs/node-gyp/commit/9285ff6e451c52c070a05f05f0a9602621d91d53))
|
||||
* **wiki:** Explicit link to Visual C++ 2010 Express ([378c363](https://www.github.com/nodejs/node-gyp/commit/378c3632f02c096ed819ec8f2611c65bef0c0554))
|
||||
* **wiki:** fix link to gyp file used to build libsqlite3 ([54db8d7](https://www.github.com/nodejs/node-gyp/commit/54db8d7ac33e3f98220960b5d86cfa18a75b53cb))
|
||||
* **wiki:** Fix link to node-zipfile ([92e49a8](https://www.github.com/nodejs/node-gyp/commit/92e49a858ed69cb4847a26a5676ab56ef5e2de33))
|
||||
* **wiki:** fixed node-serialport link ([954ee53](https://www.github.com/nodejs/node-gyp/commit/954ee530b3972d1db591fce32368e4e31b5a25d8))
|
||||
* **wiki:** I highly missing it in common issue as every windows biggner face that issue ([d617fae](https://www.github.com/nodejs/node-gyp/commit/d617faee29c40871ca5c8f93efd0ce929a40d541))
|
||||
* **wiki:** if ouns that the -h did not help. I founs on github that there was support for visual studio 2015, while i couldn't install node-red beacuse it kept telling me the key 2015 was missing. looking in he gyp python code i found the local file was bot up t dat with the github repo. updating took several efforts before i tried to drop the -g option. ([408b72f](https://www.github.com/nodejs/node-gyp/commit/408b72f561329408daeb17834436e381406efcc8))
|
||||
* **wiki:** If permissions error, please try and then the command. ([ee8e1c1](https://www.github.com/nodejs/node-gyp/commit/ee8e1c1e5334096d58e0d6bca6c006f2ee9c88cb))
|
||||
* **wiki:** Improve Unix instructions ([c3e5487](https://www.github.com/nodejs/node-gyp/commit/c3e548736645b535ea5bce613d74ca3e98598243))
|
||||
* **wiki:** link to docs/ from README ([b52e487](https://www.github.com/nodejs/node-gyp/commit/b52e487eac1eb421573d1e67114a242eeff45a00))
|
||||
* **wiki:** Lower case L ([3aa2c6b](https://www.github.com/nodejs/node-gyp/commit/3aa2c6bdb07971b87505e32e32548d75264bd19f))
|
||||
* **wiki:** Make changes discussed in https://github.com/nodejs/node-gyp/issues/2416 ([1dcad87](https://www.github.com/nodejs/node-gyp/commit/1dcad873539027511a5f0243baf770ea90f6f4e2))
|
||||
* **wiki:** move wiki docs into doc/ ([f0a4835](https://www.github.com/nodejs/node-gyp/commit/f0a48355d86534ec3bdabcdb3ce3340fa2e17f39))
|
||||
* **wiki:** node-sass in the wild ([d310a73](https://www.github.com/nodejs/node-gyp/commit/d310a73d64d0065050377baac7047472f7424a1b))
|
||||
* **wiki:** node-srs was a 404 ([bbca21a](https://www.github.com/nodejs/node-gyp/commit/bbca21a1e1ede4c473aff365ca71989a5bda7b57))
|
||||
* **wiki:** Note: VS2010 seems to be no longer available! VS2013 or nothing! ([7b5dcaf](https://www.github.com/nodejs/node-gyp/commit/7b5dcafafccdceae4b8f2b53ac9081a694b6ade8))
* **wiki:** safer doc names, remove unnecessary TypedArray doc ([161c235](https://www.github.com/nodejs/node-gyp/commit/161c2353ef5b562f4acfb2fd77608fcbd0800fc0))
* **wiki:** sorry, forgot to mention a specific windows version. ([d69dffc](https://www.github.com/nodejs/node-gyp/commit/d69dffc16c2b1e3c60dcb5d1c35a49270ba22a35))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([7444b47](https://www.github.com/nodejs/node-gyp/commit/7444b47a7caac1e14d1da474a7fcfcf88d328017))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([d766b74](https://www.github.com/nodejs/node-gyp/commit/d766b7427851e6c2edc02e2504a7be9be7e330c0))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([d319b0e](https://www.github.com/nodejs/node-gyp/commit/d319b0e98c7085de8e51bc5595eba4264b99a7d5))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([3c6692d](https://www.github.com/nodejs/node-gyp/commit/3c6692d538f0ce973869aa237118b7d2483feccd))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([93392d5](https://www.github.com/nodejs/node-gyp/commit/93392d559ce6f250b9c7fe8177e6c88603809dc1))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([8841158](https://www.github.com/nodejs/node-gyp/commit/88411588f300e9b7c00fe516ecd977a1feeeb15c))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([81bfa1f](https://www.github.com/nodejs/node-gyp/commit/81bfa1f1b63d522a9f8a9ae9ca0c7ae90fe75140))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([d1cd237](https://www.github.com/nodejs/node-gyp/commit/d1cd237bad06fa507adb354b9e2181a14dc63d24))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([3de9e17](https://www.github.com/nodejs/node-gyp/commit/3de9e17e0b8a387eafe7bd18d0ec1e3191d118e8))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([a9b7096](https://www.github.com/nodejs/node-gyp/commit/a9b70968fb956eab3b95672048b94350e1565ca3))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([3236069](https://www.github.com/nodejs/node-gyp/commit/3236069689e7e0eb15b324fce74ab58158956f98))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([1462755](https://www.github.com/nodejs/node-gyp/commit/14627556966e5d513bdb8e5208f0e1300f68991f))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([7ab1337](https://www.github.com/nodejs/node-gyp/commit/7ab133752a6c402bb96dcd3d671d73e03e9487ad))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([640895d](https://www.github.com/nodejs/node-gyp/commit/640895d36b7448c646a3b850c1e159106f83c724))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([ced8c96](https://www.github.com/nodejs/node-gyp/commit/ced8c968457f285ab8989c291d28173d7730833c))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([27b883a](https://www.github.com/nodejs/node-gyp/commit/27b883a350ad0db6b9130d7b996f35855ec34c7a))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([d29fb13](https://www.github.com/nodejs/node-gyp/commit/d29fb134f1c4b9dd729ba95f2979e69e0934809f))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([2765891](https://www.github.com/nodejs/node-gyp/commit/27658913e6220cf0371b4b73e25a0e4ab11108a1))
* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([dc97766](https://www.github.com/nodejs/node-gyp/commit/dc9776648d432bca6775c176641f16da14522d4c))
* **wiki:** Updated Error: "pre" versions of node cannot be installed (markdown) ([e9f8b33](https://www.github.com/nodejs/node-gyp/commit/e9f8b33d1f87d04f22cb09a814d7c55d0fa38446))
* **wiki:** Updated Home (markdown) ([3407109](https://www.github.com/nodejs/node-gyp/commit/3407109325cf7ba1e925656b9eb75feffab0557c))
* **wiki:** Updated Home (markdown) ([6e392bc](https://www.github.com/nodejs/node-gyp/commit/6e392bcdd3dd1691773e6e16e1dffc35931b81e0))
* **wiki:** Updated Home (markdown) ([65efe32](https://www.github.com/nodejs/node-gyp/commit/65efe32ccb8d446ce569453364f922dd9d27c945))
* **wiki:** Updated Home (markdown) ([ea28f09](https://www.github.com/nodejs/node-gyp/commit/ea28f0947af91fa638be355143f5df89d2e431c8))
* **wiki:** Updated Home (markdown) ([0e37ff4](https://www.github.com/nodejs/node-gyp/commit/0e37ff48b306c12149661b375895741d3d710da7))
* **wiki:** Updated Home (markdown) ([b398ef4](https://www.github.com/nodejs/node-gyp/commit/b398ef46f660d2b1506508550dadfb4c35639e4b))
* **wiki:** Updated Linking to OpenSSL (markdown) ([8919028](https://www.github.com/nodejs/node-gyp/commit/8919028921fd304f08044098434f0dc6071fb7cf))
* **wiki:** Updated Linking to OpenSSL (markdown) ([c00eb77](https://www.github.com/nodejs/node-gyp/commit/c00eb778fc7dc27e4dab3a9219035ea20458b33b))
* **wiki:** Updated node-levelup to node-leveldown (broken links) ([59668bb](https://www.github.com/nodejs/node-gyp/commit/59668bb0b904feccf3c09afa2fd37378c77af967))
* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([d314854](https://www.github.com/nodejs/node-gyp/commit/d31485415ef69d46effa6090c95698341965de1b))
* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([11858b0](https://www.github.com/nodejs/node-gyp/commit/11858b0655d1eee00c62ad628e719d4378803d14))
* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([33561e9](https://www.github.com/nodejs/node-gyp/commit/33561e9cbf5f4eb46111318503c77df2c6eb484a))
* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([4a7f2d0](https://www.github.com/nodejs/node-gyp/commit/4a7f2d0d869a65c99a78504976567017edadf657))
* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([979a706](https://www.github.com/nodejs/node-gyp/commit/979a7063b950c088a7f4896fc3a48e1d00dfd231))
* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([e50e04d](https://www.github.com/nodejs/node-gyp/commit/e50e04d7b6a3754ea0aa11fe8cef491b3bc5bdd4))

## [8.1.0](https://www.github.com/nodejs/node-gyp/compare/v8.0.0...v8.1.0) (2021-05-28)

### Features

* **gyp:** update gyp to v0.9.1 ([#2402](https://www.github.com/nodejs/node-gyp/issues/2402)) ([814b1b0](https://www.github.com/nodejs/node-gyp/commit/814b1b0eda102afb9fc87e81638a9cf5b650bb10))

### Miscellaneous

* add `release-please-action` for automated releases ([#2395](https://www.github.com/nodejs/node-gyp/issues/2395)) ([07e9d7c](https://www.github.com/nodejs/node-gyp/commit/07e9d7c7ee80ba119ea760c635f72fd8e7efe198))

### Core

* fail gracefully if we can't find the username ([#2375](https://www.github.com/nodejs/node-gyp/issues/2375)) ([fca4795](https://www.github.com/nodejs/node-gyp/commit/fca4795512c67dc8420aaa0d913b5b89a4b147f3))
* log as yes/no whether build dir was created ([#2370](https://www.github.com/nodejs/node-gyp/issues/2370)) ([245dee5](https://www.github.com/nodejs/node-gyp/commit/245dee5b62581309946872ae253226ea3a42c0e3))

### Doc

* fix v8.0.0 release date ([4b83c3d](https://www.github.com/nodejs/node-gyp/commit/4b83c3de7300457919d53f26d96ea9ad6f6bedd8))
* remove redundant version info ([#2403](https://www.github.com/nodejs/node-gyp/issues/2403)) ([1423670](https://www.github.com/nodejs/node-gyp/commit/14236709de64b100a424396b91a5115639daa0ef))
* Update README.md Visual Studio Community page polski to auto ([#2371](https://www.github.com/nodejs/node-gyp/issues/2371)) ([1b4697a](https://www.github.com/nodejs/node-gyp/commit/1b4697abf69ef574a48faf832a7098f4c6c224a5))

## v8.0.0 2021-04-03

* [[`0d8a6f1b19`](https://github.com/nodejs/node-gyp/commit/0d8a6f1b19)] - **ci**: update actions/setup-node to v2 (#2302) (Sora Morimoto) [#2302](https://github.com/nodejs/node-gyp/pull/2302)
* [[`15a5c7d45b`](https://github.com/nodejs/node-gyp/commit/15a5c7d45b)] - **ci**: migrate deprecated grammar (#2285) (Jiawen Geng) [#2285](https://github.com/nodejs/node-gyp/pull/2285)
* [[`06ddde27f9`](https://github.com/nodejs/node-gyp/commit/06ddde27f9)] - **deps**: sync mutual dependencies with npm (DeeDeeG) [#2348](https://github.com/nodejs/node-gyp/pull/2348)
* [[`a5fd1f41e3`](https://github.com/nodejs/node-gyp/commit/a5fd1f41e3)] - **doc**: add downloads badge (#2352) (Jiawen Geng) [#2352](https://github.com/nodejs/node-gyp/pull/2352)
* [[`cc1cbce056`](https://github.com/nodejs/node-gyp/commit/cc1cbce056)] - **doc**: update macOS\_Catalina.md (#2293) (iMrLopez) [#2293](https://github.com/nodejs/node-gyp/pull/2293)
* [[`6287118fc4`](https://github.com/nodejs/node-gyp/commit/6287118fc4)] - **doc**: updated README.md to copy easily (#2281) (மனோஜ்குமார் பழனிச்சாமி) [#2281](https://github.com/nodejs/node-gyp/pull/2281)
* [[`66c0f04467`](https://github.com/nodejs/node-gyp/commit/66c0f04467)] - **doc**: add missing `sudo` to Catalina doc (Karl Horky) [#2244](https://github.com/nodejs/node-gyp/pull/2244)
* [[`0da2e0140d`](https://github.com/nodejs/node-gyp/commit/0da2e0140d)] - **gyp**: update gyp to v0.8.1 (#2355) (DeeDeeG) [#2355](https://github.com/nodejs/node-gyp/pull/2355)
* [[`0093ec8646`](https://github.com/nodejs/node-gyp/commit/0093ec8646)] - **gyp**: Improve our flake8 linting tests (Christian Clauss) [#2356](https://github.com/nodejs/node-gyp/pull/2356)
* [[`a78b584236`](https://github.com/nodejs/node-gyp/commit/a78b584236)] - **(SEMVER-MAJOR)** **gyp**: remove support for Python 2 (#2300) (Christian Clauss) [#2300](https://github.com/nodejs/node-gyp/pull/2300)
* [[`c3c510d89e`](https://github.com/nodejs/node-gyp/commit/c3c510d89e)] - **gyp**: update gyp to v0.8.0 (#2318) (Christian Clauss) [#2318](https://github.com/nodejs/node-gyp/pull/2318)
* [[`9e1397c52e`](https://github.com/nodejs/node-gyp/commit/9e1397c52e)] - **(SEMVER-MAJOR)** **gyp**: update gyp to v0.7.0 (#2284) (Jiawen Geng) [#2284](https://github.com/nodejs/node-gyp/pull/2284)
* [[`1bd18f3e77`](https://github.com/nodejs/node-gyp/commit/1bd18f3e77)] - **(SEMVER-MAJOR)** **lib**: drop Python 2 support in find-python.js (#2333) (DeeDeeG) [#2333](https://github.com/nodejs/node-gyp/pull/2333)
* [[`e81602ef55`](https://github.com/nodejs/node-gyp/commit/e81602ef55)] - **(SEMVER-MAJOR)** **lib**: migrate requests to fetch (#2220) (Matias Lopez) [#2220](https://github.com/nodejs/node-gyp/pull/2220)
* [[`392b7760b4`](https://github.com/nodejs/node-gyp/commit/392b7760b4)] - **lib**: avoid changing process.config (#2322) (Michaël Zasso) [#2322](https://github.com/nodejs/node-gyp/pull/2322)

## v7.1.2 2020-10-17

* [[`096e3aded5`](https://github.com/nodejs/node-gyp/commit/096e3aded5)] - **gyp**: update gyp to 0.6.2 (Myles Borins) [#2241](https://github.com/nodejs/node-gyp/pull/2241)
* [[`54f97cd243`](https://github.com/nodejs/node-gyp/commit/54f97cd243)] - **doc**: add cmd to reset `xcode-select` to initial state (Valera Rozuvan) [#2235](https://github.com/nodejs/node-gyp/pull/2235)

## v7.1.1 2020-10-15

This release restores the location of shared library builds to the pre-v7 location. From v7.0.0 until this release, shared library outputs were placed in a lib.target subdirectory inside the build/{Release,Debug} directory for builds using `make` (Linux, etc.). This is inconsistent with macOS (Xcode) behavior and with previous node-gyp behavior, so it has been reverted. We consider this a bug fix rather than a semver-major change.

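As a minimal illustration of the two layouts (the target name `mylib` is a placeholder, not taken from the release notes), a shared-library target built with `make` would land at:

```
build/Release/lib.target/mylib.so   # v7.0.0 through v7.1.0
build/Release/mylib.so              # pre-v7 layout, restored in v7.1.1
```
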
* [[`18bf2d1d38`](https://github.com/nodejs/node-gyp/commit/18bf2d1d38)] - **deps**: update deps to match npm@7 (Rod Vagg) [#2240](https://github.com/nodejs/node-gyp/pull/2240)
* [[`ee6a837cb7`](https://github.com/nodejs/node-gyp/commit/ee6a837cb7)] - **gyp**: update gyp to 0.6.1 (Rod Vagg) [#2238](https://github.com/nodejs/node-gyp/pull/2238)
* [[`3e7f8ccafc`](https://github.com/nodejs/node-gyp/commit/3e7f8ccafc)] - **lib**: better log message when ps fails (Martin Midtgaard) [#2229](https://github.com/nodejs/node-gyp/pull/2229)
* [[`7fb314339f`](https://github.com/nodejs/node-gyp/commit/7fb314339f)] - **test**: GitHub Actions: Test on Python 3.9 (Christian Clauss) [#2230](https://github.com/nodejs/node-gyp/pull/2230)
* [[`754996b9ec`](https://github.com/nodejs/node-gyp/commit/754996b9ec)] - **doc**: replace status badges with new Actions badge (Rod Vagg) [#2218](https://github.com/nodejs/node-gyp/pull/2218)
* [[`2317dc400c`](https://github.com/nodejs/node-gyp/commit/2317dc400c)] - **ci**: switch to GitHub Actions (Shelley Vohr) [#2210](https://github.com/nodejs/node-gyp/pull/2210)
* [[`2cca9b74f7`](https://github.com/nodejs/node-gyp/commit/2cca9b74f7)] - **doc**: drop the --production flag for installing windows-build-tools (DeeDeeG) [#2206](https://github.com/nodejs/node-gyp/pull/2206)

## v7.1.0 2020-08-12

* [[`aaf33c3029`](https://github.com/nodejs/node-gyp/commit/aaf33c3029)] - **build**: add update-gyp script (Samuel Attard) [#2167](https://github.com/nodejs/node-gyp/pull/2167)
* [[`3baa4e4172`](https://github.com/nodejs/node-gyp/commit/3baa4e4172)] - **(SEMVER-MINOR)** **gyp**: update gyp to 0.4.0 (Samuel Attard) [#2165](https://github.com/nodejs/node-gyp/pull/2165)
* [[`f461d56c53`](https://github.com/nodejs/node-gyp/commit/f461d56c53)] - **(SEMVER-MINOR)** **build**: support apple silicon (arm64 darwin) builds (Samuel Attard) [#2165](https://github.com/nodejs/node-gyp/pull/2165)
* [[`ee6fa7d3bc`](https://github.com/nodejs/node-gyp/commit/ee6fa7d3bc)] - **docs**: note that node-gyp@7 should solve Catalina CLT issues (Rod Vagg) [#2156](https://github.com/nodejs/node-gyp/pull/2156)
* [[`4fc8ff179d`](https://github.com/nodejs/node-gyp/commit/4fc8ff179d)] - **doc**: silence curl for macOS Catalina acid test (Chia Wei Ong) [#2150](https://github.com/nodejs/node-gyp/pull/2150)
* [[`7857cb2eb1`](https://github.com/nodejs/node-gyp/commit/7857cb2eb1)] - **deps**: increase "engines" to "node" : "\>= 10.12.0" (DeeDeeG) [#2153](https://github.com/nodejs/node-gyp/pull/2153)

## v7.0.0 2020-06-03

* [[`e18a61afc1`](https://github.com/nodejs/node-gyp/commit/e18a61afc1)] - **build**: shrink bloated addon binaries on windows (Shelley Vohr) [#2060](https://github.com/nodejs/node-gyp/pull/2060)
* [[`4937722cf5`](https://github.com/nodejs/node-gyp/commit/4937722cf5)] - **(SEMVER-MAJOR)** **deps**: replace mkdirp with {recursive} mkdir (Rod Vagg) [#2123](https://github.com/nodejs/node-gyp/pull/2123)
* [[`d45438a047`](https://github.com/nodejs/node-gyp/commit/d45438a047)] - **(SEMVER-MAJOR)** **deps**: update deps, match to npm@7 (Rod Vagg) [#2126](https://github.com/nodejs/node-gyp/pull/2126)
* [[`ba4f34b7d6`](https://github.com/nodejs/node-gyp/commit/ba4f34b7d6)] - **doc**: update catalina xcode clt download link (Dario Vladovic) [#2133](https://github.com/nodejs/node-gyp/pull/2133)
* [[`f7bfce96ed`](https://github.com/nodejs/node-gyp/commit/f7bfce96ed)] - **doc**: update acid test and introduce curl|bash test script (Dario Vladovic) [#2105](https://github.com/nodejs/node-gyp/pull/2105)
* [[`e529f3309d`](https://github.com/nodejs/node-gyp/commit/e529f3309d)] - **doc**: update README to reflect upgrade to gyp-next (Ujjwal Sharma) [#2092](https://github.com/nodejs/node-gyp/pull/2092)
* [[`9aed6286a3`](https://github.com/nodejs/node-gyp/commit/9aed6286a3)] - **doc**: give more attention to Catalina issues doc (Matheus Marchini) [#2134](https://github.com/nodejs/node-gyp/pull/2134)
* [[`963f2a7b48`](https://github.com/nodejs/node-gyp/commit/963f2a7b48)] - **doc**: improve Catalina discoverability for search engines (Matheus Marchini) [#2135](https://github.com/nodejs/node-gyp/pull/2135)
* [[`7b75af349b`](https://github.com/nodejs/node-gyp/commit/7b75af349b)] - **doc**: add macOS Catalina software update info (Karl Horky) [#2078](https://github.com/nodejs/node-gyp/pull/2078)
* [[`4f23c7bee2`](https://github.com/nodejs/node-gyp/commit/4f23c7bee2)] - **doc**: update link to the code of conduct (#2073) (Michaël Zasso) [#2073](https://github.com/nodejs/node-gyp/pull/2073)
* [[`473cfa283f`](https://github.com/nodejs/node-gyp/commit/473cfa283f)] - **doc**: note in README that Python 3.8 is supported (#2072) (Michaël Zasso) [#2072](https://github.com/nodejs/node-gyp/pull/2072)
* [[`e7402b4a7c`](https://github.com/nodejs/node-gyp/commit/e7402b4a7c)] - **doc**: update catalina xcode cli tools download link (#2044) (Dario Vladović) [#2044](https://github.com/nodejs/node-gyp/pull/2044)
* [[`35de45984f`](https://github.com/nodejs/node-gyp/commit/35de45984f)] - **doc**: update catalina xcode cli tools download link; formatting (Jonathan Hult) [#2034](https://github.com/nodejs/node-gyp/pull/2034)
* [[`48642191f5`](https://github.com/nodejs/node-gyp/commit/48642191f5)] - **doc**: add download link for Command Line Tools for Xcode (Przemysław Bitkowski) [#2029](https://github.com/nodejs/node-gyp/pull/2029)
* [[`ae5b150051`](https://github.com/nodejs/node-gyp/commit/ae5b150051)] - **doc**: Catalina suggestion: remove /Library/Developer/CommandLineTools (Christian Clauss) [#2022](https://github.com/nodejs/node-gyp/pull/2022)
* [[`d1dea13fe4`](https://github.com/nodejs/node-gyp/commit/d1dea13fe4)] - **doc**: fix changelog 6.1.0 release year to be 2020 (Quentin Vernot) [#2021](https://github.com/nodejs/node-gyp/pull/2021)
* [[`6356117b08`](https://github.com/nodejs/node-gyp/commit/6356117b08)] - **doc, bin**: stop suggesting opening node-gyp issues (Bartosz Sosnowski) [#2096](https://github.com/nodejs/node-gyp/pull/2096)
* [[`a6b76a8b48`](https://github.com/nodejs/node-gyp/commit/a6b76a8b48)] - **gyp**: update gyp to 0.2.1 (Ujjwal Sharma) [#2092](https://github.com/nodejs/node-gyp/pull/2092)
* [[`ebc34ec823`](https://github.com/nodejs/node-gyp/commit/ebc34ec823)] - **gyp**: update gyp to 0.2.0 (Ujjwal Sharma) [#2092](https://github.com/nodejs/node-gyp/pull/2092)
* [[`972780bde7`](https://github.com/nodejs/node-gyp/commit/972780bde7)] - **(SEMVER-MAJOR)** **gyp**: sync code base with nodejs repo (#1975) (Michaël Zasso) [#1975](https://github.com/nodejs/node-gyp/pull/1975)
* [[`c255ffbf6a`](https://github.com/nodejs/node-gyp/commit/c255ffbf6a)] - **lib**: drop "-2" flag for "py.exe" launcher (DeeDeeG) [#2131](https://github.com/nodejs/node-gyp/pull/2131)
* [[`1f7e1e93b5`](https://github.com/nodejs/node-gyp/commit/1f7e1e93b5)] - **lib**: ignore VS instances that cause COMExceptions (Andrew Casey) [#2018](https://github.com/nodejs/node-gyp/pull/2018)
* [[`741ab096d5`](https://github.com/nodejs/node-gyp/commit/741ab096d5)] - **test**: remove support for EOL versions of Node.js (Shelley Vohr)
* [[`ca86ef2539`](https://github.com/nodejs/node-gyp/commit/ca86ef2539)] - **test**: bump actions/checkout from v1 to v2 (BSKY) [#2063](https://github.com/nodejs/node-gyp/pull/2063)

## v6.1.0 2020-01-08

* [[`9a7dd16b76`](https://github.com/nodejs/node-gyp/commit/9a7dd16b76)] - **doc**: remove backticks from Python version list (Rod Vagg) [#2011](https://github.com/nodejs/node-gyp/pull/2011)
* [[`26cd6eaea6`](https://github.com/nodejs/node-gyp/commit/26cd6eaea6)] - **doc**: add GitHub Actions badge (#1994) (Rod Vagg) [#1994](https://github.com/nodejs/node-gyp/pull/1994)
* [[`312c12ef4f`](https://github.com/nodejs/node-gyp/commit/312c12ef4f)] - **doc**: update macOS\_Catalina.md (#1992) (James Home) [#1992](https://github.com/nodejs/node-gyp/pull/1992)
* [[`f7b6b6b77b`](https://github.com/nodejs/node-gyp/commit/f7b6b6b77b)] - **doc**: fix typo in README.md (#1985) (Suraneti Rodsuwan) [#1985](https://github.com/nodejs/node-gyp/pull/1985)
* [[`6b8f2652dd`](https://github.com/nodejs/node-gyp/commit/6b8f2652dd)] - **doc**: add travis badge (Rod Vagg) [#1971](https://github.com/nodejs/node-gyp/pull/1971)
* [[`20aa0b44f7`](https://github.com/nodejs/node-gyp/commit/20aa0b44f7)] - **doc**: macOS Catalina add two commands (Christian Clauss) [#1962](https://github.com/nodejs/node-gyp/pull/1962)
* [[`14f2a07a39`](https://github.com/nodejs/node-gyp/commit/14f2a07a39)] - **gyp**: list(dict) so we can del dict(key) while iterating (Christian Clauss) [#2009](https://github.com/nodejs/node-gyp/pull/2009)
* [[`f242ce4d2c`](https://github.com/nodejs/node-gyp/commit/f242ce4d2c)] - **lib**: compatibility with semver ≥ 7 (`new` for semver.Range) (Xavier Guimard) [#2006](https://github.com/nodejs/node-gyp/pull/2006)
* [[`3bcba2a01a`](https://github.com/nodejs/node-gyp/commit/3bcba2a01a)] - **(SEMVER-MINOR)** **lib**: noproxy support, match proxy detection to `request` (Matias Lopez) [#1978](https://github.com/nodejs/node-gyp/pull/1978)
* [[`470cc2178e`](https://github.com/nodejs/node-gyp/commit/470cc2178e)] - **test**: remove old docker test harness (#1993) (Rod Vagg) [#1993](https://github.com/nodejs/node-gyp/pull/1993)
* [[`31ecc8421d`](https://github.com/nodejs/node-gyp/commit/31ecc8421d)] - **test**: add Windows to GitHub Actions testing (#1996) (Christian Clauss) [#1996](https://github.com/nodejs/node-gyp/pull/1996)
* [[`5a729e86ee`](https://github.com/nodejs/node-gyp/commit/5a729e86ee)] - **test**: fix typo in header download test (#2001) (Richard Lau) [#2001](https://github.com/nodejs/node-gyp/pull/2001)
* [[`345c70e56d`](https://github.com/nodejs/node-gyp/commit/345c70e56d)] - **test**: direct python invocation & simpler pyenv (Matias Lopez) [#1979](https://github.com/nodejs/node-gyp/pull/1979)
* [[`d6a7e0e1fb`](https://github.com/nodejs/node-gyp/commit/d6a7e0e1fb)] - **test**: fix macOS Travis on Python 2.7 & 3.7 (Christian Clauss) [#1979](https://github.com/nodejs/node-gyp/pull/1979)
* [[`5a64e9bd32`](https://github.com/nodejs/node-gyp/commit/5a64e9bd32)] - **test**: initial Github Actions with Ubuntu & macOS (Christian Clauss) [#1985](https://github.com/nodejs/node-gyp/pull/1985)
* [[`04da736d38`](https://github.com/nodejs/node-gyp/commit/04da736d38)] - **test**: fix Python unittests (cclauss) [#1961](https://github.com/nodejs/node-gyp/pull/1961)
* [[`0670e5189d`](https://github.com/nodejs/node-gyp/commit/0670e5189d)] - **test**: add header download test (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796)
* [[`c506a6a150`](https://github.com/nodejs/node-gyp/commit/c506a6a150)] - **test**: configure proper devDir for invoking configure() (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796)

## v6.0.1 2019-11-01

* [[`8ec2e681d5`](https://github.com/nodejs/node-gyp/commit/8ec2e681d5)] - **doc**: add macOS\_Catalina.md document (cclauss) [#1940](https://github.com/nodejs/node-gyp/pull/1940)
* [[`1b11be63cc`](https://github.com/nodejs/node-gyp/commit/1b11be63cc)] - **gyp**: python3 fixes: utf8 decode, use of 'None' in eval (Wilfried Goesgens) [#1925](https://github.com/nodejs/node-gyp/pull/1925)
* [[`c0282daa48`](https://github.com/nodejs/node-gyp/commit/c0282daa48)] - **gyp**: iteritems() -\> items() in compile\_commands\_json.py (cclauss) [#1947](https://github.com/nodejs/node-gyp/pull/1947)
* [[`d8e09a1b6a`](https://github.com/nodejs/node-gyp/commit/d8e09a1b6a)] - **gyp**: make cmake python3 compatible (gengjiawen) [#1944](https://github.com/nodejs/node-gyp/pull/1944)
* [[`9c0f3404f0`](https://github.com/nodejs/node-gyp/commit/9c0f3404f0)] - **gyp**: fix TypeError in XcodeVersion() (Christian Clauss) [#1939](https://github.com/nodejs/node-gyp/pull/1939)
* [[`bb2eb72a3f`](https://github.com/nodejs/node-gyp/commit/bb2eb72a3f)] - **gyp**: finish decode stdout on Python 3 (Christian Clauss) [#1937](https://github.com/nodejs/node-gyp/pull/1937)
* [[`f0693413d9`](https://github.com/nodejs/node-gyp/commit/f0693413d9)] - **src,win**: allow 403 errors for arm64 node.lib (Richard Lau) [#1934](https://github.com/nodejs/node-gyp/pull/1934)
* [[`c60c22de58`](https://github.com/nodejs/node-gyp/commit/c60c22de58)] - **deps**: update deps to roughly match current npm@6 (Rod Vagg) [#1920](https://github.com/nodejs/node-gyp/pull/1920)
* [[`b91718eefc`](https://github.com/nodejs/node-gyp/commit/b91718eefc)] - **test**: upgrade Linux Travis CI to Python 3.8 (Christian Clauss) [#1923](https://github.com/nodejs/node-gyp/pull/1923)
* [[`3538a317b6`](https://github.com/nodejs/node-gyp/commit/3538a317b6)] - **doc**: adjustments to the README.md for new users (Dan Pike) [#1919](https://github.com/nodejs/node-gyp/pull/1919)
* [[`4fff8458c0`](https://github.com/nodejs/node-gyp/commit/4fff8458c0)] - **travis**: ignore failed `brew upgrade npm`, update xcode (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932)
* [[`60e4488f08`](https://github.com/nodejs/node-gyp/commit/60e4488f08)] - **build**: avoid bare exceptions in xcode\_emulation.py (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932)
* [[`032db2a2d0`](https://github.com/nodejs/node-gyp/commit/032db2a2d0)] - **lib,install**: always download SHA sums on Windows (Sam Hughes) [#1926](https://github.com/nodejs/node-gyp/pull/1926)
* [[`5a83630c33`](https://github.com/nodejs/node-gyp/commit/5a83630c33)] - **travis**: add Windows + Python 3.8 to the mix (Rod Vagg) [#1921](https://github.com/nodejs/node-gyp/pull/1921)

## v6.0.0 2019-10-04

* [[`dd0e97ef0b`](https://github.com/nodejs/node-gyp/commit/dd0e97ef0b)] - **(SEMVER-MAJOR)** **lib**: try to find `python` after `python3` (Sam Roberts) [#1907](https://github.com/nodejs/node-gyp/pull/1907)
* [[`f60ed47d14`](https://github.com/nodejs/node-gyp/commit/f60ed47d14)] - **travis**: add Python 3.5 and 3.6 tests on Linux (Christian Clauss) [#1903](https://github.com/nodejs/node-gyp/pull/1903)
* [[`c763ca1838`](https://github.com/nodejs/node-gyp/commit/c763ca1838)] - **(SEMVER-MAJOR)** **doc**: Declare that node-gyp is Python 3 compatible (cclauss) [#1811](https://github.com/nodejs/node-gyp/pull/1811)
* [[`3d1c60ab81`](https://github.com/nodejs/node-gyp/commit/3d1c60ab81)] - **(SEMVER-MAJOR)** **lib**: accept Python 3 by default (João Reis) [#1844](https://github.com/nodejs/node-gyp/pull/1844)
* [[`c6e3b65a23`](https://github.com/nodejs/node-gyp/commit/c6e3b65a23)] - **(SEMVER-MAJOR)** **lib**: raise the minimum Python version from 2.6 to 2.7 (cclauss) [#1818](https://github.com/nodejs/node-gyp/pull/1818)

## v5.1.1 2020-05-25

* [[`bdd3a79abe`](https://github.com/nodejs/node-gyp/commit/bdd3a79abe)] - **build**: shrink bloated addon binaries on windows (Shelley Vohr) [#2060](https://github.com/nodejs/node-gyp/pull/2060)
* [[`1f2ba75bc0`](https://github.com/nodejs/node-gyp/commit/1f2ba75bc0)] - **doc**: add macOS Catalina software update info (Karl Horky) [#2078](https://github.com/nodejs/node-gyp/pull/2078)
* [[`c106d915f5`](https://github.com/nodejs/node-gyp/commit/c106d915f5)] - **doc**: update catalina xcode cli tools download link (#2044) (Dario Vladović) [#2044](https://github.com/nodejs/node-gyp/pull/2044)
* [[`9a6fea92e2`](https://github.com/nodejs/node-gyp/commit/9a6fea92e2)] - **doc**: update catalina xcode cli tools download link; formatting (Jonathan Hult) [#2034](https://github.com/nodejs/node-gyp/pull/2034)
* [[`59b0b1add8`](https://github.com/nodejs/node-gyp/commit/59b0b1add8)] - **doc**: add download link for Command Line Tools for Xcode (Przemysław Bitkowski) [#2029](https://github.com/nodejs/node-gyp/pull/2029)
* [[`bb8d0e7b10`](https://github.com/nodejs/node-gyp/commit/bb8d0e7b10)] - **doc**: Catalina suggestion: remove /Library/Developer/CommandLineTools (Christian Clauss) [#2022](https://github.com/nodejs/node-gyp/pull/2022)
* [[`fb2e80d4e3`](https://github.com/nodejs/node-gyp/commit/fb2e80d4e3)] - **doc**: update link to the code of conduct (#2073) (Michaël Zasso) [#2073](https://github.com/nodejs/node-gyp/pull/2073)
* [[`251d9c885c`](https://github.com/nodejs/node-gyp/commit/251d9c885c)] - **doc**: note in README that Python 3.8 is supported (#2072) (Michaël Zasso) [#2072](https://github.com/nodejs/node-gyp/pull/2072)
* [[`2b6fc3c8d6`](https://github.com/nodejs/node-gyp/commit/2b6fc3c8d6)] - **doc, bin**: stop suggesting opening node-gyp issues (Bartosz Sosnowski) [#2096](https://github.com/nodejs/node-gyp/pull/2096)
* [[`a876ae58ad`](https://github.com/nodejs/node-gyp/commit/a876ae58ad)] - **test**: bump actions/checkout from v1 to v2 (BSKY) [#2063](https://github.com/nodejs/node-gyp/pull/2063)

## v5.1.0 2020-02-05

* [[`f37a8b40d0`](https://github.com/nodejs/node-gyp/commit/f37a8b40d0)] - **doc**: add GitHub Actions badge (#1994) (Rod Vagg) [#1994](https://github.com/nodejs/node-gyp/pull/1994)
* [[`cb3f6aae5e`](https://github.com/nodejs/node-gyp/commit/cb3f6aae5e)] - **doc**: update macOS\_Catalina.md (#1992) (James Home) [#1992](https://github.com/nodejs/node-gyp/pull/1992)
* [[`0607596a4c`](https://github.com/nodejs/node-gyp/commit/0607596a4c)] - **doc**: fix typo in README.md (#1985) (Suraneti Rodsuwan) [#1985](https://github.com/nodejs/node-gyp/pull/1985)
* [[`0d5a415a14`](https://github.com/nodejs/node-gyp/commit/0d5a415a14)] - **doc**: add travis badge (Rod Vagg) [#1971](https://github.com/nodejs/node-gyp/pull/1971)
* [[`103740cd95`](https://github.com/nodejs/node-gyp/commit/103740cd95)] - **gyp**: list(dict) so we can del dict(key) while iterating (Christian Clauss) [#2009](https://github.com/nodejs/node-gyp/pull/2009)
* [[`278dcddbdd`](https://github.com/nodejs/node-gyp/commit/278dcddbdd)] - **lib**: ignore VS instances that cause COMExceptions (Andrew Casey) [#2018](https://github.com/nodejs/node-gyp/pull/2018)
* [[`1694907bbf`](https://github.com/nodejs/node-gyp/commit/1694907bbf)] - **lib**: compatibility with semver ≥ 7 (`new` for semver.Range) (Xavier Guimard) [#2006](https://github.com/nodejs/node-gyp/pull/2006)
* [[`a3f1143514`](https://github.com/nodejs/node-gyp/commit/a3f1143514)] - **(SEMVER-MINOR)** **lib**: noproxy support, match proxy detection to `request` (Matias Lopez) [#1978](https://github.com/nodejs/node-gyp/pull/1978)
* [[`52365819c7`](https://github.com/nodejs/node-gyp/commit/52365819c7)] - **test**: remove old docker test harness (#1993) (Rod Vagg) [#1993](https://github.com/nodejs/node-gyp/pull/1993)
* [[`bc509c511d`](https://github.com/nodejs/node-gyp/commit/bc509c511d)] - **test**: add Windows to GitHub Actions testing (#1996) (Christian Clauss) [#1996](https://github.com/nodejs/node-gyp/pull/1996)
* [[`91ee26dd48`](https://github.com/nodejs/node-gyp/commit/91ee26dd48)] - **test**: fix typo in header download test (#2001) (Richard Lau) [#2001](https://github.com/nodejs/node-gyp/pull/2001)
* [[`0923f344c9`](https://github.com/nodejs/node-gyp/commit/0923f344c9)] - **test**: direct python invocation & simpler pyenv (Matias Lopez) [#1979](https://github.com/nodejs/node-gyp/pull/1979)
* [[`32c8744b34`](https://github.com/nodejs/node-gyp/commit/32c8744b34)] - **test**: fix macOS Travis on Python 2.7 & 3.7 (Christian Clauss) [#1979](https://github.com/nodejs/node-gyp/pull/1979)
* [[`fd4b1351e4`](https://github.com/nodejs/node-gyp/commit/fd4b1351e4)] - **test**: initial Github Actions with Ubuntu & macOS (Christian Clauss) [#1985](https://github.com/nodejs/node-gyp/pull/1985)

## v5.0.7 2019-12-16

Republish of v5.0.6 with unnecessary tarball removed from pack file.

## v5.0.6 2019-12-16

* [[`cdec00286f`](https://github.com/nodejs/node-gyp/commit/cdec00286f)] - **doc**: adjustments to the README.md for new users (Dan Pike) [#1919](https://github.com/nodejs/node-gyp/pull/1919)
* [[`b7c8233ef2`](https://github.com/nodejs/node-gyp/commit/b7c8233ef2)] - **test**: fix Python unittests (cclauss) [#1961](https://github.com/nodejs/node-gyp/pull/1961)
* [[`e12b00ab0a`](https://github.com/nodejs/node-gyp/commit/e12b00ab0a)] - **doc**: macOS Catalina add two commands (Christian Clauss) [#1962](https://github.com/nodejs/node-gyp/pull/1962)
* [[`70b9890c0d`](https://github.com/nodejs/node-gyp/commit/70b9890c0d)] - **test**: add header download test (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796)
* [[`4029fa8629`](https://github.com/nodejs/node-gyp/commit/4029fa8629)] - **test**: configure proper devDir for invoking configure() (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796)
* [[`fe8b02cc8b`](https://github.com/nodejs/node-gyp/commit/fe8b02cc8b)] - **doc**: add macOS\_Catalina.md document (cclauss) [#1940](https://github.com/nodejs/node-gyp/pull/1940)
* [[`8ea47ce365`](https://github.com/nodejs/node-gyp/commit/8ea47ce365)] - **gyp**: python3 fixes: utf8 decode, use of 'None' in eval (Wilfried Goesgens) [#1925](https://github.com/nodejs/node-gyp/pull/1925)
* [[`c7229716ba`](https://github.com/nodejs/node-gyp/commit/c7229716ba)] - **gyp**: iteritems() -\> items() in compile\_commands\_json.py (cclauss) [#1947](https://github.com/nodejs/node-gyp/pull/1947)
* [[`2a18b2a0f8`](https://github.com/nodejs/node-gyp/commit/2a18b2a0f8)] - **gyp**: make cmake python3 compatible (gengjiawen) [#1944](https://github.com/nodejs/node-gyp/pull/1944)
* [[`70f391e844`](https://github.com/nodejs/node-gyp/commit/70f391e844)] - **gyp**: fix TypeError in XcodeVersion() (Christian Clauss) [#1939](https://github.com/nodejs/node-gyp/pull/1939)
* [[`9f4f0fa34e`](https://github.com/nodejs/node-gyp/commit/9f4f0fa34e)] - **gyp**: finish decode stdout on Python 3 (Christian Clauss) [#1937](https://github.com/nodejs/node-gyp/pull/1937)
* [[`7cf507906d`](https://github.com/nodejs/node-gyp/commit/7cf507906d)] - **src,win**: allow 403 errors for arm64 node.lib (Richard Lau) [#1934](https://github.com/nodejs/node-gyp/pull/1934)
* [[`ad0d182c01`](https://github.com/nodejs/node-gyp/commit/ad0d182c01)] - **deps**: update deps to roughly match current npm@6 (Rod Vagg) [#1920](https://github.com/nodejs/node-gyp/pull/1920)
* [[`1553081ed6`](https://github.com/nodejs/node-gyp/commit/1553081ed6)] - **test**: upgrade Linux Travis CI to Python 3.8 (Christian Clauss) [#1923](https://github.com/nodejs/node-gyp/pull/1923)
* [[`0705cae9aa`](https://github.com/nodejs/node-gyp/commit/0705cae9aa)] - **travis**: ignore failed `brew upgrade npm`, update xcode (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932)
* [[`7bfdb6f5bf`](https://github.com/nodejs/node-gyp/commit/7bfdb6f5bf)] - **build**: avoid bare exceptions in xcode\_emulation.py (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932)
* [[`7edf7658fa`](https://github.com/nodejs/node-gyp/commit/7edf7658fa)] - **lib,install**: always download SHA sums on Windows (Sam Hughes) [#1926](https://github.com/nodejs/node-gyp/pull/1926)
* [[`69056d04fe`](https://github.com/nodejs/node-gyp/commit/69056d04fe)] - **travis**: add Windows + Python 3.8 to the mix (Rod Vagg) [#1921](https://github.com/nodejs/node-gyp/pull/1921)

## v5.0.5 2019-10-04

* [[`3891391746`](https://github.com/nodejs/node-gyp/commit/3891391746)] - **doc**: reconcile README with Python 3 compat changes (Rod Vagg) [#1911](https://github.com/nodejs/node-gyp/pull/1911)
* [[`07f81f1920`](https://github.com/nodejs/node-gyp/commit/07f81f1920)] - **lib**: accept Python 3 after Python 2 (Sam Roberts) [#1910](https://github.com/nodejs/node-gyp/pull/1910)
* [[`04ce59f4a2`](https://github.com/nodejs/node-gyp/commit/04ce59f4a2)] - **doc**: clarify Python configuration, etc (Sam Roberts) [#1908](https://github.com/nodejs/node-gyp/pull/1908)
* [[`01c46ee3df`](https://github.com/nodejs/node-gyp/commit/01c46ee3df)] - **gyp**: add \_\_lt\_\_ to MSVSSolutionEntry (João Reis) [#1904](https://github.com/nodejs/node-gyp/pull/1904)
* [[`735d961b99`](https://github.com/nodejs/node-gyp/commit/735d961b99)] - **win**: support VS 2017 Desktop Express (João Reis) [#1902](https://github.com/nodejs/node-gyp/pull/1902)
* [[`3834156a92`](https://github.com/nodejs/node-gyp/commit/3834156a92)] - **test**: add Python 3.5 and 3.6 tests on Linux (cclauss) [#1909](https://github.com/nodejs/node-gyp/pull/1909)
* [[`1196e990d8`](https://github.com/nodejs/node-gyp/commit/1196e990d8)] - **src**: update to standard@14 (Rod Vagg) [#1899](https://github.com/nodejs/node-gyp/pull/1899)
* [[`53ee7dfe89`](https://github.com/nodejs/node-gyp/commit/53ee7dfe89)] - **gyp**: fix undefined name: cflags --\> ldflags (Christian Clauss) [#1901](https://github.com/nodejs/node-gyp/pull/1901)
* [[`5871dcf6c9`](https://github.com/nodejs/node-gyp/commit/5871dcf6c9)] - **src,win**: add support for fetching arm64 node.lib (Richard Townsend) [#1875](https://github.com/nodejs/node-gyp/pull/1875)

## v5.0.4 2019-09-27

* [[`1236869ffc`](https://github.com/nodejs/node-gyp/commit/1236869ffc)] - **gyp**: modify XcodeVersion() to convert "4.2" to "0420" and "10.0" to "1000" (Christian Clauss) [#1895](https://github.com/nodejs/node-gyp/pull/1895)
* [[`36638afe48`](https://github.com/nodejs/node-gyp/commit/36638afe48)] - **gyp**: more decode stdout on Python 3 (cclauss) [#1894](https://github.com/nodejs/node-gyp/pull/1894)
* [[`f753c167c5`](https://github.com/nodejs/node-gyp/commit/f753c167c5)] - **gyp**: decode stdout on Python 3 (cclauss) [#1890](https://github.com/nodejs/node-gyp/pull/1890)
* [[`60a4083523`](https://github.com/nodejs/node-gyp/commit/60a4083523)] - **doc**: update xcode install instructions to match Node's BUILDING (Nhan Khong) [#1884](https://github.com/nodejs/node-gyp/pull/1884)
* [[`19dbc9ac32`](https://github.com/nodejs/node-gyp/commit/19dbc9ac32)] - **deps**: update tar to 4.4.12 (Matheus Marchini) [#1889](https://github.com/nodejs/node-gyp/pull/1889)
* [[`5f3ed92181`](https://github.com/nodejs/node-gyp/commit/5f3ed92181)] - **bin**: fix the usage instructions (Halit Ogunc) [#1888](https://github.com/nodejs/node-gyp/pull/1888)
* [[`aab118edf1`](https://github.com/nodejs/node-gyp/commit/aab118edf1)] - **lib**: adding keep-alive header to download requests (Milad Farazmand) [#1863](https://github.com/nodejs/node-gyp/pull/1863)
* [[`1186e89326`](https://github.com/nodejs/node-gyp/commit/1186e89326)] - **lib**: ignore non-critical os.userInfo() failures (Rod Vagg) [#1835](https://github.com/nodejs/node-gyp/pull/1835)
* [[`785e527c3d`](https://github.com/nodejs/node-gyp/commit/785e527c3d)] - **doc**: fix missing argument for setting python path (lagorsse) [#1802](https://github.com/nodejs/node-gyp/pull/1802)
* [[`a97615196c`](https://github.com/nodejs/node-gyp/commit/a97615196c)] - **gyp**: rm semicolons (Python != JavaScript) (MattIPv4) [#1858](https://github.com/nodejs/node-gyp/pull/1858)
* [[`06019bac24`](https://github.com/nodejs/node-gyp/commit/06019bac24)] - **gyp**: assorted typo fixes (XhmikosR) [#1853](https://github.com/nodejs/node-gyp/pull/1853)
* [[`3f4972c1ca`](https://github.com/nodejs/node-gyp/commit/3f4972c1ca)] - **gyp**: use "is" when comparing to None (Vladyslav Burzakovskyy) [#1860](https://github.com/nodejs/node-gyp/pull/1860)
* [[`1cb4708073`](https://github.com/nodejs/node-gyp/commit/1cb4708073)] - **src,win**: improve unmanaged handling (Peter Sabath) [#1852](https://github.com/nodejs/node-gyp/pull/1852)
* [[`5553cd910e`](https://github.com/nodejs/node-gyp/commit/5553cd910e)] - **gyp**: improve Windows+Cygwin compatibility (Jose Quijada) [#1817](https://github.com/nodejs/node-gyp/pull/1817)
* [[`8bcb1fbb43`](https://github.com/nodejs/node-gyp/commit/8bcb1fbb43)] - **gyp**: Python 3 Windows fixes (João Reis) [#1843](https://github.com/nodejs/node-gyp/pull/1843)
* [[`2e24d0a326`](https://github.com/nodejs/node-gyp/commit/2e24d0a326)] - **test**: accept Python 3 in test-find-python.js (João Reis) [#1843](https://github.com/nodejs/node-gyp/pull/1843)
* [[`1267b4dc1c`](https://github.com/nodejs/node-gyp/commit/1267b4dc1c)] - **build**: add test run Python 3.7 on macOS (Christian Clauss) [#1843](https://github.com/nodejs/node-gyp/pull/1843)
* [[`da1b031aa3`](https://github.com/nodejs/node-gyp/commit/da1b031aa3)] - **build**: import StringIO on Python 2 and Python 3 (Christian Clauss) [#1836](https://github.com/nodejs/node-gyp/pull/1836)
* [[`fa0ed4aa42`](https://github.com/nodejs/node-gyp/commit/fa0ed4aa42)] - **build**: more Python 3 compat, replace compile with ast (cclauss) [#1820](https://github.com/nodejs/node-gyp/pull/1820)
* [[`18d5c7c9d0`](https://github.com/nodejs/node-gyp/commit/18d5c7c9d0)] - **win,src**: update win\_delay\_load\_hook.cc to work with /clr (Ivan Petrovic) [#1819](https://github.com/nodejs/node-gyp/pull/1819)

## v5.0.3 2019-07-17

* [[`66ad305775`](https://github.com/nodejs/node-gyp/commit/66ad305775)] - **python**: accept Python 3 conditionally (João Reis) [#1815](https://github.com/nodejs/node-gyp/pull/1815)
* [[`7e7fce3fed`](https://github.com/nodejs/node-gyp/commit/7e7fce3fed)] - **python**: move Python detection to its own file (João Reis) [#1815](https://github.com/nodejs/node-gyp/pull/1815)
* [[`e40c99e283`](https://github.com/nodejs/node-gyp/commit/e40c99e283)] - **src**: implement standard.js linting (Rod Vagg) [#1794](https://github.com/nodejs/node-gyp/pull/1794)
* [[`bb92c761a9`](https://github.com/nodejs/node-gyp/commit/bb92c761a9)] - **test**: add Node.js 6 on Windows to Travis CI (João Reis) [#1812](https://github.com/nodejs/node-gyp/pull/1812)
* [[`7fd924079f`](https://github.com/nodejs/node-gyp/commit/7fd924079f)] - **test**: increase tap timeout (João Reis) [#1812](https://github.com/nodejs/node-gyp/pull/1812)
* [[`7e8127068f`](https://github.com/nodejs/node-gyp/commit/7e8127068f)] - **test**: cover supported node versions with travis (Rod Vagg) [#1809](https://github.com/nodejs/node-gyp/pull/1809)
* [[`24109148df`](https://github.com/nodejs/node-gyp/commit/24109148df)] - **test**: downgrade to tap@^12 for continued Node 6 support (Rod Vagg) [#1808](https://github.com/nodejs/node-gyp/pull/1808)
* [[`656117cc4a`](https://github.com/nodejs/node-gyp/commit/656117cc4a)] - **win**: make VS path match case-insensitive (João Reis) [#1806](https://github.com/nodejs/node-gyp/pull/1806)

## v5.0.2 2019-06-27

* [[`2761afbf73`](https://github.com/nodejs/node-gyp/commit/2761afbf73)] - **build,test**: add duplicate symbol test (Gabriel Schulhof) [#1689](https://github.com/nodejs/node-gyp/pull/1689)
* [[`82f129d6de`](https://github.com/nodejs/node-gyp/commit/82f129d6de)] - **gyp**: replace optparse to argparse (KiYugadgeter) [#1591](https://github.com/nodejs/node-gyp/pull/1591)
* [[`afaaa29c61`](https://github.com/nodejs/node-gyp/commit/afaaa29c61)] - **gyp**: remove from \_\_future\_\_ import with\_statement (cclauss) [#1799](https://github.com/nodejs/node-gyp/pull/1799)
* [[`a991f633d6`](https://github.com/nodejs/node-gyp/commit/a991f633d6)] - **gyp**: fix the remaining Python 3 issues (cclauss) [#1793](https://github.com/nodejs/node-gyp/pull/1793)
* [[`f952b08f84`](https://github.com/nodejs/node-gyp/commit/f952b08f84)] - **gyp**: move from \_\_future\_\_ import to the top of the file (cclauss) [#1789](https://github.com/nodejs/node-gyp/pull/1789)
* [[`4f4a677dfa`](https://github.com/nodejs/node-gyp/commit/4f4a677dfa)] - **gyp**: use different default compiler for z/OS (Shuowang (Wayne) Zhang) [#1768](https://github.com/nodejs/node-gyp/pull/1768)
* [[`03683f09d6`](https://github.com/nodejs/node-gyp/commit/03683f09d6)] - **lib**: code de-duplication (Pavel Medvedev) [#965](https://github.com/nodejs/node-gyp/pull/965)
* [[`611bc3c89f`](https://github.com/nodejs/node-gyp/commit/611bc3c89f)] - **lib**: add .json suffix for explicit require (Rod Vagg) [#1787](https://github.com/nodejs/node-gyp/pull/1787)
* [[`d3478d7b0b`](https://github.com/nodejs/node-gyp/commit/d3478d7b0b)] - **meta**: add to .gitignore (Refael Ackermann) [#1573](https://github.com/nodejs/node-gyp/pull/1573)
* [[`7a9a038e9e`](https://github.com/nodejs/node-gyp/commit/7a9a038e9e)] - **test**: add parallel test runs on macOS and Windows (cclauss) [#1800](https://github.com/nodejs/node-gyp/pull/1800)
* [[`7dd7f2b2a2`](https://github.com/nodejs/node-gyp/commit/7dd7f2b2a2)] - **test**: fix Python syntax error in test-adding.js (cclauss) [#1793](https://github.com/nodejs/node-gyp/pull/1793)
* [[`395f843de0`](https://github.com/nodejs/node-gyp/commit/395f843de0)] - **test**: replace self-signed cert with 'localhost' (Rod Vagg) [#1795](https://github.com/nodejs/node-gyp/pull/1795)
* [[`a52c6eb9e8`](https://github.com/nodejs/node-gyp/commit/a52c6eb9e8)] - **test**: migrate from tape to tap (Rod Vagg) [#1795](https://github.com/nodejs/node-gyp/pull/1795)
* [[`ec2eb44a30`](https://github.com/nodejs/node-gyp/commit/ec2eb44a30)] - **test**: use Nan in duplicate\_symbols (Gabriel Schulhof) [#1689](https://github.com/nodejs/node-gyp/pull/1689)
* [[`1597c84aad`](https://github.com/nodejs/node-gyp/commit/1597c84aad)] - **test**: use Travis CI to run tests on every pull request (cclauss) [#1752](https://github.com/nodejs/node-gyp/pull/1752)
* [[`dd9bf929ac`](https://github.com/nodejs/node-gyp/commit/dd9bf929ac)] - **zos**: update compiler options (Shuowang (Wayne) Zhang) [#1768](https://github.com/nodejs/node-gyp/pull/1768)

## v5.0.1 2019-06-20

* [[`e3861722ed`](https://github.com/nodejs/node-gyp/commit/e3861722ed)] - **doc**: document --jobs max (David Sanders) [#1770](https://github.com/nodejs/node-gyp/pull/1770)
* [[`1cfdb28886`](https://github.com/nodejs/node-gyp/commit/1cfdb28886)] - **lib**: reintroduce support for iojs file naming for releases \>= 1 && \< 4 (Samuel Attard) [#1777](https://github.com/nodejs/node-gyp/pull/1777)

## v5.0.0 2019-06-13

* [[`8a83972743`](https://github.com/nodejs/node-gyp/commit/8a83972743)] - **(SEMVER-MAJOR)** **bin**: follow XDG OS conventions for storing data (Selwyn) [#1570](https://github.com/nodejs/node-gyp/pull/1570)
* [[`9e46872ea3`](https://github.com/nodejs/node-gyp/commit/9e46872ea3)] - **bin,lib**: remove extra comments/lines/spaces (Jon Moss) [#1508](https://github.com/nodejs/node-gyp/pull/1508)
* [[`8098ebdeb4`](https://github.com/nodejs/node-gyp/commit/8098ebdeb4)] - **deps**: replace `osenv` dependency with native `os` (Selwyn)
* [[`f83b457e03`](https://github.com/nodejs/node-gyp/commit/f83b457e03)] - **deps**: bump request to 2.8.7, fixes heok/hawk issues (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492)
* [[`323cee7323`](https://github.com/nodejs/node-gyp/commit/323cee7323)] - **deps**: pin `request` version range (Refael Ackermann) [#1300](https://github.com/nodejs/node-gyp/pull/1300)
* [[`c515912d08`](https://github.com/nodejs/node-gyp/commit/c515912d08)] - **doc**: improve issue template (Bartosz Sosnowski) [#1618](https://github.com/nodejs/node-gyp/pull/1618)
* [[`cca2d66727`](https://github.com/nodejs/node-gyp/commit/cca2d66727)] - **doc**: python info needs own header (Taylor D. Lee) [#1245](https://github.com/nodejs/node-gyp/pull/1245)
* [[`3e64c780f5`](https://github.com/nodejs/node-gyp/commit/3e64c780f5)] - **doc**: lint README.md (Jon Moss) [#1498](https://github.com/nodejs/node-gyp/pull/1498)
* [[`a20faedc91`](https://github.com/nodejs/node-gyp/commit/a20faedc91)] - **(SEMVER-MAJOR)** **gyp**: enable MARMASM items only on new VS versions (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762)
* [[`721eb691cf`](https://github.com/nodejs/node-gyp/commit/721eb691cf)] - **gyp**: teach MSVS generator about MARMASM Items (Jon Kunkee) [#1679](https://github.com/nodejs/node-gyp/pull/1679)
* [[`91744bfecc`](https://github.com/nodejs/node-gyp/commit/91744bfecc)] - **gyp**: add support for Windows on Arm (Richard Townsend) [#1739](https://github.com/nodejs/node-gyp/pull/1739)
* [[`a6e0a6c7ed`](https://github.com/nodejs/node-gyp/commit/a6e0a6c7ed)] - **gyp**: move compile\_commands\_json (Paul Maréchal) [#1661](https://github.com/nodejs/node-gyp/pull/1661)
* [[`92e8b52cee`](https://github.com/nodejs/node-gyp/commit/92e8b52cee)] - **gyp**: fix target --\> self.target (cclauss)
* [[`febdfa2137`](https://github.com/nodejs/node-gyp/commit/febdfa2137)] - **gyp**: fix sntex error (cclauss) [#1333](https://github.com/nodejs/node-gyp/pull/1333)
* [[`588d333c14`](https://github.com/nodejs/node-gyp/commit/588d333c14)] - **gyp**: \_winreg module was renamed to winreg in Python 3. (Craig Rodrigues)
* [[`98226d198c`](https://github.com/nodejs/node-gyp/commit/98226d198c)] - **gyp**: replace basestring with str, but only on Python 3. (Craig Rodrigues)
* [[`7535e4478e`](https://github.com/nodejs/node-gyp/commit/7535e4478e)] - **gyp**: replace deprecated functions (Craig Rodrigues)
* [[`2040cd21cc`](https://github.com/nodejs/node-gyp/commit/2040cd21cc)] - **gyp**: use print as a function, as specified in PEP 3105. (Craig Rodrigues)
* [[`abef93ded5`](https://github.com/nodejs/node-gyp/commit/abef93ded5)] - **gyp**: get ready for python 3 (cclauss)
* [[`43031fadcb`](https://github.com/nodejs/node-gyp/commit/43031fadcb)] - **python**: clean-up detection (João Reis) [#1582](https://github.com/nodejs/node-gyp/pull/1582)
* [[`49ab79d221`](https://github.com/nodejs/node-gyp/commit/49ab79d221)] - **python**: more informative error (Refael Ackermann) [#1269](https://github.com/nodejs/node-gyp/pull/1269)
* [[`997bc3c748`](https://github.com/nodejs/node-gyp/commit/997bc3c748)] - **readme**: add ARM64 info to MSVC setup instructions (Jon Kunkee) [#1655](https://github.com/nodejs/node-gyp/pull/1655)
* [[`788e767179`](https://github.com/nodejs/node-gyp/commit/788e767179)] - **test**: remove unused variable (João Reis)
* [[`6f5a408934`](https://github.com/nodejs/node-gyp/commit/6f5a408934)] - **tools**: fix usage of inherited -fPIC and -fPIE (Jens) [#1340](https://github.com/nodejs/node-gyp/pull/1340)
* [[`0efb8fb34b`](https://github.com/nodejs/node-gyp/commit/0efb8fb34b)] - **(SEMVER-MAJOR)** **win**: support running in VS Command Prompt (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762)
* [[`360ddbdf3a`](https://github.com/nodejs/node-gyp/commit/360ddbdf3a)] - **(SEMVER-MAJOR)** **win**: add support for Visual Studio 2019 (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762)
* [[`8f43f68275`](https://github.com/nodejs/node-gyp/commit/8f43f68275)] - **(SEMVER-MAJOR)** **win**: detect all VS versions in node-gyp (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762)
* [[`7fe4095974`](https://github.com/nodejs/node-gyp/commit/7fe4095974)] - **(SEMVER-MAJOR)** **win**: generic Visual Studio 2017 detection (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762)
* [[`7a71d68bce`](https://github.com/nodejs/node-gyp/commit/7a71d68bce)] - **win**: use msbuild from the configure stage (Bartosz Sosnowski) [#1654](https://github.com/nodejs/node-gyp/pull/1654)
* [[`d3b21220a0`](https://github.com/nodejs/node-gyp/commit/d3b21220a0)] - **win**: fix delay-load hook for electron 4 (Andy Dill)
* [[`81f3a92338`](https://github.com/nodejs/node-gyp/commit/81f3a92338)] - Update list of Node.js versions to test against. (Ben Noordhuis) [#1670](https://github.com/nodejs/node-gyp/pull/1670)
* [[`4748f6ab75`](https://github.com/nodejs/node-gyp/commit/4748f6ab75)] - Remove deprecated compatibility code. (Ben Noordhuis) [#1670](https://github.com/nodejs/node-gyp/pull/1670)
* [[`45e3221fd4`](https://github.com/nodejs/node-gyp/commit/45e3221fd4)] - Remove an outdated workaround for Python 2.4 (cclauss) [#1650](https://github.com/nodejs/node-gyp/pull/1650)
* [[`721dc7d314`](https://github.com/nodejs/node-gyp/commit/721dc7d314)] - Add ARM64 to MSBuild /Platform logic (Jon Kunkee) [#1655](https://github.com/nodejs/node-gyp/pull/1655)
* [[`a5b7410497`](https://github.com/nodejs/node-gyp/commit/a5b7410497)] - Add ESLint no-unused-vars rule (Jon Moss) [#1497](https://github.com/nodejs/node-gyp/pull/1497)

## v4.0.0 2019-04-24

* [[`ceed5cbe10`](https://github.com/nodejs/node-gyp/commit/ceed5cbe10)] - **deps**: updated tar package version to 4.4.8 (Pobegaylo Maksim) [#1713](https://github.com/nodejs/node-gyp/pull/1713)
* [[`374519e066`](https://github.com/nodejs/node-gyp/commit/374519e066)] - **(SEMVER-MAJOR)** Upgrade to tar v3 (isaacs) [#1212](https://github.com/nodejs/node-gyp/pull/1212)
* [[`e6699d13cd`](https://github.com/nodejs/node-gyp/commit/e6699d13cd)] - **test**: fix addon test for Node.js 12 and V8 7.4 (Richard Lau) [#1705](https://github.com/nodejs/node-gyp/pull/1705)
* [[`0c6bf530a0`](https://github.com/nodejs/node-gyp/commit/0c6bf530a0)] - **lib**: use print() for python version detection (GreenAddress) [#1534](https://github.com/nodejs/node-gyp/pull/1534)

## v3.8.0 2018-08-09

* [[`c5929cb4fe`](https://github.com/nodejs/node-gyp/commit/c5929cb4fe)] - **doc**: update Xcode preferences tab name. (Ivan Daniluk) [#1330](https://github.com/nodejs/node-gyp/pull/1330)
* [[`8b488da8b9`](https://github.com/nodejs/node-gyp/commit/8b488da8b9)] - **doc**: update link to commit guidelines (Jonas Hermsmeier) [#1456](https://github.com/nodejs/node-gyp/pull/1456)
* [[`b4fe8c16f9`](https://github.com/nodejs/node-gyp/commit/b4fe8c16f9)] - **doc**: fix visual studio links (Bartosz Sosnowski) [#1490](https://github.com/nodejs/node-gyp/pull/1490)
* [[`536759c7e9`](https://github.com/nodejs/node-gyp/commit/536759c7e9)] - **configure**: use sys.version\_info to get python version (Yang Guo) [#1504](https://github.com/nodejs/node-gyp/pull/1504)
* [[`94c39c604e`](https://github.com/nodejs/node-gyp/commit/94c39c604e)] - **gyp**: fix ninja build failure (GYP patch) (Daniel Bevenius) [nodejs/node#12484](https://github.com/nodejs/node/pull/12484)
* [[`e8ea74e0fa`](https://github.com/nodejs/node-gyp/commit/e8ea74e0fa)] - **tools**: patch gyp to avoid xcrun errors (Ujjwal Sharma) [nodejs/node#21520](https://github.com/nodejs/node/pull/21520)
* [[`ea9aff44f2`](https://github.com/nodejs/node-gyp/commit/ea9aff44f2)] - **tools**: fix "the the" typos in comments (Masashi Hirano) [nodejs/node#20716](https://github.com/nodejs/node/pull/20716)
* [[`207e5aa4fd`](https://github.com/nodejs/node-gyp/commit/207e5aa4fd)] - **gyp**: implement LD/LDXX for ninja and FIPS (Sam Roberts)
* [[`b416c5f4b7`](https://github.com/nodejs/node-gyp/commit/b416c5f4b7)] - **gyp**: enable cctest to use objects (gyp part) (Daniel Bevenius) [nodejs/node#12450](https://github.com/nodejs/node/pull/12450)
* [[`40692d016b`](https://github.com/nodejs/node-gyp/commit/40692d016b)] - **gyp**: add compile\_commands.json gyp generator (Ben Noordhuis) [nodejs/node#12450](https://github.com/nodejs/node/pull/12450)
* [[`fc3c4e2b10`](https://github.com/nodejs/node-gyp/commit/fc3c4e2b10)] - **gyp**: float gyp patch for long filenames (Anna Henningsen) [nodejs/node#7963](https://github.com/nodejs/node/pull/7963)
* [[`8aedbfdef6`](https://github.com/nodejs/node-gyp/commit/8aedbfdef6)] - **gyp**: backport GYP fix to fix AIX shared suffix (Stewart Addison)
* [[`6cd84b84fc`](https://github.com/nodejs/node-gyp/commit/6cd84b84fc)] - **test**: formatting and minor fixes for execFileSync replacement (Rod Vagg) [#1521](https://github.com/nodejs/node-gyp/pull/1521)
* [[`60e421363f`](https://github.com/nodejs/node-gyp/commit/60e421363f)] - **test**: added test/processExecSync.js for when execFileSync is not available. (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492)
* [[`969447c5bd`](https://github.com/nodejs/node-gyp/commit/969447c5bd)] - **deps**: bump request to 2.8.7, fixes heok/hawk issues (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492)
* [[`340403ccfe`](https://github.com/nodejs/node-gyp/commit/340403ccfe)] - **win**: improve parsing of SDK version (Alessandro Vergani) [#1516](https://github.com/nodejs/node-gyp/pull/1516)

## v3.7.0 2018-06-08

* [[`84cea7b30d`](https://github.com/nodejs/node-gyp/commit/84cea7b30d)] - Remove unused gyp test scripts. (Ben Noordhuis) [#1458](https://github.com/nodejs/node-gyp/pull/1458)
* [[`0540e4ec63`](https://github.com/nodejs/node-gyp/commit/0540e4ec63)] - **gyp**: escape spaces in filenames in make generator (Jeff Senn) [#1436](https://github.com/nodejs/node-gyp/pull/1436)
* [[`88fc6fa0ec`](https://github.com/nodejs/node-gyp/commit/88fc6fa0ec)] - Drop dependency on minimatch. (Brian Woodward) [#1158](https://github.com/nodejs/node-gyp/pull/1158)
* [[`1e203c5148`](https://github.com/nodejs/node-gyp/commit/1e203c5148)] - Fix include path when pointing to Node.js source (Richard Lau) [#1055](https://github.com/nodejs/node-gyp/pull/1055)
* [[`53d8cb967c`](https://github.com/nodejs/node-gyp/commit/53d8cb967c)] - Prefix build targets with /t: on Windows (Natalie Wolfe) [#1164](https://github.com/nodejs/node-gyp/pull/1164)
* [[`53a5f8ff38`](https://github.com/nodejs/node-gyp/commit/53a5f8ff38)] - **gyp**: add support for .mm files to msvs generator (Julien Racle) [#1167](https://github.com/nodejs/node-gyp/pull/1167)
* [[`dd8561e528`](https://github.com/nodejs/node-gyp/commit/dd8561e528)] - **zos**: don't use universal-new-lines mode (John Barboza) [#1451](https://github.com/nodejs/node-gyp/pull/1451)
* [[`e5a69010ed`](https://github.com/nodejs/node-gyp/commit/e5a69010ed)] - **zos**: add search locations for libnode.x (John Barboza) [#1451](https://github.com/nodejs/node-gyp/pull/1451)
* [[`79febace53`](https://github.com/nodejs/node-gyp/commit/79febace53)] - **doc**: update macOS information in README (Josh Parnham) [#1323](https://github.com/nodejs/node-gyp/pull/1323)
* [[`9425448945`](https://github.com/nodejs/node-gyp/commit/9425448945)] - **gyp**: don't print xcodebuild not found errors (Gibson Fahnestock) [#1370](https://github.com/nodejs/node-gyp/pull/1370)
* [[`6f1286f5b2`](https://github.com/nodejs/node-gyp/commit/6f1286f5b2)] - Fix infinite install loop. (Ben Noordhuis) [#1384](https://github.com/nodejs/node-gyp/pull/1384)
* [[`2580b9139e`](https://github.com/nodejs/node-gyp/commit/2580b9139e)] - Update `--nodedir` description in README. (Ben Noordhuis) [#1372](https://github.com/nodejs/node-gyp/pull/1372)
* [[`a61360391a`](https://github.com/nodejs/node-gyp/commit/a61360391a)] - Update README with another way to install on windows (JeffAtDeere) [#1352](https://github.com/nodejs/node-gyp/pull/1352)
* [[`47496bf6dc`](https://github.com/nodejs/node-gyp/commit/47496bf6dc)] - Fix IndexError when parsing GYP files. (Ben Noordhuis) [#1267](https://github.com/nodejs/node-gyp/pull/1267)
* [[`b2024dee7b`](https://github.com/nodejs/node-gyp/commit/b2024dee7b)] - **zos**: support platform (John Barboza) [#1276](https://github.com/nodejs/node-gyp/pull/1276)
* [[`90d86512f4`](https://github.com/nodejs/node-gyp/commit/90d86512f4)] - **win**: run PS with `-NoProfile` (Refael Ackermann) [#1292](https://github.com/nodejs/node-gyp/pull/1292)
* [[`2da5f86ef7`](https://github.com/nodejs/node-gyp/commit/2da5f86ef7)] - **doc**: add github PR and Issue templates (Gibson Fahnestock) [#1228](https://github.com/nodejs/node-gyp/pull/1228)
* [[`a46a770d68`](https://github.com/nodejs/node-gyp/commit/a46a770d68)] - **doc**: update proposed DCO and CoC (Mikeal Rogers) [#1229](https://github.com/nodejs/node-gyp/pull/1229)
* [[`7e803d58e0`](https://github.com/nodejs/node-gyp/commit/7e803d58e0)] - **doc**: headerify the Install instructions (Nick Schonning) [#1225](https://github.com/nodejs/node-gyp/pull/1225)
* [[`f27599193a`](https://github.com/nodejs/node-gyp/commit/f27599193a)] - **gyp**: update xml string encoding conversion (Liu Chao) [#1203](https://github.com/nodejs/node-gyp/pull/1203)
* [[`0a07e481f7`](https://github.com/nodejs/node-gyp/commit/0a07e481f7)] - **configure**: don't set ensure if tarball is set (Gibson Fahnestock) [#1220](https://github.com/nodejs/node-gyp/pull/1220)

## v3.6.3 2018-06-08
|
||||
|
||||
* [[`90cd2e8da9`](https://github.com/nodejs/node-gyp/commit/90cd2e8da9)] - **gyp**: fix regex to match multi-digit versions (Jonas Hermsmeier) [#1455](https://github.com/nodejs/node-gyp/pull/1455)
|
||||
* [[`7900122337`](https://github.com/nodejs/node-gyp/commit/7900122337)] - deps: pin `request` version range (Refael Ackerman) [#1300](https://github.com/nodejs/node-gyp/pull/1300)
|
||||
|
||||
## v3.6.2 2017-06-01
|
||||
|
||||
* [[`72afdd62cd`](https://github.com/nodejs/node-gyp/commit/72afdd62cd)] - **build**: rename copyNodeLib() to doBuild() (Liu Chao) [#1206](https://github.com/nodejs/node-gyp/pull/1206)
|
||||
* [[`bad903ac70`](https://github.com/nodejs/node-gyp/commit/bad903ac70)] - **win**: more robust parsing of SDK version (Refael Ackermann) [#1198](https://github.com/nodejs/node-gyp/pull/1198)
|
||||
* [[`241752f381`](https://github.com/nodejs/node-gyp/commit/241752f381)] - Log dist-url. (Ben Noordhuis) [#1170](https://github.com/nodejs/node-gyp/pull/1170)
|
||||
* [[`386746c7d1`](https://github.com/nodejs/node-gyp/commit/386746c7d1)] - **configure**: use full path in node_lib_file GYP var (Pavel Medvedev) [#964](https://github.com/nodejs/node-gyp/pull/964)
|
||||
* [[`0913b2dd99`](https://github.com/nodejs/node-gyp/commit/0913b2dd99)] - **build, win**: use target_arch to link with node.lib (Pavel Medvedev) [#964](https://github.com/nodejs/node-gyp/pull/964)
|
||||
* [[`c307b302f7`](https://github.com/nodejs/node-gyp/commit/c307b302f7)] - **doc**: blorb about setting `npm_config_OPTION_NAME` (Refael Ackermann) [#1185](https://github.com/nodejs/node-gyp/pull/1185)
|
||||
|
||||
## v3.6.1 2017-04-30
|
||||
|
||||
* [[`49801716c2`](https://github.com/nodejs/node-gyp/commit/49801716c2)] - **test**: fix test-find-python on v0.10.x buildbot. (Ben Noordhuis) [#1172](https://github.com/nodejs/node-gyp/pull/1172)
|
||||
* [[`a83a3801fc`](https://github.com/nodejs/node-gyp/commit/a83a3801fc)] - **test**: fix test/test-configure-python on AIX (Richard Lau) [#1131](https://github.com/nodejs/node-gyp/pull/1131)
|
||||
* [[`8a767145c9`](https://github.com/nodejs/node-gyp/commit/8a767145c9)] - **gyp**: Revert quote_cmd workaround (Kunal Pathak) [#1153](https://github.com/nodejs/node-gyp/pull/1153)
|
||||
* [[`c09cf7671e`](https://github.com/nodejs/node-gyp/commit/c09cf7671e)] - **doc**: add a note for using `configure` on Windows (Vse Mozhet Byt) [#1152](https://github.com/nodejs/node-gyp/pull/1152)
|
||||
* [[`da9cb5f411`](https://github.com/nodejs/node-gyp/commit/da9cb5f411)] - Delete superfluous .patch files. (Ben Noordhuis) [#1122](https://github.com/nodejs/node-gyp/pull/1122)
|
||||
|
||||
## v3.6.0 2017-03-16
|
||||
|
||||
* [[`ae141e1906`](https://github.com/nodejs/node-gyp/commit/ae141e1906)] - **win**: find and setup for VS2017 (Refael Ackermann) [#1130](https://github.com/nodejs/node-gyp/pull/1130)
|
||||
* [[`ec5fc36a80`](https://github.com/nodejs/node-gyp/commit/ec5fc36a80)] - Add support to build node.js with chakracore for ARM. (Kunal Pathak) [#873](https://github.com/nodejs/node-gyp/pull/873)
|
||||
* [[`a04ea3051a`](https://github.com/nodejs/node-gyp/commit/a04ea3051a)] - Add support to build node.js with chakracore. (Kunal Pathak) [#873](https://github.com/nodejs/node-gyp/pull/873)
|
||||
* [[`93d7fa83c8`](https://github.com/nodejs/node-gyp/commit/93d7fa83c8)] - Upgrade semver dependency. (Ben Noordhuis) [#1107](https://github.com/nodejs/node-gyp/pull/1107)
|
||||
* [[`ff9a6fadfd`](https://github.com/nodejs/node-gyp/commit/ff9a6fadfd)] - Update link of gyp as Google code is shutting down (Peter Dave Hello) [#1061](https://github.com/nodejs/node-gyp/pull/1061)
|
||||
|
||||
## v3.5.0 2017-01-10
|
||||
|
||||
* [[`762d19a39e`](https://github.com/nodejs/node-gyp/commit/762d19a39e)] - \[doc\] merge History.md and CHANGELOG.md (Rod Vagg)
|
||||
* [[`80fc5c3d31`](https://github.com/nodejs/node-gyp/commit/80fc5c3d31)] - Fix deprecated dependency warning (Simone Primarosa) [#1069](https://github.com/nodejs/node-gyp/pull/1069)
|
||||
* [[`05c44944fd`](https://github.com/nodejs/node-gyp/commit/05c44944fd)] - Open the build file with universal-newlines mode (Guy Margalit) [#1053](https://github.com/nodejs/node-gyp/pull/1053)
|
||||
* [[`37ae7be114`](https://github.com/nodejs/node-gyp/commit/37ae7be114)] - Try python launcher when stock python is python 3. (Ben Noordhuis) [#992](https://github.com/nodejs/node-gyp/pull/992)
|
||||
* [[`e3778d9907`](https://github.com/nodejs/node-gyp/commit/e3778d9907)] - Add lots of findPython() tests. (Ben Noordhuis) [#992](https://github.com/nodejs/node-gyp/pull/992)
|
||||
* [[`afc766adf6`](https://github.com/nodejs/node-gyp/commit/afc766adf6)] - Unset executable bit for .bat files (Pavel Medvedev) [#969](https://github.com/nodejs/node-gyp/pull/969)
|
||||
* [[`ddac348991`](https://github.com/nodejs/node-gyp/commit/ddac348991)] - Use push on PYTHONPATH and add tests (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990)
|
||||
* [[`b182a19042`](https://github.com/nodejs/node-gyp/commit/b182a19042)] - ***Revert*** "add "path-array" dep" (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990)
|
||||
* [[`7c08b85c5a`](https://github.com/nodejs/node-gyp/commit/7c08b85c5a)] - ***Revert*** "**configure**: use "path-array" for PYTHONPATH" (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990)
|
||||
* [[`9c8d275526`](https://github.com/nodejs/node-gyp/commit/9c8d275526)] - Add --devdir flag. (Ben Noordhuis) [#916](https://github.com/nodejs/node-gyp/pull/916)
|
||||
* [[`f6eab1f9e4`](https://github.com/nodejs/node-gyp/commit/f6eab1f9e4)] - **doc**: add windows-build-tools to readme (Felix Rieseberg) [#970](https://github.com/nodejs/node-gyp/pull/970)
|
||||
|
||||
## v3.4.0 2016-06-28
|
||||
|
||||
* [[`ce5fd04e94`](https://github.com/nodejs/node-gyp/commit/ce5fd04e94)] - **deps**: update minimatch version (delphiactual) [#961](https://github.com/nodejs/node-gyp/pull/961)
|
||||
* [[`77383ddd85`](https://github.com/nodejs/node-gyp/commit/77383ddd85)] - Replace fs.accessSync call to fs.statSync (Richard Lau) [#955](https://github.com/nodejs/node-gyp/pull/955)
|
||||
* [[`0dba4bda57`](https://github.com/nodejs/node-gyp/commit/0dba4bda57)] - **test**: add simple addon test (Richard Lau) [#955](https://github.com/nodejs/node-gyp/pull/955)
|
||||
* [[`c4344b3889`](https://github.com/nodejs/node-gyp/commit/c4344b3889)] - **doc**: add --target option to README (Gibson Fahnestock) [#958](https://github.com/nodejs/node-gyp/pull/958)
|
||||
* [[`cc778e9215`](https://github.com/nodejs/node-gyp/commit/cc778e9215)] - Override BUILDING_UV_SHARED, BUILDING_V8_SHARED. (Ben Noordhuis) [#915](https://github.com/nodejs/node-gyp/pull/915)
|
||||
* [[`af35b2ad32`](https://github.com/nodejs/node-gyp/commit/af35b2ad32)] - Move VC++ Build Tools to Build Tools landing page. (Andrew Pardoe) [#953](https://github.com/nodejs/node-gyp/pull/953)
|
||||
* [[`f31482e226`](https://github.com/nodejs/node-gyp/commit/f31482e226)] - **win**: work around __pfnDliNotifyHook2 type change (Alexis Campailla) [#952](https://github.com/nodejs/node-gyp/pull/952)
|
||||
* [[`3df8222fa5`](https://github.com/nodejs/node-gyp/commit/3df8222fa5)] - Allow for npmlog@3.x (Rebecca Turner) [#950](https://github.com/nodejs/node-gyp/pull/950)
|
||||
* [[`a4fa07b390`](https://github.com/nodejs/node-gyp/commit/a4fa07b390)] - More verbose error on locating msbuild.exe failure. (Mateusz Jaworski) [#930](https://github.com/nodejs/node-gyp/pull/930)
|
||||
* [[`4ee31329e0`](https://github.com/nodejs/node-gyp/commit/4ee31329e0)] - **doc**: add command options to README.md (Gibson Fahnestock) [#937](https://github.com/nodejs/node-gyp/pull/937)
|
||||
* [[`c8c7ca86b9`](https://github.com/nodejs/node-gyp/commit/c8c7ca86b9)] - Add --silent option for zero output. (Gibson Fahnestock) [#937](https://github.com/nodejs/node-gyp/pull/937)
|
||||
* [[`ac29d23a7c`](https://github.com/nodejs/node-gyp/commit/ac29d23a7c)] - Upgrade to glob@7.0.3. (Ben Noordhuis) [#943](https://github.com/nodejs/node-gyp/pull/943)
|
||||
* [[`15fd56be3d`](https://github.com/nodejs/node-gyp/commit/15fd56be3d)] - Enable V8 deprecation warnings for native modules (Matt Loring) [#920](https://github.com/nodejs/node-gyp/pull/920)
|
||||
* [[`7f1c1b960c`](https://github.com/nodejs/node-gyp/commit/7f1c1b960c)] - **gyp**: improvements for android generator (Robert Chiras) [#935](https://github.com/nodejs/node-gyp/pull/935)
|
||||
* [[`088082766c`](https://github.com/nodejs/node-gyp/commit/088082766c)] - Update Windows install instructions (Sara Itani) [#867](https://github.com/nodejs/node-gyp/pull/867)
|
||||
* [[`625c1515f9`](https://github.com/nodejs/node-gyp/commit/625c1515f9)] - **gyp**: inherit CC/CXX for CC/CXX.host (Johan Bergström) [#908](https://github.com/nodejs/node-gyp/pull/908)
|
||||
* [[`3bcb1720e4`](https://github.com/nodejs/node-gyp/commit/3bcb1720e4)] - Add support for the Python launcher on Windows (Patrick Westerhoff) [#894](https://github.com/nodejs/node-gyp/pull/894)

## v3.3.1 2016-03-04

* [[`a981ef847a`](https://github.com/nodejs/node-gyp/commit/a981ef847a)] - **gyp**: fix android generator (Robert Chiras) [#889](https://github.com/nodejs/node-gyp/pull/889)

## v3.3.0 2016-02-16

* [[`818d854a4d`](https://github.com/nodejs/node-gyp/commit/818d854a4d)] - Introduce NODEJS_ORG_MIRROR and IOJS_ORG_MIRROR (Rod Vagg) [#878](https://github.com/nodejs/node-gyp/pull/878)
* [[`d1e4cc4b62`](https://github.com/nodejs/node-gyp/commit/d1e4cc4b62)] - **(SEMVER-MINOR)** Download headers tarball for ~0.12.10 || ~0.10.42 (Rod Vagg) [#877](https://github.com/nodejs/node-gyp/pull/877)
* [[`6e28ad1bea`](https://github.com/nodejs/node-gyp/commit/6e28ad1bea)] - Allow for npmlog@2.x (Rebecca Turner) [#861](https://github.com/nodejs/node-gyp/pull/861)
* [[`07371e5812`](https://github.com/nodejs/node-gyp/commit/07371e5812)] - Use -fPIC for NetBSD. (Marcin Cieślak) [#856](https://github.com/nodejs/node-gyp/pull/856)
* [[`8c4b0ffa50`](https://github.com/nodejs/node-gyp/commit/8c4b0ffa50)] - **(SEMVER-MINOR)** Add --cafile command line option. (Ben Noordhuis) [#837](https://github.com/nodejs/node-gyp/pull/837)
* [[`b3ad43498e`](https://github.com/nodejs/node-gyp/commit/b3ad43498e)] - **(SEMVER-MINOR)** Make download() function testable. (Ben Noordhuis) [#837](https://github.com/nodejs/node-gyp/pull/837)

## v3.2.1 2015-12-03

* [[`ab89b477c4`](https://github.com/nodejs/node-gyp/commit/ab89b477c4)] - Upgrade gyp to b3cef02. (Ben Noordhuis) [#831](https://github.com/nodejs/node-gyp/pull/831)
* [[`90078ecb17`](https://github.com/nodejs/node-gyp/commit/90078ecb17)] - Define WIN32_LEAN_AND_MEAN conditionally. (Ben Noordhuis) [#824](https://github.com/nodejs/node-gyp/pull/824)

## v3.2.0 2015-11-25

* [[`268f1ca4c7`](https://github.com/nodejs/node-gyp/commit/268f1ca4c7)] - Use result of `which` when searching for python. (Refael Ackermann) [#668](https://github.com/nodejs/node-gyp/pull/668)
* [[`817ed9bd78`](https://github.com/nodejs/node-gyp/commit/817ed9bd78)] - Add test for python executable search logic. (Ben Noordhuis) [#756](https://github.com/nodejs/node-gyp/pull/756)
* [[`0e2dfda1f3`](https://github.com/nodejs/node-gyp/commit/0e2dfda1f3)] - Fix test/test-options when run through `npm test`. (Ben Noordhuis) [#755](https://github.com/nodejs/node-gyp/pull/755)
* [[`9bfa0876b4`](https://github.com/nodejs/node-gyp/commit/9bfa0876b4)] - Add support for AIX (Michael Dawson) [#753](https://github.com/nodejs/node-gyp/pull/753)
* [[`a8d441a0a2`](https://github.com/nodejs/node-gyp/commit/a8d441a0a2)] - Update README for Windows 10 support. (Jason Williams) [#766](https://github.com/nodejs/node-gyp/pull/766)
* [[`d1d6015276`](https://github.com/nodejs/node-gyp/commit/d1d6015276)] - Update broken links and switch to HTTPS. (andrew morton)

## v3.1.0 2015-11-14

* [[`9049241f91`](https://github.com/nodejs/node-gyp/commit/9049241f91)] - **gyp**: don't use links at all, just copy the files instead (Nathan Zadoks)
* [[`8ef90348d1`](https://github.com/nodejs/node-gyp/commit/8ef90348d1)] - **gyp**: apply https://codereview.chromium.org/11361103/ (Nathan Rajlich)
* [[`a2ed0df84e`](https://github.com/nodejs/node-gyp/commit/a2ed0df84e)] - **gyp**: always install into $PRODUCT_DIR (Nathan Rajlich)
* [[`cc8b2fa83e`](https://github.com/nodejs/node-gyp/commit/cc8b2fa83e)] - Update gyp to b3cef02. (Imran Iqbal) [#781](https://github.com/nodejs/node-gyp/pull/781)
* [[`f5d86eb84e`](https://github.com/nodejs/node-gyp/commit/f5d86eb84e)] - Update to tar@2.0.0. (Edgar Muentes) [#797](https://github.com/nodejs/node-gyp/pull/797)
* [[`2ac7de02c4`](https://github.com/nodejs/node-gyp/commit/2ac7de02c4)] - Fix infinite loop with zero-length options. (Ben Noordhuis) [#745](https://github.com/nodejs/node-gyp/pull/745)
* [[`101bed639b`](https://github.com/nodejs/node-gyp/commit/101bed639b)] - This platform value came from debian package, and now the value (Jérémy Lal) [#738](https://github.com/nodejs/node-gyp/pull/738)

## v3.0.3 2015-09-14

* [[`ad827cda30`](https://github.com/nodejs/node-gyp/commit/ad827cda30)] - tarballUrl global and && when checking for iojs (Lars-Magnus Skog) [#729](https://github.com/nodejs/node-gyp/pull/729)

## v3.0.2 2015-09-12

* [[`6e8c3bf3c6`](https://github.com/nodejs/node-gyp/commit/6e8c3bf3c6)] - add back support for passing additional cmdline args (Rod Vagg) [#723](https://github.com/nodejs/node-gyp/pull/723)
* [[`ff82f2f3b9`](https://github.com/nodejs/node-gyp/commit/ff82f2f3b9)] - fixed broken link in docs to Visual Studio 2013 download (simon-p-r) [#722](https://github.com/nodejs/node-gyp/pull/722)

## v3.0.1 2015-09-08

* [[`846337e36b`](https://github.com/nodejs/node-gyp/commit/846337e36b)] - normalise versions for target == this comparison (Rod Vagg) [#716](https://github.com/nodejs/node-gyp/pull/716)

## v3.0.0 2015-09-08

* [[`9720d0373c`](https://github.com/nodejs/node-gyp/commit/9720d0373c)] - remove node_modules from tree (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711)
* [[`6dcf220db7`](https://github.com/nodejs/node-gyp/commit/6dcf220db7)] - test version major directly, don't use semver.satisfies() (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711)
* [[`938dd18d1c`](https://github.com/nodejs/node-gyp/commit/938dd18d1c)] - refactor for clarity, fix dist-url, add env var dist-url functionality (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711)
* [[`9e9df66a06`](https://github.com/nodejs/node-gyp/commit/9e9df66a06)] - use process.release, make aware of io.js & node v4 differences (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711)
* [[`1ea7ed01f4`](https://github.com/nodejs/node-gyp/commit/1ea7ed01f4)] - **deps**: update graceful-fs dependency to the latest (Sakthipriyan Vairamani) [#714](https://github.com/nodejs/node-gyp/pull/714)
* [[`0fbc387b35`](https://github.com/nodejs/node-gyp/commit/0fbc387b35)] - Update repository URLs. (Ben Noordhuis) [#715](https://github.com/nodejs/node-gyp/pull/715)
* [[`bbedb8868b`](https://github.com/nodejs/node-gyp/commit/bbedb8868b)] - **(SEMVER-MAJOR)** **win**: enable delay-load hook by default (Jeremiah Senkpiel) [#708](https://github.com/nodejs/node-gyp/pull/708)
* [[`85ed107565`](https://github.com/nodejs/node-gyp/commit/85ed107565)] - Merge pull request #664 from othiym23/othiym23/allow-semver-5 (Nathan Rajlich)
* [[`0c720d234c`](https://github.com/nodejs/node-gyp/commit/0c720d234c)] - allow semver@5 (Forrest L Norvell)

## 2.0.2 / 2015-07-14

* Use HTTPS for dist url (#656, @SonicHedgehog)
* Merge pull request #648 from nevosegal/master
* Merge pull request #650 from magic890/patch-1
* Updated Installation section on README
* Updated link to gyp user documentation
* Fix download error message spelling (#643, @tomxtobin)
* Merge pull request #637 from lygstate/master
* Set NODE_GYP_DIR for addon.gypi to set an absolute path for
  src/win_delay_load_hook.c, fixing the long relative path issue on Win32.
  Fixes #636 (#637, @lygstate).

## 2.0.1 / 2015-05-28

* configure: try/catch the semver range.test() call
* README: update for visual studio 2013 (#510, @samccone)

## 2.0.0 / 2015-05-24

* configure: check for python2 executable by default, fallback to python
* configure: don't clobber existing $PYTHONPATH
* configure: use "path-array" for PYTHONPATH
* gyp: fix for non-ascii userprofile name on Windows
* gyp: always install into $PRODUCT_DIR
* gyp: apply https://codereview.chromium.org/11361103/
* gyp: don't use links at all, just copy the files instead
* gyp: update gyp to e1c8fcf7
* Updated README.md with updated Windows build info
* Show URL when a download fails
* package: add a "license" field
* move HMODULE m declaration to top
* Only add "-undefined dynamic_lookup" to loadable_module targets
* win: optionally allow node.exe/iojs.exe to be renamed
* Avoid downloading shasums if using tarPath
* Add target name preprocessor define: `NODE_GYP_MODULE_NAME`
* Show better error message in case of bad network settings

34 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/CONTRIBUTING.md generated vendored Normal file
@@ -0,0 +1,34 @@
# Contributing to node-gyp

## Code of Conduct

Please read the
[Code of Conduct](https://github.com/nodejs/admin/blob/main/CODE_OF_CONDUCT.md)
which explains the minimum behavior expectations for node-gyp contributors.

<a id="developers-certificate-of-origin"></a>
## Developer's Certificate of Origin 1.1

By making a contribution to this project, I certify that:

* (a) The contribution was created in whole or in part by me and I
  have the right to submit it under the open source license
  indicated in the file; or

* (b) The contribution is based upon previous work that, to the best
  of my knowledge, is covered under an appropriate open source
  license and I have the right under that license to submit that
  work with modifications, whether created in whole or in part
  by me, under the same open source license (unless I am
  permitted to submit under a different license), as indicated
  in the file; or

* (c) The contribution was provided directly to me by some other
  person who certified (a), (b) or (c) and I have not modified
  it.

* (d) I understand and agree that this project and the contribution
  are public and that a record of the contribution (including all
  personal information I submit with it, including my sign-off) is
  maintained indefinitely and may be redistributed consistent with
  this project or the open source license(s) involved.

24 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/LICENSE generated vendored Normal file
@@ -0,0 +1,24 @@
(The MIT License)

Copyright (c) 2012 Nathan Rajlich <nathan@tootallnate.net>

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

263 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/README.md generated vendored Normal file
@@ -0,0 +1,263 @@
# `node-gyp` - Node.js native addon build tool

[![Build Status](https://github.com/nodejs/node-gyp/workflows/Tests/badge.svg?branch=main)](https://github.com/nodejs/node-gyp/actions?query=workflow%3ATests+branch%3Amain)
![npm](https://img.shields.io/npm/dm/node-gyp)

`node-gyp` is a cross-platform command-line tool written in Node.js for
compiling native addon modules for Node.js. It contains a vendored copy of the
[gyp-next](https://github.com/nodejs/gyp-next) project that was previously used
by the Chromium team and extended to support the development of Node.js native
addons.

Note that `node-gyp` is _not_ used to build Node.js itself.

All current and LTS target versions of Node.js are supported. Depending on what
version of Node.js is actually installed on your system, `node-gyp` downloads the
necessary development files or headers for the target version. A list of stable
Node.js versions can be found on the [Node.js website](https://nodejs.org/en/about/previous-releases).

## Features

* The same build commands work on any of the supported platforms
* Supports the targeting of different versions of Node.js

## Installation

You can install `node-gyp` using `npm`:

``` bash
npm install -g node-gyp
```

Depending on your operating system, you will need to install:

### On Unix

* [A supported version of Python](https://devguide.python.org/versions/)
* `make`
* A proper C/C++ compiler toolchain, like [GCC](https://gcc.gnu.org)

### On macOS

* [A supported version of Python](https://devguide.python.org/versions/)
* `Xcode Command Line Tools`, which will install `clang`, `clang++`, and `make`.
  * Install the `Xcode Command Line Tools` standalone by running `xcode-select --install`. -- OR --
  * Alternatively, if you already have the [full Xcode installed](https://developer.apple.com/xcode/download/), you can install the Command Line Tools under the menu `Xcode -> Open Developer Tool -> More Developer Tools...`.

### On Windows

Install the current [version of Python](https://devguide.python.org/versions/) from the
[Microsoft Store](https://apps.microsoft.com/store/search?publisher=Python+Software+Foundation).

Install tools and configuration manually:
* Install Visual C++ Build Environment: For Visual Studio 2019 or later, use the `Desktop development with C++` workload from [Visual Studio Community](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=Community). For a version older than Visual Studio 2019, install [Visual Studio Build Tools](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=BuildTools) with the `Visual C++ build tools` option.

If the above steps didn't work for you, please visit [Microsoft's Node.js Guidelines for Windows](https://github.com/Microsoft/nodejs-guidelines/blob/master/windows-environment.md#compiling-native-addon-modules) for additional tips.

To target native ARM64 Node.js on Windows on ARM, add the components "Visual C++ compilers and libraries for ARM64" and "Visual C++ ATL for ARM64".

To use the native ARM64 C++ compiler on Windows on ARM, ensure that you have Visual Studio 2022 [17.4 or later](https://devblogs.microsoft.com/visualstudio/arm64-visual-studio-is-officially-here/) installed.

It's advised to install the [VSSetup](https://github.com/microsoft/vssetup.powershell) PowerShell module using `Install-Module VSSetup -Scope CurrentUser`.
This makes the Visual Studio detection logic use a more flexible and accessible method, avoiding PowerShell's `ConstrainedLanguage` mode.

### Configuring Python Dependency

`node-gyp` requires that you have installed a [supported version of Python](https://devguide.python.org/versions/).
If you have multiple versions of Python installed, you can identify which version
`node-gyp` should use in one of the following ways:

1. by setting the `--python` command-line option, e.g.:

``` bash
node-gyp <command> --python /path/to/executable/python
```

2. If `node-gyp` is called by way of `npm`, *and* you have multiple versions of
Python installed, then you can set the `npm_config_python` environment variable
to the appropriate path:
``` bash
export npm_config_python=/path/to/executable/python
```
Or on Windows:
```console
py --list-paths  # To see the installed Python versions
set npm_config_python=C:\path\to\python.exe
```

3. If the `PYTHON` environment variable is set to the path of a Python executable,
then that version will be used if it is a supported version.

4. If the `NODE_GYP_FORCE_PYTHON` environment variable is set to the path of a
Python executable, it will be used instead of any of the other configured or
built-in Python search paths. If it's not a compatible version, no further
searching will be done.
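
For example, a minimal sketch of option 4 (the interpreter path is illustrative, not prescribed by node-gyp):

``` bash
# Bypass every other Python search path; the path below is an assumption --
# point it at whichever supported interpreter you actually have installed.
export NODE_GYP_FORCE_PYTHON=/usr/local/bin/python3.11
node-gyp configure
```
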
### Build for Third Party Node.js Runtimes

When building modules for third-party Node.js runtimes like Electron, which have
different build configurations from the official Node.js distribution, you
should use `--dist-url` or `--nodedir` flags to specify the headers of the
runtime to build for.

Also when `--dist-url` or `--nodedir` flags are passed, node-gyp will use the
`config.gypi` shipped in the headers distribution to generate build
configurations, which is different from the default mode that would use the
`process.config` object of the running Node.js instance.

Some old versions of Electron shipped malformed `config.gypi` in their headers
distributions, and you might need to pass `--force-process-config` to node-gyp
to work around configuration errors.
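
For example, a sketch of rebuilding against an Electron runtime (both the version
number and the headers URL are examples; check your runtime's documentation for
the correct values):

``` bash
# Build against a runtime's headers instead of the installed Node.js
node-gyp rebuild --target=28.0.0 --arch=x64 --dist-url=https://electronjs.org/headers
```
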
## How to Use

To compile your native addon, first go to its root directory:

``` bash
cd my_node_addon
```

The next step is to generate the appropriate project build files for the current
platform. Use `configure` for that:

``` bash
node-gyp configure
```

Auto-detection fails for Visual C++ Build Tools 2015, so `--msvs_version=2015`
needs to be added (not needed when run by npm as configured above):
``` bash
node-gyp configure --msvs_version=2015
```

__Note__: The `configure` step looks for a `binding.gyp` file in the current
directory to process. See below for instructions on creating a `binding.gyp` file.

Now you will have either a `Makefile` (on Unix platforms) or a `vcxproj` file
(on Windows) in the `build/` directory. Next, invoke the `build` command:

``` bash
node-gyp build
```

Now you have your compiled `.node` bindings file! The compiled bindings end up
in `build/Debug/` or `build/Release/`, depending on the build mode. At this point,
you can require the `.node` file with Node.js and run your tests!
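
For example, a quick smoke test of a Release build (the target name `binding` is
an assumption here; use whatever `target_name` your `binding.gyp` declares):

``` bash
node -e "console.log(require('./build/Release/binding.node'))"
```
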
__Note:__ To create a _Debug_ build of the bindings file, pass the `--debug` (or
`-d`) switch when running either the `configure`, `build` or `rebuild` commands.

## The `binding.gyp` file

A `binding.gyp` file describes the configuration to build your module, in a
JSON-like format. This file gets placed in the root of your package, alongside
`package.json`.

A barebones `gyp` file appropriate for building a Node.js addon could look like:

```python
{
  "targets": [
    {
      "target_name": "binding",
      "sources": [ "src/binding.cc" ]
    }
  ]
}
```
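
Assuming a matching `src/binding.cc` exists, a sketch of the whole cycle for this file:

``` bash
cd my_node_addon
node-gyp rebuild                  # runs clean, configure and build in one step
ls build/Release/binding.node     # the compiled addon, named after target_name
```
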
## Further reading

The **[docs](./docs/)** directory contains additional documentation on specific node-gyp topics that may be useful if you are experiencing problems installing or building addons using node-gyp.

Some additional resources for Node.js native addons and writing `gyp` configuration files:

* ["Going Native" a nodeschool.io tutorial](http://nodeschool.io/#goingnative)
* ["Hello World" node addon example](https://github.com/nodejs/node/tree/main/test/addons/hello-world)
* [gyp user documentation](https://gyp.gsrc.io/docs/UserDocumentation.md)
* [gyp input format reference](https://gyp.gsrc.io/docs/InputFormatReference.md)
* [*"binding.gyp" files out in the wild* wiki page](./docs/binding.gyp-files-in-the-wild.md)

## Commands

`node-gyp` responds to the following commands:

| **Command** | **Description**
|:--------------|:---------------------------------------------------------------
| `help` | Shows the help dialog
| `build` | Invokes `make`/`msbuild.exe` and builds the native addon
| `clean` | Removes the `build` directory if it exists
| `configure` | Generates project build files for the current platform
| `rebuild` | Runs `clean`, `configure` and `build` all in a row
| `install` | Installs Node.js header files for the given version
| `list` | Lists the currently installed Node.js header versions
| `remove` | Removes the Node.js header files for the given version

## Command Options

`node-gyp` accepts the following command options:

| **Command** | **Description**
|:----------------------------------|:------------------------------------------
| `-j n`, `--jobs n` | Run `make` in parallel. The value `max` will use all available CPU cores
| `--target=v6.2.1` | Node.js version to build for (default is `process.version`)
| `--silly`, `--loglevel=silly` | Log all progress to console
| `--verbose`, `--loglevel=verbose` | Log most progress to console
| `--silent`, `--loglevel=silent` | Don't log anything to console
| `-d`, `--debug` | Make Debug build (default is `Release`)
| `--release`, `--no-debug` | Make Release build
| `-C $dir`, `--directory=$dir` | Run command in different directory
| `--make=$make` | Override `make` command (e.g. `gmake`)
| `--thin=yes` | Enable thin static libraries
| `--arch=$arch` | Set target architecture (e.g. ia32)
| `--tarball=$path` | Get headers from a local tarball
| `--devdir=$path` | SDK download directory (default is OS cache directory)
| `--ensure` | Don't reinstall headers if already present
| `--dist-url=$url` | Download header tarball from custom URL
| `--proxy=$url` | Set HTTP(S) proxy for downloading header tarball
| `--noproxy=$urls` | Set urls to ignore proxies when downloading header tarball
| `--cafile=$cafile` | Override default CA chain (to download tarball)
| `--nodedir=$path` | Set the path to the node source code
| `--python=$path` | Set path to the Python binary
| `--msvs_version=$version` | Set Visual Studio version (Windows only)
| `--solution=$solution` | Set Visual Studio Solution version (Windows only)
| `--force-process-config` | Force using runtime's `process.config` object to generate `config.gypi` file
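
For example, a typical invocation combining several of these options (the version
number and paths are illustrative):

``` bash
node-gyp rebuild --target=18.19.0 --arch=x64 --devdir=/tmp/.gyp --loglevel=verbose
```
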
## Configuration

### Environment variables

Use the form `npm_config_OPTION_NAME` for any of the command options listed
above (dashes in option names should be replaced by underscores).

For example, to set `devdir` equal to `/tmp/.gyp`, you would:

Run this on Unix:

```bash
export npm_config_devdir=/tmp/.gyp
```

Or this on Windows:

```console
set npm_config_devdir=c:\temp\.gyp
```
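
As another example, an option containing a dash, such as `--dist-url`, becomes
`npm_config_dist_url` (the URL below is only for illustration):

``` bash
export npm_config_dist_url=https://nodejs.org/dist
```
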
### `npm` configuration for npm versions before v9

Use the form `OPTION_NAME` for any of the command options listed above.

For example, to set `devdir` equal to `/tmp/.gyp`, you would run:

```bash
npm config set [--global] devdir /tmp/.gyp
```

**Note:** Configuration set via `npm` will only be used when `node-gyp`
is run via `npm`, not when `node-gyp` is run directly.

## License

`node-gyp` is available under the MIT license. See the [LICENSE
file](LICENSE) for details.

2 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/SECURITY.md generated vendored Normal file
@@ -0,0 +1,2 @@
If you believe you have found a security issue in the software in this
repository, please consult https://github.com/nodejs/node/blob/HEAD/SECURITY.md.

204 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/addon.gypi generated vendored Normal file
@@ -0,0 +1,204 @@
{
  'variables' : {
    'node_engine_include_dir%': 'deps/v8/include',
    'node_host_binary%': 'node',
    'node_with_ltcg%': 'true',
  },
  'target_defaults': {
    'type': 'loadable_module',
    'win_delay_load_hook': 'true',
    'product_prefix': '',

    'conditions': [
      [ 'node_engine=="chakracore"', {
        'variables': {
          'node_engine_include_dir%': 'deps/chakrashim/include'
        },
      }]
    ],

    'include_dirs': [
      '<(node_root_dir)/include/node',
      '<(node_root_dir)/src',
      '<(node_root_dir)/deps/openssl/config',
      '<(node_root_dir)/deps/openssl/openssl/include',
      '<(node_root_dir)/deps/uv/include',
      '<(node_root_dir)/deps/zlib',
      '<(node_root_dir)/<(node_engine_include_dir)'
    ],
    'defines!': [
      'BUILDING_UV_SHARED=1',  # Inherited from common.gypi.
      'BUILDING_V8_SHARED=1',  # Inherited from common.gypi.
    ],
    'defines': [
      'NODE_GYP_MODULE_NAME=>(_target_name)',
      'USING_UV_SHARED=1',
      'USING_V8_SHARED=1',
      # Warn when using deprecated V8 APIs.
      'V8_DEPRECATION_WARNINGS=1'
    ],

    'target_conditions': [
      ['_type=="loadable_module"', {
        'product_extension': 'node',
        'defines': [
          'BUILDING_NODE_EXTENSION'
        ],
        'xcode_settings': {
          'OTHER_LDFLAGS': [
            '-undefined dynamic_lookup'
          ],
        },
      }],

      ['_type=="static_library"', {
        # set to `1` to *disable* the -T thin archive 'ld' flag.
        # older linkers don't support this flag.
        'standalone_static_library': '<(standalone_static_library)'
      }],

      ['_type!="executable"', {
        'conditions': [
          [ 'OS=="android"', {
            'cflags!': [ '-fPIE' ],
          }]
        ]
      }],

      ['_win_delay_load_hook=="true"', {
        # If the addon specifies `'win_delay_load_hook': 'true'` in its
        # binding.gyp, link a delay-load hook into the DLL. This hook ensures
        # that the addon will work regardless of whether the node/iojs binary
        # is named node.exe, iojs.exe, or something else.
        'conditions': [
          [ 'OS=="win"', {
            'defines': [ 'HOST_BINARY=\"<(node_host_binary)<(EXECUTABLE_SUFFIX)\"', ],
            'sources': [
              '<(node_gyp_dir)/src/win_delay_load_hook.cc',
            ],
            'msvs_settings': {
              'VCLinkerTool': {
                'DelayLoadDLLs': [ '<(node_host_binary)<(EXECUTABLE_SUFFIX)' ],
                # Don't print a linker warning when no imports from either .exe
                # are used.
                'AdditionalOptions': [ '/ignore:4199' ],
              },
            },
          }],
        ],
      }],
    ],

    'conditions': [
      [ 'OS=="mac"', {
        'defines': [
          '_DARWIN_USE_64_BIT_INODE=1'
        ],
        'xcode_settings': {
          'DYLIB_INSTALL_NAME_BASE': '@rpath'
        },
      }],
      [ 'OS=="aix"', {
        'ldflags': [
          '-Wl,-bimport:<(node_exp_file)'
        ],
      }],
      [ 'OS=="os400"', {
        'ldflags': [
          '-Wl,-bimport:<(node_exp_file)'
        ],
      }],
      [ 'OS=="zos"', {
        'conditions': [
          [ '"<!(echo $CC)" != "clang" and \
             "<!(echo $CC)" != "ibm-clang64" and \
             "<!(echo $CC)" != "ibm-clang"', {
            'cflags': [
              '-q64',
              '-Wc,DLL',
              '-qlonglong',
              '-qenum=int',
              '-qxclang=-fexec-charset=ISO8859-1'
            ],
            'ldflags': [
              '-q64',
              '<(node_exp_file)',
            ],
          }, {
            'cflags': [
              '-m64',
            ],
            'ldflags': [
              '-m64',
              '<(node_exp_file)',
            ],
          }],
        ],
        'defines': [
          '_ALL_SOURCE',
          'MAP_FAILED=-1',
          '_UNIX03_SOURCE',
        ],
      }],
      [ 'OS=="win"', {
        'conditions': [
          ['node_engine=="chakracore"', {
            'library_dirs': [ '<(node_root_dir)/$(ConfigurationName)' ],
            'libraries': [ '<@(node_engine_libs)' ],
          }],
          ['node_with_ltcg=="true"', {
            'msvs_settings': {
              'VCCLCompilerTool': {
                'WholeProgramOptimization': 'true'  # /GL, whole program optimization, needed for LTCG
              },
              'VCLibrarianTool': {
                'AdditionalOptions': [
                  '/LTCG:INCREMENTAL',  # incremental link-time code generation
                ]
              },
              'VCLinkerTool': {
                'OptimizeReferences': 2,  # /OPT:REF
                'EnableCOMDATFolding': 2,  # /OPT:ICF
                'LinkIncremental': 1,  # disable incremental linking
                'AdditionalOptions': [
                  '/LTCG:INCREMENTAL',  # incremental link-time code generation
                ]
              }
            }
          }]
        ],
        'libraries': [
          '-lkernel32.lib',
          '-luser32.lib',
          '-lgdi32.lib',
          '-lwinspool.lib',
          '-lcomdlg32.lib',
          '-ladvapi32.lib',
          '-lshell32.lib',
          '-lole32.lib',
          '-loleaut32.lib',
          '-luuid.lib',
          '-lodbc32.lib',
          '-lDelayImp.lib',
          '-l"<(node_lib_file)"'
        ],
        'msvs_disabled_warnings': [
          # warning C4251: 'node::ObjectWrap::handle_' : class 'v8::Persistent<T>'
          # needs to have dll-interface to be used by
          # clients of class 'node::ObjectWrap'
          4251
        ],
      }, {
        # OS!="win"
        'defines': [
          '_LARGEFILE_SOURCE',
          '_FILE_OFFSET_BITS=64'
        ],
      }],
      [ 'OS in "freebsd openbsd netbsd solaris android" or \
         (OS=="linux" and target_arch!="ia32")', {
        'cflags': [ '-fPIC' ],
      }],
    ]
  }
}

138 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/bin/node-gyp.js generated vendored Normal file
@@ -0,0 +1,138 @@
#!/usr/bin/env node

'use strict'

process.title = 'node-gyp'

const envPaths = require('env-paths')
const gyp = require('../')
const log = require('../lib/log')
const os = require('os')

/**
 * Process and execute the selected commands.
 */

const prog = gyp()
let completed = false
prog.parseArgv(process.argv)
prog.devDir = prog.opts.devdir

const homeDir = os.homedir()
if (prog.devDir) {
  prog.devDir = prog.devDir.replace(/^~/, homeDir)
} else if (homeDir) {
  prog.devDir = envPaths('node-gyp', { suffix: '' }).cache
} else {
  throw new Error(
    "node-gyp requires that the user's home directory is specified " +
    'in either of the environmental variables HOME or USERPROFILE. ' +
    'Override with: --devdir /path/to/.node-gyp')
}

if (prog.todo.length === 0) {
  if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) {
    log.stdout('v%s', prog.version)
  } else {
    log.stdout('%s', prog.usage())
  }
  process.exit(0)
}

log.info('it worked if it ends with', 'ok')
log.verbose('cli', process.argv)
log.info('using', 'node-gyp@%s', prog.version)
log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch)

/**
 * Change dir if -C/--directory was passed.
 */

const dir = prog.opts.directory
if (dir) {
  const fs = require('fs')
  try {
    const stat = fs.statSync(dir)
    if (stat.isDirectory()) {
      log.info('chdir', dir)
      process.chdir(dir)
    } else {
      log.warn('chdir', dir + ' is not a directory')
    }
  } catch (e) {
    if (e.code === 'ENOENT') {
      log.warn('chdir', dir + ' is not a directory')
    } else {
      log.warn('chdir', 'error during chdir() "%s"', e.message)
    }
  }
}

async function run () {
  const command = prog.todo.shift()
  if (!command) {
    // done!
    completed = true
    log.info('ok')
    return
  }

  try {
    const args = await prog.commands[command.name](command.args) ?? []

    if (command.name === 'list') {
      if (args.length) {
        args.forEach((version) => log.stdout(version))
      } else {
        log.stdout('No node development files installed. Use `node-gyp install` to install a version.')
      }
    } else if (args.length >= 1) {
      log.stdout(...args.slice(1))
    }

    // now run the next command in the queue
    return run()
  } catch (err) {
    log.error(command.name + ' error')
    log.error('stack', err.stack)
    errorMessage()
    log.error('not ok')
    return process.exit(1)
  }
}

process.on('exit', function (code) {
  if (!completed && !code) {
    log.error('Completion callback never invoked!')
    issueMessage()
    process.exit(6)
  }
})

process.on('uncaughtException', function (err) {
  log.error('UNCAUGHT EXCEPTION')
  log.error('stack', err.stack)
  issueMessage()
  process.exit(7)
})

function errorMessage () {
  // copied from npm's lib/utils/error-handler.js
  const os = require('os')
  log.error('System', os.type() + ' ' + os.release())
  log.error('command', process.argv
    .map(JSON.stringify).join(' '))
  log.error('cwd', process.cwd())
  log.error('node -v', process.version)
  log.error('node-gyp -v', 'v' + prog.package.version)
}

function issueMessage () {
  errorMessage()
  log.error('', ['Node-gyp failed to build your package.',
    'Try to update npm and/or node-gyp and if it does not help file an issue with the package author.'
  ].join('\n'))
}

// start running the given commands!
run()

28 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/LICENSE generated vendored Normal file
@@ -0,0 +1,28 @@
Copyright (c) 2020 Node.js contributors. All rights reserved.
Copyright (c) 2009 Google Inc. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

12 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc generated vendored Normal file
@@ -0,0 +1,12 @@
// Copyright (c) 2013 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
// then used during the final link for modules that have large PDBs. Otherwise,
// the linker will generate a pdb with a page size of 1KB, which imposes a limit
// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
// (rather than the linker), this limit is avoided. With this in place PDBs may
// grow to 2GB.
//
// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.

8 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/gyp generated vendored Normal file
@@ -0,0 +1,8 @@
#!/bin/sh
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

set -e
base=$(dirname "$0")
exec python "${base}/gyp_main.py" "$@"

5 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/gyp.bat generated vendored Normal file
@@ -0,0 +1,5 @@
@rem Copyright (c) 2009 Google Inc. All rights reserved.
@rem Use of this source code is governed by a BSD-style license that can be
@rem found in the LICENSE file.

@python "%~dp0gyp_main.py" %*

45 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/gyp_main.py generated vendored Normal file
@@ -0,0 +1,45 @@
#!/usr/bin/env python3

# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os
import sys
import subprocess


def IsCygwin():
    # Function copied from pylib/gyp/common.py
    try:
        out = subprocess.Popen(
            "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        stdout, _ = out.communicate()
        return "CYGWIN" in stdout.decode("utf-8")
    except Exception:
        return False


def UnixifyPath(path):
    try:
        if not IsCygwin():
            return path
        out = subprocess.Popen(
            ["cygpath", "-u", path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        stdout, _ = out.communicate()
        return stdout.decode("utf-8")
    except Exception:
        return path


# Make sure we're using the version of pylib in this repo, not one installed
# elsewhere on the system. Also convert to Unix style path on Cygwin systems,
# else the 'gyp' library will not be found
path = UnixifyPath(sys.argv[0])
sys.path.insert(0, os.path.join(os.path.dirname(path), "pylib"))
import gyp  # noqa: E402

if __name__ == "__main__":
    sys.exit(gyp.script_main())

365
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
generated
vendored
Normal file
365
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
generated
vendored
Normal file
|
@ -0,0 +1,365 @@
|
|||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""New implementation of Visual Studio project generation."""
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import random
|
||||
from operator import attrgetter
|
||||
|
||||
import gyp.common
|
||||
|
||||
|
||||
def cmp(x, y):
|
||||
return (x > y) - (x < y)
|
||||
|
||||
|
||||
# Initialize random number generator
|
||||
random.seed()
|
||||
|
||||
# GUIDs for project types
|
||||
ENTRY_TYPE_GUIDS = {
|
||||
"project": "{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}",
|
||||
"folder": "{2150E333-8FDC-42A3-9474-1A3956D46DE8}",
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Helper functions
|
||||
|
||||
|
||||
def MakeGuid(name, seed="msvs_new"):
|
||||
"""Returns a GUID for the specified target name.
|
||||
|
||||
Args:
|
||||
name: Target name.
|
||||
seed: Seed for MD5 hash.
|
||||
Returns:
|
||||
A GUID-line string calculated from the name and seed.
|
||||
|
||||
This generates something which looks like a GUID, but depends only on the
|
||||
name and seed. This means the same name/seed will always generate the same
|
||||
GUID, so that projects and solutions which refer to each other can explicitly
|
||||
determine the GUID to refer to explicitly. It also means that the GUID will
|
||||
not change when the project for a target is rebuilt.
|
||||
"""
|
||||
# Calculate a MD5 signature for the seed and name.
|
||||
d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper()
|
||||
# Convert most of the signature to GUID form (discard the rest)
|
||||
guid = (
|
||||
"{"
|
||||
+ d[:8]
|
||||
+ "-"
|
||||
+ d[8:12]
|
||||
+ "-"
|
||||
+ d[12:16]
|
||||
+ "-"
|
||||
+ d[16:20]
|
||||
+ "-"
|
||||
+ d[20:32]
|
||||
+ "}"
|
||||
)
|
||||
return guid
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class MSVSSolutionEntry:
|
||||
def __cmp__(self, other):
|
||||
# Sort by name then guid (so things are in order on vs2008).
|
||||
return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
|
||||
|
||||
|
||||
class MSVSFolder(MSVSSolutionEntry):
|
||||
"""Folder in a Visual Studio project or solution."""
|
||||
|
||||
def __init__(self, path, name=None, entries=None, guid=None, items=None):
|
||||
"""Initializes the folder.
|
||||
|
||||
Args:
|
||||
path: Full path to the folder.
|
||||
name: Name of the folder.
|
||||
entries: List of folder entries to nest inside this folder. May contain
|
||||
Folder or Project objects. May be None, if the folder is empty.
|
||||
guid: GUID to use for folder, if not None.
|
||||
items: List of solution items to include in the folder project. May be
|
||||
None, if the folder does not directly contain items.
|
||||
"""
|
||||
if name:
|
||||
self.name = name
|
||||
else:
|
||||
# Use last layer.
|
||||
self.name = os.path.basename(path)
|
||||
|
||||
self.path = path
|
||||
self.guid = guid
|
||||
|
||||
# Copy passed lists (or set to empty lists)
|
||||
self.entries = sorted(entries or [], key=attrgetter("path"))
|
||||
self.items = list(items or [])
|
||||
|
||||
self.entry_type_guid = ENTRY_TYPE_GUIDS["folder"]
|
||||
|
||||
def get_guid(self):
|
||||
if self.guid is None:
|
||||
# Use consistent guids for folders (so things don't regenerate).
|
||||
self.guid = MakeGuid(self.path, seed="msvs_folder")
|
||||
return self.guid


# ------------------------------------------------------------------------------


class MSVSProject(MSVSSolutionEntry):
    """Visual Studio project."""

    def __init__(
        self,
        path,
        name=None,
        dependencies=None,
        guid=None,
        spec=None,
        build_file=None,
        config_platform_overrides=None,
        fixpath_prefix=None,
    ):
        """Initializes the project.

        Args:
          path: Absolute path to the project file.
          name: Name of project.  If None, the name will be the same as the base
              name of the project file.
          dependencies: List of other Project objects this project is dependent
              upon, if not None.
          guid: GUID to use for project, if not None.
          spec: Dictionary specifying how to build this project.
          build_file: Filename of the .gyp file that the vcproj file comes from.
          config_platform_overrides: optional dict of configuration platforms to
              use in place of the default for this target.
          fixpath_prefix: the path used to adjust the behavior of _fixpath
        """
        self.path = path
        self.guid = guid
        self.spec = spec
        self.build_file = build_file
        # Use project filename if name not specified
        self.name = name or os.path.splitext(os.path.basename(path))[0]

        # Copy passed lists (or set to empty lists)
        self.dependencies = list(dependencies or [])

        self.entry_type_guid = ENTRY_TYPE_GUIDS["project"]

        if config_platform_overrides:
            self.config_platform_overrides = config_platform_overrides
        else:
            self.config_platform_overrides = {}
        self.fixpath_prefix = fixpath_prefix
        self.msbuild_toolset = None

    def set_dependencies(self, dependencies):
        self.dependencies = list(dependencies or [])

    def get_guid(self):
        if self.guid is None:
            # Set GUID from path
            # TODO(rspangler): This is fragile.
            # 1. We can't just use the project filename sans path, since there
            #    could be multiple projects with the same base name (for example,
            #    foo/unittest.vcproj and bar/unittest.vcproj).
            # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
            #    GUID is the same whether it's included from base/base.sln or
            #    foo/bar/baz/baz.sln.
            # 3. The GUID needs to be the same each time this builder is invoked,
            #    so that we don't need to rebuild the solution when the project
            #    changes.
            # 4. We should be able to handle pre-built project files by reading the
            #    GUID from the files.
            self.guid = MakeGuid(self.name)
        return self.guid

    def set_msbuild_toolset(self, msbuild_toolset):
        self.msbuild_toolset = msbuild_toolset


# ------------------------------------------------------------------------------


class MSVSSolution:
    """Visual Studio solution."""

    def __init__(
        self, path, version, entries=None, variants=None, websiteProperties=True
    ):
        """Initializes the solution.

        Args:
          path: Path to solution file.
          version: Format version to emit.
          entries: List of entries in solution.  May contain Folder or Project
              objects.  May be None, if the folder is empty.
          variants: List of build variant strings.  If None, a default list will
              be used.
          websiteProperties: Flag to decide if the website properties section
              is generated.
        """
        self.path = path
        self.websiteProperties = websiteProperties
        self.version = version

        # Copy passed lists (or set to empty lists)
        self.entries = list(entries or [])

        if variants:
            # Copy passed list
            self.variants = variants[:]
        else:
            # Use default
            self.variants = ["Debug|Win32", "Release|Win32"]
        # TODO(rspangler): Need to be able to handle a mapping of solution config
        # to project config.  Should we be able to handle variants being a dict,
        # or add a separate variant_map variable?  If it's a dict, we can't
        # guarantee the order of variants since dict keys aren't ordered.

        # TODO(rspangler): Automatically write to disk for now; should delay until
        # node-evaluation time.
        self.Write()

    def Write(self, writer=gyp.common.WriteOnDiff):
        """Writes the solution file to disk.

        Raises:
          IndexError: An entry appears multiple times.
        """
        # Walk the entry tree and collect all the folders and projects.
        all_entries = set()
        entries_to_check = self.entries[:]
        while entries_to_check:
            e = entries_to_check.pop(0)

            # If this entry has been visited, nothing to do.
            if e in all_entries:
                continue

            all_entries.add(e)

            # If this is a folder, check its entries too.
            if isinstance(e, MSVSFolder):
                entries_to_check += e.entries

        all_entries = sorted(all_entries, key=attrgetter("path"))

        # Open file and print header
        f = writer(self.path)
        f.write(
            "Microsoft Visual Studio Solution File, "
            "Format Version %s\r\n" % self.version.SolutionVersion()
        )
        f.write("# %s\r\n" % self.version.Description())

        # Project entries
        sln_root = os.path.split(self.path)[0]
        for e in all_entries:
            relative_path = gyp.common.RelativePath(e.path, sln_root)
            # msbuild does not accept an empty folder_name.
            # Use '.' in case relative_path is empty.
            folder_name = relative_path.replace("/", "\\") or "."
            f.write(
                'Project("%s") = "%s", "%s", "%s"\r\n'
                % (
                    e.entry_type_guid,  # Entry type GUID
                    e.name,  # Folder name
                    folder_name,  # Folder name (again)
                    e.get_guid(),  # Entry GUID
                )
            )

            # TODO(rspangler): Need a way to configure this stuff
            if self.websiteProperties:
                f.write(
                    "\tProjectSection(WebsiteProperties) = preProject\r\n"
                    '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
                    '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
                    "\tEndProjectSection\r\n"
                )

            if isinstance(e, MSVSFolder) and e.items:
                f.write("\tProjectSection(SolutionItems) = preProject\r\n")
                for i in e.items:
                    f.write(f"\t\t{i} = {i}\r\n")
                f.write("\tEndProjectSection\r\n")

            if isinstance(e, MSVSProject) and e.dependencies:
                f.write("\tProjectSection(ProjectDependencies) = postProject\r\n")
                for d in e.dependencies:
                    f.write(f"\t\t{d.get_guid()} = {d.get_guid()}\r\n")
                f.write("\tEndProjectSection\r\n")

            f.write("EndProject\r\n")

        # Global section
        f.write("Global\r\n")

        # Configurations (variants)
        f.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n")
        for v in self.variants:
            f.write(f"\t\t{v} = {v}\r\n")
        f.write("\tEndGlobalSection\r\n")

        # Sort config guids for easier diffing of solution changes.
        config_guids = []
        config_guids_overrides = {}
        for e in all_entries:
            if isinstance(e, MSVSProject):
                config_guids.append(e.get_guid())
                config_guids_overrides[e.get_guid()] = e.config_platform_overrides
        config_guids.sort()

        f.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n")
        for g in config_guids:
            for v in self.variants:
                nv = config_guids_overrides[g].get(v, v)
                # Pick which project configuration to build for this solution
                # configuration.
                f.write(
                    "\t\t%s.%s.ActiveCfg = %s\r\n"
                    % (
                        g,  # Project GUID
                        v,  # Solution build configuration
                        nv,  # Project build config for that solution config
                    )
                )

                # Enable project in this solution configuration.
                f.write(
                    "\t\t%s.%s.Build.0 = %s\r\n"
                    % (
                        g,  # Project GUID
                        v,  # Solution build configuration
                        nv,  # Project build config for that solution config
                    )
                )
        f.write("\tEndGlobalSection\r\n")

        # TODO(rspangler): Should be able to configure this stuff too (though I've
        # never seen this be any different)
        f.write("\tGlobalSection(SolutionProperties) = preSolution\r\n")
        f.write("\t\tHideSolutionNode = FALSE\r\n")
        f.write("\tEndGlobalSection\r\n")

        # Folder mappings
        # Omit this section if there are no folders
        if any(e.entries for e in all_entries if isinstance(e, MSVSFolder)):
            f.write("\tGlobalSection(NestedProjects) = preSolution\r\n")
            for e in all_entries:
                if not isinstance(e, MSVSFolder):
                    continue  # Does not apply to projects, only folders
                for subentry in e.entries:
                    f.write(f"\t\t{subentry.get_guid()} = {e.get_guid()}\r\n")
            f.write("\tEndGlobalSection\r\n")

        f.write("EndGlobal\r\n")

        f.close()
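
# A rough usage sketch (hypothetical paths; `version` must provide
# SolutionVersion() and Description(), as used by Write() above):
#
#   proj = MSVSProject("c:/src/foo/foo.vcproj", name="foo")
#   sln = MSVSSolution("c:/src/foo.sln", version=version, entries=[proj])
#
# Note that __init__ calls self.Write() immediately (see the TODO above), so
# constructing the solution also writes it to disk.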

206 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py generated vendored Normal file
@@ -0,0 +1,206 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Visual Studio project reader/writer."""

import gyp.easy_xml as easy_xml

# ------------------------------------------------------------------------------


class Tool:
    """Visual Studio tool."""

    def __init__(self, name, attrs=None):
        """Initializes the tool.

        Args:
          name: Tool name.
          attrs: Dict of tool attributes; may be None.
        """
        self._attrs = attrs or {}
        self._attrs["Name"] = name

    def _GetSpecification(self):
        """Creates an element for the tool.

        Returns:
          A specification for the tool, in the nested-list form consumed by
          easy_xml (not an xml.dom.Element).
        """
        return ["Tool", self._attrs]


class Filter:
    """Visual Studio filter - that is, a virtual folder."""

    def __init__(self, name, contents=None):
        """Initializes the filter.

        Args:
          name: Filter (folder) name.
          contents: List of filenames and/or Filter objects contained.
        """
        self.name = name
        self.contents = list(contents or [])


# ------------------------------------------------------------------------------


class Writer:
    """Visual Studio XML project writer."""

    def __init__(self, project_path, version, name, guid=None, platforms=None):
        """Initializes the project.

        Args:
          project_path: Path to the project file.
          version: Format version to emit.
          name: Name of the project.
          guid: GUID to use for project, if not None.
          platforms: Array of string, the supported platforms.  If None,
              defaults to ['Win32'].
        """
        self.project_path = project_path
        self.version = version
        self.name = name
        self.guid = guid

        # Default to Win32 for platforms.
        if not platforms:
            platforms = ["Win32"]

        # Initialize the specifications of the various sections.
        self.platform_section = ["Platforms"]
        for platform in platforms:
            self.platform_section.append(["Platform", {"Name": platform}])
        self.tool_files_section = ["ToolFiles"]
        self.configurations_section = ["Configurations"]
        self.files_section = ["Files"]

        # Keep a dict keyed on filename to speed up access.
        self.files_dict = {}

    def AddToolFile(self, path):
        """Adds a tool file to the project.

        Args:
          path: Relative path from project to tool file.
        """
        self.tool_files_section.append(["ToolFile", {"RelativePath": path}])

    def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
        """Returns the specification for a configuration.

        Args:
          config_type: Type of configuration node.
          config_name: Configuration name.
          attrs: Dict of configuration attributes; may be None.
          tools: List of tools (strings or Tool objects); may be None.
        Returns:
          The configuration specification, in easy_xml list form.
        """
        # Handle defaults
        if not attrs:
            attrs = {}
        if not tools:
            tools = []

        # Add configuration node and its attributes
        node_attrs = attrs.copy()
        node_attrs["Name"] = config_name
        specification = [config_type, node_attrs]

        # Add tool nodes and their attributes
        if tools:
            for t in tools:
                if isinstance(t, Tool):
                    specification.append(t._GetSpecification())
                else:
                    specification.append(Tool(t)._GetSpecification())
        return specification

    def AddConfig(self, name, attrs=None, tools=None):
        """Adds a configuration to the project.

        Args:
          name: Configuration name.
          attrs: Dict of configuration attributes; may be None.
          tools: List of tools (strings or Tool objects); may be None.
        """
        spec = self._GetSpecForConfiguration("Configuration", name, attrs, tools)
        self.configurations_section.append(spec)

    def _AddFilesToNode(self, parent, files):
        """Adds files and/or filters to the parent node.

        Args:
          parent: Destination node
          files: A list of Filter objects and/or relative paths to files.

        Will call itself recursively, if the files list contains Filter objects.
        """
        for f in files:
            if isinstance(f, Filter):
                node = ["Filter", {"Name": f.name}]
                self._AddFilesToNode(node, f.contents)
            else:
                node = ["File", {"RelativePath": f}]
                self.files_dict[f] = node
            parent.append(node)

    def AddFiles(self, files):
        """Adds files to the project.

        Args:
          files: A list of Filter objects and/or relative paths to files.

        This makes a copy of the file/filter tree at the time of this call.  If
        you later add files to a Filter object which was passed into a previous
        call to AddFiles(), it will not be reflected in this project.
        """
        self._AddFilesToNode(self.files_section, files)
        # TODO(rspangler) This also doesn't handle adding files to an existing
        # filter.  That is, it doesn't merge the trees.

    def AddFileConfig(self, path, config, attrs=None, tools=None):
        """Adds a configuration to a file.

        Args:
          path: Relative path to the file.
          config: Name of configuration to add.
          attrs: Dict of configuration attributes; may be None.
          tools: List of tools (strings or Tool objects); may be None.

        Raises:
          ValueError: Relative path does not match any file added via AddFiles().
        """
        # Find the file node with the right relative path
        parent = self.files_dict.get(path)
        if not parent:
            raise ValueError('AddFileConfig: file "%s" not in project.' % path)

        # Add the config to the file node
        spec = self._GetSpecForConfiguration("FileConfiguration", config, attrs, tools)
        parent.append(spec)

    def WriteIfChanged(self):
        """Writes the project file."""
        # First create XML content definition
        content = [
            "VisualStudioProject",
            {
                "ProjectType": "Visual C++",
                "Version": self.version.ProjectVersion(),
                "Name": self.name,
                "ProjectGUID": self.guid,
                "RootNamespace": self.name,
                "Keyword": "Win32Proj",
            },
            self.platform_section,
            self.tool_files_section,
            self.configurations_section,
            ["References"],  # empty section
            self.files_section,
            ["Globals"],  # empty section
        ]
        easy_xml.WriteXmlIfChanged(content, self.project_path, encoding="Windows-1252")
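
# A minimal usage sketch (hypothetical names; `version` must provide
# ProjectVersion(), as used by WriteIfChanged() above):
#
#   w = Writer("foo.vcproj", version, "foo", guid="{...}")
#   w.AddConfig("Debug|Win32", attrs={"ConfigurationType": "1"})
#   w.AddFiles(["main.cc", Filter("headers", ["main.h"])])
#   w.WriteIfChanged()  # only touches disk if the XML content changed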

1270 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py generated vendored Normal file
File diff suppressed because it is too large
1547 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py generated vendored Normal file
File diff suppressed because it is too large
59 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py generated vendored Normal file
@@ -0,0 +1,59 @@
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Visual Studio project reader/writer."""
|
||||
|
||||
import gyp.easy_xml as easy_xml
|
||||
|
||||
|
||||
class Writer:
|
||||
"""Visual Studio XML tool file writer."""
|
||||
|
||||
def __init__(self, tool_file_path, name):
|
||||
"""Initializes the tool file.
|
||||
|
||||
Args:
|
||||
tool_file_path: Path to the tool file.
|
||||
name: Name of the tool file.
|
||||
"""
|
||||
self.tool_file_path = tool_file_path
|
||||
self.name = name
|
||||
self.rules_section = ["Rules"]
|
||||
|
||||
def AddCustomBuildRule(
|
||||
self, name, cmd, description, additional_dependencies, outputs, extensions
|
||||
):
|
||||
"""Adds a rule to the tool file.
|
||||
|
||||
Args:
|
||||
name: Name of the rule.
|
||||
description: Description of the rule.
|
||||
cmd: Command line of the rule.
|
||||
additional_dependencies: other files which may trigger the rule.
|
||||
outputs: outputs of the rule.
|
||||
extensions: extensions handled by the rule.
|
||||
"""
|
||||
rule = [
|
||||
"CustomBuildRule",
|
||||
{
|
||||
"Name": name,
|
||||
"ExecutionDescription": description,
|
||||
"CommandLine": cmd,
|
||||
"Outputs": ";".join(outputs),
|
||||
"FileExtensions": ";".join(extensions),
|
||||
"AdditionalDependencies": ";".join(additional_dependencies),
|
||||
},
|
||||
]
|
||||
self.rules_section.append(rule)
|
||||
|
||||
def WriteIfChanged(self):
|
||||
"""Writes the tool file."""
|
||||
content = [
|
||||
"VisualStudioToolFile",
|
||||
{"Version": "8.00", "Name": self.name},
|
||||
self.rules_section,
|
||||
]
|
||||
easy_xml.WriteXmlIfChanged(
|
||||
content, self.tool_file_path, encoding="Windows-1252"
|
||||
)
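
# A brief sketch of assembling a tool file (argument values invented):
#
#   w = Writer("foo.rules", "foo")
#   w.AddCustomBuildRule(
#       name="protoc",
#       cmd="protoc $(InputPath)",
#       description="Compiling proto",
#       additional_dependencies=[],
#       outputs=["$(InputName).pb.cc"],
#       extensions=["proto"],
#   )
#   w.WriteIfChanged()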

153 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py generated vendored Normal file
@@ -0,0 +1,153 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Visual Studio user preferences file writer."""

import os
import re
import socket  # for gethostname

import gyp.easy_xml as easy_xml


# ------------------------------------------------------------------------------


def _FindCommandInPath(command):
    """If there are no slashes in the command given, this function
    searches the PATH env to find the given command, and converts it
    to an absolute path.  We have to do this because MSVS is looking
    for an actual file to launch a debugger on, not just a command
    line.  Note that this happens at GYP time, so anything needing to
    be built needs to have a full path."""
    if "/" in command or "\\" in command:
        # If the command already has path elements (either relative or
        # absolute), then assume it is constructed properly.
        return command
    else:
        # Search through the path list and find an existing file that
        # we can access.
        paths = os.environ.get("PATH", "").split(os.pathsep)
        for path in paths:
            item = os.path.join(path, command)
            if os.path.isfile(item) and os.access(item, os.X_OK):
                return item
    return command


def _QuoteWin32CommandLineArgs(args):
    new_args = []
    for arg in args:
        # Replace all double-quotes with double-double-quotes to escape
        # them for cmd shell, and then quote the whole thing if there
        # are any.
        if arg.find('"') != -1:
            arg = '""'.join(arg.split('"'))
            arg = '"%s"' % arg

        # Otherwise, if there are any spaces, quote the whole arg.
        elif re.search(r"[ \t\n]", arg):
            arg = '"%s"' % arg
        new_args.append(arg)
    return new_args
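
# Worked example of the quoting rules above (values derived by hand):
#
#   _QuoteWin32CommandLineArgs(['a b', 'say "hi"', 'plain'])
#   # -> ['"a b"', '"say ""hi"""', 'plain']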


class Writer:
    """Visual Studio XML user file writer."""

    def __init__(self, user_file_path, version, name):
        """Initializes the user file.

        Args:
          user_file_path: Path to the user file.
          version: Version info.
          name: Name of the user file.
        """
        self.user_file_path = user_file_path
        self.version = version
        self.name = name
        self.configurations = {}

    def AddConfig(self, name):
        """Adds a configuration to the project.

        Args:
          name: Configuration name.
        """
        self.configurations[name] = ["Configuration", {"Name": name}]

    def AddDebugSettings(
        self, config_name, command, environment=None, working_directory=""
    ):
        """Adds a DebugSettings node to the user file for a particular config.

        Args:
          command: command line to run.  First element in the list is the
            executable.  All elements of the command will be quoted if
            necessary.
          environment: dict of environment variables to set (optional).
          working_directory: working directory for the command (optional).
        """
        # Note: `environment` defaults to None rather than a mutable {} so the
        # shared default cannot be accidentally modified between calls.
        command = _QuoteWin32CommandLineArgs(command)

        abs_command = _FindCommandInPath(command[0])

        if environment and isinstance(environment, dict):
            env_list = [f'{key}="{val}"' for (key, val) in environment.items()]
            environment = " ".join(env_list)
        else:
            environment = ""

        n_cmd = [
            "DebugSettings",
            {
                "Command": abs_command,
                "WorkingDirectory": working_directory,
                "CommandArguments": " ".join(command[1:]),
                "RemoteMachine": socket.gethostname(),
                "Environment": environment,
                "EnvironmentMerge": "true",
                # Currently these are all "dummy" values that we're just setting
                # in the default manner that MSVS does it.  We could use some of
                # these to add additional capabilities, I suppose, but they might
                # not have parity with other platforms then.
                "Attach": "false",
                "DebuggerType": "3",  # 'auto' debugger
                "Remote": "1",
                "RemoteCommand": "",
                "HttpUrl": "",
                "PDBPath": "",
                "SQLDebugging": "",
                "DebuggerFlavor": "0",
                "MPIRunCommand": "",
                "MPIRunArguments": "",
                "MPIRunWorkingDirectory": "",
                "ApplicationCommand": "",
                "ApplicationArguments": "",
                "ShimCommand": "",
                "MPIAcceptMode": "",
                "MPIAcceptFilter": "",
            },
        ]

        # Find the config, and add it if it doesn't exist.
        if config_name not in self.configurations:
            self.AddConfig(config_name)

        # Add the DebugSettings onto the appropriate config.
        self.configurations[config_name].append(n_cmd)

    def WriteIfChanged(self):
        """Writes the user file."""
        configs = ["Configurations"]
        for config, spec in sorted(self.configurations.items()):
            configs.append(spec)

        content = [
            "VisualStudioUserFile",
            {"Version": self.version.ProjectVersion(), "Name": self.name},
            configs,
        ]
        easy_xml.WriteXmlIfChanged(
            content, self.user_file_path, encoding="Windows-1252"
        )

271 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py generated vendored Normal file
@@ -0,0 +1,271 @@
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utility functions shared amongst the Windows generators."""

import copy
import os


# A dictionary mapping supported target types to extensions.
TARGET_TYPE_EXT = {
    "executable": "exe",
    "loadable_module": "dll",
    "shared_library": "dll",
    "static_library": "lib",
    "windows_driver": "sys",
}


def _GetLargePdbShimCcPath():
    """Returns the path of the large_pdb_shim.cc file."""
    this_dir = os.path.abspath(os.path.dirname(__file__))
    src_dir = os.path.abspath(os.path.join(this_dir, "..", ".."))
    win_data_dir = os.path.join(src_dir, "data", "win")
    large_pdb_shim_cc = os.path.join(win_data_dir, "large-pdb-shim.cc")
    return large_pdb_shim_cc


def _DeepCopySomeKeys(in_dict, keys):
    """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.

    Arguments:
      in_dict: The dictionary to copy.
      keys: The keys to be copied.  If a key is in this list and doesn't exist in
          |in_dict| this is not an error.
    Returns:
      The partially deep-copied dictionary.
    """
    d = {}
    for key in keys:
        if key not in in_dict:
            continue
        d[key] = copy.deepcopy(in_dict[key])
    return d


def _SuffixName(name, suffix):
    """Add a suffix to the end of a target.

    Arguments:
      name: name of the target (foo#target)
      suffix: the suffix to be added
    Returns:
      Target name with suffix added (foo_suffix#target)
    """
    parts = name.rsplit("#", 1)
    parts[0] = f"{parts[0]}_{suffix}"
    return "#".join(parts)


def _ShardName(name, number):
    """Add a shard number to the end of a target.

    Arguments:
      name: name of the target (foo#target)
      number: shard number
    Returns:
      Target name with shard added (foo_1#target)
    """
    return _SuffixName(name, str(number))
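
# Hand-worked examples, following the docstrings above:
#
#   _SuffixName("base/base.gyp:base#target", "copy")
#   # -> "base/base.gyp:base_copy#target"
#   _ShardName("base/base.gyp:base#target", 1)
#   # -> "base/base.gyp:base_1#target"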


def ShardTargets(target_list, target_dicts):
    """Shard some targets apart to work around the linker's limits.

    Arguments:
      target_list: List of target pairs: 'base/base.gyp:base'.
      target_dicts: Dict of target properties keyed on target pair.
    Returns:
      Tuple of the new sharded versions of the inputs.
    """
    # Gather the targets to shard, and how many pieces.
    targets_to_shard = {}
    for t in target_dicts:
        shards = int(target_dicts[t].get("msvs_shard", 0))
        if shards:
            targets_to_shard[t] = shards
    # Shard target_list.
    new_target_list = []
    for t in target_list:
        if t in targets_to_shard:
            for i in range(targets_to_shard[t]):
                new_target_list.append(_ShardName(t, i))
        else:
            new_target_list.append(t)
    # Shard target_dict.
    new_target_dicts = {}
    for t in target_dicts:
        if t in targets_to_shard:
            for i in range(targets_to_shard[t]):
                name = _ShardName(t, i)
                new_target_dicts[name] = copy.copy(target_dicts[t])
                new_target_dicts[name]["target_name"] = _ShardName(
                    new_target_dicts[name]["target_name"], i
                )
                sources = new_target_dicts[name].get("sources", [])
                new_sources = []
                for pos in range(i, len(sources), targets_to_shard[t]):
                    new_sources.append(sources[pos])
                new_target_dicts[name]["sources"] = new_sources
        else:
            new_target_dicts[t] = target_dicts[t]
    # Shard dependencies.
    for t in sorted(new_target_dicts):
        for deptype in ("dependencies", "dependencies_original"):
            dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
            new_dependencies = []
            for d in dependencies:
                if d in targets_to_shard:
                    for i in range(targets_to_shard[d]):
                        new_dependencies.append(_ShardName(d, i))
                else:
                    new_dependencies.append(d)
            new_target_dicts[t][deptype] = new_dependencies

    return (new_target_list, new_target_dicts)
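
# Hand-worked illustration: a target with 'msvs_shard': 2 and four sources is
# split round-robin -- shard 0 gets sources[0] and sources[2], shard 1 gets
# sources[1] and sources[3] (each shard i takes range(i, len(sources), 2)) --
# and anything that depended on the original target now depends on both shards.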


def _GetPdbPath(target_dict, config_name, vars):
    """Returns the path to the PDB file that will be generated by a given
    configuration.

    The lookup proceeds as follows:
      - Look for an explicit path in the VCLinkerTool configuration block.
      - Look for an 'msvs_large_pdb_path' variable.
      - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
        specified.
      - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.

    Arguments:
      target_dict: The target dictionary to be searched.
      config_name: The name of the configuration of interest.
      vars: A dictionary of common GYP variables with generator-specific values.
    Returns:
      The path of the corresponding PDB file.
    """
    config = target_dict["configurations"][config_name]
    msvs = config.setdefault("msvs_settings", {})

    linker = msvs.get("VCLinkerTool", {})

    pdb_path = linker.get("ProgramDatabaseFile")
    if pdb_path:
        return pdb_path

    variables = target_dict.get("variables", {})
    pdb_path = variables.get("msvs_large_pdb_path", None)
    if pdb_path:
        return pdb_path

    pdb_base = target_dict.get("product_name", target_dict["target_name"])
    pdb_base = "{}.{}.pdb".format(pdb_base, TARGET_TYPE_EXT[target_dict["type"]])
    pdb_path = vars["PRODUCT_DIR"] + "/" + pdb_base

    return pdb_path
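
# Worked example of the fallback above: with no VCLinkerTool override and no
# 'msvs_large_pdb_path' variable, a shared_library target named "foo" resolves
# to vars["PRODUCT_DIR"] + "/foo.dll.pdb".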


def InsertLargePdbShims(target_list, target_dicts, vars):
    """Insert a shim target that forces the linker to use 4KB pagesize PDBs.

    This is a workaround for targets with PDBs greater than 1GB in size, the
    limit for the 1KB pagesize PDBs created by the linker by default.

    Arguments:
      target_list: List of target pairs: 'base/base.gyp:base'.
      target_dicts: Dict of target properties keyed on target pair.
      vars: A dictionary of common GYP variables with generator-specific values.
    Returns:
      Tuple of the shimmed version of the inputs.
    """
    # Determine which targets need shimming.
    targets_to_shim = []
    for t in target_dicts:
        target_dict = target_dicts[t]

        # We only want to shim targets that have msvs_large_pdb enabled.
        if not int(target_dict.get("msvs_large_pdb", 0)):
            continue
        # This is intended for executable, shared_library and loadable_module
        # targets where every configuration is set up to produce a PDB output.
        # If any of these conditions is not true then the shim logic will fail
        # below.
        targets_to_shim.append(t)

    large_pdb_shim_cc = _GetLargePdbShimCcPath()

    for t in targets_to_shim:
        target_dict = target_dicts[t]
        target_name = target_dict.get("target_name")

        base_dict = _DeepCopySomeKeys(
            target_dict, ["configurations", "default_configuration", "toolset"]
        )

        # This is the dict for copying the source file (part of the GYP tree)
        # to the intermediate directory of the project.  This is necessary because
        # we can't always build a relative path to the shim source file (on Windows
        # GYP and the project may be on different drives), and Ninja hates absolute
        # paths (it ends up generating the .obj and .obj.d alongside the source
        # file, polluting GYP's tree).
        copy_suffix = "large_pdb_copy"
        copy_target_name = target_name + "_" + copy_suffix
        full_copy_target_name = _SuffixName(t, copy_suffix)
        shim_cc_basename = os.path.basename(large_pdb_shim_cc)
        shim_cc_dir = vars["SHARED_INTERMEDIATE_DIR"] + "/" + copy_target_name
        shim_cc_path = shim_cc_dir + "/" + shim_cc_basename
        copy_dict = copy.deepcopy(base_dict)
        copy_dict["target_name"] = copy_target_name
        copy_dict["type"] = "none"
        copy_dict["sources"] = [large_pdb_shim_cc]
        copy_dict["copies"] = [
            {"destination": shim_cc_dir, "files": [large_pdb_shim_cc]}
        ]

        # This is the dict for the PDB generating shim target.  It depends on the
        # copy target.
        shim_suffix = "large_pdb_shim"
        shim_target_name = target_name + "_" + shim_suffix
        full_shim_target_name = _SuffixName(t, shim_suffix)
        shim_dict = copy.deepcopy(base_dict)
        shim_dict["target_name"] = shim_target_name
        shim_dict["type"] = "static_library"
        shim_dict["sources"] = [shim_cc_path]
        shim_dict["dependencies"] = [full_copy_target_name]

        # Set up the shim to output its PDB to the same location as the final
        # linker target.
        for config_name, config in shim_dict.get("configurations").items():
            pdb_path = _GetPdbPath(target_dict, config_name, vars)

            # A few keys that we don't want to propagate.
            for key in ["msvs_precompiled_header", "msvs_precompiled_source", "test"]:
                config.pop(key, None)

            msvs = config.setdefault("msvs_settings", {})

            # Update the compiler directives in the shim target.
            compiler = msvs.setdefault("VCCLCompilerTool", {})
            compiler["DebugInformationFormat"] = "3"
            compiler["ProgramDataBaseFileName"] = pdb_path

            # Set the explicit PDB path in the appropriate configuration of the
            # original target.
            config = target_dict["configurations"][config_name]
            msvs = config.setdefault("msvs_settings", {})
            linker = msvs.setdefault("VCLinkerTool", {})
            linker["GenerateDebugInformation"] = "true"
            linker["ProgramDatabaseFile"] = pdb_path

        # Add the new targets.  They must go to the beginning of the list so that
        # the dependency generation works as expected in ninja.
        target_list.insert(0, full_copy_target_name)
        target_list.insert(0, full_shim_target_name)
        target_dicts[full_copy_target_name] = copy_dict
        target_dicts[full_shim_target_name] = shim_dict

        # Update the original target to depend on the shim target.
        target_dict.setdefault("dependencies", []).append(full_shim_target_name)

    return (target_list, target_dicts)

574 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py generated vendored Normal file
@@ -0,0 +1,574 @@
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Handle version information related to Visual Studio."""

import errno
import os
import re
import subprocess
import sys
import glob


def JoinPath(*args):
    return os.path.normpath(os.path.join(*args))


class VisualStudioVersion:
    """Information regarding a version of Visual Studio."""

    def __init__(
        self,
        short_name,
        description,
        solution_version,
        project_version,
        flat_sln,
        uses_vcxproj,
        path,
        sdk_based,
        default_toolset=None,
        compatible_sdks=None,
    ):
        self.short_name = short_name
        self.description = description
        self.solution_version = solution_version
        self.project_version = project_version
        self.flat_sln = flat_sln
        self.uses_vcxproj = uses_vcxproj
        self.path = path
        self.sdk_based = sdk_based
        self.default_toolset = default_toolset
        compatible_sdks = compatible_sdks or []
        compatible_sdks.sort(key=lambda v: float(v.replace("v", "")), reverse=True)
        self.compatible_sdks = compatible_sdks

    def ShortName(self):
        return self.short_name

    def Description(self):
        """Get the full description of the version."""
        return self.description

    def SolutionVersion(self):
        """Get the version number of the sln files."""
        return self.solution_version

    def ProjectVersion(self):
        """Get the version number of the vcproj or vcxproj files."""
        return self.project_version

    def FlatSolution(self):
        return self.flat_sln

    def UsesVcxproj(self):
        """Returns true if this version uses a vcxproj file."""
        return self.uses_vcxproj

    def ProjectExtension(self):
        """Returns the file extension for the project."""
        return ".vcxproj" if self.uses_vcxproj else ".vcproj"

    def Path(self):
        """Returns the path to the Visual Studio installation."""
        return self.path

    def ToolPath(self, tool):
        """Returns the path to a given compiler tool."""
        return os.path.normpath(os.path.join(self.path, "VC/bin", tool))

    def DefaultToolset(self):
        """Returns the msbuild toolset version that will be used in the absence
        of a user override."""
        return self.default_toolset

    def _SetupScriptInternal(self, target_arch):
        """Returns a command (with arguments) to be used to set up the
        environment."""
        assert target_arch in ("x86", "x64"), "target_arch not supported"
        # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
        # depot_tools build tools and should run SetEnv.Cmd to set up the
        # environment.  The check for WindowsSDKDir alone is not sufficient
        # because this is set by running vcvarsall.bat.
        sdk_dir = os.environ.get("WindowsSDKDir", "")
        setup_path = JoinPath(sdk_dir, "Bin", "SetEnv.Cmd")
        if self.sdk_based and sdk_dir and os.path.exists(setup_path):
            return [setup_path, "/" + target_arch]

        is_host_arch_x64 = (
            os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64"
            or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64"
        )

        # For VS2017 (and newer) it's fairly easy
        if self.short_name >= "2017":
            script_path = JoinPath(
                self.path, "VC", "Auxiliary", "Build", "vcvarsall.bat"
            )

            # Always use a native executable, cross-compiling if necessary.
            host_arch = "amd64" if is_host_arch_x64 else "x86"
            msvc_target_arch = "amd64" if target_arch == "x64" else "x86"
            arg = host_arch
            if host_arch != msvc_target_arch:
                arg += "_" + msvc_target_arch

            return [script_path, arg]

        # We try to find the best version of the env setup batch.
        vcvarsall = JoinPath(self.path, "VC", "vcvarsall.bat")
        if target_arch == "x86":
            if (
                self.short_name >= "2013"
                and self.short_name[-1] != "e"
                and is_host_arch_x64
            ):
                # VS2013 and later, non-Express have a x64-x86 cross that we want
                # to prefer.
                return [vcvarsall, "amd64_x86"]
            else:
                # Otherwise, the standard x86 compiler.  We don't use
                # VC/vcvarsall.bat for x86 because vcvarsall calls vcvars32,
                # which it can only find if VS??COMNTOOLS is set, which isn't
                # guaranteed.
                return [JoinPath(self.path, "Common7", "Tools", "vsvars32.bat")]
        elif target_arch == "x64":
            arg = "x86_amd64"
            # Use the 64-on-64 compiler if we're not using an express edition and
            # we're running on a 64bit OS.
            if self.short_name[-1] != "e" and is_host_arch_x64:
                arg = "amd64"
            return [vcvarsall, arg]

    def SetupScript(self, target_arch):
        script_data = self._SetupScriptInternal(target_arch)
        script_path = script_data[0]
        if not os.path.exists(script_path):
            raise Exception(
                "%s is missing - make sure VC++ tools are installed." % script_path
            )
        return script_data
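
# Illustration only (actual paths depend on the installation):
#
#   v = _CreateVersion("2022", r"C:\...\Microsoft Visual Studio\2022\Community")
#   v.SetupScript("x64")
#   # -> [r"...\VC\Auxiliary\Build\vcvarsall.bat", "amd64"] on an x64 host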


def _RegistryQueryBase(sysdir, key, value):
    """Use reg.exe to read a particular key.

    While ideally we might use the win32 module, we would like gyp to be
    python neutral, so for instance cygwin python lacks this module.

    Arguments:
      sysdir: The system subdirectory to attempt to launch reg.exe from.
      key: The registry key to read from.
      value: The particular value to read.
    Return:
      stdout from reg.exe, or None for failure.
    """
    # Skip if not on Windows or Python Win32 setup issue
    if sys.platform not in ("win32", "cygwin"):
        return None
    # Setup params to pass to and attempt to launch reg.exe
    cmd = [os.path.join(os.environ.get("WINDIR", ""), sysdir, "reg.exe"), "query", key]
    if value:
        cmd.extend(["/v", value])
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
    # Note that the error text may be in [1] in some cases
    text = p.communicate()[0].decode("utf-8")
    # Check return code from reg.exe; officially 0==success and 1==error
    if p.returncode:
        return None
    return text


def _RegistryQuery(key, value=None):
    r"""Use reg.exe to read a particular key through _RegistryQueryBase.

    First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection.  If
    that fails, it falls back to System32.  Sysnative is available on Vista and
    up and available on Windows Server 2003 and XP through KB patch 942589.  Note
    that Sysnative will always fail if using 64-bit python due to it being a
    virtual directory and System32 will work correctly in the first place.

    KB 942589 - http://support.microsoft.com/kb/942589/en-us.

    Arguments:
      key: The registry key.
      value: The particular registry value to read (optional).
    Return:
      stdout from reg.exe, or None for failure.
    """
    text = None
    try:
        text = _RegistryQueryBase("Sysnative", key, value)
    except OSError as e:
        if e.errno == errno.ENOENT:
            text = _RegistryQueryBase("System32", key, value)
        else:
            raise
    return text


def _RegistryGetValueUsingWinReg(key, value):
    """Use the _winreg module to obtain the value of a registry key.

    Args:
      key: The registry key.
      value: The particular registry value to read.
    Return:
      contents of the registry key's value, or None on failure.  Throws
      ImportError if winreg is unavailable.
    """
    from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx

    try:
        root, subkey = key.split("\\", 1)
        assert root == "HKLM"  # Only need HKLM for now.
        with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
            return QueryValueEx(hkey, value)[0]
    except OSError:
        return None


def _RegistryGetValue(key, value):
    """Use _winreg or reg.exe to obtain the value of a registry key.

    Using _winreg is preferable because it solves an issue on some corporate
    environments where access to reg.exe is locked down.  However, we still need
    to fallback to reg.exe for the case where the _winreg module is not available
    (for example in cygwin python).

    Args:
      key: The registry key.
      value: The particular registry value to read.
    Return:
      contents of the registry key's value, or None on failure.
    """
    try:
        return _RegistryGetValueUsingWinReg(key, value)
    except ImportError:
        pass

    # Fallback to reg.exe if we fail to import _winreg.
    text = _RegistryQuery(key, value)
    if not text:
        return None
    # Extract value.
    match = re.search(r"REG_\w+\s+([^\r]+)\r\n", text)
    if not match:
        return None
    return match.group(1)
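
# A typical call, as used by the detection code below (Windows-only; returns
# None elsewhere or when the key/value is missing):
#
#   _RegistryGetValue(r"HKLM\Software\Microsoft\VisualStudio\SxS\VC7", "15.0")
#   # -> installation path string, or None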


def _CreateVersion(name, path, sdk_based=False):
    """Sets up MSVS project generation.

    Setup is based on the GYP_MSVS_VERSION environment variable or whatever is
    autodetected if GYP_MSVS_VERSION is not explicitly specified.  If a version
    is passed in that doesn't match a value in versions python will throw an
    error.
    """
    if path:
        path = os.path.normpath(path)
    versions = {
        "2022": VisualStudioVersion(
            "2022",
            "Visual Studio 2022",
            solution_version="12.00",
            project_version="17.0",
            flat_sln=False,
            uses_vcxproj=True,
            path=path,
            sdk_based=sdk_based,
            default_toolset="v143",
            compatible_sdks=["v8.1", "v10.0"],
        ),
        "2019": VisualStudioVersion(
            "2019",
            "Visual Studio 2019",
            solution_version="12.00",
            project_version="16.0",
            flat_sln=False,
            uses_vcxproj=True,
            path=path,
            sdk_based=sdk_based,
            default_toolset="v142",
            compatible_sdks=["v8.1", "v10.0"],
        ),
        "2017": VisualStudioVersion(
            "2017",
            "Visual Studio 2017",
            solution_version="12.00",
            project_version="15.0",
            flat_sln=False,
            uses_vcxproj=True,
            path=path,
            sdk_based=sdk_based,
            default_toolset="v141",
            compatible_sdks=["v8.1", "v10.0"],
        ),
        "2015": VisualStudioVersion(
            "2015",
            "Visual Studio 2015",
            solution_version="12.00",
            project_version="14.0",
            flat_sln=False,
            uses_vcxproj=True,
            path=path,
            sdk_based=sdk_based,
            default_toolset="v140",
        ),
        "2013": VisualStudioVersion(
            "2013",
            "Visual Studio 2013",
            solution_version="13.00",
            project_version="12.0",
            flat_sln=False,
            uses_vcxproj=True,
            path=path,
            sdk_based=sdk_based,
            default_toolset="v120",
        ),
        "2013e": VisualStudioVersion(
            "2013e",
            "Visual Studio 2013",
            solution_version="13.00",
            project_version="12.0",
            flat_sln=True,
            uses_vcxproj=True,
            path=path,
            sdk_based=sdk_based,
            default_toolset="v120",
        ),
        "2012": VisualStudioVersion(
            "2012",
            "Visual Studio 2012",
            solution_version="12.00",
            project_version="4.0",
            flat_sln=False,
            uses_vcxproj=True,
            path=path,
            sdk_based=sdk_based,
            default_toolset="v110",
        ),
        "2012e": VisualStudioVersion(
            "2012e",
            "Visual Studio 2012",
            solution_version="12.00",
            project_version="4.0",
            flat_sln=True,
            uses_vcxproj=True,
            path=path,
            sdk_based=sdk_based,
            default_toolset="v110",
        ),
        "2010": VisualStudioVersion(
            "2010",
            "Visual Studio 2010",
            solution_version="11.00",
            project_version="4.0",
            flat_sln=False,
            uses_vcxproj=True,
            path=path,
            sdk_based=sdk_based,
        ),
        "2010e": VisualStudioVersion(
            "2010e",
            "Visual C++ Express 2010",
            solution_version="11.00",
            project_version="4.0",
            flat_sln=True,
            uses_vcxproj=True,
            path=path,
            sdk_based=sdk_based,
        ),
        "2008": VisualStudioVersion(
            "2008",
            "Visual Studio 2008",
            solution_version="10.00",
            project_version="9.00",
            flat_sln=False,
            uses_vcxproj=False,
            path=path,
            sdk_based=sdk_based,
        ),
        "2008e": VisualStudioVersion(
            "2008e",
            "Visual Studio 2008",
            solution_version="10.00",
            project_version="9.00",
            flat_sln=True,
            uses_vcxproj=False,
            path=path,
            sdk_based=sdk_based,
        ),
        "2005": VisualStudioVersion(
            "2005",
            "Visual Studio 2005",
            solution_version="9.00",
            project_version="8.00",
            flat_sln=False,
            uses_vcxproj=False,
            path=path,
            sdk_based=sdk_based,
        ),
        "2005e": VisualStudioVersion(
            "2005e",
            "Visual Studio 2005",
            solution_version="9.00",
            project_version="8.00",
            flat_sln=True,
            uses_vcxproj=False,
            path=path,
            sdk_based=sdk_based,
        ),
    }
    return versions[str(name)]


def _ConvertToCygpath(path):
    """Convert to cygwin path if we are using cygwin."""
    if sys.platform == "cygwin":
        p = subprocess.Popen(["cygpath", path], stdout=subprocess.PIPE)
        path = p.communicate()[0].decode("utf-8").strip()
    return path


def _DetectVisualStudioVersions(versions_to_check, force_express):
    """Collect the list of installed visual studio versions.

    Returns:
      A list of visual studio versions installed in descending order of
      usage preference.
      Base this on the registry and a quick check if devenv.exe exists.
      Possibilities are:
        2005(e) - Visual Studio 2005 (8)
        2008(e) - Visual Studio 2008 (9)
        2010(e) - Visual Studio 2010 (10)
        2012(e) - Visual Studio 2012 (11)
        2013(e) - Visual Studio 2013 (12)
        2015    - Visual Studio 2015 (14)
        2017    - Visual Studio 2017 (15)
        2019    - Visual Studio 2019 (16)
        2022    - Visual Studio 2022 (17)
      Where (e) is e for express editions of MSVS and blank otherwise.
    """
    version_to_year = {
        "8.0": "2005",
        "9.0": "2008",
        "10.0": "2010",
        "11.0": "2012",
        "12.0": "2013",
        "14.0": "2015",
        "15.0": "2017",
        "16.0": "2019",
        "17.0": "2022",
    }
    versions = []
    for version in versions_to_check:
        # Old method of searching for which VS version is installed
        # We don't use the 2010-encouraged-way because we also want to get the
        # path to the binaries, which it doesn't offer.
        keys = [
            r"HKLM\Software\Microsoft\VisualStudio\%s" % version,
            r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s" % version,
            r"HKLM\Software\Microsoft\VCExpress\%s" % version,
            r"HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s" % version,
        ]
        for index in range(len(keys)):
            path = _RegistryGetValue(keys[index], "InstallDir")
            if not path:
                continue
            path = _ConvertToCygpath(path)
            # Check for full.
            full_path = os.path.join(path, "devenv.exe")
            express_path = os.path.join(path, "*express.exe")
            if not force_express and os.path.exists(full_path):
                # Add this one.
                versions.append(
                    _CreateVersion(
                        version_to_year[version], os.path.join(path, "..", "..")
                    )
                )
            # Check for express.
            elif glob.glob(express_path):
                # Add this one.
                versions.append(
                    _CreateVersion(
                        version_to_year[version] + "e", os.path.join(path, "..", "..")
                    )
                )

        # The old method above does not work when only SDK is installed.
        keys = [
            r"HKLM\Software\Microsoft\VisualStudio\SxS\VC7",
            r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7",
            r"HKLM\Software\Microsoft\VisualStudio\SxS\VS7",
            r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7",
        ]
        for index in range(len(keys)):
            path = _RegistryGetValue(keys[index], version)
            if not path:
                continue
            path = _ConvertToCygpath(path)
            if version == "15.0":
                if os.path.exists(path):
                    versions.append(_CreateVersion("2017", path))
            elif version != "14.0":  # There is no Express edition for 2015.
                versions.append(
                    _CreateVersion(
                        version_to_year[version] + "e",
                        os.path.join(path, ".."),
                        sdk_based=True,
                    )
                )

    return versions


def SelectVisualStudioVersion(version="auto", allow_fallback=True):
    """Select which version of Visual Studio projects to generate.

    Arguments:
      version: Hook to allow caller to force a particular version (vs auto).
    Returns:
      An object representing a visual studio project format version.
    """
    # In auto mode, check environment variable for override.
    if version == "auto":
        version = os.environ.get("GYP_MSVS_VERSION", "auto")
    version_map = {
        "auto": ("17.0", "16.0", "15.0", "14.0", "12.0", "10.0", "9.0", "8.0", "11.0"),
        "2005": ("8.0",),
        "2005e": ("8.0",),
        "2008": ("9.0",),
        "2008e": ("9.0",),
        "2010": ("10.0",),
        "2010e": ("10.0",),
        "2012": ("11.0",),
        "2012e": ("11.0",),
        "2013": ("12.0",),
        "2013e": ("12.0",),
        "2015": ("14.0",),
        "2017": ("15.0",),
        "2019": ("16.0",),
        "2022": ("17.0",),
    }
    override_path = os.environ.get("GYP_MSVS_OVERRIDE_PATH")
    if override_path:
        msvs_version = os.environ.get("GYP_MSVS_VERSION")
        if not msvs_version:
            raise ValueError(
                "GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be "
                "set to a particular version (e.g. 2010e)."
            )
        return _CreateVersion(msvs_version, override_path, sdk_based=True)
    version = str(version)
    versions = _DetectVisualStudioVersions(version_map[version], "e" in version)
    if not versions:
        if not allow_fallback:
            raise ValueError("Could not locate Visual Studio installation.")
        if version == "auto":
            # Default to 2005 if we couldn't find anything
            return _CreateVersion("2005", None)
        else:
            return _CreateVersion(version, None)
    return versions[0]
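
# Minimal sketches of the entry point (results depend on the environment):
#
#   version = SelectVisualStudioVersion()        # honors GYP_MSVS_VERSION
#   version = SelectVisualStudioVersion("2019")  # force VS2019 detection
#   version.ProjectExtension()                   # ".vcxproj" for 2010 and later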

692 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/__init__.py generated vendored Normal file
@@ -0,0 +1,692 @@
#!/usr/bin/env python3

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


import copy
import gyp.input
import argparse
import os.path
import re
import shlex
import sys
import traceback
from gyp.common import GypError


# Default debug modes for GYP
debug = {}

# List of "official" debug modes, but you can use anything you like.
DEBUG_GENERAL = "general"
DEBUG_VARIABLES = "variables"
DEBUG_INCLUDES = "includes"


def DebugOutput(mode, message, *args):
    if "all" in gyp.debug or mode in gyp.debug:
        ctx = ("unknown", 0, "unknown")
        try:
            f = traceback.extract_stack(limit=2)
            if f:
                ctx = f[0][:3]
        except Exception:
            pass
        if args:
            message %= args
        print(
            "%s:%s:%d:%s %s"
            % (mode.upper(), os.path.basename(ctx[0]), ctx[1], ctx[2], message)
        )


def FindBuildFiles():
    extension = ".gyp"
    files = os.listdir(os.getcwd())
    build_files = []
    for file in files:
        if file.endswith(extension):
            build_files.append(file)
    return build_files


def Load(
    build_files,
    format,
    default_variables={},
    includes=[],
    depth=".",
    params=None,
    check=False,
    circular_check=True,
):
    """
    Loads one or more specified build files.
    default_variables and includes will be copied before use.
    Returns the generator for the specified format and the
    data returned by loading the specified build files.
    """
    if params is None:
        params = {}

    if "-" in format:
        format, params["flavor"] = format.split("-", 1)

    default_variables = copy.copy(default_variables)

    # Default variables provided by this program and its modules should be
    # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
    # avoiding collisions with user and automatic variables.
    default_variables["GENERATOR"] = format
    default_variables["GENERATOR_FLAVOR"] = params.get("flavor", "")

    # Format can be a custom python file, or by default the name of a module
    # within gyp.generator.
    if format.endswith(".py"):
        generator_name = os.path.splitext(format)[0]
        path, generator_name = os.path.split(generator_name)

        # Make sure the path to the custom generator is in sys.path
        # Don't worry about removing it once we are done.  Keeping the path
        # to each generator that is used in sys.path is likely harmless and
        # arguably a good idea.
        path = os.path.abspath(path)
        if path not in sys.path:
            sys.path.insert(0, path)
    else:
        generator_name = "gyp.generator." + format

    # These parameters are passed in order (as opposed to by key)
    # because ActivePython cannot handle key parameters to __import__.
    generator = __import__(generator_name, globals(), locals(), generator_name)
    for (key, val) in generator.generator_default_variables.items():
        default_variables.setdefault(key, val)

    output_dir = params["options"].generator_output or params["options"].toplevel_dir
    if default_variables["GENERATOR"] == "ninja":
        default_variables.setdefault(
            "PRODUCT_DIR_ABS",
            os.path.join(
                output_dir, "out", default_variables.get("build_type", "default")
            ),
        )
    else:
        default_variables.setdefault(
            "PRODUCT_DIR_ABS",
            os.path.join(output_dir, default_variables["CONFIGURATION_NAME"]),
        )

    # Give the generator the opportunity to set additional variables based on
    # the params it will receive in the output phase.
    if getattr(generator, "CalculateVariables", None):
        generator.CalculateVariables(default_variables, params)

    # Give the generator the opportunity to set generator_input_info based on
    # the params it will receive in the output phase.
    if getattr(generator, "CalculateGeneratorInputInfo", None):
        generator.CalculateGeneratorInputInfo(params)

    # Fetch the generator specific info that gets fed to input, we use getattr
    # so we can default things and the generators only have to provide what
    # they need.
    generator_input_info = {
        "non_configuration_keys": getattr(
            generator, "generator_additional_non_configuration_keys", []
        ),
        "path_sections": getattr(generator, "generator_additional_path_sections", []),
        "extra_sources_for_rules": getattr(
            generator, "generator_extra_sources_for_rules", []
        ),
        "generator_supports_multiple_toolsets": getattr(
            generator, "generator_supports_multiple_toolsets", False
        ),
        "generator_wants_static_library_dependencies_adjusted": getattr(
            generator, "generator_wants_static_library_dependencies_adjusted", True
        ),
        "generator_wants_sorted_dependencies": getattr(
            generator, "generator_wants_sorted_dependencies", False
        ),
        "generator_filelist_paths": getattr(
            generator, "generator_filelist_paths", None
        ),
    }

    # Process the input specific to this generator.
    result = gyp.input.Load(
        build_files,
        default_variables,
        includes[:],
        depth,
        generator_input_info,
        check,
        circular_check,
        params["parallel"],
        params["root_targets"],
    )
    return [generator] + result


def NameValueListToDict(name_value_list):
    """
    Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
    of the pairs.  If a string is simply NAME, then the value in the dictionary
    is set to True.  If VALUE can be converted to an integer, it is.
    """
    result = {}
    for item in name_value_list:
        tokens = item.split("=", 1)
        if len(tokens) == 2:
            # If we can make it an int, use that, otherwise, use the string.
            try:
                token_value = int(tokens[1])
            except ValueError:
                token_value = tokens[1]
            # Set the variable to the supplied value.
            result[tokens[0]] = token_value
        else:
            # No value supplied, treat it as a boolean and set it.
            result[tokens[0]] = True
    return result
|
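# Usage note (editor's illustration, not from the vendored file):
#     >>> NameValueListToDict(["OS=linux", "jobs=8", "clang"])
#     {'OS': 'linux', 'jobs': 8, 'clang': True}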
def ShlexEnv(env_name):
    flags = os.environ.get(env_name, [])
    if flags:
        flags = shlex.split(flags)
    return flags


def FormatOpt(opt, value):
    if opt.startswith("--"):
        return f"{opt}={value}"
    return opt + value


def RegenerateAppendFlag(flag, values, predicate, env_name, options):
    """Regenerate a list of command line flags, for an option of action='append'.

    The |env_name|, if given, is checked in the environment and used to generate
    an initial list of options, then the options that were specified on the
    command line (given in |values|) are appended.  This matches the handling of
    environment variables and command line flags where command line flags override
    the environment, while not requiring the environment to be set when the flags
    are used again.
    """
    flags = []
    if options.use_environment and env_name:
        for flag_value in ShlexEnv(env_name):
            value = FormatOpt(flag, predicate(flag_value))
            if value in flags:
                flags.remove(value)
            flags.append(value)
    if values:
        for flag_value in values:
            flags.append(FormatOpt(flag, predicate(flag_value)))
    return flags


def RegenerateFlags(options):
    """Given a parsed options object, and taking the environment variables into
    account, returns a list of flags that should regenerate an equivalent options
    object (even in the absence of the environment variables.)

    Any path options will be normalized relative to depth.

    The format flag is not included, as it is assumed the calling generator will
    set that as appropriate.
    """

    def FixPath(path):
        path = gyp.common.FixIfRelativePath(path, options.depth)
        if not path:
            return os.path.curdir
        return path

    def Noop(value):
        return value

    # We always want to ignore the environment when regenerating, to avoid
    # duplicate or changed flags in the environment at the time of regeneration.
    flags = ["--ignore-environment"]
    for name, metadata in options._regeneration_metadata.items():
        opt = metadata["opt"]
        value = getattr(options, name)
        value_predicate = metadata["type"] == "path" and FixPath or Noop
        action = metadata["action"]
        env_name = metadata["env_name"]
        if action == "append":
            flags.extend(
                RegenerateAppendFlag(opt, value, value_predicate, env_name, options)
            )
        elif action in ("store", None):  # None is a synonym for 'store'.
            if value:
                flags.append(FormatOpt(opt, value_predicate(value)))
            elif options.use_environment and env_name and os.environ.get(env_name):
                flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
        elif action in ("store_true", "store_false"):
            if (action == "store_true" and value) or (
                action == "store_false" and not value
            ):
                flags.append(opt)
            elif options.use_environment and env_name:
                print(
                    "Warning: environment regeneration unimplemented "
                    "for %s flag %r env_name %r" % (action, opt, env_name),
                    file=sys.stderr,
                )
        else:
            print(
                "Warning: regeneration unimplemented for action %r "
                "flag %r" % (action, opt),
                file=sys.stderr,
            )

    return flags


class RegeneratableOptionParser(argparse.ArgumentParser):
    def __init__(self, usage):
        self.__regeneratable_options = {}
        argparse.ArgumentParser.__init__(self, usage=usage)

    def add_argument(self, *args, **kw):
        """Add an option to the parser.

        This accepts the same arguments as ArgumentParser.add_argument, plus the
        following:
          regenerate: can be set to False to prevent this option from being
                      included in regeneration.
          env_name: name of environment variable that additional values for this
                    option come from.
          type: adds type='path', to tell the regenerator that the values of
                this option need to be made relative to options.depth
        """
        env_name = kw.pop("env_name", None)
        if "dest" in kw and kw.pop("regenerate", True):
            dest = kw["dest"]

            # The path type is needed for regenerating, for optparse we can just
            # treat it as a string.
            type = kw.get("type")
            if type == "path":
                kw["type"] = str

            self.__regeneratable_options[dest] = {
                "action": kw.get("action"),
                "type": type,
                "env_name": env_name,
                "opt": args[0],
            }

        argparse.ArgumentParser.add_argument(self, *args, **kw)

    def parse_args(self, *args):
        values, args = argparse.ArgumentParser.parse_known_args(self, *args)
        values._regeneration_metadata = self.__regeneratable_options
        return values, args


def gyp_main(args):
    my_name = os.path.basename(sys.argv[0])
    usage = "usage: %s [options ...] [build_file ...]"

    parser = RegeneratableOptionParser(usage=usage.replace("%s", "%(prog)s"))
    parser.add_argument(
        "--build",
        dest="configs",
        action="append",
        help="configuration for build after project generation",
    )
    parser.add_argument(
        "--check", dest="check", action="store_true", help="check format of gyp files"
    )
    parser.add_argument(
        "--config-dir",
        dest="config_dir",
        action="store",
        env_name="GYP_CONFIG_DIR",
        default=None,
        help="The location for configuration files like include.gypi.",
    )
    parser.add_argument(
        "-d",
        "--debug",
        dest="debug",
        metavar="DEBUGMODE",
        action="append",
        default=[],
        help="turn on a debugging "
        'mode for debugging GYP. Supported modes are "variables", '
        '"includes" and "general" or "all" for all of them.',
    )
    parser.add_argument(
        "-D",
        dest="defines",
        action="append",
        metavar="VAR=VAL",
        env_name="GYP_DEFINES",
        help="sets variable VAR to value VAL",
    )
    parser.add_argument(
        "--depth",
        dest="depth",
        metavar="PATH",
        type="path",
        help="set DEPTH gyp variable to a relative path to PATH",
    )
    parser.add_argument(
        "-f",
        "--format",
        dest="formats",
        action="append",
        env_name="GYP_GENERATORS",
        regenerate=False,
        help="output formats to generate",
    )
    parser.add_argument(
        "-G",
        dest="generator_flags",
        action="append",
        default=[],
        metavar="FLAG=VAL",
        env_name="GYP_GENERATOR_FLAGS",
        help="sets generator flag FLAG to VAL",
    )
    parser.add_argument(
        "--generator-output",
        dest="generator_output",
        action="store",
        default=None,
        metavar="DIR",
        type="path",
        env_name="GYP_GENERATOR_OUTPUT",
        help="puts generated build files under DIR",
    )
    parser.add_argument(
        "--ignore-environment",
        dest="use_environment",
        action="store_false",
        default=True,
        regenerate=False,
        help="do not read options from environment variables",
    )
    parser.add_argument(
        "-I",
        "--include",
        dest="includes",
        action="append",
        metavar="INCLUDE",
        type="path",
        help="files to include in all loaded .gyp files",
    )
    # --no-circular-check disables the check for circular relationships between
    # .gyp files.  These relationships should not exist, but they've only been
    # observed to be harmful with the Xcode generator.  Chromium's .gyp files
    # currently have some circular relationships on non-Mac platforms, so this
    # option allows the strict behavior to be used on Macs and the lenient
    # behavior to be used elsewhere.
    # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
    parser.add_argument(
        "--no-circular-check",
        dest="circular_check",
        action="store_false",
        default=True,
        regenerate=False,
        help="don't check for circular relationships between files",
    )
    parser.add_argument(
        "--no-parallel",
        action="store_true",
        default=False,
        help="Disable multiprocessing",
    )
    parser.add_argument(
        "-S",
        "--suffix",
        dest="suffix",
        default="",
        help="suffix to add to generated files",
    )
    parser.add_argument(
        "--toplevel-dir",
        dest="toplevel_dir",
        action="store",
        default=None,
        metavar="DIR",
        type="path",
        help="directory to use as the root of the source tree",
    )
    parser.add_argument(
        "-R",
        "--root-target",
        dest="root_targets",
        action="append",
        metavar="TARGET",
        help="include only TARGET and its deep dependencies",
    )
    parser.add_argument(
        "-V",
        "--version",
        dest="version",
        action="store_true",
        help="Show the version and exit.",
    )

    options, build_files_arg = parser.parse_args(args)
    if options.version:
        import pkg_resources
        print(f"v{pkg_resources.get_distribution('gyp-next').version}")
        return 0
    build_files = build_files_arg

    # Set up the configuration directory (defaults to ~/.gyp)
    if not options.config_dir:
        home = None
        home_dot_gyp = None
        if options.use_environment:
            home_dot_gyp = os.environ.get("GYP_CONFIG_DIR", None)
            if home_dot_gyp:
                home_dot_gyp = os.path.expanduser(home_dot_gyp)

        if not home_dot_gyp:
            home_vars = ["HOME"]
            if sys.platform in ("cygwin", "win32"):
                home_vars.append("USERPROFILE")
            for home_var in home_vars:
                home = os.getenv(home_var)
                if home:
                    home_dot_gyp = os.path.join(home, ".gyp")
                    if not os.path.exists(home_dot_gyp):
                        home_dot_gyp = None
                    else:
                        break
    else:
        home_dot_gyp = os.path.expanduser(options.config_dir)

    if home_dot_gyp and not os.path.exists(home_dot_gyp):
        home_dot_gyp = None

    if not options.formats:
        # If no format was given on the command line, then check the env variable.
        generate_formats = []
        if options.use_environment:
            generate_formats = os.environ.get("GYP_GENERATORS", [])
        if generate_formats:
            generate_formats = re.split(r"[\s,]", generate_formats)
        if generate_formats:
            options.formats = generate_formats
        else:
            # Nothing in the variable, default based on platform.
            if sys.platform == "darwin":
                options.formats = ["xcode"]
            elif sys.platform in ("win32", "cygwin"):
                options.formats = ["msvs"]
            else:
                options.formats = ["make"]

    if not options.generator_output and options.use_environment:
        g_o = os.environ.get("GYP_GENERATOR_OUTPUT")
        if g_o:
            options.generator_output = g_o

    options.parallel = not options.no_parallel

    for mode in options.debug:
        gyp.debug[mode] = 1

    # Do an extra check to avoid work when we're not debugging.
    if DEBUG_GENERAL in gyp.debug:
        DebugOutput(DEBUG_GENERAL, "running with these options:")
        for option, value in sorted(options.__dict__.items()):
            if option[0] == "_":
                continue
            if isinstance(value, str):
                DebugOutput(DEBUG_GENERAL, "  %s: '%s'", option, value)
            else:
                DebugOutput(DEBUG_GENERAL, "  %s: %s", option, value)

    if not build_files:
        build_files = FindBuildFiles()
    if not build_files:
        raise GypError((usage + "\n\n%s: error: no build_file") % (my_name, my_name))

    # TODO(mark): Chromium-specific hack!
    # For Chromium, the gyp "depth" variable should always be a relative path
    # to Chromium's top-level "src" directory.  If no depth variable was set
    # on the command line, try to find a "src" directory by looking at the
    # absolute path to each build file's directory.  The first "src" component
    # found will be treated as though it were the path used for --depth.
    if not options.depth:
        for build_file in build_files:
            build_file_dir = os.path.abspath(os.path.dirname(build_file))
            build_file_dir_components = build_file_dir.split(os.path.sep)
            components_len = len(build_file_dir_components)
            for index in range(components_len - 1, -1, -1):
                if build_file_dir_components[index] == "src":
                    options.depth = os.path.sep.join(build_file_dir_components)
                    break
                del build_file_dir_components[index]

            # If the inner loop found something, break without advancing to another
            # build file.
            if options.depth:
                break

        if not options.depth:
            raise GypError(
                "Could not automatically locate src directory.  This is "
                "a temporary Chromium feature that will be removed.  Use "
                "--depth as a workaround."
            )

    # If toplevel-dir is not set, we assume that depth is the root of our source
    # tree.
    if not options.toplevel_dir:
        options.toplevel_dir = options.depth

    # -D on the command line sets variable defaults - D isn't just for define,
    # it's for default.  Perhaps there should be a way to force (-F?) a
    # variable's value so that it can't be overridden by anything else.
    cmdline_default_variables = {}
    defines = []
    if options.use_environment:
        defines += ShlexEnv("GYP_DEFINES")
    if options.defines:
        defines += options.defines
    cmdline_default_variables = NameValueListToDict(defines)
    if DEBUG_GENERAL in gyp.debug:
        DebugOutput(
            DEBUG_GENERAL, "cmdline_default_variables: %s", cmdline_default_variables
        )

    # Set up includes.
    includes = []

    # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
    # .gyp file that's loaded, before anything else is included.
    if home_dot_gyp:
        default_include = os.path.join(home_dot_gyp, "include.gypi")
        if os.path.exists(default_include):
            print("Using overrides found in " + default_include)
            includes.append(default_include)

    # Command-line --include files come after the default include.
    if options.includes:
        includes.extend(options.includes)

    # Generator flags should be prefixed with the target generator since they
    # are global across all generator runs.
    gen_flags = []
    if options.use_environment:
        gen_flags += ShlexEnv("GYP_GENERATOR_FLAGS")
    if options.generator_flags:
        gen_flags += options.generator_flags
    generator_flags = NameValueListToDict(gen_flags)
    if DEBUG_GENERAL in gyp.debug:
        DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)

    # Generate all requested formats (use a set in case we got one format request
    # twice)
    for format in set(options.formats):
        params = {
            "options": options,
            "build_files": build_files,
            "generator_flags": generator_flags,
            "cwd": os.getcwd(),
            "build_files_arg": build_files_arg,
            "gyp_binary": sys.argv[0],
            "home_dot_gyp": home_dot_gyp,
            "parallel": options.parallel,
            "root_targets": options.root_targets,
            "target_arch": cmdline_default_variables.get("target_arch", ""),
        }

        # Start with the default variables from the command line.
        [generator, flat_list, targets, data] = Load(
            build_files,
            format,
            cmdline_default_variables,
            includes,
            options.depth,
            params,
            options.check,
            options.circular_check,
        )

        # TODO(mark): Pass |data| for now because the generator needs a list of
        # build files that came in.  In the future, maybe it should just accept
        # a list, and not the whole data dict.
        # NOTE: flat_list is the flattened dependency graph specifying the order
        # that targets may be built.  Build systems that operate serially or that
        # need to have dependencies defined before dependents reference them should
        # generate targets in the order specified in flat_list.
        generator.GenerateOutput(flat_list, targets, data, params)

        if options.configs:
            valid_configs = targets[flat_list[0]]["configurations"]
            for conf in options.configs:
                if conf not in valid_configs:
                    raise GypError("Invalid config specified via --build: %s" % conf)
            generator.PerformBuild(data, options.configs, params)

    # Done
    return 0


def main(args):
    try:
        return gyp_main(args)
    except GypError as e:
        sys.stderr.write("gyp: %s\n" % e)
        return 1


# NOTE: setuptools generated console_scripts calls function with no arguments
def script_main():
    return main(sys.argv[1:])


if __name__ == "__main__":
    sys.exit(script_main())
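The entry points above can also be driven programmatically. A minimal sketch (hello.gyp and the ninja format choice are illustrative assumptions, not taken from this diff):

import gyp

# Same effect as running: gyp --depth=. -f ninja hello.gyp
rc = gyp.main(["--depth", ".", "-f", "ninja", "hello.gyp"])  # hello.gyp is hypothetical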
654 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/common.py generated vendored Normal file
@@ -0,0 +1,654 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import errno
import filecmp
import os.path
import re
import tempfile
import sys
import subprocess

from collections.abc import MutableSet


# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
# among other "problems".
class memoize:
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        try:
            return self.cache[args]
        except KeyError:
            result = self.func(*args)
            self.cache[args] = result
            return result
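# Usage note (editor's illustration, not from the vendored file): the cache
# key is the positional-args tuple, so arguments must be hashable.
#     >>> @memoize
#     ... def add(a, b):
#     ...     return a + b
#     >>> add(1, 2)
#     3
#     >>> add(1, 2)  # second call is served from add.cache
#     3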
class GypError(Exception):
    """Error class representing an error, which is to be presented
    to the user.  The main entry point will catch and display this.
    """

    pass


def ExceptionAppend(e, msg):
    """Append a message to the given exception's message."""
    if not e.args:
        e.args = (msg,)
    elif len(e.args) == 1:
        e.args = (str(e.args[0]) + " " + msg,)
    else:
        e.args = (str(e.args[0]) + " " + msg,) + e.args[1:]


def FindQualifiedTargets(target, qualified_list):
    """
    Given a list of qualified targets, return the qualified targets for the
    specified |target|.
    """
    return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]


def ParseQualifiedTarget(target):
    # Splits a qualified target into a build file, target name and toolset.

    # NOTE: rsplit is used to disambiguate the Windows drive letter separator.
    target_split = target.rsplit(":", 1)
    if len(target_split) == 2:
        [build_file, target] = target_split
    else:
        build_file = None

    target_split = target.rsplit("#", 1)
    if len(target_split) == 2:
        [target, toolset] = target_split
    else:
        toolset = None

    return [build_file, target, toolset]
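# Usage note (editor's illustration; the paths are made up):
#     >>> ParseQualifiedTarget("chrome/chrome.gyp:browser#host")
#     ['chrome/chrome.gyp', 'browser', 'host']
#     >>> ParseQualifiedTarget("browser")
#     [None, 'browser', None]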
def ResolveTarget(build_file, target, toolset):
    # This function resolves a target into a canonical form:
    # - a fully defined build file, either absolute or relative to the current
    #   directory
    # - a target name
    # - a toolset
    #
    # build_file is the file relative to which 'target' is defined.
    # target is the qualified target.
    # toolset is the default toolset for that target.
    [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)

    if parsed_build_file:
        if build_file:
            # If a relative path, parsed_build_file is relative to the directory
            # containing build_file.  If build_file is not in the current
            # directory, parsed_build_file is not a usable path as-is.  Resolve
            # it by interpreting it as relative to build_file.  If
            # parsed_build_file is absolute, it is usable as a path regardless of
            # the current directory, and os.path.join will return it as-is.
            build_file = os.path.normpath(
                os.path.join(os.path.dirname(build_file), parsed_build_file)
            )
            # Further (to handle cases like ../cwd), make it relative to cwd)
            if not os.path.isabs(build_file):
                build_file = RelativePath(build_file, ".")
        else:
            build_file = parsed_build_file

    if parsed_toolset:
        toolset = parsed_toolset

    return [build_file, target, toolset]


def BuildFile(fully_qualified_target):
    # Extracts the build file from the fully qualified target.
    return ParseQualifiedTarget(fully_qualified_target)[0]


def GetEnvironFallback(var_list, default):
    """Look up a key in the environment, with fallback to secondary keys
    and finally falling back to a default value."""
    for var in var_list:
        if var in os.environ:
            return os.environ[var]
    return default


def QualifiedTarget(build_file, target, toolset):
    # "Qualified" means the file that a target was defined in and the target
    # name, separated by a colon, suffixed by a # and the toolset name:
    # /path/to/file.gyp:target_name#toolset
    fully_qualified = build_file + ":" + target
    if toolset:
        fully_qualified = fully_qualified + "#" + toolset
    return fully_qualified


@memoize
def RelativePath(path, relative_to, follow_path_symlink=True):
    # Assuming both |path| and |relative_to| are relative to the current
    # directory, returns a relative path that identifies path relative to
    # relative_to.
    # If |follow_path_symlink| is true (default) and |path| is a symlink, then
    # this method returns a path to the real file represented by |path|. If it
    # is false, this method returns a path to the symlink. If |path| is not a
    # symlink, this option has no effect.

    # Convert to normalized (and therefore absolute paths).
    path = os.path.realpath(path) if follow_path_symlink else os.path.abspath(path)
    relative_to = os.path.realpath(relative_to)

    # On Windows, we can't create a relative path to a different drive, so just
    # use the absolute path.
    if sys.platform == "win32" and (
        os.path.splitdrive(path)[0].lower()
        != os.path.splitdrive(relative_to)[0].lower()
    ):
        return path

    # Split the paths into components.
    path_split = path.split(os.path.sep)
    relative_to_split = relative_to.split(os.path.sep)

    # Determine how much of the prefix the two paths share.
    prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))

    # Put enough ".." components to back up out of relative_to to the common
    # prefix, and then append the part of path_split after the common prefix.
    relative_split = [os.path.pardir] * (
        len(relative_to_split) - prefix_len
    ) + path_split[prefix_len:]

    if len(relative_split) == 0:
        # The paths were the same.
        return ""

    # Turn it back into a string and we're done.
    return os.path.join(*relative_split)
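# Usage note (editor's illustration, assuming both paths sit under the
# current directory and involve no symlinks):
#     >>> RelativePath("foo/bar/baz", "foo")
#     'bar/baz'
#     >>> RelativePath("foo", "foo/bar/baz")
#     '../..'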
@memoize
def InvertRelativePath(path, toplevel_dir=None):
    """Given a path like foo/bar that is relative to toplevel_dir, return
    the inverse relative path back to the toplevel_dir.

    E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
    should always produce the empty string, unless the path contains symlinks.
    """
    if not path:
        return path
    toplevel_dir = "." if toplevel_dir is None else toplevel_dir
    return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))


def FixIfRelativePath(path, relative_to):
    # Like RelativePath but returns |path| unchanged if it is absolute.
    if os.path.isabs(path):
        return path
    return RelativePath(path, relative_to)


def UnrelativePath(path, relative_to):
    # Assuming that |relative_to| is relative to the current directory, and
    # |path| is a path relative to the dirname of |relative_to|, returns a path
    # that identifies |path| relative to the current directory.
    rel_dir = os.path.dirname(relative_to)
    return os.path.normpath(os.path.join(rel_dir, path))


# re objects used by EncodePOSIXShellArgument.  See IEEE 1003.1 XCU.2.2 at
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
# and the documentation for various shells.

# _quote is a pattern that should match any argument that needs to be quoted
# with double-quotes by EncodePOSIXShellArgument.  It matches the following
# characters appearing anywhere in an argument:
#   \t, \n, space  parameter separators
#   #              comments
#   $              expansions (quoted to always expand within one argument)
#   %              called out by IEEE 1003.1 XCU.2.2
#   &              job control
#   '              quoting
#   (, )           subshell execution
#   *, ?, [        pathname expansion
#   ;              command delimiter
#   <, >, |        redirection
#   =              assignment
#   {, }           brace expansion (bash)
#   ~              tilde expansion
# It also matches the empty string, because "" (or '') is the only way to
# represent an empty string literal argument to a POSIX shell.
#
# This does not match the characters in _escape, because those need to be
# backslash-escaped regardless of whether they appear in a double-quoted
# string.
_quote = re.compile("[\t\n #$%&'()*;<=>?[{|}~]|^$")

# _escape is a pattern that should match any character that needs to be
# escaped with a backslash, whether or not the argument matched the _quote
# pattern.  _escape is used with re.sub to backslash anything in _escape's
# first match group, hence the (parentheses) in the regular expression.
#
# _escape matches the following characters appearing anywhere in an argument:
#   "  to prevent POSIX shells from interpreting this character for quoting
#   \  to prevent POSIX shells from interpreting this character for escaping
#   `  to prevent POSIX shells from interpreting this character for command
#      substitution
# Missing from this list is $, because the desired behavior of
# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
#
# Also missing from this list is !, which bash will interpret as the history
# expansion character when history is enabled.  bash does not enable history
# by default in non-interactive shells, so this is not thought to be a problem.
# ! was omitted from this list because bash interprets "\!" as a literal string
# including the backslash character (avoiding history expansion but retaining
# the backslash), which would not be correct for argument encoding.  Handling
# this case properly would also be problematic because bash allows the history
# character to be changed with the histchars shell variable.  Fortunately,
# as history is not enabled in non-interactive shells and
# EncodePOSIXShellArgument is only expected to encode for non-interactive
# shells, there is no room for error here by ignoring !.
_escape = re.compile(r'(["\\`])')


def EncodePOSIXShellArgument(argument):
    """Encodes |argument| suitably for consumption by POSIX shells.

    argument may be quoted and escaped as necessary to ensure that POSIX shells
    treat the returned value as a literal representing the argument passed to
    this function.  Parameter (variable) expansions beginning with $ are allowed
    to remain intact without escaping the $, to allow the argument to contain
    references to variables to be expanded by the shell.
    """

    if not isinstance(argument, str):
        argument = str(argument)

    quote = '"' if _quote.search(argument) else ""

    encoded = quote + re.sub(_escape, r"\\\1", argument) + quote

    return encoded


def EncodePOSIXShellList(list):
    """Encodes |list| suitably for consumption by POSIX shells.

    Returns EncodePOSIXShellArgument for each item in list, and joins them
    together using the space character as an argument separator.
    """

    encoded_arguments = []
    for argument in list:
        encoded_arguments.append(EncodePOSIXShellArgument(argument))
    return " ".join(encoded_arguments)
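# Usage note (editor's illustration): arguments containing the metacharacters
# matched by _quote get double-quoted, the characters matched by _escape get
# backslash-escaped, and $ is deliberately left intact for shell expansion:
#     EncodePOSIXShellList(["echo", "a b", "$HOME"])  ->  echo "a b" "$HOME"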
def DeepDependencyTargets(target_dicts, roots):
    """Returns the recursive list of target dependencies."""
    dependencies = set()
    pending = set(roots)
    while pending:
        # Pluck out one.
        r = pending.pop()
        # Skip if visited already.
        if r in dependencies:
            continue
        # Add it.
        dependencies.add(r)
        # Add its children.
        spec = target_dicts[r]
        pending.update(set(spec.get("dependencies", [])))
        pending.update(set(spec.get("dependencies_original", [])))
    return list(dependencies - set(roots))


def BuildFileTargets(target_list, build_file):
    """From a target_list, returns the subset from the specified build_file.
    """
    return [p for p in target_list if BuildFile(p) == build_file]


def AllTargets(target_list, target_dicts, build_file):
    """Returns all targets (direct and dependencies) for the specified build_file.
    """
    bftargets = BuildFileTargets(target_list, build_file)
    deptargets = DeepDependencyTargets(target_dicts, bftargets)
    return bftargets + deptargets


def WriteOnDiff(filename):
    """Write to a file only if the new contents differ.

    Arguments:
      filename: name of the file to potentially write to.
    Returns:
      A file like object which will write to temporary file and only overwrite
      the target if it differs (on close).
    """

    class Writer:
        """Wrapper around file which only covers the target if it differs."""

        def __init__(self):
            # On Cygwin remove the "dir" argument
            # `C:` prefixed paths are treated as relative,
            # consequently ending up with current dir "/cygdrive/c/..."
            # being prefixed to those, which was
            # obviously a non-existent path,
            # for example: "/cygdrive/c/<some folder>/C:\<my win style abs path>".
            # For more details see:
            # https://docs.python.org/2/library/tempfile.html#tempfile.mkstemp
            base_temp_dir = "" if IsCygwin() else os.path.dirname(filename)
            # Pick temporary file.
            tmp_fd, self.tmp_path = tempfile.mkstemp(
                suffix=".tmp",
                prefix=os.path.split(filename)[1] + ".gyp.",
                dir=base_temp_dir,
            )
            try:
                self.tmp_file = os.fdopen(tmp_fd, "wb")
            except Exception:
                # Don't leave turds behind.
                os.unlink(self.tmp_path)
                raise

        def __getattr__(self, attrname):
            # Delegate everything else to self.tmp_file
            return getattr(self.tmp_file, attrname)

        def close(self):
            try:
                # Close tmp file.
                self.tmp_file.close()
                # Determine if different.
                same = False
                try:
                    same = filecmp.cmp(self.tmp_path, filename, False)
                except OSError as e:
                    if e.errno != errno.ENOENT:
                        raise

                if same:
                    # The new file is identical to the old one, just get rid of
                    # the new one.
                    os.unlink(self.tmp_path)
                else:
                    # The new file is different from the old one,
                    # or there is no old one.
                    # Rename the new file to the permanent name.
                    #
                    # tempfile.mkstemp uses an overly restrictive mode, resulting
                    # in a file that can only be read by the owner, regardless of
                    # the umask.  There's no reason to not respect the umask here,
                    # which means that an extra hoop is required
                    # to fetch it and reset the new file's mode.
                    #
                    # No way to get the umask without setting a new one?  Set a
                    # safe one and then set it back to the old value.
                    umask = os.umask(0o77)
                    os.umask(umask)
                    os.chmod(self.tmp_path, 0o666 & ~umask)
                    if sys.platform == "win32" and os.path.exists(filename):
                        # NOTE: on windows (but not cygwin) rename will not replace
                        # an existing file, so it must be preceded with a remove.
                        # Sadly there is no way to make the switch atomic.
                        os.remove(filename)
                    os.rename(self.tmp_path, filename)
            except Exception:
                # Don't leave turds behind.
                os.unlink(self.tmp_path)
                raise

        def write(self, s):
            self.tmp_file.write(s.encode("utf-8"))

    return Writer()


def EnsureDirExists(path):
    """Make sure the directory for |path| exists."""
    try:
        os.makedirs(os.path.dirname(path))
    except OSError:
        pass


def GetFlavor(params):
    """Returns |params.flavor| if it's set, the system's default flavor else."""
    flavors = {
        "cygwin": "win",
        "win32": "win",
        "darwin": "mac",
    }

    if "flavor" in params:
        return params["flavor"]
    if sys.platform in flavors:
        return flavors[sys.platform]
    if sys.platform.startswith("sunos"):
        return "solaris"
    if sys.platform.startswith(("dragonfly", "freebsd")):
        return "freebsd"
    if sys.platform.startswith("openbsd"):
        return "openbsd"
    if sys.platform.startswith("netbsd"):
        return "netbsd"
    if sys.platform.startswith("aix"):
        return "aix"
    if sys.platform.startswith(("os390", "zos")):
        return "zos"
    if sys.platform == "os400":
        return "os400"

    return "linux"


def CopyTool(flavor, out_path, generator_flags={}):
    """Finds (flock|mac|win)_tool.py in the gyp directory and copies it
    to |out_path|."""
    # aix and solaris just need flock emulation. mac and win use more
    # complicated support scripts.
    prefix = {
        "aix": "flock",
        "os400": "flock",
        "solaris": "flock",
        "mac": "mac",
        "ios": "mac",
        "win": "win",
    }.get(flavor, None)
    if not prefix:
        return

    # Slurp input file.
    source_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "%s_tool.py" % prefix
    )
    with open(source_path) as source_file:
        source = source_file.readlines()

    # Set custom header flags.
    header = "# Generated by gyp. Do not edit.\n"
    mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
    if flavor == "mac" and mac_toolchain_dir:
        header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" % mac_toolchain_dir

    # Add header and write it out.
    tool_path = os.path.join(out_path, "gyp-%s-tool" % prefix)
    with open(tool_path, "w") as tool_file:
        tool_file.write("".join([source[0], header] + source[1:]))

    # Make file executable.
    os.chmod(tool_path, 0o755)


# From Alex Martelli,
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
# ASPN: Python Cookbook: Remove duplicates from a sequence
# First comment, dated 2001/10/13.
# (Also in the printed Python Cookbook.)


def uniquer(seq, idfun=lambda x: x):
    seen = {}
    result = []
    for item in seq:
        marker = idfun(item)
        if marker in seen:
            continue
        seen[marker] = 1
        result.append(item)
    return result


# Based on http://code.activestate.com/recipes/576694/.
class OrderedSet(MutableSet):
    def __init__(self, iterable=None):
        self.end = end = []
        end += [None, end, end]  # sentinel node for doubly linked list
        self.map = {}  # key --> [key, prev, next]
        if iterable is not None:
            self |= iterable

    def __len__(self):
        return len(self.map)

    def __contains__(self, key):
        return key in self.map

    def add(self, key):
        if key not in self.map:
            end = self.end
            curr = end[1]
            curr[2] = end[1] = self.map[key] = [key, curr, end]

    def discard(self, key):
        if key in self.map:
            key, prev_item, next_item = self.map.pop(key)
            prev_item[2] = next_item
            next_item[1] = prev_item

    def __iter__(self):
        end = self.end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]

    def __reversed__(self):
        end = self.end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]

    # The second argument is an addition that causes a pylint warning.
    def pop(self, last=True):  # pylint: disable=W0221
        if not self:
            raise KeyError("set is empty")
        key = self.end[1][0] if last else self.end[2][0]
        self.discard(key)
        return key

    def __repr__(self):
        if not self:
            return f"{self.__class__.__name__}()"
        return f"{self.__class__.__name__}({list(self)!r})"

    def __eq__(self, other):
        if isinstance(other, OrderedSet):
            return len(self) == len(other) and list(self) == list(other)
        return set(self) == set(other)

    # Extensions to the recipe.
    def update(self, iterable):
        for i in iterable:
            if i not in self:
                self.add(i)
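# Usage note (editor's illustration): OrderedSet deduplicates while keeping
# insertion order.
#     >>> s = OrderedSet(["b", "a", "b", "c"])
#     >>> list(s)
#     ['b', 'a', 'c']
#     >>> s.pop()  # pops from the end by default
#     'c'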
class CycleError(Exception):
    """An exception raised when an unexpected cycle is detected."""

    def __init__(self, nodes):
        self.nodes = nodes

    def __str__(self):
        return "CycleError: cycle involving: " + str(self.nodes)


def TopologicallySorted(graph, get_edges):
    r"""Topologically sort based on a user provided edge definition.

    Args:
      graph: A list of node names.
      get_edges: A function mapping from node name to a hashable collection
                 of node names which this node has outgoing edges to.
    Returns:
      A list containing all of the nodes in graph in topological order.
      It is assumed that calling get_edges once for each node and caching is
      cheaper than repeatedly calling get_edges.
    Raises:
      CycleError in the event of a cycle.
    Example:
      graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
      def GetEdges(node):
          return re.findall(r'\$\(([^)]+)\)', graph[node])
      print(TopologicallySorted(graph.keys(), GetEdges))
      ==>
      ['a', 'c', 'b']
    """
    get_edges = memoize(get_edges)
    visited = set()
    visiting = set()
    ordered_nodes = []

    def Visit(node):
        if node in visiting:
            raise CycleError(visiting)
        if node in visited:
            return
        visited.add(node)
        visiting.add(node)
        for neighbor in get_edges(node):
            Visit(neighbor)
        visiting.remove(node)
        ordered_nodes.insert(0, node)

    for node in sorted(graph):
        Visit(node)
    return ordered_nodes


def CrossCompileRequested():
    # TODO: figure out how to not build extra host objects in the
    # non-cross-compile case when this is enabled, and enable unconditionally.
    return (
        os.environ.get("GYP_CROSSCOMPILE")
        or os.environ.get("AR_host")
        or os.environ.get("CC_host")
        or os.environ.get("CXX_host")
        or os.environ.get("AR_target")
        or os.environ.get("CC_target")
        or os.environ.get("CXX_target")
    )


def IsCygwin():
    try:
        out = subprocess.Popen(
            "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        stdout = out.communicate()[0].decode("utf-8")
        return "CYGWIN" in str(stdout)
    except Exception:
        return False
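A short runnable sketch of TopologicallySorted under the docstring's edge convention (the graph here is made up; edges point at dependencies, and each node comes out ahead of the nodes it points to):

import gyp.common

graph = {"a": ["b", "c"], "b": [], "c": ["b"]}
order = gyp.common.TopologicallySorted(graph.keys(), lambda node: graph[node])
assert order == ["a", "c", "b"]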
78 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/common_test.py generated vendored Normal file
@@ -0,0 +1,78 @@
#!/usr/bin/env python3

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Unit tests for the common.py file."""

import gyp.common
import unittest
import sys


class TestTopologicallySorted(unittest.TestCase):
    def test_Valid(self):
        """Test that sorting works on a valid graph with one possible order."""
        graph = {
            "a": ["b", "c"],
            "b": [],
            "c": ["d"],
            "d": ["b"],
        }

        def GetEdge(node):
            return tuple(graph[node])

        self.assertEqual(
            gyp.common.TopologicallySorted(graph.keys(), GetEdge), ["a", "c", "d", "b"]
        )

    def test_Cycle(self):
        """Test that an exception is thrown on a cyclic graph."""
        graph = {
            "a": ["b"],
            "b": ["c"],
            "c": ["d"],
            "d": ["a"],
        }

        def GetEdge(node):
            return tuple(graph[node])

        self.assertRaises(
            gyp.common.CycleError, gyp.common.TopologicallySorted, graph.keys(), GetEdge
        )


class TestGetFlavor(unittest.TestCase):
    """Test that gyp.common.GetFlavor works as intended"""

    original_platform = ""

    def setUp(self):
        self.original_platform = sys.platform

    def tearDown(self):
        sys.platform = self.original_platform

    def assertFlavor(self, expected, argument, param):
        sys.platform = argument
        self.assertEqual(expected, gyp.common.GetFlavor(param))

    def test_platform_default(self):
        self.assertFlavor("freebsd", "freebsd9", {})
        self.assertFlavor("freebsd", "freebsd10", {})
        self.assertFlavor("openbsd", "openbsd5", {})
        self.assertFlavor("solaris", "sunos5", {})
        self.assertFlavor("solaris", "sunos", {})
        self.assertFlavor("linux", "linux2", {})
        self.assertFlavor("linux", "linux3", {})
        self.assertFlavor("linux", "linux", {})

    def test_param(self):
        self.assertFlavor("foobar", "linux2", {"flavor": "foobar"})


if __name__ == "__main__":
    unittest.main()
169 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py generated vendored Normal file
@@ -0,0 +1,169 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import sys
import re
import os
import locale
from functools import reduce


def XmlToString(content, encoding="utf-8", pretty=False):
    """ Converts the structured content into a string of XML elements.

    Visual Studio files have a lot of pre-defined structures.  This function
    makes it easy to represent these structures as Python data structures,
    instead of having to create a lot of function calls.

    Each XML element of the content is represented as a list composed of:
    1. The name of the element, a string,
    2. The attributes of the element, a dictionary (optional), and
    3+. The content of the element, if any.  Strings are simple text nodes and
        lists are child elements.

    Example 1:
        <test/>
    becomes
        ['test']

    Example 2:
        <myelement a='value1' b='value2'>
           <childtype>This is</childtype>
           <childtype>it!</childtype>
        </myelement>

    becomes
        ['myelement', {'a':'value1', 'b':'value2'},
           ['childtype', 'This is'],
           ['childtype', 'it!'],
        ]

    Args:
      content:  The structured content to be converted.
      encoding: The encoding to report on the first XML line.
      pretty: True if we want pretty printing with indents and new lines.

    Returns:
      The XML content as a string.
    """
    # We create a huge list of all the elements of the file.
    xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
    if pretty:
        xml_parts.append("\n")
    _ConstructContentList(xml_parts, content, pretty)

    # Convert it to a string
    return "".join(xml_parts)


def _ConstructContentList(xml_parts, specification, pretty, level=0):
    """ Appends the XML parts corresponding to the specification.

    Args:
      xml_parts: A list of XML parts to be appended to.
      specification:  The specification of the element.  See EasyXml docs.
      pretty: True if we want pretty printing with indents and new lines.
      level: Indentation level.
    """
    # The first item in a specification is the name of the element.
    if pretty:
        indentation = "  " * level
        new_line = "\n"
    else:
        indentation = ""
        new_line = ""
    name = specification[0]
    if not isinstance(name, str):
        raise Exception(
            "The first item of an EasyXml specification should be "
            "a string.  Specification was " + str(specification)
        )
    xml_parts.append(indentation + "<" + name)

    # Optionally in second position is a dictionary of the attributes.
    rest = specification[1:]
    if rest and isinstance(rest[0], dict):
        for at, val in sorted(rest[0].items()):
            xml_parts.append(f' {at}="{_XmlEscape(val, attr=True)}"')
        rest = rest[1:]
    if rest:
        xml_parts.append(">")
        all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
        multi_line = not all_strings
        if multi_line and new_line:
            xml_parts.append(new_line)
        for child_spec in rest:
            # If it's a string, append a text node.
            # Otherwise recurse over that child definition
            if isinstance(child_spec, str):
                xml_parts.append(_XmlEscape(child_spec))
            else:
                _ConstructContentList(xml_parts, child_spec, pretty, level + 1)
        if multi_line and indentation:
            xml_parts.append(indentation)
        xml_parts.append(f"</{name}>{new_line}")
    else:
        xml_parts.append("/>%s" % new_line)


def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False,
                      win32=(sys.platform == "win32")):
    """ Writes the XML content to disk, touching the file only if it has changed.

    Args:
      content:  The structured content to be written.
      path: Location of the file.
      encoding: The encoding to report on the first line of the XML file.
      pretty: True if we want pretty printing with indents and new lines.
    """
    xml_string = XmlToString(content, encoding, pretty)
    if win32 and os.linesep != "\r\n":
        xml_string = xml_string.replace("\n", "\r\n")

    try:  # getdefaultlocale() was removed in Python 3.11
        default_encoding = locale.getdefaultlocale()[1]
    except AttributeError:
        default_encoding = locale.getencoding()

    if default_encoding and default_encoding.upper() != encoding.upper():
        xml_string = xml_string.encode(encoding)

    # Get the old content
    try:
        with open(path) as file:
            existing = file.read()
    except OSError:
        existing = None

    # It has changed, write it
    if existing != xml_string:
        with open(path, "wb") as file:
            file.write(xml_string)


_xml_escape_map = {
    '"': "&quot;",
    "'": "&apos;",
    "<": "&lt;",
    ">": "&gt;",
    "&": "&amp;",
    "\n": "&#xA;",
    "\r": "&#xD;",
}


_xml_escape_re = re.compile("(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))


def _XmlEscape(value, attr=False):
    """ Escape a string for inclusion in XML."""

    def replace(match):
        m = match.string[match.start() : match.end()]
        # don't replace single quotes in attrs
        if attr and m == "'":
            return m
        return _xml_escape_map[m]

    return _xml_escape_re.sub(replace, value)
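A minimal sketch of the list-based specification format documented in XmlToString above; the output shape is confirmed by the unit tests that follow:

import gyp.easy_xml as easy_xml

spec = ["myelement", {"a": "value1", "b": "value2"},
        ["childtype", "This is"],
        ["childtype", "it!"]]
print(easy_xml.XmlToString(spec))
# -> <?xml version="1.0" encoding="utf-8"?><myelement a="value1" b="value2"><childtype>This is</childtype><childtype>it!</childtype></myelement>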
113 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py generated vendored Normal file
@@ -0,0 +1,113 @@
#!/usr/bin/env python3

# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

""" Unit tests for the easy_xml.py file. """

import gyp.easy_xml as easy_xml
import unittest

from io import StringIO


class TestSequenceFunctions(unittest.TestCase):
    def setUp(self):
        self.stderr = StringIO()

    def test_EasyXml_simple(self):
        self.assertEqual(
            easy_xml.XmlToString(["test"]),
            '<?xml version="1.0" encoding="utf-8"?><test/>',
        )

        self.assertEqual(
            easy_xml.XmlToString(["test"], encoding="Windows-1252"),
            '<?xml version="1.0" encoding="Windows-1252"?><test/>',
        )

    def test_EasyXml_simple_with_attributes(self):
        self.assertEqual(
            easy_xml.XmlToString(["test2", {"a": "value1", "b": "value2"}]),
            '<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>',
        )

    def test_EasyXml_escaping(self):
        original = "<test>'\"\r&\nfoo"
        converted = "&lt;test&gt;'&quot;&#xD;&amp;&#xA;foo"
        converted_apos = converted.replace("'", "&apos;")
        self.assertEqual(
            easy_xml.XmlToString(["test3", {"a": original}, original]),
            '<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>'
            % (converted, converted_apos),
        )

    def test_EasyXml_pretty(self):
        self.assertEqual(
            easy_xml.XmlToString(
                ["test3", ["GrandParent", ["Parent1", ["Child"]], ["Parent2"]]],
                pretty=True,
            ),
            '<?xml version="1.0" encoding="utf-8"?>\n'
            "<test3>\n"
            "  <GrandParent>\n"
            "    <Parent1>\n"
            "      <Child/>\n"
            "    </Parent1>\n"
            "    <Parent2/>\n"
            "  </GrandParent>\n"
            "</test3>\n",
        )

    def test_EasyXml_complex(self):
        # We want to create:
        target = (
            '<?xml version="1.0" encoding="utf-8"?>'
            "<Project>"
            '<PropertyGroup Label="Globals">'
            "<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>"
            "<Keyword>Win32Proj</Keyword>"
            "<RootNamespace>automated_ui_tests</RootNamespace>"
            "</PropertyGroup>"
            '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
            "<PropertyGroup "
            "Condition=\"'$(Configuration)|$(Platform)'=="
            '\'Debug|Win32\'" Label="Configuration">'
            "<ConfigurationType>Application</ConfigurationType>"
            "<CharacterSet>Unicode</CharacterSet>"
            "<SpectreMitigation>SpectreLoadCF</SpectreMitigation>"
            "<VCToolsVersion>14.36.32532</VCToolsVersion>"
            "</PropertyGroup>"
            "</Project>"
        )

        xml = easy_xml.XmlToString(
            [
                "Project",
                [
                    "PropertyGroup",
                    {"Label": "Globals"},
                    ["ProjectGuid", "{D2250C20-3A94-4FB9-AF73-11BC5B73884B}"],
                    ["Keyword", "Win32Proj"],
                    ["RootNamespace", "automated_ui_tests"],
                ],
                ["Import", {"Project": "$(VCTargetsPath)\\Microsoft.Cpp.props"}],
                [
                    "PropertyGroup",
                    {
                        "Condition": "'$(Configuration)|$(Platform)'=='Debug|Win32'",
                        "Label": "Configuration",
                    },
                    ["ConfigurationType", "Application"],
                    ["CharacterSet", "Unicode"],
                    ["SpectreMitigation", "SpectreLoadCF"],
                    ["VCToolsVersion", "14.36.32532"],
                ],
            ]
        )
        self.assertEqual(xml, target)


if __name__ == "__main__":
    unittest.main()
55 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py (generated, vendored, new file)
@@ -0,0 +1,55 @@
#!/usr/bin/env python3
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""These functions are executed via gyp-flock-tool when using the Makefile
generator. Used on systems that don't have a built-in flock."""

import fcntl
import os
import struct
import subprocess
import sys


def main(args):
    executor = FlockTool()
    executor.Dispatch(args)


class FlockTool:
    """This class emulates the 'flock' command."""

    def Dispatch(self, args):
        """Dispatches a string command to a method."""
        if len(args) < 1:
            raise Exception("Not enough arguments")

        method = "Exec%s" % self._CommandifyName(args[0])
        getattr(self, method)(*args[1:])

    def _CommandifyName(self, name_string):
        """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
        return name_string.title().replace("-", "")

    def ExecFlock(self, lockfile, *cmd_list):
        """Emulates the most basic behavior of Linux's flock(1)."""
        # Rely on exception handling to report errors.
        # Note that the stock python on SunOS has a bug
        # where fcntl.flock(fd, LOCK_EX) always fails
        # with EBADF, that's why we use this F_SETLK
        # hack instead.
        fd = os.open(lockfile, os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
        if sys.platform.startswith("aix") or sys.platform == "os400":
            # Python on AIX is compiled with LARGEFILE support, which changes the
            # struct size.
            op = struct.pack("hhIllqq", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
        else:
            op = struct.pack("hhllhhl", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
        fcntl.fcntl(fd, fcntl.F_SETLK, op)
        return subprocess.call(cmd_list)


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
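As a usage note, here is a hedged sketch of how this tool gets invoked; the import path and the exact command line are assumptions (generated Makefiles actually shell out to a gyp-flock-tool wrapper script).

# "flock" is commandified to ExecFlock, which takes an exclusive lock on
# /tmp/demo.lock via F_SETLK and then runs the remaining args as a command.
from gyp.flock_tool import FlockTool  # import path assumed

FlockTool().Dispatch(["flock", "/tmp/demo.lock", "echo", "lock held"])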
0 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py (generated, vendored, new file)
804 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py (generated, vendored, new file)
@@ -0,0 +1,804 @@
# Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
the generator flag config_path) the path of a json file that dictates the files
and targets to search for. The following keys are supported:
files: list of paths (relative) of the files to search for.
test_targets: unqualified target names to search for. Any target in this list
that depends upon a file in |files| is output regardless of the type of target
or chain of dependencies.
additional_compile_targets: Unqualified targets to search for in addition to
test_targets. Targets in the combined list that depend upon a file in |files|
are not necessarily output. For example, if the target is of type none then the
target is not output (but one of the descendants of the target will be).

The following is output:
error: only supplied if there is an error.
compile_targets: minimal set of targets that directly or indirectly (for
targets of type none) depend on the files in |files| and is one of the
supplied targets or a target that one of the supplied targets depends on.
The expectation is this set of targets is passed into a build step. This list
always contains the output of test_targets as well.
test_targets: set of targets from the supplied |test_targets| that either
directly or indirectly depend upon a file in |files|. This list is useful
if additional processing needs to be done for certain targets after the
build, such as running tests.
status: outputs one of three values: none of the supplied files were found,
one of the include files changed so that it should be assumed everything
changed (in this case test_targets and compile_targets are not output) or at
least one file was found.
invalid_targets: list of supplied targets that were not found.

Example:
Consider a graph like the following:
  A     D
 / \
B   C
A depends upon both B and C, A is of type none and B and C are executables.
D is an executable, has no dependencies and nothing depends on it.
If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
the following is output:
|compile_targets| = ["B"]: B must be built as it depends upon the changed file
b.cc and the supplied target A depends upon it. A is not output as a
build_target as it is of type none with no rules and actions.
|test_targets| = ["B"]: B directly depends upon the changed file b.cc.

Even though the file d.cc, which D depends upon, has changed, D is not output
as it was not supplied by way of |additional_compile_targets| or |test_targets|.

If the generator flag analyzer_output_path is specified, output is written
there. Otherwise output is written to stdout.

In Gyp the "all" target is shorthand for the root targets in the files passed
to gyp. For example, if file "a.gyp" contains targets "a1" and
"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
then the "all" target includes "b1" and "b2".
"""
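To make the contract above concrete, a hedged sketch of a config file and invocation matching the documented A/B/C/D example follows; the file names and the exact output are illustrative assumptions.

import json

# Config matching the example graph in the docstring above.
config = {
    "files": ["b.cc", "d.cc"],
    "test_targets": ["B", "C"],
    "additional_compile_targets": ["A"],
}
with open("analyzer_config.json", "w") as f:
    json.dump(config, f)

# Roughly: gyp -f analyzer -G config_path=analyzer_config.json a.gyp
# Expected output shape (stdout, or analyzer_output_path if set):
#   {"status": "Found dependency",
#    "test_targets": ["B"],
#    "compile_targets": ["B"]}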
import gyp.common
import json
import os
import posixpath

debug = False

found_dependency_string = "Found dependency"
no_dependency_string = "No dependencies"
# Status when it should be assumed that everything has changed.
all_changed_string = "Found dependency (all)"

# MatchStatus is used to indicate if and how a target depends upon the supplied
# sources.
# The target's sources contain one of the supplied paths.
MATCH_STATUS_MATCHES = 1
# The target has a dependency on another target that contains one of the
# supplied paths.
MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
# The target's sources weren't in the supplied paths and none of the target's
# dependencies depend upon a target that matched.
MATCH_STATUS_DOESNT_MATCH = 3
# The target doesn't contain the source, but the dependent targets have not yet
# been visited to determine a more specific status.
MATCH_STATUS_TBD = 4

generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()

generator_wants_static_library_dependencies_adjusted = False

generator_default_variables = {}
for dirname in [
    "INTERMEDIATE_DIR",
    "SHARED_INTERMEDIATE_DIR",
    "PRODUCT_DIR",
    "LIB_DIR",
    "SHARED_LIB_DIR",
]:
    generator_default_variables[dirname] = "!!!"

for unused in [
    "RULE_INPUT_PATH",
    "RULE_INPUT_ROOT",
    "RULE_INPUT_NAME",
    "RULE_INPUT_DIRNAME",
    "RULE_INPUT_EXT",
    "EXECUTABLE_PREFIX",
    "EXECUTABLE_SUFFIX",
    "STATIC_LIB_PREFIX",
    "STATIC_LIB_SUFFIX",
    "SHARED_LIB_PREFIX",
    "SHARED_LIB_SUFFIX",
    "CONFIGURATION_NAME",
]:
    generator_default_variables[unused] = ""


def _ToGypPath(path):
    """Converts a path to the format used by gyp."""
    if os.sep == "\\" and os.altsep == "/":
        return path.replace("\\", "/")
    return path
def _ResolveParent(path, base_path_components):
    """Resolves |path|, which starts with at least one '../'. Returns an empty
    string if the path shouldn't be considered. See _AddSources() for a
    description of |base_path_components|."""
    depth = 0
    while path.startswith("../"):
        depth += 1
        path = path[3:]
    # Relative includes may go outside the source tree. For example, an action
    # may have inputs in /usr/include, which are not in the source tree.
    if depth > len(base_path_components):
        return ""
    if depth == len(base_path_components):
        return path
    return (
        "/".join(base_path_components[0 : len(base_path_components) - depth])
        + "/"
        + path
    )
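A few sanity checks of the parent-resolution behavior, assuming the function above is in scope; the paths are invented for illustration.

# depth < len(base): strip matching trailing components, then rejoin.
assert _ResolveParent("../../x/y.cc", ["a", "b", "c"]) == "a/x/y.cc"
# depth == len(base): the path is relative to the tree root.
assert _ResolveParent("../x.cc", ["a"]) == "x.cc"
# depth > len(base): the path escapes the source tree and is dropped.
assert _ResolveParent("../../x.cc", ["a"]) == ""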
def _AddSources(sources, base_path, base_path_components, result):
    """Extracts valid sources from |sources| and adds them to |result|. Each
    source file is relative to |base_path|, but may contain '..'. To make
    resolving '..' easier |base_path_components| contains each of the
    directories in |base_path|. Additionally each source may contain variables.
    Such sources are ignored as it is assumed dependencies on them are expressed
    and tracked in some other means."""
    # NOTE: gyp paths are always posix style.
    for source in sources:
        if not len(source) or source.startswith("!!!") or source.startswith("$"):
            continue
        # variable expansion may lead to //.
        org_source = source
        source = source[0] + source[1:].replace("//", "/")
        if source.startswith("../"):
            source = _ResolveParent(source, base_path_components)
            if len(source):
                result.append(source)
            continue
        result.append(base_path + source)
        if debug:
            print("AddSource", org_source, result[len(result) - 1])


def _ExtractSourcesFromAction(action, base_path, base_path_components, results):
    if "inputs" in action:
        _AddSources(action["inputs"], base_path, base_path_components, results)


def _ToLocalPath(toplevel_dir, path):
    """Converts |path| to a path relative to |toplevel_dir|."""
    if path == toplevel_dir:
        return ""
    if path.startswith(toplevel_dir + "/"):
        return path[len(toplevel_dir) + len("/") :]
    return path


def _ExtractSources(target, target_dict, toplevel_dir):
    # |target| is either absolute or relative and in the format of the OS. Gyp
    # source paths are always posix. Convert |target| to a posix path relative
    # to |toplevel_dir_|. This is done to make it easy to build source paths.
    base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
    base_path_components = base_path.split("/")

    # Add a trailing '/' so that _AddSources() can easily build paths.
    if len(base_path):
        base_path += "/"

    if debug:
        print("ExtractSources", target, base_path)

    results = []
    if "sources" in target_dict:
        _AddSources(target_dict["sources"], base_path, base_path_components, results)
    # Include the inputs from any actions. Any changes to these affect the
    # resulting output.
    if "actions" in target_dict:
        for action in target_dict["actions"]:
            _ExtractSourcesFromAction(action, base_path, base_path_components, results)
    if "rules" in target_dict:
        for rule in target_dict["rules"]:
            _ExtractSourcesFromAction(rule, base_path, base_path_components, results)

    return results
class Target:
    """Holds information about a particular target:
    deps: set of Targets this Target depends upon. This is not recursive, only
    the direct dependent Targets.
    match_status: one of the MatchStatus values.
    back_deps: set of Targets that have a dependency on this Target.
    visited: used during iteration to indicate whether we've visited this
    target. This is used for two iterations, once in building the set of
    Targets and again in _GetBuildTargets().
    name: fully qualified name of the target.
    requires_build: True if the target type is such that it needs to be built.
    See _DoesTargetTypeRequireBuild for details.
    added_to_compile_targets: used when determining if the target was added to
    the set of targets that needs to be built.
    in_roots: true if this target is a descendant of one of the root nodes.
    is_executable: true if the type of target is executable.
    is_static_library: true if the type of target is static_library.
    is_or_has_linked_ancestor: true if the target does a link (eg executable),
    or if there is a target in back_deps that does a link."""

    def __init__(self, name):
        self.deps = set()
        self.match_status = MATCH_STATUS_TBD
        self.back_deps = set()
        self.name = name
        # TODO(sky): I don't like hanging this off Target. This state is
        # specific to certain functions and should be isolated there.
        self.visited = False
        self.requires_build = False
        self.added_to_compile_targets = False
        self.in_roots = False
        self.is_executable = False
        self.is_static_library = False
        self.is_or_has_linked_ancestor = False


class Config:
    """Details what we're looking for
    files: set of files to search for
    targets: see file description for details."""

    def __init__(self):
        self.files = []
        self.targets = set()
        self.additional_compile_target_names = set()
        self.test_target_names = set()

    def Init(self, params):
        """Initializes Config. This is a separate method as it raises an
        exception if there is a parse error."""
        generator_flags = params.get("generator_flags", {})
        config_path = generator_flags.get("config_path", None)
        if not config_path:
            return
        try:
            f = open(config_path)
            config = json.load(f)
            f.close()
        except OSError:
            raise Exception("Unable to open file " + config_path)
        except ValueError as e:
            raise Exception("Unable to parse config file " + config_path + str(e))
        if not isinstance(config, dict):
            raise Exception("config_path must be a JSON file containing a dictionary")
        self.files = config.get("files", [])
        self.additional_compile_target_names = set(
            config.get("additional_compile_targets", [])
        )
        self.test_target_names = set(config.get("test_targets", []))
def _WasBuildFileModified(build_file, data, files, toplevel_dir):
    """Returns true if the build file |build_file| is either in |files| or
    one of the files included by |build_file| is in |files|. |toplevel_dir| is
    the root of the source tree."""
    if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
        if debug:
            print("gyp file modified", build_file)
        return True

    # First element of included_files is the file itself.
    if len(data[build_file]["included_files"]) <= 1:
        return False

    for include_file in data[build_file]["included_files"][1:]:
        # |included_files| are relative to the directory of the |build_file|.
        rel_include_file = _ToGypPath(
            gyp.common.UnrelativePath(include_file, build_file)
        )
        if _ToLocalPath(toplevel_dir, rel_include_file) in files:
            if debug:
                print(
                    "included gyp file modified, gyp_file=",
                    build_file,
                    "included file=",
                    rel_include_file,
                )
            return True
    return False


def _GetOrCreateTargetByName(targets, target_name):
    """Creates or returns the Target at targets[target_name]. If there is no
    Target for |target_name| one is created. Returns a tuple of whether a new
    Target was created and the Target."""
    if target_name in targets:
        return False, targets[target_name]
    target = Target(target_name)
    targets[target_name] = target
    return True, target


def _DoesTargetTypeRequireBuild(target_dict):
    """Returns true if the target type is such that it needs to be built."""
    # If a 'none' target has rules or actions we assume it requires a build.
    return bool(
        target_dict["type"] != "none"
        or target_dict.get("actions")
        or target_dict.get("rules")
    )


def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build_files):
    """Returns a tuple of the following:
    . A dictionary mapping from fully qualified name to Target.
    . A list of the targets that have a source file in |files|.
    . Targets that constitute the 'all' target. See description at top of file
      for details on the 'all' target.
    This sets the |match_status| of the targets that contain any of the source
    files in |files| to MATCH_STATUS_MATCHES.
    |toplevel_dir| is the root of the source tree."""
    # Maps from target name to Target.
    name_to_target = {}

    # Targets that matched.
    matching_targets = []

    # Queue of targets to visit.
    targets_to_visit = target_list[:]

    # Maps from build file to a boolean indicating whether the build file is in
    # |files|.
    build_file_in_files = {}

    # Root targets across all files.
    roots = set()

    # Set of Targets in |build_files|.
    build_file_targets = set()

    while len(targets_to_visit) > 0:
        target_name = targets_to_visit.pop()
        created_target, target = _GetOrCreateTargetByName(name_to_target, target_name)
        if created_target:
            roots.add(target)
        elif target.visited:
            continue

        target.visited = True
        target.requires_build = _DoesTargetTypeRequireBuild(target_dicts[target_name])
        target_type = target_dicts[target_name]["type"]
        target.is_executable = target_type == "executable"
        target.is_static_library = target_type == "static_library"
        target.is_or_has_linked_ancestor = (
            target_type in {"executable", "shared_library"}
        )

        build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
        if build_file not in build_file_in_files:
            build_file_in_files[build_file] = _WasBuildFileModified(
                build_file, data, files, toplevel_dir
            )

        if build_file in build_files:
            build_file_targets.add(target)

        # If a build file (or any of its included files) is modified we assume
        # all targets in the file are modified.
        if build_file_in_files[build_file]:
            print("matching target from modified build file", target_name)
            target.match_status = MATCH_STATUS_MATCHES
            matching_targets.append(target)
        else:
            sources = _ExtractSources(
                target_name, target_dicts[target_name], toplevel_dir
            )
            for source in sources:
                if _ToGypPath(os.path.normpath(source)) in files:
                    print("target", target_name, "matches", source)
                    target.match_status = MATCH_STATUS_MATCHES
                    matching_targets.append(target)
                    break

        # Add dependencies to visit as well as updating back pointers for deps.
        for dep in target_dicts[target_name].get("dependencies", []):
            targets_to_visit.append(dep)

            created_dep_target, dep_target = _GetOrCreateTargetByName(
                name_to_target, dep
            )
            if not created_dep_target:
                roots.discard(dep_target)

            target.deps.add(dep_target)
            dep_target.back_deps.add(target)

    return name_to_target, matching_targets, roots & build_file_targets
def _GetUnqualifiedToTargetMapping(all_targets, to_find):
    """Returns a tuple of the following:
    . mapping (dictionary) from unqualified name to Target for all the
      Targets in |to_find|.
    . any target names not found. If this is empty all targets were found."""
    result = {}
    if not to_find:
        return {}, []
    to_find = set(to_find)
    for target_name in all_targets:
        extracted = gyp.common.ParseQualifiedTarget(target_name)
        if len(extracted) > 1 and extracted[1] in to_find:
            to_find.remove(extracted[1])
            result[extracted[1]] = all_targets[target_name]
            if not to_find:
                return result, []
    return result, list(to_find)


def _DoesTargetDependOnMatchingTargets(target):
    """Returns true if |target| or any of its dependencies is one of the
    targets containing the files supplied as input to analyzer. This updates
    |matches| of the Targets as it recurses.
    target: the Target to look for."""
    if target.match_status == MATCH_STATUS_DOESNT_MATCH:
        return False
    if (
        target.match_status in {MATCH_STATUS_MATCHES,
                                MATCH_STATUS_MATCHES_BY_DEPENDENCY}
    ):
        return True
    for dep in target.deps:
        if _DoesTargetDependOnMatchingTargets(dep):
            target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
            print("\t", target.name, "matches by dep", dep.name)
            return True
    target.match_status = MATCH_STATUS_DOESNT_MATCH
    return False


def _GetTargetsDependingOnMatchingTargets(possible_targets):
    """Returns the list of Targets in |possible_targets| that depend (either
    directly or indirectly) on at least one of the targets containing the
    files supplied as input to analyzer.
    possible_targets: targets to search from."""
    found = []
    print("Targets that matched by dependency:")
    for target in possible_targets:
        if _DoesTargetDependOnMatchingTargets(target):
            found.append(target)
    return found
def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
    """Recurses through all targets that depend on |target|, adding all targets
    that need to be built (and are in |roots|) to |result|.
    roots: set of root targets.
    add_if_no_ancestor: If true and there are no ancestors of |target| then add
    |target| to |result|. |target| must still be in |roots|.
    result: targets that need to be built are added here."""
    if target.visited:
        return

    target.visited = True
    target.in_roots = target in roots

    for back_dep_target in target.back_deps:
        _AddCompileTargets(back_dep_target, roots, False, result)
        target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
        target.in_roots |= back_dep_target.in_roots
        target.is_or_has_linked_ancestor |= back_dep_target.is_or_has_linked_ancestor

    # Always add 'executable' targets. Even though they may be built by other
    # targets that depend upon them it makes detection of what is going to be
    # built easier.
    # And always add static_libraries that have no dependencies on them from
    # linkables. This is necessary as the other dependencies on them may be
    # static libraries themselves, which are not compile time dependencies.
    if target.in_roots and (
        target.is_executable
        or (
            not target.added_to_compile_targets
            and (add_if_no_ancestor or target.requires_build)
        )
        or (
            target.is_static_library
            and add_if_no_ancestor
            and not target.is_or_has_linked_ancestor
        )
    ):
        print(
            "\t\tadding to compile targets",
            target.name,
            "executable",
            target.is_executable,
            "added_to_compile_targets",
            target.added_to_compile_targets,
            "add_if_no_ancestor",
            add_if_no_ancestor,
            "requires_build",
            target.requires_build,
            "is_static_library",
            target.is_static_library,
            "is_or_has_linked_ancestor",
            target.is_or_has_linked_ancestor,
        )
        result.add(target)
        target.added_to_compile_targets = True


def _GetCompileTargets(matching_targets, supplied_targets):
    """Returns the set of Targets that require a build.
    matching_targets: targets that changed and need to be built.
    supplied_targets: set of targets supplied to analyzer to search from."""
    result = set()
    for target in matching_targets:
        print("finding compile targets for match", target.name)
        _AddCompileTargets(target, supplied_targets, True, result)
    return result
def _WriteOutput(params, **values):
    """Writes the output, either to stdout or to a file if one is specified."""
    if "error" in values:
        print("Error:", values["error"])
    if "status" in values:
        print(values["status"])
    if "targets" in values:
        values["targets"].sort()
        print("Supplied targets that depend on changed files:")
        for target in values["targets"]:
            print("\t", target)
    if "invalid_targets" in values:
        values["invalid_targets"].sort()
        print("The following targets were not found:")
        for target in values["invalid_targets"]:
            print("\t", target)
    if "build_targets" in values:
        values["build_targets"].sort()
        print("Targets that require a build:")
        for target in values["build_targets"]:
            print("\t", target)
    if "compile_targets" in values:
        values["compile_targets"].sort()
        print("Targets that need to be built:")
        for target in values["compile_targets"]:
            print("\t", target)
    if "test_targets" in values:
        values["test_targets"].sort()
        print("Test targets:")
        for target in values["test_targets"]:
            print("\t", target)

    output_path = params.get("generator_flags", {}).get("analyzer_output_path", None)
    if not output_path:
        print(json.dumps(values))
        return
    try:
        f = open(output_path, "w")
        f.write(json.dumps(values) + "\n")
        f.close()
    except OSError as e:
        print("Error writing to output file", output_path, str(e))


def _WasGypIncludeFileModified(params, files):
    """Returns true if one of the files in |files| is in the set of included
    files."""
    if params["options"].includes:
        for include in params["options"].includes:
            if _ToGypPath(os.path.normpath(include)) in files:
                print("Include file modified, assuming all changed", include)
                return True
    return False
def _NamesNotIn(names, mapping):
    """Returns a list of the values in |names| that are not in |mapping|."""
    return [name for name in names if name not in mapping]


def _LookupTargets(names, mapping):
    """Returns a list of the mapping[name] for each value in |names| that is in
    |mapping|."""
    return [mapping[name] for name in names if name in mapping]


def CalculateVariables(default_variables, params):
    """Calculate additional variables for use in the build (called by gyp)."""
    flavor = gyp.common.GetFlavor(params)
    if flavor == "mac":
        default_variables.setdefault("OS", "mac")
    elif flavor == "win":
        default_variables.setdefault("OS", "win")
        gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
    else:
        operating_system = flavor
        if flavor == "android":
            operating_system = "linux"  # Keep this legacy behavior for now.
        default_variables.setdefault("OS", operating_system)
class TargetCalculator:
    """Calculates the matching test_targets and matching compile_targets."""

    def __init__(
        self,
        files,
        additional_compile_target_names,
        test_target_names,
        data,
        target_list,
        target_dicts,
        toplevel_dir,
        build_files,
    ):
        self._additional_compile_target_names = set(additional_compile_target_names)
        self._test_target_names = set(test_target_names)
        (
            self._name_to_target,
            self._changed_targets,
            self._root_targets,
        ) = _GenerateTargets(
            data, target_list, target_dicts, toplevel_dir, frozenset(files), build_files
        )
        (
            self._unqualified_mapping,
            self.invalid_targets,
        ) = _GetUnqualifiedToTargetMapping(
            self._name_to_target, self._supplied_target_names_no_all()
        )

    def _supplied_target_names(self):
        return self._additional_compile_target_names | self._test_target_names

    def _supplied_target_names_no_all(self):
        """Returns the supplied test targets without 'all'."""
        result = self._supplied_target_names()
        result.discard("all")
        return result

    def is_build_impacted(self):
        """Returns true if the supplied files impact the build at all."""
        return self._changed_targets

    def find_matching_test_target_names(self):
        """Returns the set of output test targets."""
        assert self.is_build_impacted()
        # Find the test targets first. 'all' is special cased to mean all the
        # root targets. To deal with 'all', the supplied |test_targets| are
        # expanded to include the root targets during lookup. If any of the
        # root targets match, we remove it and replace it with 'all'.
        test_target_names_no_all = set(self._test_target_names)
        test_target_names_no_all.discard("all")
        test_targets_no_all = _LookupTargets(
            test_target_names_no_all, self._unqualified_mapping
        )
        test_target_names_contains_all = "all" in self._test_target_names
        if test_target_names_contains_all:
            test_targets = list(set(test_targets_no_all) | set(self._root_targets))
        else:
            test_targets = list(test_targets_no_all)
        print("supplied test_targets")
        for target_name in self._test_target_names:
            print("\t", target_name)
        print("found test_targets")
        for target in test_targets:
            print("\t", target.name)
        print("searching for matching test targets")
        matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
        matching_test_targets_contains_all = test_target_names_contains_all and set(
            matching_test_targets
        ) & set(self._root_targets)
        if matching_test_targets_contains_all:
            # Remove any of the targets for all that were not explicitly
            # supplied; 'all' is subsequently added to the matching names below.
            matching_test_targets = list(
                set(matching_test_targets) & set(test_targets_no_all)
            )
        print("matched test_targets")
        for target in matching_test_targets:
            print("\t", target.name)
        matching_target_names = [
            gyp.common.ParseQualifiedTarget(target.name)[1]
            for target in matching_test_targets
        ]
        if matching_test_targets_contains_all:
            matching_target_names.append("all")
            print("\tall")
        return matching_target_names

    def find_matching_compile_target_names(self):
        """Returns the set of output compile targets."""
        assert self.is_build_impacted()
        # Compile targets are found by searching up from changed targets.
        # Reset the visited status for _GetBuildTargets.
        for target in self._name_to_target.values():
            target.visited = False

        supplied_targets = _LookupTargets(
            self._supplied_target_names_no_all(), self._unqualified_mapping
        )
        if "all" in self._supplied_target_names():
            supplied_targets = list(set(supplied_targets) | set(self._root_targets))
        print("Supplied test_targets & compile_targets")
        for target in supplied_targets:
            print("\t", target.name)
        print("Finding compile targets")
        compile_targets = _GetCompileTargets(self._changed_targets, supplied_targets)
        return [
            gyp.common.ParseQualifiedTarget(target.name)[1]
            for target in compile_targets
        ]
def GenerateOutput(target_list, target_dicts, data, params):
    """Called by gyp as the final stage. Outputs results."""
    config = Config()
    try:
        config.Init(params)

        if not config.files:
            raise Exception(
                "Must specify files to analyze via config_path generator flag"
            )

        toplevel_dir = _ToGypPath(os.path.abspath(params["options"].toplevel_dir))
        if debug:
            print("toplevel_dir", toplevel_dir)

        if _WasGypIncludeFileModified(params, config.files):
            result_dict = {
                "status": all_changed_string,
                "test_targets": list(config.test_target_names),
                "compile_targets": list(
                    config.additional_compile_target_names | config.test_target_names
                ),
            }
            _WriteOutput(params, **result_dict)
            return

        calculator = TargetCalculator(
            config.files,
            config.additional_compile_target_names,
            config.test_target_names,
            data,
            target_list,
            target_dicts,
            toplevel_dir,
            params["build_files"],
        )
        if not calculator.is_build_impacted():
            result_dict = {
                "status": no_dependency_string,
                "test_targets": [],
                "compile_targets": [],
            }
            if calculator.invalid_targets:
                result_dict["invalid_targets"] = calculator.invalid_targets
            _WriteOutput(params, **result_dict)
            return

        test_target_names = calculator.find_matching_test_target_names()
        compile_target_names = calculator.find_matching_compile_target_names()
        found_at_least_one_target = compile_target_names or test_target_names
        result_dict = {
            "test_targets": test_target_names,
            "status": found_dependency_string
            if found_at_least_one_target
            else no_dependency_string,
            "compile_targets": list(set(compile_target_names) | set(test_target_names)),
        }
        if calculator.invalid_targets:
            result_dict["invalid_targets"] = calculator.invalid_targets
        _WriteOutput(params, **result_dict)

    except Exception as e:
        _WriteOutput(params, error=str(e))
1173 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py (generated, vendored, new file): diff suppressed because it is too large
1318 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py (generated, vendored, new file): diff suppressed because it is too large
123 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py (generated, vendored, new file)
@@ -0,0 +1,123 @@
# Copyright (c) 2016 Ben Noordhuis <info@bnoordhuis.nl>. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import gyp.common
import gyp.xcode_emulation
import json
import os

generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None
generator_supports_multiple_toolsets = True
generator_wants_sorted_dependencies = False

# Lifted from make.py. The actual values don't matter much.
generator_default_variables = {
    "CONFIGURATION_NAME": "$(BUILDTYPE)",
    "EXECUTABLE_PREFIX": "",
    "EXECUTABLE_SUFFIX": "",
    "INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni",
    "PRODUCT_DIR": "$(builddir)",
    "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s",
    "RULE_INPUT_EXT": "$(suffix $<)",
    "RULE_INPUT_NAME": "$(notdir $<)",
    "RULE_INPUT_PATH": "$(abspath $<)",
    "RULE_INPUT_ROOT": "%(INPUT_ROOT)s",
    "SHARED_INTERMEDIATE_DIR": "$(obj)/gen",
    "SHARED_LIB_PREFIX": "lib",
    "STATIC_LIB_PREFIX": "lib",
    "STATIC_LIB_SUFFIX": ".a",
}


def IsMac(params):
    return gyp.common.GetFlavor(params) == "mac"


def CalculateVariables(default_variables, params):
    default_variables.setdefault("OS", gyp.common.GetFlavor(params))


def AddCommandsForTarget(cwd, target, params, per_config_commands):
    output_dir = params["generator_flags"].get("output_dir", "out")
    for configuration_name, configuration in target["configurations"].items():
        if IsMac(params):
            xcode_settings = gyp.xcode_emulation.XcodeSettings(target)
            cflags = xcode_settings.GetCflags(configuration_name)
            cflags_c = xcode_settings.GetCflagsC(configuration_name)
            cflags_cc = xcode_settings.GetCflagsCC(configuration_name)
        else:
            cflags = configuration.get("cflags", [])
            cflags_c = configuration.get("cflags_c", [])
            cflags_cc = configuration.get("cflags_cc", [])

        cflags_c = cflags + cflags_c
        cflags_cc = cflags + cflags_cc

        defines = configuration.get("defines", [])
        defines = ["-D" + s for s in defines]

        # TODO(bnoordhuis) Handle generated source files.
        extensions = (".c", ".cc", ".cpp", ".cxx")
        sources = [s for s in target.get("sources", []) if s.endswith(extensions)]

        def resolve(filename):
            return os.path.abspath(os.path.join(cwd, filename))

        # TODO(bnoordhuis) Handle generated header files.
        include_dirs = configuration.get("include_dirs", [])
        include_dirs = [s for s in include_dirs if not s.startswith("$(obj)")]
        includes = ["-I" + resolve(s) for s in include_dirs]

        defines = gyp.common.EncodePOSIXShellList(defines)
        includes = gyp.common.EncodePOSIXShellList(includes)
        cflags_c = gyp.common.EncodePOSIXShellList(cflags_c)
        cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc)

        commands = per_config_commands.setdefault(configuration_name, [])
        for source in sources:
            file = resolve(source)
            isc = source.endswith(".c")
            cc = "cc" if isc else "c++"
            cflags = cflags_c if isc else cflags_cc
            command = " ".join(
                (
                    cc,
                    defines,
                    includes,
                    cflags,
                    "-c",
                    gyp.common.EncodePOSIXShellArgument(file),
                )
            )
            commands.append({"command": command, "directory": output_dir, "file": file})


def GenerateOutput(target_list, target_dicts, data, params):
    per_config_commands = {}
    for qualified_target, target in target_dicts.items():
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            qualified_target
        )
        if IsMac(params):
            settings = data[build_file]
            gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target)
        cwd = os.path.dirname(build_file)
        AddCommandsForTarget(cwd, target, params, per_config_commands)

    try:
        output_dir = params["options"].generator_output
    except (AttributeError, KeyError):
        output_dir = params["generator_flags"].get("output_dir", "out")
    for configuration_name, commands in per_config_commands.items():
        filename = os.path.join(output_dir, configuration_name, "compile_commands.json")
        gyp.common.EnsureDirExists(filename)
        fp = open(filename, "w")
        json.dump(commands, fp=fp, indent=0, check_circular=False)


def PerformBuild(data, configurations, params):
    pass
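For orientation, a hedged example of what one entry in the emitted compile_commands.json looks like; the paths and flags are invented.

# One element of the JSON array written above (values illustrative only):
entry = {
    "command": "cc -DFOO -I/abs/path/include -O2 -c /abs/path/src/main.c",
    "directory": "out",
    "file": "/abs/path/src/main.c",
}
# Tools such as clangd consume a list of these entries.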
103 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py (generated, vendored, new file)
@@ -0,0 +1,103 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


import os
import gyp
import gyp.common
import gyp.msvs_emulation
import json

generator_supports_multiple_toolsets = True

generator_wants_static_library_dependencies_adjusted = False

generator_filelist_paths = {}

generator_default_variables = {}
for dirname in [
    "INTERMEDIATE_DIR",
    "SHARED_INTERMEDIATE_DIR",
    "PRODUCT_DIR",
    "LIB_DIR",
    "SHARED_LIB_DIR",
]:
    # Some gyp steps fail if these are empty(!).
    generator_default_variables[dirname] = "dir"
for unused in [
    "RULE_INPUT_PATH",
    "RULE_INPUT_ROOT",
    "RULE_INPUT_NAME",
    "RULE_INPUT_DIRNAME",
    "RULE_INPUT_EXT",
    "EXECUTABLE_PREFIX",
    "EXECUTABLE_SUFFIX",
    "STATIC_LIB_PREFIX",
    "STATIC_LIB_SUFFIX",
    "SHARED_LIB_PREFIX",
    "SHARED_LIB_SUFFIX",
    "CONFIGURATION_NAME",
]:
    generator_default_variables[unused] = ""


def CalculateVariables(default_variables, params):
    generator_flags = params.get("generator_flags", {})
    for key, val in generator_flags.items():
        default_variables.setdefault(key, val)
    default_variables.setdefault("OS", gyp.common.GetFlavor(params))

    flavor = gyp.common.GetFlavor(params)
    if flavor == "win":
        gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)


def CalculateGeneratorInputInfo(params):
    """Calculate the generator specific info that gets fed to input (called by
    gyp)."""
    generator_flags = params.get("generator_flags", {})
    if generator_flags.get("adjust_static_libraries", False):
        global generator_wants_static_library_dependencies_adjusted
        generator_wants_static_library_dependencies_adjusted = True

    toplevel = params["options"].toplevel_dir
    generator_dir = os.path.relpath(params["options"].generator_output or ".")
    # output_dir: relative path from generator_dir to the build directory.
    output_dir = generator_flags.get("output_dir", "out")
    qualified_out_dir = os.path.normpath(
        os.path.join(toplevel, generator_dir, output_dir, "gypfiles")
    )
    global generator_filelist_paths
    generator_filelist_paths = {
        "toplevel": toplevel,
        "qualified_out_dir": qualified_out_dir,
    }


def GenerateOutput(target_list, target_dicts, data, params):
    # Map of target -> list of targets it depends on.
    edges = {}

    # Queue of targets to visit.
    targets_to_visit = target_list[:]

    while len(targets_to_visit) > 0:
        target = targets_to_visit.pop()
        if target in edges:
            continue
        edges[target] = []

        for dep in target_dicts[target].get("dependencies", []):
            edges[target].append(dep)
            targets_to_visit.append(dep)

    try:
        filepath = params["generator_flags"]["output_dir"]
    except KeyError:
        filepath = "."
    filename = os.path.join(filepath, "dump.json")
    f = open(filename, "w")
    json.dump(edges, f)
    f.close()
    print("Wrote json to %s." % filename)
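A hedged sketch of the dump.json this generator produces, reusing the a.gyp/b.gyp naming from analyzer.py's docstring; the exact qualified-name format is an assumption.

import json

# Plausible dump.json contents (target names invented):
edges = {
    "a.gyp:a1#target": [],
    "a.gyp:a2#target": ["b.gyp:b2#target"],
    "b.gyp:b2#target": [],
}
print(json.dumps(edges))  # same shape as the dump.json written above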
461 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py (generated, vendored, new file)
@@ -0,0 +1,461 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""GYP backend that generates Eclipse CDT settings files.

This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
files that can be imported into an Eclipse CDT project. The XML file contains a
list of include paths and symbols (i.e. defines).

Because a full .cproject definition is not created by this generator, it's not
possible to properly define the include dirs and symbols for each file
individually. Instead, one set of includes/symbols is generated for the entire
project. This works fairly well (and is a vast improvement in general), but may
still result in a few indexer issues here and there.

This generator has no automated tests, so expect it to be broken.
"""
from xml.sax.saxutils import escape
import os.path
import subprocess
import gyp
import gyp.common
import gyp.msvs_emulation
import shlex
import xml.etree.ElementTree as ET

generator_wants_static_library_dependencies_adjusted = False

generator_default_variables = {}

for dirname in ["INTERMEDIATE_DIR", "PRODUCT_DIR", "LIB_DIR", "SHARED_LIB_DIR"]:
    # Some gyp steps fail if these are empty(!), so we convert them to variables
    generator_default_variables[dirname] = "$" + dirname

for unused in [
    "RULE_INPUT_PATH",
    "RULE_INPUT_ROOT",
    "RULE_INPUT_NAME",
    "RULE_INPUT_DIRNAME",
    "RULE_INPUT_EXT",
    "EXECUTABLE_PREFIX",
    "EXECUTABLE_SUFFIX",
    "STATIC_LIB_PREFIX",
    "STATIC_LIB_SUFFIX",
    "SHARED_LIB_PREFIX",
    "SHARED_LIB_SUFFIX",
    "CONFIGURATION_NAME",
]:
    generator_default_variables[unused] = ""

# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
# part of the path when dealing with generated headers. This value will be
# replaced dynamically for each configuration.
generator_default_variables["SHARED_INTERMEDIATE_DIR"] = "$SHARED_INTERMEDIATE_DIR"


def CalculateVariables(default_variables, params):
    generator_flags = params.get("generator_flags", {})
    for key, val in generator_flags.items():
        default_variables.setdefault(key, val)
    flavor = gyp.common.GetFlavor(params)
    default_variables.setdefault("OS", flavor)
    if flavor == "win":
        gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)


def CalculateGeneratorInputInfo(params):
    """Calculate the generator specific info that gets fed to input (called by
    gyp)."""
    generator_flags = params.get("generator_flags", {})
    if generator_flags.get("adjust_static_libraries", False):
        global generator_wants_static_library_dependencies_adjusted
        generator_wants_static_library_dependencies_adjusted = True


def GetAllIncludeDirectories(
    target_list,
    target_dicts,
    shared_intermediate_dirs,
    config_name,
    params,
    compiler_path,
):
    """Calculate the set of include directories to be used.

    Returns:
      A list including all the include_dir's specified for every target followed
      by any include directories that were added as cflag compiler options.
    """

    gyp_includes_set = set()
    compiler_includes_list = []

    # Find compiler's default include dirs.
    if compiler_path:
        command = shlex.split(compiler_path)
        command.extend(["-E", "-xc++", "-v", "-"])
        proc = subprocess.Popen(
            args=command,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        output = proc.communicate()[1].decode("utf-8")
        # Extract the list of include dirs from the output, which has this
        # format:
        #   ...
        #   #include "..." search starts here:
        #   #include <...> search starts here:
        #    /usr/include/c++/4.6
        #    /usr/local/include
        #   End of search list.
        #   ...
        in_include_list = False
        for line in output.splitlines():
            if line.startswith("#include"):
                in_include_list = True
                continue
            if line.startswith("End of search list."):
                break
            if in_include_list:
                include_dir = line.strip()
                if include_dir not in compiler_includes_list:
                    compiler_includes_list.append(include_dir)

    flavor = gyp.common.GetFlavor(params)
    if flavor == "win":
        generator_flags = params.get("generator_flags", {})
    for target_name in target_list:
        target = target_dicts[target_name]
        if config_name in target["configurations"]:
            config = target["configurations"][config_name]

            # Look for any include dirs that were explicitly added via cflags.
            # This may be done in gyp files to force certain includes to come at
            # the end.
            # TODO(jgreenwald): Change the gyp files to not abuse cflags for
            # this, and remove this.
            if flavor == "win":
                msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
                cflags = msvs_settings.GetCflags(config_name)
            else:
                cflags = config["cflags"]
            for cflag in cflags:
                if cflag.startswith("-I"):
                    include_dir = cflag[2:]
                    if include_dir not in compiler_includes_list:
                        compiler_includes_list.append(include_dir)

            # Find standard gyp include dirs.
            if "include_dirs" in config:
                include_dirs = config["include_dirs"]
                for shared_intermediate_dir in shared_intermediate_dirs:
                    for include_dir in include_dirs:
                        include_dir = include_dir.replace(
                            "$SHARED_INTERMEDIATE_DIR", shared_intermediate_dir
                        )
                        if not os.path.isabs(include_dir):
                            base_dir = os.path.dirname(target_name)

                            include_dir = base_dir + "/" + include_dir
                            include_dir = os.path.abspath(include_dir)

                        gyp_includes_set.add(include_dir)

    # Generate a list that has all the include dirs.
    all_includes_list = list(gyp_includes_set)
    all_includes_list.sort()
    for compiler_include in compiler_includes_list:
        if compiler_include not in gyp_includes_set:
            all_includes_list.append(compiler_include)

    # All done.
    return all_includes_list
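The compiler probe above can be exercised on its own. A minimal sketch, assuming a gcc/clang-compatible compiler on PATH; the parsing mirrors the loop above.

import shlex
import subprocess


def default_include_dirs(compiler="c++"):
    """Probe a compiler for its built-in include search path via -E -v."""
    command = shlex.split(compiler) + ["-E", "-xc++", "-v", "-"]
    proc = subprocess.Popen(
        command,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    # The search list is printed on stderr between the "#include" banner
    # lines and "End of search list."
    stderr = proc.communicate(input=b"")[1].decode("utf-8")
    dirs, in_list = [], False
    for line in stderr.splitlines():
        if line.startswith("#include"):
            in_list = True
        elif line.startswith("End of search list."):
            break
        elif in_list:
            dirs.append(line.strip())
    return dirs


print(default_include_dirs())  # e.g. ['/usr/include/c++/13', '/usr/local/include', ...]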
def GetCompilerPath(target_list, data, options):
    """Determine a command that can be used to invoke the compiler.

    Returns:
      If this is a gyp project that has explicit make settings, try to determine
      the compiler from that. Otherwise, see if a compiler was specified via the
      CC_target environment variable.
    """
    # First, see if the compiler is configured in make's settings.
    build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
    make_global_settings_dict = data[build_file].get("make_global_settings", {})
    for key, value in make_global_settings_dict:
        if key in ["CC", "CXX"]:
            return os.path.join(options.toplevel_dir, value)

    # Check to see if the compiler was specified as an environment variable.
    for key in ["CC_target", "CC", "CXX"]:
        compiler = os.environ.get(key)
        if compiler:
            return compiler

    return "gcc"


def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path):
    """Calculate the defines for a project.

    Returns:
      A dict that includes explicit defines declared in gyp files along with all
      of the default defines that the compiler uses.
    """

    # Get defines declared in the gyp files.
    all_defines = {}
    flavor = gyp.common.GetFlavor(params)
    if flavor == "win":
        generator_flags = params.get("generator_flags", {})
    for target_name in target_list:
        target = target_dicts[target_name]

        if flavor == "win":
            msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
            extra_defines = msvs_settings.GetComputedDefines(config_name)
        else:
            extra_defines = []
        if config_name in target["configurations"]:
            config = target["configurations"][config_name]
            target_defines = config["defines"]
        else:
            target_defines = []
        for define in target_defines + extra_defines:
            split_define = define.split("=", 1)
            if len(split_define) == 1:
                split_define.append("1")
            if split_define[0].strip() in all_defines:
                # Already defined
                continue
            all_defines[split_define[0].strip()] = split_define[1].strip()
    # Get default compiler defines (if possible).
    if flavor == "win":
        return all_defines  # Default defines already processed in the loop above.
    if compiler_path:
        command = shlex.split(compiler_path)
        command.extend(["-E", "-dM", "-"])
        cpp_proc = subprocess.Popen(
            args=command, cwd=".", stdin=subprocess.PIPE, stdout=subprocess.PIPE
        )
        cpp_output = cpp_proc.communicate()[0].decode("utf-8")
        cpp_lines = cpp_output.split("\n")
        for cpp_line in cpp_lines:
            if not cpp_line.strip():
                continue
            cpp_line_parts = cpp_line.split(" ", 2)
            key = cpp_line_parts[1]
            val = cpp_line_parts[2] if len(cpp_line_parts) >= 3 else "1"
            all_defines[key] = val

    return all_defines
def WriteIncludePaths(out, eclipse_langs, include_dirs):
    """Write the includes section of a CDT settings export file."""

    out.write(
        '  <section name="org.eclipse.cdt.internal.ui.wizards.'
        'settingswizards.IncludePaths">\n'
    )
    out.write('    <language name="holder for library settings"></language>\n')
    for lang in eclipse_langs:
        out.write('    <language name="%s">\n' % lang)
        for include_dir in include_dirs:
            out.write(
                '      <includepath workspace_path="false">%s</includepath>\n'
                % include_dir
            )
        out.write("    </language>\n")
    out.write("  </section>\n")


def WriteMacros(out, eclipse_langs, defines):
    """Write the macros section of a CDT settings export file."""

    out.write(
        '  <section name="org.eclipse.cdt.internal.ui.wizards.'
        'settingswizards.Macros">\n'
    )
    out.write('    <language name="holder for library settings"></language>\n')
    for lang in eclipse_langs:
        out.write('    <language name="%s">\n' % lang)
        for key in sorted(defines):
            out.write(
                "      <macro><name>%s</name><value>%s</value></macro>\n"
                % (escape(key), escape(defines[key]))
            )
        out.write("    </language>\n")
    out.write("  </section>\n")
def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name):
|
||||
options = params["options"]
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
|
||||
# build_dir: relative path from source root to our output files.
|
||||
# e.g. "out/Debug"
|
||||
build_dir = os.path.join(generator_flags.get("output_dir", "out"), config_name)
|
||||
|
||||
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
|
||||
# Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
|
||||
# SHARED_INTERMEDIATE_DIR. Include both possible locations.
|
||||
shared_intermediate_dirs = [
|
||||
os.path.join(toplevel_build, "obj", "gen"),
|
||||
os.path.join(toplevel_build, "gen"),
|
||||
]
|
||||
|
||||
GenerateCdtSettingsFile(
|
||||
target_list,
|
||||
target_dicts,
|
||||
data,
|
||||
params,
|
||||
config_name,
|
||||
os.path.join(toplevel_build, "eclipse-cdt-settings.xml"),
|
||||
options,
|
||||
shared_intermediate_dirs,
|
||||
)
|
||||
GenerateClasspathFile(
|
||||
target_list,
|
||||
target_dicts,
|
||||
options.toplevel_dir,
|
||||
toplevel_build,
|
||||
os.path.join(toplevel_build, "eclipse-classpath.xml"),
|
||||
)
|
||||
|
||||
|
||||
def GenerateCdtSettingsFile(
|
||||
target_list,
|
||||
target_dicts,
|
||||
data,
|
||||
params,
|
||||
config_name,
|
||||
out_name,
|
||||
options,
|
||||
shared_intermediate_dirs,
|
||||
):
|
||||
gyp.common.EnsureDirExists(out_name)
|
||||
with open(out_name, "w") as out:
|
||||
out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
out.write("<cdtprojectproperties>\n")
|
||||
|
||||
eclipse_langs = [
|
||||
"C++ Source File",
|
||||
"C Source File",
|
||||
"Assembly Source File",
|
||||
"GNU C++",
|
||||
"GNU C",
|
||||
"Assembly",
|
||||
]
|
||||
compiler_path = GetCompilerPath(target_list, data, options)
|
||||
include_dirs = GetAllIncludeDirectories(
|
||||
target_list,
|
||||
target_dicts,
|
||||
shared_intermediate_dirs,
|
||||
config_name,
|
||||
params,
|
||||
compiler_path,
|
||||
)
|
||||
WriteIncludePaths(out, eclipse_langs, include_dirs)
|
||||
defines = GetAllDefines(
|
||||
target_list, target_dicts, data, config_name, params, compiler_path
|
||||
)
|
||||
WriteMacros(out, eclipse_langs, defines)
|
||||
|
||||
out.write("</cdtprojectproperties>\n")
|
||||
|
||||
|
||||
def GenerateClasspathFile(
|
||||
target_list, target_dicts, toplevel_dir, toplevel_build, out_name
|
||||
):
|
||||
"""Generates a classpath file suitable for symbol navigation and code
|
||||
completion of Java code (such as in Android projects) by finding all
|
||||
.java and .jar files used as action inputs."""
|
||||
gyp.common.EnsureDirExists(out_name)
|
||||
result = ET.Element("classpath")
|
||||
|
||||
def AddElements(kind, paths):
|
||||
# First, we need to normalize the paths so they are all relative to the
|
||||
# toplevel dir.
|
||||
rel_paths = set()
|
||||
for path in paths:
|
||||
if os.path.isabs(path):
|
||||
rel_paths.add(os.path.relpath(path, toplevel_dir))
|
||||
else:
|
||||
rel_paths.add(path)
|
||||
|
||||
for path in sorted(rel_paths):
|
||||
entry_element = ET.SubElement(result, "classpathentry")
|
||||
entry_element.set("kind", kind)
|
||||
entry_element.set("path", path)
|
||||
|
||||
AddElements("lib", GetJavaJars(target_list, target_dicts, toplevel_dir))
|
||||
AddElements("src", GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
|
||||
# Include the standard JRE container and a dummy out folder
|
||||
AddElements("con", ["org.eclipse.jdt.launching.JRE_CONTAINER"])
|
||||
# Include a dummy out folder so that Eclipse doesn't use the default /bin
|
||||
# folder in the root of the project.
|
||||
AddElements("output", [os.path.join(toplevel_build, ".eclipse-java-build")])
|
||||
|
||||
ET.ElementTree(result).write(out_name)
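
A miniature version of the ElementTree construction used above, with an invented entry, just to show the shape of the emitted XML:

import xml.etree.ElementTree as ET

root = ET.Element("classpath")
entry = ET.SubElement(root, "classpathentry")
entry.set("kind", "lib")
entry.set("path", "third_party/foo.jar")   # invented path
print(ET.tostring(root).decode())
# <classpath><classpathentry kind="lib" path="third_party/foo.jar" /></classpath>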
|
||||
|
||||
|
||||
def GetJavaJars(target_list, target_dicts, toplevel_dir):
|
||||
"""Generates a sequence of all .jars used as inputs."""
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
for action in target.get("actions", []):
|
||||
for input_ in action["inputs"]:
|
||||
if os.path.splitext(input_)[1] == ".jar" and not input_.startswith("$"):
|
||||
if os.path.isabs(input_):
|
||||
yield input_
|
||||
else:
|
||||
yield os.path.join(os.path.dirname(target_name), input_)
|
||||
|
||||
|
||||
def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
|
||||
"""Generates a sequence of all likely java package root directories."""
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
for action in target.get("actions", []):
|
||||
for input_ in action["inputs"]:
|
||||
if os.path.splitext(input_)[1] == ".java" and not input_.startswith(
|
||||
"$"
|
||||
):
|
||||
dir_ = os.path.dirname(
|
||||
os.path.join(os.path.dirname(target_name), input_)
|
||||
)
|
||||
# If there is a parent 'src' or 'java' folder, navigate up to it -
|
||||
# these are canonical package root names in Chromium. This will
|
||||
# break if 'src' or 'java' exists in the package structure. This
|
||||
# could be further improved by inspecting the java file for the
|
||||
# package name if this proves to be too fragile in practice.
|
||||
parent_search = dir_
|
||||
while os.path.basename(parent_search) not in ["src", "java"]:
|
||||
parent_search, _ = os.path.split(parent_search)
|
||||
if not parent_search or parent_search == toplevel_dir:
|
||||
# Didn't find a known root, just return the original path
|
||||
yield dir_
|
||||
break
|
||||
else:
|
||||
yield parent_search
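
The package-root walk can be tried in isolation; the path below is invented:

import os

d = "chrome/android/java/src/org/chromium/foo"
p = d
# Climb one component at a time until a 'src' or 'java' directory is hit.
while os.path.basename(p) not in ("src", "java"):
    p, _ = os.path.split(p)
print(p)  # chrome/android/java/src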
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
"""Generate an XML settings file that can be imported into a CDT project."""
|
||||
|
||||
if params["options"].generator_output:
|
||||
raise NotImplementedError("--generator_output not implemented for eclipse")
|
||||
|
||||
user_config = params.get("generator_flags", {}).get("config", None)
|
||||
if user_config:
|
||||
GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
|
||||
else:
|
||||
config_names = target_dicts[target_list[0]]["configurations"]
|
||||
for config_name in config_names:
|
||||
GenerateOutputForConfig(
|
||||
target_list, target_dicts, data, params, config_name
|
||||
)
|
89
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
generated
vendored
Normal file
|
@@ -0,0 +1,89 @@
|
|||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""gypd output module
|
||||
|
||||
This module produces gyp input as its output. Output files are given the
|
||||
.gypd extension to avoid overwriting the .gyp files that they are generated
|
||||
from. Internal references to .gyp files (such as those found in
|
||||
"dependencies" sections) are not adjusted to point to .gypd files instead;
|
||||
unlike other paths, which are relative to the .gyp or .gypd file, such paths
|
||||
are relative to the directory from which gyp was run to create the .gypd file.
|
||||
|
||||
This generator module is intended to be a sample and a debugging aid, hence
|
||||
the "d" for "debug" in .gypd. It is useful to inspect the results of the
|
||||
various merges, expansions, and conditional evaluations performed by gyp
|
||||
and to see a representation of what would be fed to a generator module.
|
||||
|
||||
It's not advisable to rename .gypd files produced by this module to .gyp,
|
||||
because they will have all merges, expansions, and evaluations already
|
||||
performed and the relevant constructs not present in the output; paths to
|
||||
dependencies may be wrong; and various sections that do not belong in .gyp
|
||||
files, such as "included_files" and "*_excluded", will be present.
|
||||
Output will also be stripped of comments. This is not intended to be a
|
||||
general-purpose gyp pretty-printer; for that, you probably just want to
|
||||
run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
|
||||
comments but won't do all of the other things done to this module's output.
|
||||
|
||||
The specific formatting of the output generated by this module is subject
|
||||
to change.
|
||||
"""
|
||||
|
||||
|
||||
import gyp.common
|
||||
import pprint
|
||||
|
||||
|
||||
# These variables should just be spit back out as variable references.
|
||||
_generator_identity_variables = [
|
||||
"CONFIGURATION_NAME",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"INTERMEDIATE_DIR",
|
||||
"LIB_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_PATH",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
"SHARED_LIB_DIR",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
]
|
||||
|
||||
# gypd doesn't define a default value for OS like many other generator
|
||||
# modules. Specify "-D OS=whatever" on the command line to provide a value.
|
||||
generator_default_variables = {}
|
||||
|
||||
# gypd supports multiple toolsets
|
||||
generator_supports_multiple_toolsets = True
|
||||
|
||||
# TODO(mark): This always uses <, which isn't right. The input module should
|
||||
# notify the generator to tell it which phase it is operating in, and this
|
||||
# module should use < for the early phase and then switch to > for the late
|
||||
# phase. Bonus points for carrying @ back into the output too.
|
||||
for v in _generator_identity_variables:
|
||||
generator_default_variables[v] = "<(%s)" % v
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
output_files = {}
|
||||
for qualified_target in target_list:
|
||||
[input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
|
||||
|
||||
if input_file[-4:] != ".gyp":
|
||||
continue
|
||||
input_file_stem = input_file[:-4]
|
||||
output_file = input_file_stem + params["options"].suffix + ".gypd"
|
||||
|
||||
output_files[output_file] = output_files.get(output_file, input_file)
|
||||
|
||||
for output_file, input_file in output_files.items():
|
||||
output = open(output_file, "w")
|
||||
pprint.pprint(data[input_file], output)
|
||||
output.close()
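
As the module docstring suggests, a .gyp file is just a Python literal, so a rough pretty-print (with none of gyp's merging or expansion) needs only a few lines. Here ast.literal_eval stands in for the docstring's eval() as a safer equivalent, and "source.gyp" is a placeholder path:

import ast
import pprint

with open("source.gyp") as f:            # placeholder path
    data = ast.literal_eval(f.read())    # parse the dict literal
pprint.pprint(data)                      # comments are lost, as noted above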
|
57
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
generated
vendored
Normal file
|
@@ -0,0 +1,57 @@
|
|||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""gypsh output module
|
||||
|
||||
gypsh is a GYP shell. It's not really a generator per se. All it does is
|
||||
fire up an interactive Python session with a few local variables set to the
|
||||
variables passed to the generator. Like gypd, it's intended as a debugging
|
||||
aid, to facilitate the exploration of .gyp structures after being processed
|
||||
by the input module.
|
||||
|
||||
The expected usage is "gyp -f gypsh -D OS=desired_os".
|
||||
"""
|
||||
|
||||
|
||||
import code
|
||||
import sys
|
||||
|
||||
|
||||
# All of this stuff about generator variables was lovingly ripped from gypd.py.
|
||||
# That module has a much better description of what's going on and why.
|
||||
_generator_identity_variables = [
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"INTERMEDIATE_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_PATH",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
]
|
||||
|
||||
generator_default_variables = {}
|
||||
|
||||
for v in _generator_identity_variables:
|
||||
generator_default_variables[v] = "<(%s)" % v
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
locals = {
|
||||
"target_list": target_list,
|
||||
"target_dicts": target_dicts,
|
||||
"data": data,
|
||||
}
|
||||
|
||||
# Use a banner that looks like the stock Python one and like what
|
||||
# code.interact uses by default, but tack on something to indicate what
|
||||
# locals are available, and identify gypsh.
|
||||
banner = (
|
||||
f"Python {sys.version} on {sys.platform}\nlocals.keys() = "
|
||||
f"{repr(sorted(locals.keys()))}\ngypsh"
|
||||
)
|
||||
|
||||
code.interact(banner, local=locals)
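
Outside of gyp, the same code.interact pattern looks like this; purely illustrative, with empty stand-ins for the generator variables:

import code
import sys

demo_locals = {"target_list": [], "target_dicts": {}, "data": {}}
banner = (
    f"Python {sys.version} on {sys.platform}\n"
    f"locals.keys() = {sorted(demo_locals)!r}\ngypsh-demo"
)
code.interact(banner, local=demo_locals)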
|
2711
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
generated
vendored
Normal file
File diff suppressed because it is too large
3977
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
generated
vendored
Normal file
File diff suppressed because it is too large
44
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
generated
vendored
Normal file
|
@@ -0,0 +1,44 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the msvs.py file. """
|
||||
|
||||
import gyp.generator.msvs as msvs
|
||||
import unittest
|
||||
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class TestSequenceFunctions(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.stderr = StringIO()
|
||||
|
||||
def test_GetLibraries(self):
|
||||
self.assertEqual(msvs._GetLibraries({}), [])
|
||||
self.assertEqual(msvs._GetLibraries({"libraries": []}), [])
|
||||
self.assertEqual(
|
||||
msvs._GetLibraries({"other": "foo", "libraries": ["a.lib"]}), ["a.lib"]
|
||||
)
|
||||
self.assertEqual(msvs._GetLibraries({"libraries": ["-la"]}), ["a.lib"])
|
||||
self.assertEqual(
|
||||
msvs._GetLibraries(
|
||||
{
|
||||
"libraries": [
|
||||
"a.lib",
|
||||
"b.lib",
|
||||
"c.lib",
|
||||
"-lb.lib",
|
||||
"-lb.lib",
|
||||
"d.lib",
|
||||
"a.lib",
|
||||
]
|
||||
}
|
||||
),
|
||||
["c.lib", "b.lib", "d.lib", "a.lib"],
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
2933
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
generated
vendored
Normal file
File diff suppressed because it is too large
55
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
generated
vendored
Normal file
|
@@ -0,0 +1,55 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the ninja.py file. """
|
||||
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
import gyp.generator.ninja as ninja
|
||||
|
||||
|
||||
class TestPrefixesAndSuffixes(unittest.TestCase):
|
||||
def test_BinaryNamesWindows(self):
|
||||
# These cannot run on non-Windows as they require a VS installation to
|
||||
# correctly handle variable expansion.
|
||||
if sys.platform.startswith("win"):
|
||||
writer = ninja.NinjaWriter(
|
||||
"foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "win"
|
||||
)
|
||||
spec = {"target_name": "wee"}
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "executable").endswith(".exe")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "shared_library").endswith(".dll")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "static_library").endswith(".lib")
|
||||
)
|
||||
|
||||
def test_BinaryNamesLinux(self):
|
||||
writer = ninja.NinjaWriter(
|
||||
"foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "linux"
|
||||
)
|
||||
spec = {"target_name": "wee"}
|
||||
self.assertTrue("." not in writer.ComputeOutputFileName(spec, "executable"))
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "shared_library").startswith("lib")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "static_library").startswith("lib")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "shared_library").endswith(".so")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "static_library").endswith(".a")
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
1391
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
generated
vendored
Normal file
File diff suppressed because it is too large
25
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
generated
vendored
Normal file
|
@@ -0,0 +1,25 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the xcode.py file. """
|
||||
|
||||
import gyp.generator.xcode as xcode
|
||||
import unittest
|
||||
import sys
|
||||
|
||||
|
||||
class TestEscapeXcodeDefine(unittest.TestCase):
|
||||
if sys.platform == "darwin":
|
||||
|
||||
def test_InheritedRemainsUnescaped(self):
|
||||
self.assertEqual(xcode.EscapeXcodeDefine("$(inherited)"), "$(inherited)")
|
||||
|
||||
def test_Escaping(self):
|
||||
self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
3113
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/input.py
generated
vendored
Normal file
File diff suppressed because it is too large
98
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
generated
vendored
Normal file
|
@@ -0,0 +1,98 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Unit tests for the input.py file."""
|
||||
|
||||
import gyp.input
|
||||
import unittest
|
||||
|
||||
|
||||
class TestFindCycles(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.nodes = {}
|
||||
for x in ("a", "b", "c", "d", "e"):
|
||||
self.nodes[x] = gyp.input.DependencyGraphNode(x)
|
||||
|
||||
def _create_dependency(self, dependent, dependency):
|
||||
dependent.dependencies.append(dependency)
|
||||
dependency.dependents.append(dependent)
|
||||
|
||||
def test_no_cycle_empty_graph(self):
|
||||
for label, node in self.nodes.items():
|
||||
self.assertEqual([], node.FindCycles())
|
||||
|
||||
def test_no_cycle_line(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["c"], self.nodes["d"])
|
||||
|
||||
for label, node in self.nodes.items():
|
||||
self.assertEqual([], node.FindCycles())
|
||||
|
||||
def test_no_cycle_dag(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["a"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
|
||||
for label, node in self.nodes.items():
|
||||
self.assertEqual([], node.FindCycles())
|
||||
|
||||
def test_cycle_self_reference(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["a"])
|
||||
|
||||
self.assertEqual(
|
||||
[[self.nodes["a"], self.nodes["a"]]], self.nodes["a"].FindCycles()
|
||||
)
|
||||
|
||||
def test_cycle_two_nodes(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["a"])
|
||||
|
||||
self.assertEqual(
|
||||
[[self.nodes["a"], self.nodes["b"], self.nodes["a"]]],
|
||||
self.nodes["a"].FindCycles(),
|
||||
)
|
||||
self.assertEqual(
|
||||
[[self.nodes["b"], self.nodes["a"], self.nodes["b"]]],
|
||||
self.nodes["b"].FindCycles(),
|
||||
)
|
||||
|
||||
def test_two_cycles(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["a"])
|
||||
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["c"], self.nodes["b"])
|
||||
|
||||
cycles = self.nodes["a"].FindCycles()
|
||||
self.assertTrue([self.nodes["a"], self.nodes["b"], self.nodes["a"]] in cycles)
|
||||
self.assertTrue([self.nodes["b"], self.nodes["c"], self.nodes["b"]] in cycles)
|
||||
self.assertEqual(2, len(cycles))
|
||||
|
||||
def test_big_cycle(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["c"], self.nodes["d"])
|
||||
self._create_dependency(self.nodes["d"], self.nodes["e"])
|
||||
self._create_dependency(self.nodes["e"], self.nodes["a"])
|
||||
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
self.nodes["a"],
|
||||
self.nodes["b"],
|
||||
self.nodes["c"],
|
||||
self.nodes["d"],
|
||||
self.nodes["e"],
|
||||
self.nodes["a"],
|
||||
]
|
||||
],
|
||||
self.nodes["a"].FindCycles(),
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
771
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
generated
vendored
Normal file
|
@@ -0,0 +1,771 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Utility functions to perform Xcode-style build steps.
|
||||
|
||||
These functions are executed via gyp-mac-tool when using the Makefile generator.
|
||||
"""
|
||||
|
||||
|
||||
import fcntl
|
||||
import fnmatch
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import plistlib
|
||||
import re
|
||||
import shutil
|
||||
import struct
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
|
||||
def main(args):
|
||||
executor = MacTool()
|
||||
exit_code = executor.Dispatch(args)
|
||||
if exit_code is not None:
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
class MacTool:
|
||||
"""This class performs all the Mac tooling steps. The methods can either be
|
||||
executed directly, or dispatched from an argument list."""
|
||||
|
||||
def Dispatch(self, args):
|
||||
"""Dispatches a string command to a method."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
method = "Exec%s" % self._CommandifyName(args[0])
|
||||
return getattr(self, method)(*args[1:])
|
||||
|
||||
def _CommandifyName(self, name_string):
|
||||
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
|
||||
return name_string.title().replace("-", "")
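
So a command-line invocation maps to a method name like this:

# e.g. "gyp-mac-tool copy-info-plist ..." dispatches to ExecCopyInfoPlist.
name = "copy-info-plist"
method = "Exec%s" % name.title().replace("-", "")
assert method == "ExecCopyInfoPlist"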
|
||||
|
||||
def ExecCopyBundleResource(self, source, dest, convert_to_binary):
|
||||
"""Copies a resource file to the bundle/Resources directory, performing any
|
||||
necessary compilation on each resource."""
|
||||
convert_to_binary = convert_to_binary == "True"
|
||||
extension = os.path.splitext(source)[1].lower()
|
||||
if os.path.isdir(source):
|
||||
# Copy tree.
|
||||
# TODO(thakis): This copies file attributes like mtime, while the
|
||||
# single-file branch below doesn't. This should probably be changed to
|
||||
# be consistent with the single-file branch.
|
||||
if os.path.exists(dest):
|
||||
shutil.rmtree(dest)
|
||||
shutil.copytree(source, dest)
|
||||
elif extension == ".xib":
|
||||
return self._CopyXIBFile(source, dest)
|
||||
elif extension == ".storyboard":
|
||||
return self._CopyXIBFile(source, dest)
|
||||
elif extension == ".strings" and not convert_to_binary:
|
||||
self._CopyStringsFile(source, dest)
|
||||
else:
|
||||
if os.path.exists(dest):
|
||||
os.unlink(dest)
|
||||
shutil.copy(source, dest)
|
||||
|
||||
if convert_to_binary and extension in (".plist", ".strings"):
|
||||
self._ConvertToBinary(dest)
|
||||
|
||||
def _CopyXIBFile(self, source, dest):
|
||||
"""Compiles a XIB file with ibtool into a binary plist in the bundle."""
|
||||
|
||||
# ibtool sometimes crashes with relative paths. See crbug.com/314728.
|
||||
base = os.path.dirname(os.path.realpath(__file__))
|
||||
if os.path.relpath(source):
|
||||
source = os.path.join(base, source)
|
||||
if os.path.relpath(dest):
|
||||
dest = os.path.join(base, dest)
|
||||
|
||||
args = ["xcrun", "ibtool", "--errors", "--warnings", "--notices"]
|
||||
|
||||
if os.environ["XCODE_VERSION_ACTUAL"] > "0700":
|
||||
args.extend(["--auto-activate-custom-fonts"])
|
||||
if "IPHONEOS_DEPLOYMENT_TARGET" in os.environ:
|
||||
args.extend(
|
||||
[
|
||||
"--target-device",
|
||||
"iphone",
|
||||
"--target-device",
|
||||
"ipad",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
|
||||
]
|
||||
)
|
||||
else:
|
||||
args.extend(
|
||||
[
|
||||
"--target-device",
|
||||
"mac",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["MACOSX_DEPLOYMENT_TARGET"],
|
||||
]
|
||||
)
|
||||
|
||||
args.extend(
|
||||
["--output-format", "human-readable-text", "--compile", dest, source]
|
||||
)
|
||||
|
||||
ibtool_section_re = re.compile(r"/\*.*\*/")
|
||||
ibtool_re = re.compile(r".*note:.*is clipping its content")
|
||||
try:
|
||||
stdout = subprocess.check_output(args)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(e.output)
|
||||
raise
|
||||
current_section_header = None
|
||||
for line in stdout.splitlines():
|
||||
if ibtool_section_re.match(line):
|
||||
current_section_header = line
|
||||
elif not ibtool_re.match(line):
|
||||
if current_section_header:
|
||||
print(current_section_header)
|
||||
current_section_header = None
|
||||
print(line)
|
||||
return 0
|
||||
|
||||
def _ConvertToBinary(self, dest):
|
||||
subprocess.check_call(
|
||||
["xcrun", "plutil", "-convert", "binary1", "-o", dest, dest]
|
||||
)
|
||||
|
||||
def _CopyStringsFile(self, source, dest):
|
||||
"""Copies a .strings file using iconv to reconvert the input into UTF-16."""
|
||||
input_code = self._DetectInputEncoding(source) or "UTF-8"
|
||||
|
||||
# Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
|
||||
# CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
|
||||
# CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
|
||||
# semicolon in dictionary.
|
||||
# on invalid files. Do the same kind of validation.
|
||||
import CoreFoundation
|
||||
|
||||
with open(source, "rb") as in_file:
|
||||
s = in_file.read()
|
||||
d = CoreFoundation.CFDataCreate(None, s, len(s))
|
||||
_, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
|
||||
if error:
|
||||
return
|
||||
|
||||
with open(dest, "wb") as fp:
|
||||
fp.write(s.decode(input_code).encode("UTF-16"))
|
||||
|
||||
def _DetectInputEncoding(self, file_name):
|
||||
"""Reads the first few bytes from file_name and tries to guess the text
|
||||
encoding. Returns None as a guess if it can't detect it."""
|
||||
with open(file_name, "rb") as fp:
|
||||
try:
|
||||
header = fp.read(3)
|
||||
except Exception:
|
||||
return None
|
||||
if header.startswith(b"\xFE\xFF"):
|
||||
return "UTF-16"
|
||||
elif header.startswith(b"\xFF\xFE"):
|
||||
return "UTF-16"
|
||||
elif header.startswith(b"\xEF\xBB\xBF"):
|
||||
return "UTF-8"
|
||||
else:
|
||||
return None
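
The BOM sniffing above, as a standalone sketch:

def detect_bom(first_bytes: bytes):
    # UTF-16 BOM in either byte order, then the UTF-8 BOM.
    if first_bytes.startswith(b"\xFE\xFF") or first_bytes.startswith(b"\xFF\xFE"):
        return "UTF-16"
    if first_bytes.startswith(b"\xEF\xBB\xBF"):
        return "UTF-8"
    return None

assert detect_bom(b"\xEF\xBB\xBFhello") == "UTF-8"
assert detect_bom(b"plain ascii") is None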
|
||||
|
||||
def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
|
||||
"""Copies the |source| Info.plist to the destination directory |dest|."""
|
||||
# Read the source Info.plist into memory.
|
||||
with open(source) as fd:
|
||||
lines = fd.read()
|
||||
|
||||
# Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
|
||||
plist = plistlib.readPlistFromString(lines)
|
||||
if keys:
|
||||
plist.update(json.loads(keys[0]))
|
||||
lines = plistlib.writePlistToString(plist)
|
||||
|
||||
# Go through all the environment variables and replace them as variables in
|
||||
# the file.
|
||||
IDENT_RE = re.compile(r"[_/\s]")
|
||||
for key in os.environ:
|
||||
if key.startswith("_"):
|
||||
continue
|
||||
evar = "${%s}" % key
|
||||
evalue = os.environ[key]
|
||||
            lines = lines.replace(evar, evalue)
|
||||
|
||||
            # Xcode supports various suffixes on environment variables, which are
|
||||
# all undocumented. :rfc1034identifier is used in the standard project
|
||||
# template these days, and :identifier was used earlier. They are used to
|
||||
# convert non-url characters into things that look like valid urls --
|
||||
# except that the replacement character for :identifier, '_' isn't valid
|
||||
# in a URL either -- oops, hence :rfc1034identifier was born.
|
||||
evar = "${%s:identifier}" % key
|
||||
evalue = IDENT_RE.sub("_", os.environ[key])
|
||||
            lines = lines.replace(evar, evalue)
|
||||
|
||||
evar = "${%s:rfc1034identifier}" % key
|
||||
evalue = IDENT_RE.sub("-", os.environ[key])
|
||||
            lines = lines.replace(evar, evalue)
|
||||
|
||||
# Remove any keys with values that haven't been replaced.
|
||||
lines = lines.splitlines()
|
||||
for i in range(len(lines)):
|
||||
if lines[i].strip().startswith("<string>${"):
|
||||
lines[i] = None
|
||||
lines[i - 1] = None
|
||||
lines = "\n".join(line for line in lines if line is not None)
|
||||
|
||||
# Write out the file with variables replaced.
|
||||
with open(dest, "w") as fd:
|
||||
fd.write(lines)
|
||||
|
||||
# Now write out PkgInfo file now that the Info.plist file has been
|
||||
# "compiled".
|
||||
self._WritePkgInfo(dest)
|
||||
|
||||
if convert_to_binary == "True":
|
||||
self._ConvertToBinary(dest)
|
||||
|
||||
def _WritePkgInfo(self, info_plist):
|
||||
"""This writes the PkgInfo file from the data stored in Info.plist."""
|
||||
plist = plistlib.readPlist(info_plist)
|
||||
if not plist:
|
||||
return
|
||||
|
||||
# Only create PkgInfo for executable types.
|
||||
package_type = plist["CFBundlePackageType"]
|
||||
if package_type != "APPL":
|
||||
return
|
||||
|
||||
# The format of PkgInfo is eight characters, representing the bundle type
|
||||
# and bundle signature, each four characters. If that is missing, four
|
||||
# '?' characters are used instead.
|
||||
signature_code = plist.get("CFBundleSignature", "????")
|
||||
if len(signature_code) != 4: # Wrong length resets everything, too.
|
||||
signature_code = "?" * 4
|
||||
|
||||
dest = os.path.join(os.path.dirname(info_plist), "PkgInfo")
|
||||
with open(dest, "w") as fp:
|
||||
fp.write(f"{package_type}{signature_code}")
|
||||
|
||||
def ExecFlock(self, lockfile, *cmd_list):
|
||||
"""Emulates the most basic behavior of Linux's flock(1)."""
|
||||
# Rely on exception handling to report errors.
|
||||
fd = os.open(lockfile, os.O_RDONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
|
||||
fcntl.flock(fd, fcntl.LOCK_EX)
|
||||
return subprocess.call(cmd_list)
|
||||
|
||||
def ExecFilterLibtool(self, *cmd_list):
|
||||
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
|
||||
symbols'."""
|
||||
libtool_re = re.compile(
|
||||
r"^.*libtool: (?:for architecture: \S* )?" r"file: .* has no symbols$"
|
||||
)
|
||||
libtool_re5 = re.compile(
|
||||
r"^.*libtool: warning for library: "
|
||||
+ r".* the table of contents is empty "
|
||||
+ r"\(no object file members in the library define global symbols\)$"
|
||||
)
|
||||
env = os.environ.copy()
|
||||
# Ref:
|
||||
# http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
|
||||
# The problem with this flag is that it resets the file mtime on the file to
|
||||
# epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
|
||||
env["ZERO_AR_DATE"] = "1"
|
||||
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
|
||||
err = libtoolout.communicate()[1].decode("utf-8")
|
||||
for line in err.splitlines():
|
||||
if not libtool_re.match(line) and not libtool_re5.match(line):
|
||||
print(line, file=sys.stderr)
|
||||
# Unconditionally touch the output .a file on the command line if present
|
||||
# and the command succeeded. A bit hacky.
|
||||
if not libtoolout.returncode:
|
||||
for i in range(len(cmd_list) - 1):
|
||||
if cmd_list[i] == "-o" and cmd_list[i + 1].endswith(".a"):
|
||||
os.utime(cmd_list[i + 1], None)
|
||||
break
|
||||
return libtoolout.returncode
|
||||
|
||||
def ExecPackageIosFramework(self, framework):
|
||||
# Find the name of the binary based on the part before the ".framework".
|
||||
binary = os.path.basename(framework).split(".")[0]
|
||||
module_path = os.path.join(framework, "Modules")
|
||||
if not os.path.exists(module_path):
|
||||
os.mkdir(module_path)
|
||||
module_template = (
|
||||
"framework module %s {\n"
|
||||
' umbrella header "%s.h"\n'
|
||||
"\n"
|
||||
" export *\n"
|
||||
" module * { export * }\n"
|
||||
"}\n" % (binary, binary)
|
||||
)
|
||||
|
||||
with open(os.path.join(module_path, "module.modulemap"), "w") as module_file:
|
||||
module_file.write(module_template)
|
||||
|
||||
def ExecPackageFramework(self, framework, version):
|
||||
"""Takes a path to Something.framework and the Current version of that and
|
||||
sets up all the symlinks."""
|
||||
# Find the name of the binary based on the part before the ".framework".
|
||||
binary = os.path.basename(framework).split(".")[0]
|
||||
|
||||
CURRENT = "Current"
|
||||
RESOURCES = "Resources"
|
||||
VERSIONS = "Versions"
|
||||
|
||||
if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
|
||||
# Binary-less frameworks don't seem to contain symlinks (see e.g.
|
||||
# chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
|
||||
return
|
||||
|
||||
# Move into the framework directory to set the symlinks correctly.
|
||||
pwd = os.getcwd()
|
||||
os.chdir(framework)
|
||||
|
||||
# Set up the Current version.
|
||||
self._Relink(version, os.path.join(VERSIONS, CURRENT))
|
||||
|
||||
# Set up the root symlinks.
|
||||
self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
|
||||
self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
|
||||
|
||||
# Back to where we were before!
|
||||
os.chdir(pwd)
|
||||
|
||||
def _Relink(self, dest, link):
|
||||
"""Creates a symlink to |dest| named |link|. If |link| already exists,
|
||||
it is overwritten."""
|
||||
if os.path.lexists(link):
|
||||
os.remove(link)
|
||||
os.symlink(dest, link)
|
||||
|
||||
def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
|
||||
framework_name = os.path.basename(framework).split(".")[0]
|
||||
all_headers = [os.path.abspath(header) for header in all_headers]
|
||||
filelist = {}
|
||||
for header in all_headers:
|
||||
filename = os.path.basename(header)
|
||||
filelist[filename] = header
|
||||
filelist[os.path.join(framework_name, filename)] = header
|
||||
WriteHmap(out, filelist)
|
||||
|
||||
def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
|
||||
header_path = os.path.join(framework, "Headers")
|
||||
if not os.path.exists(header_path):
|
||||
os.makedirs(header_path)
|
||||
for header in copy_headers:
|
||||
shutil.copy(header, os.path.join(header_path, os.path.basename(header)))
|
||||
|
||||
def ExecCompileXcassets(self, keys, *inputs):
|
||||
"""Compiles multiple .xcassets files into a single .car file.
|
||||
|
||||
This invokes 'actool' to compile all the inputs .xcassets files. The
|
||||
        |keys| argument is a JSON-encoded dictionary of extra arguments to
|
||||
        pass to 'actool' when the asset catalogs contain an application icon
|
||||
or a launch image.
|
||||
|
||||
Note that 'actool' does not create the Assets.car file if the asset
|
||||
        catalogs do not contain an imageset.
|
||||
"""
|
||||
command_line = [
|
||||
"xcrun",
|
||||
"actool",
|
||||
"--output-format",
|
||||
"human-readable-text",
|
||||
"--compress-pngs",
|
||||
"--notices",
|
||||
"--warnings",
|
||||
"--errors",
|
||||
]
|
||||
is_iphone_target = "IPHONEOS_DEPLOYMENT_TARGET" in os.environ
|
||||
if is_iphone_target:
|
||||
platform = os.environ["CONFIGURATION"].split("-")[-1]
|
||||
if platform not in ("iphoneos", "iphonesimulator"):
|
||||
platform = "iphonesimulator"
|
||||
command_line.extend(
|
||||
[
|
||||
"--platform",
|
||||
platform,
|
||||
"--target-device",
|
||||
"iphone",
|
||||
"--target-device",
|
||||
"ipad",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
|
||||
"--compile",
|
||||
os.path.abspath(os.environ["CONTENTS_FOLDER_PATH"]),
|
||||
]
|
||||
)
|
||||
else:
|
||||
command_line.extend(
|
||||
[
|
||||
"--platform",
|
||||
"macosx",
|
||||
"--target-device",
|
||||
"mac",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["MACOSX_DEPLOYMENT_TARGET"],
|
||||
"--compile",
|
||||
os.path.abspath(os.environ["UNLOCALIZED_RESOURCES_FOLDER_PATH"]),
|
||||
]
|
||||
)
|
||||
if keys:
|
||||
keys = json.loads(keys)
|
||||
for key, value in keys.items():
|
||||
arg_name = "--" + key
|
||||
if isinstance(value, bool):
|
||||
if value:
|
||||
command_line.append(arg_name)
|
||||
elif isinstance(value, list):
|
||||
for v in value:
|
||||
command_line.append(arg_name)
|
||||
command_line.append(str(v))
|
||||
else:
|
||||
command_line.append(arg_name)
|
||||
command_line.append(str(value))
|
||||
        # Note: actool crashes if input paths are relative, so use os.path.abspath
|
||||
# to get absolute path name for inputs.
|
||||
command_line.extend(map(os.path.abspath, inputs))
|
||||
subprocess.check_call(command_line)
|
||||
|
||||
def ExecMergeInfoPlist(self, output, *inputs):
|
||||
"""Merge multiple .plist files into a single .plist file."""
|
||||
merged_plist = {}
|
||||
for path in inputs:
|
||||
plist = self._LoadPlistMaybeBinary(path)
|
||||
self._MergePlist(merged_plist, plist)
|
||||
plistlib.writePlist(merged_plist, output)
|
||||
|
||||
def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
|
||||
"""Code sign a bundle.
|
||||
|
||||
This function tries to code sign an iOS bundle, following the same
|
||||
algorithm as Xcode:
|
||||
        1. pick the provisioning profile that best matches the bundle identifier,
|
||||
and copy it into the bundle as embedded.mobileprovision,
|
||||
2. copy Entitlements.plist from user or SDK next to the bundle,
|
||||
3. code sign the bundle.
|
||||
"""
|
||||
substitutions, overrides = self._InstallProvisioningProfile(
|
||||
provisioning, self._GetCFBundleIdentifier()
|
||||
)
|
||||
entitlements_path = self._InstallEntitlements(
|
||||
entitlements, substitutions, overrides
|
||||
)
|
||||
|
||||
args = ["codesign", "--force", "--sign", key]
|
||||
if preserve == "True":
|
||||
args.extend(["--deep", "--preserve-metadata=identifier,entitlements"])
|
||||
else:
|
||||
args.extend(["--entitlements", entitlements_path])
|
||||
args.extend(["--timestamp=none", path])
|
||||
subprocess.check_call(args)
|
||||
|
||||
def _InstallProvisioningProfile(self, profile, bundle_identifier):
|
||||
"""Installs embedded.mobileprovision into the bundle.
|
||||
|
||||
Args:
|
||||
profile: string, optional, short name of the .mobileprovision file
|
||||
to use, if empty or the file is missing, the best file installed
|
||||
will be used
|
||||
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
|
||||
|
||||
Returns:
|
||||
          A tuple containing two dictionaries: variable substitutions and values
          to override when generating the entitlements file.
|
||||
"""
|
||||
source_path, provisioning_data, team_id = self._FindProvisioningProfile(
|
||||
profile, bundle_identifier
|
||||
)
|
||||
target_path = os.path.join(
|
||||
os.environ["BUILT_PRODUCTS_DIR"],
|
||||
os.environ["CONTENTS_FOLDER_PATH"],
|
||||
"embedded.mobileprovision",
|
||||
)
|
||||
shutil.copy2(source_path, target_path)
|
||||
substitutions = self._GetSubstitutions(bundle_identifier, team_id + ".")
|
||||
return substitutions, provisioning_data["Entitlements"]
|
||||
|
||||
def _FindProvisioningProfile(self, profile, bundle_identifier):
|
||||
"""Finds the .mobileprovision file to use for signing the bundle.
|
||||
|
||||
        Checks all the installed provisioning profiles (or, if the user
        specified the PROVISIONING_PROFILE variable, only that one) and
        selects the most specific one that corresponds to the bundle identifier.
|
||||
|
||||
Args:
|
||||
profile: string, optional, short name of the .mobileprovision file
|
||||
to use, if empty or the file is missing, the best file installed
|
||||
will be used
|
||||
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
|
||||
|
||||
Returns:
|
||||
A tuple of the path to the selected provisioning profile, the data of
|
||||
the embedded plist in the provisioning profile and the team identifier
|
||||
to use for code signing.
|
||||
|
||||
Raises:
|
||||
SystemExit: if no .mobileprovision can be used to sign the bundle.
|
||||
"""
|
||||
profiles_dir = os.path.join(
|
||||
os.environ["HOME"], "Library", "MobileDevice", "Provisioning Profiles"
|
||||
)
|
||||
if not os.path.isdir(profiles_dir):
|
||||
print(
|
||||
"cannot find mobile provisioning for %s" % (bundle_identifier),
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
provisioning_profiles = None
|
||||
if profile:
|
||||
profile_path = os.path.join(profiles_dir, profile + ".mobileprovision")
|
||||
if os.path.exists(profile_path):
|
||||
provisioning_profiles = [profile_path]
|
||||
if not provisioning_profiles:
|
||||
provisioning_profiles = glob.glob(
|
||||
os.path.join(profiles_dir, "*.mobileprovision")
|
||||
)
|
||||
valid_provisioning_profiles = {}
|
||||
for profile_path in provisioning_profiles:
|
||||
profile_data = self._LoadProvisioningProfile(profile_path)
|
||||
app_id_pattern = profile_data.get("Entitlements", {}).get(
|
||||
"application-identifier", ""
|
||||
)
|
||||
for team_identifier in profile_data.get("TeamIdentifier", []):
|
||||
app_id = f"{team_identifier}.{bundle_identifier}"
|
||||
if fnmatch.fnmatch(app_id, app_id_pattern):
|
||||
valid_provisioning_profiles[app_id_pattern] = (
|
||||
profile_path,
|
||||
profile_data,
|
||||
team_identifier,
|
||||
)
|
||||
if not valid_provisioning_profiles:
|
||||
print(
|
||||
"cannot find mobile provisioning for %s" % (bundle_identifier),
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
# If the user has multiple provisioning profiles installed that can be
|
||||
# used for ${bundle_identifier}, pick the most specific one (ie. the
|
||||
# provisioning profile whose pattern is the longest).
|
||||
selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
|
||||
return valid_provisioning_profiles[selected_key]
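
The "most specific wins" rule reduces to a max() over pattern length; a toy version with invented identifiers:

import fnmatch

app_id = "TEAM1234.com.example.app"
patterns = ["TEAM1234.*", "TEAM1234.com.example.*"]   # invented patterns
matching = [p for p in patterns if fnmatch.fnmatch(app_id, p)]
assert max(matching, key=len) == "TEAM1234.com.example.*"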
|
||||
|
||||
def _LoadProvisioningProfile(self, profile_path):
|
||||
"""Extracts the plist embedded in a provisioning profile.
|
||||
|
||||
Args:
|
||||
profile_path: string, path to the .mobileprovision file
|
||||
|
||||
Returns:
|
||||
Content of the plist embedded in the provisioning profile as a dictionary.
|
||||
"""
|
||||
with tempfile.NamedTemporaryFile() as temp:
|
||||
subprocess.check_call(
|
||||
["security", "cms", "-D", "-i", profile_path, "-o", temp.name]
|
||||
)
|
||||
return self._LoadPlistMaybeBinary(temp.name)
|
||||
|
||||
def _MergePlist(self, merged_plist, plist):
|
||||
"""Merge |plist| into |merged_plist|."""
|
||||
for key, value in plist.items():
|
||||
if isinstance(value, dict):
|
||||
merged_value = merged_plist.get(key, {})
|
||||
if isinstance(merged_value, dict):
|
||||
self._MergePlist(merged_value, value)
|
||||
merged_plist[key] = merged_value
|
||||
else:
|
||||
merged_plist[key] = value
|
||||
else:
|
||||
merged_plist[key] = value
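
Merge semantics in isolation: nested dicts merge recursively, everything else is overwritten by the later plist. A standalone mirror of the method, with invented data:

def merge_plist(merged, plist):
    for key, value in plist.items():
        if isinstance(value, dict) and isinstance(merged.get(key, {}), dict):
            merged_value = merged.get(key, {})
            merge_plist(merged_value, value)   # recurse into nested dicts
            merged[key] = merged_value
        else:
            merged[key] = value                # scalars and lists overwrite

a = {"A": 1, "D": {"x": 1}}
merge_plist(a, {"A": 2, "D": {"y": 2}})
assert a == {"A": 2, "D": {"x": 1, "y": 2}}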
|
||||
|
||||
def _LoadPlistMaybeBinary(self, plist_path):
|
||||
"""Loads into a memory a plist possibly encoded in binary format.
|
||||
|
||||
This is a wrapper around plistlib.readPlist that tries to convert the
|
||||
plist to the XML format if it can't be parsed (assuming that it is in
|
||||
the binary format).
|
||||
|
||||
Args:
|
||||
plist_path: string, path to a plist file, in XML or binary format
|
||||
|
||||
Returns:
|
||||
Content of the plist as a dictionary.
|
||||
"""
|
||||
try:
|
||||
# First, try to read the file using plistlib that only supports XML,
|
||||
# and if an exception is raised, convert a temporary copy to XML and
|
||||
# load that copy.
|
||||
return plistlib.readPlist(plist_path)
|
||||
except Exception:
|
||||
pass
|
||||
with tempfile.NamedTemporaryFile() as temp:
|
||||
shutil.copy2(plist_path, temp.name)
|
||||
subprocess.check_call(["plutil", "-convert", "xml1", temp.name])
|
||||
return plistlib.readPlist(temp.name)
|
||||
|
||||
def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
|
||||
"""Constructs a dictionary of variable substitutions for Entitlements.plist.
|
||||
|
||||
Args:
|
||||
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
|
||||
app_identifier_prefix: string, value for AppIdentifierPrefix
|
||||
|
||||
Returns:
|
||||
Dictionary of substitutions to apply when generating Entitlements.plist.
|
||||
"""
|
||||
return {
|
||||
"CFBundleIdentifier": bundle_identifier,
|
||||
"AppIdentifierPrefix": app_identifier_prefix,
|
||||
}
|
||||
|
||||
def _GetCFBundleIdentifier(self):
|
||||
"""Extracts CFBundleIdentifier value from Info.plist in the bundle.
|
||||
|
||||
Returns:
|
||||
Value of CFBundleIdentifier in the Info.plist located in the bundle.
|
||||
"""
|
||||
info_plist_path = os.path.join(
|
||||
os.environ["TARGET_BUILD_DIR"], os.environ["INFOPLIST_PATH"]
|
||||
)
|
||||
info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
|
||||
return info_plist_data["CFBundleIdentifier"]
|
||||
|
||||
def _InstallEntitlements(self, entitlements, substitutions, overrides):
|
||||
"""Generates and install the ${BundleName}.xcent entitlements file.
|
||||
|
||||
Expands variables "$(variable)" pattern in the source entitlements file,
|
||||
add extra entitlements defined in the .mobileprovision file and the copy
|
||||
the generated plist to "${BundlePath}.xcent".
|
||||
|
||||
Args:
|
||||
entitlements: string, optional, path to the Entitlements.plist template
|
||||
to use, defaults to "${SDKROOT}/Entitlements.plist"
|
||||
substitutions: dictionary, variable substitutions
|
||||
overrides: dictionary, values to add to the entitlements
|
||||
|
||||
Returns:
|
||||
Path to the generated entitlements file.
|
||||
"""
|
||||
source_path = entitlements
|
||||
target_path = os.path.join(
|
||||
os.environ["BUILT_PRODUCTS_DIR"], os.environ["PRODUCT_NAME"] + ".xcent"
|
||||
)
|
||||
if not source_path:
|
||||
source_path = os.path.join(os.environ["SDKROOT"], "Entitlements.plist")
|
||||
shutil.copy2(source_path, target_path)
|
||||
data = self._LoadPlistMaybeBinary(target_path)
|
||||
data = self._ExpandVariables(data, substitutions)
|
||||
if overrides:
|
||||
for key in overrides:
|
||||
if key not in data:
|
||||
data[key] = overrides[key]
|
||||
plistlib.writePlist(data, target_path)
|
||||
return target_path
|
||||
|
||||
def _ExpandVariables(self, data, substitutions):
|
||||
"""Expands variables "$(variable)" in data.
|
||||
|
||||
Args:
|
||||
data: object, can be either string, list or dictionary
|
||||
substitutions: dictionary, variable substitutions to perform
|
||||
|
||||
Returns:
|
||||
          Copy of data where each reference to "$(variable)" has been replaced
|
||||
by the corresponding value found in substitutions, or left intact if
|
||||
the key was not found.
|
||||
"""
|
||||
if isinstance(data, str):
|
||||
for key, value in substitutions.items():
|
||||
data = data.replace("$(%s)" % key, value)
|
||||
return data
|
||||
if isinstance(data, list):
|
||||
return [self._ExpandVariables(v, substitutions) for v in data]
|
||||
if isinstance(data, dict):
|
||||
return {k: self._ExpandVariables(data[k], substitutions) for k in data}
|
||||
return data
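
Expansion behaviour in a few lines: known "$(variable)" references are substituted, unknown ones are left intact, matching the docstring above. The values are invented:

subs = {"CFBundleIdentifier": "com.example.app"}    # invented substitution
s = "$(AppIdentifierPrefix)$(CFBundleIdentifier)"
for key, value in subs.items():
    s = s.replace("$(%s)" % key, value)
assert s == "$(AppIdentifierPrefix)com.example.app"  # unknown key untouched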
|
||||
|
||||
|
||||
def NextGreaterPowerOf2(x):
|
||||
return 2 ** (x).bit_length()
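
Note the "greater", not "greater or equal": bit_length() rounds exact powers of two up as well.

assert 2 ** (5).bit_length() == 8
assert 2 ** (8).bit_length() == 16   # 8 is already a power of two, still doubled
assert 2 ** (1).bit_length() == 2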
|
||||
|
||||
|
||||
def WriteHmap(output_name, filelist):
|
||||
"""Generates a header map based on |filelist|.
|
||||
|
||||
Per Mark Mentovai:
|
||||
A header map is structured essentially as a hash table, keyed by names used
|
||||
in #includes, and providing pathnames to the actual files.
|
||||
|
||||
The implementation below and the comment above comes from inspecting:
|
||||
http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
|
||||
while also looking at the implementation in clang in:
|
||||
https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
|
||||
"""
|
||||
magic = 1751998832
|
||||
version = 1
|
||||
_reserved = 0
|
||||
count = len(filelist)
|
||||
capacity = NextGreaterPowerOf2(count)
|
||||
strings_offset = 24 + (12 * capacity)
|
||||
max_value_length = max(len(value) for value in filelist.values())
|
||||
|
||||
out = open(output_name, "wb")
|
||||
out.write(
|
||||
struct.pack(
|
||||
"<LHHLLLL",
|
||||
magic,
|
||||
version,
|
||||
_reserved,
|
||||
strings_offset,
|
||||
count,
|
||||
capacity,
|
||||
max_value_length,
|
||||
)
|
||||
)
|
||||
|
||||
# Create empty hashmap buckets.
|
||||
buckets = [None] * capacity
|
||||
for file, path in filelist.items():
|
||||
key = 0
|
||||
for c in file:
|
||||
key += ord(c.lower()) * 13
|
||||
|
||||
# Fill next empty bucket.
|
||||
while buckets[key & capacity - 1] is not None:
|
||||
key = key + 1
|
||||
buckets[key & capacity - 1] = (file, path)
|
||||
|
||||
next_offset = 1
|
||||
for bucket in buckets:
|
||||
if bucket is None:
|
||||
out.write(struct.pack("<LLL", 0, 0, 0))
|
||||
else:
|
||||
(file, path) = bucket
|
||||
key_offset = next_offset
|
||||
prefix_offset = key_offset + len(file) + 1
|
||||
suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
|
||||
next_offset = suffix_offset + len(os.path.basename(path)) + 1
|
||||
out.write(struct.pack("<LLL", key_offset, prefix_offset, suffix_offset))
|
||||
|
||||
# Pad byte since next offset starts at 1.
|
||||
out.write(struct.pack("<x"))
|
||||
|
||||
for bucket in buckets:
|
||||
if bucket is not None:
|
||||
(file, path) = bucket
|
||||
out.write(struct.pack("<%ds" % len(file), file))
|
||||
out.write(struct.pack("<s", "\0"))
|
||||
base = os.path.dirname(path) + os.sep
|
||||
out.write(struct.pack("<%ds" % len(base), base))
|
||||
out.write(struct.pack("<s", "\0"))
|
||||
path = os.path.basename(path)
|
||||
out.write(struct.pack("<%ds" % len(path), path))
|
||||
out.write(struct.pack("<s", "\0"))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
1259
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
generated
vendored
Normal file
File diff suppressed because it is too large
174
.yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
generated
vendored
Normal file
|
@@ -0,0 +1,174 @@
|
|||
# This file comes from
|
||||
# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
|
||||
# Do not edit! Edit the upstream one instead.
|
||||
|
||||
"""Python module for generating .ninja files.
|
||||
|
||||
Note that this is emphatically not a required piece of Ninja; it's
|
||||
just a helpful utility for build-file-generation systems that already
|
||||
use Python.
|
||||
"""
|
||||
|
||||
import textwrap
|
||||
|
||||
|
||||
def escape_path(word):
|
||||
return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")
|
||||
|
||||
|
||||
class Writer:
|
||||
def __init__(self, output, width=78):
|
||||
self.output = output
|
||||
self.width = width
|
||||
|
||||
def newline(self):
|
||||
self.output.write("\n")
|
||||
|
||||
def comment(self, text):
|
||||
for line in textwrap.wrap(text, self.width - 2):
|
||||
self.output.write("# " + line + "\n")
|
||||
|
||||
def variable(self, key, value, indent=0):
|
||||
if value is None:
|
||||
return
|
||||
if isinstance(value, list):
|
||||
value = " ".join(filter(None, value)) # Filter out empty strings.
|
||||
self._line(f"{key} = {value}", indent)
|
||||
|
||||
def pool(self, name, depth):
|
||||
self._line("pool %s" % name)
|
||||
self.variable("depth", depth, indent=1)
|
||||
|
||||
def rule(
|
||||
self,
|
||||
name,
|
||||
command,
|
||||
description=None,
|
||||
depfile=None,
|
||||
generator=False,
|
||||
pool=None,
|
||||
restat=False,
|
||||
rspfile=None,
|
||||
rspfile_content=None,
|
||||
deps=None,
|
||||
):
|
||||
self._line("rule %s" % name)
|
||||
self.variable("command", command, indent=1)
|
||||
if description:
|
||||
self.variable("description", description, indent=1)
|
||||
if depfile:
|
||||
self.variable("depfile", depfile, indent=1)
|
||||
if generator:
|
||||
self.variable("generator", "1", indent=1)
|
||||
if pool:
|
||||
self.variable("pool", pool, indent=1)
|
||||
if restat:
|
||||
self.variable("restat", "1", indent=1)
|
||||
if rspfile:
|
||||
self.variable("rspfile", rspfile, indent=1)
|
||||
if rspfile_content:
|
||||
self.variable("rspfile_content", rspfile_content, indent=1)
|
||||
if deps:
|
||||
self.variable("deps", deps, indent=1)
|
||||
|
||||
def build(
|
||||
self, outputs, rule, inputs=None, implicit=None, order_only=None, variables=None
|
||||
):
|
||||
outputs = self._as_list(outputs)
|
||||
all_inputs = self._as_list(inputs)[:]
|
||||
out_outputs = list(map(escape_path, outputs))
|
||||
all_inputs = list(map(escape_path, all_inputs))
|
||||
|
||||
if implicit:
|
||||
implicit = map(escape_path, self._as_list(implicit))
|
||||
all_inputs.append("|")
|
||||
all_inputs.extend(implicit)
|
||||
if order_only:
|
||||
order_only = map(escape_path, self._as_list(order_only))
|
||||
all_inputs.append("||")
|
||||
all_inputs.extend(order_only)
|
||||
|
||||
self._line(
|
||||
"build {}: {}".format(" ".join(out_outputs), " ".join([rule] + all_inputs))
|
||||
)
|
||||
|
||||
if variables:
|
||||
if isinstance(variables, dict):
|
||||
iterator = iter(variables.items())
|
||||
else:
|
||||
iterator = iter(variables)
|
||||
|
||||
for key, val in iterator:
|
||||
self.variable(key, val, indent=1)
|
||||
|
||||
return outputs
|
||||
|
||||
def include(self, path):
|
||||
self._line("include %s" % path)
|
||||
|
||||
def subninja(self, path):
|
||||
self._line("subninja %s" % path)
|
||||
|
||||
def default(self, paths):
|
||||
self._line("default %s" % " ".join(self._as_list(paths)))
|
||||
|
||||
def _count_dollars_before_index(self, s, i):
|
||||
"""Returns the number of '$' characters right in front of s[i]."""
|
||||
dollar_count = 0
|
||||
dollar_index = i - 1
|
||||
while dollar_index > 0 and s[dollar_index] == "$":
|
||||
dollar_count += 1
|
||||
dollar_index -= 1
|
||||
return dollar_count
|
||||
|
||||
def _line(self, text, indent=0):
|
||||
"""Write 'text' word-wrapped at self.width characters."""
|
||||
leading_space = " " * indent
|
||||
while len(leading_space) + len(text) > self.width:
|
||||
# The text is too wide; wrap if possible.
|
||||
|
||||
# Find the rightmost space that would obey our width constraint and
|
||||
# that's not an escaped space.
|
||||
available_space = self.width - len(leading_space) - len(" $")
|
||||
space = available_space
|
||||
while True:
|
||||
space = text.rfind(" ", 0, space)
|
||||
if space < 0 or self._count_dollars_before_index(text, space) % 2 == 0:
|
||||
break
|
||||
|
||||
if space < 0:
|
||||
# No such space; just use the first unescaped space we can find.
|
||||
space = available_space - 1
|
||||
while True:
|
||||
space = text.find(" ", space + 1)
|
||||
if (
|
||||
space < 0
|
||||
or self._count_dollars_before_index(text, space) % 2 == 0
|
||||
):
|
||||
break
|
||||
if space < 0:
|
||||
# Give up on breaking.
|
||||
break
|
||||
|
||||
self.output.write(leading_space + text[0:space] + " $\n")
|
||||
text = text[space + 1 :]
|
||||
|
||||
# Subsequent lines are continuations, so indent them.
|
||||
leading_space = " " * (indent + 2)
|
||||
|
||||
self.output.write(leading_space + text + "\n")
|
||||
|
||||
def _as_list(self, input):
|
||||
if input is None:
|
||||
return []
|
||||
if isinstance(input, list):
|
||||
return input
|
||||
return [input]
|
||||
|
||||
|
||||
def escape(string):
|
||||
"""Escape a string such that it can be embedded into a Ninja file without
|
||||
further interpretation."""
|
||||
assert "\n" not in string, "Ninja syntax does not allow newlines"
|
||||
# We only have one special metacharacter: '$'.
|
||||
return string.replace("$", "$$")
|
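
As a quick illustration of the Writer API above, here is a minimal, hedged sketch of how a generator script might drive it; the rule, flags, and file names are invented, and io.StringIO stands in for a real build.ninja handle:

import io

buf = io.StringIO()
n = Writer(buf)  # Writer as defined above
n.comment("generated file - do not edit")
n.variable("cflags", ["-O2", "-Wall"])
n.rule("cc", command="gcc $cflags -c $in -o $out", description="CC $out")
# build() joins implicit deps after "|" and order-only deps after "||".
n.build(outputs="foo.o", rule="cc", inputs="foo.c", implicit=["foo.h"])
print(buf.getvalue())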
61 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py generated vendored Normal file
@@ -0,0 +1,61 @@
# Copyright 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""A clone of the default copy.deepcopy that doesn't handle cyclic
structures or complex types except for dicts and lists. This is
because gyp copies such large structures that the small per-copy overhead
ends up taking seconds in a project the size of Chromium."""


class Error(Exception):
    pass


__all__ = ["Error", "deepcopy"]


def deepcopy(x):
    """Deep copy operation on gyp objects such as strings, ints, dicts
    and lists. More than twice as fast as copy.deepcopy but much less
    generic."""

    try:
        return _deepcopy_dispatch[type(x)](x)
    except KeyError:
        # Rely on implicit literal concatenation so that %-formatting applies
        # to the whole message (a "+" here would only format the second half).
        raise Error(
            "Unsupported type %s for deepcopy. Use copy.deepcopy "
            "or expand simple_copy support." % type(x)
        )


_deepcopy_dispatch = d = {}


def _deepcopy_atomic(x):
    return x


types = bool, float, int, str, type, type(None)

for x in types:
    d[x] = _deepcopy_atomic


def _deepcopy_list(x):
    return [deepcopy(a) for a in x]


d[list] = _deepcopy_list


def _deepcopy_dict(x):
    y = {}
    for key, value in x.items():
        y[deepcopy(key)] = deepcopy(value)
    return y


d[dict] = _deepcopy_dict

del d
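
A short usage sketch of the fast deepcopy above (values invented for illustration); unsupported types such as tuples raise the module's Error instead of being copied:

original = {"targets": [{"name": "base", "deps": ["a", "b"]}], "depth": 2}
clone = deepcopy(original)
clone["targets"][0]["deps"].append("c")
assert original["targets"][0]["deps"] == ["a", "b"]  # the copy is independent
try:
    deepcopy(("not", "supported"))  # tuple is not in _deepcopy_dispatch
except Error as e:
    print(e)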
373 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py generated vendored Normal file
@@ -0,0 +1,373 @@
#!/usr/bin/env python3

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utility functions for Windows builds.

These functions are executed via gyp-win-tool when using the ninja generator.
"""


import os
import re
import shutil
import subprocess
import stat
import string
import sys

BASE_DIR = os.path.dirname(os.path.abspath(__file__))

# A regex matching an argument corresponding to the output filename passed to
# link.exe.
_LINK_EXE_OUT_ARG = re.compile("/OUT:(?P<out>.+)$", re.IGNORECASE)


def main(args):
    executor = WinTool()
    exit_code = executor.Dispatch(args)
    if exit_code is not None:
        sys.exit(exit_code)


class WinTool:
    """This class performs all the Windows tooling steps. The methods can either
    be executed directly, or dispatched from an argument list."""

    def _UseSeparateMspdbsrv(self, env, args):
        """Allows using a unique instance of mspdbsrv.exe per linker instead of a
        shared one."""
        if len(args) < 1:
            raise Exception("Not enough arguments")

        if args[0] != "link.exe":
            return

        # Use the output filename passed to the linker to generate an endpoint name
        # for mspdbsrv.exe.
        endpoint_name = None
        for arg in args:
            m = _LINK_EXE_OUT_ARG.match(arg)
            if m:
                endpoint_name = re.sub(
                    r"\W+", "", "%s_%d" % (m.group("out"), os.getpid())
                )
                break

        if endpoint_name is None:
            return

        # Adds the appropriate environment variable. This will be read by link.exe
        # to know which instance of mspdbsrv.exe it should connect to (if it's
        # not set then the default endpoint is used).
        env["_MSPDBSRV_ENDPOINT_"] = endpoint_name

    def Dispatch(self, args):
        """Dispatches a string command to a method."""
        if len(args) < 1:
            raise Exception("Not enough arguments")

        method = "Exec%s" % self._CommandifyName(args[0])
        return getattr(self, method)(*args[1:])

    def _CommandifyName(self, name_string):
        """Transforms a tool name like recursive-mirror to RecursiveMirror."""
        return name_string.title().replace("-", "")

    def _GetEnv(self, arch):
        """Gets the saved environment from a file for a given architecture."""
        # The environment is saved as an "environment block" (see CreateProcess
        # and msvs_emulation for details). We convert to a dict here.
        # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
        pairs = open(arch).read()[:-2].split("\0")
        kvs = [item.split("=", 1) for item in pairs]
        return dict(kvs)
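    # Illustration of the block format parsed above (contents invented): for a
    # saved block "PATH=C:\\tools\0TMP=C:\\temp\0\0", read()[:-2] drops the two
    # trailing NULs, split("\0") yields ["PATH=C:\\tools", "TMP=C:\\temp"], and
    # dict(kvs) gives {"PATH": "C:\\tools", "TMP": "C:\\temp"}.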
    def ExecStamp(self, path):
        """Simple stamp command."""
        open(path, "w").close()

    def ExecRecursiveMirror(self, source, dest):
        """Emulation of rm -rf out && cp -af in out."""
        if os.path.exists(dest):
            if os.path.isdir(dest):

                def _on_error(fn, path, excinfo):
                    # The operation failed, possibly because the file is set to
                    # read-only. If that's why, make it writable and try the op again.
                    if not os.access(path, os.W_OK):
                        os.chmod(path, stat.S_IWRITE)
                    fn(path)

                shutil.rmtree(dest, onerror=_on_error)
            else:
                if not os.access(dest, os.W_OK):
                    # Attempt to make the file writable before deleting it.
                    os.chmod(dest, stat.S_IWRITE)
                os.unlink(dest)

        if os.path.isdir(source):
            shutil.copytree(source, dest)
        else:
            shutil.copy2(source, dest)

    def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
        """Filter diagnostic output from link that looks like:
        '   Creating library ui.dll.lib and object ui.dll.exp'
        This happens when there are exports from the dll or exe.
        """
        env = self._GetEnv(arch)
        if use_separate_mspdbsrv == "True":
            self._UseSeparateMspdbsrv(env, args)
        if sys.platform == "win32":
            args = list(args)  # *args is a tuple by default, which is read-only.
            args[0] = args[0].replace("/", "\\")
        # https://docs.python.org/2/library/subprocess.html:
        # "On Unix with shell=True [...] if args is a sequence, the first item
        # specifies the command string, and any additional items will be treated as
        # additional arguments to the shell itself.  That is to say, Popen does the
        # equivalent of:
        #   Popen(['/bin/sh', '-c', args[0], args[1], ...])"
        # For that reason, since going through the shell doesn't seem necessary on
        # non-Windows don't do that there.
        link = subprocess.Popen(
            args,
            shell=sys.platform == "win32",
            env=env,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
        out = link.communicate()[0].decode("utf-8")
        for line in out.splitlines():
            if (
                not line.startswith("   Creating library ")
                and not line.startswith("Generating code")
                and not line.startswith("Finished generating code")
            ):
                print(line)
        return link.returncode

    def ExecLinkWithManifests(
        self,
        arch,
        embed_manifest,
        out,
        ldcmd,
        resname,
        mt,
        rc,
        intermediate_manifest,
        *manifests
    ):
        """A wrapper for handling creating a manifest resource and then executing
        a link command."""
        # The 'normal' way to do manifests is to have link generate a manifest
        # based on gathering dependencies from the object files, then merge that
        # manifest with other manifests supplied as sources, convert the merged
        # manifest to a resource, and then *relink*, including the compiled
        # version of the manifest resource. This breaks incremental linking, and
        # is generally overly complicated. Instead, we merge all the manifests
        # provided (along with one that includes what would normally be in the
        # linker-generated one, see msvs_emulation.py), and include that into the
        # first and only link. We still tell link to generate a manifest, but we
        # only use that to assert that our simpler process did not miss anything.
        variables = {
            "python": sys.executable,
            "arch": arch,
            "out": out,
            "ldcmd": ldcmd,
            "resname": resname,
            "mt": mt,
            "rc": rc,
            "intermediate_manifest": intermediate_manifest,
            "manifests": " ".join(manifests),
        }
        add_to_ld = ""
        if manifests:
            subprocess.check_call(
                "%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo "
                "-manifest %(manifests)s -out:%(out)s.manifest" % variables
            )
            if embed_manifest == "True":
                subprocess.check_call(
                    "%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest"
                    " %(out)s.manifest.rc %(resname)s" % variables
                )
                subprocess.check_call(
                    "%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s "
                    "%(out)s.manifest.rc" % variables
                )
                add_to_ld = " %(out)s.manifest.res" % variables
        subprocess.check_call(ldcmd + add_to_ld)

        # Run mt.exe on the theoretically complete manifest we generated, merging
        # it with the one the linker generated to confirm that the linker
        # generated one does not add anything. This is strictly unnecessary for
        # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
        # used in a #pragma comment.
        if manifests:
            # Merge the intermediate one with ours to .assert.manifest, then check
            # that .assert.manifest is identical to ours.
            subprocess.check_call(
                "%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo "
                "-manifest %(out)s.manifest %(intermediate_manifest)s "
                "-out:%(out)s.assert.manifest" % variables
            )
            assert_manifest = "%(out)s.assert.manifest" % variables
            our_manifest = "%(out)s.manifest" % variables
            # Load and normalize the manifests. mt.exe sometimes removes whitespace,
            # and sometimes doesn't unfortunately.
            with open(our_manifest) as our_f, open(assert_manifest) as assert_f:
                translator = str.maketrans("", "", string.whitespace)
                our_data = our_f.read().translate(translator)
                assert_data = assert_f.read().translate(translator)
            if our_data != assert_data:
                os.unlink(out)

                def dump(filename):
                    print(filename, file=sys.stderr)
                    print("-----", file=sys.stderr)
                    with open(filename) as f:
                        print(f.read(), file=sys.stderr)
                    print("-----", file=sys.stderr)

                dump(intermediate_manifest)
                dump(our_manifest)
                dump(assert_manifest)
                sys.stderr.write(
                    'Linker generated manifest "%s" added to final manifest "%s" '
                    '(result in "%s"). '
                    "Were /MANIFEST switches used in #pragma statements? "
                    % (intermediate_manifest, our_manifest, assert_manifest)
                )
                return 1

    def ExecManifestWrapper(self, arch, *args):
        """Run manifest tool with environment set. Strip out undesirable warning
        (some XML blocks are recognized by the OS loader, but not the manifest
        tool)."""
        env = self._GetEnv(arch)
        popen = subprocess.Popen(
            args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        out = popen.communicate()[0].decode("utf-8")
        for line in out.splitlines():
            if line and "manifest authoring warning 81010002" not in line:
                print(line)
        return popen.returncode

    def ExecManifestToRc(self, arch, *args):
        """Creates a resource file pointing to a SxS assembly manifest.
        |args| is a tuple containing the path to the resource file, the path to
        the manifest file, and the resource name, which can be "1" (for
        executables) or "2" (for DLLs)."""
        manifest_path, resource_path, resource_name = args
        with open(resource_path, "w") as output:
            output.write(
                '#include <windows.h>\n%s RT_MANIFEST "%s"'
                % (resource_name, os.path.abspath(manifest_path).replace("\\", "/"))
            )

    def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, *flags):
        """Filter noisy filenames output from the MIDL compile step that isn't
        quietable via command line flags.
        """
        args = (
            ["midl", "/nologo"]
            + list(flags)
            + [
                "/out",
                outdir,
                "/tlb",
                tlb,
                "/h",
                h,
                "/dlldata",
                dlldata,
                "/iid",
                iid,
                "/proxy",
                proxy,
                idl,
            ]
        )
        env = self._GetEnv(arch)
        popen = subprocess.Popen(
            args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        out = popen.communicate()[0].decode("utf-8")
        # Filter junk out of stdout, and write filtered versions. Output we want
        # to filter is pairs of lines that look like this:
        # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
        # objidl.idl
        lines = out.splitlines()
        prefixes = ("Processing ", "64 bit Processing ")
        processing = {os.path.basename(x) for x in lines if x.startswith(prefixes)}
        for line in lines:
            if not line.startswith(prefixes) and line not in processing:
                print(line)
        return popen.returncode

    def ExecAsmWrapper(self, arch, *args):
        """Filter logo banner from invocations of asm.exe."""
        env = self._GetEnv(arch)
        popen = subprocess.Popen(
            args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        out = popen.communicate()[0].decode("utf-8")
        for line in out.splitlines():
            if (
                not line.startswith("Copyright (C) Microsoft Corporation")
                and not line.startswith("Microsoft (R) Macro Assembler")
                and not line.startswith(" Assembling: ")
                and line
            ):
                print(line)
        return popen.returncode

    def ExecRcWrapper(self, arch, *args):
        """Filter logo banner from invocations of rc.exe. Older versions of RC
        don't support the /nologo flag."""
        env = self._GetEnv(arch)
        popen = subprocess.Popen(
            args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        out = popen.communicate()[0].decode("utf-8")
        for line in out.splitlines():
            if (
                not line.startswith("Microsoft (R) Windows (R) Resource Compiler")
                and not line.startswith("Copyright (C) Microsoft Corporation")
                and line
            ):
                print(line)
        return popen.returncode

    def ExecActionWrapper(self, arch, rspfile, *dir):
        """Runs an action command line from a response file using the environment
        for |arch|. If |dir| is supplied, use that as the working directory."""
        env = self._GetEnv(arch)
        # TODO(scottmg): This is a temporary hack to get some specific variables
        # through to actions that are set after gyp-time. http://crbug.com/333738.
        for k, v in os.environ.items():
            if k not in env:
                env[k] = v
        args = open(rspfile).read()
        dir = dir[0] if dir else None
        return subprocess.call(args, shell=True, env=env, cwd=dir)

    def ExecClCompile(self, project_dir, selected_files):
        """Executed by msvs-ninja projects when the 'ClCompile' target is used to
        build selected C/C++ files."""
        project_dir = os.path.relpath(project_dir, BASE_DIR)
        selected_files = selected_files.split(";")
        ninja_targets = [
            os.path.join(project_dir, filename) + "^^" for filename in selected_files
        ]
        cmd = ["ninja.exe"]
        cmd.extend(ninja_targets)
        return subprocess.call(cmd, shell=True, cwd=BASE_DIR)


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
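
A small, hedged sketch of the Dispatch mechanism above; it only exercises the stamp tool, so it runs on any platform:

import os
import tempfile

tool = WinTool()  # WinTool as defined above
assert tool._CommandifyName("recursive-mirror") == "RecursiveMirror"
stamp = os.path.join(tempfile.mkdtemp(), "build.stamp")
tool.Dispatch(["stamp", stamp])  # routed to ExecStamp
assert os.path.exists(stamp)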
1933 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py generated vendored Normal file
File diff suppressed because it is too large
302 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py generated vendored Normal file
@@ -0,0 +1,302 @@
# Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Xcode-ninja wrapper project file generator.

This updates the data structures passed to the Xcode gyp generator to build
with ninja instead. The Xcode project itself is transformed into a list of
executable targets, each with a build step to build with ninja, and a target
with every source and resource file. This appears to sidestep some of the
major performance headaches experienced using complex projects and large
numbers of targets within Xcode.
"""

import errno
import gyp.generator.ninja
import os
import re
import xml.sax.saxutils


def _WriteWorkspace(main_gyp, sources_gyp, params):
    """ Create a workspace to wrap main and sources gyp paths. """
    (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
    workspace_path = build_file_root + ".xcworkspace"
    options = params["options"]
    if options.generator_output:
        workspace_path = os.path.join(options.generator_output, workspace_path)
    try:
        os.makedirs(workspace_path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    output_string = (
        '<?xml version="1.0" encoding="UTF-8"?>\n' + '<Workspace version = "1.0">\n'
    )
    for gyp_name in [main_gyp, sources_gyp]:
        name = os.path.splitext(os.path.basename(gyp_name))[0] + ".xcodeproj"
        name = xml.sax.saxutils.quoteattr("group:" + name)
        output_string += "  <FileRef location = %s></FileRef>\n" % name
    output_string += "</Workspace>\n"

    workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")

    try:
        with open(workspace_file) as input_file:
            input_string = input_file.read()
            if input_string == output_string:
                return
    except OSError:
        # Ignore errors if the file doesn't exist.
        pass

    with open(workspace_file, "w") as output_file:
        output_file.write(output_string)
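# For reference, with main_gyp "foo.ninja.gyp" and sources_gyp
# "sources_for_indexing.gyp" (hypothetical names), the function above writes:
#   <?xml version="1.0" encoding="UTF-8"?>
#   <Workspace version = "1.0">
#     <FileRef location = "group:foo.ninja.xcodeproj"></FileRef>
#     <FileRef location = "group:sources_for_indexing.xcodeproj"></FileRef>
#   </Workspace>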
def _TargetFromSpec(old_spec, params):
    """ Create fake target for xcode-ninja wrapper. """
    # Determine ninja top level build dir (e.g. /path/to/out).
    ninja_toplevel = None
    jobs = 0
    if params:
        options = params["options"]
        ninja_toplevel = os.path.join(
            options.toplevel_dir, gyp.generator.ninja.ComputeOutputDir(params)
        )
        jobs = params.get("generator_flags", {}).get("xcode_ninja_jobs", 0)

    target_name = old_spec.get("target_name")
    product_name = old_spec.get("product_name", target_name)
    product_extension = old_spec.get("product_extension")

    ninja_target = {}
    ninja_target["target_name"] = target_name
    ninja_target["product_name"] = product_name
    if product_extension:
        ninja_target["product_extension"] = product_extension
    ninja_target["toolset"] = old_spec.get("toolset")
    ninja_target["default_configuration"] = old_spec.get("default_configuration")
    ninja_target["configurations"] = {}

    # Tell Xcode to look in |ninja_toplevel| for build products.
    new_xcode_settings = {}
    if ninja_toplevel:
        new_xcode_settings["CONFIGURATION_BUILD_DIR"] = (
            "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
        )

    if "configurations" in old_spec:
        for config in old_spec["configurations"]:
            old_xcode_settings = old_spec["configurations"][config].get(
                "xcode_settings", {}
            )
            if "IPHONEOS_DEPLOYMENT_TARGET" in old_xcode_settings:
                new_xcode_settings["CODE_SIGNING_REQUIRED"] = "NO"
                new_xcode_settings["IPHONEOS_DEPLOYMENT_TARGET"] = old_xcode_settings[
                    "IPHONEOS_DEPLOYMENT_TARGET"
                ]
            for key in ["BUNDLE_LOADER", "TEST_HOST"]:
                if key in old_xcode_settings:
                    new_xcode_settings[key] = old_xcode_settings[key]

            ninja_target["configurations"][config] = {}
            ninja_target["configurations"][config][
                "xcode_settings"
            ] = new_xcode_settings

    ninja_target["mac_bundle"] = old_spec.get("mac_bundle", 0)
    ninja_target["mac_xctest_bundle"] = old_spec.get("mac_xctest_bundle", 0)
    ninja_target["ios_app_extension"] = old_spec.get("ios_app_extension", 0)
    ninja_target["ios_watchkit_extension"] = old_spec.get("ios_watchkit_extension", 0)
    ninja_target["ios_watchkit_app"] = old_spec.get("ios_watchkit_app", 0)
    ninja_target["type"] = old_spec["type"]
    if ninja_toplevel:
        ninja_target["actions"] = [
            {
                "action_name": "Compile and copy %s via ninja" % target_name,
                "inputs": [],
                "outputs": [],
                "action": [
                    "env",
                    "PATH=%s" % os.environ["PATH"],
                    "ninja",
                    "-C",
                    new_xcode_settings["CONFIGURATION_BUILD_DIR"],
                    target_name,
                ],
                "message": "Compile and copy %s via ninja" % target_name,
            },
        ]
        if jobs > 0:
            ninja_target["actions"][0]["action"].extend(("-j", jobs))
    return ninja_target


def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
    """Limit targets for Xcode wrapper.

    Xcode sometimes performs poorly with too many targets, so only include
    proper executable targets, with filters to customize.
    Arguments:
      target_extras: Regular expression to always add, matching any target.
      executable_target_pattern: Regular expression limiting executable targets.
      spec: Specifications for target.
    """
    target_name = spec.get("target_name")
    # Always include targets matching target_extras.
    if target_extras is not None and re.search(target_extras, target_name):
        return True

    # Otherwise just show executable targets and xc_tests.
    if int(spec.get("mac_xctest_bundle", 0)) != 0 or (
        spec.get("type", "") == "executable"
        and spec.get("product_extension", "") != "bundle"
    ):

        # If there is a filter and the target does not match, exclude the target.
        if executable_target_pattern is not None:
            if not re.search(executable_target_pattern, target_name):
                return False
        return True
    return False


def CreateWrapper(target_list, target_dicts, data, params):
    """Initialize targets for the ninja wrapper.

    This sets up the necessary variables in the targets to generate Xcode projects
    that use ninja as an external builder.
    Arguments:
      target_list: List of target pairs: 'base/base.gyp:base'.
      target_dicts: Dict of target properties keyed on target pair.
      data: Dict of flattened build files keyed on gyp path.
      params: Dict of global options for gyp.
    """
    orig_gyp = params["build_files"][0]
    for gyp_name, gyp_dict in data.items():
        if gyp_name == orig_gyp:
            depth = gyp_dict["_DEPTH"]

    # Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE
    # and prepend .ninja before the .gyp extension.
    generator_flags = params.get("generator_flags", {})
    main_gyp = generator_flags.get("xcode_ninja_main_gyp", None)
    if main_gyp is None:
        (build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
        main_gyp = build_file_root + ".ninja" + build_file_ext

    # Create new |target_list|, |target_dicts| and |data| data structures.
    new_target_list = []
    new_target_dicts = {}
    new_data = {}

    # Set base keys needed for |data|.
    new_data[main_gyp] = {}
    new_data[main_gyp]["included_files"] = []
    new_data[main_gyp]["targets"] = []
    new_data[main_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {})

    # Normally the xcode-ninja generator includes only valid executable targets.
    # If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
    # executable targets that match the pattern. (Default all)
    executable_target_pattern = generator_flags.get(
        "xcode_ninja_executable_target_pattern", None
    )

    # For including other non-executable targets, add the matching target name
    # to the |xcode_ninja_target_pattern| regular expression. (Default none)
    target_extras = generator_flags.get("xcode_ninja_target_pattern", None)

    for old_qualified_target in target_list:
        spec = target_dicts[old_qualified_target]
        if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
            # Add to new_target_list.
            target_name = spec.get("target_name")
            new_target_name = f"{main_gyp}:{target_name}#target"
            new_target_list.append(new_target_name)

            # Add to new_target_dicts.
            new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)

            # Add to new_data.
            for old_target in data[old_qualified_target.split(":")[0]]["targets"]:
                if old_target["target_name"] == target_name:
                    new_data_target = {}
                    new_data_target["target_name"] = old_target["target_name"]
                    new_data_target["toolset"] = old_target["toolset"]
                    new_data[main_gyp]["targets"].append(new_data_target)

    # Create sources target.
    sources_target_name = "sources_for_indexing"
    sources_target = _TargetFromSpec(
        {
            "target_name": sources_target_name,
            "toolset": "target",
            "default_configuration": "Default",
            "mac_bundle": "0",
            "type": "executable",
        },
        None,
    )

    # Tell Xcode to look everywhere for headers.
    sources_target["configurations"] = {"Default": {"include_dirs": [depth]}}

    # Put excluded files into the sources target so they can be opened in Xcode.
    skip_excluded_files = not generator_flags.get(
        "xcode_ninja_list_excluded_files", True
    )

    sources = []
    for target, target_dict in target_dicts.items():
        base = os.path.dirname(target)
        files = target_dict.get("sources", []) + target_dict.get(
            "mac_bundle_resources", []
        )

        if not skip_excluded_files:
            files.extend(
                target_dict.get("sources_excluded", [])
                + target_dict.get("mac_bundle_resources_excluded", [])
            )

        for action in target_dict.get("actions", []):
            files.extend(action.get("inputs", []))

            if not skip_excluded_files:
                files.extend(action.get("inputs_excluded", []))

        # Remove files starting with $. These are mostly intermediate files for the
        # build system.
        files = [file for file in files if not file.startswith("$")]

        # Make sources relative to root build file.
        relative_path = os.path.dirname(main_gyp)
        sources += [
            os.path.relpath(os.path.join(base, file), relative_path) for file in files
        ]

    sources_target["sources"] = sorted(set(sources))

    # Put sources_to_index in its own gyp.
    sources_gyp = os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
    fully_qualified_target_name = f"{sources_gyp}:{sources_target_name}#target"

    # Add to new_target_list, new_target_dicts and new_data.
    new_target_list.append(fully_qualified_target_name)
    new_target_dicts[fully_qualified_target_name] = sources_target
    new_data_target = {}
    new_data_target["target_name"] = sources_target["target_name"]
    new_data_target["_DEPTH"] = depth
    new_data_target["toolset"] = "target"
    new_data[sources_gyp] = {}
    new_data[sources_gyp]["targets"] = []
    new_data[sources_gyp]["included_files"] = []
    new_data[sources_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {})
    new_data[sources_gyp]["targets"].append(new_data_target)

    # Write workspace to file.
    _WriteWorkspace(main_gyp, sources_gyp, params)
    return (new_target_list, new_target_dicts, new_data)
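
A hedged sketch of the filtering rules in IsValidTargetForWrapper above; the specs are minimal stand-ins rather than real gyp output:

exe_spec = {"target_name": "app", "type": "executable"}
lib_spec = {"target_name": "base", "type": "static_library"}

assert IsValidTargetForWrapper(None, None, exe_spec)  # plain executable passes
assert not IsValidTargetForWrapper(None, None, lib_spec)  # library is excluded
assert IsValidTargetForWrapper("^base$", None, lib_spec)  # rescued by target_extras
assert not IsValidTargetForWrapper(None, "^tool", exe_spec)  # fails the executable filter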
3198 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py generated vendored Normal file
File diff suppressed because it is too large
65 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py generated vendored Normal file
@@ -0,0 +1,65 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Applies a fix to CR LF TAB handling in xml.dom.

Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
Working around this: http://bugs.python.org/issue5752
TODO(bradnelson): Consider dropping this when we drop XP support.
"""


import xml.dom.minidom


def _Replacement_write_data(writer, data, is_attrib=False):
    """Writes datachars to writer."""
    data = data.replace("&", "&amp;").replace("<", "&lt;")
    data = data.replace('"', "&quot;").replace(">", "&gt;")
    if is_attrib:
        data = data.replace("\r", "&#xD;").replace("\n", "&#xA;").replace("\t", "&#x9;")
    writer.write(data)


def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
    # indent = current indentation
    # addindent = indentation to add to higher levels
    # newl = newline string
    writer.write(indent + "<" + self.tagName)

    attrs = self._get_attributes()
    a_names = sorted(attrs.keys())

    for a_name in a_names:
        writer.write(' %s="' % a_name)
        _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
        writer.write('"')
    if self.childNodes:
        writer.write(">%s" % newl)
        for node in self.childNodes:
            node.writexml(writer, indent + addindent, addindent, newl)
        writer.write(f"{indent}</{self.tagName}>{newl}")
    else:
        writer.write("/>%s" % newl)


class XmlFix:
    """Object to manage temporary patching of xml.dom.minidom."""

    def __init__(self):
        # Preserve current xml.dom.minidom functions.
        self.write_data = xml.dom.minidom._write_data
        self.writexml = xml.dom.minidom.Element.writexml
        # Inject replacement versions of a function and a method.
        xml.dom.minidom._write_data = _Replacement_write_data
        xml.dom.minidom.Element.writexml = _Replacement_writexml

    def Cleanup(self):
        if self.write_data:
            xml.dom.minidom._write_data = self.write_data
            xml.dom.minidom.Element.writexml = self.writexml
            self.write_data = None

    def __del__(self):
        self.Cleanup()
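
A minimal usage sketch of the patch above: while an XmlFix instance is alive, control characters in attribute values survive as numeric entities instead of raw bytes:

import io
import xml.dom.minidom

fix = XmlFix()  # XmlFix as defined above
doc = xml.dom.minidom.parseString("<root/>")
doc.documentElement.setAttribute("cmd", "line1\nline2")
buf = io.StringIO()
doc.documentElement.writexml(buf)
print(buf.getvalue())  # <root cmd="line1&#xA;line2"/>
fix.Cleanup()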
3 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/LICENSE generated vendored Normal file
@@ -0,0 +1,3 @@
This software is made available under the terms of *either* of the licenses
found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made
under the terms of *both* these licenses.
177 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/LICENSE.APACHE generated vendored Normal file
@@ -0,0 +1,177 @@

                              Apache License
                        Version 2.0, January 2004
                     http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

   "License" shall mean the terms and conditions for use, reproduction,
   and distribution as defined by Sections 1 through 9 of this document.

   "Licensor" shall mean the copyright owner or entity authorized by
   the copyright owner that is granting the License.

   "Legal Entity" shall mean the union of the acting entity and all
   other entities that control, are controlled by, or are under common
   control with that entity. For the purposes of this definition,
   "control" means (i) the power, direct or indirect, to cause the
   direction or management of such entity, whether by contract or
   otherwise, or (ii) ownership of fifty percent (50%) or more of the
   outstanding shares, or (iii) beneficial ownership of such entity.

   "You" (or "Your") shall mean an individual or Legal Entity
   exercising permissions granted by this License.

   "Source" form shall mean the preferred form for making modifications,
   including but not limited to software source code, documentation
   source, and configuration files.

   "Object" form shall mean any form resulting from mechanical
   transformation or translation of a Source form, including but
   not limited to compiled object code, generated documentation,
   and conversions to other media types.

   "Work" shall mean the work of authorship, whether in Source or
   Object form, made available under the License, as indicated by a
   copyright notice that is included in or attached to the work
   (an example is provided in the Appendix below).

   "Derivative Works" shall mean any work, whether in Source or Object
   form, that is based on (or derived from) the Work and for which the
   editorial revisions, annotations, elaborations, or other modifications
   represent, as a whole, an original work of authorship. For the purposes
   of this License, Derivative Works shall not include works that remain
   separable from, or merely link (or bind by name) to the interfaces of,
   the Work and Derivative Works thereof.

   "Contribution" shall mean any work of authorship, including
   the original version of the Work and any modifications or additions
   to that Work or Derivative Works thereof, that is intentionally
   submitted to Licensor for inclusion in the Work by the copyright owner
   or by an individual or Legal Entity authorized to submit on behalf of
   the copyright owner. For the purposes of this definition, "submitted"
   means any form of electronic, verbal, or written communication sent
   to the Licensor or its representatives, including but not limited to
   communication on electronic mailing lists, source code control systems,
   and issue tracking systems that are managed by, or on behalf of, the
   Licensor for the purpose of discussing and improving the Work, but
   excluding communication that is conspicuously marked or otherwise
   designated in writing by the copyright owner as "Not a Contribution."

   "Contributor" shall mean Licensor and any individual or Legal Entity
   on behalf of whom a Contribution has been received by Licensor and
   subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
   this License, each Contributor hereby grants to You a perpetual,
   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
   copyright license to reproduce, prepare Derivative Works of,
   publicly display, publicly perform, sublicense, and distribute the
   Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
   this License, each Contributor hereby grants to You a perpetual,
   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
   (except as stated in this section) patent license to make, have made,
   use, offer to sell, sell, import, and otherwise transfer the Work,
   where such license applies only to those patent claims licensable
   by such Contributor that are necessarily infringed by their
   Contribution(s) alone or by combination of their Contribution(s)
   with the Work to which such Contribution(s) was submitted. If You
   institute patent litigation against any entity (including a
   cross-claim or counterclaim in a lawsuit) alleging that the Work
   or a Contribution incorporated within the Work constitutes direct
   or contributory patent infringement, then any patent licenses
   granted to You under this License for that Work shall terminate
   as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
   Work or Derivative Works thereof in any medium, with or without
   modifications, and in Source or Object form, provided that You
   meet the following conditions:

   (a) You must give any other recipients of the Work or
       Derivative Works a copy of this License; and

   (b) You must cause any modified files to carry prominent notices
       stating that You changed the files; and

   (c) You must retain, in the Source form of any Derivative Works
       that You distribute, all copyright, patent, trademark, and
       attribution notices from the Source form of the Work,
       excluding those notices that do not pertain to any part of
       the Derivative Works; and

   (d) If the Work includes a "NOTICE" text file as part of its
       distribution, then any Derivative Works that You distribute must
       include a readable copy of the attribution notices contained
       within such NOTICE file, excluding those notices that do not
       pertain to any part of the Derivative Works, in at least one
       of the following places: within a NOTICE text file distributed
       as part of the Derivative Works; within the Source form or
       documentation, if provided along with the Derivative Works; or,
       within a display generated by the Derivative Works, if and
       wherever such third-party notices normally appear. The contents
       of the NOTICE file are for informational purposes only and
       do not modify the License. You may add Your own attribution
       notices within Derivative Works that You distribute, alongside
       or as an addendum to the NOTICE text from the Work, provided
       that such additional attribution notices cannot be construed
       as modifying the License.

   You may add Your own copyright statement to Your modifications and
   may provide additional or different license terms and conditions
   for use, reproduction, or distribution of Your modifications, or
   for any such Derivative Works as a whole, provided Your use,
   reproduction, and distribution of the Work otherwise complies with
   the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
   any Contribution intentionally submitted for inclusion in the Work
   by You to the Licensor shall be under the terms and conditions of
   this License, without any additional terms or conditions.
   Notwithstanding the above, nothing herein shall supersede or modify
   the terms of any separate license agreement you may have executed
   with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
   names, trademarks, service marks, or product names of the Licensor,
   except as required for reasonable and customary use in describing the
   origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
   agreed to in writing, Licensor provides the Work (and each
   Contributor provides its Contributions) on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
   implied, including, without limitation, any warranties or conditions
   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
   PARTICULAR PURPOSE. You are solely responsible for determining the
   appropriateness of using or redistributing the Work and assume any
   risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
   whether in tort (including negligence), contract, or otherwise,
   unless required by applicable law (such as deliberate and grossly
   negligent acts) or agreed to in writing, shall any Contributor be
   liable to You for damages, including any direct, indirect, special,
   incidental, or consequential damages of any character arising as a
   result of this License or out of the use or inability to use the
   Work (including but not limited to damages for loss of goodwill,
   work stoppage, computer failure or malfunction, or any and all
   other commercial damages or losses), even if such Contributor
   has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
   the Work or Derivative Works thereof, You may choose to offer,
   and charge a fee for, acceptance of support, warranty, indemnity,
   or other liability obligations and/or rights consistent with this
   License. However, in accepting such obligations, You may act only
   on Your own behalf and on Your sole responsibility, not on behalf
   of any other Contributor, and only if You agree to indemnify,
   defend, and hold each Contributor harmless for any liability
   incurred by, or claims asserted against, such Contributor by reason
   of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS
23 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/LICENSE.BSD generated vendored Normal file
@@ -0,0 +1,23 @@
Copyright (c) Donald Stufft and individual contributors.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

    1. Redistributions of source code must retain the above copyright notice,
       this list of conditions and the following disclaimer.

    2. Redistributions in binary form must reproduce the above copyright
       notice, this list of conditions and the following disclaimer in the
       documentation and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
15 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/__init__.py generated vendored Normal file
@@ -0,0 +1,15 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "23.3.dev0"

__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"

__license__ = "BSD-2-Clause or Apache-2.0"
__copyright__ = "2014 %s" % __author__
108 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py generated vendored Normal file
@@ -0,0 +1,108 @@
"""
ELF file parser.

This provides a class ``ELFFile`` that parses an ELF executable in a similar
interface to ``ZipFile``. Only the read interface is implemented.

Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
"""

import enum
import os
import struct
from typing import IO, Optional, Tuple


class ELFInvalid(ValueError):
    pass


class EIClass(enum.IntEnum):
    C32 = 1
    C64 = 2


class EIData(enum.IntEnum):
    Lsb = 1
    Msb = 2


class EMachine(enum.IntEnum):
    I386 = 3
    S390 = 22
    Arm = 40
    X8664 = 62
    AArc64 = 183


class ELFFile:
    """
    Representation of an ELF executable.
    """

    def __init__(self, f: IO[bytes]) -> None:
        self._f = f

        try:
            ident = self._read("16B")
        except struct.error:
            raise ELFInvalid("unable to parse identification")
        magic = bytes(ident[:4])
        if magic != b"\x7fELF":
            raise ELFInvalid(f"invalid magic: {magic!r}")

        self.capacity = ident[4]  # Format for program header (bitness).
        self.encoding = ident[5]  # Data structure encoding (endianness).

        try:
            # e_fmt: Format for program header.
            # p_fmt: Format for section header.
            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
            e_fmt, self._p_fmt, self._p_idx = {
                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
            }[(self.capacity, self.encoding)]
        except KeyError:
            raise ELFInvalid(
                f"unrecognized capacity ({self.capacity}) or "
                f"encoding ({self.encoding})"
            )

        try:
            (
                _,
                self.machine,  # Architecture type.
                _,
                _,
                self._e_phoff,  # Offset of program header.
                _,
                self.flags,  # Processor-specific flags.
                _,
                self._e_phentsize,  # Size of section.
                self._e_phnum,  # Number of sections.
            ) = self._read(e_fmt)
        except struct.error as e:
            raise ELFInvalid("unable to parse machine and section information") from e

    def _read(self, fmt: str) -> Tuple[int, ...]:
        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))

    @property
    def interpreter(self) -> Optional[str]:
        """
        The path recorded in the ``PT_INTERP`` section header.
        """
        for index in range(self._e_phnum):
            self._f.seek(self._e_phoff + self._e_phentsize * index)
            try:
                data = self._read(self._p_fmt)
            except struct.error:
                continue
            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
                continue
            self._f.seek(data[self._p_idx[1]])
            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
        return None
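
A hedged sketch of the read interface above; it assumes a Linux host, where the running interpreter is itself a dynamically linked ELF binary:

import sys

with open(sys.executable, "rb") as f:
    elf = ELFFile(f)  # ELFFile as defined above
    # e.g. capacity=2 (64-bit), encoding=1 (LSB), machine=62 (EMachine.X8664)
    print(elf.capacity, elf.encoding, elf.machine)
    print(elf.interpreter)  # e.g. /lib64/ld-linux-x86-64.so.2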
252 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/_manylinux.py generated vendored Normal file
@@ -0,0 +1,252 @@
import collections
import contextlib
import functools
import os
import re
import sys
import warnings
from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple

from ._elffile import EIClass, EIData, ELFFile, EMachine

EF_ARM_ABIMASK = 0xFF000000
EF_ARM_ABI_VER5 = 0x05000000
EF_ARM_ABI_FLOAT_HARD = 0x00000400


# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
# as the type for `path` until then.
@contextlib.contextmanager
def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
    try:
        with open(path, "rb") as f:
            yield ELFFile(f)
    except (OSError, TypeError, ValueError):
        yield None


def _is_linux_armhf(executable: str) -> bool:
    # hard-float ABI can be detected from the ELF header of the running
    # process
    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    with _parse_elf(executable) as f:
        return (
            f is not None
            and f.capacity == EIClass.C32
            and f.encoding == EIData.Lsb
            and f.machine == EMachine.Arm
            and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
            and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
        )


def _is_linux_i686(executable: str) -> bool:
    with _parse_elf(executable) as f:
        return (
            f is not None
            and f.capacity == EIClass.C32
            and f.encoding == EIData.Lsb
            and f.machine == EMachine.I386
        )


def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
    if "armv7l" in archs:
        return _is_linux_armhf(executable)
    if "i686" in archs:
        return _is_linux_i686(executable)
    allowed_archs = {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x", "loongarch64"}
    return any(arch in allowed_archs for arch in archs)


# If glibc ever changes its major version, we need to know what the last
# minor version was, so we can build the complete list of all versions.
# For now, guess what the highest minor version might be, assume it will
# be 50 for testing. Once this actually happens, update the dictionary
# with the actual value.
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)


class _GLibCVersion(NamedTuple):
    major: int
    minor: int


def _glibc_version_string_confstr() -> Optional[str]:
    """
    Primary implementation of glibc_version_string using os.confstr.
    """
    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
    # to be broken or missing. This strategy is used in the standard library
    # platform module.
    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
    try:
        # Should be a string like "glibc 2.17".
        version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
        assert version_string is not None
        _, version = version_string.rsplit()
    except (AssertionError, AttributeError, OSError, ValueError):
        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
        return None
    return version


def _glibc_version_string_ctypes() -> Optional[str]:
    """
    Fallback implementation of glibc_version_string using ctypes.
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str


def _glibc_version_string() -> Optional[str]:
    """Returns glibc version string, or None if not using glibc."""
    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()


def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
    """Parse glibc version.

    We use a regexp instead of str.split because we want to discard any
    random junk that might come after the minor version -- this might happen
    in patched/forked versions of glibc (e.g. Linaro's version of glibc
    uses version strings like "2.20-2014.11"). See gh-3588.
    """
    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if not m:
        warnings.warn(
            f"Expected glibc version with 2 components major.minor,"
            f" got: {version_str}",
            RuntimeWarning,
        )
        return -1, -1
    return int(m.group("major")), int(m.group("minor"))


@functools.lru_cache()
def _get_glibc_version() -> Tuple[int, int]:
    version_str = _glibc_version_string()
    if version_str is None:
        return (-1, -1)
    return _parse_glibc_version(version_str)
||||
|
||||
|
||||
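# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the vendored module): the
# ctypes fallback above amounts to asking the running process's own libc for
# its version. A minimal standalone equivalent, assuming CPython on Linux;
# the printed version is an example only:
def _demo_glibc_version_via_ctypes() -> None:
    import ctypes

    try:
        libc = ctypes.CDLL(None)  # dlopen(NULL): handle for the main program
        version_fn = libc.gnu_get_libc_version  # AttributeError if not glibc
    except (OSError, AttributeError):
        print("not linked against glibc")  # musl or statically linked
        return
    version_fn.restype = ctypes.c_char_p
    print(version_fn().decode("ascii"))  # e.g. "2.31" (illustrative)
# ---------------------------------------------------------------------------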
# From PEP 513, PEP 600
def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
    sys_glibc = _get_glibc_version()
    if sys_glibc < version:
        return False
    # Check for presence of _manylinux module.
    try:
        import _manylinux  # noqa
    except ImportError:
        return True
    if hasattr(_manylinux, "manylinux_compatible"):
        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
        if result is not None:
            return bool(result)
        return True
    if version == _GLibCVersion(2, 5):
        if hasattr(_manylinux, "manylinux1_compatible"):
            return bool(_manylinux.manylinux1_compatible)
    if version == _GLibCVersion(2, 12):
        if hasattr(_manylinux, "manylinux2010_compatible"):
            return bool(_manylinux.manylinux2010_compatible)
    if version == _GLibCVersion(2, 17):
        if hasattr(_manylinux, "manylinux2014_compatible"):
            return bool(_manylinux.manylinux2014_compatible)
    return True


_LEGACY_MANYLINUX_MAP = {
    # CentOS 7 w/ glibc 2.17 (PEP 599)
    (2, 17): "manylinux2014",
    # CentOS 6 w/ glibc 2.12 (PEP 571)
    (2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
    (2, 5): "manylinux1",
}


def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate manylinux tags compatible with the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be manylinux-compatible.

    :returns: An iterator of compatible manylinux tags.
    """
    if not _have_compatible_abi(sys.executable, archs):
        return
    # Oldest glibc to be supported regardless of architecture is (2, 17).
    too_old_glibc2 = _GLibCVersion(2, 16)
    if set(archs) & {"x86_64", "i686"}:
        # On x86/i686 the oldest supported glibc is (2, 5).
        too_old_glibc2 = _GLibCVersion(2, 4)
    current_glibc = _GLibCVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
    #
    # Build a list of maximum glibc versions so that we can
    # output the canonical list of all glibc from current_glibc
    # down to too_old_glibc2, including all intermediary versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
    for arch in archs:
        for glibc_max in glibc_max_list:
            if glibc_max.major == too_old_glibc2.major:
                min_minor = too_old_glibc2.minor
            else:
                # For other glibc major versions oldest supported is (x, 0).
                min_minor = -1
            for glibc_minor in range(glibc_max.minor, min_minor, -1):
                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
                tag = "manylinux_{}_{}".format(*glibc_version)
                if _is_compatible(arch, glibc_version):
                    yield f"{tag}_{arch}"
                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
                if glibc_version in _LEGACY_MANYLINUX_MAP:
                    legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
                    if _is_compatible(arch, glibc_version):
                        yield f"{legacy_tag}_{arch}"
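Editor's note: as a hedged, standalone illustration of the enumeration above (a current glibc of 2.31 on x86_64 is assumed, and the `_manylinux` override module is ignored), the loop yields:

current_minor = 31           # assumed _get_glibc_version() result: glibc 2.31
too_old_minor = 4            # x86_64/i686 support reaches back to glibc 2.5
legacy = {(2, 17): "manylinux2014", (2, 12): "manylinux2010", (2, 5): "manylinux1"}

for minor in range(current_minor, too_old_minor, -1):
    print(f"manylinux_2_{minor}_x86_64")
    if (2, minor) in legacy:
        print(f"{legacy[(2, minor)]}_x86_64")
# manylinux_2_31_x86_64 ... manylinux_2_17_x86_64, manylinux2014_x86_64, ...
# down to manylinux_2_5_x86_64, manylinux1_x86_64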
83 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/_musllinux.py generated vendored Normal file
@@ -0,0 +1,83 @@
"""PEP 656 support.
|
||||
|
||||
This module implements logic to detect if the currently running Python is
|
||||
linked against musl, and what musl version is used.
|
||||
"""
|
||||
|
||||
import functools
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Iterator, NamedTuple, Optional, Sequence
|
||||
|
||||
from ._elffile import ELFFile
|
||||
|
||||
|
||||
class _MuslVersion(NamedTuple):
|
||||
major: int
|
||||
minor: int
|
||||
|
||||
|
||||
def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
|
||||
lines = [n for n in (n.strip() for n in output.splitlines()) if n]
|
||||
if len(lines) < 2 or lines[0][:4] != "musl":
|
||||
return None
|
||||
m = re.match(r"Version (\d+)\.(\d+)", lines[1])
|
||||
if not m:
|
||||
return None
|
||||
return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
|
||||
|
||||
|
||||
@functools.lru_cache()
|
||||
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
|
||||
"""Detect currently-running musl runtime version.
|
||||
|
||||
This is done by checking the specified executable's dynamic linking
|
||||
information, and invoking the loader to parse its output for a version
|
||||
string. If the loader is musl, the output would be something like::
|
||||
|
||||
musl libc (x86_64)
|
||||
Version 1.2.2
|
||||
Dynamic Program Loader
|
||||
"""
|
||||
try:
|
||||
with open(executable, "rb") as f:
|
||||
ld = ELFFile(f).interpreter
|
||||
except (OSError, TypeError, ValueError):
|
||||
return None
|
||||
if ld is None or "musl" not in ld:
|
||||
return None
|
||||
proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
|
||||
return _parse_musl_version(proc.stderr)
|
||||
|
||||
|
||||
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
|
||||
"""Generate musllinux tags compatible to the current platform.
|
||||
|
||||
:param archs: Sequence of compatible architectures.
|
||||
The first one shall be the closest to the actual architecture and be the part of
|
||||
platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
|
||||
The ``linux_`` prefix is assumed as a prerequisite for the current platform to
|
||||
be musllinux-compatible.
|
||||
|
||||
:returns: An iterator of compatible musllinux tags.
|
||||
"""
|
||||
sys_musl = _get_musl_version(sys.executable)
|
||||
if sys_musl is None: # Python not dynamically linked against musl.
|
||||
return
|
||||
for arch in archs:
|
||||
for minor in range(sys_musl.minor, -1, -1):
|
||||
yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
|
||||
|
||||
|
||||
if __name__ == "__main__": # pragma: no cover
|
||||
import sysconfig
|
||||
|
||||
plat = sysconfig.get_platform()
|
||||
assert plat.startswith("linux-"), "not linux"
|
||||
|
||||
print("plat:", plat)
|
||||
print("musl:", _get_musl_version(sys.executable))
|
||||
print("tags:", end=" ")
|
||||
for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
|
||||
print(t, end="\n ")
|
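Editor's note: a quick sketch of the version parsing above, feeding it the loader banner from the docstring (assumes `_parse_musl_version` from this module is in scope):

banner = "musl libc (x86_64)\nVersion 1.2.2\nDynamic Program Loader\n"
print(_parse_musl_version(banner))             # _MuslVersion(major=1, minor=2)
print(_parse_musl_version("ld-linux-x86-64"))  # None: not a musl banner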
359 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/_parser.py generated vendored Normal file
@@ -0,0 +1,359 @@
"""Handwritten parser of dependency specifiers.
|
||||
|
||||
The docstring for each __parse_* function contains ENBF-inspired grammar representing
|
||||
the implementation.
|
||||
"""
|
||||
|
||||
import ast
|
||||
from typing import Any, List, NamedTuple, Optional, Tuple, Union
|
||||
|
||||
from ._tokenizer import DEFAULT_RULES, Tokenizer
|
||||
|
||||
|
||||
class Node:
|
||||
def __init__(self, value: str) -> None:
|
||||
self.value = value
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.value
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<{self.__class__.__name__}('{self}')>"
|
||||
|
||||
def serialize(self) -> str:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class Variable(Node):
|
||||
def serialize(self) -> str:
|
||||
return str(self)
|
||||
|
||||
|
||||
class Value(Node):
|
||||
def serialize(self) -> str:
|
||||
return f'"{self}"'
|
||||
|
||||
|
||||
class Op(Node):
|
||||
def serialize(self) -> str:
|
||||
return str(self)
|
||||
|
||||
|
||||
MarkerVar = Union[Variable, Value]
|
||||
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
|
||||
# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
|
||||
# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
|
||||
# mypy does not support recursive type definition
|
||||
# https://github.com/python/mypy/issues/731
|
||||
MarkerAtom = Any
|
||||
MarkerList = List[Any]
|
||||
|
||||
|
||||
class ParsedRequirement(NamedTuple):
|
||||
name: str
|
||||
url: str
|
||||
extras: List[str]
|
||||
specifier: str
|
||||
marker: Optional[MarkerList]
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------------------
|
||||
# Recursive descent parser for dependency specifier
|
||||
# --------------------------------------------------------------------------------------
|
||||
def parse_requirement(source: str) -> ParsedRequirement:
|
||||
return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
|
||||
|
||||
|
||||
def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
|
||||
"""
|
||||
requirement = WS? IDENTIFIER WS? extras WS? requirement_details
|
||||
"""
|
||||
tokenizer.consume("WS")
|
||||
|
||||
name_token = tokenizer.expect(
|
||||
"IDENTIFIER", expected="package name at the start of dependency specifier"
|
||||
)
|
||||
name = name_token.text
|
||||
tokenizer.consume("WS")
|
||||
|
||||
extras = _parse_extras(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
|
||||
url, specifier, marker = _parse_requirement_details(tokenizer)
|
||||
tokenizer.expect("END", expected="end of dependency specifier")
|
||||
|
||||
return ParsedRequirement(name, url, extras, specifier, marker)
|
||||
|
||||
|
||||
def _parse_requirement_details(
|
||||
tokenizer: Tokenizer,
|
||||
) -> Tuple[str, str, Optional[MarkerList]]:
|
||||
"""
|
||||
requirement_details = AT URL (WS requirement_marker?)?
|
||||
| specifier WS? (requirement_marker)?
|
||||
"""
|
||||
|
||||
specifier = ""
|
||||
url = ""
|
||||
marker = None
|
||||
|
||||
if tokenizer.check("AT"):
|
||||
tokenizer.read()
|
||||
tokenizer.consume("WS")
|
||||
|
||||
url_start = tokenizer.position
|
||||
url = tokenizer.expect("URL", expected="URL after @").text
|
||||
if tokenizer.check("END", peek=True):
|
||||
return (url, specifier, marker)
|
||||
|
||||
tokenizer.expect("WS", expected="whitespace after URL")
|
||||
|
||||
# The input might end after whitespace.
|
||||
if tokenizer.check("END", peek=True):
|
||||
return (url, specifier, marker)
|
||||
|
||||
marker = _parse_requirement_marker(
|
||||
tokenizer, span_start=url_start, after="URL and whitespace"
|
||||
)
|
||||
else:
|
||||
specifier_start = tokenizer.position
|
||||
specifier = _parse_specifier(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
|
||||
if tokenizer.check("END", peek=True):
|
||||
return (url, specifier, marker)
|
||||
|
||||
marker = _parse_requirement_marker(
|
||||
tokenizer,
|
||||
span_start=specifier_start,
|
||||
after=(
|
||||
"version specifier"
|
||||
if specifier
|
||||
else "name and no valid version specifier"
|
||||
),
|
||||
)
|
||||
|
||||
return (url, specifier, marker)
|
||||
|
||||
|
||||
def _parse_requirement_marker(
|
||||
tokenizer: Tokenizer, *, span_start: int, after: str
|
||||
) -> MarkerList:
|
||||
"""
|
||||
requirement_marker = SEMICOLON marker WS?
|
||||
"""
|
||||
|
||||
if not tokenizer.check("SEMICOLON"):
|
||||
tokenizer.raise_syntax_error(
|
||||
f"Expected end or semicolon (after {after})",
|
||||
span_start=span_start,
|
||||
)
|
||||
tokenizer.read()
|
||||
|
||||
marker = _parse_marker(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
|
||||
return marker
|
||||
|
||||
|
||||
def _parse_extras(tokenizer: Tokenizer) -> List[str]:
|
||||
"""
|
||||
extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
|
||||
"""
|
||||
if not tokenizer.check("LEFT_BRACKET", peek=True):
|
||||
return []
|
||||
|
||||
with tokenizer.enclosing_tokens(
|
||||
"LEFT_BRACKET",
|
||||
"RIGHT_BRACKET",
|
||||
around="extras",
|
||||
):
|
||||
tokenizer.consume("WS")
|
||||
extras = _parse_extras_list(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
|
||||
return extras
|
||||
|
||||
|
||||
def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
|
||||
"""
|
||||
extras_list = identifier (wsp* ',' wsp* identifier)*
|
||||
"""
|
||||
extras: List[str] = []
|
||||
|
||||
if not tokenizer.check("IDENTIFIER"):
|
||||
return extras
|
||||
|
||||
extras.append(tokenizer.read().text)
|
||||
|
||||
while True:
|
||||
tokenizer.consume("WS")
|
||||
if tokenizer.check("IDENTIFIER", peek=True):
|
||||
tokenizer.raise_syntax_error("Expected comma between extra names")
|
||||
elif not tokenizer.check("COMMA"):
|
||||
break
|
||||
|
||||
tokenizer.read()
|
||||
tokenizer.consume("WS")
|
||||
|
||||
extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
|
||||
extras.append(extra_token.text)
|
||||
|
||||
return extras
|
||||
|
||||
|
||||
def _parse_specifier(tokenizer: Tokenizer) -> str:
|
||||
"""
|
||||
specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
|
||||
| WS? version_many WS?
|
||||
"""
|
||||
with tokenizer.enclosing_tokens(
|
||||
"LEFT_PARENTHESIS",
|
||||
"RIGHT_PARENTHESIS",
|
||||
around="version specifier",
|
||||
):
|
||||
tokenizer.consume("WS")
|
||||
parsed_specifiers = _parse_version_many(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
|
||||
return parsed_specifiers
|
||||
|
||||
|
||||
def _parse_version_many(tokenizer: Tokenizer) -> str:
|
||||
"""
|
||||
version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?
|
||||
"""
|
||||
parsed_specifiers = ""
|
||||
while tokenizer.check("SPECIFIER"):
|
||||
span_start = tokenizer.position
|
||||
parsed_specifiers += tokenizer.read().text
|
||||
if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
|
||||
tokenizer.raise_syntax_error(
|
||||
".* suffix can only be used with `==` or `!=` operators",
|
||||
span_start=span_start,
|
||||
span_end=tokenizer.position + 1,
|
||||
)
|
||||
if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
|
||||
tokenizer.raise_syntax_error(
|
||||
"Local version label can only be used with `==` or `!=` operators",
|
||||
span_start=span_start,
|
||||
span_end=tokenizer.position,
|
||||
)
|
||||
tokenizer.consume("WS")
|
||||
if not tokenizer.check("COMMA"):
|
||||
break
|
||||
parsed_specifiers += tokenizer.read().text
|
||||
tokenizer.consume("WS")
|
||||
|
||||
return parsed_specifiers
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------------------
|
||||
# Recursive descent parser for marker expression
|
||||
# --------------------------------------------------------------------------------------
|
||||
def parse_marker(source: str) -> MarkerList:
|
||||
return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
|
||||
|
||||
|
||||
def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
|
||||
retval = _parse_marker(tokenizer)
|
||||
tokenizer.expect("END", expected="end of marker expression")
|
||||
return retval
|
||||
|
||||
|
||||
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
|
||||
"""
|
||||
marker = marker_atom (BOOLOP marker_atom)+
|
||||
"""
|
||||
expression = [_parse_marker_atom(tokenizer)]
|
||||
while tokenizer.check("BOOLOP"):
|
||||
token = tokenizer.read()
|
||||
expr_right = _parse_marker_atom(tokenizer)
|
||||
expression.extend((token.text, expr_right))
|
||||
return expression
|
||||
|
||||
|
||||
def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
|
||||
"""
|
||||
marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
|
||||
| WS? marker_item WS?
|
||||
"""
|
||||
|
||||
tokenizer.consume("WS")
|
||||
if tokenizer.check("LEFT_PARENTHESIS", peek=True):
|
||||
with tokenizer.enclosing_tokens(
|
||||
"LEFT_PARENTHESIS",
|
||||
"RIGHT_PARENTHESIS",
|
||||
around="marker expression",
|
||||
):
|
||||
tokenizer.consume("WS")
|
||||
marker: MarkerAtom = _parse_marker(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
else:
|
||||
marker = _parse_marker_item(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
return marker
|
||||
|
||||
|
||||
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
|
||||
"""
|
||||
marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
|
||||
"""
|
||||
tokenizer.consume("WS")
|
||||
marker_var_left = _parse_marker_var(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
marker_op = _parse_marker_op(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
marker_var_right = _parse_marker_var(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
return (marker_var_left, marker_op, marker_var_right)
|
||||
|
||||
|
||||
def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
|
||||
"""
|
||||
marker_var = VARIABLE | QUOTED_STRING
|
||||
"""
|
||||
if tokenizer.check("VARIABLE"):
|
||||
return process_env_var(tokenizer.read().text.replace(".", "_"))
|
||||
elif tokenizer.check("QUOTED_STRING"):
|
||||
return process_python_str(tokenizer.read().text)
|
||||
else:
|
||||
tokenizer.raise_syntax_error(
|
||||
message="Expected a marker variable or quoted string"
|
||||
)
|
||||
|
||||
|
||||
def process_env_var(env_var: str) -> Variable:
|
||||
if (
|
||||
env_var == "platform_python_implementation"
|
||||
or env_var == "python_implementation"
|
||||
):
|
||||
return Variable("platform_python_implementation")
|
||||
else:
|
||||
return Variable(env_var)
|
||||
|
||||
|
||||
def process_python_str(python_str: str) -> Value:
|
||||
value = ast.literal_eval(python_str)
|
||||
return Value(str(value))
|
||||
|
||||
|
||||
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
|
||||
"""
|
||||
marker_op = IN | NOT IN | OP
|
||||
"""
|
||||
if tokenizer.check("IN"):
|
||||
tokenizer.read()
|
||||
return Op("in")
|
||||
elif tokenizer.check("NOT"):
|
||||
tokenizer.read()
|
||||
tokenizer.expect("WS", expected="whitespace after 'not'")
|
||||
tokenizer.expect("IN", expected="'in' after 'not'")
|
||||
return Op("not in")
|
||||
elif tokenizer.check("OP"):
|
||||
return Op(tokenizer.read().text)
|
||||
else:
|
||||
return tokenizer.raise_syntax_error(
|
||||
"Expected marker operator, one of "
|
||||
"<=, <, !=, ==, >=, >, ~=, ===, in, not in"
|
||||
)
|
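Editor's note: a hedged sketch of the entry point above (assumes `parse_requirement` from this module is in scope; reprs in the comments are abbreviated examples):

req = parse_requirement('requests[security]>=2.8.1 ; python_version < "3.11"')
print(req.name)       # 'requests'
print(req.extras)     # ['security']
print(req.specifier)  # '>=2.8.1'
print(req.url)        # '' (only set for 'name @ URL' requirements)
print(req.marker)     # [(<Variable('python_version')>, <Op('<')>, <Value('3.11')>)]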
61 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/_structures.py generated vendored Normal file
@@ -0,0 +1,61 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.


class InfinityType:
    def __repr__(self) -> str:
        return "Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity


Infinity = InfinityType()


class NegativeInfinityType:
    def __repr__(self) -> str:
        return "-Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __lt__(self, other: object) -> bool:
        return True

    def __le__(self, other: object) -> bool:
        return True

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __gt__(self, other: object) -> bool:
        return False

    def __ge__(self, other: object) -> bool:
        return False

    def __neg__(self: object) -> InfinityType:
        return Infinity


NegativeInfinity = NegativeInfinityType()
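Editor's note: the two sentinels compare above and below every other value, which is how packaging pads version sort keys of unequal length. A minimal sketch, assuming `Infinity` and `NegativeInfinity` from this module are in scope:

keys = [(1, 5), (1, Infinity), (1, NegativeInfinity), (0, Infinity)]
print(sorted(keys))
# [(0, Infinity), (1, NegativeInfinity), (1, 5), (1, Infinity)]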
192 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/_tokenizer.py generated vendored Normal file
@@ -0,0 +1,192 @@
import contextlib
import re
from dataclasses import dataclass
from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union

from .specifiers import Specifier


@dataclass
class Token:
    name: str
    text: str
    position: int


class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: Tuple[int, int],
    ) -> None:
        self.span = span
        self.message = message
        self.source = source

        super().__init__()

    def __str__(self) -> str:
        marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^"
        return "\n    ".join([self.message, self.source, marker])


DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
    "LEFT_PARENTHESIS": r"\(",
    "RIGHT_PARENTHESIS": r"\)",
    "LEFT_BRACKET": r"\[",
    "RIGHT_BRACKET": r"\]",
    "SEMICOLON": r";",
    "COMMA": r",",
    "QUOTED_STRING": re.compile(
        r"""
            (
                ('[^']*')
                |
                ("[^"]*")
            )
        """,
        re.VERBOSE,
    ),
    "OP": r"(===|==|~=|!=|<=|>=|<|>)",
    "BOOLOP": r"\b(or|and)\b",
    "IN": r"\bin\b",
    "NOT": r"\bnot\b",
    "VARIABLE": re.compile(
        r"""
            \b(
                python_version
                |python_full_version
                |os[._]name
                |sys[._]platform
                |platform_(release|system)
                |platform[._](version|machine|python_implementation)
                |python_implementation
                |implementation_(name|version)
                |extra
            )\b
        """,
        re.VERBOSE,
    ),
    "SPECIFIER": re.compile(
        Specifier._operator_regex_str + Specifier._version_regex_str,
        re.VERBOSE | re.IGNORECASE,
    ),
    "AT": r"\@",
    "URL": r"[^ \t]+",
    "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
    "VERSION_PREFIX_TRAIL": r"\.\*",
    "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
    "WS": r"[ \t]+",
    "END": r"$",
}


class Tokenizer:
    """Context-sensitive token parsing.

    Provides methods to examine the input stream to check whether the next token
    matches.
    """

    def __init__(
        self,
        source: str,
        *,
        rules: "Dict[str, Union[str, re.Pattern[str]]]",
    ) -> None:
        self.source = source
        self.rules: Dict[str, re.Pattern[str]] = {
            name: re.compile(pattern) for name, pattern in rules.items()
        }
        self.next_token: Optional[Token] = None
        self.position = 0

    def consume(self, name: str) -> None:
        """Move beyond the provided token name, if at the current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        assert (
            self.next_token is None
        ), f"Cannot check for {name!r}, already have {self.next_token!r}"
        assert name in self.rules, f"Unknown token name: {name!r}"

        expression = self.rules[name]

        match = expression.match(self.source, self.position)
        if match is None:
            return False
        if not peek:
            self.next_token = Token(name, match[0], self.position)
        return True

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        The token is read and returned.
        """
        if not self.check(name):
            raise self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        token = self.next_token
        assert token is not None

        self.position += len(token.text)
        self.next_token = None

        return token

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: Optional[int] = None,
        span_end: Optional[int] = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position."""
        span = (
            self.position if span_start is None else span_start,
            self.position if span_end is None else span_end,
        )
        raise ParserSyntaxError(
            message,
            source=self.source,
            span=span,
        )

    @contextlib.contextmanager
    def enclosing_tokens(
        self, open_token: str, close_token: str, *, around: str
    ) -> Iterator[None]:
        if self.check(open_token):
            open_position = self.position
            self.read()
        else:
            open_position = None

        yield

        if open_position is None:
            return

        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected matching {close_token} for {open_token}, after {around}",
                span_start=open_position,
            )

        self.read()
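Editor's note: a hedged sketch of driving the Tokenizer above by hand over an extras clause (assumes `Tokenizer` and `DEFAULT_RULES` from this module are in scope):

tok = Tokenizer("[extra1, extra2]", rules=DEFAULT_RULES)
tok.expect("LEFT_BRACKET", expected="opening bracket")
first = tok.expect("IDENTIFIER", expected="extra name")
tok.expect("COMMA", expected="comma")
tok.consume("WS")
second = tok.expect("IDENTIFIER", expected="extra name")
tok.expect("RIGHT_BRACKET", expected="closing bracket")
print(first.text, second.text)  # extra1 extra2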
252 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/markers.py generated vendored Normal file
@@ -0,0 +1,252 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

import operator
import os
import platform
import sys
from typing import Any, Callable, Dict, List, Optional, Tuple, Union

from ._parser import (
    MarkerAtom,
    MarkerList,
    Op,
    Value,
    Variable,
    parse_marker as _parse_marker,
)
from ._tokenizer import ParserSyntaxError
from .specifiers import InvalidSpecifier, Specifier
from .utils import canonicalize_name

__all__ = [
    "InvalidMarker",
    "UndefinedComparison",
    "UndefinedEnvironmentName",
    "Marker",
    "default_environment",
]

Operator = Callable[[str, str], bool]


class InvalidMarker(ValueError):
    """
    An invalid marker was found; users should refer to PEP 508.
    """


class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.
    """


class UndefinedEnvironmentName(ValueError):
    """
    A name was used that does not exist inside of the environment.
    """


def _normalize_extra_values(results: Any) -> Any:
    """
    Normalize extra values.
    """
    if isinstance(results[0], tuple):
        lhs, op, rhs = results[0]
        if isinstance(lhs, Variable) and lhs.value == "extra":
            normalized_extra = canonicalize_name(rhs.value)
            rhs = Value(normalized_extra)
        elif isinstance(rhs, Variable) and rhs.value == "extra":
            normalized_extra = canonicalize_name(lhs.value)
            lhs = Value(normalized_extra)
        results[0] = lhs, op, rhs
    return results


def _format_marker(
    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
) -> str:

    assert isinstance(marker, (list, tuple, str))

    # Sometimes we have a structure like [[...]] which is a single item list
    # where the single item is itself its own list. In that case we want to
    # skip the rest of this function so that we don't get extraneous () on the
    # outside.
    if (
        isinstance(marker, list)
        and len(marker) == 1
        and isinstance(marker[0], (list, tuple))
    ):
        return _format_marker(marker[0])

    if isinstance(marker, list):
        inner = (_format_marker(m, first=False) for m in marker)
        if first:
            return " ".join(inner)
        else:
            return "(" + " ".join(inner) + ")"
    elif isinstance(marker, tuple):
        return " ".join([m.serialize() for m in marker])
    else:
        return marker


_operators: Dict[str, Operator] = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}


def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
    try:
        spec = Specifier("".join([op.serialize(), rhs]))
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs, prereleases=True)

    oper: Optional[Operator] = _operators.get(op.serialize())
    if oper is None:
        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")

    return oper(lhs, rhs)


def _normalize(*values: str, key: str) -> Tuple[str, ...]:
    # PEP 685 – Comparison of extra names for optional distribution dependencies
    # https://peps.python.org/pep-0685/
    # > When comparing extra names, tools MUST normalize the names being
    # > compared using the semantics outlined in PEP 503 for names
    if key == "extra":
        return tuple(canonicalize_name(v) for v in values)

    # other environment markers don't have such standards
    return values


def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
    groups: List[List[bool]] = [[]]

    for marker in markers:
        assert isinstance(marker, (list, tuple, str))

        if isinstance(marker, list):
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker

            if isinstance(lhs, Variable):
                environment_key = lhs.value
                lhs_value = environment[environment_key]
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                environment_key = rhs.value
                rhs_value = environment[environment_key]

            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                groups.append([])

    return any(all(item) for item in groups)


def format_full_version(info: "sys._version_info") -> str:
    version = "{0.major}.{0.minor}.{0.micro}".format(info)
    kind = info.releaselevel
    if kind != "final":
        version += kind[0] + str(info.serial)
    return version


def default_environment() -> Dict[str, str]:
    iver = format_full_version(sys.implementation.version)
    implementation_name = sys.implementation.name
    return {
        "implementation_name": implementation_name,
        "implementation_version": iver,
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }


class Marker:
    def __init__(self, marker: str) -> None:
        # Note: We create a Marker object without calling this constructor in
        # packaging.requirements.Requirement. If any additional logic is
        # added here, make sure to mirror/adapt Requirement.
        try:
            self._markers = _normalize_extra_values(_parse_marker(marker))
            # The attribute `_markers` can be described in terms of a recursive type:
            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
            #
            # For example, the following expression:
            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
            #
            # is parsed into:
            # [
            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
            #     'or',
            #     [
            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
            #         'and',
            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
            #     ]
            # ]
        except ParserSyntaxError as e:
            raise InvalidMarker(str(e)) from e

    def __str__(self) -> str:
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<Marker('{self}')>"

    def __hash__(self) -> int:
        return hash((self.__class__.__name__, str(self)))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Marker):
            return NotImplemented

        return str(self) == str(other)

    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        current_environment["extra"] = ""
        if environment is not None:
            current_environment.update(environment)
            # The API used to allow setting extra to None. We need to handle this
            # case for backwards compatibility.
            if current_environment["extra"] is None:
                current_environment["extra"] = ""

        return _evaluate_markers(self._markers, current_environment)
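Editor's note: a hedged sketch of the public surface above (assumes `Marker` from this module is in scope; the first result depends on the host interpreter):

m = Marker('python_version >= "3.8" and sys_platform == "linux"')
print(m)                                      # canonical re-serialization
print(m.evaluate())                           # True on Linux CPython >= 3.8
print(m.evaluate({"python_version": "3.7"}))  # False: the override wins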
825 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/metadata.py generated vendored Normal file
@@ -0,0 +1,825 @@
import email.feedparser
import email.header
import email.message
import email.parser
import email.policy
import sys
import typing
from typing import (
    Any,
    Callable,
    Dict,
    Generic,
    List,
    Optional,
    Tuple,
    Type,
    Union,
    cast,
)

from . import requirements, specifiers, utils, version as version_module

T = typing.TypeVar("T")
if sys.version_info[:2] >= (3, 8):  # pragma: no cover
    from typing import Literal, TypedDict
else:  # pragma: no cover
    if typing.TYPE_CHECKING:
        from typing_extensions import Literal, TypedDict
    else:
        try:
            from typing_extensions import Literal, TypedDict
        except ImportError:

            class Literal:
                def __init_subclass__(*_args, **_kwargs):
                    pass

            class TypedDict:
                def __init_subclass__(*_args, **_kwargs):
                    pass


try:
    ExceptionGroup
except NameError:  # pragma: no cover

    class ExceptionGroup(Exception):  # noqa: N818
        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.

        If :external:exc:`ExceptionGroup` is already defined by Python itself,
        that version is used instead.
        """

        message: str
        exceptions: List[Exception]

        def __init__(self, message: str, exceptions: List[Exception]) -> None:
            self.message = message
            self.exceptions = exceptions

        def __repr__(self) -> str:
            return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"

else:  # pragma: no cover
    ExceptionGroup = ExceptionGroup


class InvalidMetadata(ValueError):
    """A metadata field contains invalid data."""

    field: str
    """The name of the field that contains invalid data."""

    def __init__(self, field: str, message: str) -> None:
        self.field = field
        super().__init__(message)


# The RawMetadata class attempts to make as few assumptions about the underlying
# serialization formats as possible. The idea is that as long as a serialization
# format offers some very basic primitives in *some* way, then we can support
# serializing to and from that format.
class RawMetadata(TypedDict, total=False):
    """A dictionary of raw core metadata.

    Each field in core metadata maps to a key of this dictionary (when data is
    provided). The key is lower-case and underscores are used instead of dashes
    compared to the equivalent core metadata field. Any core metadata field that
    can be specified multiple times or can hold multiple values in a single
    field has a key with a plural name. See :class:`Metadata` whose attributes
    match the keys of this dictionary.

    Core metadata fields that can be specified multiple times are stored as a
    list or dict depending on which is appropriate for the field. Any fields
    which hold multiple values in a single field are stored as a list.

    """

    # Metadata 1.0 - PEP 241
    metadata_version: str
    name: str
    version: str
    platforms: List[str]
    summary: str
    description: str
    keywords: List[str]
    home_page: str
    author: str
    author_email: str
    license: str

    # Metadata 1.1 - PEP 314
    supported_platforms: List[str]
    download_url: str
    classifiers: List[str]
    requires: List[str]
    provides: List[str]
    obsoletes: List[str]

    # Metadata 1.2 - PEP 345
    maintainer: str
    maintainer_email: str
    requires_dist: List[str]
    provides_dist: List[str]
    obsoletes_dist: List[str]
    requires_python: str
    requires_external: List[str]
    project_urls: Dict[str, str]

    # Metadata 2.0
    # PEP 426 attempted to completely revamp the metadata format
    # but got stuck without ever being able to build consensus on
    # it and ultimately ended up withdrawn.
    #
    # However, a number of tools had started emitting METADATA with
    # `2.0` Metadata-Version, so for historical reasons, this version
    # was skipped.

    # Metadata 2.1 - PEP 566
    description_content_type: str
    provides_extra: List[str]

    # Metadata 2.2 - PEP 643
    dynamic: List[str]

    # Metadata 2.3 - PEP 685
    # No new fields were added in PEP 685, just some edge cases were
    # tightened up to provide better interoperability.


_STRING_FIELDS = {
    "author",
    "author_email",
    "description",
    "description_content_type",
    "download_url",
    "home_page",
    "license",
    "maintainer",
    "maintainer_email",
    "metadata_version",
    "name",
    "requires_python",
    "summary",
    "version",
}

_LIST_FIELDS = {
    "classifiers",
    "dynamic",
    "obsoletes",
    "obsoletes_dist",
    "platforms",
    "provides",
    "provides_dist",
    "provides_extra",
    "requires",
    "requires_dist",
    "requires_external",
    "supported_platforms",
}

_DICT_FIELDS = {
    "project_urls",
}


def _parse_keywords(data: str) -> List[str]:
    """Split a string of comma-separated keywords into a list of keywords."""
    return [k.strip() for k in data.split(",")]


def _parse_project_urls(data: List[str]) -> Dict[str, str]:
    """Parse a list of label/URL string pairings separated by a comma."""
    urls = {}
    for pair in data:
        # Our logic is slightly tricky here as we want to try and do
        # *something* reasonable with malformed data.
        #
        # The main thing that we have to worry about is data that does
        # not have a ',' at all to split the label from the value. There
        # isn't a singular right answer here, and we will fail validation
        # later on (if the caller is validating) so it doesn't *really*
        # matter, but since the missing value has to be an empty str
        # and our return value is dict[str, str], if we let the key
        # be the missing value, then they'd have multiple '' values that
        # overwrite each other in an accumulating dict.
        #
        # The other potential issue is that it's possible to have the
        # same label multiple times in the metadata, with no solid "right"
        # answer with what to do in that case. As such, we'll do the only
        # thing we can, which is treat the field as unparseable and add it
        # to our list of unparsed fields.
        parts = [p.strip() for p in pair.split(",", 1)]
        parts.extend([""] * (max(0, 2 - len(parts))))  # Ensure 2 items

        # TODO: The spec doesn't say anything about if the keys should be
        #       considered case sensitive or not... logically they should
        #       be case-preserving and case-insensitive, but doing that
        #       would open up more cases where we might have duplicate
        #       entries.
        label, url = parts
        if label in urls:
            # The label already exists in our set of urls, so this field
            # is unparseable, and we can just add the whole thing to our
            # unparseable data and stop processing it.
            raise KeyError("duplicate labels in project urls")
        urls[label] = url

    return urls
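# ---------------------------------------------------------------------------
# Editor's note (illustrative, not part of the vendored module): the helper
# above splits each pair on the first comma and treats duplicate labels as
# unparseable. For example:
def _demo_parse_project_urls() -> None:
    print(_parse_project_urls(["Homepage, https://example.org"]))
    # {'Homepage': 'https://example.org'}
    print(_parse_project_urls(["Docs"]))  # no comma: the URL defaults to ''
    # {'Docs': ''}
    try:
        _parse_project_urls(["A, x", "A, y"])
    except KeyError as exc:
        print(exc)  # 'duplicate labels in project urls'
# ---------------------------------------------------------------------------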
def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
    """Get the body of the message."""
    # If our source is a str, then our caller has managed encodings for us,
    # and we don't need to deal with it.
    if isinstance(source, str):
        payload: str = msg.get_payload()
        return payload
    # If our source is a bytes, then we're managing the encoding and we need
    # to deal with it.
    else:
        bpayload: bytes = msg.get_payload(decode=True)
        try:
            return bpayload.decode("utf8", "strict")
        except UnicodeDecodeError:
            raise ValueError("payload in an invalid encoding")


# The various parse_FORMAT functions here are intended to be as lenient as
# possible in their parsing, while still returning a correctly typed
# RawMetadata.
#
# To aid in this, we also generally want to do as little touching of the
# data as possible, except where there are possibly some historic holdovers
# that make valid data awkward to work with.
#
# While this is a lower level, intermediate format than our ``Metadata``
# class, some light touch ups can make a massive difference in usability.

# Map METADATA fields to RawMetadata.
_EMAIL_TO_RAW_MAPPING = {
    "author": "author",
    "author-email": "author_email",
    "classifier": "classifiers",
    "description": "description",
    "description-content-type": "description_content_type",
    "download-url": "download_url",
    "dynamic": "dynamic",
    "home-page": "home_page",
    "keywords": "keywords",
    "license": "license",
    "maintainer": "maintainer",
    "maintainer-email": "maintainer_email",
    "metadata-version": "metadata_version",
    "name": "name",
    "obsoletes": "obsoletes",
    "obsoletes-dist": "obsoletes_dist",
    "platform": "platforms",
    "project-url": "project_urls",
    "provides": "provides",
    "provides-dist": "provides_dist",
    "provides-extra": "provides_extra",
    "requires": "requires",
    "requires-dist": "requires_dist",
    "requires-external": "requires_external",
    "requires-python": "requires_python",
    "summary": "summary",
    "supported-platform": "supported_platforms",
    "version": "version",
}
_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}


def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
    """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).

    This function returns a two-item tuple of dicts. The first dict is of
    recognized fields from the core metadata specification. Fields that can be
    parsed and translated into Python's built-in types are converted
    appropriately. All other fields are left as-is. Fields that are allowed to
    appear multiple times are stored as lists.

    The second dict contains all other fields from the metadata. This includes
    any unrecognized fields. It also includes any fields which are expected to
    be parsed into a built-in type but were not formatted appropriately. Finally,
    any fields that are expected to appear only once but are repeated are
    included in this dict.

    """
    raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {}
    unparsed: Dict[str, List[str]] = {}

    if isinstance(data, str):
        parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
    else:
        parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)

    # We have to wrap parsed.keys() in a set, because in the case of multiple
    # values for a key (a list), the key will appear multiple times in the
    # list of keys, but we're avoiding that by using get_all().
    for name in frozenset(parsed.keys()):
        # Header names in RFC are case insensitive, so we'll normalize to all
        # lower case to make comparisons easier.
        name = name.lower()

        # We use get_all() here, even for fields that aren't multiple use,
        # because otherwise someone could have e.g. two Name fields, and we
        # would just silently ignore it rather than doing something about it.
        headers = parsed.get_all(name) or []

        # The way the email module works when parsing bytes is that it
        # unconditionally decodes the bytes as ascii using the surrogateescape
        # handler. When you pull that data back out (such as with get_all() ),
        # it looks to see if the str has any surrogate escapes, and if it does
        # it wraps it in a Header object instead of returning the string.
        #
        # As such, we'll look for those Header objects, and fix up the encoding.
        value = []
        # Flag if we have run into any issues processing the headers, thus
        # signalling that the data belongs in 'unparsed'.
        valid_encoding = True
        for h in headers:
            # It's unclear if this can return more types than just a Header or
            # a str, so we'll just assert here to make sure.
            assert isinstance(h, (email.header.Header, str))

            # If it's a header object, we need to do our little dance to get
            # the real data out of it. In cases where there is invalid data
            # we're going to end up with mojibake, but there's no obvious, good
            # way around that without reimplementing parts of the Header object
            # ourselves.
            #
            # That should be fine since, if mojibake happens, this key is
            # going into the unparsed dict anyways.
            if isinstance(h, email.header.Header):
                # The Header object stores its data as chunks, and each chunk
                # can be independently encoded, so we'll need to check each
                # of them.
                chunks: List[Tuple[bytes, Optional[str]]] = []
                for bin, encoding in email.header.decode_header(h):
                    try:
                        bin.decode("utf8", "strict")
                    except UnicodeDecodeError:
                        # Enable mojibake.
                        encoding = "latin1"
                        valid_encoding = False
                    else:
                        encoding = "utf8"
                    chunks.append((bin, encoding))

                # Turn our chunks back into a Header object, then let that
                # Header object do the right thing to turn them into a
                # string for us.
                value.append(str(email.header.make_header(chunks)))
            # This is already a string, so just add it.
            else:
                value.append(h)

        # We've processed all of our values to get them into a list of str,
        # but we may have mojibake data, in which case this is an unparsed
        # field.
        if not valid_encoding:
            unparsed[name] = value
            continue

        raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
        if raw_name is None:
            # This is a bit of a weird situation, we've encountered a key that
            # we don't know what it means, so we don't know whether it's meant
            # to be a list or not.
            #
            # Since we can't really tell one way or another, we'll just leave it
            # as a list, even though it may be a single item list, because that's
            # what makes the most sense for email headers.
            unparsed[name] = value
            continue

        # If this is one of our string fields, then we'll check to see if our
        # value is a list of a single item. If it is then we'll assume that
        # it was emitted as a single string, and unwrap the str from inside
        # the list.
        #
        # If it's any other kind of data, then we haven't the faintest clue
        # what we should parse it as, and we have to just add it to our list
        # of unparsed stuff.
        if raw_name in _STRING_FIELDS and len(value) == 1:
            raw[raw_name] = value[0]
        # If this is one of our list of string fields, then we can just assign
        # the value, since email *only* has strings, and our get_all() call
        # above ensures that this is a list.
        elif raw_name in _LIST_FIELDS:
            raw[raw_name] = value
        # Special Case: Keywords
        # The keywords field is implemented in the metadata spec as a str,
        # but it conceptually is a list of strings, and is serialized using
        # ", ".join(keywords), so we'll do some light data massaging to turn
        # this into what it logically is.
        elif raw_name == "keywords" and len(value) == 1:
            raw[raw_name] = _parse_keywords(value[0])
        # Special Case: Project-URL
        # The project urls is implemented in the metadata spec as a list of
        # specially-formatted strings that represent a key and a value, which
        # is fundamentally a mapping, however the email format doesn't support
        # mappings in a sane way, so it was crammed into a list of strings
        # instead.
        #
        # We will do a little light data massaging to turn this into a map as
        # it logically should be.
        elif raw_name == "project_urls":
            try:
                raw[raw_name] = _parse_project_urls(value)
            except KeyError:
                unparsed[name] = value
        # Nothing that we've done has managed to parse this, so it'll just
        # throw it in our unparseable data and move on.
        else:
            unparsed[name] = value

    # We need to support getting the Description from the message payload in
    # addition to getting it from the headers. This does mean, though, there
    # is the possibility of it being set both ways, in which case we put both
    # in 'unparsed' since we don't know which is right.
    try:
        payload = _get_payload(parsed, data)
    except ValueError:
        unparsed.setdefault("description", []).append(
            parsed.get_payload(decode=isinstance(data, bytes))
        )
    else:
        if payload:
            # Check to see if we've already got a description, if so then both
            # it, and this body move to unparseable.
            if "description" in raw:
                description_header = cast(str, raw.pop("description"))
                unparsed.setdefault("description", []).extend(
                    [description_header, payload]
                )
            elif "description" in unparsed:
                unparsed["description"].append(payload)
            else:
                raw["description"] = payload

    # We need to cast our `raw` to a metadata, because a TypedDict only supports
    # literal key names, but we're computing our key names on purpose, but the
    # way this function is implemented, our `TypedDict` can only have valid key
    # names.
    return cast(RawMetadata, raw), unparsed
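# ---------------------------------------------------------------------------
# Editor's note (illustrative, not part of the vendored module): a hedged
# sketch of parse_email() on a tiny METADATA document. Field values are
# examples, not taken from any real project.
def _demo_parse_email() -> None:
    metadata = (
        "Metadata-Version: 2.1\n"
        "Name: sampleproject\n"
        "Version: 1.0\n"
        "Keywords: alpha, beta\n"
        "Project-URL: Homepage, https://example.org\n"
        "\n"
        "A longer description.\n"
    )
    raw, unparsed = parse_email(metadata)
    print(raw["name"], raw["version"])  # sampleproject 1.0
    print(raw["keywords"])              # ['alpha', 'beta']
    print(raw["project_urls"])          # {'Homepage': 'https://example.org'}
    print(raw["description"])           # 'A longer description.\n'
    print(unparsed)                     # {}
# ---------------------------------------------------------------------------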
_NOT_FOUND = object()


# Keep the two values in sync.
_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]

_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])


class _Validator(Generic[T]):
    """Validate a metadata field.

    All _process_*() methods correspond to a core metadata field. The method is
    called with the field's raw value. If the raw value is valid it is returned
    in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
    If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
    as appropriate).
    """

    name: str
    raw_name: str
    added: _MetadataVersion

    def __init__(
        self,
        *,
        added: _MetadataVersion = "1.0",
    ) -> None:
        self.added = added

    def __set_name__(self, _owner: "Metadata", name: str) -> None:
        self.name = name
        self.raw_name = _RAW_TO_EMAIL_MAPPING[name]

    def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
        # With Python 3.8, the caching can be replaced with functools.cached_property().
        # No need to check the cache as attribute lookup will resolve into the
        # instance's __dict__ before __get__ is called.
        cache = instance.__dict__
        value = instance._raw.get(self.name)

        # To make the _process_* methods easier, we'll check if the value is None
        # and if this field is NOT a required attribute, and if both of those
        # things are true, we'll skip the converter. This will mean that the
        # converters never have to deal with the None union.
        if self.name in _REQUIRED_ATTRS or value is not None:
            try:
                converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
            except AttributeError:
                pass
            else:
                value = converter(value)

        cache[self.name] = value
        try:
            del instance._raw[self.name]  # type: ignore[misc]
        except KeyError:
            pass

        return cast(T, value)

    def _invalid_metadata(
        self, msg: str, cause: Optional[Exception] = None
    ) -> InvalidMetadata:
        exc = InvalidMetadata(
            self.raw_name, msg.format_map({"field": repr(self.raw_name)})
        )
        exc.__cause__ = cause
        return exc

    def _process_metadata_version(self, value: str) -> _MetadataVersion:
        # Implicitly makes Metadata-Version required.
        if value not in _VALID_METADATA_VERSIONS:
            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
        return cast(_MetadataVersion, value)

    def _process_name(self, value: str) -> str:
        if not value:
            raise self._invalid_metadata("{field} is a required field")
        # Validate the name as a side-effect.
        try:
            utils.canonicalize_name(value, validate=True)
        except utils.InvalidName as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            )
        else:
            return value

    def _process_version(self, value: str) -> version_module.Version:
        if not value:
            raise self._invalid_metadata("{field} is a required field")
        try:
            return version_module.parse(value)
        except version_module.InvalidVersion as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            )

    def _process_summary(self, value: str) -> str:
        """Check the field contains no newlines."""
        if "\n" in value:
            raise self._invalid_metadata("{field} must be a single line")
        return value

    def _process_description_content_type(self, value: str) -> str:
        content_types = {"text/plain", "text/x-rst", "text/markdown"}
        message = email.message.EmailMessage()
        message["content-type"] = value

        content_type, parameters = (
            # Defaults to `text/plain` if parsing failed.
            message.get_content_type().lower(),
            message["content-type"].params,
        )
        # Check if content-type is valid or defaulted to `text/plain` and thus was
        # not parseable.
        if content_type not in content_types or content_type not in value.lower():
            raise self._invalid_metadata(
                f"{{field}} must be one of {list(content_types)}, not {value!r}"
            )

        charset = parameters.get("charset", "UTF-8")
        if charset != "UTF-8":
            raise self._invalid_metadata(
                f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
            )

        markdown_variants = {"GFM", "CommonMark"}
        variant = parameters.get("variant", "GFM")  # Use an acceptable default.
        if content_type == "text/markdown" and variant not in markdown_variants:
            raise self._invalid_metadata(
                f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
                f"not {variant!r}",
            )
        return value

    def _process_dynamic(self, value: List[str]) -> List[str]:
        for dynamic_field in map(str.lower, value):
            if dynamic_field in {"name", "version", "metadata-version"}:
                raise self._invalid_metadata(
                    f"{value!r} is not allowed as a dynamic field"
|
||||
)
|
||||
elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
|
||||
raise self._invalid_metadata(f"{value!r} is not a valid dynamic field")
|
||||
return list(map(str.lower, value))
|
||||
|
||||
def _process_provides_extra(
|
||||
self,
|
||||
value: List[str],
|
||||
) -> List[utils.NormalizedName]:
|
||||
normalized_names = []
|
||||
try:
|
||||
for name in value:
|
||||
normalized_names.append(utils.canonicalize_name(name, validate=True))
|
||||
except utils.InvalidName as exc:
|
||||
raise self._invalid_metadata(
|
||||
f"{name!r} is invalid for {{field}}", cause=exc
|
||||
)
|
||||
else:
|
||||
return normalized_names
|
||||
|
||||
def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
|
||||
try:
|
||||
return specifiers.SpecifierSet(value)
|
||||
except specifiers.InvalidSpecifier as exc:
|
||||
raise self._invalid_metadata(
|
||||
f"{value!r} is invalid for {{field}}", cause=exc
|
||||
)
|
||||
|
||||
def _process_requires_dist(
|
||||
self,
|
||||
value: List[str],
|
||||
) -> List[requirements.Requirement]:
|
||||
reqs = []
|
||||
try:
|
||||
for req in value:
|
||||
reqs.append(requirements.Requirement(req))
|
||||
except requirements.InvalidRequirement as exc:
|
||||
raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc)
|
||||
else:
|
||||
return reqs
|
||||
|
||||
|
||||
class Metadata:
|
||||
"""Representation of distribution metadata.
|
||||
|
||||
Compared to :class:`RawMetadata`, this class provides objects representing
|
||||
metadata fields instead of only using built-in types. Any invalid metadata
|
||||
will cause :exc:`InvalidMetadata` to be raised (with a
|
||||
:py:attr:`~BaseException.__cause__` attribute as appropriate).
|
||||
"""
|
||||
|
||||
_raw: RawMetadata
|
||||
|
||||
@classmethod
|
||||
def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
|
||||
"""Create an instance from :class:`RawMetadata`.
|
||||
|
||||
If *validate* is true, all metadata will be validated. All exceptions
|
||||
related to validation will be gathered and raised as an :class:`ExceptionGroup`.
|
||||
"""
|
||||
ins = cls()
|
||||
ins._raw = data.copy() # Mutations occur due to caching enriched values.
|
||||
|
||||
if validate:
|
||||
exceptions: List[Exception] = []
|
||||
try:
|
||||
metadata_version = ins.metadata_version
|
||||
metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
|
||||
except InvalidMetadata as metadata_version_exc:
|
||||
exceptions.append(metadata_version_exc)
|
||||
metadata_version = None
|
||||
|
||||
# Make sure to check for the fields that are present, the required
|
||||
# fields (so their absence can be reported).
|
||||
fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
|
||||
# Remove fields that have already been checked.
|
||||
fields_to_check -= {"metadata_version"}
|
||||
|
||||
for key in fields_to_check:
|
||||
try:
|
||||
if metadata_version:
|
||||
# Can't use getattr() as that triggers descriptor protocol which
|
||||
# will fail due to no value for the instance argument.
|
||||
try:
|
||||
field_metadata_version = cls.__dict__[key].added
|
||||
except KeyError:
|
||||
exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
|
||||
exceptions.append(exc)
|
||||
continue
|
||||
field_age = _VALID_METADATA_VERSIONS.index(
|
||||
field_metadata_version
|
||||
)
|
||||
if field_age > metadata_age:
|
||||
field = _RAW_TO_EMAIL_MAPPING[key]
|
||||
exc = InvalidMetadata(
|
||||
field,
|
||||
"{field} introduced in metadata version "
|
||||
"{field_metadata_version}, not {metadata_version}",
|
||||
)
|
||||
exceptions.append(exc)
|
||||
continue
|
||||
getattr(ins, key)
|
||||
except InvalidMetadata as exc:
|
||||
exceptions.append(exc)
|
||||
|
||||
if exceptions:
|
||||
raise ExceptionGroup("invalid metadata", exceptions)
|
||||
|
||||
return ins
|
||||
|
||||
@classmethod
|
||||
def from_email(
|
||||
cls, data: Union[bytes, str], *, validate: bool = True
|
||||
) -> "Metadata":
|
||||
"""Parse metadata from email headers.
|
||||
|
||||
If *validate* is true, the metadata will be validated. All exceptions
|
||||
related to validation will be gathered and raised as an :class:`ExceptionGroup`.
|
||||
"""
|
||||
raw, unparsed = parse_email(data)
|
||||
|
||||
if validate:
|
||||
exceptions: list[Exception] = []
|
||||
for unparsed_key in unparsed:
|
||||
if unparsed_key in _EMAIL_TO_RAW_MAPPING:
|
||||
message = f"{unparsed_key!r} has invalid data"
|
||||
else:
|
||||
message = f"unrecognized field: {unparsed_key!r}"
|
||||
exceptions.append(InvalidMetadata(unparsed_key, message))
|
||||
|
||||
if exceptions:
|
||||
raise ExceptionGroup("unparsed", exceptions)
|
||||
|
||||
try:
|
||||
return cls.from_raw(raw, validate=validate)
|
||||
except ExceptionGroup as exc_group:
|
||||
raise ExceptionGroup(
|
||||
"invalid or unparsed metadata", exc_group.exceptions
|
||||
) from None
|
||||
|
||||
metadata_version: _Validator[_MetadataVersion] = _Validator()
|
||||
""":external:ref:`core-metadata-metadata-version`
|
||||
(required; validated to be a valid metadata version)"""
|
||||
name: _Validator[str] = _Validator()
|
||||
""":external:ref:`core-metadata-name`
|
||||
(required; validated using :func:`~packaging.utils.canonicalize_name` and its
|
||||
*validate* parameter)"""
|
||||
version: _Validator[version_module.Version] = _Validator()
|
||||
""":external:ref:`core-metadata-version` (required)"""
|
||||
dynamic: _Validator[Optional[List[str]]] = _Validator(
|
||||
added="2.2",
|
||||
)
|
||||
""":external:ref:`core-metadata-dynamic`
|
||||
(validated against core metadata field names and lowercased)"""
|
||||
platforms: _Validator[Optional[List[str]]] = _Validator()
|
||||
""":external:ref:`core-metadata-platform`"""
|
||||
supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
||||
""":external:ref:`core-metadata-supported-platform`"""
|
||||
summary: _Validator[Optional[str]] = _Validator()
|
||||
""":external:ref:`core-metadata-summary` (validated to contain no newlines)"""
|
||||
description: _Validator[Optional[str]] = _Validator() # TODO 2.1: can be in body
|
||||
""":external:ref:`core-metadata-description`"""
|
||||
description_content_type: _Validator[Optional[str]] = _Validator(added="2.1")
|
||||
""":external:ref:`core-metadata-description-content-type` (validated)"""
|
||||
keywords: _Validator[Optional[List[str]]] = _Validator()
|
||||
""":external:ref:`core-metadata-keywords`"""
|
||||
home_page: _Validator[Optional[str]] = _Validator()
|
||||
""":external:ref:`core-metadata-home-page`"""
|
||||
download_url: _Validator[Optional[str]] = _Validator(added="1.1")
|
||||
""":external:ref:`core-metadata-download-url`"""
|
||||
author: _Validator[Optional[str]] = _Validator()
|
||||
""":external:ref:`core-metadata-author`"""
|
||||
author_email: _Validator[Optional[str]] = _Validator()
|
||||
""":external:ref:`core-metadata-author-email`"""
|
||||
maintainer: _Validator[Optional[str]] = _Validator(added="1.2")
|
||||
""":external:ref:`core-metadata-maintainer`"""
|
||||
maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2")
|
||||
""":external:ref:`core-metadata-maintainer-email`"""
|
||||
license: _Validator[Optional[str]] = _Validator()
|
||||
""":external:ref:`core-metadata-license`"""
|
||||
classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
||||
""":external:ref:`core-metadata-classifier`"""
|
||||
requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator(
|
||||
added="1.2"
|
||||
)
|
||||
""":external:ref:`core-metadata-requires-dist`"""
|
||||
requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator(
|
||||
added="1.2"
|
||||
)
|
||||
""":external:ref:`core-metadata-requires-python`"""
|
||||
# Because `Requires-External` allows for non-PEP 440 version specifiers, we
|
||||
# don't do any processing on the values.
|
||||
requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2")
|
||||
""":external:ref:`core-metadata-requires-external`"""
|
||||
project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2")
|
||||
""":external:ref:`core-metadata-project-url`"""
|
||||
# PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
|
||||
# regardless of metadata version.
|
||||
provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator(
|
||||
added="2.1",
|
||||
)
|
||||
""":external:ref:`core-metadata-provides-extra`"""
|
||||
provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
|
||||
""":external:ref:`core-metadata-provides-dist`"""
|
||||
obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
|
||||
""":external:ref:`core-metadata-obsoletes-dist`"""
|
||||
requires: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
||||
"""``Requires`` (deprecated)"""
|
||||
provides: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
||||
"""``Provides`` (deprecated)"""
|
||||
obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
||||
"""``Obsoletes`` (deprecated)"""
|
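
# Illustrative usage sketch (not part of the vendored file); the field values
# below are made up, showing how `Metadata.from_email` ties the pieces above
# together:
#
#     meta = Metadata.from_email(
#         "Metadata-Version: 2.1\nName: example\nVersion: 1.0\n"
#     )
#     str(meta.version)   # '1.0' -- enriched to a version.Version by _Validator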
0 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/py.typed generated vendored Normal file
90 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/requirements.py generated vendored Normal file
@@ -0,0 +1,90 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from typing import Any, Iterator, Optional, Set

from ._parser import parse_requirement as _parse_requirement
from ._tokenizer import ParserSyntaxError
from .markers import Marker, _normalize_extra_values
from .specifiers import SpecifierSet
from .utils import canonicalize_name


class InvalidRequirement(ValueError):
    """
    An invalid requirement was found; users should refer to PEP 508.
    """


class Requirement:
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        try:
            parsed = _parse_requirement(requirement_string)
        except ParserSyntaxError as e:
            raise InvalidRequirement(str(e)) from e

        self.name: str = parsed.name
        self.url: Optional[str] = parsed.url or None
        self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
        self.marker: Optional[Marker] = None
        if parsed.marker is not None:
            self.marker = Marker.__new__(Marker)
            self.marker._markers = _normalize_extra_values(parsed.marker)

    def _iter_parts(self, name: str) -> Iterator[str]:
        yield name

        if self.extras:
            formatted_extras = ",".join(sorted(self.extras))
            yield f"[{formatted_extras}]"

        if self.specifier:
            yield str(self.specifier)

        if self.url:
            yield f"@ {self.url}"
            if self.marker:
                yield " "

        if self.marker:
            yield f"; {self.marker}"

    def __str__(self) -> str:
        return "".join(self._iter_parts(self.name))

    def __repr__(self) -> str:
        return f"<Requirement('{self}')>"

    def __hash__(self) -> int:
        return hash(
            (
                self.__class__.__name__,
                *self._iter_parts(canonicalize_name(self.name)),
            )
        )

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Requirement):
            return NotImplemented

        return (
            canonicalize_name(self.name) == canonicalize_name(other.name)
            and self.extras == other.extras
            and self.specifier == other.specifier
            and self.url == other.url
            and self.marker == other.marker
        )
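
# Illustrative usage sketch (not part of the vendored file), showing how the
# class above splits a PEP 508 string into its parts:
#
#     r = Requirement('requests[security]>=2.8.1; python_version < "3.12"')
#     r.name            # 'requests'
#     r.extras          # {'security'}
#     str(r.specifier)  # '>=2.8.1'
#     str(r.marker)     # 'python_version < "3.12"'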
1030 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/specifiers.py generated vendored Normal file
File diff suppressed because it is too large
553 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/tags.py generated vendored Normal file
@@ -0,0 +1,553 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

import logging
import platform
import struct
import subprocess
import sys
import sysconfig
from importlib.machinery import EXTENSION_SUFFIXES
from typing import (
    Dict,
    FrozenSet,
    Iterable,
    Iterator,
    List,
    Optional,
    Sequence,
    Tuple,
    Union,
    cast,
)

from . import _manylinux, _musllinux

logger = logging.getLogger(__name__)

PythonVersion = Sequence[int]
MacVersion = Tuple[int, int]

INTERPRETER_SHORT_NAMES: Dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


_32_BIT_INTERPRETER = struct.calcsize("P") == 4


class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # The __hash__ of every single element in a Set[Tag] will be evaluated each time
        # that a set calls its `.isdisjoint()` method, which may be called hundreds of
        # times when scanning a page of links for packages with tags matching that
        # Set[Tag]. Pre-computing the value here produces significant speedups for
        # downstream consumers.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented

        return (
            (self._hash == other._hash)  # Short-circuit ASAP for perf reasons.
            and (self._platform == other._platform)
            and (self._abi == other._abi)
            and (self._interpreter == other._interpreter)
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return f"{self._interpreter}-{self._abi}-{self._platform}"

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"


def parse_tag(tag: str) -> FrozenSet[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set.
    """
    tags = set()
    interpreters, abis, platforms = tag.split("-")
    for interpreter in interpreters.split("."):
        for abi in abis.split("."):
            for platform_ in platforms.split("."):
                tags.add(Tag(interpreter, abi, platform_))
    return frozenset(tags)
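
# Illustrative usage sketch (not part of the vendored file): a compressed tag
# set expands into the cartesian product of its dot-separated components.
#
#     sorted(str(t) for t in parse_tag("py2.py3-none-any"))
#     # ['py2-none-any', 'py3-none-any']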

def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
    value: Union[int, str, None] = sysconfig.get_config_var(name)
    if value is None and warn:
        logger.debug(
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
        )
    return value


def _normalize_string(string: str) -> str:
    return string.replace(".", "_").replace("-", "_").replace(" ", "_")


def _abi3_applies(python_version: PythonVersion) -> bool:
    """
    Determine if the Python version supports abi3.

    PEP 384 was first implemented in Python 3.2.
    """
    return len(python_version) > 1 and tuple(python_version) >= (3, 2)


def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}")
    abis.insert(
        0,
        "cp{version}{debug}{pymalloc}{ucs4}".format(
            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
        ),
    )
    return abis


def cpython_tags(
    python_version: Optional[PythonVersion] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or platform_tags())
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
    if _abi3_applies(python_version):
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    if _abi3_applies(python_version):
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)


def _generic_abi() -> List[str]:
    """
    Return the ABI tag based on EXT_SUFFIX.
    """
    # The following are examples of `EXT_SUFFIX`.
    # We want to keep the parts which are related to the ABI and remove the
    # parts which are related to the platform:
    # - linux:   '.cpython-310-x86_64-linux-gnu.so' => cp310
    # - mac:     '.cpython-310-darwin.so'           => cp310
    # - win:     '.cp310-win_amd64.pyd'             => cp310
    # - win:     '.pyd'                             => cp37 (uses _cpython_abis())
    # - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
    #            => graalpy_38_native

    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
    parts = ext_suffix.split(".")
    if len(parts) < 3:
        # CPython 3.7 and earlier use ".pyd" on Windows.
        return _cpython_abis(sys.version_info[:2])
    soabi = parts[1]
    if soabi.startswith("cpython"):
        # non-windows
        abi = "cp" + soabi.split("-")[1]
    elif soabi.startswith("cp"):
        # windows
        abi = soabi.split("-")[0]
    elif soabi.startswith("pypy"):
        abi = "-".join(soabi.split("-")[:2])
    elif soabi.startswith("graalpy"):
        abi = "-".join(soabi.split("-")[:3])
    elif soabi:
        # pyston, ironpython, others?
        abi = soabi
    else:
        return []
    return [_normalize_string(abi)]


def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.
    """
    if not interpreter:
        interp_name = interpreter_name()
        interp_version = interpreter_version(warn=warn)
        interpreter = "".join([interp_name, interp_version])
    if abis is None:
        abis = _generic_abi()
    else:
        abis = list(abis)
    platforms = list(platforms or platform_tags())
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)


def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yields Python versions in descending order.

    After the latest version, the major-only version will be yielded, and then
    all previous versions of that major version.
    """
    if len(py_version) > 1:
        yield f"py{_version_nodot(py_version[:2])}"
    yield f"py{py_version[0]}"
    if len(py_version) > 1:
        for minor in range(py_version[1] - 1, -1, -1):
            yield f"py{_version_nodot((py_version[0], minor))}"


def compatible_tags(
    python_version: Optional[PythonVersion] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platforms = list(platforms or platform_tags())
    for version in _py_interpreter_range(python_version):
        for platform_ in platforms:
            yield Tag(version, "none", platform_)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    for version in _py_interpreter_range(python_version):
        yield Tag(version, "none", "any")
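
# Illustrative usage sketch (not part of the vendored file): for CPython 3.11
# the most specific pure-Python tags come first, e.g.
#
#     [str(t) for t in compatible_tags((3, 11), "cp311", ["manylinux2014_x86_64"])][:3]
#     # ['py311-none-manylinux2014_x86_64',
#     #  'py3-none-manylinux2014_x86_64',
#     #  'py310-none-manylinux2014_x86_64']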

def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    if not is_32bit:
        return arch

    if arch.startswith("ppc"):
        return "ppc"

    return "i386"


def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
    formats = [cpu_arch]
    if cpu_arch == "x86_64":
        if version < (10, 4):
            return []
        formats.extend(["intel", "fat64", "fat32"])

    elif cpu_arch == "i386":
        if version < (10, 4):
            return []
        formats.extend(["intel", "fat32", "fat"])

    elif cpu_arch == "ppc64":
        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
        if version > (10, 5) or version < (10, 4):
            return []
        formats.append("fat64")

    elif cpu_arch == "ppc":
        if version > (10, 6):
            return []
        formats.extend(["fat32", "fat"])

    if cpu_arch in {"arm64", "x86_64"}:
        formats.append("universal2")

    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
        formats.append("universal")

    return formats


def mac_platforms(
    version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
        if version == (10, 16):
            # When built against an older macOS SDK, Python will report macOS 10.16
            # instead of the real version.
            version_str = subprocess.run(
                [
                    sys.executable,
                    "-sS",
                    "-c",
                    "import platform; print(platform.mac_ver()[0])",
                ],
                check=True,
                env={"SYSTEM_VERSION_COMPAT": "0"},
                stdout=subprocess.PIPE,
                text=True,
            ).stdout
            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    else:
        version = version
    if arch is None:
        arch = _mac_arch(cpu_arch)
    else:
        arch = arch

    if (10, 0) <= version and version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number. The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=10, minor=minor_version, binary_format=binary_format
                )

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number. The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=major_version, minor=0, binary_format=binary_format
                )

    if version >= (11, 0):
        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield "macosx_{major}_{minor}_{binary_format}".format(
                        major=compat_version[0],
                        minor=compat_version[1],
                        binary_format=binary_format,
                    )
        else:
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_format = "universal2"
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=compat_version[0],
                    minor=compat_version[1],
                    binary_format=binary_format,
                )


def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    linux = _normalize_string(sysconfig.get_platform())
    if not linux.startswith("linux_"):
        # we should never be here, just yield the sysconfig one and return
        yield linux
        return
    if is_32bit:
        if linux == "linux_x86_64":
            linux = "linux_i686"
        elif linux == "linux_aarch64":
            linux = "linux_armv8l"
    _, arch = linux.split("_", 1)
    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
    yield from _manylinux.platform_tags(archs)
    yield from _musllinux.platform_tags(archs)
    for arch in archs:
        yield f"linux_{arch}"


def _generic_platforms() -> Iterator[str]:
    yield _normalize_string(sysconfig.get_platform())


def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    if platform.system() == "Darwin":
        return mac_platforms()
    elif platform.system() == "Linux":
        return _linux_platforms()
    else:
        return _generic_platforms()


def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.

    Some implementations have a reserved, two-letter abbreviation which will
    be returned when appropriate.
    """
    name = sys.implementation.name
    return INTERPRETER_SHORT_NAMES.get(name) or name


def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter.
    """
    version = _get_config_var("py_version_nodot", warn=warn)
    if version:
        version = str(version)
    else:
        version = _version_nodot(sys.version_info[:2])
    return version


def _version_nodot(version: PythonVersion) -> str:
    return "".join(map(str, version))


def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """

    interp_name = interpreter_name()
    if interp_name == "cp":
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    if interp_name == "pp":
        interp = "pp3"
    elif interp_name == "cp":
        interp = "cp" + interpreter_version(warn=warn)
    else:
        interp = None
    yield from compatible_tags(interpreter=interp)
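
# Illustrative usage sketch (not part of the vendored file): the first tag
# yielded by sys_tags() is the most specific one for the running interpreter;
# the exact value is platform-dependent, e.g. on CPython 3.11 under glibc
# x86-64 Linux something like
#
#     str(next(sys_tags()))   # 'cp311-cp311-manylinux_2_35_x86_64' (varies)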
172 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/utils.py generated vendored Normal file
@@ -0,0 +1,172 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

import re
from typing import FrozenSet, NewType, Tuple, Union, cast

from .tags import Tag, parse_tag
from .version import InvalidVersion, Version

BuildTag = Union[Tuple[()], Tuple[int, str]]
NormalizedName = NewType("NormalizedName", str)


class InvalidName(ValueError):
    """
    An invalid distribution name; users should refer to the packaging user guide.
    """


class InvalidWheelFilename(ValueError):
    """
    An invalid wheel filename was found; users should refer to PEP 427.
    """


class InvalidSdistFilename(ValueError):
    """
    An invalid sdist filename was found; users should refer to the packaging user guide.
    """


# Core metadata spec for `Name`
_validate_regex = re.compile(
    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
_canonicalize_regex = re.compile(r"[-_.]+")
_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")


def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
    if validate and not _validate_regex.match(name):
        raise InvalidName(f"name is invalid: {name!r}")
    # This is taken from PEP 503.
    value = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, value)
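
# Illustrative usage sketch (not part of the vendored file): runs of '-', '_',
# and '.' collapse to a single '-' and the result is lowercased (PEP 503).
#
#     canonicalize_name("Foo.Bar_-baz")          # 'foo-bar-baz'
#     canonicalize_name("-bad-", validate=True)  # raises InvalidName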

def is_normalized_name(name: str) -> bool:
    return _normalized_regex.match(name) is not None


def canonicalize_version(
    version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
    """
    This is very similar to Version.__str__, but has one subtle difference
    with the way it handles the release segment.
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Legacy versions cannot be normalized
            return version
    else:
        parsed = version

    parts = []

    # Epoch
    if parsed.epoch != 0:
        parts.append(f"{parsed.epoch}!")

    # Release segment
    release_segment = ".".join(str(x) for x in parsed.release)
    if strip_trailing_zero:
        # NB: This strips trailing '.0's to normalize
        release_segment = re.sub(r"(\.0)+$", "", release_segment)
    parts.append(release_segment)

    # Pre-release
    if parsed.pre is not None:
        parts.append("".join(str(x) for x in parsed.pre))

    # Post-release
    if parsed.post is not None:
        parts.append(f".post{parsed.post}")

    # Development release
    if parsed.dev is not None:
        parts.append(f".dev{parsed.dev}")

    # Local version segment
    if parsed.local is not None:
        parts.append(f"+{parsed.local}")

    return "".join(parts)
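
# Illustrative usage sketch (not part of the vendored file): the subtle
# difference from Version.__str__ is the trailing-zero handling.
#
#     canonicalize_version("1.4.0")                             # '1.4'
#     canonicalize_version("1.4.0", strip_trailing_zero=False)  # '1.4.0'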

def parse_wheel_filename(
    filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename}"
        )

    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename}"
        )

    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name.
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename}")
    name = canonicalize_name(name_part)

    try:
        version = Version(parts[1])
    except InvalidVersion as e:
        raise InvalidWheelFilename(
            f"Invalid wheel filename (invalid version): {filename}"
        ) from e

    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in '{filename}'"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
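
# Illustrative usage sketch (not part of the vendored file):
#
#     name, version, build, tags = parse_wheel_filename("pip-24.0-py3-none-any.whl")
#     # name == 'pip', version == Version('24.0'), build == (),
#     # tags == frozenset({Tag('py3', 'none', 'any')})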

def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")

    name = canonicalize_name(name_part)

    try:
        version = Version(version_part)
    except InvalidVersion as e:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (invalid version): {filename}"
        ) from e

    return (name, version)
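
# Illustrative usage sketch (not part of the vendored file):
#
#     parse_sdist_filename("pip-24.0.tar.gz")   # ('pip', <Version('24.0')>)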
563 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pylib/packaging/version.py generated vendored Normal file
@@ -0,0 +1,563 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
"""
.. testsetup::

    from packaging.version import parse, Version
"""

import itertools
import re
from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union

from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType

__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]

LocalType = Tuple[Union[int, str], ...]

CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
CmpLocalType = Union[
    NegativeInfinityType,
    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
]
CmpKey = Tuple[
    int,
    Tuple[int, ...],
    CmpPrePostDevType,
    CmpPrePostDevType,
    CmpPrePostDevType,
    CmpLocalType,
]
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]


class _Version(NamedTuple):
    epoch: int
    release: Tuple[int, ...]
    dev: Optional[Tuple[str, int]]
    pre: Optional[Tuple[str, int]]
    post: Optional[Tuple[str, int]]
    local: Optional[LocalType]


def parse(version: str) -> "Version":
    """Parse the given version string.

    >>> parse('1.0.dev1')
    <Version('1.0.dev1')>

    :param version: The version string to parse.
    :raises InvalidVersion: When the version string is not a valid version.
    """
    return Version(version)


class InvalidVersion(ValueError):
    """Raised when a version string is not a valid version.

    >>> Version("invalid")
    Traceback (most recent call last):
        ...
    packaging.version.InvalidVersion: Invalid version: 'invalid'
    """


class _BaseVersion:
    _key: Tuple[Any, ...]

    def __hash__(self) -> int:
        return hash(self._key)

    # Please keep the duplicated `isinstance` check
    # in the six comparisons hereunder
    # unless you find a way to avoid adding overhead function calls.
    def __lt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key < other._key

    def __le__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key <= other._key

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key == other._key

    def __ge__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key >= other._key

    def __gt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key > other._key

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key != other._key


# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
_VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.

The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.

:meta hide-value:
"""
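
# Illustrative usage sketch (not part of the vendored file): embedding the
# pattern in a larger expression, compiled with the documented flags. This
# mirrors what Version._regex does below.
#
#     _full = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
#     bool(_full.match("1.0.post1"))   # True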
|
||||
|
||||
class Version(_BaseVersion):
|
||||
"""This class abstracts handling of a project's versions.
|
||||
|
||||
A :class:`Version` instance is comparison aware and can be compared and
|
||||
sorted using the standard Python interfaces.
|
||||
|
||||
>>> v1 = Version("1.0a5")
|
||||
>>> v2 = Version("1.0")
|
||||
>>> v1
|
||||
<Version('1.0a5')>
|
||||
>>> v2
|
||||
<Version('1.0')>
|
||||
>>> v1 < v2
|
||||
True
|
||||
>>> v1 == v2
|
||||
False
|
||||
>>> v1 > v2
|
||||
False
|
||||
>>> v1 >= v2
|
||||
False
|
||||
>>> v1 <= v2
|
||||
True
|
||||
"""
|
||||
|
||||
_regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
|
||||
_key: CmpKey
|
||||
|
||||
def __init__(self, version: str) -> None:
|
||||
"""Initialize a Version object.
|
||||
|
||||
:param version:
|
||||
The string representation of a version which will be parsed and normalized
|
||||
before use.
|
||||
:raises InvalidVersion:
|
||||
If the ``version`` does not conform to PEP 440 in any way then this
|
||||
exception will be raised.
|
||||
"""
|
||||
|
||||
# Validate the version and parse it into pieces
|
||||
match = self._regex.search(version)
|
||||
if not match:
|
||||
raise InvalidVersion(f"Invalid version: '{version}'")
|
||||
|
||||
# Store the parsed out pieces of the version
|
||||
self._version = _Version(
|
||||
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
|
||||
release=tuple(int(i) for i in match.group("release").split(".")),
|
||||
pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
|
||||
post=_parse_letter_version(
|
||||
match.group("post_l"), match.group("post_n1") or match.group("post_n2")
|
||||
),
|
||||
dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
|
||||
local=_parse_local_version(match.group("local")),
|
||||
)
|
||||
|
||||
# Generate a key which will be used for sorting
|
||||
self._key = _cmpkey(
|
||||
self._version.epoch,
|
||||
self._version.release,
|
||||
self._version.pre,
|
||||
self._version.post,
|
||||
self._version.dev,
|
||||
self._version.local,
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""A representation of the Version that shows all internal state.
|
||||
|
||||
>>> Version('1.0.0')
|
||||
<Version('1.0.0')>
|
||||
"""
|
||||
return f"<Version('{self}')>"
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""A string representation of the version that can be rounded-tripped.
|
||||
|
||||
>>> str(Version("1.0a5"))
|
||||
'1.0a5'
|
||||
"""
|
||||
parts = []
|
||||
|
||||
# Epoch
|
||||
if self.epoch != 0:
|
||||
parts.append(f"{self.epoch}!")
|
||||
|
||||
# Release segment
|
||||
parts.append(".".join(str(x) for x in self.release))
|
||||
|
||||
# Pre-release
|
||||
if self.pre is not None:
|
||||
parts.append("".join(str(x) for x in self.pre))
|
||||
|
||||
# Post-release
|
||||
if self.post is not None:
|
||||
parts.append(f".post{self.post}")
|
||||
|
||||
# Development release
|
||||
if self.dev is not None:
|
||||
parts.append(f".dev{self.dev}")
|
||||
|
||||
# Local version segment
|
||||
if self.local is not None:
|
||||
parts.append(f"+{self.local}")
|
||||
|
||||
return "".join(parts)
|
||||
|
||||
@property
|
||||
def epoch(self) -> int:
|
||||
"""The epoch of the version.
|
||||
|
||||
>>> Version("2.0.0").epoch
|
||||
0
|
||||
>>> Version("1!2.0.0").epoch
|
||||
1
|
||||
"""
|
||||
return self._version.epoch
|
||||
|
||||
@property
|
||||
def release(self) -> Tuple[int, ...]:
|
||||
"""The components of the "release" segment of the version.
|
||||
|
||||
>>> Version("1.2.3").release
|
||||
(1, 2, 3)
|
||||
>>> Version("2.0.0").release
|
||||
(2, 0, 0)
|
||||
>>> Version("1!2.0.0.post0").release
|
||||
(2, 0, 0)
|
||||
|
||||
Includes trailing zeroes but not the epoch or any pre-release / development /
|
||||
post-release suffixes.
|
||||
"""
|
||||
return self._version.release
|
||||
|
||||
@property
|
||||
def pre(self) -> Optional[Tuple[str, int]]:
|
||||
"""The pre-release segment of the version.
|
||||
|
||||
>>> print(Version("1.2.3").pre)
|
||||
None
|
||||
>>> Version("1.2.3a1").pre
|
||||
('a', 1)
|
||||
>>> Version("1.2.3b1").pre
|
||||
('b', 1)
|
||||
>>> Version("1.2.3rc1").pre
|
||||
('rc', 1)
|
||||
"""
|
||||
return self._version.pre
|
||||
|
||||
@property
|
||||
def post(self) -> Optional[int]:
|
||||
"""The post-release number of the version.
|
||||
|
||||
>>> print(Version("1.2.3").post)
|
||||
None
|
||||
>>> Version("1.2.3.post1").post
|
||||
1
|
||||
"""
|
||||
return self._version.post[1] if self._version.post else None
|
||||
|
||||
@property
|
||||
def dev(self) -> Optional[int]:
|
||||
"""The development number of the version.
|
||||
|
||||
>>> print(Version("1.2.3").dev)
|
||||
None
|
||||
>>> Version("1.2.3.dev1").dev
|
||||
1
|
||||
"""
|
||||
return self._version.dev[1] if self._version.dev else None
|
||||
|
||||
@property
|
||||
def local(self) -> Optional[str]:
|
||||
"""The local version segment of the version.
|
||||
|
||||
>>> print(Version("1.2.3").local)
|
||||
None
|
||||
>>> Version("1.2.3+abc").local
|
||||
'abc'
|
||||
"""
|
||||
if self._version.local:
|
||||
return ".".join(str(x) for x in self._version.local)
|
||||
else:
|
||||
return None
|
||||
|
||||
@property
|
||||
def public(self) -> str:
|
||||
"""The public portion of the version.
|
||||
|
||||
>>> Version("1.2.3").public
|
||||
'1.2.3'
|
||||
>>> Version("1.2.3+abc").public
|
||||
'1.2.3'
|
||||
>>> Version("1.2.3+abc.dev1").public
|
||||
'1.2.3'
|
||||
"""
|
||||
return str(self).split("+", 1)[0]
|
||||
|
||||
@property
|
||||
def base_version(self) -> str:
|
||||
"""The "base version" of the version.
|
||||
|
||||
>>> Version("1.2.3").base_version
|
||||
'1.2.3'
|
||||
>>> Version("1.2.3+abc").base_version
|
||||
'1.2.3'
|
||||
>>> Version("1!1.2.3+abc.dev1").base_version
|
||||
'1!1.2.3'
|
||||
|
||||
The "base version" is the public version of the project without any pre or post
|
||||
release markers.
|
||||
"""
|
||||
parts = []
|
||||
|
||||
# Epoch
|
||||
if self.epoch != 0:
|
||||
parts.append(f"{self.epoch}!")
|
||||
|
||||
# Release segment
|
||||
parts.append(".".join(str(x) for x in self.release))
|
||||
|
||||
return "".join(parts)
|
||||
|
||||
@property
|
||||
def is_prerelease(self) -> bool:
|
||||
"""Whether this version is a pre-release.
|
||||
|
||||
>>> Version("1.2.3").is_prerelease
|
||||
False
|
||||
>>> Version("1.2.3a1").is_prerelease
|
||||
True
|
||||
>>> Version("1.2.3b1").is_prerelease
|
||||
True
|
||||
>>> Version("1.2.3rc1").is_prerelease
|
||||
True
|
||||
>>> Version("1.2.3dev1").is_prerelease
|
||||
True
|
||||
"""
|
||||
return self.dev is not None or self.pre is not None
|
||||
|
||||
@property
|
||||
def is_postrelease(self) -> bool:
|
||||
"""Whether this version is a post-release.
|
||||
|
||||
>>> Version("1.2.3").is_postrelease
|
||||
False
|
||||
>>> Version("1.2.3.post1").is_postrelease
|
||||
True
|
||||
"""
|
||||
return self.post is not None
|
||||
|
||||
@property
|
||||
def is_devrelease(self) -> bool:
|
||||
"""Whether this version is a development release.
|
||||
|
||||
>>> Version("1.2.3").is_devrelease
|
||||
False
|
||||
>>> Version("1.2.3.dev1").is_devrelease
|
||||
True
|
||||
"""
|
||||
return self.dev is not None
|
||||
|
||||
@property
|
||||
def major(self) -> int:
|
||||
"""The first item of :attr:`release` or ``0`` if unavailable.
|
||||
|
||||
>>> Version("1.2.3").major
|
||||
1
|
||||
"""
|
||||
return self.release[0] if len(self.release) >= 1 else 0
|
||||
|
||||
@property
|
||||
def minor(self) -> int:
|
||||
"""The second item of :attr:`release` or ``0`` if unavailable.
|
||||
|
||||
>>> Version("1.2.3").minor
|
||||
2
|
||||
>>> Version("1").minor
|
||||
0
|
||||
"""
|
||||
return self.release[1] if len(self.release) >= 2 else 0
|
||||
|
||||
@property
|
||||
def micro(self) -> int:
|
||||
"""The third item of :attr:`release` or ``0`` if unavailable.
|
||||
|
||||
>>> Version("1.2.3").micro
|
||||
3
|
||||
>>> Version("1").micro
|
||||
0
|
||||
"""
|
||||
return self.release[2] if len(self.release) >= 3 else 0
|
||||
|
||||
|
||||
def _parse_letter_version(
|
||||
letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
|
||||
) -> Optional[Tuple[str, int]]:
|
||||
|
||||
if letter:
|
||||
# We consider there to be an implicit 0 in a pre-release if there is
|
||||
# not a numeral associated with it.
|
||||
if number is None:
|
||||
number = 0
|
||||
|
||||
# We normalize any letters to their lower case form
|
||||
letter = letter.lower()
|
||||
|
||||
# We consider some words to be alternate spellings of other words and
|
||||
# in those cases we want to normalize the spellings to our preferred
|
||||
# spelling.
|
||||
if letter == "alpha":
|
||||
letter = "a"
|
||||
elif letter == "beta":
|
||||
letter = "b"
|
||||
elif letter in ["c", "pre", "preview"]:
|
||||
letter = "rc"
|
||||
elif letter in ["rev", "r"]:
|
||||
letter = "post"
|
||||
|
||||
return letter, int(number)
|
||||
if not letter and number:
|
||||
# We assume if we are given a number, but we are not given a letter
|
||||
# then this is using the implicit post release syntax (e.g. 1.0-1)
|
||||
letter = "post"
|
||||
|
||||
return letter, int(number)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
_local_version_separators = re.compile(r"[\._-]")
|
||||
|
||||
|
||||
def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
|
||||
"""
|
||||
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
|
||||
"""
|
||||
if local is not None:
|
||||
return tuple(
|
||||
part.lower() if not part.isdigit() else int(part)
|
||||
for part in _local_version_separators.split(local)
|
||||
)
|
||||
return None
|


def _cmpkey(
    epoch: int,
    release: Tuple[int, ...],
    pre: Optional[Tuple[str, int]],
    post: Optional[Tuple[str, int]],
    dev: Optional[Tuple[str, int]],
    local: Optional[LocalType],
) -> CmpKey:
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll reverse the list, drop all the now
    # leading zeros until we come to something non-zero, then re-reverse it
    # back into the correct order and make it a tuple to use as our sorting
    # key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: CmpPrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: CmpPrePostDevType = NegativeInfinity
    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: CmpPrePostDevType = Infinity
    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: CmpLocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP 440:
        # - Alphanumeric segments sort before numeric segments
        # - Alphanumeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
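The net effect of these substitutions is the PEP 440 total order, observable through the public API (trailing zeros are also stripped, so 1.0 and 1.0.0 compare equal):

from packaging.version import Version

versions = ["1.0.post1", "1.0", "1.0rc1", "1.0.dev0", "1.0a1", "1.0+local"]
print(sorted(versions, key=Version))
# ['1.0.dev0', '1.0a1', '1.0rc1', '1.0', '1.0+local', '1.0.post1']
assert Version("1.0") == Version("1.0.0")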
119 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/pyproject.toml generated vendored Normal file
@@ -0,0 +1,119 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "gyp-next"
version = "0.16.1"
authors = [
  { name="Node.js contributors", email="ryzokuken@disroot.org" },
]
description = "A fork of the GYP build system for use in the Node.js projects"
readme = "README.md"
license = { file="LICENSE" }
requires-python = ">=3.8"
# The Python module "packaging" is vendored in the "pylib/packaging" directory to support Python >= 3.12.
# dependencies = ["packaging>=23.1"]  # Uncomment this line if the vendored version is removed.
classifiers = [
  "Development Status :: 3 - Alpha",
  "Environment :: Console",
  "Intended Audience :: Developers",
  "License :: OSI Approved :: BSD License",
  "Natural Language :: English",
  "Programming Language :: Python",
  "Programming Language :: Python :: 3",
  "Programming Language :: Python :: 3.8",
  "Programming Language :: Python :: 3.9",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
]

[project.optional-dependencies]
dev = ["flake8", "ruff", "pytest"]

[project.scripts]
gyp = "gyp:script_main"

[project.urls]
"Homepage" = "https://github.com/nodejs/gyp-next"

[tool.ruff]
lint.select = [
  "C4",   # flake8-comprehensions
  "C90",  # McCabe cyclomatic complexity
  "DTZ",  # flake8-datetimez
  "E",    # pycodestyle
  "F",    # Pyflakes
  "G",    # flake8-logging-format
  "ICN",  # flake8-import-conventions
  "INT",  # flake8-gettext
  "PL",   # Pylint
  "PYI",  # flake8-pyi
  "RSE",  # flake8-raise
  "RUF",  # Ruff-specific rules
  "T10",  # flake8-debugger
  "TCH",  # flake8-type-checking
  "TID",  # flake8-tidy-imports
  "UP",   # pyupgrade
  "W",    # pycodestyle
  "YTT",  # flake8-2020
  # "A",    # flake8-builtins
  # "ANN",  # flake8-annotations
  # "ARG",  # flake8-unused-arguments
  # "B",    # flake8-bugbear
  # "BLE",  # flake8-blind-except
  # "COM",  # flake8-commas
  # "D",    # pydocstyle
  # "DJ",   # flake8-django
  # "EM",   # flake8-errmsg
  # "ERA",  # eradicate
  # "EXE",  # flake8-executable
  # "FBT",  # flake8-boolean-trap
  # "I",    # isort
  # "INP",  # flake8-no-pep420
  # "ISC",  # flake8-implicit-str-concat
  # "N",    # pep8-naming
  # "NPY",  # NumPy-specific rules
  # "PD",   # pandas-vet
  # "PGH",  # pygrep-hooks
  # "PIE",  # flake8-pie
  # "PT",   # flake8-pytest-style
  # "PTH",  # flake8-use-pathlib
  # "Q",    # flake8-quotes
  # "RET",  # flake8-return
  # "S",    # flake8-bandit
  # "SIM",  # flake8-simplify
  # "SLF",  # flake8-self
  # "T20",  # flake8-print
  # "TRY",  # tryceratops
]
lint.ignore = [
  "E721",
  "PLC1901",
  "PLR0402",
  "PLR1714",
  "PLR2004",
  "PLR5501",
  "PLW0603",
  "PLW2901",
  "PYI024",
  "RUF005",
  "RUF012",
  "UP031",
]
extend-exclude = ["pylib/packaging"]
line-length = 88
target-version = "py37"

[tool.ruff.lint.mccabe]
max-complexity = 101

[tool.ruff.lint.pylint]
max-args = 11
max-branches = 108
max-returns = 10
max-statements = 286

[tool.setuptools]
package-dir = {"" = "pylib"}
packages = ["gyp", "gyp.generator"]
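The [project] table is standard PEP 621 metadata, so it can be read back programmatically. A sketch using the standard library (tomllib needs Python 3.11+; on 3.8-3.10 the third-party tomli package offers the same API):

import tomllib  # Python 3.11+

with open("pyproject.toml", "rb") as f:
    meta = tomllib.load(f)

print(meta["project"]["name"], meta["project"]["version"])  # gyp-next 0.16.1
print(meta["project"]["optional-dependencies"]["dev"])      # ['flake8', 'ruff', 'pytest']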
261 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/gyp/test_gyp.py generated vendored Normal file
@@ -0,0 +1,261 @@
#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""gyptest.py -- test runner for GYP tests."""


import argparse
import os
import platform
import subprocess
import sys
import time


def is_test_name(f):
    return f.startswith("gyptest") and f.endswith(".py")


def find_all_gyptest_files(directory):
    result = []
    for root, dirs, files in os.walk(directory):
        result.extend([os.path.join(root, f) for f in files if is_test_name(f)])
    result.sort()
    return result
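Discovery is purely name-based: anything matching gyptest*.py, anywhere under the directory, is collected and sorted. For example (hypothetical file names, evaluated in this module's own context):

assert is_test_name("gyptest-all.py")         # collected
assert not is_test_name("common.py")          # wrong prefix
assert not is_test_name("gyptest-notes.txt")  # wrong suffix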


def main(argv=None):
    if argv is None:
        argv = sys.argv

    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--all", action="store_true", help="run all tests")
    parser.add_argument("-C", "--chdir", action="store", help="change to directory")
    parser.add_argument(
        "-f",
        "--format",
        action="store",
        default="",
        help="run tests with the specified formats",
    )
    parser.add_argument(
        "-G",
        "--gyp_option",
        action="append",
        default=[],
        help="Add -G options to the gyp command line",
    )
    parser.add_argument(
        "-l", "--list", action="store_true", help="list available tests and exit"
    )
    parser.add_argument(
        "-n",
        "--no-exec",
        action="store_true",
        help="no execute, just print the command line",
    )
    parser.add_argument(
        "--path", action="append", default=[], help="additional $PATH directory"
    )
    parser.add_argument(
        "-q",
        "--quiet",
        action="store_true",
        help="quiet, don't print anything unless there are failures",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="print configuration info and test results.",
    )
    parser.add_argument("tests", nargs="*")
    args = parser.parse_args(argv[1:])

    if args.chdir:
        os.chdir(args.chdir)

    if args.path:
        extra_path = [os.path.abspath(p) for p in args.path]
        extra_path = os.pathsep.join(extra_path)
        os.environ["PATH"] = extra_path + os.pathsep + os.environ["PATH"]

    if not args.tests:
        if not args.all:
            sys.stderr.write("Specify -a to get all tests.\n")
            return 1
        args.tests = ["test"]

    tests = []
    for arg in args.tests:
        if os.path.isdir(arg):
            tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
        else:
            if not is_test_name(os.path.basename(arg)):
                print(arg, "is not a valid gyp test name.", file=sys.stderr)
                sys.exit(1)
            tests.append(arg)

    if args.list:
        for test in tests:
            print(test)
        sys.exit(0)

    os.environ["PYTHONPATH"] = os.path.abspath("test/lib")

    if args.verbose:
        print_configuration_info()

    if args.gyp_option and not args.quiet:
        print("Extra Gyp options: %s\n" % args.gyp_option)

    if args.format:
        format_list = args.format.split(",")
    else:
        format_list = {
            "aix5": ["make"],
            "os400": ["make"],
            "freebsd7": ["make"],
            "freebsd8": ["make"],
            "openbsd5": ["make"],
            "cygwin": ["msvs"],
            "win32": ["msvs", "ninja"],
            "linux": ["make", "ninja"],
            "linux2": ["make", "ninja"],
            "linux3": ["make", "ninja"],
            # TODO: Re-enable xcode-ninja.
            # https://bugs.chromium.org/p/gyp/issues/detail?id=530
            # 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'],
            "darwin": ["make", "ninja", "xcode"],
        }[sys.platform]

    gyp_options = []
    for option in args.gyp_option:
        gyp_options += ["-G", option]

    runner = Runner(format_list, tests, gyp_options, args.verbose)
    runner.run()

    if not args.quiet:
        runner.print_results()

    return 1 if runner.failures else 0


def print_configuration_info():
    print("Test configuration:")
    if sys.platform == "darwin":
        sys.path.append(os.path.abspath("test/lib"))
        import TestMac

        print(f" Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}")
        print(f" Xcode {TestMac.Xcode.Version()}")
    elif sys.platform == "win32":
        sys.path.append(os.path.abspath("pylib"))
        import gyp.MSVSVersion

        print(" Win %s %s\n" % platform.win32_ver()[0:2])
        print(" MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description())
    elif sys.platform in ("linux", "linux2"):
        print(" Linux %s" % " ".join(platform.linux_distribution()))
    print(f" Python {platform.python_version()}")
    print(f" PYTHONPATH={os.environ['PYTHONPATH']}")
    print()


class Runner:
    def __init__(self, formats, tests, gyp_options, verbose):
        self.formats = formats
        self.tests = tests
        self.verbose = verbose
        self.gyp_options = gyp_options
        self.failures = []
        self.num_tests = len(formats) * len(tests)
        num_digits = len(str(self.num_tests))
        self.fmt_str = "[%%%dd/%%%dd] (%%s) %%s" % (num_digits, num_digits)
        self.isatty = sys.stdout.isatty() and not self.verbose
        self.env = os.environ.copy()
        self.hpos = 0

    def run(self):
        run_start = time.time()

        i = 1
        for fmt in self.formats:
            for test in self.tests:
                self.run_test(test, fmt, i)
                i += 1

        if self.isatty:
            self.erase_current_line()

        self.took = time.time() - run_start

    def run_test(self, test, fmt, i):
        if self.isatty:
            self.erase_current_line()

        msg = self.fmt_str % (i, self.num_tests, fmt, test)
        self.print_(msg)

        start = time.time()
        cmd = [sys.executable, test] + self.gyp_options
        self.env["TESTGYP_FORMAT"] = fmt
        proc = subprocess.Popen(
            cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=self.env
        )
        proc.wait()
        took = time.time() - start

        stdout = proc.stdout.read().decode("utf8")
        if proc.returncode == 2:
            res = "skipped"
        elif proc.returncode:
            res = "failed"
            self.failures.append(f"({test}) {fmt}")
        else:
            res = "passed"
        res_msg = f" {res} {took:.3f}s"
        self.print_(res_msg)

        if stdout and not stdout.endswith(("PASSED\n", "NO RESULT\n")):
            print()
            print("\n".join(f" {line}" for line in stdout.splitlines()))
        elif not self.isatty:
            print()

    def print_(self, msg):
        print(msg, end="")
        index = msg.rfind("\n")
        if index == -1:
            self.hpos += len(msg)
        else:
            self.hpos = len(msg) - index
        sys.stdout.flush()

    def erase_current_line(self):
        print("\b" * self.hpos + " " * self.hpos + "\b" * self.hpos, end="")
        sys.stdout.flush()
        self.hpos = 0

    def print_results(self):
        num_failures = len(self.failures)
        if num_failures:
            print()
            if num_failures == 1:
                print("Failed the following test:")
            else:
                print("Failed the following %d tests:" % num_failures)
            print("\t" + "\n\t".join(sorted(self.failures)))
            print()
        print(
            "Ran %d tests in %.3fs, %d failed."
            % (self.num_tests, self.took, num_failures)
        )
        print()


if __name__ == "__main__":
    sys.exit(main())
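In CI this runner is typically launched as a child process and its exit code used directly; a minimal sketch (the flags are the ones defined above; the script is assumed to sit in the current directory):

import subprocess
import sys

# Run every gyptest-*.py under ./test with the ninja generator.
result = subprocess.run([sys.executable, "test_gyp.py", "-f", "ninja", "test"])
sys.exit(result.returncode)  # non-zero when any test failed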
250 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/lib/Find-VisualStudio.cs generated vendored Normal file
@@ -0,0 +1,250 @@
// Copyright 2017 - Refael Ackermann
// Distributed under MIT style license
// See accompanying file LICENSE at https://github.com/node4good/windows-autoconf

// Usage:
// powershell -ExecutionPolicy Unrestricted -Command "Add-Type -Path Find-VisualStudio.cs; [VisualStudioConfiguration.Main]::PrintJson()"
// This script needs to be compatible with PowerShell v2 to run on Windows 2008R2 and Windows 7.

using System;
using System.Text;
using System.Runtime.InteropServices;
using System.Collections.Generic;

namespace VisualStudioConfiguration
{
    [Flags]
    public enum InstanceState : uint
    {
        None = 0,
        Local = 1,
        Registered = 2,
        NoRebootRequired = 4,
        NoErrors = 8,
        Complete = 4294967295,
    }

    [Guid("6380BCFF-41D3-4B2E-8B2E-BF8A6810C848")]
    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    [ComImport]
    public interface IEnumSetupInstances
    {
        void Next([MarshalAs(UnmanagedType.U4), In] int celt,
            [MarshalAs(UnmanagedType.LPArray, ArraySubType = UnmanagedType.Interface), Out] ISetupInstance[] rgelt,
            [MarshalAs(UnmanagedType.U4)] out int pceltFetched);

        void Skip([MarshalAs(UnmanagedType.U4), In] int celt);

        void Reset();

        [return: MarshalAs(UnmanagedType.Interface)]
        IEnumSetupInstances Clone();
    }

    [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")]
    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    [ComImport]
    public interface ISetupConfiguration
    {
    }

    [Guid("26AAB78C-4A60-49D6-AF3B-3C35BC93365D")]
    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    [ComImport]
    public interface ISetupConfiguration2 : ISetupConfiguration
    {
        [return: MarshalAs(UnmanagedType.Interface)]
        IEnumSetupInstances EnumInstances();

        [return: MarshalAs(UnmanagedType.Interface)]
        ISetupInstance GetInstanceForCurrentProcess();

        [return: MarshalAs(UnmanagedType.Interface)]
        ISetupInstance GetInstanceForPath([MarshalAs(UnmanagedType.LPWStr), In] string path);

        [return: MarshalAs(UnmanagedType.Interface)]
        IEnumSetupInstances EnumAllInstances();
    }

    [Guid("B41463C3-8866-43B5-BC33-2B0676F7F42E")]
    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    [ComImport]
    public interface ISetupInstance
    {
    }

    [Guid("89143C9A-05AF-49B0-B717-72E218A2185C")]
    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    [ComImport]
    public interface ISetupInstance2 : ISetupInstance
    {
        [return: MarshalAs(UnmanagedType.BStr)]
        string GetInstanceId();

        [return: MarshalAs(UnmanagedType.Struct)]
        System.Runtime.InteropServices.ComTypes.FILETIME GetInstallDate();

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetInstallationName();

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetInstallationPath();

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetInstallationVersion();

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetDisplayName([MarshalAs(UnmanagedType.U4), In] int lcid);

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetDescription([MarshalAs(UnmanagedType.U4), In] int lcid);

        [return: MarshalAs(UnmanagedType.BStr)]
        string ResolvePath([MarshalAs(UnmanagedType.LPWStr), In] string pwszRelativePath);

        [return: MarshalAs(UnmanagedType.U4)]
        InstanceState GetState();

        [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)]
        ISetupPackageReference[] GetPackages();

        ISetupPackageReference GetProduct();

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetProductPath();

        [return: MarshalAs(UnmanagedType.VariantBool)]
        bool IsLaunchable();

        [return: MarshalAs(UnmanagedType.VariantBool)]
        bool IsComplete();

        [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)]
        ISetupPropertyStore GetProperties();

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetEnginePath();
    }

    [Guid("DA8D8A16-B2B6-4487-A2F1-594CCCCD6BF5")]
    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    [ComImport]
    public interface ISetupPackageReference
    {
        [return: MarshalAs(UnmanagedType.BStr)]
        string GetId();

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetVersion();

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetChip();

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetLanguage();

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetBranch();

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetType();

        [return: MarshalAs(UnmanagedType.BStr)]
        string GetUniqueId();

        [return: MarshalAs(UnmanagedType.VariantBool)]
        bool GetIsExtension();
    }

    [Guid("c601c175-a3be-44bc-91f6-4568d230fc83")]
    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    [ComImport]
    public interface ISetupPropertyStore
    {
        [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_BSTR)]
        string[] GetNames();

        object GetValue([MarshalAs(UnmanagedType.LPWStr), In] string pwszName);
    }

    [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")]
    [CoClass(typeof(SetupConfigurationClass))]
    [ComImport]
    public interface SetupConfiguration : ISetupConfiguration2, ISetupConfiguration
    {
    }

    [Guid("177F0C4A-1CD3-4DE7-A32C-71DBBB9FA36D")]
    [ClassInterface(ClassInterfaceType.None)]
    [ComImport]
    public class SetupConfigurationClass
    {
    }

    public static class Main
    {
        public static void PrintJson()
        {
            ISetupConfiguration query = new SetupConfiguration();
            ISetupConfiguration2 query2 = (ISetupConfiguration2)query;
            IEnumSetupInstances e = query2.EnumAllInstances();

            int pceltFetched;
            ISetupInstance2[] rgelt = new ISetupInstance2[1];
            List<string> instances = new List<string>();
            while (true)
            {
                e.Next(1, rgelt, out pceltFetched);
                if (pceltFetched <= 0)
                {
                    Console.WriteLine(String.Format("[{0}]", string.Join(",", instances.ToArray())));
                    return;
                }

                try
                {
                    instances.Add(InstanceJson(rgelt[0]));
                }
                catch (COMException)
                {
                    // Ignore instances that can't be queried.
                }
            }
        }

        private static string JsonString(string s)
        {
            return "\"" + s.Replace("\\", "\\\\").Replace("\"", "\\\"") + "\"";
        }

        private static string InstanceJson(ISetupInstance2 setupInstance2)
        {
            // Visual Studio component directory:
            // https://docs.microsoft.com/en-us/visualstudio/install/workload-and-component-ids

            StringBuilder json = new StringBuilder();
            json.Append("{");

            string path = JsonString(setupInstance2.GetInstallationPath());
            json.Append(String.Format("\"path\":{0},", path));

            string version = JsonString(setupInstance2.GetInstallationVersion());
            json.Append(String.Format("\"version\":{0},", version));

            List<string> packages = new List<string>();
            foreach (ISetupPackageReference package in setupInstance2.GetPackages())
            {
                string id = JsonString(package.GetId());
                packages.Add(id);
            }
            json.Append(String.Format("\"packages\":[{0}]", string.Join(",", packages.ToArray())));

            json.Append("}");
            return json.ToString();
        }
    }
}
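node-gyp drives this helper through PowerShell and parses the JSON array it prints (see the usage comment at the top of the file). The same round trip is easy to reproduce for debugging; a sketch in Python, Windows only, assuming Find-VisualStudio.cs sits in the working directory:

import json
import subprocess

cmd = [
    "powershell", "-ExecutionPolicy", "Unrestricted", "-Command",
    "Add-Type -Path Find-VisualStudio.cs; [VisualStudioConfiguration.Main]::PrintJson()",
]
out = subprocess.check_output(cmd, text=True)
for instance in json.loads(out):  # one object per Visual Studio installation
    print(instance["version"], instance["path"], len(instance["packages"]), "packages")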
220 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/lib/build.js generated vendored Normal file
@@ -0,0 +1,220 @@
'use strict'

const fs = require('graceful-fs').promises
const path = require('path')
const { glob } = require('glob')
const log = require('./log')
const which = require('which')
const win = process.platform === 'win32'

async function build (gyp, argv) {
  let platformMake = 'make'
  if (process.platform === 'aix') {
    platformMake = 'gmake'
  } else if (process.platform === 'os400') {
    platformMake = 'gmake'
  } else if (process.platform.indexOf('bsd') !== -1) {
    platformMake = 'gmake'
  } else if (win && argv.length > 0) {
    argv = argv.map(function (target) {
      return '/t:' + target
    })
  }

  const makeCommand = gyp.opts.make || process.env.MAKE || platformMake
  let command = win ? 'msbuild' : makeCommand
  const jobs = gyp.opts.jobs || process.env.JOBS
  let buildType
  let config
  let arch
  let nodeDir
  let guessedSolution
  let python
  let buildBinsDir

  await loadConfigGypi()

  /**
   * Load the "config.gypi" file that was generated during "configure".
   */

  async function loadConfigGypi () {
    let data
    try {
      const configPath = path.resolve('build', 'config.gypi')
      data = await fs.readFile(configPath, 'utf8')
    } catch (err) {
      if (err.code === 'ENOENT') {
        throw new Error('You must run `node-gyp configure` first!')
      } else {
        throw err
      }
    }

    config = JSON.parse(data.replace(/#.+\n/, ''))

    // get the 'arch', 'buildType', and 'nodeDir' vars from the config
    buildType = config.target_defaults.default_configuration
    arch = config.variables.target_arch
    nodeDir = config.variables.nodedir
    python = config.variables.python

    if ('debug' in gyp.opts) {
      buildType = gyp.opts.debug ? 'Debug' : 'Release'
    }
    if (!buildType) {
      buildType = 'Release'
    }

    log.verbose('build type', buildType)
    log.verbose('architecture', arch)
    log.verbose('node dev dir', nodeDir)
    log.verbose('python', python)

    if (win) {
      await findSolutionFile()
    } else {
      await doWhich()
    }
  }

  /**
   * On Windows, find the first build/*.sln file.
   */

  async function findSolutionFile () {
    const files = await glob('build/*.sln')
    if (files.length === 0) {
      throw new Error('Could not find *.sln file. Did you run "configure"?')
    }
    guessedSolution = files[0]
    log.verbose('found first Solution file', guessedSolution)
    await doWhich()
  }

  /**
   * Uses node-which to locate the msbuild / make executable.
   */

  async function doWhich () {
    // On Windows use msbuild provided by node-gyp configure
    if (win) {
      if (!config.variables.msbuild_path) {
        throw new Error('MSBuild is not set, please run `node-gyp configure`.')
      }
      command = config.variables.msbuild_path
      log.verbose('using MSBuild:', command)
      await doBuild()
      return
    }

    // First make sure we have the build command in the PATH
    const execPath = await which(command)
    log.verbose('`which` succeeded for `' + command + '`', execPath)
    await doBuild()
  }

  /**
   * Actually spawn the process and compile the module.
   */

  async function doBuild () {
    // Enable Verbose build
    const verbose = log.logger.isVisible('verbose')
    let j

    if (!win && verbose) {
      argv.push('V=1')
    }

    if (win && !verbose) {
      argv.push('/clp:Verbosity=minimal')
    }

    if (win) {
      // Turn off the Microsoft logo on Windows
      argv.push('/nologo')
    }

    // Specify the build type, Release by default
    if (win) {
      // Convert .gypi config target_arch to MSBuild /Platform
      // Since there are many ways to state '32-bit Intel', default to it.
      // N.B. msbuild's Condition string equality tests are case-insensitive.
      const archLower = arch.toLowerCase()
      const p = archLower === 'x64'
        ? 'x64'
        : (archLower === 'arm'
            ? 'ARM'
            : (archLower === 'arm64' ? 'ARM64' : 'Win32'))
      argv.push('/p:Configuration=' + buildType + ';Platform=' + p)
      if (jobs) {
        j = parseInt(jobs, 10)
        if (!isNaN(j) && j > 0) {
          argv.push('/m:' + j)
        } else if (jobs.toUpperCase() === 'MAX') {
          argv.push('/m:' + require('os').cpus().length)
        }
      }
    } else {
      argv.push('BUILDTYPE=' + buildType)
      // Invoke the Makefile in the 'build' dir.
      argv.push('-C')
      argv.push('build')
      if (jobs) {
        j = parseInt(jobs, 10)
        if (!isNaN(j) && j > 0) {
          argv.push('--jobs')
          argv.push(j)
        } else if (jobs.toUpperCase() === 'MAX') {
          argv.push('--jobs')
          argv.push(require('os').cpus().length)
        }
      }
    }

    if (win) {
      // did the user specify their own .sln file?
      const hasSln = argv.some(function (arg) {
        return path.extname(arg) === '.sln'
      })
      if (!hasSln) {
        argv.unshift(gyp.opts.solution || guessedSolution)
      }
    }

    if (!win) {
      // Add build-time dependency symlinks (such as Python) to PATH
      buildBinsDir = path.resolve('build', 'node_gyp_bins')
      process.env.PATH = `${buildBinsDir}:${process.env.PATH}`
      await fs.mkdir(buildBinsDir, { recursive: true })
      const symlinkDestination = path.join(buildBinsDir, 'python3')
      try {
        await fs.unlink(symlinkDestination)
      } catch (err) {
        if (err.code !== 'ENOENT') throw err
      }
      await fs.symlink(python, symlinkDestination)
      log.verbose('bin symlinks', `created symlink to "${python}" in "${buildBinsDir}" and added to PATH`)
    }

    const proc = gyp.spawn(command, argv)
    await new Promise((resolve, reject) => proc.on('exit', async (code, signal) => {
      if (buildBinsDir) {
        // Clean up the build-time dependency symlinks:
        await fs.rm(buildBinsDir, { recursive: true })
      }

      if (code !== 0) {
        return reject(new Error('`' + command + '` failed with exit code: ' + code))
      }
      if (signal) {
        return reject(new Error('`' + command + '` got signal: ' + signal))
      }
      resolve()
    }))
  }
}

module.exports = build
module.exports.usage = 'Invokes `' + (win ? 'msbuild' : 'make') + '` and builds the module'
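The command selection above reduces to a small platform table plus a jobs override. Re-expressed in Python purely to make the decision logic explicit (a loose sketch, not part of node-gyp; the JS version accepts any parseInt-able jobs value):

import os
from typing import List, Optional

def pick_build_command(platform: str, make_env: Optional[str] = None) -> str:
    # Mirrors build.js: msbuild on Windows; gmake on AIX, IBM i and the BSDs;
    # otherwise plain make. The MAKE env var / --make option wins when set.
    if platform == "win32":
        return "msbuild"
    if platform in ("aix", "os400") or "bsd" in platform:
        return make_env or "gmake"
    return make_env or "make"

def jobs_args(jobs: str) -> List[str]:
    # Mirrors the JOBS handling: a positive integer, or "max" for all CPUs.
    if jobs.isdigit() and int(jobs) > 0:
        return ["--jobs", jobs]
    if jobs.upper() == "MAX":
        return ["--jobs", str(os.cpu_count())]
    return []

assert pick_build_command("freebsd12") == "gmake"
assert jobs_args("max") == ["--jobs", str(os.cpu_count())]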
15 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/lib/clean.js generated vendored Normal file
@@ -0,0 +1,15 @@
'use strict'

const fs = require('graceful-fs').promises
const log = require('./log')

async function clean (gyp, argv) {
  // Remove the 'build' dir
  const buildDir = 'build'

  log.verbose('clean', 'removing "%s" directory', buildDir)
  await fs.rm(buildDir, { recursive: true, force: true })
}

module.exports = clean
module.exports.usage = 'Removes any generated build files and the "out" dir'
308 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/lib/configure.js generated vendored Normal file
@@ -0,0 +1,308 @@
'use strict'

const { promises: fs, readFileSync } = require('graceful-fs')
const path = require('path')
const log = require('./log')
const os = require('os')
const processRelease = require('./process-release')
const win = process.platform === 'win32'
const findNodeDirectory = require('./find-node-directory')
const { createConfigGypi } = require('./create-config-gypi')
const { format: msgFormat } = require('util')
const { findAccessibleSync } = require('./util')
const { findPython } = require('./find-python')
const { findVisualStudio } = win ? require('./find-visualstudio') : {}

const majorRe = /^#define NODE_MAJOR_VERSION (\d+)/m
const minorRe = /^#define NODE_MINOR_VERSION (\d+)/m
const patchRe = /^#define NODE_PATCH_VERSION (\d+)/m

async function configure (gyp, argv) {
  const buildDir = path.resolve('build')
  const configNames = ['config.gypi', 'common.gypi']
  const configs = []
  let nodeDir
  const release = processRelease(argv, gyp, process.version, process.release)

  const python = await findPython(gyp.opts.python)
  return getNodeDir()

  async function getNodeDir () {
    // 'python' should be set by now
    process.env.PYTHON = python

    if (!gyp.opts.nodedir &&
        process.config.variables.use_prefix_to_find_headers) {
      // check if the headers can be found using the prefix specified
      // at build time. Use them if they match the version expected
      const prefix = process.config.variables.node_prefix
      let availVersion
      try {
        const nodeVersionH = readFileSync(path.join(prefix,
          'include', 'node', 'node_version.h'), { encoding: 'utf8' })
        const major = nodeVersionH.match(majorRe)[1]
        const minor = nodeVersionH.match(minorRe)[1]
        const patch = nodeVersionH.match(patchRe)[1]
        availVersion = major + '.' + minor + '.' + patch
      } catch {}
      if (availVersion === release.version) {
        // ok version matches, use the headers
        gyp.opts.nodedir = prefix
        log.verbose('using local node headers based on prefix',
          'setting nodedir to ' + gyp.opts.nodedir)
      }
    }

    if (gyp.opts.nodedir) {
      // --nodedir was specified. use that for the dev files
      nodeDir = gyp.opts.nodedir.replace(/^~/, os.homedir())
      log.verbose('get node dir', 'compiling against specified --nodedir dev files: %s', nodeDir)
    } else {
      // if no --nodedir specified, ensure node dependencies are installed
      if ('v' + release.version !== process.version) {
        // if --target was given, then determine a target version to compile for
        log.verbose('get node dir', 'compiling against --target node version: %s', release.version)
      } else {
        // if no --target was specified then use the current host node version
        log.verbose('get node dir', 'no --target version specified, falling back to host node version: %s', release.version)
      }

      if (!release.semver) {
        // could not parse the version string with semver
        throw new Error('Invalid version number: ' + release.version)
      }

      // If the tarball option is set, always remove and reinstall the headers
      // into devdir. Otherwise only install if they're not already there.
      gyp.opts.ensure = !gyp.opts.tarball

      await gyp.commands.install([release.version])

      log.verbose('get node dir', 'target node version installed:', release.versionDir)
      nodeDir = path.resolve(gyp.devDir, release.versionDir)
    }

    return createBuildDir()
  }
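The prefix probe above boils down to three anchored regexes over node_version.h. The same check re-expressed in Python (a sketch; the header path mirrors the one configure.js builds from node_prefix):

import re
from pathlib import Path
from typing import Optional

def installed_node_version(prefix: str) -> Optional[str]:
    # Read MAJOR.MINOR.PATCH out of <prefix>/include/node/node_version.h.
    header = Path(prefix, "include", "node", "node_version.h")
    try:
        text = header.read_text(encoding="utf-8")
    except OSError:
        return None
    matches = [
        re.search(rf"^#define NODE_{name}_VERSION (\d+)", text, re.M)
        for name in ("MAJOR", "MINOR", "PATCH")
    ]
    if not all(matches):
        return None
    return ".".join(m.group(1) for m in matches)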

  async function createBuildDir () {
    log.verbose('build dir', 'attempting to create "build" dir: %s', buildDir)

    const isNew = await fs.mkdir(buildDir, { recursive: true })
    log.verbose(
      'build dir', '"build" dir needed to be created?', isNew ? 'Yes' : 'No'
    )
    const vsInfo = win ? await findVisualStudio(release.semver, gyp.opts['msvs-version']) : null
    return createConfigFile(vsInfo)
  }

  async function createConfigFile (vsInfo) {
    if (win) {
      process.env.GYP_MSVS_VERSION = Math.min(vsInfo.versionYear, 2015)
      process.env.GYP_MSVS_OVERRIDE_PATH = vsInfo.path
    }
    const configPath = await createConfigGypi({ gyp, buildDir, nodeDir, vsInfo, python })
    configs.push(configPath)
    return findConfigs()
  }

  async function findConfigs () {
    const name = configNames.shift()
    if (!name) {
      return runGyp()
    }

    const fullPath = path.resolve(name)
    log.verbose(name, 'checking for gypi file: %s', fullPath)
    try {
      await fs.stat(fullPath)
      log.verbose(name, 'found gypi file')
      configs.push(fullPath)
    } catch (err) {
      // ENOENT will check next gypi filename
      if (err.code !== 'ENOENT') {
        throw err
      }
    }

    return findConfigs()
  }

  async function runGyp () {
    if (!~argv.indexOf('-f') && !~argv.indexOf('--format')) {
      if (win) {
        log.verbose('gyp', 'gyp format was not specified; forcing "msvs"')
        // force the 'msvs' target on Windows
        argv.push('-f', 'msvs')
      } else {
        log.verbose('gyp', 'gyp format was not specified; forcing "make"')
        // force the 'make' target for non-Windows
        argv.push('-f', 'make')
      }
    }

    // include all the ".gypi" files that were found
    configs.forEach(function (config) {
      argv.push('-I', config)
    })

    // For AIX and z/OS we need to set up the path to the exports file
    // which contains the symbols needed for linking.
    let nodeExpFile
    let nodeRootDir
    let candidates
    let logprefix = 'find exports file'
    if (process.platform === 'aix' || process.platform === 'os390' || process.platform === 'os400') {
      const ext = process.platform === 'os390' ? 'x' : 'exp'
      nodeRootDir = findNodeDirectory()

      if (process.platform === 'aix' || process.platform === 'os400') {
        candidates = [
          'include/node/node',
          'out/Release/node',
          'out/Debug/node',
          'node'
        ].map(function (file) {
          return file + '.' + ext
        })
      } else {
        candidates = [
          'out/Release/lib.target/libnode',
          'out/Debug/lib.target/libnode',
          'out/Release/obj.target/libnode',
          'out/Debug/obj.target/libnode',
          'lib/libnode'
        ].map(function (file) {
          return file + '.' + ext
        })
      }

      nodeExpFile = findAccessibleSync(logprefix, nodeRootDir, candidates)
      if (nodeExpFile !== undefined) {
        log.verbose(logprefix, 'Found exports file: %s', nodeExpFile)
      } else {
        const msg = msgFormat('Could not find node.%s file in %s', ext, nodeRootDir)
        log.error(logprefix, 'Could not find exports file')
        throw new Error(msg)
      }
    }

    // For z/OS we need to set up the path to the zoslib include directory,
    // which contains headers included in v8config.h.
    let zoslibIncDir
    if (process.platform === 'os390') {
      logprefix = "find zoslib's zos-base.h:"
      let msg
      let zoslibIncPath = process.env.ZOSLIB_INCLUDES
      if (zoslibIncPath) {
        zoslibIncPath = findAccessibleSync(logprefix, zoslibIncPath, ['zos-base.h'])
        if (zoslibIncPath === undefined) {
          msg = msgFormat('Could not find zos-base.h file in the directory set ' +
                          'in ZOSLIB_INCLUDES environment variable: %s; set it ' +
                          'to the correct path, or unset it to search %s', process.env.ZOSLIB_INCLUDES, nodeRootDir)
        }
      } else {
        candidates = [
          'include/node/zoslib/zos-base.h',
          'include/zoslib/zos-base.h',
          'zoslib/include/zos-base.h',
          'install/include/node/zoslib/zos-base.h'
        ]
        zoslibIncPath = findAccessibleSync(logprefix, nodeRootDir, candidates)
        if (zoslibIncPath === undefined) {
          msg = msgFormat('Could not find any of %s in directory %s; set ' +
                          'environment variable ZOSLIB_INCLUDES to the path ' +
                          'that contains zos-base.h', candidates.toString(), nodeRootDir)
        }
      }
      if (zoslibIncPath !== undefined) {
        zoslibIncDir = path.dirname(zoslibIncPath)
        log.verbose(logprefix, "Found zoslib's zos-base.h in: %s", zoslibIncDir)
      } else if (release.version.split('.')[0] >= 16) {
        // zoslib is only shipped in Node v16 and above.
        log.error(logprefix, msg)
        throw new Error(msg)
      }
    }

    // this logic ported from the old `gyp_addon` python file
    const gypScript = path.resolve(__dirname, '..', 'gyp', 'gyp_main.py')
    const addonGypi = path.resolve(__dirname, '..', 'addon.gypi')
    let commonGypi = path.resolve(nodeDir, 'include/node/common.gypi')
    try {
      await fs.stat(commonGypi)
    } catch (err) {
      commonGypi = path.resolve(nodeDir, 'common.gypi')
    }

    let outputDir = 'build'
    if (win) {
      // Windows expects an absolute path
      outputDir = buildDir
    }
    const nodeGypDir = path.resolve(__dirname, '..')

    let nodeLibFile = path.join(nodeDir,
      !gyp.opts.nodedir ? '<(target_arch)' : '$(Configuration)',
      release.name + '.lib')

    argv.push('-I', addonGypi)
    argv.push('-I', commonGypi)
    argv.push('-Dlibrary=shared_library')
    argv.push('-Dvisibility=default')
    argv.push('-Dnode_root_dir=' + nodeDir)
    if (process.platform === 'aix' || process.platform === 'os390' || process.platform === 'os400') {
      argv.push('-Dnode_exp_file=' + nodeExpFile)
      if (process.platform === 'os390' && zoslibIncDir) {
        argv.push('-Dzoslib_include_dir=' + zoslibIncDir)
      }
    }
    argv.push('-Dnode_gyp_dir=' + nodeGypDir)

    // Do this to keep Cygwin environments happy, else the unescaped '\' gets eaten up,
    // resulting in bad paths, e.g. c:parentFolderfolderanotherFolder instead of c:\parentFolder\folder\anotherFolder
    if (win) {
      nodeLibFile = nodeLibFile.replace(/\\/g, '\\\\')
    }
    argv.push('-Dnode_lib_file=' + nodeLibFile)
    argv.push('-Dmodule_root_dir=' + process.cwd())
    argv.push('-Dnode_engine=' +
      (gyp.opts.node_engine || process.jsEngine || 'v8'))
    argv.push('--depth=.')
    argv.push('--no-parallel')

    // tell gyp to write the Makefile/Solution files into output_dir
    argv.push('--generator-output', outputDir)

    // tell make to write its output into the same dir
    argv.push('-Goutput_dir=.')

    // enforce use of the "binding.gyp" file
    argv.unshift('binding.gyp')

    // execute `gyp` from the current target nodedir
    argv.unshift(gypScript)

    // make sure python uses files that came with this particular node package
    const pypath = [path.join(__dirname, '..', 'gyp', 'pylib')]
    if (process.env.PYTHONPATH) {
      pypath.push(process.env.PYTHONPATH)
    }
    process.env.PYTHONPATH = pypath.join(win ? ';' : ':')

    await new Promise((resolve, reject) => {
      const cp = gyp.spawn(python, argv)
      cp.on('exit', (code) => {
        if (code !== 0) {
          reject(new Error('`gyp` failed with exit code: ' + code))
        } else {
          // we're done
          resolve()
        }
      })
    })
  }
}

module.exports = configure
module.exports.usage = 'Generates ' + (win ? 'MSVC project files' : 'a Makefile') + ' for the current module'
150 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/lib/create-config-gypi.js generated vendored Normal file
@@ -0,0 +1,150 @@
'use strict'

const fs = require('graceful-fs').promises
const log = require('./log')
const path = require('path')

function parseConfigGypi (config) {
  // translated from tools/js2c.py of Node.js
  // 1. strip comments
  config = config.replace(/#.*/g, '')
  // 2. join multiline strings
  config = config.replace(/'$\s+'/mg, '')
  // 3. normalize string literals from ' into "
  config = config.replace(/'/g, '"')
  return JSON.parse(config)
}
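config.gypi is a Python-literal-style file, not JSON, so those three rewrites (strip # comments, join multi-line strings, swap quotes) are exactly what makes JSON.parse accept it. The same translation in Python, as a sketch (ast.literal_eval could also read such a file directly):

import json
import re

def parse_config_gypi(text: str) -> dict:
    text = re.sub(r"#.*", "", text)                 # 1. strip comments
    text = re.sub(r"'$\s+'", "", text, flags=re.M)  # 2. join multiline strings
    text = text.replace("'", '"')                   # 3. normalize quotes for JSON
    return json.loads(text)

sample = """# generated by node-gyp
{
  'variables': {'target_arch': 'x64'}
}
"""
assert parse_config_gypi(sample) == {"variables": {"target_arch": "x64"}}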

async function getBaseConfigGypi ({ gyp, nodeDir }) {
  // try reading $nodeDir/include/node/config.gypi first when:
  // 1. --dist-url or --nodedir is specified
  // 2. and --force-process-config is not specified
  const useCustomHeaders = gyp.opts.nodedir || gyp.opts.disturl || gyp.opts['dist-url']
  const shouldReadConfigGypi = useCustomHeaders && !gyp.opts['force-process-config']
  if (shouldReadConfigGypi && nodeDir) {
    try {
      const baseConfigGypiPath = path.resolve(nodeDir, 'include/node/config.gypi')
      const baseConfigGypi = await fs.readFile(baseConfigGypiPath)
      return parseConfigGypi(baseConfigGypi.toString())
    } catch (err) {
      log.warn('read config.gypi', err.message)
    }
  }

  // fall back to process.config if config.gypi is invalid or unavailable
  return JSON.parse(JSON.stringify(process.config))
}

async function getCurrentConfigGypi ({ gyp, nodeDir, vsInfo, python }) {
  const config = await getBaseConfigGypi({ gyp, nodeDir })
  if (!config.target_defaults) {
    config.target_defaults = {}
  }
  if (!config.variables) {
    config.variables = {}
  }

  const defaults = config.target_defaults
  const variables = config.variables

  // don't inherit the "defaults" from the base config.gypi.
  // doing so could cause problems in cases where the `node` executable was
  // compiled on a different machine (with different lib/include paths) than
  // the machine where the addon is being built
  defaults.cflags = []
  defaults.defines = []
  defaults.include_dirs = []
  defaults.libraries = []

  // set the default_configuration prop
  if ('debug' in gyp.opts) {
    defaults.default_configuration = gyp.opts.debug ? 'Debug' : 'Release'
  }

  if (!defaults.default_configuration) {
    defaults.default_configuration = 'Release'
  }

  // set the target_arch variable
  variables.target_arch = gyp.opts.arch || process.arch || 'ia32'
  if (variables.target_arch === 'arm64') {
    defaults.msvs_configuration_platform = 'ARM64'
    defaults.xcode_configuration_platform = 'arm64'
  }

  // set the node development directory
  variables.nodedir = nodeDir

  // set the configured Python path
  variables.python = python

  // disable -T "thin" static archives by default
  variables.standalone_static_library = gyp.opts.thin ? 0 : 1

  if (process.platform === 'win32') {
    defaults.msbuild_toolset = vsInfo.toolset
    if (vsInfo.sdk) {
      defaults.msvs_windows_target_platform_version = vsInfo.sdk
    }
    if (variables.target_arch === 'arm64') {
      if (vsInfo.versionMajor > 15 ||
          (vsInfo.versionMajor === 15 && vsInfo.versionMinor >= 9)) {
        defaults.msvs_enable_marmasm = 1
      } else {
        log.warn('Compiling ARM64 assembly is only available in\n' +
          'Visual Studio 2017 version 15.9 and above')
      }
    }
    variables.msbuild_path = vsInfo.msBuild
  }

  // loop through the rest of the opts and add the unknown ones as variables.
  // this allows for module-specific configure flags like:
  //
  //   $ node-gyp configure --shared-libxml2
  Object.keys(gyp.opts).forEach(function (opt) {
    if (opt === 'argv') {
      return
    }
    if (opt in gyp.configDefs) {
      return
    }
    variables[opt.replace(/-/g, '_')] = gyp.opts[opt]
  })

  return config
}

async function createConfigGypi ({ gyp, buildDir, nodeDir, vsInfo, python }) {
  const configFilename = 'config.gypi'
  const configPath = path.resolve(buildDir, configFilename)

  log.verbose('build/' + configFilename, 'creating config file')

  const config = await getCurrentConfigGypi({ gyp, nodeDir, vsInfo, python })

  // ensures that any boolean values in config.gypi get stringified
  function boolsToString (k, v) {
    if (typeof v === 'boolean') {
      return String(v)
    }
    return v
  }

  log.silly('build/' + configFilename, config)

  // now write out the config.gypi file to the build/ dir
  const prefix = '# Do not edit. File was generated by node-gyp\'s "configure" step'

  const json = JSON.stringify(config, boolsToString, 2)
  log.verbose('build/' + configFilename, 'writing out config file: %s', configPath)
  await fs.writeFile(configPath, [prefix, json, ''].join('\n'))

  return configPath
}

module.exports = {
  createConfigGypi,
  parseConfigGypi,
  getCurrentConfigGypi
}
39 .yarn/unplugged/node-gyp-npm-10.1.0-bdea7d2ece/node_modules/node-gyp/lib/download.js generated vendored Normal file
@@ -0,0 +1,39 @@
const fetch = require('make-fetch-happen')
const { promises: fs } = require('graceful-fs')
const log = require('./log')

async function download (gyp, url) {
  log.http('GET', url)

  const requestOpts = {
    headers: {
      'User-Agent': `node-gyp v${gyp.version} (node ${process.version})`,
      Connection: 'keep-alive'
    },
    proxy: gyp.opts.proxy,
    noProxy: gyp.opts.noproxy
  }

  const cafile = gyp.opts.cafile
  if (cafile) {
    requestOpts.ca = await readCAFile(cafile)
  }

  const res = await fetch(url, requestOpts)
  log.http(res.status, res.url)

  return res
}

async function readCAFile (filename) {
  // The CA file can contain multiple certificates so split on certificate
  // boundaries. [\S\s]*? is used to match everything including newlines.
  const ca = await fs.readFile(filename, 'utf8')
  const re = /(-----BEGIN CERTIFICATE-----[\S\s]*?-----END CERTIFICATE-----)/g
  return ca.match(re)
}

module.exports = {
  download,
  readCAFile
}
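The certificate-splitting regex translates directly; the same idea in Python (a sketch mirroring readCAFile above):

import re
from typing import List

CERT_RE = re.compile(r"-----BEGIN CERTIFICATE-----[\S\s]*?-----END CERTIFICATE-----")

def read_ca_file(path: str) -> List[str]:
    # Split a PEM bundle into its individual certificates.
    with open(path, encoding="utf-8") as f:
        return CERT_RE.findall(f.read())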