Mirror of https://github.com/microsoft/PowerToys.git, synced 2026-01-17 01:27:48 +01:00
Compare commits
306 Commits
user/yeela...dev/featur
Commit SHA1s:

d34d4e2d54 8588dff066 cbf6624ddd 010fccd12a b68b7724c9 ab47d91b5f
f6282773c1 868ba040d8 179b4ed197 dc5bc12896 558ea132d1 919fe830a2
f665966db6 40a2510c52 cec9a73a6b 02f792b13e b7743d28b5 f180d51367
8911a6eb63 c38412c7cb 3bc0a518fc e49c1e1d60 fcf5c5c05c 56d87133af
2093dd4cde 7beae3e470 34f4514ebb 62d7b40ab0 cd0b8aa6d9 7b23e5ba99
d3faa86600 2bc3d1228d 169c231509 690dce5482 a20fd15800 8c30da743c
601eab5c88 e23cf3c914 e17747cc1e 817bec9c4c ca49cab49b ff225044f9
aee1eb82a2 1f58a9e8c9 114ddbeeba e12bf70081 24533e4344 fa50208973
2d4d49f783 af85be3b90 33442e8853 ec049aa27b 71d40b96cd f6afee9012
d8b4798476 344506b1db 23b6a4bedb c28afa1fc8 2d812fa6d8 dc39ec771e
a4e07a7b5d 4825c62c50 74c891b0f3 d1266cf080 26119ea0e0 5420beb700
b72ec989fa 8675a93f69 877d5c0506 6820e0cd80 98eaffc475 bc86e4fb04
95d9521cce 19c660a8ff 8bd74accc0 443f709899 86b1bff927 00aa8b4b88
6db620c79f b72e6bb7d6 b67b7a4ee7 65be209c2d 3b7f7f6851 f10610e1e1
903c64b29a 13e8743802 05ddd019af 55d8631650 666d0c2840 ed18f10fa9
ef511d6318 f80eaf7b23 ad88f71398 26b7a48278 b407bf0f2b 82cfa4cbfe
ac5d9dbef2 2af86355ac e16f3ab4e5 6686c5b5f5 f922f56890 193581a9ac
78c3f95179 0212892c94 50792b9520 885c4ffd77 d3c9fe8ea5 475a059ef6
39086f702e ed98a94d2a 04ff5373d9 3fb51eea9e a208742012 3320dc2d0d
4c12db9fa6 abc5a80a1e 548bfdfcee ea6f5c8d88 93c81bb93e 81fb58ed20
23aa46a4a5 ae7eb60b0c 6c09b251b5 e5c8305c5c 3c422438e8 82e0749779
4fc83786e3 513dd0cbf4 672206d17b 9f93e238c3 158f4ee222 5aa4580bdb
dcc0634fb1 d218e36a77 89d6aed118 a7e5c0330b b5251c27e9 16a71e2acd
3036df9d7f 12308babc7 5576c7b23f 8620d80047 be202df44b 3fa5aae70c
dae91d2874 2e2e47a039 06ecbd58b8 fc8918e538 f93aa42b19 cbedf45796
7b8e229544 d7467e24ae c493d27dd3 b48c8aa80b deb1680f91 647ea2db1e
6516af8a7a d0775dac28 9906141322 353aeb268b 83ee449b1b 9a3519cd0a
54ed4834a8 c00ed8f996 db8935dca3 10a3f1162f fd5e4590d7 907a6f5ec9
3ea27571f3 ed423d8264 9733c1035b 477094057d 1912f1c533 550f7ec08d
aae69ab125 8344060e5e 54a8f47df7 d9e92eca01 2643f00fa1 8f076cb5da
9b8d86858f c94c0431f4 64c5439b60 e0bdba9772 c4a93d2482 4960af8a13
a6b223083c dc9af4bc30 18e1d851fe 663b0281c2 3c694e428c 98ef8c4e70
c89b4bb9fd 17a607fda0 371a05ac5e 65e75d1c42 15ae5a03dc f39e4bb058
0679664d4c 5e012b8891 77c59faa8a ed7d3ec973 26bbefa004 e6b56c4152
4c4630ef75 6fc164df98 5f42c6ef66 1e28cb8c40 0c83e632ed 852d67b569
01d6c916d6 90979cbecb 20a7dd4a7f ba19333ad7 6eafa57c68 8c450f3457
0137bc9697 e4ccde49f7 0efe6c91e8 a62d95c8fa ba13f5c0c4 bd50d6961d
fbb9f4188f ae91aa3869 4cccbecf54 0e74b2ee6b f7cab16fb6 bfb569e894
ce39ef2360 0c3108bca1 28bf6de36c 3de23f1c82 198b7a6890 f9fe9cc204
5d09e2e821 9914d31d6b add5078f43 68a66ff03d 9cac577b7f cc9b7d62df
27d0620cac 6a353e0941 73146e844e 07ab6191d0 4a672e2ed5 f869f99144
2493fd6a1a 64c0ca77bf 0bdfa32cd0 8f69109689 5abba947d8 12bc9d7206
a5c21d3432 6bfe924234 3cc3701465 71c7241fe1 ae6cb122c8 6fba21d9ad
7e93d1f767 e08aa76085 e5bbeb738e 404a189c87 26dd8ea18e 80f4611b0a
a5b69d9be5 927ac511c8 07e0ef8a2c 1cf5d88507 1a33257314 6c7b99fbbc
29e0bad3ea 67a336bc46 40f2572b22 cdb40a91fa 3dea02a981 4b371726c2
93bda77554 47ccfde156 782a2e1b1e 5c600ebc92 b07b15cf42 910b259ce1
3d75b8471c f5cc0cba40 449078be8e 690b537e9f af49b36d20 9867395b5b
cb2a4ec6e9 7e80c1bf73 24add7d4f8 e11deec96f feba2e6f17 c18c70afdb
aa788dad04 42905045b4 ccaf327baa 6f813d9e66 2b71d97449 9f61f0793d
cc553a60d5 3cd57eee63 6f6c32f989 15c3956eda ce19c4e5ff c157e64f28
@@ -3,13 +3,13 @@
"isRoot": true,
"tools": {
"dotnet-consolidate": {
"version": "4.2.0",
"version": "2.0.0",
"commands": [
"dotnet-consolidate"
]
},
"xamlstyler.console": {
"version": "3.2501.8",
"version": "3.2404.2",
"commands": [
"xstyler"
]
@@ -2,20 +2,17 @@
# Reference: https://github.com/microsoft/PowerToys/blob/main/doc/devdocs/readme.md#compiling-powertoys
properties:
resources:
- resource: Microsoft.Windows.Settings/WindowsSettings
- resource: Microsoft.Windows.Developer/DeveloperMode
directives:
description: Enable Developer Mode
allowPrerelease: true
# Requires elevation for the set operation
securityContext: elevated
settings:
DeveloperMode: true
Ensure: Present
- resource: Microsoft.WinGet.DSC/WinGetPackage
id: vsPackage
directives:
description: Install Visual Studio 2022 Community (Any edition will work)
# Requires elevation for the set operation
securityContext: elevated
allowPrerelease: true
settings:
id: Microsoft.VisualStudio.2022.Community
source: winget
@@ -25,8 +22,6 @@ properties:
directives:
description: Install required VS workloads
allowPrerelease: true
# Requires elevation for the get and set operations
securityContext: elevated
settings:
productId: Microsoft.VisualStudio.Product.Community
channelId: VisualStudio.17.Release
@@ -2,20 +2,17 @@
# Reference: https://github.com/microsoft/PowerToys/blob/main/doc/devdocs/readme.md#compiling-powertoys
properties:
resources:
- resource: Microsoft.Windows.Settings/WindowsSettings
- resource: Microsoft.Windows.Developer/DeveloperMode
directives:
description: Enable Developer Mode
allowPrerelease: true
# Requires elevation for the set operation
securityContext: elevated
settings:
DeveloperMode: true
Ensure: Present
- resource: Microsoft.WinGet.DSC/WinGetPackage
id: vsPackage
directives:
description: Install Visual Studio 2022 Enterprise (Any edition will work)
# Requires elevation for the set operation
securityContext: elevated
allowPrerelease: true
settings:
id: Microsoft.VisualStudio.2022.Enterprise
source: winget
@@ -25,8 +22,6 @@ properties:
directives:
description: Install required VS workloads
allowPrerelease: true
# Requires elevation for the get and set operations
securityContext: elevated
settings:
productId: Microsoft.VisualStudio.Product.Enterprise
channelId: VisualStudio.17.Release
@@ -2,20 +2,17 @@
# Reference: https://github.com/microsoft/PowerToys/blob/main/doc/devdocs/readme.md#compiling-powertoys
properties:
resources:
- resource: Microsoft.Windows.Settings/WindowsSettings
- resource: Microsoft.Windows.Developer/DeveloperMode
directives:
description: Enable Developer Mode
allowPrerelease: true
# Requires elevation for the set operation
securityContext: elevated
settings:
DeveloperMode: true
Ensure: Present
- resource: Microsoft.WinGet.DSC/WinGetPackage
id: vsPackage
directives:
description: Install Visual Studio 2022 Professional (Any edition will work)
# Requires elevation for the set operation
securityContext: elevated
allowPrerelease: true
settings:
id: Microsoft.VisualStudio.2022.Professional
source: winget
@@ -25,8 +22,6 @@ properties:
directives:
description: Install required VS workloads
allowPrerelease: true
# Requires elevation for the get and set operations
securityContext: elevated
settings:
productId: Microsoft.VisualStudio.Product.Professional
channelId: VisualStudio.17.Release
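The three hunks above edit WinGet DSC configuration files (one per Visual Studio edition). Because the side-by-side view flattens the YAML, here is a minimal sketch of how one such resource entry nests when assembled into a valid configuration; the `configurationVersion` value is an assumption, while the resource name, directive fields, and settings are the ones shown above.

```yaml
# Minimal WinGet DSC configuration sketch (configurationVersion assumed);
# a file of this shape is applied with `winget configure`.
properties:
  configurationVersion: 0.2.0
  resources:
    - resource: Microsoft.WinGet.DSC/WinGetPackage
      id: vsPackage
      directives:
        description: Install Visual Studio 2022 Community (Any edition will work)
        # Requires elevation for the set operation
        securityContext: elevated
        allowPrerelease: true
      settings:
        id: Microsoft.VisualStudio.2022.Community
        source: winget
```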
14 .github/CODEOWNERS vendored
@@ -1,16 +1,16 @@
# Protect `.github` folder except the spell-check rules inside it. (The exception happens by not defining any owner user or group for the path.)
# Protection of the spell-check rules makes no sense as it needs to be changed in nearly every PR.
/.github/ @microsoft/powertoys-code-owners
/.github/ @crutkas @DHowett @ethanfangg
/.github/actions/spell-check/

# locking down pipeline folder
/.pipelines/ @microsoft/powertoys-code-owners
/.pipelines/ @crutkas @DHowett @ethanfangg

# locking down nuget config
nuget.config @microsoft/powertoys-code-owners
packages.config @microsoft/powertoys-code-owners
nuget.config @crutkas @DHowett @ethanfangg
packages.config @crutkas @DHowett @ethanfangg

# locking down files that should not change
LICENSE @microsoft/powertoys-code-owners
SECURITY.md @microsoft/powertoys-code-owners
CODE_OF_CONDUCT.md @microsoft/powertoys-code-owners
LICENSE @crutkas @DHowett @ethanfangg
SECURITY.md @crutkas @DHowett @ethanfangg
CODE_OF_CONDUCT.md @crutkas @DHowett @ethanfangg
46 .github/ISSUE_TEMPLATE/bug_report.yml vendored
@@ -7,27 +7,18 @@ body:
- type: markdown
attributes:
value: Please make sure to [search for existing issues](https://github.com/microsoft/PowerToys/issues) before filing a new one!
- type: markdown
attributes:
value: |
We are aware of the following high-volume issues and are actively working on them. Please check if your issue is one of these before filing a new bug report:
* **PowerToys Run crash related to "Desktop composition is disabled"**: This may appear as `COMException: 0x80263001`. For more details, see issue [#31226](https://github.com/microsoft/PowerToys/issues/31226).
* **PowerToys Run crash with `COMException (0xD0000701)`**: For more details, see issue [#30769](https://github.com/microsoft/PowerToys/issues/30769).
* **PowerToys Run crash with a "Cyclic reference" error**: This `System.InvalidOperationException` is detailed in issue [#36451](https://github.com/microsoft/PowerToys/issues/36451).
- id: version
type: input
- type: input
attributes:
label: Microsoft PowerToys version
placeholder: X.XX.X
description: Hover over the system tray icon or look at Settings
placeholder: 0.70.0
description: Hover over system tray icon or look at Settings
validations:
required: true

- id: installed
type: dropdown
- type: dropdown
attributes:
label: Installation method
description: How / where was PowerToys installed from?
description: How / Where was PowerToys installed from?
multiple: true
options:
- GitHub
@@ -41,6 +32,14 @@ body:
validations:
required: true

- type: dropdown
attributes:
label: Running as admin
description: Are you running PowerToys as Admin?
options:
- "Yes"
- "No"

- type: dropdown
attributes:
label: Area(s) with issue?
@@ -53,7 +52,6 @@ body:
- Awake
- ColorPicker
- Command not found
- Command Palette
- Crop and Lock
- Environment Variables
- FancyZones
@@ -65,10 +63,8 @@ body:
- Image Resizer
- Installer
- Keyboard Manager
- Light Switch
- Mouse Utilities
- Mouse Without Borders
- New+
- Peek
- PowerRename
- PowerToys Run
@@ -79,9 +75,9 @@ body:
- Shortcut Guide
- System tray interaction
- TextExtractor
- Video Conference Mute
- Workspaces
- Welcome / PowerToys Tour window
- ZoomIt
validations:
required: true

@@ -107,19 +103,6 @@ body:
validations:
required: false

- id: additionalInfo
type: textarea
attributes:
label: Additional Information
placeholder: |
OS version
.Net version
System Language
User or System Installation
Running as admin
validations:
required: false

- type: textarea
attributes:
label: Other Software
@@ -130,4 +113,3 @@ body:
My Cool Application v0.3 (include a code snippet if it would help!)
validations:
required: false
@@ -6,7 +6,7 @@ labels:
body:
- type: textarea
attributes:
label: Describe the requested doc changes
label: Provide a description of requested docs changes
placeholder: Briefly describe which document needs to be corrected and why.
validations:
required: true
3 .github/ISSUE_TEMPLATE/feature_request.yml vendored
@@ -1,6 +1,5 @@
name: "⭐ Feature or enhancement request"
description: Propose something new.
type: Feature
labels:
- Needs-Triage
body:
@@ -13,7 +12,7 @@ body:
- type: textarea
attributes:
label: Scenario when this would be used?
placeholder: What is the scenario this would be used in? Why is this important to your workflow as a power user?
placeholder: What is the scenario this would be used? Why is this important to your workflow as a power user?
validations:
required: true
- type: textarea
9 .github/ISSUE_TEMPLATE/translation_issue.yml vendored
@@ -1,6 +1,5 @@
name: "🌐 Localization/Translation issue"
description: Report incorrect translations.
type: Bug
labels:
- Issue-Bug
- Area-Localization
@@ -14,7 +13,7 @@ body:
attributes:
label: Microsoft PowerToys version
placeholder: 0.70.0
description: Hover over the system tray icon or look at Settings
description: Hover over system tray icon or look at Settings
validations:
required: true
- type: dropdown
@@ -38,10 +37,8 @@ body:
- Image Resizer
- Installer
- Keyboard Manager
- Light Switch
- Mouse Utilities
- Mouse Without Borders
- New+
- Peek
- PowerRename
- PowerToys Run
@@ -52,9 +49,9 @@ body:
- Shortcut Guide
- System tray interaction
- TextExtractor
- Video Conference Mute
- Workspaces
- Welcome / PowerToys Tour window
- ZoomIt
validations:
required: true
- type: input
@@ -66,7 +63,7 @@ body:
- type: textarea
attributes:
label: ❌ Actual phrase(s)
placeholder: What is there? Please include a screenshot, as that is extremely helpful.
placeholder: What is there? Please include a screenshot as that is extremely helpful.
validations:
required: true
- type: textarea
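The bug-report, documentation, feature-request, and translation templates above are GitHub issue forms. As with the DSC files, the flattened diff hides the nesting, so here is a minimal sketch of one dropdown entry built from fields that appear above; the option list is abbreviated and purely illustrative.

```yaml
# Minimal GitHub issue-forms dropdown sketch (options abbreviated).
body:
  - type: dropdown
    attributes:
      label: Area(s) with issue?
      multiple: true
      options:
        - FancyZones
        - PowerToys Run
    validations:
      required: true
```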
118 .github/actions/spell-check/allow/code.txt vendored
@@ -21,22 +21,16 @@ Pbgra
|
||||
WHITEONBLACK
|
||||
|
||||
|
||||
# COUNTRIES
|
||||
RUS
|
||||
|
||||
# FILES
|
||||
|
||||
AYUV
|
||||
bak
|
||||
Bcl
|
||||
bgcode
|
||||
Deflatealgorithm
|
||||
exa
|
||||
exabyte
|
||||
Gbits
|
||||
Gbps
|
||||
gcode
|
||||
Heatshrink
|
||||
Mbits
|
||||
MBs
|
||||
mkv
|
||||
@@ -56,8 +50,6 @@ YVU
|
||||
YVYU
|
||||
zipfolder
|
||||
CODEOWNERS
|
||||
VNext
|
||||
vnext
|
||||
|
||||
# FONTS
|
||||
|
||||
@@ -83,44 +75,18 @@ sinclairinat
|
||||
stylecop
|
||||
uipi
|
||||
yinwang
|
||||
myaccess
|
||||
onmicrosoft
|
||||
aep
|
||||
epsf
|
||||
howto
|
||||
onefuzzconfig
|
||||
oip
|
||||
onefuzzingestionpreparationtool
|
||||
OTP
|
||||
Yubi
|
||||
Yubico
|
||||
Perplexity
|
||||
Groq
|
||||
svgl
|
||||
|
||||
|
||||
# KEYS
|
||||
|
||||
altdown
|
||||
BUTTONUP
|
||||
bafunctions
|
||||
Baf
|
||||
Bitness
|
||||
BUILDARCHSHORT
|
||||
CTRLALTDEL
|
||||
Ctrls
|
||||
CSilent
|
||||
CBal
|
||||
CREATEBAFUNCTIONS
|
||||
CPrereq
|
||||
dirutil
|
||||
DUtil
|
||||
Editbox
|
||||
EXSEL
|
||||
HOLDENTER
|
||||
HOLDESC
|
||||
HOLDSPACE
|
||||
HOLDBACKSPACE
|
||||
IDIGNORE
|
||||
KBDLLHOOKSTRUCT
|
||||
keyevent
|
||||
LAlt
|
||||
@@ -132,16 +98,12 @@ LCONTROL
|
||||
LCtrl
|
||||
LEFTDOWN
|
||||
LEFTUP
|
||||
locutil
|
||||
logutil
|
||||
msimg
|
||||
MBUTTON
|
||||
MBUTTONDBLCLK
|
||||
MBUTTONDOWN
|
||||
MBUTTONUP
|
||||
MIDDLEDOWN
|
||||
MIDDLEUP
|
||||
memutil
|
||||
NCRBUTTONDBLCLK
|
||||
NCRBUTTONDOWN
|
||||
NCRBUTTONUP
|
||||
@@ -154,18 +116,8 @@ RCONTROL
|
||||
RCtrl
|
||||
RIGHTDOWN
|
||||
RIGHTUP
|
||||
Richedit
|
||||
rgwz
|
||||
resrutil
|
||||
srd
|
||||
scz
|
||||
shelutil
|
||||
thmutil
|
||||
uriutil
|
||||
VKTAB
|
||||
wcautil
|
||||
winkey
|
||||
wininet
|
||||
WMKEYDOWN
|
||||
WMKEYUP
|
||||
WMSYSKEYDOWN
|
||||
@@ -175,10 +127,6 @@ XBUTTONDBLCLK
|
||||
XBUTTONDOWN
|
||||
XBUTTONUP
|
||||
XDOWN
|
||||
xmlutil
|
||||
|
||||
# Prefix
|
||||
pcs
|
||||
|
||||
# User32.SYSTEM_METRICS_INDEX.cs
|
||||
|
||||
@@ -267,78 +215,14 @@ SWAPBUTTON
|
||||
SYSTEMDOCKED
|
||||
TABLETPC
|
||||
|
||||
# Units
|
||||
nmi
|
||||
|
||||
# MATH
|
||||
|
||||
artanh
|
||||
arsinh
|
||||
arcosh
|
||||
roundf
|
||||
|
||||
# Linux
|
||||
|
||||
dbus
|
||||
anypass
|
||||
github
|
||||
gpg
|
||||
https
|
||||
ssh
|
||||
ubuntu
|
||||
workarounds
|
||||
|
||||
# For upgrade to check-spelling v0.0.24
|
||||
pwa
|
||||
|
||||
# .NET
|
||||
|
||||
AOT
|
||||
Aot
|
||||
|
||||
# YML
|
||||
onefuzz
|
||||
|
||||
# NameInCode
|
||||
leilzh
|
||||
mengyuanchen
|
||||
|
||||
# DllName
|
||||
testhost
|
||||
Testably
|
||||
|
||||
#Tools
|
||||
OIP
|
||||
xef
|
||||
xes
|
||||
PACKAGEVERSIONNUMBER
|
||||
APPXMANIFESTVERSION
|
||||
|
||||
# MRU lists
|
||||
CACHEWRITE
|
||||
MRUCMPPROC
|
||||
MRUINFO
|
||||
REGSTR
|
||||
|
||||
# Misc Win32 APIs and PInvokes
|
||||
INVOKEIDLIST
|
||||
|
||||
# PowerRename metadata pattern abbreviations (used in tests and regex patterns)
|
||||
DDDD
|
||||
FFF
|
||||
HHH
|
||||
riday
|
||||
YYY
|
||||
|
||||
# Unicode
|
||||
precomposed
|
||||
|
||||
# GitHub issue/PR commands
|
||||
azp
|
||||
feedbackhub
|
||||
needinfo
|
||||
reportbug
|
||||
|
||||
#ffmpeg
|
||||
crf
|
||||
nostdin
|
||||
|
||||
69 .github/actions/spell-check/allow/names.txt vendored
@@ -23,17 +23,16 @@ registrypreview
|
||||
rooler
|
||||
scoobe
|
||||
shortcutguide
|
||||
videoconference
|
||||
|
||||
|
||||
# USERS
|
||||
|
||||
# 8LWXpg is user name but user folder causes a flag
|
||||
LWXpg
|
||||
8LWXpg
|
||||
Adoumie
|
||||
Advaith
|
||||
alekhyareddy
|
||||
Aleks
|
||||
amihaiuc
|
||||
angularsen
|
||||
Anirudha
|
||||
arjunbalgovind
|
||||
@@ -44,10 +43,7 @@ Bartosz
|
||||
betadele
|
||||
betsegaw
|
||||
bricelam
|
||||
bsky
|
||||
CCcat
|
||||
chemwolf
|
||||
chenmy
|
||||
Chinh
|
||||
chrdavis
|
||||
Chrzan
|
||||
@@ -55,76 +51,54 @@ clayton
|
||||
Coplen
|
||||
craigloewen
|
||||
crutkas
|
||||
Chubercik
|
||||
damienleroy
|
||||
daverayment
|
||||
davidegiacometti
|
||||
debian
|
||||
Deibisu
|
||||
Deibisu
|
||||
Delimarsky
|
||||
Deondre
|
||||
DHowett
|
||||
ductdo
|
||||
Essey
|
||||
ethanfangg
|
||||
Feng
|
||||
ferraridavide
|
||||
foxmsft
|
||||
frankychen
|
||||
Gaarden
|
||||
gaardmark
|
||||
gabime
|
||||
Galaxi
|
||||
Garside
|
||||
Gershaft
|
||||
Giordani
|
||||
Gleb
|
||||
Gokce
|
||||
gordon
|
||||
Griese
|
||||
grzhan
|
||||
Guo
|
||||
hanselman
|
||||
haoliuu
|
||||
Harmath
|
||||
Heiko
|
||||
Hemmerlein
|
||||
hlaueriksson
|
||||
Horvalds
|
||||
Howett
|
||||
hotkidfamily
|
||||
htcfreek
|
||||
Huynh
|
||||
Ionut
|
||||
jamrobot
|
||||
Jaswal
|
||||
Jaylyn
|
||||
jefflord
|
||||
Jeremic
|
||||
Jordi
|
||||
jyuwono
|
||||
kai
|
||||
Kairu
|
||||
Kairu
|
||||
Kamra
|
||||
Kantarci
|
||||
Karthick
|
||||
kaylacinnamon
|
||||
kevinguo
|
||||
Khmyznikov
|
||||
Krigun
|
||||
Lambson
|
||||
Laute
|
||||
laviusmotileng
|
||||
Leilei
|
||||
Loewen
|
||||
Luecking
|
||||
Mahalingam
|
||||
Markovic
|
||||
martinchrzan
|
||||
martinmoene
|
||||
Melman
|
||||
Mengyuan
|
||||
Mihaiuc
|
||||
Mikhayelyan
|
||||
msft
|
||||
Mykhailo
|
||||
@@ -133,39 +107,30 @@ Naro
|
||||
nathancartlidge
|
||||
Nemeth
|
||||
nielslaute
|
||||
Noraa
|
||||
noraajunker
|
||||
oldnewthing
|
||||
onegreatworld
|
||||
palenshus
|
||||
pedrolamas
|
||||
Peiyao
|
||||
peteblois
|
||||
phoboslab
|
||||
Ponten
|
||||
Pooja
|
||||
Pylyp
|
||||
Qingpeng
|
||||
quachpas
|
||||
Quriz
|
||||
randyrants
|
||||
rayment
|
||||
ricardosantos
|
||||
riri
|
||||
riri
|
||||
ritchielawrence
|
||||
robmikh
|
||||
ruslanlap
|
||||
Russinovich
|
||||
Rutkas
|
||||
ryanbodrug
|
||||
saahmedm
|
||||
sachaple
|
||||
Sameerjs
|
||||
Santossio
|
||||
Schoen
|
||||
Sekan
|
||||
Seraphima
|
||||
Shmuelie
|
||||
skttl
|
||||
somil
|
||||
Soref
|
||||
@@ -176,43 +141,24 @@ Tadele
|
||||
talynone
|
||||
Taras
|
||||
TBM
|
||||
Teutsch
|
||||
tilovell
|
||||
Triet
|
||||
urnotdfs
|
||||
vednig
|
||||
waaverecords
|
||||
wang
|
||||
Whuihuan
|
||||
Xiaofeng
|
||||
Xpg
|
||||
Yaqing
|
||||
yaqingmi
|
||||
ycv
|
||||
yeelam
|
||||
Yuniardi
|
||||
yuyoyuppe
|
||||
Zeol
|
||||
Zhao
|
||||
Zhaopeng
|
||||
zhaopy
|
||||
zhaoqpcn
|
||||
Zoltan
|
||||
Zykova
|
||||
|
||||
# OTHERS
|
||||
|
||||
Bilibili
|
||||
BVID
|
||||
capturevideosample
|
||||
cmdow
|
||||
Controlz
|
||||
cortana
|
||||
devhints
|
||||
dlnilsson
|
||||
fancymouse
|
||||
firefox
|
||||
fudan
|
||||
gpt
|
||||
Inkscape
|
||||
Markdig
|
||||
@@ -226,11 +172,8 @@ openai
|
||||
Quickime
|
||||
regedit
|
||||
roslyn
|
||||
Skia
|
||||
Spotify
|
||||
tldr
|
||||
Vanara
|
||||
wangyi
|
||||
WEX
|
||||
windowwalker
|
||||
winui
|
||||
@@ -238,9 +181,7 @@ winuiex
|
||||
wix
|
||||
wordpad
|
||||
WWL
|
||||
wyhash
|
||||
xamlstyler
|
||||
Xavalon
|
||||
Xbox
|
||||
Youdao
|
||||
zadjii
|
||||
|
||||
236 .github/actions/spell-check/candidate.patterns vendored
@@ -1,9 +1,3 @@
|
||||
# D2D
|
||||
#D?2D
|
||||
|
||||
# Repeated letters
|
||||
\b([a-z])\g{-1}{2,}\b
|
||||
|
||||
# marker to ignore all code on line
|
||||
^.*/\* #no-spell-check-line \*/.*$
|
||||
# marker to ignore all code on line
|
||||
@@ -13,20 +7,14 @@
|
||||
# cspell inline
|
||||
^.*\b[Cc][Ss][Pp][Ee][Ll]{2}:\s*[Dd][Ii][Ss][Aa][Bb][Ll][Ee]-[Ll][Ii][Nn][Ee]\b
|
||||
|
||||
# copyright
|
||||
Copyright (?:\([Cc]\)|)(?:[-\d, ]|and)+(?: [A-Z][a-z]+ [A-Z][a-z]+,?)+
|
||||
|
||||
# patch hunk comments
|
||||
^@@ -\d+(?:,\d+|) \+\d+(?:,\d+|) @@ .*
|
||||
^\@\@ -\d+(?:,\d+|) \+\d+(?:,\d+|) \@\@ .*
|
||||
# git index header
|
||||
index (?:[0-9a-z]{7,40},|)[0-9a-z]{7,40}\.\.[0-9a-z]{7,40}
|
||||
|
||||
# file permissions
|
||||
['"`\s][-bcdLlpsw](?:[-r][-w][-Ssx]){2}[-r][-w][-SsTtx]\+?['"`\s]
|
||||
|
||||
# css fonts
|
||||
\bfont(?:-family|):[^;}]+
|
||||
|
||||
# css url wrappings
|
||||
\burl\([^)]+\)
|
||||
|
||||
@@ -38,13 +26,13 @@ index (?:[0-9a-z]{7,40},|)[0-9a-z]{7,40}\.\.[0-9a-z]{7,40}
|
||||
# data url in quotes
|
||||
([`'"])data:(?:[^ `'"].*?|)(?:[A-Z]{3,}|[A-Z][a-z]{2,}|[a-z]{3,}).*\g{-1}
|
||||
# data url
|
||||
\bdata:[-a-zA-Z=;:/0-9+_]*,\S*
|
||||
data:[-a-zA-Z=;:/0-9+]*,\S*
|
||||
|
||||
# https/http/file urls
|
||||
#(?:\b(?:https?|ftp|file)://)[-A-Za-z0-9+&@#/*%?=~_|!:,.;]+[-A-Za-z0-9+&@#/*%=~_|]
|
||||
#(?:\b(?:https?|ftp|file)://)[-A-Za-z0-9+&@#/%?=~_|!:,.;]+[-A-Za-z0-9+&@#/%=~_|]
|
||||
|
||||
# mailto urls
|
||||
#mailto:[-a-zA-Z=;:/?%&0-9+@._]{3,}
|
||||
mailto:[-a-zA-Z=;:/?%&0-9+@.]{3,}
|
||||
|
||||
# magnet urls
|
||||
magnet:[?=:\w]+
|
||||
@@ -77,8 +65,6 @@ magnet:[?=:\w]+
|
||||
|
||||
# Amazon
|
||||
\bamazon\.com/[-\w]+/(?:dp/[0-9A-Z]+|)
|
||||
# AWS ARN
|
||||
arn:aws:[-/:\w]+
|
||||
# AWS S3
|
||||
\b\w*\.s3[^.]*\.amazonaws\.com/[-\w/&#%_?:=]*
|
||||
# AWS execute-api
|
||||
@@ -105,8 +91,6 @@ vpc-\w+
|
||||
\bgoogle-analytics\.com/collect.[-0-9a-zA-Z?%=&_.~]*
|
||||
# Google APIs
|
||||
\bgoogleapis\.(?:com|dev)/[a-z]+/(?:v\d+/|)[a-z]+/[-@:./?=\w+|&]+
|
||||
# Google Artifact Registry
|
||||
\.pkg\.dev(?:/[-\w]+)+(?::[-\w]+|)
|
||||
# Google Storage
|
||||
\b[-a-zA-Z0-9.]*\bstorage\d*\.googleapis\.com(?:/\S*|)
|
||||
# Google Calendar
|
||||
@@ -142,8 +126,6 @@ themes\.googleusercontent\.com/static/fonts/[^/\s"]+/v\d+/[^.]+.
|
||||
\bscholar\.google\.com/citations\?user=[A-Za-z0-9_]+
|
||||
# Google Colab Research Drive
|
||||
\bcolab\.research\.google\.com/drive/[-0-9a-zA-Z_?=]*
|
||||
# Google Cloud regions
|
||||
(?:us|(?:north|south)america|europe|asia|australia|me|africa)-(?:north|south|east|west|central){1,2}\d+
|
||||
|
||||
# GitHub SHAs (api)
|
||||
\bapi.github\.com/repos(?:/[^/\s"]+){3}/[0-9a-f]+\b
|
||||
@@ -170,9 +152,6 @@ themes\.googleusercontent\.com/static/fonts/[^/\s"]+/v\d+/[^.]+.
|
||||
# GHSA
|
||||
GHSA(?:-[0-9a-z]{4}){3}
|
||||
|
||||
# GitHub actions
|
||||
\buses:\s+[-\w.]+/[-\w./]+@[-\w.]+
|
||||
|
||||
# GitLab commit
|
||||
\bgitlab\.[^/\s"]*/\S+/\S+/commit/[0-9a-f]{7,16}#[0-9a-f]{40}\b
|
||||
# GitLab merge requests
|
||||
@@ -182,12 +161,6 @@ GHSA(?:-[0-9a-z]{4}){3}
|
||||
# GitLab commits
|
||||
\bgitlab\.[^/\s"]*/(?:[^/\s"]+/){2}commits?/[0-9a-f]+\b
|
||||
|
||||
# #includes
|
||||
^\s*#include\s*(?:<.*?>|".*?")
|
||||
|
||||
# #pragma lib
|
||||
^\s*#pragma comment\(lib, ".*?"\)
|
||||
|
||||
# binance
|
||||
accounts\.binance\.com/[a-z/]*oauth/authorize\?[-0-9a-zA-Z&%]*
|
||||
|
||||
@@ -237,10 +210,10 @@ accounts\.binance\.com/[a-z/]*oauth/authorize\?[-0-9a-zA-Z&%]*
|
||||
# medium link
|
||||
\blink\.medium\.com/[a-zA-Z0-9]+
|
||||
# medium
|
||||
\bmedium\.com/@?[^/\s"]+/[-\w]+
|
||||
\bmedium\.com/\@?[^/\s"]+/[-\w]+
|
||||
|
||||
# microsoft
|
||||
\b(?:https?://|)(?:(?:(?:blogs|download\.visualstudio|docs|msdn2?|research)\.|)microsoft|blogs\.msdn)\.co(?:m|\.\w\w)/[-_a-zA-Z0-9()=./%]*
|
||||
\b(?:https?://|)(?:(?:download\.visualstudio|docs|msdn2?|research)\.microsoft|blogs\.msdn)\.com/[-_a-zA-Z0-9()=./%]*
|
||||
# powerbi
|
||||
\bapp\.powerbi\.com/reportEmbed/[^"' ]*
|
||||
# vs devops
|
||||
@@ -302,7 +275,7 @@ slack://[a-zA-Z0-9?&=]+
|
||||
[0-9a-f]{32}\@o\d+\.ingest\.sentry\.io\b
|
||||
|
||||
# Twitter markdown
|
||||
\[@[^[/\]:]*?\]\(https://twitter.com/[^/\s"')]*(?:/status/\d+(?:\?[-_0-9a-zA-Z&=]*|)|)\)
|
||||
\[\@[^[/\]:]*?\]\(https://twitter.com/[^/\s"')]*(?:/status/\d+(?:\?[-_0-9a-zA-Z&=]*|)|)\)
|
||||
# Twitter hashtag
|
||||
\btwitter\.com/hashtag/[\w?_=&]*
|
||||
# Twitter status
|
||||
@@ -357,7 +330,7 @@ ipfs://[0-9a-zA-Z]{3,}
|
||||
[^"\s]+/gitweb/\S+;h=[0-9a-f]+
|
||||
|
||||
# HyperKitty lists
|
||||
/archives/list/[^@/]+@[^/\s"]*/message/[^/\s"]*/
|
||||
/archives/list/[^@/]+\@[^/\s"]*/message/[^/\s"]*/
|
||||
|
||||
# lists
|
||||
/thread\.html/[^"\s]+
|
||||
@@ -375,7 +348,7 @@ ipfs://[0-9a-zA-Z]{3,}
|
||||
\bopen\.spotify\.com/embed/playlist/\w+
|
||||
|
||||
# Mastodon
|
||||
\bmastodon\.[-a-z.]*/(?:media/|@)[?&=0-9a-zA-Z_]*
|
||||
\bmastodon\.[-a-z.]*/(?:media/|\@)[?&=0-9a-zA-Z_]*
|
||||
|
||||
# scastie
|
||||
\bscastie\.scala-lang\.org/[^/]+/\w+
|
||||
@@ -414,12 +387,12 @@ ipfs://[0-9a-zA-Z]{3,}
|
||||
\bgetopts\s+(?:"[^"]+"|'[^']+')
|
||||
|
||||
# ANSI color codes
|
||||
(?:\\(?:u00|x)1[Bb]|\\03[1-7]|\x1b|\\u\{1[Bb]\})\[\d+(?:;\d+)*m
|
||||
(?:\\(?:u00|x)1[Bb]|\x1b|\\u\{1[Bb]\})\[\d+(?:;\d+|)m
|
||||
|
||||
# URL escaped characters
|
||||
%[0-9A-F][A-F](?=[A-Za-z])
|
||||
\%[0-9A-F][A-F]
|
||||
# lower URL escaped characters
|
||||
%[0-9a-f][a-f](?=[a-z]{2,})
|
||||
\%[0-9a-f][a-f](?=[a-z]{2,})
|
||||
# IPv6
|
||||
\b(?:[0-9a-fA-F]{0,4}:){3,7}[0-9a-fA-F]{0,4}\b
|
||||
# c99 hex digits (not the full format, just one I've seen)
|
||||
@@ -427,7 +400,7 @@ ipfs://[0-9a-zA-Z]{3,}
|
||||
# Punycode
|
||||
\bxn--[-0-9a-z]+
|
||||
# sha
|
||||
sha\d+:[0-9a-f]*?[a-f]{3,}[0-9a-f]*
|
||||
sha\d+:[0-9]*[a-f]{3,}[0-9a-f]*
|
||||
# sha-... -- uses a fancy capture
|
||||
(\\?['"]|")[0-9a-f]{40,}\g{-1}
|
||||
# hex runs
|
||||
@@ -447,17 +420,10 @@ sha\d+:[0-9a-f]*?[a-f]{3,}[0-9a-f]*
|
||||
# pki
|
||||
-----BEGIN.*-----END
|
||||
|
||||
# pki (base64)
|
||||
LS0tLS1CRUdJT.*
|
||||
|
||||
# C# includes
|
||||
^\s*using [^;]+;
|
||||
|
||||
# uuid:
|
||||
\b[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12}\b
|
||||
# hex digits including css/html color classes:
|
||||
(?:[\\0][xX]|\\u\{?|[uU]\+|#x?|%23|&H)[0-9_a-fA-FgGrR]*?[a-fA-FgGrR]{2,}[0-9_a-fA-FgGrR]*(?:[uUlL]{0,3}|[iu]\d+)\b
|
||||
|
||||
(?:[\\0][xX]|\\u|[uU]\+|#x?|\%23)[0-9_a-fA-FgGrR]*?[a-fA-FgGrR]{2,}[0-9_a-fA-FgGrR]*(?:[uUlL]{0,3}|[iu]\d+)\b
|
||||
# integrity
|
||||
integrity=(['"])(?:\s*sha\d+-[-a-zA-Z=;:/0-9+]{40,})+\g{-1}
|
||||
|
||||
@@ -475,47 +441,20 @@ integrity=(['"])(?:\s*sha\d+-[-a-zA-Z=;:/0-9+]{40,})+\g{-1}
|
||||
Name\[[^\]]+\]=.*
|
||||
|
||||
# IServiceProvider / isAThing
|
||||
(?:(?:\b|_|(?<=[a-z]))I|(?:\b|_)(?:nsI|isA))(?=(?:[A-Z][a-z]{2,})+(?:[A-Z\d]|\b))
|
||||
|
||||
# python
|
||||
#\b(?i)py(?!gments|gmy|lon|ramid|ro|th)(?=[a-z]{2,})
|
||||
\b(?:I|isA)(?=(?:[A-Z][a-z]{2,})+\b)
|
||||
|
||||
# crypt
|
||||
(['"])\$2[ayb]\$.{56}\g{-1}
|
||||
|
||||
# apache/old crypt
|
||||
(['"]|)\$+(?:apr|)1\$+.{8}\$+.{22}\g{-1}
|
||||
|
||||
# sha1 hash
|
||||
\{SHA\}[-a-zA-Z=;:/0-9+]{3,}
|
||||
|
||||
# machine learning (?)
|
||||
#\b(?i)ml(?=[a-z]{2,})
|
||||
|
||||
# scrypt / argon
|
||||
\$(?:scrypt|argon\d+[di]*)\$\S+
|
||||
|
||||
# go.sum
|
||||
\bh1:\S+
|
||||
|
||||
# imports
|
||||
^import\s+(?:(?:static|type)\s+|)(?:[\w.]|\{\s*\w*?(?:,\s*(?:\w*|\*))+\s*\})+
|
||||
|
||||
# scala modules
|
||||
#("[^"]+"\s*%%?\s*){2,3}"[^"]+"
|
||||
|
||||
# container images
|
||||
image: [-\w./:@]+
|
||||
|
||||
# Docker images
|
||||
^\s*(?i)FROM\s+\S+:\S+(?:\s+AS\s+\S+|)
|
||||
|
||||
# `docker images` REPOSITORY TAG IMAGE ID CREATED SIZE
|
||||
\s*\S+/\S+\s+\S+\s+[0-9a-f]{8,}\s+\d+\s+(?:hour|day|week)s ago\s+[\d.]+[KMGT]B
|
||||
|
||||
# Intel intrinsics
|
||||
_mm\d*_(?!dd)\w+
|
||||
|
||||
# Input to GitHub JSON
|
||||
content: (['"])[-a-zA-Z=;:/0-9+]*=\g{-1}
|
||||
|
||||
@@ -523,46 +462,36 @@ content: (['"])[-a-zA-Z=;:/0-9+]*=\g{-1}
|
||||
# you'll want to remove the `(?=.*?")` suffix.
|
||||
# The `(?=.*?")` suffix should limit the false positives rate
|
||||
# printf
|
||||
#%(?:(?:(?:hh?|ll?|[jzt])?[diuoxn]|l?[cs]|L?[fega]|p)(?=[a-z]{2,})|(?:X|L?[FEGA])(?=[a-zA-Z]{2,}))(?!%)(?=[_a-zA-Z]+(?!%)\b)(?=.*?['"])
|
||||
|
||||
# Alternative printf
|
||||
# %s
|
||||
%(?:s(?=[a-z]{2,}))(?!%)(?=[_a-zA-Z]+(?!%[^s])\b)(?=.*?['"])
|
||||
#%(?:(?:(?:hh?|ll?|[jzt])?[diuoxn]|l?[cs]|L?[fega]|p)(?=[a-z]{2,})|(?:X|L?[FEGA]|p)(?=[a-zA-Z]{2,}))(?=[_a-zA-Z]+\b)(?!%)(?=.*?['"])
|
||||
|
||||
# Python string prefix / binary prefix
|
||||
# Note that there's a high false positive rate, remove the `?=` and search for the regex to see if the matches seem like reasonable strings
|
||||
(?<!['"])\b(?:B|BR|Br|F|FR|Fr|R|RB|RF|Rb|Rf|U|UR|Ur|b|bR|br|f|fR|fr|r|rB|rF|rb|rf|u|uR|ur)['"](?=[A-Z]{3,}|[A-Z][a-z]{2,}|[a-z]{3,})
|
||||
(?<!')\b(?:B|BR|Br|F|FR|Fr|R|RB|RF|Rb|Rf|U|UR|Ur|b|bR|br|f|fR|fr|r|rB|rF|rb|rf|u|uR|ur)'(?=[A-Z]{3,}|[A-Z][a-z]{2,}|[a-z]{3,})
|
||||
|
||||
# Regular expressions for (P|p)assword
|
||||
\([A-Z]\|[a-z]\)[a-z]+
|
||||
|
||||
# JavaScript regular expressions
|
||||
# javascript test regex
|
||||
/.{3,}/[gim]*\.test\(
|
||||
/.*/[gim]*\.test\(
|
||||
# javascript match regex
|
||||
\.match\(/[^/\s"]{3,}/[gim]*\s*
|
||||
\.match\(/[^/\s"]*/[gim]*\s*
|
||||
# javascript match regex
|
||||
\.match\(/\\[b].{3,}?/[gim]*\s*\)(?:;|$)
|
||||
\.match\(/\\[b].*?/[gim]*\s*\)(?:;|$)
|
||||
# javascript regex
|
||||
^\s*/\\[b].{3,}?/[gim]*\s*(?:\)(?:;|$)|,$)
|
||||
^\s*/\\[b].*/[gim]*\s*(?:\)(?:;|$)|,$)
|
||||
# javascript replace regex
|
||||
\.replace\(/[^/\s"]{3,}/[gim]*\s*,
|
||||
\.replace\(/[^/\s"]*/[gim]*\s*,
|
||||
# assign regex
|
||||
= /[^*].*?(?:[a-z]{3,}|[A-Z]{3,}|[A-Z][a-z]{2,}).*/[gim]*(?=\W|$)
|
||||
= /[^*]*?(?:[a-z]{3,}|[A-Z]{3,}|[A-Z][a-z]{2,}).*/
|
||||
# perl regex test
|
||||
[!=]~ (?:/.*/|m\{.*?\}|m<.*?>|m([|!/@#,;']).*?\g{-1})
|
||||
|
||||
# perl qr regex
|
||||
(?<!\$)\bqr(?:\{.*?\}|<.*?>|\(.*?\)|([|!/@#,;']).*?\g{-1})
|
||||
|
||||
# perl run
|
||||
perl(?:\s+-[a-zA-Z]\w*)+
|
||||
|
||||
# C network byte conversions
|
||||
#(?:\d|\bh)to(?!ken)(?=[a-z])|to(?=[adhiklpun]\()
|
||||
|
||||
# Go regular expressions
|
||||
regexp?\.MustCompile\((?:`[^`]*`|".*"|'.*')\)
|
||||
regexp?\.MustCompile\(`[^`]*`\)
|
||||
|
||||
# regex choice
|
||||
\(\?:[^)]+\|[^)]+\)
|
||||
@@ -574,20 +503,14 @@ regexp?\.MustCompile\((?:`[^`]*`|".*"|'.*')\)
|
||||
sed 's/(?:[^/]*?[a-zA-Z]{3,}[^/]*?/){2}
|
||||
|
||||
# node packages
|
||||
(["'])@[^/'" ]+/[^/'" ]+\g{-1}
|
||||
(["'])\@[^/'" ]+/[^/'" ]+\g{-1}
|
||||
|
||||
# go install
|
||||
go install(?:\s+[a-z]+\.[-@\w/.]+)+
|
||||
|
||||
# pom.xml
|
||||
<(?:group|artifact)Id>.*?<
|
||||
|
||||
# jetbrains schema https://youtrack.jetbrains.com/issue/RSRP-489571
|
||||
urn:shemas-jetbrains-com
|
||||
|
||||
# Debian changelog severity
|
||||
[-\w]+ \(.*\) (?:\w+|baseline|unstable|experimental); urgency=(?:low|medium|high|emergency|critical)\b
|
||||
|
||||
# kubernetes pod status lists
|
||||
# https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#pod-phase
|
||||
\w+(?:-\w+)+\s+\d+/\d+\s+(?:Running|Pending|Succeeded|Failed|Unknown)\s+
|
||||
@@ -595,29 +518,20 @@ urn:shemas-jetbrains-com
|
||||
# kubectl - pods in CrashLoopBackOff
|
||||
\w+-[0-9a-f]+-\w+\s+\d+/\d+\s+CrashLoopBackOff\s+
|
||||
|
||||
# kubernetes applications
|
||||
\.apps/[-\w]+
|
||||
|
||||
# kubernetes object suffix
|
||||
-[0-9a-f]{10}-\w{5}\s
|
||||
|
||||
# kubernetes crd patterns
|
||||
^\s*pattern: .*$
|
||||
|
||||
# posthog secrets
|
||||
([`'"])phc_[^"',]+\g{-1}
|
||||
|
||||
# xcode
|
||||
|
||||
# xcodeproject scenes
|
||||
(?:Controller|destination|(?:first|second)Item|ID|id)="\w{3}-\w{2}-\w{3}"
|
||||
(?:Controller|destination|ID|id)="\w{3}-\w{2}-\w{3}"
|
||||
|
||||
# xcode api botches
|
||||
customObjectInstantitationMethod
|
||||
|
||||
# msvc api botches
|
||||
PrependWithABINamepsace
|
||||
|
||||
# configure flags
|
||||
.* \| --\w{2,}.*?(?=\w+\s\w+)
|
||||
|
||||
@@ -625,42 +539,23 @@ PrependWithABINamepsace
|
||||
\.fa-[-a-z0-9]+
|
||||
|
||||
# bearer auth
|
||||
(['"])[Bb]ear[e][r] .{3,}?\g{-1}
|
||||
|
||||
# bearer auth
|
||||
\b[Bb]ear[e][r]:? [-a-zA-Z=;:/0-9+.]{3,}
|
||||
(['"])Bear[e][r] .*?\g{-1}
|
||||
|
||||
# basic auth
|
||||
(['"])[Bb]asic [-a-zA-Z=;:/0-9+]{3,}\g{-1}
|
||||
|
||||
# basic auth
|
||||
: [Bb]asic [-a-zA-Z=;:/0-9+.]{3,}
|
||||
(['"])Basic [-a-zA-Z=;:/0-9+]{3,}\g{-1}
|
||||
|
||||
# base64 encoded content
|
||||
#([`'"])[-a-zA-Z=;:/0-9+]{3,}=\g{-1}
|
||||
([`'"])[-a-zA-Z=;:/0-9+]+=\g{-1}
|
||||
# base64 encoded content in xml/sgml
|
||||
>[-a-zA-Z=;:/0-9+]{3,}=</
|
||||
>[-a-zA-Z=;:/0-9+]+=</
|
||||
# base64 encoded content, possibly wrapped in mime
|
||||
#(?:^|[\s=;:?])[-a-zA-Z=;:/0-9+]{50,}(?:[\s=;:?]|$)
|
||||
# base64 encoded json
|
||||
\beyJ[-a-zA-Z=;:/0-9+]+
|
||||
# base64 encoded pkcs
|
||||
#\bMII[-a-zA-Z=;:/0-9+]+
|
||||
|
||||
# uuencoded
|
||||
#[!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_]{40,}
|
||||
|
||||
# DNS rr data
|
||||
#(?:\d+\s+){3}(?:[-+/=.\w]{2,}\s*){1,2}
|
||||
(?:^|[\s=;:?])[-a-zA-Z=;:/0-9+]{50,}(?:[\s=;:?]|$)
|
||||
|
||||
# encoded-word
|
||||
=\?[-a-zA-Z0-9"*%]+\?[BQ]\?[^?]{0,75}\?=
|
||||
|
||||
# numerator
|
||||
\bnumer\b(?=.*denom)
|
||||
|
||||
# Time Zones
|
||||
\b(?:Africa|Atlantic|America|Antarctica|Arctic|Asia|Australia|Europe|Indian|Pacific)(?:/[-\w]+)+
|
||||
\b(?:Africa|Atlantic|America|Antarctica|Asia|Australia|Europe|Indian|Pacific)(?:/\w+)+
|
||||
|
||||
# linux kernel info
|
||||
^(?:bugs|flags|Features)\s+:.*
|
||||
@@ -668,22 +563,16 @@ PrependWithABINamepsace
|
||||
# systemd mode
|
||||
systemd.*?running in system mode \([-+].*\)$
|
||||
|
||||
# Lorem
|
||||
# Update Lorem based on your content (requires `ge` and `w` from https://github.com/jsoref/spelling; and `review` from https://github.com/check-spelling/check-spelling/wiki/Looking-for-items-locally )
|
||||
# grep '^[^#].*lorem' .github/actions/spelling/patterns.txt|perl -pne 's/.*i..\?://;s/\).*//' |tr '|' "\n"|sort -f |xargs -n1 ge|perl -pne 's/^[^:]*://'|sort -u|w|sed -e 's/ .*//'|w|review -
|
||||
# Warning, while `(?i)` is very neat and fancy, if you have some binary files that aren't proper unicode, you might run into:
|
||||
# ... Operation "substitution (s///)" returns its argument for non-Unicode code point 0x1C19AE (the code point will vary).
|
||||
# ... You could manually change `(?i)X...` to use `[Xx]...`
|
||||
# ... or you could add the files to your `excludes` file (a version after 0.0.19 should identify the file path)
|
||||
(?:(?:\w|\s|[,.])*\b(?i)(?:amet|consectetur|cursus|dolor|eros|ipsum|lacus|libero|ligula|lorem|magna|neque|nulla|suscipit|tempus)\b(?:\w|\s|[,.])*)
|
||||
## Operation "substitution (s///)" returns its argument for non-Unicode code point 0x1C19AE (the code point will vary).
|
||||
## You could manually change `(?i)X...` to use `[Xx]...`
|
||||
## or you could add the files to your `excludes` file (a version after 0.0.19 should identify the file path)
|
||||
# Lorem
|
||||
(?:\w|\s|[,.])*\b(?i)(?:amet|consectetur|cursus|dolor|eros|ipsum|lacus|libero|ligula|lorem|magna|neque|nulla|suscipit|tempus)\b(?:\w|\s|[,.])*
|
||||
|
||||
# Non-English
|
||||
# Even repositories expecting pure English content can unintentionally have Non-English content... People will occasionally mistakenly enter [homoglyphs](https://en.wikipedia.org/wiki/Homoglyph) which are essentially typos, and using this pattern will mean check-spelling will not complain about them.
|
||||
#
|
||||
# If the content to be checked should be written in English and the only Non-English items will be people's names, then you can consider adding this.
|
||||
#
|
||||
# Alternatively, if you're using check-spelling v0.0.25+, and you would like to _check_ the Non-English content for spelling errors, you can. For information on how to do so, see:
|
||||
# https://docs.check-spelling.dev/Feature:-Configurable-word-characters.html#unicode
|
||||
[a-zA-Z]*[ÀÁÂÃÄÅÆČÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝßàáâãäåæčçèéêëìíîïðñòóôõöøùúûüýÿĀāŁłŃńŅņŒœŚśŠšŜŝŸŽžź][a-zA-Z]{3}[a-zA-ZÀÁÂÃÄÅÆČÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝßàáâãäåæčçèéêëìíîïðñòóôõöøùúûüýÿĀāŁłŃńŅņŒœŚśŠšŜŝŸŽžź]*|[a-zA-Z]{3,}[ÀÁÂÃÄÅÆČÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝßàáâãäåæčçèéêëìíîïðñòóôõöøùúûüýÿĀāŁłŃńŅņŒœŚśŠšŜŝŸŽžź]|[ÀÁÂÃÄÅÆČÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝßàáâãäåæčçèéêëìíîïðñòóôõöøùúûüýÿĀāŁłŃńŅņŒœŚśŠšŜŝŸŽžź][a-zA-Z]{3,}
|
||||
|
||||
# highlighted letters
|
||||
@@ -696,51 +585,27 @@ systemd.*?running in system mode \([-+].*\)$
|
||||
# latex (check-spelling >= 0.0.22)
|
||||
\\\w{2,}\{
|
||||
|
||||
# American Mathematical Society (AMS) / Doxygen
|
||||
TeX/AMS
|
||||
|
||||
# File extensions
|
||||
#\*\.[+\w]+,
|
||||
|
||||
# eslint
|
||||
"varsIgnorePattern": ".+"
|
||||
|
||||
# nolint
|
||||
nolint:\s*[\w,]+
|
||||
|
||||
# Windows short paths
|
||||
[/\\][^/\\]{5,6}~\d{1,2}(?=[/\\])
|
||||
|
||||
# Windows Resources with accelerators
|
||||
\b[A-Z]&[a-z]+\b(?!;)
|
||||
|
||||
# signed off by
|
||||
(?i)Signed-off-by: .*
|
||||
|
||||
# cygwin paths
|
||||
/cygdrive/[a-zA-Z]/(?:Program Files(?: \(.*?\)| ?)(?:/[-+.~\\/()\w ]+)*|[-+.~\\/()\w])+
|
||||
[/\\][^/\\]{5,6}~\d{1,2}[/\\]
|
||||
|
||||
# in check-spelling@v0.0.22+, printf markers aren't automatically consumed
|
||||
# printf markers
|
||||
#(?<!\\)\\[nrt](?=[a-z]{2,})
|
||||
# alternate printf markers if you run into latex and friends
|
||||
# alternate markers if you run into latex and friends
|
||||
#(?<!\\)\\[nrt](?=[a-z]{2,})(?=.*['"`])
|
||||
|
||||
# Markdown anchor links
|
||||
\(#\S*?[a-zA-Z]\S*?\)
|
||||
|
||||
# apache
|
||||
a2(?:en|dis)
|
||||
|
||||
# weak e-tag
|
||||
W/"[^"]+"
|
||||
|
||||
# authors/credits
|
||||
^\*(?: [A-Z](?:\w+|\.)){2,} (?=\[|$)
|
||||
|
||||
# the negative lookahead here is to allow catching 'templatesz' as a misspelling
|
||||
# but to otherwise recognize a Windows path with \templates\foo.template or similar:
|
||||
#\\(?:necessary|r(?:elease|eport|esolve[dr]?|esult)|t(?:arget|emplates?))(?![a-z])
|
||||
#\\(?:necessary|r(?:eport|esolve[dr]?|esult)|t(?:arget|emplates?))(?![a-z])
|
||||
# ignore long runs of a single character:
|
||||
\b([A-Za-z])\g{-1}{3,}\b
|
||||
|
||||
@@ -749,36 +614,23 @@ W/"[^"]+"
|
||||
|
||||
# Compiler flags (Unix, Java/Scala)
|
||||
# Use if you have things like `-Pdocker` and want to treat them as `docker`
|
||||
#(?:^|[\t ,>"'`=(#])-(?:(?:J-|)[DPWXY]|[Llf])(?=[A-Z]{2,}|[A-Z][a-z]|[a-z]{2,})
|
||||
#(?:^|[\t ,>"'`=(])-(?:(?:J-|)[DPWXY]|[Llf])(?=[A-Z]{2,}|[A-Z][a-z]|[a-z]{2,})
|
||||
|
||||
# Compiler flags (Windows / PowerShell)
|
||||
# This is a subset of the more general compiler flags pattern.
|
||||
# It avoids matching `-Path` to prevent it from being treated as `ath`
|
||||
#(?:^|[\t ,"'`=(#])-(?:[DPL](?=[A-Z]{2,})|[WXYlf](?=[A-Z]{2,}|[A-Z][a-z]|[a-z]{2,}))
|
||||
#(?:^|[\t ,"'`=(])-(?:[DPL](?=[A-Z]{2,})|[WXYlf](?=[A-Z]{2,}|[A-Z][a-z]|[a-z]{2,}))
|
||||
|
||||
# Compiler flags (linker)
|
||||
,-B
|
||||
|
||||
# Library prefix
|
||||
# e.g., `lib`+`archive`, `lib`+`raw`, `lib`+`unwind`
|
||||
# (ignores some words that happen to start with `lib`)
|
||||
(?:\b|_)[Ll]ib(?:re(?=office)|)(?!era[lt]|ero|erty|rar(?:i(?:an|es)|y))(?=[a-z])
|
||||
|
||||
# iSCSI iqn (approximate regex)
|
||||
\biqn\.[0-9]{4}-[0-9]{2}(?:[\.-][a-z][a-z0-9]*)*\b
|
||||
|
||||
# WWNN/WWPN (NAA identifiers)
|
||||
\b(?:0x)?10[0-9a-f]{14}\b|\b(?:0x|3)?[25][0-9a-f]{15}\b|\b(?:0x|3)?6[0-9a-f]{31}\b
|
||||
|
||||
# curl arguments
|
||||
\b(?:\\n|)curl(?:\.exe|)(?:\s+-[a-zA-Z]{1,2}\b)*(?:\s+-[a-zA-Z]{3,})(?:\s+-[a-zA-Z]+)*
|
||||
\b(?:\\n|)curl(?:\s+-[a-zA-Z]{1,2}\b)*(?:\s+-[a-zA-Z]{3,})(?:\s+-[a-zA-Z]+)*
|
||||
# set arguments
|
||||
\b(?:bash|sh|set)(?:\s+[-+][abefimouxE]{1,2})*\s+[-+][abefimouxE]{3,}(?:\s+[-+][abefimouxE]+)*
|
||||
\bset(?:\s+-[abefimouxE]{1,2})*\s+-[abefimouxE]{3,}(?:\s+-[abefimouxE]+)*
|
||||
# tar arguments
|
||||
\b(?:\\n|)g?tar(?:\.exe|)(?:(?:\s+--[-a-zA-Z]+|\s+-[a-zA-Z]+|\s[ABGJMOPRSUWZacdfh-pr-xz]+\b)(?:=[^ ]*|))+
|
||||
# tput arguments -- https://man7.org/linux/man-pages/man5/terminfo.5.html -- technically they can be more than 5 chars long...
|
||||
\btput\s+(?:(?:-[SV]|-T\s*\w+)\s+)*\w{3,5}\b
|
||||
# macOS temp folders
|
||||
/var/folders/\w\w/[+\w]+/(?:T|-Caches-)/
|
||||
# github runner temp folders
|
||||
/home/runner/work/_temp/[-_/a-z0-9]+
|
||||
|
||||
75 .github/actions/spell-check/excludes.txt vendored
@@ -1,24 +1,26 @@
|
||||
# See https://github.com/check-spelling/check-spelling/wiki/Configuration-Examples:-excludes
|
||||
(?:^|/)(?i)COPYRIGHT
|
||||
(?:^|/)(?i)LICEN[CS]E
|
||||
(?:^|/)(?i)third[-_]?party/
|
||||
(?:^|/)3rdparty/
|
||||
(?:^|/)generated/
|
||||
(?:^|/)FilePreviewCommon/Assets/Monaco/customLanguages/
|
||||
(?:^|/)FilePreviewCommon/Assets/Monaco/generateLanguagesJson.html
|
||||
(?:^|/)FilePreviewCommon/Assets/Monaco/index.html
|
||||
(?:^|/)FilePreviewCommon/Assets/Monaco/monaco_languages.json
|
||||
(?:^|/)FilePreviewCommon/Assets/Monaco/monacoSpecialLanguages.js
|
||||
(?:^|/)go\.sum$
|
||||
(?:^|/)monacoSRC/
|
||||
(?:^|/)package(?:-lock|)\.json$
|
||||
(?:^|/)Pipfile$
|
||||
(?:^|/)power-rename-ui-flags$
|
||||
(?:^|/)pyproject.toml
|
||||
(?:^|/)requirements(?:-dev|-doc|-test|)\.txt$
|
||||
(?:^|/)sample\.qoi$
|
||||
(?:^|/)timezones\.json$
|
||||
(?:^|/)vendor/
|
||||
(?:^|/)WindowsSettings\.json$
|
||||
(?:^|/|\b)requirements(?:-dev|-doc|-test|)\.txt$
|
||||
(?:|$^ 92.31% - excluded 12/13)/editor/[^/]+$
|
||||
/images/launcher/[^/]+$
|
||||
/TestFiles/
|
||||
[^/]\.cur$
|
||||
[^/]\.gcode$
|
||||
[^/]\.bgcode$
|
||||
[^/]\.rgs$
|
||||
\.a$
|
||||
\.ai$
|
||||
@@ -26,10 +28,11 @@
|
||||
\.avi$
|
||||
\.bmp$
|
||||
\.bz2$
|
||||
\.cert?$|\.crt$
|
||||
\.cer$
|
||||
\.class$
|
||||
\.coveragerc$
|
||||
\.crl$
|
||||
\.crt$
|
||||
\.csr$
|
||||
\.dll$
|
||||
\.docx?$
|
||||
@@ -74,9 +77,7 @@
|
||||
\.qm$
|
||||
\.s$
|
||||
\.sig$
|
||||
\.snk$
|
||||
\.so$
|
||||
\.stl$
|
||||
\.svgz?$
|
||||
\.sys$
|
||||
\.tar$
|
||||
@@ -93,47 +94,33 @@
|
||||
\.xz$
|
||||
\.zip$
|
||||
^\.github/actions/spell-check/
|
||||
^\.github/workflows/spelling\d*\.yml$
|
||||
^\.gitmodules$
|
||||
^\Q.github/workflows/spelling2.yml\E$
|
||||
^\Q.pipelines/ESRPSigning_core.json\E$
|
||||
^\Qdoc/devdocs/localization.md\E$
|
||||
^\Qsrc/modules/MouseUtils/MouseJump.Common/NativeMethods/User32/UI/WindowsAndMessaging/User32.SYSTEM_METRICS_INDEX.cs\E$
|
||||
^\Q.pipelines/sdl.gdnbaselines\E$
|
||||
^\Qinstaller/PowerToysSetup/Settings.wxs\E$
|
||||
^\Qsrc/common/FilePreviewCommon/Assets/Monaco/monaco_languages.json\E$
|
||||
^\Qsrc/common/ManagedCommon/ColorFormatHelper.cs\E$
|
||||
^\Qsrc/common/notifications/BackgroundActivatorDLL/cpp.hint\E$
|
||||
^\Qsrc/modules/colorPicker/ColorPickerUI/Assets/ColorPicker/colorPicker.cur\E$
|
||||
^\Qsrc/modules/colorPicker/ColorPickerUI/Shaders/GridShader.cso\E$
|
||||
^\Qsrc/modules/MouseUtils/MouseJumpUI/MainForm.resx\E$
|
||||
^\Qsrc/modules/MouseUtils/MouseJumpUI/NativeMethods/User32/UI/WindowsAndMessaging/User32.SYSTEM_METRICS_INDEX.cs\E$
|
||||
^\Qsrc/modules/MouseWithoutBorders/App/Form/frmAbout.cs\E$
|
||||
^\Qsrc/modules/MouseWithoutBorders/App/Form/frmInputCallback.resx\E$
|
||||
^\Qsrc/modules/MouseWithoutBorders/App/Form/frmLogon.resx\E$
|
||||
^\Qsrc/modules/MouseWithoutBorders/App/Form/frmMatrix.resx\E$
|
||||
^\Qsrc/modules/MouseWithoutBorders/App/Form/frmScreen.resx\E$
|
||||
^\Qsrc/modules/MouseWithoutBorders/ModuleInterface/generateSecurityDescriptor.h\E$
|
||||
^\Qsrc/modules/peek/Peek.Common/NativeMethods.txt\E$
|
||||
^\Qsrc/modules/previewpane/SvgPreviewHandler/SvgHTMLPreviewGenerator.cs\E$
|
||||
^\Qsrc/modules/previewpane/UnitTests-StlThumbnailProvider/HelperFiles/sample.stl\E$
|
||||
^\Qtools/project_template/ModuleTemplate/resource.h\E$
|
||||
^doc/devdocs/akaLinks\.md$
|
||||
^NOTICE\.md$
|
||||
^src/common/CalculatorEngineCommon/exprtk\.hpp$
|
||||
^src/common/ManagedCommon/ColorFormatHelper\.cs$
|
||||
^src/common/notifications/BackgroundActivatorDLL/cpp\.hint$
|
||||
^src/common/sysinternals/Eula/
|
||||
^src/modules/cmdpal/doc/initial-sdk-spec/list-elements-mock-002\.pdn$
|
||||
^src/modules/cmdpal/ext/SamplePagesExtension/Pages/SampleMarkdownImagesPage\.cs$
|
||||
^src/modules/colorPicker/ColorPickerUI/Shaders/GridShader\.cso$
|
||||
^src/modules/launcher/Plugins/Microsoft\.PowerToys\.Run\.Plugin\.TimeDate/Properties/
|
||||
^src/modules/MouseUtils/MouseJumpUI/MainForm\.resx$
|
||||
^src/modules/MouseWithoutBorders/App/.*/NativeMethods\.cs$
|
||||
^src/modules/MouseWithoutBorders/App/Form/.*\.Designer\.cs$
|
||||
^src/modules/MouseWithoutBorders/App/Form/.*\.resx$
|
||||
^src/modules/MouseWithoutBorders/App/Form/frmAbout\.cs$
|
||||
^src/modules/MouseWithoutBorders/App/Form/frmInputCallback\.resx$
|
||||
^src/modules/MouseWithoutBorders/App/Form/frmLogon\.resx$
|
||||
^src/modules/MouseWithoutBorders/App/Form/frmMatrix\.resx$
|
||||
^src/modules/MouseWithoutBorders/App/Form/frmMessage\.resx$
|
||||
^src/modules/MouseWithoutBorders/App/Form/frmMouseCursor\.resx$
|
||||
^src/modules/MouseWithoutBorders/App/Form/frmScreen\.resx$
|
||||
^src/modules/MouseWithoutBorders/App/Helper/.*\.resx$
|
||||
^src/modules/MouseWithoutBorders/ModuleInterface/generateSecurityDescriptor\.h$
|
||||
^src/modules/peek/Peek.Common/NativeMethods\.txt$
|
||||
^src/modules/peek/Peek.UITests/TestAssets/4\.qoi$
|
||||
^src/modules/powerrename/PowerRenameUITest/testItems/folder1/testCase2\.txt$
|
||||
^src/modules/powerrename/PowerRenameUITest/testItems/folder2/SpecialCase\.txt$
|
||||
^src/modules/powerrename/PowerRenameUITest/testItems/testCase1\.txt$
|
||||
^src/modules/previewpane/SvgPreviewHandler/SvgHTMLPreviewGenerator\.cs$
|
||||
^src/modules/previewpane/UnitTests-MarkdownPreviewHandler/HelperFiles/MarkdownWithHTMLImageTag\.txt$
|
||||
^src/modules/registrypreview/RegistryPreviewUILib/Controls/HexBox/.*$
|
||||
^src/modules/ZoomIt/ZoomIt/ZoomIt\.idc$
|
||||
^src/Monaco/
|
||||
^tools/project_template/ModuleTemplate/resource\.h$
|
||||
^src/modules/previewpane/UnitTests-MarkdownPreviewHandler/HelperFiles/MarkdownWithHTMLImageTag.txt$
|
||||
^tools/Verification scripts/Check preview handler registration\.ps1$
|
||||
ignore$
|
||||
^src/modules/registrypreview/RegistryPreviewUILib/Controls/HexBox/.*$
|
||||
^src/common/CalculatorEngineCommon/exprtk\.hpp$
|
||||
src/modules/cmdpal/ext/SamplePagesExtension/Pages/SampleMarkdownImagesPage.cs
|
||||
|
||||
1023 .github/actions/spell-check/expect.txt vendored
File diff suppressed because it is too large
551 .github/actions/spell-check/line_forbidden.patterns vendored
@@ -1,570 +1,119 @@
|
||||
# reject `m_data` as VxWorks defined it and that breaks things if it's used elsewhere
|
||||
# see [fprime](https://github.com/nasa/fprime/commit/d589f0a25c59ea9a800d851ea84c2f5df02fb529)
|
||||
# and [Qt](https://github.com/qtproject/qt-solutions/blame/fb7bc42bfcc578ff3fa3b9ca21a41e96eb37c1c7/qtscriptclassic/src/qscriptbuffer_p.h#L46)
|
||||
#\bm_data\b
|
||||
# \bm_data\b
|
||||
|
||||
# Were you debugging using a framework with `fit()`?
|
||||
# If you have a framework that uses `it()` for testing and `fit()` for debugging a specific test,
|
||||
# you might not want to check in code where you skip all the other tests.
|
||||
# you might not want to check in code where you were debugging w/ `fit()`, in which case, you might want
|
||||
# to use this:
|
||||
#\bfit\(
|
||||
|
||||
# English does not use a hyphen between adverbs and nouns
|
||||
# https://twitter.com/nyttypos/status/1894815686192685239
|
||||
(?:^|\s)[A-Z]?[a-z]+ly-(?=[a-z]{3,})(?:[.,?!]?\s|$)
|
||||
|
||||
# Smart quotes should match
|
||||
\s’[^.?!‘’]+’[^.?!‘’]+‘[^.?!‘’]+’|\s‘[^.?!‘’]+’[^.?!‘’]+’[^.?!‘’]+’|\s”[^.?!“”]+”[^.?!“”]+“[^.?!“”]+”|\s“[^.?!“”]+”[^.?!“”]+”[^.?!“”]+”
|
||||
|
||||
# Don't use `requires that` + `to be`
|
||||
# https://twitter.com/nyttypos/status/1894816551435641027
|
||||
\brequires that \w+\b[^.]+to be\b
|
||||
|
||||
# A fully parenthetical sentence’s period goes inside the parentheses, not outside.
|
||||
# https://twitter.com/nyttypos/status/1898844061873639490
|
||||
\([A-Z][a-z]{2,}(?: [a-z]+){3,}\)\.\s
|
||||
|
||||
# Complete sentences shouldn't be in the middle of another sentence as a parenthetical.
|
||||
(?<!\.)(?<!\betc)\.\),
|
||||
|
||||
# Complete sentences in parentheticals should not have a space before the period.
|
||||
\s\.\)(?!.*\}\})
|
||||
|
||||
# This probably indicates Mojibake https://en.wikipedia.org/wiki/Mojibake
|
||||
# You probably should try to unbake this content
|
||||
Ã(?:Â[¤¶¥]|[£¢])|Ã
|
||||
|
||||
# Should be `HH:MM:SS`
|
||||
\bHH:SS:MM\b
|
||||
|
||||
# Should be `86400` (seconds in a standard day)
|
||||
\b84600\b(?:.*\bday\b)
|
||||
|
||||
# Should probably be `2006-01-02` (yyyy-mm-dd)
|
||||
# Assuming that the time is being passed to https://go.dev/src/time/format.go
|
||||
\b2006-02-01\b
|
||||
|
||||
# Should probably be `YYYYMMDD`
|
||||
\b[Yy]{4}[Dd]{2}[Mm]{2}(?!.*[Yy]{4}[Dd]{2}[Mm]{2}).*$
|
||||
|
||||
# Should be `a priori` or `and prior`
|
||||
(?i)(?<!posteriori)\sand priori\s
|
||||
|
||||
# Should be `a`
|
||||
\san (?=(?:[b-df-gj-npqtv-xz]|h(?!our|tml|ttp)|r(?!c\b)|s(?!sh|vg))[a-z])
|
||||
|
||||
# Articles generally shouldn't be used without a noun and a verb
|
||||
# - Perhaps you're missing a verb between the noun and the second article.
|
||||
# - Or, perhaps you should remove the first verb and treat the intervening word as a verb?
|
||||
# - In some cases you should add a `,` between the noun and the second article.
|
||||
\s(?:an?|the(?! action))\s(?!way|wh|how\b)[A-Za-z][a-z]+[a-qs-z]\s(?:a(?! bit)n?|the)\s
|
||||
|
||||
# Should only be one of `a`, `an`, or `the`
|
||||
\b(?:(?:an?|the)\s+){2,}\b
|
||||
|
||||
# Should be a list `something, a second thing, or a third thing` or `something, a thing to do a thing`
|
||||
# -- This rule is experimental, if you find it has a high false-positive rate, please let the maintainer know
|
||||
#(?:^|[?!.] )[^()?!;,.]+, a(?:\s+(?!to\b)\w+)+?\s+an?\b
|
||||
|
||||
# Should only be `are` or `can`, not both
|
||||
\b(?:(?:are|can)\s+){2,}\b
|
||||
|
||||
# Should probably be `ABCDEFGHIJKLMNOPQRSTUVWXYZ`
|
||||
(?i)(?!ABCDEFGHIJKLMNOPQRSTUVWXYZ)ABC[A-Z]{21}YZ
|
||||
|
||||
# Should be `an`
|
||||
#(?<!\b[Ii] |\.)\bam\b
|
||||
|
||||
# Should be `anymore`
|
||||
# s.b. anymore
|
||||
\bany more[,.]
|
||||
|
||||
# Should be `Ask`
|
||||
(?:^|[.?]\s+)As\s+[A-Z][a-z]{2,}\s[^.?]*?(?:how|if|wh\w+)\b
|
||||
# s.b. GitHub
|
||||
(?<![&*.]|// |\btype )\bGithub\b(?![{)])
|
||||
|
||||
# Should be `at one fell swoop`
|
||||
# and only when talking about killing, not some other completion
|
||||
# Act 4 Scene 3, Macbeth
|
||||
# https://www.opensourceshakespeare.org/views/plays/play_view.php?WorkID=macbeth&Act=4&Scene=3&Scope=scene
|
||||
\bin one fell s[lw]?oop\b
|
||||
# s.b. GitLab
|
||||
(?<![&*.]|// |\btype )\bGitlab\b(?![{)])
|
||||
|
||||
# Should be `'`
|
||||
(?i)\b(?:(?:i|s?he|they|what|who|you)[`"]ll|(?:are|ca|did|do|does|ha[ds]|have|is|should|were|wo|would)n[`"]t|(?:s?he|let|that|there|what|where|who)[`"]s|(?:i|they|we|what|who|you)[`"]ve)\b
|
||||
|
||||
# Should be `background` / `intro text` / `introduction` / `prologue` unless it's a brand or relates to _subterfuge_
|
||||
(?i)\bpretext\b
|
||||
|
||||
# Should be `bearer`
|
||||
\b(?<=the )burden(?= of bad news\b)
|
||||
|
||||
# Should be `bona`
|
||||
# unless talking about bones
|
||||
\bbone(?= fide\b)
|
||||
|
||||
# Should be `branches`
|
||||
# ... unless it's really about the meal that replaces breakfast and lunch.
|
||||
\b[Bb]runches\b
|
||||
|
||||
# Should be `briefcase`
|
||||
\bbrief-case\b
|
||||
|
||||
# Should be `by far` or `far and away`
|
||||
\bby far and away\b
|
||||
|
||||
# Should be `by and large`
|
||||
\bby in large\b
|
||||
|
||||
# Should be `bytes`
|
||||
# unless talking about sports where a team gets to skip a game, or
|
||||
# saying `goodbyes` (even this is questionable)
|
||||
(?<!\\)\bbyes\b
|
||||
|
||||
# Should be `can, not only ..., ... also...`
|
||||
\bcan not only.*can also\b
|
||||
|
||||
# Should be `cannot` (or `can't`)
|
||||
# See https://www.grammarly.com/blog/cannot-or-can-not/
|
||||
# > Don't use `can not` when you mean `cannot`. The only time you're likely to see `can not` written as separate words is when the word `can` happens to precede some other phrase that happens to start with `not`.
|
||||
# > `Can't` is a contraction of `cannot`, and it's best suited for informal writing.
|
||||
# > In formal writing and where contractions are frowned upon, use `cannot`.
|
||||
# > It is possible to write `can not`, but you generally find it only as part of some other construction, such as `not only . . . but also.`
|
||||
# - if you encounter such a case, add a pattern for that case to patterns.txt.
|
||||
\b[Cc]an not\b(?! only\b)
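# For example, if a document legitimately reads `you can not merely X, but also Y`,
# a patterns.txt entry along these lines would mask it (illustrative only):
#   \bcan not merely\b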
|
||||
|
||||
# Should be `chart`
|
||||
(?i)\bhelm\b.*\bchard\b
|
||||
|
||||
# Do not use `(click) here` links
|
||||
# For more information, see:
|
||||
# * https://www.w3.org/QA/Tips/noClickHere
|
||||
# * https://webaim.org/techniques/hypertext/link_text
|
||||
# * https://granicus.com/blog/why-click-here-links-are-bad/
|
||||
# * https://heyoka.medium.com/dont-use-click-here-f32f445d1021
|
||||
(?i)(?:>|\[)(?:(?:click |)here|this(?=\]\([^\)]+:/)|link|(?:read |)more(?!</value))(?:</|\]\()
|
||||
|
||||
# Including "image of" or "picture of" in alt text is unnecessary.
|
||||
\balt=['"](?:an? |)(?:image|picture) of
|
||||
|
||||
# Alt text should be short
|
||||
\balt=(?:'[^']{126,}'|"[^"]{126,}")
|
||||
|
||||
# Should be `effect`
|
||||
(?<=\btake )affect\b
|
||||
|
||||
# Should be `-endian`
|
||||
\b(?i)(?<=big|little) endian\b
|
||||
|
||||
# Should be `equals` or `is equal to`
|
||||
\bequals to\b
|
||||
|
||||
# Should be `ECMA` 262 (JavaScript)
|
||||
(?i)\bTS\/EMCA\b|\bEMCA(?: \d|\s*Script)|\bEMCA\b(?=.*\bTS\b)
|
||||
|
||||
# Should be `ECMA` 340 (Near Field Communications)
|
||||
(?i)EMCA[- ]340
|
||||
|
||||
# Should be `fall back`
|
||||
\bfallback(?= to)\b
|
||||
|
||||
# Should be `for`, `for, to` or `to`
|
||||
\b(?:for to|to for)\b
|
||||
|
||||
# Should be `GitHub`
|
||||
(?<![&*.]|// |\b(?:from|import|type) )\bGithub\b(?![{()])
|
||||
|
||||
# Should be `GitLab`
|
||||
(?<![&*.]|// |\b(?:from|import|type) )\bGitlab\b(?![{()])
|
||||
|
||||
# Should be `heartrending` unless talking about drawing hearts
|
||||
\b(?i)heart[- ]rendering\b(?![^.?!]*(?:hearts|quirk))
|
||||
|
||||
# Should probably be `https://`...
|
||||
# Markdown doesn't treat `www.`-style links as URLs; without a scheme they resolve as relative paths
|
||||
\]\(www\.\w
|
||||
|
||||
# Should be `intents and purposes`
|
||||
(?<=[Ff]or all )intensive purposes\b
|
||||
|
||||
# Should be `JavaScript`
|
||||
# s.b. JavaScript
|
||||
\bJavascript\b
|
||||
|
||||
# Should be `macOS` or `Mac OS X` or ...
|
||||
# s.b. macOS or Mac OS X or ...
|
||||
\bMacOS\b
|
||||
|
||||
# Should be `Microsoft`
|
||||
# s.b. Microsoft
|
||||
\bMicroSoft\b
|
||||
|
||||
# Should be `OAuth`
|
||||
(?:^|[^-/*$])[ '"]oAuth(?: [a-z]|\d+ |[^ a-zA-Z0-9:;_.()])
|
||||
|
||||
# Should be `RabbitMQ`
|
||||
\bRabbitmq\b
|
||||
|
||||
# Should be `TensorFlow`
|
||||
\bTensorflow\b
|
||||
|
||||
# Should be `TypeScript`
|
||||
# s.b. TypeScript
|
||||
\bTypescript\b
|
||||
|
||||
# Should be `another`
|
||||
\ban[- ]other(?!-)\b
|
||||
# s.b. another
|
||||
\ban[- ]other\b
|
||||
|
||||
# Should be `case-(in)sensitive`
|
||||
\bcase (?:in|)sensitive\b
|
||||
|
||||
# Should be `coinciding`
|
||||
\bco-inciding\b
|
||||
|
||||
# Should be `deprecation warning(s)`
|
||||
# s.b. deprecation warning
|
||||
\b[Dd]epreciation [Ww]arnings?\b
|
||||
|
||||
# Should be `greater than`
|
||||
# s.b. greater than
|
||||
\bgreater then\b
|
||||
|
||||
# Should be `has`
|
||||
\b[Ii]t only have\b
|
||||
|
||||
# Should be `herein`, `the`, `them`, `this`, `these`, or reworded in some other way
|
||||
\bthe here(?:\.|,| (?!and|defined))
|
||||
|
||||
# Should be `going to bed` or `going to a bad`
|
||||
\bgoing to bad(?!-)\b
|
||||
|
||||
# Should be `greater than`
|
||||
#\bhigher than\b
|
||||
|
||||
# Should be `ID` (unless it's a flag/property)
|
||||
#(?<![-\.])\bId\b(?![(])
|
||||
|
||||
# Should be `in front of`
|
||||
# s.b. in front of
|
||||
\bin from of\b
|
||||
|
||||
# Should be `into`
|
||||
# s.b. into
|
||||
# when not phrasal and when `in order to` would be wrong:
|
||||
# https://thewritepractice.com/into-vs-in-to/
|
||||
\sin to\s(?!if\b)
|
||||
|
||||
# Should be `in use by`
|
||||
\sin used by\b
|
||||
|
||||
# Should be `in-depth` if used as an adjective (but `in depth` when used as an adverb)
|
||||
\bin depth\s(?!rather\b)\w{6,}
|
||||
|
||||
# Should be `in-flight` or `on the fly` (unless actually talking about airline flights)
|
||||
\bon[- ]flight\b(?!=\s+(?:(?:\w{2}|)\d+|availability|booking|computer|data|delay|departure|management|performance|radar|reservation|scheduling|software|status|ticket|time|type|.*(?:hotel|taxi)))
|
||||
|
||||
# Should be `is obsolete`
|
||||
# s.b. is obsolete
|
||||
\bis obsolescent\b
|
||||
|
||||
# Should be `it's` or `its`
|
||||
(?<![.'])\bits['’]
|
||||
# s.b. it's or its
|
||||
\bits['’]
|
||||
|
||||
# Should be `its`
|
||||
\bit's(?= (?:child|only purpose|own(?:er|)|parent|sibling)\b)
|
||||
# s.b. opt-in
|
||||
(?<!\sfor)\sopt in\s
|
||||
|
||||
# Should be `for its` (possessive) or `because it is`
|
||||
\bfor it(?:'s| is)\b
|
||||
|
||||
# Should be `lends`
|
||||
\bleads(?= credence)
|
||||
|
||||
# Should be `log in`
|
||||
\blogin to the
|
||||
|
||||
# Should be `long-standing`
|
||||
\blong standing\b
|
||||
|
||||
# Should be `lose`
|
||||
(?<=\bwill )loose\b
|
||||
|
||||
# `apt-key` is deprecated
|
||||
# ... instead you should be writing a pair of files:
|
||||
# ... * the gpg key added to a distinct key ring file based on your project/distro/key...
|
||||
# ... * the sources.list entry in a distinct file -- not simply appended to `/etc/apt/sources.list` -- that references the gpg key (there is a newer format, [DEB822](https://manpages.debian.org/bookworm/dpkg-dev/deb822.5.en.html)).
|
||||
# Consider:
|
||||
# ````sh
|
||||
# curl http://download.something.example.com/$DISTRO/Release.key | \
|
||||
# gpg --dearmor --yes --output /usr/share/keyrings/something-distro.gpg
|
||||
# echo "deb [signed-by=/usr/share/keyrings/something-distro.gpg] http://download.something.example.com/repositories/home:/$DISTRO ./" \
|
||||
# >> /etc/apt/sources.list.d/something-distro.list
|
||||
# ````
|
||||
\bapt-key add\b
|
||||
|
||||
# Should be `nearby`
|
||||
\bnear by\b
|
||||
|
||||
# Should probably be a person named `Nick` or the abbreviation `NIC`
|
||||
\bNic\b
|
||||
|
||||
# Should be `not supposed`
|
||||
\bsupposed not\b
|
||||
|
||||
# Should be `Once this` or `On this` or even `One that`. Rarely `One, this`
|
||||
[?!.] One this\b
|
||||
|
||||
# Should probably be `much more`
|
||||
\bmore much\b
|
||||
|
||||
# Should be `perform its`
|
||||
\bperform it's\b
|
||||
|
||||
# Should be `opt-in`
|
||||
(?<!\scan|for)(?<!\smust)(?<!\sif)\sopt in\s
|
||||
|
||||
# Should be `out-of-date` if acting as an adjective before a noun
|
||||
\bout of date(?= \w{3,}\b)
|
||||
|
||||
# Should be `less than`
|
||||
# s.b. less than
|
||||
\bless then\b
|
||||
|
||||
# Should be `load balancer`
|
||||
\b[Ll]oud balancer
|
||||
# s.b. one of
|
||||
\bon of\b
|
||||
|
||||
# Should be `moot`
|
||||
\bmute point\b
|
||||
|
||||
# Should be `one of`
|
||||
(?<!-)\bon of\b
|
||||
|
||||
# Should be `on the other hand`
|
||||
\b(?i)on another hand\b
|
||||
|
||||
# Reword to `on at runtime` or `enabled at launch`
|
||||
# The former if you mean it can be changed dynamically.
|
||||
# The latter if you mean that it can be changed without recompiling but not after the program starts.
|
||||
\bswitched on runtime\b
|
||||
|
||||
# Should be `Of course,`
|
||||
[?.!]\s+Of course\s(?=[-\w\s]+[.?;!,])
|
||||
|
||||
# Most people only have two hands. Reword.
|
||||
\b(?i)on the third hand\b
|
||||
|
||||
# Should be `Open Graph`
|
||||
# unless talking about a specific Open Graph implementation:
|
||||
# - Java
|
||||
# - Node
|
||||
# - Py
|
||||
# - Ruby
|
||||
\bOpenGraph\b
|
||||
|
||||
# Should be `OpenShift`
|
||||
\bOpenshift\b
|
||||
|
||||
# Should be `otherwise`
|
||||
# s.b. otherwise
|
||||
\bother[- ]wise\b
|
||||
|
||||
# Should be `; otherwise` or `. Otherwise`
|
||||
# https://study.com/learn/lesson/otherwise-in-a-sentence.html
|
||||
, [Oo]therwise\b
|
||||
|
||||
# Should probably be `Otherwise,`
|
||||
(?<=\. )Otherwise\s
|
||||
|
||||
# Should be `or (more|less)`
|
||||
# s.b. or (more|less)
|
||||
\bore (?:more|less)\b
|
||||
|
||||
# Should be `or`
|
||||
\b(?i)true of .*false\b
|
||||
|
||||
# Should be `pale`
|
||||
\b(?<=beyond the )pail\b
|
||||
|
||||
# Should be reworded.
|
||||
# `passthrough` is an adjective
|
||||
# `pass-through` could be a noun
|
||||
# `pass through` would be a verb phrase
|
||||
\b(?i)passthrough(?= an?\b)
|
||||
|
||||
# Should be `rather than`
|
||||
\brather then\b
|
||||
|
||||
# Should be `Red Hat`
|
||||
\bRed[Hh]at\b
|
||||
|
||||
# Should be `regardless, ...` or `regardless of (whether)`
|
||||
\b[Rr]egardless if you\b
|
||||
|
||||
# Should be `self-signed`
|
||||
\bself signed\b
|
||||
|
||||
# Should be `SendGrid`
|
||||
\bSendgrid\b
|
||||
|
||||
# Should be `set up` (`setup` is a noun / `set up` is a verb)
|
||||
\b[Ss]etup(?= (?:an?|the)\b)
|
||||
|
||||
# Should be `state`
|
||||
\bsate(?=\b|[A-Z])|(?<=[a-z])Sate(?=\b|[A-Z])|(?<=[A-Z]{2})Sate(?=\b|[A-Z])
|
||||
|
||||
# Should be `this`
|
||||
\b[Tt]oday(?= morning\b)
|
||||
|
||||
# Should be `let's` or `let us`
|
||||
\b[Ll]ets (?=throw\.)
|
||||
|
||||
# Should be `no longer needed`
|
||||
\bno more needed\b(?! than\b)
|
||||
|
||||
# Should be `<see|look> below for the`
|
||||
(?i)\bfind below the\b
|
||||
|
||||
# Should be `then any` unless there's a comparison before the `,`
|
||||
, than any\b
|
||||
|
||||
# Should be `did not exist`
|
||||
\bwere not existent\b
|
||||
|
||||
# Should be `nonexistent`
|
||||
# s.b. nonexistent
|
||||
\bnon existing\b
|
||||
|
||||
# Should be `nonexistent`
|
||||
\b[Nn]o[nt][- ]existent\b
|
||||
|
||||
# Should be `our`
|
||||
\bspending out time\b
|
||||
|
||||
# Should be `@brief` / `@details` / `@param` / `@return` / `@retval`
|
||||
# s.b. brief / details / param / return / retval
|
||||
(?:^\s*|(?:\*|//|/*)\s+`)[\\@](?:breif|(?:detail|detials)|(?:params(?!\.)|prama?)|ret(?:uns?)|retvl)\b
|
||||
|
||||
# Should be `more than` or `more, then`
|
||||
\bmore then\b
|
||||
|
||||
# Should be `Pipeline`/`pipeline`
|
||||
(?:(?<=\b|[A-Z])p|P)ipeLine(?:\b|(?=[A-Z]))
|
||||
|
||||
# Should be `preexisting`
|
||||
# s.b. preexisting
|
||||
[Pp]re[- ]existing
|
||||
|
||||
# Should be `preempt`
|
||||
# s.b. preempt
|
||||
[Pp]re[- ]empt\b
|
||||
|
||||
# Should be `preemptively`
|
||||
# s.b. preemptively
|
||||
[Pp]re[- ]emptively
|
||||
|
||||
# Should be `prepopulate`
|
||||
[Pp]re[- ]populate
|
||||
|
||||
# Should be `prerequisite`
|
||||
[Pp]re[- ]requisite
|
||||
|
||||
# Should be `QuickTime`
|
||||
\bQuicktime\b
|
||||
|
||||
# Should be `recently changed` or `recent changes`
|
||||
# s.b. recently changed or recent changes
|
||||
[Rr]ecent changed
|
||||
|
||||
# Should be `reentrancy`
|
||||
# s.b. reentrancy
|
||||
[Rr]e[- ]entrancy
|
||||
|
||||
# Should be `reentrant`
|
||||
# s.b. reentrant
|
||||
[Rr]e[- ]entrant
|
||||
|
||||
# Should be `room for`
|
||||
\brooms for (?!lease|rent|sale)
|
||||
|
||||
# Should be `socioeconomic`
|
||||
# https://dictionary.cambridge.org/us/dictionary/english/socioeconomic
|
||||
socio-economic
|
||||
|
||||
# Should be `strong suit`
|
||||
\b(?:my|his|her|their) strong suite\b
|
||||
|
||||
# Should probably be `temperatures` unless actually talking about thermal drafts (things birds may fly on)
|
||||
\bthermals\b
|
||||
|
||||
# Should be `there are` or `they are` (or `they're`)
|
||||
(?i)\btheir are\b
|
||||
|
||||
# Should be `understand`
|
||||
# s.b. understand
|
||||
\bunder stand\b
|
||||
|
||||
# Should be `URI` or `uri` unless it refers to a person named `Uri` (or a flag)
|
||||
#(?<![-\.])\bUri\b(?![(])
|
||||
|
||||
# Should be `true`
|
||||
(?i)(?<![\[\]()])\brue(?:= or false)
|
||||
|
||||
# Should be `it uses is`
|
||||
\bis uses is\b
|
||||
|
||||
# Should be `uses it as`
|
||||
(?:^|\. |and )uses is as (?!an?\b|follows|livestock|[^.]+\s+as\b)
|
||||
|
||||
# Should be `was`
|
||||
\bhas been(?= removed in v?\d)
|
||||
|
||||
# Should be `where`
|
||||
\bwere they are\b
|
||||
|
||||
# Should be `why`
|
||||
, way(?= is [^.]*\?)
|
||||
|
||||
# should be `vCenter`
|
||||
\bV[Cc]enter\b
|
||||
|
||||
# Should be `VM`
|
||||
\bVm\b
|
||||
|
||||
# Should be `walkthrough(s)`
|
||||
\bwalk-throughs?\b
|
||||
|
||||
# Should be `want`
|
||||
\bdon't ant\b
|
||||
|
||||
# Should be `we'll`
|
||||
\bwe 'll\b
|
||||
|
||||
# Should be `week`
|
||||
# unless you're really talking about people or pointers
|
||||
\bevery weak[.,?!]
|
||||
|
||||
# Should be `well`
|
||||
\b[Yy]ou(?:'re| are) doing good\b
|
||||
|
||||
# Should be `whereas`
|
||||
\bwhere as\b
|
||||
|
||||
# Should be `WinGet`
|
||||
\bWinget\b
|
||||
|
||||
# Should be `without` (unless `out` is a modifier of the next word)
|
||||
\bwith out\b(?!-)
|
||||
|
||||
# Should be `work around`
|
||||
\b[Ww]orkaround(?= an?\b)
|
||||
|
||||
# Should be `workarounds`
|
||||
# s.b. workarounds
|
||||
#\bwork[- ]arounds\b
|
||||
|
||||
# Should be `workaround`
|
||||
# s.b. workaround
|
||||
(?:(?:[Aa]|[Tt]he|ugly)\swork[- ]around\b|\swork[- ]around\s+for)
|
||||
|
||||
# Should be `worst`
|
||||
(?i)worse-case
|
||||
|
||||
# Should be `you are not` or reworded
|
||||
\byour not\b
|
||||
|
||||
# Should be `(coarse|fine)-grained`
|
||||
# s.b. (coarse|fine)-grained
|
||||
\b(?:coarse|fine) grained\b
|
||||
|
||||
# Homoglyph (Cyrillic) should be `A`/`B`/`C`/`E`/`H`/`I`/`I`/`J`/`K`/`M`/`O`/`P`/`S`/`T`/`Y`
|
||||
# It's possible that your content is intentionally mixing Cyrillic and Latin scripts, but if it isn't, you definitely want to correct this.
|
||||
(?<=[A-Z]{2})[АВСЕНІӀЈКМОРЅТУ]|[АВСЕНІӀЈКМОРЅТУ](?=[A-Z]+(?:\b|[a-z]+)|[a-z]+(?:[^a-z]|$))
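# e.g. `Сopy` -- a Cyrillic `С` followed by Latin `opy` -- renders like `Copy` but would be flagged here (illustrative)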
|
||||
# s.b. neither/nor -- or reword
|
||||
#\bnot\b[^.?!"/(]+\bnor\b
|
||||
|
||||
# Homoglyph (Cyrillic) should be `a`/`b`/`c`/`e`/`o`/`p`/`x`/`y`
|
||||
# It's possible that your content is intentionally mixing Cyrillic and Latin scripts, but if it isn't, you definitely want to correct this.
|
||||
[авсеорху](?=[A-Za-z]{2,})|(?<=[A-Za-z]{2})[авсеорху]|(?<=[A-Za-z])[авсеорху](?=[A-Za-z])
|
||||
# probably a double negative
|
||||
# s.b. neither/nor (plus rewording the beginning)
|
||||
\bnot\b[^.?!"/]*\bneither\b[^.?!"/(]*\bnor\b
|
||||
|
||||
# Should be `neither/nor` -- or reword
|
||||
#(?<!do )\bnot\b([^.?!"/(](?!neither|,.*?,))+\bnor\b
|
||||
|
||||
# Should be `neither/nor` (plus rewording the beginning)
|
||||
# This is probably a double negative...
|
||||
\bnot\b[^.?!"/(]*\bneither\b[^.?!"/(]*\bnor\b
|
||||
|
||||
# In English, duplicated words are generally mistakes
|
||||
# There are a few exceptions (e.g. "that that").
|
||||
# If the highlighted doubled word pair is in:
|
||||
# * code, write a pattern to mask it.
|
||||
# * prose, have someone read the English before you dismiss this error.
|
||||
# In English, it is generally wrong to have the same word twice in a row without punctuation.
|
||||
# Duplicated words are generally mistakes.
|
||||
# There are a few exceptions where it is acceptable (e.g. "that that").
|
||||
# If the highlighted doubled word pair is in a code snippet, you can write a pattern to mask it.
|
||||
# If the highlighted doubled word pair is in prose, have someone read the English before you dismiss this error.
|
||||
\s([A-Z]{3,}|[A-Z][a-z]{2,}|[a-z]{3,})\s\g{-1}\s
|
||||
|
||||
346
.github/actions/spell-check/patterns.txt
vendored
@@ -1,41 +1,125 @@
|
||||
# See https://github.com/check-spelling/check-spelling/wiki/Configuration-Examples:-patterns
|
||||
|
||||
# marker to ignore all code on line
|
||||
^.*/\* #no-spell-check-line \*/.*$
|
||||
# marker for ignoring a comment to the end of the line
|
||||
// #no-spell-check.*$
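# e.g. (illustrative) a source line such as `int xzqqv = 0; /* #no-spell-check-line */` is skipped
# entirely, while `// #no-spell-check` only masks text from that marker to the end of the line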
|
||||
# Automatically suggested patterns
|
||||
# hit-count: 3011 file-count: 842
|
||||
# IServiceProvider / isAThing
|
||||
\b(?:I|isA)(?=(?:[A-Z][a-z]{2,})+\b)
|
||||
|
||||
# Gaelic
|
||||
Gàidhlig
|
||||
# hit-count: 2239 file-count: 134
|
||||
# hex runs
|
||||
\b[0-9a-fA-F]{16,}\b
|
||||
|
||||
Ov_erwrite
|
||||
# hit-count: 1868 file-count: 1
|
||||
# sha-... -- uses a fancy capture
|
||||
(\\?['"]|")[0-9a-f]{40,}\g{-1}
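# (`\g{-1}` is a relative backreference to the most recent capture group, so whichever quote
# character opens the 40+ hex-digit run must also close it)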
|
||||
|
||||
# languageHashTable
|
||||
"\w+(?:-\w+|)"\s+=\s+@\(".*"\)
|
||||
# hit-count: 1100 file-count: 97
|
||||
# base64 encoded content, possibly wrapped in mime
|
||||
(?:^|[\s=;:?])[-a-zA-Z=;:/0-9+]{50,}(?:[\s=;:?]|$)
|
||||
|
||||
# Regular expression with `\b`
|
||||
\\b(?=[a-z]\S*\{)
|
||||
# hit-count: 426 file-count: 165
|
||||
# GitHub SHAs (markdown)
|
||||
(?:\[`?[0-9a-f]+`?\]\(https:/|)/(?:www\.|)github\.com(?:/[^/\s"]+){2,}(?:/[^/\s")]+)(?:[0-9a-f]+(?:[-0-9a-zA-Z/#.]*|)\b|)
|
||||
|
||||
# long lorem
|
||||
L"Lorem.*"
|
||||
# hit-count: 331 file-count: 117
|
||||
# hex digits including css/html color classes:
|
||||
(?:[\\0][xX]|\\u|[uU]\+|#x?|\%23)[0-9_a-fA-FgGrR]*?[a-fA-FgGrR]{2,}[0-9_a-fA-FgGrR]*(?:[uUlL]{0,3}|[iu]\d+)\b
|
||||
|
||||
# hit-count: 275 file-count: 45
|
||||
# version suffix <word>v#
|
||||
(?:(?<=[A-Z]{2})V|(?<=[a-z]{2}|[A-Z]{2})v)\d+(?:\b|(?=[a-zA-Z_]))
|
||||
|
||||
# hit-count: 209 file-count: 97
|
||||
# w3
|
||||
\bw3\.org/[-0-9a-zA-Z/#.]+
|
||||
|
||||
# hit-count: 137 file-count: 38
|
||||
# alternate markers if you run into latex and friends
|
||||
(?<!\\)\\[nrt](?=[a-z]{2,})(?=.*['"`](?:, "[^{]|\]))
|
||||
|
||||
# tabs in c#
|
||||
\$"\\t
|
||||
|
||||
# Hexadecimal character pattern in code
|
||||
\\x[0-9a-fA-F]{4}
|
||||
|
||||
fontFamily": ".*"
|
||||
|
||||
D[23]D(?=[A-Z][a-z])
|
||||
(?<=[a-z])3D(?=[A-Z])
|
||||
|
||||
\.monitorId = \{ .*\}
|
||||
|
||||
json::value\(L"\S+"
|
||||
\\x[0-9a-fA-F][0-9a-fA-F]
|
||||
|
||||
# windows line breaks in strings
|
||||
\\r\\n(?=[A-Za-z])
|
||||
\\r\\n
|
||||
|
||||
# hit-count: 104 file-count: 43
|
||||
# regex choice
|
||||
\(\?:[^)]+\|[^)]+\)
|
||||
|
||||
# hit-count: 76 file-count: 28
|
||||
# base64 encoded content
|
||||
([`'"])[-a-zA-Z=;:/0-9+]+=\g{-1}
|
||||
|
||||
# hit-count: 70 file-count: 5
|
||||
# Contributor
|
||||
\[[^\]]+\]\(https://github\.com/[^/\s"]+/?\)
|
||||
|
||||
# hit-count: 28 file-count: 22
|
||||
# stackexchange -- https://stackexchange.com/feeds/sites
|
||||
\b(?:askubuntu|serverfault|stack(?:exchange|overflow)|superuser).com/(?:questions/\w+/[-\w]+|a/)
|
||||
|
||||
# hit-count: 21 file-count: 2
|
||||
# Update Lorem based on your content (requires `ge` and `w` from https://github.com/jsoref/spelling; and `review` from https://github.com/check-spelling/check-spelling/wiki/Looking-for-items-locally )
|
||||
# grep '^[^#].*lorem' .github/actions/spelling/patterns.txt|perl -pne 's/.*i..\?://;s/\).*//' |tr '|' "\n"|sort -f |xargs -n1 ge|perl -pne 's/^[^:]*://'|sort -u|w|sed -e 's/ .*//'|w|review -
|
||||
# Warning: while `(?i)` is very neat and fancy, if you have some binary files that aren't proper Unicode, you might run into:
|
||||
## Operation "substitution (s///)" returns its argument for non-Unicode code point 0x1C19AE (the code point will vary).
|
||||
## You could manually change `(?i)X...` to use `[Xx]...`
|
||||
## or you could add the files to your `excludes` file (a version after 0.0.19 should identify the file path)
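## e.g. an entry such as `(?i)lorem` could be rewritten as `[Ll]orem` to sidestep the issue (illustrative)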
|
||||
# Lorem
|
||||
(?:\w|\s|[,.])*\b(?i)(?:amet|consectetur|cursus|dolor|eros|ipsum|lacus|libero|ligula|lorem|magna|neque|nulla|suscipit|tempus)\b(?:\w|\s|[,.])*
|
||||
|
||||
# hit-count: 18 file-count: 15
|
||||
# microsoft
|
||||
\b(?:https?://|)(?:(?:download\.visualstudio|docs|msdn2?|research)\.microsoft|blogs\.msdn)\.com/[-_a-zA-Z0-9()=./%]*
|
||||
|
||||
# hit-count: 14 file-count: 5
|
||||
# githubusercontent
|
||||
/[-a-z0-9]+\.githubusercontent\.com/[-a-zA-Z0-9?&=_\/.]*
|
||||
|
||||
# hit-count: 14 file-count: 3
|
||||
# node packages
|
||||
(["'])\@[^/'" ]+/[^/'" ]+\g{-1}
|
||||
|
||||
# hit-count: 10 file-count: 4
|
||||
# URL escaped characters
|
||||
\%[0-9A-F][A-F]
|
||||
|
||||
# hit-count: 9 file-count: 5
|
||||
# Wikipedia
|
||||
\ben\.wikipedia\.org/wiki/[-\w%.#]+
|
||||
|
||||
# hit-count: 6 file-count: 3
|
||||
# css url wrappings
|
||||
\burl\([^)]+\)
|
||||
|
||||
# hit-count: 5 file-count: 3
|
||||
# vs devops
|
||||
\bvisualstudio.com(?::443|)/[-\w/?=%&.]*
|
||||
|
||||
# hit-count: 4 file-count: 3
|
||||
# Non-English
|
||||
[a-zA-Z]*[ÀÁÂÃÄÅÆČÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝßàáâãäåæčçèéêëìíîïðñòóôõöøùúûüýÿĀāŁłŃńŅņŒœŚśŠšŜŝŸŽžź][a-zA-Z]{3}[a-zA-ZÀÁÂÃÄÅÆČÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝßàáâãäåæčçèéêëìíîïðñòóôõöøùúûüýÿĀāŁłŃńŅņŒœŚśŠšŜŝŸŽžź]*|[a-zA-Z]{3,}[ÀÁÂÃÄÅÆČÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝßàáâãäåæčçèéêëìíîïðñòóôõöøùúûüýÿĀāŁłŃńŅņŒœŚśŠšŜŝŸŽžź]|[ÀÁÂÃÄÅÆČÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝßàáâãäåæčçèéêëìíîïðñòóôõöøùúûüýÿĀāŁłŃńŅņŒœŚśŠšŜŝŸŽžź][a-zA-Z]{3,}
|
||||
|
||||
# hit-count: 4 file-count: 2
|
||||
# data url in quotes
|
||||
([`'"])data:(?:[^ `'"].*?|)(?:[A-Z]{3,}|[A-Z][a-z]{2,}|[a-z]{3,}).*\g{-1}
|
||||
|
||||
# hit-count: 2 file-count: 2
|
||||
# mailto urls
|
||||
mailto:[-a-zA-Z=;:/?%&0-9+@.]{3,}
|
||||
|
||||
# msdn
|
||||
\b(?:download\.visualstudio|docs|msdn|learn)\.microsoft\.com/[-_a-zA-Z0-9()=./]*
|
||||
aka\.ms/[a-zA-Z0-9]+
|
||||
|
||||
# hit-count: 2 file-count: 1
|
||||
# While you could try to match `http://` and `https://` by using `s?` in `https?://`, sometimes there
|
||||
# YouTube url
|
||||
\b(?:(?:www\.|)youtube\.com|youtu.be)/(?:channel/|embed/|user/|playlist\?list=|watch\?v=|v/|)[-a-zA-Z0-9?&=_%]*
|
||||
|
||||
# power shell gallery website
|
||||
\bpowershellgallery.com/[-_a-zA-Z0-9()=./%]*
|
||||
@@ -45,199 +129,55 @@ L?(["']|[-<({>]|\b)[0-9a-fA-F]{8}-(?:[0-9a-fA-F]{4}-){3}[0-9a-fA-F]{10,12}(?:\g{
|
||||
|
||||
(?:L"[abAB]+", ){3}L"[abAB]+"
|
||||
|
||||
\. (?: @[-A-Za-z\d]+\b(?!\.[A-Z]),?)+
|
||||
|
||||
auto deviceId = L".*"
|
||||
deviceId(?:\.id|) = L".*"
|
||||
|
||||
StringComparer.OrdinalIgnoreCase\) \{.*\}
|
||||
|
||||
# namespaces
|
||||
\b[a-z]+::
|
||||
|
||||
"Author": ".+"
|
||||
|
||||
(?:Include|Link)=".*?"
|
||||
|
||||
# You could ignore `xmlns`, but it's probably better to enforce rules about them...
|
||||
#\s(?:xmlns:[a-z]+(?:[A-Z][a-z]+|)=|[a-z]+(?:[A-Z][a-z]+):(?=[a-z]+=))
|
||||
# hit-count: 1 file-count: 1
|
||||
# marker to ignore all code on line
|
||||
^.*/\* #no-spell-check-line \*/.*$
|
||||
|
||||
# UnitTests
|
||||
\[DataRow\(.*\)\]
|
||||
|
||||
# AdditionalDependencies
|
||||
<AdditionalDependencies>.*<
|
||||
|
||||
# the last line of mimetype="application/x-microsoft.net.object.bytearray.base64" things in .resx files
|
||||
^\s*[-a-zA-Z=;:/0-9+]*[-a-zA-Z;:/0-9+][-a-zA-Z=;:/0-9+]*=$
|
||||
|
||||
# Automatically suggested patterns
|
||||
|
||||
# hit-count: 5402 file-count: 1339
|
||||
# IServiceProvider / isAThing
|
||||
(?:(?:\b|_|(?<=[a-z]))[IT]|(?:\b|_)(?:nsI|isA))(?=(?:[A-Z][a-z]{2,})+(?:[A-Z\d]|\b))
|
||||
|
||||
# hit-count: 2073 file-count: 842
|
||||
# #includes
|
||||
^\s*#include\s*(?:<.*?>|".*?")
|
||||
|
||||
# hit-count: 1639 file-count: 855
|
||||
# C# includes
|
||||
^\s*using [^;]+;
|
||||
|
||||
# hit-count: 1491 file-count: 693
|
||||
# microsoft
|
||||
\b(?:https?://|)(?:(?:(?:blogs|download\.visualstudio|docs|msdn2?|research)\.|)microsoft|blogs\.msdn)\.co(?:m|\.\w\w)/[-_a-zA-Z0-9()=./%]*
|
||||
|
||||
# hit-count: 398 file-count: 133
|
||||
# hex digits including css/html color classes:
|
||||
(?:[\\0][xX]|\\u\{?|[uU]\+|#x?|%23|&H)[0-9_a-fA-FgGrR]*?[a-fA-FgGrR]{2,}[0-9_a-fA-FgGrR]*(?:[uUlL]{0,3}|[iu]\d+)\b
|
||||
|
||||
# hit-count: 339 file-count: 146
|
||||
# hex runs
|
||||
\b[0-9a-fA-F]{16,}\b
|
||||
|
||||
# hit-count: 253 file-count: 100
|
||||
# GitHub SHAs (markdown)
|
||||
(?:\[`?[0-9a-f]+`?\]\(https:/|)/(?:www\.|)github\.com(?:/[^/\s"]+){2,}(?:/[^/\s")]+)(?:[0-9a-f]+(?:[-0-9a-zA-Z/#.]*|)\b|)
|
||||
|
||||
# hit-count: 241 file-count: 37
|
||||
# version suffix <word>v#
|
||||
(?:(?<=[A-Z]{2})V|(?<=[a-z]{2}|[A-Z]{2})v)\d+(?:\b|(?=[a-zA-Z_]))
|
||||
|
||||
# hit-count: 141 file-count: 6
|
||||
# Contributor / Project
|
||||
\[[^\]\s]+\]\(https://github\.com/[^)]+\)(?: -(?: [A-Z]\S+)+|)|\[[^\]]+\]\(https://github\.com/(?:[^/\s"]+/?){1,2}\)
|
||||
|
||||
https://github.com/(?:[-\w]+/?){1,2}
|
||||
|
||||
# hit-count: 131 file-count: 125
|
||||
# Repeated letters
|
||||
\b([a-z])\g{-1}{2,}\b
|
||||
|
||||
# hit-count: 99 file-count: 97
|
||||
# w3
|
||||
\bw3\.org/[-0-9a-zA-Z/#.]+
|
||||
|
||||
# hit-count: 59 file-count: 11
|
||||
# Markdown anchor links
|
||||
\(#\S*?[a-zA-Z]\S*?\)
|
||||
|
||||
# hit-count: 29 file-count: 23
|
||||
# stackexchange -- https://stackexchange.com/feeds/sites
|
||||
\b(?:askubuntu|serverfault|stack(?:exchange|overflow)|superuser).com/(?:questions/\w+/[-\w]+|a/)
|
||||
|
||||
# hit-count: 24 file-count: 11
|
||||
# Library prefix
|
||||
# e.g., `lib`+`archive`, `lib`+`raw`, `lib`+`unwind`
|
||||
# (ignores some words that happen to start with `lib`)
|
||||
(?:\b|_)[Ll]ib(?:re(?=office)|)(?!era[lt]|ero|erty|rar(?:i(?:an|es)|y))(?=[a-z])
|
||||
|
||||
# hit-count: 20 file-count: 2
|
||||
# Intel intrinsics
|
||||
_mm\d*_(?!dd)\w+
|
||||
|
||||
# hit-count: 15 file-count: 8
|
||||
# Wikipedia
|
||||
\ben\.wikipedia\.org/wiki/[-\w%.#]+
|
||||
|
||||
# hit-count: 14 file-count: 10
|
||||
# vs devops
|
||||
\bvisualstudio.com(?::443|)/[-\w/?=%&.]*
|
||||
|
||||
# hit-count: 8 file-count: 2
|
||||
# copyright
|
||||
Copyright (?:\([Cc]\)|)(?:[-\d, ]|and)+(?: [A-Z][a-z]+ [A-Z][a-z]+,?)+
|
||||
|
||||
# hit-count: 8 file-count: 1
|
||||
# css fonts
|
||||
\bfont(?:-family|):[^;}]+
|
||||
|
||||
aka\.ms/[a-zA-Z0-9]+
|
||||
|
||||
# hit-count: 8 file-count: 1
|
||||
# kubernetes crd patterns
|
||||
^\s*pattern: .*$
|
||||
|
||||
# hit-count: 5 file-count: 3
|
||||
# URL escaped characters
|
||||
%[0-9A-F][A-F](?=[A-Za-z])
|
||||
|
||||
# hit-count: 3 file-count: 3
|
||||
# githubusercontent
|
||||
/[-a-z0-9]+\.githubusercontent\.com/[-a-zA-Z0-9?&=_\/.]*
|
||||
|
||||
# hit-count: 2 file-count: 2
|
||||
# medium
|
||||
\bmedium\.com/@?[^/\s"]+/[-\w:/*.]+
|
||||
|
||||
# hit-count: 2 file-count: 1
|
||||
# While you could try to match `http://` and `https://` by using `s?` in `https?://`, sometimes there
|
||||
# YouTube url
|
||||
\b(?:(?:www\.|)youtube\.com|youtu.be)/(?:channel/|embed/|user/|playlist\?list=|watch\?v=|v/|)[-a-zA-Z0-9?&=_%]*
|
||||
|
||||
# hit-count: 2 file-count: 1
|
||||
# GitHub actions
|
||||
\buses:\s+[-\w.]+/[-\w./]+@[-\w.]+
|
||||
# D2D
|
||||
D?2D
|
||||
|
||||
# hit-count: 1 file-count: 1
|
||||
# curl arguments
|
||||
\b(?:\\n|)curl(?:\.exe|)(?:\s+-[a-zA-Z]{1,2}\b)*(?:\s+-[a-zA-Z]{3,})(?:\s+-[a-zA-Z]+)*
|
||||
# GHSA
|
||||
GHSA(?:-[0-9a-z]{4}){3}
|
||||
|
||||
# hit-count: 1 file-count: 1
|
||||
# medium
|
||||
\bmedium\.com/\@?[^/\s"]+/[-\w]+
|
||||
|
||||
# hit-count: 1 file-count: 1
|
||||
# kubectl.kubernetes.io/last-applied-configuration
|
||||
"kubectl.kubernetes.io/last-applied-configuration": ".*"
|
||||
|
||||
# hit-count: 1 file-count: 1
|
||||
# tar arguments
|
||||
\b(?:\\n|)g?tar(?:\.exe|)(?:(?:\s+--[-a-zA-Z]+|\s+-[a-zA-Z]+|\s[ABGJMOPRSUWZacdfh-pr-xz]+\b)(?:=[^ ]*|))+
|
||||
|
||||
# #pragma lib
|
||||
^\s*#pragma comment\(lib, ".*?"\)
|
||||
|
||||
# UnitTests
|
||||
\[DataRow\(.*\)\]
|
||||
|
||||
# AdditionalDependencies
|
||||
<AdditionalDependencies>.*<
|
||||
|
||||
# the last line of mimetype="application/x-microsoft.net.object.bytearray.base64" things in .resx files
|
||||
^\s*[-a-zA-Z=;:/0-9+]*[-a-zA-Z;:/0-9+][-a-zA-Z=;:/0-9+]*=$
|
||||
|
||||
RegExp\(@?([`'"]).*?\g{-1}\)|(?:escapes|regEx):\s*(?:/.*/|([`'"]).*?\g{-1})|return/.*?/
|
||||
\bSecur32
|
||||
|
||||
# Questionably acceptable forms of `in to`
|
||||
# Personally, I prefer `log into`, but people object
|
||||
# https://www.tprteaching.com/log-into-log-in-to-login/
|
||||
\b(?:(?:[Ll]og(?:g(?=[a-z])|)|[Ss]ign)(?:ed|ing)?) in to\b
|
||||
\b(?:[Ll]og|[Ss]ign) in to\b
|
||||
|
||||
# to opt in
|
||||
\bto opt in\b
|
||||
|
||||
# pass(ed|ing) in
|
||||
\bpass(?:ed|ing) in\b
|
||||
|
||||
# acceptable duplicates
|
||||
# ls directory listings
|
||||
[-bcdlpsw](?:[-r][-w][-SsTtx]){3}[\.+*]?\s+\d+\s+\S+\s+\S+\s+[.\d]+(?:[KMGT]|)\s+
|
||||
[-bcdlpsw](?:[-r][-w][-Ssx]){3}\s+\d+\s+\S+\s+\S+\s+\d+\s+
|
||||
# mount
|
||||
\bmount\s+-t\s+(\w+)\s+\g{-1}\b
|
||||
# C types and repeated CSS values
|
||||
\s(auto|buffalo|center|div|inherit|long|LONG|none|normal|solid|thin|transparent|very)(?:\s\g{-1})+\s
|
||||
# C enum and struct
|
||||
\b(?:enum|struct)\s+(\w+)\s+\g{-1}\b
|
||||
\s(auto|center|div|inherit|long|LONG|none|normal|solid|thin|transparent|very)(?: \g{-1})+\s
|
||||
# C struct
|
||||
\bstruct\s+(\w+)\s+\g{-1}\b
|
||||
# go templates
|
||||
\s(\w+)\s+\g{-1}\s+\`(?:graphql|inject|json|yaml):
|
||||
# doxygen / javadoc / .net
|
||||
(?:[\\@](?:brief|defgroup|groupname|link|t?param|return|retval)|(?:public|private|\[Parameter(?:\(.+\)|)\])(?:\s+(?:static|override|readonly|required|virtual))*)(?:\s+\{\w+\}|)\s+(\w+)\s+\g{-1}\s
|
||||
|
||||
# macOS file path
|
||||
(?:Contents\W+|(?!iOS)/)MacOS\b
|
||||
|
||||
# Python package registry has incorrect spelling for macOS / Mac OS X
|
||||
"Operating System :: MacOS :: MacOS X"
|
||||
|
||||
# "company" in Germany
|
||||
\bGmbH\b
|
||||
|
||||
# IntelliJ
|
||||
\bIntelliJ\b
|
||||
(?:[\\@](?:brief|groupname|t?param|return|retval)|(?:public|private|\[Parameter(?:\(.+\)|)\])(?:\s+static|\s+override|\s+readonly)*)(?:\s+\{\w+\}|)\s+(\w+)\s+\g{-1}\s
|
||||
|
||||
# Commit message -- Signed-off-by and friends
|
||||
^\s*(?:(?:Based-on-patch|Co-authored|Helped|Mentored|Reported|Reviewed|Signed-off)-by|Thanks-to): (?:[^<]*<[^>]*>|[^<]*)\s*$
|
||||
@@ -250,29 +190,3 @@ _SILENCE_STDEXT_ARR_ITERS_DEPRECATION_WARNING
|
||||
|
||||
# ignore long runs of a single character:
|
||||
\b([A-Za-z])\g{-1}{3,}\b
|
||||
|
||||
# hit-count: 1 file-count: 1
|
||||
# Amazon
|
||||
\bamazon\.com/[-\w]+/(?:dp/[0-9A-Z]+|)
|
||||
|
||||
# hit-count: 3 file-count: 3
|
||||
# imgur
|
||||
\bimgur\.com/[^.]+
|
||||
|
||||
# Process Process (typename varname)
|
||||
Process Process
|
||||
|
||||
# ZoomIt menu items with accelerator keys
|
||||
E&xit
|
||||
St&yle
|
||||
|
||||
# This matches a relative clause where the relative pronoun "that" is omitted.
|
||||
# Example: "Gets or sets the window the TitleBar should configure."
|
||||
\bthe\s+\w+\s+the\b
|
||||
|
||||
# Usernames with numbers
|
||||
# 0x6f677548 is a user name, but its appearance in a user folder path causes it to be flagged
|
||||
\bx6f677548\b
|
||||
|
||||
# Microsoft Store URLs and product IDs
|
||||
ms-windows-store://\S+
|
||||
|
||||
14
.github/actions/spell-check/reject.txt
vendored
@@ -1,23 +1,11 @@
|
||||
^attache$
|
||||
^bellows?$
|
||||
^bellow$
|
||||
benefitting
|
||||
occurences?
|
||||
^dependan.*
|
||||
^develope$
|
||||
^developement$
|
||||
^developpe
|
||||
^Devers?$
|
||||
^devex
|
||||
^devide
|
||||
^Devinn?[ae]
|
||||
^devisal
|
||||
^devisor
|
||||
^diables?$
|
||||
^oer$
|
||||
Sorce
|
||||
^[Ss]pae.*
|
||||
^Teh$
|
||||
^untill$
|
||||
^untilling$
|
||||
^venders?$
|
||||
^wether.*
|
||||
|
||||
92
.github/agents/FixIssue.agent.md
vendored
@@ -1,92 +0,0 @@
|
||||
---
|
||||
description: 'Implements fixes for GitHub issues based on implementation plans'
|
||||
name: 'FixIssue'
|
||||
tools: ['read', 'edit', 'search', 'execute', 'agent', 'usages', 'problems', 'changes', 'testFailure', 'github/*', 'github.vscode-pull-request-github/*']
|
||||
argument-hint: 'GitHub issue number (e.g., #12345)'
|
||||
infer: true
|
||||
---
|
||||
|
||||
# FixIssue Agent
|
||||
|
||||
You are an **IMPLEMENTATION AGENT** specialized in executing implementation plans to fix GitHub issues.
|
||||
|
||||
## Identity & Expertise
|
||||
|
||||
- Expert at translating plans into working code
|
||||
- Deep knowledge of PowerToys codebase patterns and conventions
|
||||
- Skilled at writing tests, handling edge cases, and validating builds
|
||||
- You follow plans precisely while handling ambiguity gracefully
|
||||
|
||||
## Goal
|
||||
|
||||
For the given **issue_number**, execute the implementation plan and produce:
|
||||
1. Working code changes applied directly to the repository
|
||||
2. `Generated Files/issueFix/{{issue_number}}/pr-description.md` — PR-ready description
|
||||
3. `Generated Files/issueFix/{{issue_number}}/manual-steps.md` — Only if human action needed
|
||||
|
||||
## Core Directive
|
||||
|
||||
**Follow the implementation plan in `Generated Files/issueReview/{{issue_number}}/implementation-plan.md` as the single source of truth.**
|
||||
|
||||
If the plan doesn't exist, invoke PlanIssue agent first via `runSubagent`.
|
||||
|
||||
## Working Principles
|
||||
|
||||
- **Plan First**: Read and understand the entire implementation plan before coding
|
||||
- **Validate Always**: For each change: Edit → Build → Verify → Commit. Never proceed if build fails.
|
||||
- **Atomic Commits**: Each commit must be self-contained, buildable, and meaningful
|
||||
- **Ask, Don't Guess**: When uncertain, insert `// TODO(Human input needed): <question>` and document in manual-steps.md
|
||||
|
||||
## Strategy
|
||||
|
||||
**Core Loop** — For every unit of work:
|
||||
1. **Edit**: Make focused changes to implement one logical piece
|
||||
2. **Build**: Run `tools\build\build.cmd` and check for exit code 0
|
||||
3. **Verify**: Use `problems` tool for lint/compile errors; run relevant tests
|
||||
4. **Commit**: Only after build passes — use `.github/prompts/create-commit-title.prompt.md`
|
||||
|
||||
Never skip steps. Never commit broken code. Never proceed if build fails.
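A minimal sketch of one pass through the loop from PowerShell, assuming only that `tools\build\build.cmd` exits non-zero on failure (the verify and commit steps are placeholders):

```powershell
# Illustrative only -- one Edit -> Build -> Verify -> Commit iteration
& .\tools\build\build.cmd
if ($LASTEXITCODE -ne 0) {
    Write-Error "Build failed; fix the break before committing."
    return
}
# verify: run the relevant *UnitTests project and check the `problems` tool here
git commit -am "<commit title generated from .github/prompts/create-commit-title.prompt.md>"
```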
|
||||
|
||||
**Feature-by-Feature E2E**: For big scenarios with multiple features, complete each feature end-to-end before moving to the next:
|
||||
- Settings UI → Functionality → Logging → Tests (for Feature 1)
|
||||
- Then repeat for Feature 2
|
||||
- Benefits: Each feature is self-contained, testable, easier to review, can ship incrementally
|
||||
|
||||
**Large Changes** (3+ files or cross-module):
|
||||
- Use `tools\build\New-WorktreeFromBranch.ps1` for isolated worktrees
|
||||
- Create separate branches per feature (e.g., `issue/{{issue_number}}-export`, `issue/{{issue_number}}-import`)
|
||||
- Merge feature branches back after each is validated
|
||||
|
||||
**Recovery**: If implementation goes wrong:
|
||||
- Create a checkpoint branch before risky changes
|
||||
- On failure: branch from last known-good state, cherry-pick working changes, abandon broken branch
|
||||
- For complex changes, consider multiple smaller PRs
|
||||
|
||||
## Guidelines
|
||||
|
||||
**DO**:
|
||||
- Follow the plan exactly
|
||||
- Validate build before every commit — **NEVER commit broken code**
|
||||
- Use `.github/prompts/create-commit-title.prompt.md` for commit messages
|
||||
- Add comprehensive tests for changed behavior
|
||||
- Use worktrees for large changes (3+ files or cross-module)
|
||||
- Document deviations from plan
|
||||
|
||||
**DON'T**:
|
||||
- Implement everything in a single massive commit
|
||||
- Continue after a failed build without fixing
|
||||
- Make drive-by refactors outside issue scope
|
||||
- Skip tests for behavioral changes
|
||||
- Add noisy logs in hot paths
|
||||
- Break IPC/JSON contracts without updating both sides
|
||||
- Introduce dependencies without documenting in NOTICE.md
|
||||
|
||||
## References
|
||||
|
||||
- [Build Guidelines](../../tools/build/BUILD-GUIDELINES.md) — Build commands and validation
|
||||
- [Coding Style](../../doc/devdocs/development/style.md) — Formatting and conventions
|
||||
- [AGENTS.md](../../AGENTS.md) — Full contributor guide
|
||||
|
||||
## Parameter
|
||||
|
||||
- **issue_number**: Extract from `#123`, `issue 123`, or plain number. If missing, ask user.
|
||||
81
.github/agents/PlanIssue.agent.md
vendored
@@ -1,81 +0,0 @@
|
||||
---
|
||||
description: 'Analyzes GitHub issues to produce overview and implementation plans'
|
||||
name: 'PlanIssue'
|
||||
tools: ['execute', 'read', 'edit', 'search', 'web', 'github/*', 'agent', 'download-github-image/*']
|
||||
argument-hint: 'GitHub issue number (e.g., #12345)'
|
||||
handoffs:
|
||||
- label: Start Implementation
|
||||
agent: FixIssue
|
||||
prompt: 'Fix issue #{{issue_number}} using the implementation plan'
|
||||
- label: Open Plan in Editor
|
||||
agent: agent
|
||||
prompt: 'Open Generated Files/issueReview/{{issue_number}}/overview.md and implementation-plan.md'
|
||||
showContinueOn: false
|
||||
send: true
|
||||
infer: true
|
||||
---
|
||||
|
||||
# PlanIssue Agent
|
||||
|
||||
You are a **PLANNING AGENT** specialized in analyzing GitHub issues and producing comprehensive planning documentation.
|
||||
|
||||
## Identity & Expertise
|
||||
|
||||
- Expert at issue triage, priority scoring, and technical analysis
|
||||
- Deep knowledge of PowerToys architecture and codebase patterns
|
||||
- Skilled at breaking down problems into actionable implementation steps
|
||||
- You research thoroughly before planning, gathering 80% confidence before drafting
|
||||
|
||||
## Goal
|
||||
|
||||
For the given **issue_number**, produce two deliverables:
|
||||
1. `Generated Files/issueReview/{{issue_number}}/overview.md` — Issue analysis with scoring
|
||||
2. `Generated Files/issueReview/{{issue_number}}/implementation-plan.md` — Technical implementation plan
|
||||
Above is the core interaction with the end user. If you cannot produce the files above, you fail the task. Each time, you must check whether the files exist or have been modified by the end user, without assuming you know their contents.
|
||||
|
||||
## Core Directive
|
||||
|
||||
**Follow the template in `.github/prompts/review-issue.prompt.md` exactly.** Read it first, then apply every section as specified.
|
||||
|
||||
<stopping_rules>
|
||||
You are a PLANNING agent, NOT an implementation agent.
|
||||
|
||||
STOP if you catch yourself:
|
||||
- Writing actual code or making file edits
|
||||
- Switching to implementation mode
|
||||
- Using edit tools on source files
|
||||
|
||||
Plans describe what the USER or FixIssue agent will execute later.
|
||||
</stopping_rules>
|
||||
|
||||
## Working Principles
|
||||
|
||||
- **Research First**: Gather comprehensive context before drafting any plan
|
||||
- **Score Objectively**: Rate importance, feasibility, and clarity to prioritize effectively
|
||||
- **Prefer Existing Patterns**: Choose solutions that match existing repo conventions
|
||||
- **Draft for Review**: Present plans to user for feedback before finalizing
|
||||
|
||||
## Guidelines
|
||||
|
||||
**DO**:
|
||||
- Fetch issue details including reactions, comments, and linked PRs
|
||||
- Search related code and find similar past fixes
|
||||
- Ask clarifying questions when requirements are ambiguous
|
||||
- Identify subject matter experts via git history
|
||||
- Offer handoffs when plan is ready
|
||||
|
||||
**DON'T**:
|
||||
- Implement anything — you only plan
|
||||
- Edit source files
|
||||
- Make assumptions without researching
|
||||
- Skip the scoring/assessment phase
|
||||
|
||||
## References
|
||||
|
||||
- [Review Issue Prompt](../prompts/review-issue.prompt.md) — Template for plan structure
|
||||
- [Architecture Overview](../../doc/devdocs/core/architecture.md) — System design context
|
||||
- [AGENTS.md](../../AGENTS.md) — Full contributor guide
|
||||
|
||||
## Parameter
|
||||
|
||||
- **issue_number**: Extract from `#123`, `issue 123`, or plain number. If missing, ask user.
|
||||
45
.github/copilot-instructions.md
vendored
@@ -1,45 +0,0 @@
|
||||
---
|
||||
description: 'PowerToys AI contributor guidance'
|
||||
---
|
||||
|
||||
# PowerToys – Copilot Instructions
|
||||
|
||||
Concise guidance for AI contributions. For complete details, see [AGENTS.md](../AGENTS.md).
|
||||
|
||||
## Quick Reference
|
||||
|
||||
- **Build**: `tools\build\build-essentials.cmd` (first time), then `tools\build\build.cmd`
|
||||
- **Tests**: Find `<Product>*UnitTests` project, build it, run via VS Test Explorer
|
||||
- **Exit code 0 = success** – do not proceed if build fails
|
||||
|
||||
## Key Rules
|
||||
|
||||
- One terminal per operation (build → test)
|
||||
- Atomic PRs: one logical change, no drive-by refactors
|
||||
- Add tests when changing behavior
|
||||
- Keep hot paths quiet (no logging in hooks/tight loops)
|
||||
|
||||
## Style Enforcement
|
||||
|
||||
- C#: `src/.editorconfig`, StyleCop.Analyzers
|
||||
- C++: `src/.clang-format`
|
||||
- XAML: XamlStyler
|
||||
|
||||
## When to Ask for Clarification
|
||||
|
||||
- Ambiguous spec after scanning docs
|
||||
- Cross-module impact unclear
|
||||
- Security, elevation, or installer changes
|
||||
|
||||
## Component-Specific Instructions
|
||||
|
||||
These are auto-applied based on file location:
|
||||
- [Runner & Settings UI](.github/instructions/runner-settings-ui.instructions.md)
|
||||
- [Common Libraries](.github/instructions/common-libraries.instructions.md)
|
||||
|
||||
## Detailed Documentation
|
||||
|
||||
- [AGENTS.md](../AGENTS.md) – Full contributor guide
|
||||
- [Build Guidelines](../tools/build/BUILD-GUIDELINES.md)
|
||||
- [Architecture](../doc/devdocs/core/architecture.md)
|
||||
- [Coding Style](../doc/devdocs/development/style.md)
|
||||
261
.github/instructions/agent-skills.instructions.md
vendored
@@ -1,261 +0,0 @@
|
||||
---
|
||||
description: 'Guidelines for creating high-quality Agent Skills for GitHub Copilot'
|
||||
applyTo: '**/.github/skills/**/SKILL.md, **/.claude/skills/**/SKILL.md'
|
||||
---
|
||||
|
||||
# Agent Skills File Guidelines
|
||||
|
||||
Instructions for creating effective and portable Agent Skills that enhance GitHub Copilot with specialized capabilities, workflows, and bundled resources.
|
||||
|
||||
## What Are Agent Skills?
|
||||
|
||||
Agent Skills are self-contained folders with instructions and bundled resources that teach AI agents specialized capabilities. Unlike custom instructions (which define coding standards), skills enable task-specific workflows that can include scripts, examples, templates, and reference data.
|
||||
|
||||
Key characteristics:
|
||||
- **Portable**: Works across VS Code, Copilot CLI, and Copilot coding agent
|
||||
- **Progressive loading**: Only loaded when relevant to the user's request
|
||||
- **Resource-bundled**: Can include scripts, templates, examples alongside instructions
|
||||
- **On-demand**: Activated automatically based on prompt relevance
|
||||
|
||||
## Directory Structure
|
||||
|
||||
Skills are stored in specific locations:
|
||||
|
||||
| Location | Scope | Recommendation |
|
||||
|----------|-------|----------------|
|
||||
| `.github/skills/<skill-name>/` | Project/repository | Recommended for project skills |
|
||||
| `.claude/skills/<skill-name>/` | Project/repository | Legacy, for backward compatibility |
|
||||
| `~/.github/skills/<skill-name>/` | Personal (user-wide) | Recommended for personal skills |
|
||||
| `~/.claude/skills/<skill-name>/` | Personal (user-wide) | Legacy, for backward compatibility |
|
||||
|
||||
Each skill **must** have its own subdirectory containing at minimum a `SKILL.md` file.
|
||||
|
||||
## Required SKILL.md Format
|
||||
|
||||
### Frontmatter (Required)
|
||||
|
||||
```yaml
|
||||
---
|
||||
name: webapp-testing
|
||||
description: Toolkit for testing local web applications using Playwright. Use when asked to verify frontend functionality, debug UI behavior, capture browser screenshots, check for visual regressions, or view browser console logs. Supports Chrome, Firefox, and WebKit browsers.
|
||||
license: Complete terms in LICENSE.txt
|
||||
---
|
||||
```
|
||||
|
||||
| Field | Required | Constraints |
|
||||
|-------|----------|-------------|
|
||||
| `name` | Yes | Lowercase, hyphens for spaces, max 64 characters (e.g., `webapp-testing`) |
|
||||
| `description` | Yes | Clear description of capabilities AND use cases, max 1024 characters |
|
||||
| `license` | No | Reference to LICENSE.txt (e.g., `Complete terms in LICENSE.txt`) or SPDX identifier |
|
||||
|
||||
### Description Best Practices
|
||||
|
||||
**CRITICAL**: The `description` field is the PRIMARY mechanism for automatic skill discovery. Copilot reads ONLY the `name` and `description` to decide whether to load a skill. If your description is vague, the skill will never be activated.
|
||||
|
||||
**What to include in description:**
|
||||
1. **WHAT** the skill does (capabilities)
|
||||
2. **WHEN** to use it (specific triggers, scenarios, file types, or user requests)
|
||||
3. **Keywords** that users might mention in their prompts
|
||||
|
||||
**Good description:**
|
||||
```yaml
|
||||
description: Toolkit for testing local web applications using Playwright. Use when asked to verify frontend functionality, debug UI behavior, capture browser screenshots, check for visual regressions, or view browser console logs. Supports Chrome, Firefox, and WebKit browsers.
|
||||
```
|
||||
|
||||
**Poor description:**
|
||||
```yaml
|
||||
description: Web testing helpers
|
||||
```
|
||||
|
||||
The poor description fails because:
|
||||
- No specific triggers (when should Copilot load this?)
|
||||
- No keywords (what user prompts would match?)
|
||||
- No capabilities (what can it actually do?)
|
||||
|
||||
### Body Content
|
||||
|
||||
The body contains detailed instructions that Copilot loads AFTER the skill is activated. Recommended sections:
|
||||
|
||||
| Section | Purpose |
|
||||
|---------|---------|
|
||||
| `# Title` | Brief overview of what this skill enables |
|
||||
| `## When to Use This Skill` | List of scenarios (reinforces description triggers) |
|
||||
| `## Prerequisites` | Required tools, dependencies, environment setup |
|
||||
| `## Step-by-Step Workflows` | Numbered steps for common tasks |
|
||||
| `## Troubleshooting` | Common issues and solutions table |
|
||||
| `## References` | Links to bundled docs or external resources |
|
||||
|
||||
## Bundling Resources
|
||||
|
||||
Skills can include additional files that Copilot accesses on-demand:
|
||||
|
||||
### Supported Resource Types
|
||||
|
||||
| Folder | Purpose | Loaded into Context? | Example Files |
|
||||
|--------|---------|---------------------|---------------|
|
||||
| `scripts/` | Executable automation that performs specific operations | When executed | `helper.py`, `validate.sh`, `build.ts` |
|
||||
| `references/` | Documentation the AI agent reads to inform decisions | Yes, when referenced | `api_reference.md`, `schema.md`, `workflow_guide.md` |
|
||||
| `assets/` | **Static files used AS-IS** in output (not modified by the AI agent) | No | `logo.png`, `brand-template.pptx`, `custom-font.ttf` |
|
||||
| `templates/` | **Starter code/scaffolds that the AI agent MODIFIES** and builds upon | Yes, when referenced | `viewer.html` (insert algorithm), `hello-world/` (extend) |
|
||||
|
||||
### Directory Structure Example
|
||||
|
||||
```
|
||||
.github/skills/my-skill/
|
||||
├── SKILL.md # Required: Main instructions
|
||||
├── LICENSE.txt # Recommended: License terms (Apache 2.0 typical)
|
||||
├── scripts/ # Optional: Executable automation
|
||||
│ ├── helper.py # Python script
|
||||
│ └── helper.ps1 # PowerShell script
|
||||
├── references/ # Optional: Documentation loaded into context
|
||||
│ ├── api_reference.md
|
||||
│ ├── step1-setup.md # Detailed workflow (>3 steps)
|
||||
│ └── step2-deployment.md
|
||||
├── assets/ # Optional: Static files used AS-IS in output
|
||||
│ ├── baseline.png # Reference image for comparison
|
||||
│ └── report-template.html
|
||||
└── templates/ # Optional: Starter code the AI agent modifies
|
||||
├── scaffold.py # Code scaffold the AI agent customizes
|
||||
└── config.template # Config template the AI agent fills in
|
||||
```
|
||||
|
||||
> **LICENSE.txt**: When creating a skill, download the Apache 2.0 license text from https://www.apache.org/licenses/LICENSE-2.0.txt and save as `LICENSE.txt`. Update the copyright year and owner in the appendix section.
|
||||
|
||||
### Assets vs Templates: Key Distinction
|
||||
|
||||
**Assets** are static resources **consumed unchanged** in the output:
|
||||
- A `logo.png` that gets embedded into a generated document
|
||||
- A `report-template.html` copied as output format
|
||||
- A `custom-font.ttf` applied to text rendering
|
||||
|
||||
**Templates** are starter code/scaffolds that **the AI agent actively modifies**:
|
||||
- A `scaffold.py` where the AI agent inserts logic
|
||||
- A `config.template` where the AI agent fills in values based on user requirements
|
||||
- A `hello-world/` project directory that the AI agent extends with new features
|
||||
|
||||
**Rule of thumb**: If the AI agent reads and builds upon the file content → `templates/`. If the file is used as-is in output → `assets/`.
|
||||
|
||||
### Referencing Resources in SKILL.md
|
||||
|
||||
Use relative paths to reference files within the skill directory:
|
||||
|
||||
```markdown
|
||||
## Available Scripts
|
||||
|
||||
Run the [helper script](./scripts/helper.py) to automate common tasks.
|
||||
|
||||
See [API reference](./references/api_reference.md) for detailed documentation.
|
||||
|
||||
Use the [scaffold](./templates/scaffold.py) as a starting point.
|
||||
```
|
||||
|
||||
## Progressive Loading Architecture
|
||||
|
||||
Skills use three-level loading for efficiency:
|
||||
|
||||
| Level | What Loads | When |
|
||||
|-------|------------|------|
|
||||
| 1. Discovery | `name` and `description` only | Always (lightweight metadata) |
|
||||
| 2. Instructions | Full `SKILL.md` body | When request matches description |
|
||||
| 3. Resources | Scripts, examples, docs | Only when Copilot references them |
|
||||
|
||||
This means:
|
||||
- Install many skills without consuming context
|
||||
- Only relevant content loads per task
|
||||
- Resources don't load until explicitly needed
|
||||
|
||||
## Content Guidelines
|
||||
|
||||
### Writing Style
|
||||
|
||||
- Use imperative mood: "Run", "Create", "Configure" (not "You should run")
|
||||
- Be specific and actionable
|
||||
- Include exact commands with parameters
|
||||
- Show expected outputs where helpful
|
||||
- Keep sections focused and scannable
|
||||
|
||||
### Script Requirements
|
||||
|
||||
When including scripts, prefer cross-platform languages:
|
||||
|
||||
| Language | Use Case |
|
||||
|----------|----------|
|
||||
| Python | Complex automation, data processing |
|
||||
| pwsh | PowerShell Core scripting |
|
||||
| Node.js | JavaScript-based tooling |
|
||||
| Bash/Shell | Simple automation tasks |
|
||||
|
||||
Best practices (a minimal sketch follows this list):
|
||||
- Include help/usage documentation (`--help` flag)
|
||||
- Handle errors gracefully with clear messages
|
||||
- Avoid storing credentials or secrets
|
||||
- Use relative paths where possible
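As a rough illustration of those practices, a bundled helper written in PowerShell might look like this (the file name, parameters, and behavior are hypothetical):

```powershell
# scripts/helper.ps1 -- illustrative sketch; names and parameters are hypothetical
<#
.SYNOPSIS
    Processes an input file for the skill. Run `Get-Help ./helper.ps1` for usage.
#>
param(
    [Parameter(Mandatory = $true)][string]$InputPath,  # input file to process
    [switch]$VerboseOutput                             # enable verbose output
)
if (-not (Test-Path $InputPath)) {
    # fail gracefully with a clear message instead of throwing an unhandled error
    Write-Error "Input '$InputPath' was not found."
    exit 1
}
if ($VerboseOutput) { Write-Host "Processing $InputPath" }
# ... perform the actual work here, using relative paths and no stored credentials ...
exit 0
```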
|
||||
|
||||
### When to Bundle Scripts
|
||||
|
||||
Include scripts in your skill when:
|
||||
- The same code would be rewritten repeatedly by the agent
|
||||
- Deterministic reliability is critical (e.g., file manipulation, API calls)
|
||||
- Complex logic benefits from being pre-tested rather than generated each time
|
||||
- The operation has a self-contained purpose that can evolve independently
|
||||
- Testability matters — scripts can be unit tested and validated
|
||||
- Predictable behavior is preferred over dynamic generation
|
||||
|
||||
Scripts enable evolution: even simple operations benefit from being implemented as scripts when they may grow in complexity, need consistent behavior across invocations, or require future extensibility.
|
||||
|
||||
### Security Considerations
|
||||
|
||||
- Scripts rely on existing credential helpers (no credential storage)
|
||||
- Include `--force` flags only for destructive operations
|
||||
- Warn users before irreversible actions
|
||||
- Document any network operations or external calls
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Parameter Table Pattern
|
||||
|
||||
Document parameters clearly:
|
||||
|
||||
```markdown
|
||||
| Parameter | Required | Default | Description |
|
||||
|-----------|----------|---------|-------------|
|
||||
| `--input` | Yes | - | Input file or URL to process |
|
||||
| `--action` | Yes | - | Action to perform |
|
||||
| `--verbose` | No | `false` | Enable verbose output |
|
||||
```
|
||||
|
||||
## Validation Checklist
|
||||
|
||||
Before publishing a skill:
|
||||
|
||||
- [ ] `SKILL.md` has valid frontmatter with `name` and `description`
|
||||
- [ ] `name` is lowercase with hyphens, ≤64 characters
|
||||
- [ ] `description` clearly states **WHAT** it does, **WHEN** to use it, and relevant **KEYWORDS**
|
||||
- [ ] Body includes when to use, prerequisites, and step-by-step workflows
|
||||
- [ ] SKILL.md body kept under 500 lines (split large content into `references/` folder)
|
||||
- [ ] Large workflows (>5 steps) split into `references/` folder with clear links from SKILL.md
|
||||
- [ ] Scripts include help documentation and error handling
|
||||
- [ ] Relative paths used for all resource references
|
||||
- [ ] No hardcoded credentials or secrets
|
||||
|
||||
## Workflow Execution Pattern
|
||||
|
||||
When executing multi-step workflows, create a TODO list where each step references the relevant documentation:
|
||||
|
||||
```markdown
|
||||
## TODO
|
||||
- [ ] Step 1: Configure environment - see [workflow-setup.md](./references/workflow-setup.md#environment)
|
||||
- [ ] Step 2: Build project - see [workflow-setup.md](./references/workflow-setup.md#build)
|
||||
- [ ] Step 3: Deploy to staging - see [workflow-deployment.md](./references/workflow-deployment.md#staging)
|
||||
- [ ] Step 4: Run validation - see [workflow-deployment.md](./references/workflow-deployment.md#validation)
|
||||
- [ ] Step 5: Deploy to production - see [workflow-deployment.md](./references/workflow-deployment.md#production)
|
||||
```
|
||||
|
||||
This ensures traceability and allows resuming workflows if interrupted.
|
||||
|
||||
## Related Resources
|
||||
|
||||
- [Agent Skills Specification](https://agentskills.io/)
|
||||
- [VS Code Agent Skills Documentation](https://code.visualstudio.com/docs/copilot/customization/agent-skills)
|
||||
- [Reference Skills Repository](https://github.com/anthropics/skills)
|
||||
- [Awesome Copilot Skills](https://github.com/github/awesome-copilot/blob/main/docs/README.skills.md)
|
||||
791
.github/instructions/agents.instructions.md
vendored
@@ -1,791 +0,0 @@
|
||||
---
|
||||
description: 'Guidelines for creating custom agent files for GitHub Copilot'
|
||||
applyTo: '**/*.agent.md'
|
||||
---
|
||||
|
||||
# Custom Agent File Guidelines
|
||||
|
||||
Instructions for creating effective and maintainable custom agent files that provide specialized expertise for specific development tasks in GitHub Copilot.
|
||||
|
||||
## Project Context
|
||||
|
||||
- Target audience: Developers creating custom agents for GitHub Copilot
|
||||
- File format: Markdown with YAML frontmatter
|
||||
- File naming convention: lowercase with hyphens (e.g., `test-specialist.agent.md`)
|
||||
- Location: `.github/agents/` directory (repository-level) or `agents/` directory (organization/enterprise-level)
|
||||
- Purpose: Define specialized agents with tailored expertise, tools, and instructions for specific tasks
|
||||
- Official documentation: https://docs.github.com/en/copilot/how-tos/use-copilot-agents/coding-agent/create-custom-agents
|
||||
|
||||
## Required Frontmatter
|
||||
|
||||
Every agent file must include YAML frontmatter with the following fields:
|
||||
|
||||
```yaml
|
||||
---
|
||||
description: 'Brief description of the agent purpose and capabilities'
|
||||
name: 'Agent Display Name'
|
||||
tools: ['read', 'edit', 'search']
|
||||
model: 'Claude Sonnet 4.5'
|
||||
target: 'vscode'
|
||||
infer: true
|
||||
---
|
||||
```
|
||||
|
||||
### Core Frontmatter Properties
|
||||
|
||||
#### **description** (REQUIRED)
|
||||
- Single-quoted string, clearly stating the agent's purpose and domain expertise
|
||||
- Should be concise (50-150 characters) and actionable
|
||||
- Example: `'Focuses on test coverage, quality, and testing best practices'`
|
||||
|
||||
#### **name** (OPTIONAL)
|
||||
- Display name for the agent in the UI
|
||||
- If omitted, defaults to filename (without `.md` or `.agent.md`)
|
||||
- Use title case and be descriptive
|
||||
- Example: `'Testing Specialist'`
|
||||
|
||||
#### **tools** (OPTIONAL)
|
||||
- List of tool names or aliases the agent can use
|
||||
- Supports comma-separated string or YAML array format
|
||||
- If omitted, agent has access to all available tools
|
||||
- See "Tool Configuration" section below for details
|
||||
|
||||
#### **model** (STRONGLY RECOMMENDED)
|
||||
- Specifies which AI model the agent should use
|
||||
- Supported in VS Code, JetBrains IDEs, Eclipse, and Xcode
|
||||
- Example: `'Claude Sonnet 4.5'`, `'gpt-4'`, `'gpt-4o'`
|
||||
- Choose based on agent complexity and required capabilities
|
||||
|
||||
#### **target** (OPTIONAL)
|
||||
- Specifies target environment: `'vscode'` or `'github-copilot'`
|
||||
- If omitted, agent is available in both environments
|
||||
- Use when agent has environment-specific features
|
||||
|
||||
#### **infer** (OPTIONAL)
|
||||
- Boolean controlling whether Copilot can automatically use this agent based on context
|
||||
- Default: `true` if omitted
|
||||
- Set to `false` to require manual agent selection
|
||||
|
||||
#### **metadata** (OPTIONAL, GitHub.com only)
|
||||
- Object with name-value pairs for agent annotation
|
||||
- Example: `metadata: { category: 'testing', version: '1.0' }`
|
||||
- Not supported in VS Code
|
||||
|
||||
#### **mcp-servers** (OPTIONAL, Organization/Enterprise only)
|
||||
- Configure MCP servers available only to this agent
|
||||
- Only supported for organization/enterprise level agents
|
||||
- See "MCP Server Configuration" section below
|
||||
|
||||
## Tool Configuration
|
||||
|
||||
### Tool Specification Strategies
|
||||
|
||||
**Enable all tools** (default):
|
||||
```yaml
|
||||
# Omit tools property entirely, or use:
|
||||
tools: ['*']
|
||||
```
|
||||
|
||||
**Enable specific tools**:
|
||||
```yaml
|
||||
tools: ['read', 'edit', 'search', 'execute']
|
||||
```
|
||||
|
||||
**Enable MCP server tools**:
|
||||
```yaml
|
||||
tools: ['read', 'edit', 'github/*', 'playwright/navigate']
|
||||
```
|
||||
|
||||
**Disable all tools**:
|
||||
```yaml
|
||||
tools: []
|
||||
```
|
||||
|
||||
### Standard Tool Aliases
|
||||
|
||||
All aliases are case-insensitive:
|
||||
|
||||
| Alias | Alternative Names | Category | Description |
|
||||
|-------|------------------|----------|-------------|
|
||||
| `execute` | shell, Bash, powershell | Shell execution | Execute commands in appropriate shell |
|
||||
| `read` | Read, NotebookRead, view | File reading | Read file contents |
|
||||
| `edit` | Edit, MultiEdit, Write, NotebookEdit | File editing | Edit and modify files |
|
||||
| `search` | Grep, Glob, search | Code search | Search for files or text in files |
|
||||
| `agent` | custom-agent, Task | Agent invocation | Invoke other custom agents |
|
||||
| `web` | WebSearch, WebFetch | Web access | Fetch web content and search |
|
||||
| `todo` | TodoWrite | Task management | Create and manage task lists (VS Code only) |
|
||||
|
||||
### Built-in MCP Server Tools
|
||||
|
||||
**GitHub MCP Server**:
|
||||
```yaml
|
||||
tools: ['github/*'] # All GitHub tools
|
||||
tools: ['github/get_file_contents', 'github/search_repositories'] # Specific tools
|
||||
```
|
||||
- All read-only tools available by default
|
||||
- Token scoped to source repository
|
||||
|
||||
**Playwright MCP Server**:
|
||||
```yaml
|
||||
tools: ['playwright/*'] # All Playwright tools
|
||||
tools: ['playwright/navigate', 'playwright/screenshot'] # Specific tools
|
||||
```
|
||||
- Configured to access localhost only
|
||||
- Useful for browser automation and testing
|
||||
|
||||
### Tool Selection Best Practices
|
||||
|
||||
- **Principle of Least Privilege**: Only enable tools necessary for the agent's purpose
|
||||
- **Security**: Limit `execute` access unless explicitly required
|
||||
- **Focus**: Fewer tools = clearer agent purpose and better performance
|
||||
- **Documentation**: Comment why specific tools are required for complex configurations
|
||||
|
||||
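As a hedged illustration of the guidance above, a documentation-focused agent might declare only the tools it needs and note why (a sketch — the exact aliases available depend on your environment):

```yaml
---
description: 'Generates and updates project documentation from source code'
name: 'Docs Writer'
# read/search to analyze sources, edit to write docs; no 'execute' because the agent never runs commands
tools: ['read', 'search', 'edit']
---
```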
## Sub-Agent Invocation (Agent Orchestration)
|
||||
|
||||
Agents can invoke other agents using `runSubagent` to orchestrate multi-step workflows.
|
||||
|
||||
### How It Works
|
||||
|
||||
Include `agent` in tools list to enable sub-agent invocation:
|
||||
|
||||
```yaml
|
||||
tools: ['read', 'edit', 'search', 'agent']
|
||||
```
|
||||
|
||||
Then invoke other agents with `runSubagent`:
|
||||
|
||||
```javascript
|
||||
const result = await runSubagent({
|
||||
description: 'What this step does',
|
||||
prompt: `You are the [Specialist] specialist.
|
||||
|
||||
Context:
|
||||
- Parameter: ${parameterValue}
|
||||
- Input: ${inputPath}
|
||||
- Output: ${outputPath}
|
||||
|
||||
Task:
|
||||
1. Do the specific work
|
||||
2. Write results to output location
|
||||
3. Return summary of completion`
|
||||
});
|
||||
```
|
||||
|
||||
### Basic Pattern
|
||||
|
||||
Structure each sub-agent call with:
|
||||
|
||||
1. **description**: Clear one-line purpose of the sub-agent invocation
|
||||
2. **prompt**: Detailed instructions with substituted variables
|
||||
|
||||
The prompt should include:
|
||||
- Who the sub-agent is (specialist role)
|
||||
- What context it needs (parameters, paths)
|
||||
- What to do (concrete tasks)
|
||||
- Where to write output
|
||||
- What to return (summary)
|
||||
|
||||
### Example: Multi-Step Processing
|
||||
|
||||
```javascript
|
||||
// Step 1: Process data
|
||||
const processing = await runSubagent({
|
||||
description: 'Transform raw input data',
|
||||
prompt: `You are the Data Processor specialist.
|
||||
|
||||
Project: ${projectName}
|
||||
Input: ${basePath}/raw/
|
||||
Output: ${basePath}/processed/
|
||||
|
||||
Task:
|
||||
1. Read all files from input directory
|
||||
2. Apply transformations
|
||||
3. Write processed files to output
|
||||
4. Create summary: ${basePath}/processed/summary.md
|
||||
|
||||
Return: Number of files processed and any issues found`
|
||||
});
|
||||
|
||||
// Step 2: Analyze (depends on Step 1)
|
||||
const analysis = await runSubagent({
|
||||
description: 'Analyze processed data',
|
||||
prompt: `You are the Data Analyst specialist.
|
||||
|
||||
Project: ${projectName}
|
||||
Input: ${basePath}/processed/
|
||||
Output: ${basePath}/analysis/
|
||||
|
||||
Task:
|
||||
1. Read processed files from input
|
||||
2. Generate analysis report
|
||||
3. Write to: ${basePath}/analysis/report.md
|
||||
|
||||
Return: Key findings and identified patterns`
|
||||
});
|
||||
```
|
||||
|
||||
### Key Points
|
||||
|
||||
- **Pass variables in prompts**: Use `${variableName}` for all dynamic values
|
||||
- **Keep prompts focused**: Clear, specific tasks for each sub-agent
|
||||
- **Return summaries**: Each sub-agent should report what it accomplished
|
||||
- **Sequential execution**: Use `await` to maintain order when steps depend on each other
|
||||
- **Error handling**: Check results before proceeding to dependent steps
|
||||
|
||||
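As a minimal sketch of the error-handling point — assuming, as in the examples above, that each sub-agent returns its summary as a string — the orchestrator can gate the dependent step on the first result:

```javascript
// Step 1: Process data (same call shape as the example above)
const processing = await runSubagent({
  description: 'Transform raw input data',
  prompt: `You are the Data Processor specialist. ...`
});

// Check the returned summary before spending time on the dependent step
if (!processing || /error|failed/i.test(processing)) {
  throw new Error(`Processing step reported a problem: ${processing}`);
}

// Step 2 runs only when Step 1 looks healthy
const analysis = await runSubagent({
  description: 'Analyze processed data',
  prompt: `You are the Data Analyst specialist. ...`
});
```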
### ⚠️ Tool Availability Requirement
|
||||
|
||||
**Critical**: If a sub-agent requires specific tools (e.g., `edit`, `execute`, `search`), the orchestrator must include those tools in its own `tools` list. Sub-agents cannot access tools that aren't available to their parent orchestrator.
|
||||
|
||||
**Example**:
|
||||
```yaml
|
||||
# If your sub-agents need to edit files, execute commands, or search code
|
||||
tools: ['read', 'edit', 'search', 'execute', 'agent']
|
||||
```
|
||||
|
||||
The orchestrator's tool permissions act as a ceiling for all invoked sub-agents. Plan your tool list carefully to ensure all sub-agents have the tools they need.
|
||||
|
||||
### ⚠️ Important Limitation
|
||||
|
||||
**Sub-agent orchestration is NOT suitable for large-scale data processing.** Avoid using `runSubagent` when:
|
||||
- Processing hundreds or thousands of files
|
||||
- Handling large datasets
|
||||
- Performing bulk transformations on big codebases
|
||||
- Orchestrating more than 5-10 sequential steps
|
||||
|
||||
Each sub-agent call adds latency and context overhead. For high-volume processing, implement logic directly in a single agent instead. Use orchestration only for coordinating specialized tasks on focused, manageable datasets.
|
||||
|
||||
## Agent Prompt Structure
|
||||
|
||||
The markdown content below the frontmatter defines the agent's behavior, expertise, and instructions. Well-structured prompts typically include:
|
||||
|
||||
1. **Agent Identity and Role**: Who the agent is and its primary role
|
||||
2. **Core Responsibilities**: What specific tasks the agent performs
|
||||
3. **Approach and Methodology**: How the agent works to accomplish tasks
|
||||
4. **Guidelines and Constraints**: What to do/avoid and quality standards
|
||||
5. **Output Expectations**: Expected output format and quality
|
||||
|
||||
### Prompt Writing Best Practices
|
||||
|
||||
- **Be Specific and Direct**: Use imperative mood ("Analyze", "Generate"); avoid vague terms
|
||||
- **Define Boundaries**: Clearly state scope limits and constraints
|
||||
- **Include Context**: Explain domain expertise and reference relevant frameworks
|
||||
- **Focus on Behavior**: Describe how the agent should think and work
|
||||
- **Use Structured Format**: Headers, bullets, and lists make prompts scannable
|
||||
|
||||
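As a sketch only — the section names and wording are placeholders to adapt, not a required template — a prompt body covering the five parts above might look like:

```markdown
# Testing Specialist

You are a testing specialist focused on improving automated test coverage.

## Core Responsibilities
- Identify untested code paths and write unit tests for them
- Keep production code unchanged unless a test reveals a defect

## Approach
1. Analyze the module under review and list coverage gaps
2. Write focused tests for the highest-risk gaps first

## Constraints
- Follow the repository's existing test framework and naming conventions
- Do not add new runtime dependencies

## Output
Return a summary of the tests added and the coverage gaps that remain.
```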
## Variable Definition and Extraction
|
||||
|
||||
Agents can define dynamic parameters to extract values from user input and use them throughout the agent's behavior and sub-agent communications. This enables flexible, context-aware agents that adapt to user-provided data.
|
||||
|
||||
### When to Use Variables
|
||||
|
||||
**Use variables when**:
|
||||
- Agent behavior depends on user input
|
||||
- Need to pass dynamic values to sub-agents
|
||||
- Want to make agents reusable across different contexts
|
||||
- Require parameterized workflows
|
||||
- Need to track or reference user-provided context
|
||||
|
||||
**Examples**:
|
||||
- Extract project name from user prompt
|
||||
- Capture certification name for pipeline processing
|
||||
- Identify file paths or directories
|
||||
- Extract configuration options
|
||||
- Parse feature names or module identifiers
|
||||
|
||||
### Variable Declaration Pattern
|
||||
|
||||
Define variables section early in the agent prompt to document expected parameters:
|
||||
|
||||
```markdown
|
||||
# Agent Name
|
||||
|
||||
## Dynamic Parameters
|
||||
|
||||
- **Parameter Name**: Description and usage
|
||||
- **Another Parameter**: How it's extracted and used
|
||||
|
||||
## Your Mission
|
||||
|
||||
Process [PARAMETER_NAME] to accomplish [task].
|
||||
```
|
||||
|
||||
### Variable Extraction Methods
|
||||
|
||||
#### 1. **Explicit User Input**
|
||||
Ask the user to provide the variable if not detected in the prompt:
|
||||
|
||||
```markdown
|
||||
## Your Mission
|
||||
|
||||
Process the project by analyzing your codebase.
|
||||
|
||||
### Step 1: Identify Project
|
||||
If no project name is provided, **ASK THE USER** for:
|
||||
- Project name or identifier
|
||||
- Base path or directory location
|
||||
- Configuration type (if applicable)
|
||||
|
||||
Use this information to contextualize all subsequent tasks.
|
||||
```
|
||||
|
||||
#### 2. **Implicit Extraction from Prompt**
|
||||
Automatically extract variables from the user's natural language input:
|
||||
|
||||
```javascript
|
||||
// Example: Extract certification name from user input
|
||||
const userInput = "Process My Certification";
|
||||
|
||||
// Extract key information
|
||||
const certificationName = extractCertificationName(userInput);
|
||||
// Result: "My Certification"
|
||||
|
||||
const basePath = `certifications/${certificationName}`;
|
||||
// Result: "certifications/My Certification"
|
||||
```
|
||||
|
||||
#### 3. **Contextual Variable Resolution**
|
||||
Use file context or workspace information to derive variables:
|
||||
|
||||
```markdown
|
||||
## Variable Resolution Strategy
|
||||
|
||||
1. **From User Prompt**: First, look for explicit mentions in user input
|
||||
2. **From File Context**: Check current file name or path
|
||||
3. **From Workspace**: Use workspace folder or active project
|
||||
4. **From Settings**: Reference configuration files
|
||||
5. **Ask User**: If all else fails, request missing information
|
||||
```
|
||||
|
||||
### Using Variables in Agent Prompts
|
||||
|
||||
#### Variable Substitution in Instructions
|
||||
|
||||
Use template variables in agent prompts to make them dynamic:
|
||||
|
||||
```markdown
|
||||
# Agent Name
|
||||
|
||||
## Dynamic Parameters
|
||||
- **Project Name**: ${projectName}
|
||||
- **Base Path**: ${basePath}
|
||||
- **Output Directory**: ${outputDir}
|
||||
|
||||
## Your Mission
|
||||
|
||||
Process the **${projectName}** project located at `${basePath}`.
|
||||
|
||||
## Process Steps
|
||||
|
||||
1. Read input from: `${basePath}/input/`
|
||||
2. Process files according to project configuration
|
||||
3. Write results to: `${outputDir}/`
|
||||
4. Generate summary report
|
||||
|
||||
## Quality Standards
|
||||
|
||||
- Maintain project-specific coding standards for **${projectName}**
|
||||
- Follow directory structure: `${basePath}/[structure]`
|
||||
```
|
||||
|
||||
#### Passing Variables to Sub-Agents
|
||||
|
||||
When invoking a sub-agent, pass all context through template variables in the prompt:
|
||||
|
||||
```javascript
|
||||
// Extract and prepare variables
|
||||
const basePath = `projects/${projectName}`;
|
||||
const inputPath = `${basePath}/src/`;
|
||||
const outputPath = `${basePath}/docs/`;
|
||||
|
||||
// Pass to sub-agent with all variables substituted
|
||||
const result = await runSubagent({
|
||||
description: 'Generate project documentation',
|
||||
prompt: `You are the Documentation specialist.
|
||||
|
||||
Project: ${projectName}
|
||||
Input: ${inputPath}
|
||||
Output: ${outputPath}
|
||||
|
||||
Task:
|
||||
1. Read source files from ${inputPath}
|
||||
2. Generate comprehensive documentation
|
||||
3. Write to ${outputPath}/index.md
|
||||
4. Include code examples and usage guides
|
||||
|
||||
Return: Summary of documentation generated (file count, word count)`
|
||||
});
|
||||
```
|
||||
|
||||
The sub-agent receives all necessary context embedded in the prompt. Variables are resolved before sending the prompt, so the sub-agent works with concrete paths and values, not variable placeholders.
|
||||
|
||||
### Real-World Example: Code Review Orchestrator
|
||||
|
||||
Example of a simple orchestrator that validates code through multiple specialized agents:
|
||||
|
||||
```javascript
|
||||
async function reviewCodePipeline(repositoryName, prNumber) {
|
||||
const basePath = `projects/${repositoryName}/pr-${prNumber}`;
|
||||
|
||||
// Step 1: Security Review
|
||||
const security = await runSubagent({
|
||||
description: 'Scan for security vulnerabilities',
|
||||
prompt: `You are the Security Reviewer specialist.
|
||||
|
||||
Repository: ${repositoryName}
|
||||
PR: ${prNumber}
|
||||
Code: ${basePath}/changes/
|
||||
|
||||
Task:
|
||||
1. Scan code for OWASP Top 10 vulnerabilities
|
||||
2. Check for injection attacks, auth flaws
|
||||
3. Write findings to ${basePath}/security-review.md
|
||||
|
||||
Return: List of critical, high, and medium issues found`
|
||||
});
|
||||
|
||||
// Step 2: Test Coverage Check
|
||||
const coverage = await runSubagent({
|
||||
description: 'Verify test coverage for changes',
|
||||
prompt: `You are the Test Coverage specialist.
|
||||
|
||||
Repository: ${repositoryName}
|
||||
PR: ${prNumber}
|
||||
Changes: ${basePath}/changes/
|
||||
|
||||
Task:
|
||||
1. Analyze code coverage for modified files
|
||||
2. Identify untested critical paths
|
||||
3. Write report to ${basePath}/coverage-report.md
|
||||
|
||||
Return: Current coverage percentage and gaps`
|
||||
});
|
||||
|
||||
// Step 3: Aggregate Results
|
||||
const finalReport = await runSubagent({
|
||||
description: 'Compile all review findings',
|
||||
prompt: `You are the Review Aggregator specialist.
|
||||
|
||||
Repository: ${repositoryName}
|
||||
Reports: ${basePath}/*.md
|
||||
|
||||
Task:
|
||||
1. Read all review reports from ${basePath}/
|
||||
2. Synthesize findings into single report
|
||||
3. Determine overall verdict (APPROVE/NEEDS_FIXES/BLOCK)
|
||||
4. Write to ${basePath}/final-review.md
|
||||
|
||||
Return: Final verdict and executive summary`
|
||||
});
|
||||
|
||||
return finalReport;
|
||||
}
|
||||
```
|
||||
|
||||
This pattern applies to any orchestration scenario: extract variables, call sub-agents with clear context, await results.
|
||||
|
||||
|
||||
### Variable Best Practices
|
||||
|
||||
#### 1. **Clear Documentation**
|
||||
Always document what variables are expected:
|
||||
|
||||
```markdown
|
||||
## Required Variables
|
||||
- **projectName**: The name of the project (string, required)
|
||||
- **basePath**: Root directory for project files (path, required)
|
||||
|
||||
## Optional Variables
|
||||
- **mode**: Processing mode - quick/standard/detailed (enum, default: standard)
|
||||
- **outputFormat**: Output format - markdown/json/html (enum, default: markdown)
|
||||
|
||||
## Derived Variables
|
||||
- **outputDir**: Automatically set to ${basePath}/output
|
||||
- **logFile**: Automatically set to ${basePath}/.log.md
|
||||
```
|
||||
|
||||
#### 2. **Consistent Naming**
|
||||
Use consistent variable naming conventions:
|
||||
|
||||
```javascript
|
||||
// Good: Clear, descriptive naming
|
||||
const variables = {
|
||||
projectName, // What project to work on
|
||||
basePath, // Where project files are located
|
||||
outputDirectory, // Where to save results
|
||||
processingMode, // How to process (detail level)
|
||||
configurationPath // Where config files are
|
||||
};
|
||||
|
||||
// Avoid: Ambiguous or inconsistent
|
||||
const bad_variables = {
|
||||
name, // Too generic
|
||||
path, // Unclear which path
|
||||
mode, // Too short
|
||||
config // Too vague
|
||||
};
|
||||
```
|
||||
|
||||
#### 3. **Validation and Constraints**
|
||||
Document valid values and constraints:
|
||||
|
||||
```markdown
|
||||
## Variable Constraints
|
||||
|
||||
**projectName**:
|
||||
- Type: string (alphanumeric, hyphens, underscores allowed)
|
||||
- Length: 1-100 characters
|
||||
- Required: yes
|
||||
- Pattern: `/^[a-zA-Z0-9_-]+$/`
|
||||
|
||||
**processingMode**:
|
||||
- Type: enum
|
||||
- Valid values: "quick" (< 5min), "standard" (5-15min), "detailed" (15+ min)
|
||||
- Default: "standard"
|
||||
- Required: no
|
||||
```
|
||||
|
||||
## MCP Server Configuration (Organization/Enterprise Only)
|
||||
|
||||
MCP servers extend agent capabilities with additional tools. Only supported for organization and enterprise-level agents.
|
||||
|
||||
### Configuration Format
|
||||
|
||||
```yaml
|
||||
---
name: my-custom-agent
description: 'Agent with MCP integration'
tools: ['read', 'edit', 'custom-mcp/tool-1']
mcp-servers:
  custom-mcp:
    type: 'local'
    command: 'some-command'
    args: ['--arg1', '--arg2']
    tools: ["*"]
    env:
      ENV_VAR_NAME: ${{ secrets.API_KEY }}
---
|
||||
```
|
||||
|
||||
### MCP Server Properties
|
||||
|
||||
- **type**: Server type (`'local'` or `'stdio'`)
|
||||
- **command**: Command to start the MCP server
|
||||
- **args**: Array of command arguments
|
||||
- **tools**: Tools to enable from this server (`["*"]` for all)
|
||||
- **env**: Environment variables (supports secrets)
|
||||
|
||||
### Environment Variables and Secrets
|
||||
|
||||
Secrets must be configured in repository settings under "copilot" environment.
|
||||
|
||||
**Supported syntax**:
|
||||
```yaml
|
||||
env:
  # Environment variable only
  VAR_NAME: COPILOT_MCP_ENV_VAR_VALUE

  # Variable with header
  VAR_NAME: $COPILOT_MCP_ENV_VAR_VALUE
  VAR_NAME: ${COPILOT_MCP_ENV_VAR_VALUE}

  # GitHub Actions-style (YAML only)
  VAR_NAME: ${{ secrets.COPILOT_MCP_ENV_VAR_VALUE }}
  VAR_NAME: ${{ var.COPILOT_MCP_ENV_VAR_VALUE }}
|
||||
```
|
||||
|
||||
## File Organization and Naming
|
||||
|
||||
### Repository-Level Agents
|
||||
- Location: `.github/agents/`
|
||||
- Scope: Available only in the specific repository
|
||||
- Access: Uses repository-configured MCP servers
|
||||
|
||||
### Organization/Enterprise-Level Agents
|
||||
- Location: `.github-private/agents/` (then move to `agents/` root)
|
||||
- Scope: Available across all repositories in org/enterprise
|
||||
- Access: Can configure dedicated MCP servers
|
||||
|
||||
### Naming Conventions
|
||||
- Use lowercase with hyphens: `test-specialist.agent.md`
|
||||
- Name should reflect agent purpose
|
||||
- Filename becomes default agent name (if `name` not specified)
|
||||
- Allowed characters: `.`, `-`, `_`, `a-z`, `A-Z`, `0-9`
|
||||
|
||||
## Agent Processing and Behavior
|
||||
|
||||
### Versioning
|
||||
- Based on Git commit SHAs for the agent file
|
||||
- Create branches/tags for different agent versions
|
||||
- Instantiated using latest version for repository/branch
|
||||
- PR interactions use same agent version for consistency
|
||||
|
||||
### Name Conflicts
|
||||
Priority (highest to lowest):
|
||||
1. Repository-level agent
|
||||
2. Organization-level agent
|
||||
3. Enterprise-level agent
|
||||
|
||||
Lower-level configurations override higher-level ones with the same name.
|
||||
|
||||
### Tool Processing
|
||||
- `tools` list filters available tools (built-in and MCP)
|
||||
- No tools specified = all tools enabled
|
||||
- Empty list (`[]`) = all tools disabled
|
||||
- Specific list = only those tools enabled
|
||||
- Unrecognized tool names are ignored (allows environment-specific tools)
|
||||
|
||||
### MCP Server Processing Order
|
||||
1. Out-of-the-box MCP servers (e.g., GitHub MCP)
|
||||
2. Custom agent MCP configuration (org/enterprise only)
|
||||
3. Repository-level MCP configurations
|
||||
|
||||
Each level can override settings from previous levels.
|
||||
|
||||
## Agent Creation Checklist
|
||||
|
||||
### Frontmatter
|
||||
- [ ] `description` field present and descriptive (50-150 chars)
|
||||
- [ ] `description` wrapped in single quotes
|
||||
- [ ] `name` specified (optional but recommended)
|
||||
- [ ] `tools` configured appropriately (or intentionally omitted)
|
||||
- [ ] `model` specified for optimal performance
|
||||
- [ ] `target` set if environment-specific
|
||||
- [ ] `infer` set to `false` if manual selection required
|
||||
|
||||
### Prompt Content
|
||||
- [ ] Clear agent identity and role defined
|
||||
- [ ] Core responsibilities listed explicitly
|
||||
- [ ] Approach and methodology explained
|
||||
- [ ] Guidelines and constraints specified
|
||||
- [ ] Output expectations documented
|
||||
- [ ] Examples provided where helpful
|
||||
- [ ] Instructions are specific and actionable
|
||||
- [ ] Scope and boundaries clearly defined
|
||||
- [ ] Total content under 30,000 characters
|
||||
|
||||
### File Structure
|
||||
- [ ] Filename follows lowercase-with-hyphens convention
|
||||
- [ ] File placed in correct directory (`.github/agents/` or `agents/`)
|
||||
- [ ] Filename uses only allowed characters
|
||||
- [ ] File extension is `.agent.md`
|
||||
|
||||
### Quality Assurance
|
||||
- [ ] Agent purpose is unique and not duplicative
|
||||
- [ ] Tools are minimal and necessary
|
||||
- [ ] Instructions are clear and unambiguous
|
||||
- [ ] Agent has been tested with representative tasks
|
||||
- [ ] Documentation references are current
|
||||
- [ ] Security considerations addressed (if applicable)
|
||||
|
||||
## Common Agent Patterns
|
||||
|
||||
### Testing Specialist
|
||||
**Purpose**: Focus on test coverage and quality
|
||||
**Tools**: All tools (for comprehensive test creation)
|
||||
**Approach**: Analyze, identify gaps, write tests, avoid production code changes
|
||||
|
||||
### Implementation Planner
|
||||
**Purpose**: Create detailed technical plans and specifications
|
||||
**Tools**: Limited to `['read', 'search', 'edit']`
|
||||
**Approach**: Analyze requirements, create documentation, avoid implementation
|
||||
|
||||
### Code Reviewer
|
||||
**Purpose**: Review code quality and provide feedback
|
||||
**Tools**: `['read', 'search']` only
|
||||
**Approach**: Analyze, suggest improvements, no direct modifications
|
||||
|
||||
### Refactoring Specialist
|
||||
**Purpose**: Improve code structure and maintainability
|
||||
**Tools**: `['read', 'search', 'edit']`
|
||||
**Approach**: Analyze patterns, propose refactorings, implement safely
|
||||
|
||||
### Security Auditor
|
||||
**Purpose**: Identify security issues and vulnerabilities
|
||||
**Tools**: `['read', 'search', 'web']`
|
||||
**Approach**: Scan code, check against OWASP, report findings
|
||||
|
||||
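For instance, the Code Reviewer pattern above could start from frontmatter like the following sketch (adjust the description and model to your needs):

```yaml
---
description: 'Reviews code changes for quality, readability, and potential defects'
name: 'Code Reviewer'
tools: ['read', 'search']   # read-only: the reviewer suggests changes but never edits files
model: 'Claude Sonnet 4.5'
---
```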
## Common Mistakes to Avoid
|
||||
|
||||
### Frontmatter Errors
|
||||
- ❌ Missing `description` field
|
||||
- ❌ Description not wrapped in quotes
|
||||
- ❌ Invalid tool names without checking documentation
|
||||
- ❌ Incorrect YAML syntax (indentation, quotes)
|
||||
|
||||
### Tool Configuration Issues
|
||||
- ❌ Granting excessive tool access unnecessarily
|
||||
- ❌ Missing required tools for agent's purpose
|
||||
- ❌ Not using tool aliases consistently
|
||||
- ❌ Forgetting MCP server namespace (`server-name/tool`)
|
||||
|
||||
### Prompt Content Problems
|
||||
- ❌ Vague, ambiguous instructions
|
||||
- ❌ Conflicting or contradictory guidelines
|
||||
- ❌ Lack of clear scope definition
|
||||
- ❌ Missing output expectations
|
||||
- ❌ Overly verbose instructions (exceeding character limits)
|
||||
- ❌ No examples or context for complex tasks
|
||||
|
||||
### Organizational Issues
|
||||
- ❌ Filename doesn't reflect agent purpose
|
||||
- ❌ Wrong directory (confusing repo vs org level)
|
||||
- ❌ Using spaces or special characters in filename
|
||||
- ❌ Duplicate agent names causing conflicts
|
||||
|
||||
## Testing and Validation
|
||||
|
||||
### Manual Testing
|
||||
1. Create the agent file with proper frontmatter
|
||||
2. Reload VS Code or refresh GitHub.com
|
||||
3. Select the agent from the dropdown in Copilot Chat
|
||||
4. Test with representative user queries
|
||||
5. Verify tool access works as expected
|
||||
6. Confirm output meets expectations
|
||||
|
||||
### Integration Testing
|
||||
- Test agent with different file types in scope
|
||||
- Verify MCP server connectivity (if configured)
|
||||
- Check agent behavior with missing context
|
||||
- Test error handling and edge cases
|
||||
- Validate agent switching and handoffs
|
||||
|
||||
### Quality Checks
|
||||
- Run through agent creation checklist
|
||||
- Review against common mistakes list
|
||||
- Compare with example agents in repository
|
||||
- Get peer review for complex agents
|
||||
- Document any special configuration needs
|
||||
|
||||
## Additional Resources
|
||||
|
||||
### Official Documentation
|
||||
- [Creating Custom Agents](https://docs.github.com/en/copilot/how-tos/use-copilot-agents/coding-agent/create-custom-agents)
|
||||
- [Custom Agents Configuration](https://docs.github.com/en/copilot/reference/custom-agents-configuration)
|
||||
- [Custom Agents in VS Code](https://code.visualstudio.com/docs/copilot/customization/custom-agents)
|
||||
- [MCP Integration](https://docs.github.com/en/copilot/how-tos/use-copilot-agents/coding-agent/extend-coding-agent-with-mcp)
|
||||
|
||||
### Community Resources
|
||||
- [Awesome Copilot Agents Collection](https://github.com/github/awesome-copilot/tree/main/agents)
|
||||
- [Customization Library Examples](https://docs.github.com/en/copilot/tutorials/customization-library/custom-agents)
|
||||
- [Your First Custom Agent Tutorial](https://docs.github.com/en/copilot/tutorials/customization-library/custom-agents/your-first-custom-agent)
|
||||
|
||||
### Related Files
|
||||
- [Prompt Files Guidelines](./prompt.instructions.md) - For creating prompt files
|
||||
- [Instructions Guidelines](./instructions.instructions.md) - For creating instruction files
|
||||
|
||||
## Version Compatibility Notes
|
||||
|
||||
### GitHub.com (Coding Agent)
|
||||
- ✅ Fully supports all standard frontmatter properties
|
||||
- ✅ Repository and org/enterprise level agents
|
||||
- ✅ MCP server configuration (org/enterprise)
|
||||
- ❌ Does not support `model`, `argument-hint`, `handoffs` properties
|
||||
|
||||
### VS Code / JetBrains / Eclipse / Xcode
|
||||
- ✅ Supports `model` property for AI model selection
|
||||
- ✅ Supports `argument-hint` and `handoffs` properties
|
||||
- ✅ User profile and workspace-level agents
|
||||
- ❌ Cannot configure MCP servers at repository level
|
||||
- ⚠️ Some properties may behave differently
|
||||
|
||||
When creating agents for multiple environments, focus on common properties and test in all target environments. Use `target` property to create environment-specific agents when necessary.
|
||||
|
||||
---
|
||||
description: 'Best practices for Azure DevOps Pipeline YAML files'
|
||||
applyTo: '**/azure-pipelines.yml, **/azure-pipelines*.yml, **/*.pipeline.yml'
|
||||
---
|
||||
|
||||
# Azure DevOps Pipeline YAML Best Practices
|
||||
|
||||
Guidelines for creating maintainable, secure, and efficient Azure DevOps pipelines in PowerToys.
|
||||
|
||||
## General Guidelines
|
||||
|
||||
- Use YAML syntax consistently with proper indentation (2 spaces)
|
||||
- Always include meaningful names and display names for pipelines, stages, jobs, and steps
|
||||
- Implement proper error handling and conditional execution
|
||||
- Use variables and parameters to make pipelines reusable and maintainable
|
||||
- Follow the principle of least privilege for service connections and permissions
|
||||
- Include comprehensive logging and diagnostics for troubleshooting
|
||||
|
||||
## Pipeline Structure
|
||||
|
||||
- Organize complex pipelines using stages for better visualization and control
|
||||
- Use jobs to group related steps and enable parallel execution when possible
|
||||
- Implement proper dependencies between stages and jobs
|
||||
- Use templates for reusable pipeline components
|
||||
- Keep pipeline files focused and modular - split large pipelines into multiple files
|
||||
|
||||
## Build Best Practices
|
||||
|
||||
- Use specific agent pool versions and VM images for consistency
|
||||
- Cache dependencies (npm, NuGet, Maven, etc.) to improve build performance
|
||||
- Implement proper artifact management with meaningful names and retention policies
|
||||
- Use build variables for version numbers and build metadata
|
||||
- Include code quality gates (lint checks, testing, security scans)
|
||||
- Ensure builds are reproducible and environment-independent
|
||||
|
||||
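As an illustration of the dependency-caching guidance above, a NuGet cache step might be sketched like this (the cache key and package path are assumptions to adapt to your project layout):

```yaml
variables:
  NUGET_PACKAGES: $(Pipeline.Workspace)/.nuget/packages

steps:
  - task: Cache@2
    displayName: 'Cache NuGet packages'
    inputs:
      key: 'nuget | "$(Agent.OS)" | **/packages.lock.json'
      restoreKeys: |
        nuget | "$(Agent.OS)"
      path: $(NUGET_PACKAGES)
```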
## Testing Integration
|
||||
|
||||
- Run unit tests as part of the build process
|
||||
- Publish test results in standard formats (JUnit, VSTest, etc.)
|
||||
- Include code coverage reporting and quality gates
|
||||
- Implement integration and end-to-end tests in appropriate stages
|
||||
- Use test impact analysis when available to optimize test execution
|
||||
- Fail fast on test failures to provide quick feedback
|
||||
|
||||
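For example, running tests and publishing VSTest-format results could be sketched as follows (project globs and arguments are assumptions; the `test` command also publishes results automatically in many setups):

```yaml
steps:
  - task: DotNetCoreCLI@2
    displayName: 'Run unit tests'
    inputs:
      command: 'test'
      projects: '**/*Tests.csproj'
      arguments: '--configuration $(buildConfiguration) --collect "XPlat Code Coverage"'

  - task: PublishTestResults@2
    displayName: 'Publish test results'
    condition: succeededOrFailed()   # publish results even when tests fail, for fast feedback
    inputs:
      testResultsFormat: 'VSTest'
      testResultsFiles: '**/*.trx'
```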
## Security Considerations
|
||||
|
||||
- Use Azure Key Vault for sensitive configuration and secrets
|
||||
- Implement proper secret management with variable groups
|
||||
- Use service connections with minimal required permissions
|
||||
- Enable security scans (dependency vulnerabilities, static analysis)
|
||||
- Implement approval gates for production deployments
|
||||
- Use managed identities when possible instead of service principals
|
||||
|
||||
## Deployment Strategies
|
||||
|
||||
- Implement proper environment promotion (dev → staging → production)
|
||||
- Use deployment jobs with proper environment targeting
|
||||
- Implement blue-green or canary deployment strategies when appropriate
|
||||
- Include rollback mechanisms and health checks
|
||||
- Use infrastructure as code (ARM, Bicep, Terraform) for consistent deployments
|
||||
- Implement proper configuration management per environment
|
||||
|
||||
## Variable and Parameter Management
|
||||
|
||||
- Use variable groups for shared configuration across pipelines
|
||||
- Implement runtime parameters for flexible pipeline execution
|
||||
- Use conditional variables based on branches or environments
|
||||
- Secure sensitive variables and mark them as secrets
|
||||
- Document variable purposes and expected values
|
||||
- Use variable templates for complex variable logic
|
||||
|
||||
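A short sketch of runtime parameters combined with a variable group (names and values are placeholders):

```yaml
parameters:
  - name: environment
    displayName: 'Target environment'
    type: string
    default: 'staging'
    values:
      - staging
      - production

variables:
  - group: shared-variables              # shared configuration across pipelines
  - name: deployEnvironment
    value: ${{ parameters.environment }} # resolved at compile time from the runtime parameter
```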
## Performance Optimization
|
||||
|
||||
- Use parallel jobs and matrix strategies when appropriate
|
||||
- Implement proper caching strategies for dependencies and build outputs
|
||||
- Use shallow clone for Git operations when full history isn't needed
|
||||
- Optimize Docker image builds with multi-stage builds and layer caching
|
||||
- Monitor pipeline performance and optimize bottlenecks
|
||||
- Use pipeline resource triggers efficiently
|
||||
|
||||
## Monitoring and Observability
|
||||
|
||||
- Include comprehensive logging throughout the pipeline
|
||||
- Use Azure Monitor and Application Insights for deployment tracking
|
||||
- Implement proper notification strategies for failures and successes
|
||||
- Include deployment health checks and automated rollback triggers
|
||||
- Use pipeline analytics to identify improvement opportunities
|
||||
- Document pipeline behavior and troubleshooting steps
|
||||
|
||||
## Template and Reusability
|
||||
|
||||
- Create pipeline templates for common patterns
|
||||
- Use extends templates for complete pipeline inheritance
|
||||
- Implement step templates for reusable task sequences
|
||||
- Use variable templates for complex variable logic
|
||||
- Version templates appropriately for stability
|
||||
- Document template parameters and usage examples
|
||||
|
||||
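As an example of a reusable step template, a sketch might split the template and its consumer like this (file names are placeholders):

```yaml
# templates/build-steps.yml
parameters:
  - name: buildConfiguration
    type: string
    default: 'Release'

steps:
  - task: DotNetCoreCLI@2
    displayName: 'Build (${{ parameters.buildConfiguration }})'
    inputs:
      command: 'build'
      projects: '**/*.csproj'
      arguments: '--configuration ${{ parameters.buildConfiguration }}'
```

```yaml
# azure-pipelines.yml (consumer)
steps:
  - template: templates/build-steps.yml
    parameters:
      buildConfiguration: 'Release'
```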
## Branch and Trigger Strategy
|
||||
|
||||
- Implement appropriate triggers for different branch types
|
||||
- Use path filters to trigger builds only when relevant files change
|
||||
- Configure proper CI/CD triggers for main/master branches
|
||||
- Use pull request triggers for code validation
|
||||
- Implement scheduled triggers for maintenance tasks
|
||||
- Consider resource triggers for multi-repository scenarios
|
||||
|
||||
## Example Structure
|
||||
|
||||
```yaml
|
||||
# azure-pipelines.yml
|
||||
trigger:
  branches:
    include:
      - main
      - develop
  paths:
    exclude:
      - docs/*
      - README.md

variables:
  - group: shared-variables
  - name: buildConfiguration
    value: 'Release'

stages:
  - stage: Build
    displayName: 'Build and Test'
    jobs:
      - job: Build
        displayName: 'Build Application'
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - task: UseDotNet@2
            displayName: 'Use .NET SDK'
            inputs:
              version: '8.x'

          - task: DotNetCoreCLI@2
            displayName: 'Restore dependencies'
            inputs:
              command: 'restore'
              projects: '**/*.csproj'

          - task: DotNetCoreCLI@2
            displayName: 'Build application'
            inputs:
              command: 'build'
              projects: '**/*.csproj'
              arguments: '--configuration $(buildConfiguration) --no-restore'

          # Publish build output as the 'drop' artifact consumed by the Deploy stage below
          - task: PublishPipelineArtifact@1
            displayName: 'Publish drop artifact'
            inputs:
              targetPath: '$(Build.ArtifactStagingDirectory)'
              artifact: 'drop'

  - stage: Deploy
    displayName: 'Deploy to Staging'
    dependsOn: Build
    condition: and(succeeded(), eq(variables['Build.SourceBranch'], 'refs/heads/main'))
    jobs:
      - deployment: DeployToStaging
        displayName: 'Deploy to Staging Environment'
        environment: 'staging'
        strategy:
          runOnce:
            deploy:
              steps:
                - download: current
                  displayName: 'Download drop artifact'
                  artifact: drop
                - task: AzureWebApp@1
                  displayName: 'Deploy to Azure Web App'
                  inputs:
                    azureSubscription: 'staging-service-connection'
                    appType: 'webApp'
                    appName: 'myapp-staging'
                    package: '$(Pipeline.Workspace)/drop/**/*.zip'
|
||||
```
|
||||
|
||||
## Common Anti-Patterns to Avoid
|
||||
|
||||
- Hardcoding sensitive values directly in YAML files
|
||||
- Using overly broad triggers that cause unnecessary builds
|
||||
- Mixing build and deployment logic in a single stage
|
||||
- Not implementing proper error handling and cleanup
|
||||
- Using deprecated task versions without upgrade plans
|
||||
- Creating monolithic pipelines that are difficult to maintain
|
||||
- Not using proper naming conventions for clarity
|
||||
- Ignoring pipeline security best practices
|
||||
|
||||
---
|
||||
description: 'Guidelines for shared libraries including logging, IPC, settings, DPI, telemetry, and utilities consumed by multiple modules'
|
||||
applyTo: 'src/common/**'
|
||||
---
|
||||
|
||||
# Common Libraries – Shared Code Guidance
|
||||
|
||||
Guidelines for modifying shared code in `src/common/`. Changes here can have wide-reaching impact across the entire PowerToys codebase.
|
||||
|
||||
## Scope
|
||||
|
||||
- Logging infrastructure (`src/common/logger/`)
|
||||
- IPC primitives and named pipe utilities
|
||||
- Settings serialization and management
|
||||
- DPI awareness and scaling utilities
|
||||
- Telemetry helpers
|
||||
- General utilities (JSON parsing, string helpers, etc.)
|
||||
|
||||
## Guidelines
|
||||
|
||||
### API Stability
|
||||
|
||||
- Avoid breaking public headers/APIs; if changed, search & update all callers
|
||||
- Coordinate ABI-impacting struct/class layout changes; keep binary compatibility
|
||||
- When modifying public interfaces, grep the entire codebase for usages
|
||||
|
||||
### Performance
|
||||
|
||||
- Watch perf in hot paths (hooks, timers, serialization)
|
||||
- Avoid avoidable allocations in frequently called code
|
||||
- Profile changes that touch performance-sensitive areas
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Ask before adding third-party deps or changing serialization formats
|
||||
- New dependencies must be MIT-licensed or approved by PM team
|
||||
- Add any new external packages to `NOTICE.md`
|
||||
|
||||
### Logging
|
||||
|
||||
- C++ logging uses spdlog (`Logger::info`, `Logger::warn`, `Logger::error`, `Logger::debug`)
|
||||
- Initialize with `init_logger()` early in startup
|
||||
- Keep hot paths quiet – no logging in tight loops or hooks
|
||||
|
||||
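A minimal sketch of the logging guidance — the include path and the exact `init_logger()` signature vary by module, so treat both as assumptions and check a neighbouring module for the real setup:

```cpp
#include <logger.h> // assumed include; actual header lives under src/common/logger/

int run_module()
{
    // Initialize logging once, early in startup, as recommended above
    init_logger();

    Logger::info("Module starting");
    // ... do the work; avoid Logger calls inside hooks or tight loops ...
    Logger::error("Failed to load configuration");
    return 0;
}
```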
## Acceptance Criteria
|
||||
|
||||
- No unintended ABI breaks
|
||||
- No noisy logs in hot paths
|
||||
- New non-obvious symbols briefly commented
|
||||
- All callers updated when interfaces change
|
||||
|
||||
## Code Style
|
||||
|
||||
- **C++**: Follow `.clang-format` in `src/`; use Modern C++ patterns per C++ Core Guidelines
|
||||
- **C#**: Follow `src/.editorconfig`; enforce StyleCop.Analyzers
|
||||
|
||||
## Validation
|
||||
|
||||
- Build: `tools\build\build.cmd` from `src/common/` folder
|
||||
- Verify no ABI breaks: grep for changed function/struct names across codebase
|
||||
- Check logs: ensure no new logging in performance-critical paths
|
||||
.github/instructions/instructions.instructions.md
|
||||
---
|
||||
description: 'Guidelines for creating high-quality custom instruction files for GitHub Copilot'
|
||||
applyTo: '**/*.instructions.md'
|
||||
---
|
||||
|
||||
# Custom Instructions File Guidelines
|
||||
|
||||
Instructions for creating effective and maintainable custom instruction files that guide GitHub Copilot in generating domain-specific code and following project conventions.
|
||||
|
||||
## Project Context
|
||||
|
||||
- Target audience: Developers and GitHub Copilot working with domain-specific code
|
||||
- File format: Markdown with YAML frontmatter
|
||||
- File naming convention: lowercase with hyphens (e.g., `react-best-practices.instructions.md`)
|
||||
- Location: `.github/instructions/` directory
|
||||
- Purpose: Provide context-aware guidance for code generation, review, and documentation
|
||||
|
||||
## Required Frontmatter
|
||||
|
||||
Every instruction file must include YAML frontmatter with the following fields:
|
||||
|
||||
```yaml
|
||||
---
|
||||
description: 'Brief description of the instruction purpose and scope'
|
||||
applyTo: 'glob pattern for target files (e.g., **/*.ts, **/*.py)'
|
||||
---
|
||||
```
|
||||
|
||||
### Frontmatter Guidelines
|
||||
|
||||
- **description**: Single-quoted string, 1-500 characters, clearly stating the purpose
|
||||
- **applyTo**: Glob pattern(s) specifying which files these instructions apply to
|
||||
- Single pattern: `'**/*.ts'`
|
||||
- Multiple patterns: `'**/*.ts, **/*.tsx, **/*.js'`
|
||||
- Specific files: `'src/**/*.py'`
|
||||
- All files: `'**'`
|
||||
|
||||
## File Structure
|
||||
|
||||
A well-structured instruction file should include the following sections:
|
||||
|
||||
### 1. Title and Overview
|
||||
|
||||
- Clear, descriptive title using `#` heading
|
||||
- Brief introduction explaining the purpose and scope
|
||||
- Optional: Project context section with key technologies and versions
|
||||
|
||||
### 2. Core Sections
|
||||
|
||||
Organize content into logical sections based on the domain:
|
||||
|
||||
- **General Instructions**: High-level guidelines and principles
|
||||
- **Best Practices**: Recommended patterns and approaches
|
||||
- **Code Standards**: Naming conventions, formatting, style rules
|
||||
- **Architecture/Structure**: Project organization and design patterns
|
||||
- **Common Patterns**: Frequently used implementations
|
||||
- **Security**: Security considerations (if applicable)
|
||||
- **Performance**: Optimization guidelines (if applicable)
|
||||
- **Testing**: Testing standards and approaches (if applicable)
|
||||
|
||||
### 3. Examples and Code Snippets
|
||||
|
||||
Provide concrete examples with clear labels:
|
||||
|
||||
```markdown
|
||||
### Good Example
|
||||
\`\`\`language
|
||||
// Recommended approach
|
||||
code example here
|
||||
\`\`\`
|
||||
|
||||
### Bad Example
|
||||
\`\`\`language
|
||||
// Avoid this pattern
|
||||
code example here
|
||||
\`\`\`
|
||||
```
|
||||
|
||||
### 4. Validation and Verification (Optional but Recommended)
|
||||
|
||||
- Build commands to verify code
|
||||
- Lint checks and formatting tools
|
||||
- Testing requirements
|
||||
- Verification steps
|
||||
|
||||
## Content Guidelines
|
||||
|
||||
### Writing Style
|
||||
|
||||
- Use clear, concise language
|
||||
- Write in imperative mood ("Use", "Implement", "Avoid")
|
||||
- Be specific and actionable
|
||||
- Avoid ambiguous terms like "should", "might", "possibly"
|
||||
- Use bullet points and lists for readability
|
||||
- Keep sections focused and scannable
|
||||
|
||||
### Best Practices
|
||||
|
||||
- **Be Specific**: Provide concrete examples rather than abstract concepts
|
||||
- **Show Why**: Explain the reasoning behind recommendations when it adds value
|
||||
- **Use Tables**: For comparing options, listing rules, or showing patterns
|
||||
- **Include Examples**: Real code snippets are more effective than descriptions
|
||||
- **Stay Current**: Reference current versions and best practices
|
||||
- **Link Resources**: Include official documentation and authoritative sources
|
||||
|
||||
### Common Patterns to Include
|
||||
|
||||
1. **Naming Conventions**: How to name variables, functions, classes, files
|
||||
2. **Code Organization**: File structure, module organization, import order
|
||||
3. **Error Handling**: Preferred error handling patterns
|
||||
4. **Dependencies**: How to manage and document dependencies
|
||||
5. **Comments and Documentation**: When and how to document code
|
||||
6. **Version Information**: Target language/framework versions
|
||||
|
||||
## Patterns to Follow
|
||||
|
||||
### Bullet Points and Lists
|
||||
|
||||
```markdown
|
||||
## Security Best Practices
|
||||
|
||||
- Always validate user input before processing
|
||||
- Use parameterized queries to prevent SQL injection
|
||||
- Store secrets in environment variables, never in code
|
||||
- Implement proper authentication and authorization
|
||||
- Enable HTTPS for all production endpoints
|
||||
```
|
||||
|
||||
### Tables for Structured Information
|
||||
|
||||
```markdown
|
||||
## Common Issues
|
||||
|
||||
| Issue | Solution | Example |
|
||||
| ---------------- | ------------------- | ----------------------------- |
|
||||
| Magic numbers | Use named constants | `const MAX_RETRIES = 3` |
|
||||
| Deep nesting | Extract functions | Refactor nested if statements |
|
||||
| Hardcoded values | Use configuration | Store API URLs in config |
|
||||
```
|
||||
|
||||
### Code Comparison
|
||||
|
||||
```markdown
|
||||
### Good Example - Using TypeScript interfaces
|
||||
\`\`\`typescript
|
||||
interface User {
|
||||
id: string;
|
||||
name: string;
|
||||
email: string;
|
||||
}
|
||||
|
||||
function getUser(id: string): User {
|
||||
// Implementation
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
### Bad Example - Using any type
|
||||
\`\`\`typescript
|
||||
function getUser(id: any): any {
|
||||
// Loses type safety
|
||||
}
|
||||
\`\`\`
|
||||
```
|
||||
|
||||
### Conditional Guidance
|
||||
|
||||
```markdown
|
||||
## Framework Selection
|
||||
|
||||
- **For small projects**: Use Minimal API approach
|
||||
- **For large projects**: Use controller-based architecture with clear separation
|
||||
- **For microservices**: Consider domain-driven design patterns
|
||||
```
|
||||
|
||||
## Patterns to Avoid
|
||||
|
||||
- **Overly verbose explanations**: Keep it concise and scannable
|
||||
- **Outdated information**: Always reference current versions and practices
|
||||
- **Ambiguous guidelines**: Be specific about what to do or avoid
|
||||
- **Missing examples**: Abstract rules without concrete code examples
|
||||
- **Contradictory advice**: Ensure consistency throughout the file
|
||||
- **Copy-paste from documentation**: Add value by distilling and providing context
|
||||
|
||||
## Testing Your Instructions
|
||||
|
||||
Before finalizing instruction files:
|
||||
|
||||
1. **Test with Copilot**: Try the instructions with actual prompts in VS Code
|
||||
2. **Verify Examples**: Ensure code examples are correct and run without errors
|
||||
3. **Check Glob Patterns**: Confirm `applyTo` patterns match intended files
|
||||
|
||||
## Example Structure
|
||||
|
||||
Here's a minimal example structure for a new instruction file:
|
||||
|
||||
```markdown
|
||||
---
|
||||
description: 'Brief description of purpose'
|
||||
applyTo: '**/*.ext'
|
||||
---
|
||||
|
||||
# Technology Name Development
|
||||
|
||||
Brief introduction and context.
|
||||
|
||||
## General Instructions
|
||||
|
||||
- High-level guideline 1
|
||||
- High-level guideline 2
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Specific practice 1
|
||||
- Specific practice 2
|
||||
|
||||
## Code Standards
|
||||
|
||||
### Naming Conventions
|
||||
- Rule 1
|
||||
- Rule 2
|
||||
|
||||
### File Organization
|
||||
- Structure 1
|
||||
- Structure 2
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Pattern 1
|
||||
Description and example
|
||||
|
||||
\`\`\`language
|
||||
code example
|
||||
\`\`\`
|
||||
|
||||
### Pattern 2
|
||||
Description and example
|
||||
|
||||
## Validation
|
||||
|
||||
- Build command: `command to verify`
|
||||
- Lint checks: `command to lint`
|
||||
- Testing: `command to test`
|
||||
```
|
||||
|
||||
## Maintenance
|
||||
|
||||
- Review instructions when dependencies or frameworks are updated
|
||||
- Update examples to reflect current best practices
|
||||
- Remove outdated patterns or deprecated features
|
||||
- Add new patterns as they emerge in the community
|
||||
- Keep glob patterns accurate as project structure evolves
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [Custom Instructions Documentation](https://code.visualstudio.com/docs/copilot/customization/custom-instructions)
|
||||
- [Awesome Copilot Instructions](https://github.com/github/awesome-copilot/tree/main/instructions)
|
||||
.github/instructions/prompt.instructions.md
|
||||
---
|
||||
description: 'Guidelines for creating high-quality prompt files for GitHub Copilot'
|
||||
applyTo: '**/*.prompt.md'
|
||||
---
|
||||
|
||||
# Copilot Prompt Files Guidelines
|
||||
|
||||
Instructions for creating effective and maintainable prompt files that guide GitHub Copilot in delivering consistent, high-quality outcomes across any repository.
|
||||
|
||||
## Scope and Principles
|
||||
- Target audience: maintainers and contributors authoring reusable prompts for Copilot Chat.
|
||||
- Goals: predictable behaviour, clear expectations, minimal permissions, and portability across repositories.
|
||||
- Primary references: VS Code documentation on prompt files and organization-specific conventions.
|
||||
|
||||
## Frontmatter Requirements
|
||||
|
||||
Every prompt file should include YAML frontmatter with the following fields:
|
||||
|
||||
### Required/Recommended Fields
|
||||
|
||||
| Field | Required | Description |
|
||||
|-------|----------|-------------|
|
||||
| `description` | Recommended | A short description of the prompt (single sentence, actionable outcome) |
|
||||
| `name` | Optional | The name shown after typing `/` in chat. Defaults to filename if not specified |
|
||||
| `agent` | Recommended | The agent to use: `ask`, `edit`, `agent`, or a custom agent name. Defaults to current agent |
|
||||
| `model` | Optional | The language model to use. Defaults to currently selected model |
|
||||
| `tools` | Optional | List of tool/tool set names available for this prompt |
|
||||
| `argument-hint` | Optional | Hint text shown in chat input to guide user interaction |
|
||||
|
||||
### Guidelines
|
||||
|
||||
- Use consistent quoting (single quotes recommended) and keep one field per line for readability and version control clarity
|
||||
- If `tools` are specified and current agent is `ask` or `edit`, the default agent becomes `agent`
|
||||
- Preserve any additional metadata (`language`, `tags`, `visibility`, etc.) required by your organization
|
||||
|
||||
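Putting the table above together, a minimal prompt-file header might look like this sketch (all field values are placeholders to adapt):

```yaml
---
description: 'Generate a README for the selected project folder'
name: 'generate-readme'
agent: 'agent'
tools: ['search', 'edit']
argument-hint: 'Path to the project folder'
---
```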
## File Naming and Placement
|
||||
- Use kebab-case filenames ending with `.prompt.md` and store them under `.github/prompts/` unless your workspace standard specifies another directory.
|
||||
- Provide a short filename that communicates the action (for example, `generate-readme.prompt.md` rather than `prompt1.prompt.md`).
|
||||
|
||||
## Body Structure
|
||||
- Start with an `#` level heading that matches the prompt intent so it surfaces well in Quick Pick search.
|
||||
- Organize content with predictable sections. Recommended baseline: `Mission` or `Primary Directive`, `Scope & Preconditions`, `Inputs`, `Workflow` (step-by-step), `Output Expectations`, and `Quality Assurance`.
|
||||
- Adjust section names to fit the domain, but retain the logical flow: why → context → inputs → actions → outputs → validation.
|
||||
- Reference related prompts or instruction files using relative links to aid discoverability.
|
||||
|
||||
## Input and Context Handling
|
||||
- Use `${input:variableName[:placeholder]}` for required values and explain when the user must supply them. Provide defaults or alternatives where possible.
|
||||
- Call out contextual variables such as `${selection}`, `${file}`, `${workspaceFolder}` only when they are essential, and describe how Copilot should interpret them.
|
||||
- Document how to proceed when mandatory context is missing (for example, “Request the file path and stop if it remains undefined”).
|
||||
|
||||
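For example, a prompt body might declare its inputs and the fallback behaviour like this sketch (the variable names are illustrative):

```markdown
## Inputs
- Target file: ${input:targetFile:Path to the file to document}
- Current selection: ${selection} (used only if no file is provided)

## Workflow
1. If `${input:targetFile}` is empty and there is no selection, request the file path and stop.
2. Otherwise, analyze the chosen content and generate the documentation.
```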
## Tool and Permission Guidance
|
||||
- Limit `tools` to the smallest set that enables the task. List them in the preferred execution order when the sequence matters.
|
||||
- If the prompt inherits tools from a chat mode, mention that relationship and state any critical tool behaviours or side effects.
|
||||
- Warn about destructive operations (file creation, edits, terminal commands) and include guard rails or confirmation steps in the workflow.
|
||||
|
||||
## Instruction Tone and Style
|
||||
- Write in direct, imperative sentences targeted at Copilot (for example, “Analyze”, “Generate”, “Summarize”).
|
||||
- Keep sentences short and unambiguous, following Google Developer Documentation translation best practices to support localization.
|
||||
- Avoid idioms, humor, or culturally specific references; favor neutral, inclusive language.
|
||||
|
||||
## Output Definition
|
||||
- Specify the format, structure, and location of expected results (for example, “Create an architecture decision record file using the template below, such as `docs/architecture-decisions/record-XXXX.md`”).
|
||||
- Include success criteria and failure triggers so Copilot knows when to halt or retry.
|
||||
- Provide validation steps—manual checks, automated commands, or acceptance criteria lists—that reviewers can execute after running the prompt.
|
||||
|
||||
## Examples and Reusable Assets
|
||||
- Embed Good/Bad examples or scaffolds (Markdown templates, JSON stubs) that the prompt should produce or follow.
|
||||
- Maintain reference tables (capabilities, status codes, role descriptions) inline to keep the prompt self-contained. Update these tables when upstream resources change.
|
||||
- Link to authoritative documentation instead of duplicating lengthy guidance.
|
||||
|
||||
## Quality Assurance Checklist
|
||||
- [ ] Frontmatter fields are complete, accurate, and least-privilege.
|
||||
- [ ] Inputs include placeholders, default behaviours, and fallbacks.
|
||||
- [ ] Workflow covers preparation, execution, and post-processing without gaps.
|
||||
- [ ] Output expectations include formatting and storage details.
|
||||
- [ ] Validation steps are actionable (commands, diff checks, review prompts).
|
||||
- [ ] Security, compliance, and privacy policies referenced by the prompt are current.
|
||||
- [ ] Prompt executes successfully in VS Code (`Chat: Run Prompt`) using representative scenarios.
|
||||
|
||||
## Maintenance Guidance
|
||||
- Version-control prompts alongside the code they affect; update them when dependencies, tooling, or review processes change.
|
||||
- Review prompts periodically to ensure tool lists, model requirements, and linked documents remain valid.
|
||||
- Coordinate with other repositories: when a prompt proves broadly useful, extract common guidance into instruction files or shared prompt packs.
|
||||
|
||||
## Additional Resources
|
||||
- [Prompt Files Documentation](https://code.visualstudio.com/docs/copilot/customization/prompt-files#_prompt-file-format)
|
||||
- [Awesome Copilot Prompt Files](https://github.com/github/awesome-copilot/tree/main/prompts)
|
||||
- [Tool Configuration](https://code.visualstudio.com/docs/copilot/chat/chat-agent-mode#_agent-mode-tools)
|
||||
|
||||
---
|
||||
description: 'Guidelines for Runner and Settings UI components that communicate via named pipes and manage module lifecycle'
|
||||
applyTo: 'src/runner/**,src/settings-ui/**'
|
||||
---
|
||||
|
||||
# Runner & Settings UI – Core Components Guidance
|
||||
|
||||
Guidelines for modifying the Runner (tray/module loader) and Settings UI (configuration app). These components communicate via Windows Named Pipes using JSON messages.
|
||||
|
||||
## Runner (`src/runner/`)
|
||||
|
||||
### Scope
|
||||
|
||||
- Module bootstrap, hotkey management, settings bridge, update/elevation handling
|
||||
|
||||
### Guidelines
|
||||
|
||||
- If IPC/JSON contracts change, mirror updates in `src/settings-ui/**`
|
||||
- Keep module discovery in `src/runner/main.cpp` in sync when adding/removing modules
|
||||
- Keep startup lean: avoid blocking/network calls in early init path
|
||||
- Preserve GPO & elevation behaviors; confirm no regression in policy handling
|
||||
- Ask before modifying update workflow or elevation logic
|
||||
|
||||
### Acceptance Criteria
|
||||
|
||||
- Stable startup, consistent contracts, no unnecessary logging noise
|
||||
|
||||
## Settings UI (`src/settings-ui/`)
|
||||
|
||||
### Scope
|
||||
|
||||
- WinUI/WPF UI, communicates with Runner over named pipes; manages persisted settings schema
|
||||
|
||||
### Guidelines
|
||||
|
||||
- Don't break settings schema silently; add migration when shape changes
|
||||
- If IPC/JSON contracts change, align with `src/runner/**` implementation
|
||||
- Keep UI responsive: marshal to UI thread for UI-bound operations
|
||||
- Reuse existing styles/resources; avoid duplicate theme keys
|
||||
- Add/adjust migration or serialization tests when changing persisted settings
|
||||
|
||||
### Acceptance Criteria
|
||||
|
||||
- Schema integrity preserved, responsive UI, consistent contracts, no style duplication
|
||||
|
||||
## Shared Concerns
|
||||
|
||||
### IPC Contract Changes
|
||||
|
||||
When modifying the JSON message format between Runner and Settings UI:
|
||||
|
||||
1. Update both `src/runner/` and `src/settings-ui/` in the same PR
|
||||
2. Preserve backward compatibility where possible (a sketch follows this list)
|
||||
3. Add migration logic for settings schema changes
|
||||
4. Test both directions of communication
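
A minimal sketch of a backward-compatible contract change, expressed as TypeScript types purely for illustration (the real contracts live in the C++ Runner and the C# Settings UI; every name below is hypothetical):

```typescript
// Hypothetical Runner <-> Settings UI message, shown as TypeScript for brevity only.
interface UpdateSettingsMessage {
  action: 'update_settings';           // existing discriminator
  module: string;                      // e.g. "FancyZones"
  values: Record<string, unknown>;     // existing payload
  // Added in a later revision as *optional* fields: older peers that ignore
  // unknown fields keep working, newer peers fall back to defaults when absent.
  schemaVersion?: number;
  reason?: 'user' | 'migration';
}

// Receiver-side normalization so missing fields behave like the legacy shape.
function normalize(msg: UpdateSettingsMessage): Required<UpdateSettingsMessage> {
  return {
    ...msg,
    schemaVersion: msg.schemaVersion ?? 1,
    reason: msg.reason ?? 'user',
  };
}
```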
|
||||
|
||||
### Code Style
|
||||
|
||||
- **C++ (Runner)**: Follow `.clang-format` in `src/`
|
||||
- **C# (Settings UI)**: Follow `src/.editorconfig`, use StyleCop.Analyzers
|
||||
- **XAML**: Use XamlStyler or run `.\.pipelines\applyXamlStyling.ps1 -Main`
|
||||
|
||||
## Validation
|
||||
|
||||
- Build Runner: `tools\build\build.cmd` from `src/runner/`
|
||||
- Build Settings UI: `tools\build\build.cmd` from `src/settings-ui/`
|
||||
- Test IPC: Launch both Runner and Settings UI, verify communication works
|
||||
- Schema changes: Run serialization tests if settings shape changed
|
||||
@@ -1,228 +0,0 @@
|
||||
---
|
||||
description: 'Instructions for building Model Context Protocol (MCP) servers using the TypeScript SDK'
|
||||
applyTo: '**/*.ts, **/*.js, **/package.json'
|
||||
---
|
||||
|
||||
# TypeScript MCP Server Development
|
||||
|
||||
## Instructions
|
||||
|
||||
- Use the **@modelcontextprotocol/sdk** npm package: `npm install @modelcontextprotocol/sdk`
|
||||
- Import from specific paths: `@modelcontextprotocol/sdk/server/mcp.js`, `@modelcontextprotocol/sdk/server/stdio.js`, etc.
|
||||
- Use `McpServer` class for high-level server implementation with automatic protocol handling
|
||||
- Use `Server` class for low-level control with manual request handlers
|
||||
- Use **zod** for input/output schema validation: `npm install zod@3`
|
||||
- Always provide `title` field for tools, resources, and prompts for better UI display
|
||||
- Use `registerTool()`, `registerResource()`, and `registerPrompt()` methods (recommended over older APIs)
|
||||
- Define schemas using zod: `{ inputSchema: { param: z.string() }, outputSchema: { result: z.string() } }`
|
||||
- Return both `content` (for display) and `structuredContent` (for structured data) from tools
|
||||
- For HTTP servers, use `StreamableHTTPServerTransport` with Express or similar frameworks
|
||||
- For local integrations, use `StdioServerTransport` for stdio-based communication
|
||||
- Create new transport instances per request to prevent request ID collisions (stateless mode)
|
||||
- Use session management with `sessionIdGenerator` for stateful servers (see the stateful server sketch under Common Patterns)
|
||||
- Enable DNS rebinding protection for local servers: `enableDnsRebindingProtection: true`
|
||||
- Configure CORS headers and expose `Mcp-Session-Id` for browser-based clients
|
||||
- Use `ResourceTemplate` for dynamic resources with URI parameters: `new ResourceTemplate('resource://{param}', { list: undefined })`
|
||||
- Support completions for better UX using `completable()` wrapper from `@modelcontextprotocol/sdk/server/completable.js`
|
||||
- Implement sampling with `server.server.createMessage()` to request LLM completions from clients
|
||||
- Use `server.server.elicitInput()` to request additional user input during tool execution
|
||||
- Enable notification debouncing for bulk updates: `debouncedNotificationMethods: ['notifications/tools/list_changed']`
|
||||
- Dynamic updates: call `.enable()`, `.disable()`, `.update()`, or `.remove()` on registered items to emit `listChanged` notifications
|
||||
- Use `getDisplayName()` from `@modelcontextprotocol/sdk/shared/metadataUtils.js` for UI display names
|
||||
- Test servers with MCP Inspector: `npx @modelcontextprotocol/inspector`
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Keep tool implementations focused on single responsibilities
|
||||
- Provide clear, descriptive titles and descriptions for LLM understanding
|
||||
- Use proper TypeScript types for all parameters and return values
|
||||
- Implement comprehensive error handling with try-catch blocks
|
||||
- Return `isError: true` in tool results for error conditions
|
||||
- Use async/await for all asynchronous operations
|
||||
- Close database connections and clean up resources properly
|
||||
- Validate input parameters before processing
|
||||
- Use structured logging for debugging without polluting stdout/stderr
|
||||
- Consider security implications when exposing file system or network access
|
||||
- Implement proper resource cleanup on transport close events
|
||||
- Use environment variables for configuration (ports, API keys, etc.)
|
||||
- Document tool capabilities and limitations clearly
|
||||
- Test with multiple clients to ensure compatibility
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Basic Server Setup (HTTP)
|
||||
```typescript
|
||||
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
|
||||
import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js';
|
||||
import express from 'express';
|
||||
|
||||
const server = new McpServer({
|
||||
name: 'my-server',
|
||||
version: '1.0.0'
|
||||
});
|
||||
|
||||
const app = express();
|
||||
app.use(express.json());
|
||||
|
||||
app.post('/mcp', async (req, res) => {
|
||||
const transport = new StreamableHTTPServerTransport({
|
||||
sessionIdGenerator: undefined,
|
||||
enableJsonResponse: true
|
||||
});
|
||||
|
||||
res.on('close', () => transport.close());
|
||||
|
||||
await server.connect(transport);
|
||||
await transport.handleRequest(req, res, req.body);
|
||||
});
|
||||
|
||||
app.listen(3000);
|
||||
```
|
||||
|
||||
### Basic Server Setup (stdio)
|
||||
```typescript
|
||||
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
|
||||
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
|
||||
|
||||
const server = new McpServer({
|
||||
name: 'my-server',
|
||||
version: '1.0.0'
|
||||
});
|
||||
|
||||
// ... register tools, resources, prompts ...
|
||||
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
```
|
||||
|
||||
### Simple Tool
|
||||
```typescript
|
||||
import { z } from 'zod';
|
||||
|
||||
server.registerTool(
|
||||
'calculate',
|
||||
{
|
||||
title: 'Calculator',
|
||||
description: 'Perform basic calculations',
|
||||
inputSchema: { a: z.number(), b: z.number(), op: z.enum(['+', '-', '*', '/']) },
|
||||
outputSchema: { result: z.number() }
|
||||
},
|
||||
async ({ a, b, op }) => {
|
||||
const result = op === '+' ? a + b : op === '-' ? a - b :
|
||||
op === '*' ? a * b : a / b;
|
||||
const output = { result };
|
||||
return {
|
||||
content: [{ type: 'text', text: JSON.stringify(output) }],
|
||||
structuredContent: output
|
||||
};
|
||||
}
|
||||
);
|
||||
```
|
||||
|
||||
### Dynamic Resource
|
||||
```typescript
|
||||
import { ResourceTemplate } from '@modelcontextprotocol/sdk/server/mcp.js';
|
||||
|
||||
server.registerResource(
|
||||
'user',
|
||||
new ResourceTemplate('users://{userId}', { list: undefined }),
|
||||
{
|
||||
title: 'User Profile',
|
||||
description: 'Fetch user profile data'
|
||||
},
|
||||
async (uri, { userId }) => ({
|
||||
contents: [{
|
||||
uri: uri.href,
|
||||
text: `User ${userId} data here`
|
||||
}]
|
||||
})
|
||||
);
|
||||
```
|
||||
|
||||
### Tool with Sampling
|
||||
```typescript
|
||||
server.registerTool(
|
||||
'summarize',
|
||||
{
|
||||
title: 'Text Summarizer',
|
||||
description: 'Summarize text using LLM',
|
||||
inputSchema: { text: z.string() },
|
||||
outputSchema: { summary: z.string() }
|
||||
},
|
||||
async ({ text }) => {
|
||||
const response = await server.server.createMessage({
|
||||
messages: [{
|
||||
role: 'user',
|
||||
content: { type: 'text', text: `Summarize: ${text}` }
|
||||
}],
|
||||
maxTokens: 500
|
||||
});
|
||||
|
||||
const summary = response.content.type === 'text' ?
|
||||
response.content.text : 'Unable to summarize';
|
||||
const output = { summary };
|
||||
return {
|
||||
content: [{ type: 'text', text: JSON.stringify(output) }],
|
||||
structuredContent: output
|
||||
};
|
||||
}
|
||||
);
|
||||
```
|
||||
|
||||
### Prompt with Completion
|
||||
```typescript
|
||||
import { completable } from '@modelcontextprotocol/sdk/server/completable.js';
|
||||
|
||||
server.registerPrompt(
|
||||
'review',
|
||||
{
|
||||
title: 'Code Review',
|
||||
description: 'Review code with specific focus',
|
||||
argsSchema: {
|
||||
language: completable(z.string(), value =>
|
||||
['typescript', 'python', 'javascript', 'java']
|
||||
.filter(l => l.startsWith(value))
|
||||
),
|
||||
code: z.string()
|
||||
}
|
||||
},
|
||||
({ language, code }) => ({
|
||||
messages: [{
|
||||
role: 'user',
|
||||
content: {
|
||||
type: 'text',
|
||||
text: `Review this ${language} code:\n\n${code}`
|
||||
}
|
||||
}]
|
||||
})
|
||||
);
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
```typescript
|
||||
server.registerTool(
|
||||
'risky-operation',
|
||||
{
|
||||
title: 'Risky Operation',
|
||||
description: 'An operation that might fail',
|
||||
inputSchema: { input: z.string() },
|
||||
outputSchema: { result: z.string() }
|
||||
},
|
||||
async ({ input }) => {
|
||||
try {
|
||||
const result = await performRiskyOperation(input);
|
||||
const output = { result };
|
||||
return {
|
||||
content: [{ type: 'text', text: JSON.stringify(output) }],
|
||||
structuredContent: output
|
||||
};
|
||||
} catch (err: unknown) {
|
||||
const error = err as Error;
|
||||
return {
|
||||
content: [{ type: 'text', text: `Error: ${error.message}` }],
|
||||
isError: true
|
||||
};
|
||||
}
|
||||
}
|
||||
);
|
||||
```
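
### Stateful Server with Session Management (HTTP)

A minimal sketch of the stateful pattern referenced above (`sessionIdGenerator`, the `Mcp-Session-Id` header, and DNS rebinding protection). It follows the SDK's documented approach rather than prescribing one; the `isInitializeRequest` helper and the error payload mirror the SDK examples and should be verified against the SDK version in use.

```typescript
import express from 'express';
import { randomUUID } from 'node:crypto';
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js';
import { isInitializeRequest } from '@modelcontextprotocol/sdk/types.js';

const app = express();
app.use(express.json());

// One transport per session, keyed by the Mcp-Session-Id header.
const transports: Record<string, StreamableHTTPServerTransport> = {};

app.post('/mcp', async (req, res) => {
  const sessionId = req.headers['mcp-session-id'] as string | undefined;
  let transport: StreamableHTTPServerTransport;

  if (sessionId && transports[sessionId]) {
    // Reuse the transport created when this session was initialized.
    transport = transports[sessionId];
  } else if (!sessionId && isInitializeRequest(req.body)) {
    // New session: create a transport and register it once the session ID exists.
    transport = new StreamableHTTPServerTransport({
      sessionIdGenerator: () => randomUUID(),
      enableDnsRebindingProtection: true,
      onsessioninitialized: (id) => { transports[id] = transport; }
    });
    transport.onclose = () => {
      if (transport.sessionId) delete transports[transport.sessionId];
    };

    const server = new McpServer({ name: 'my-server', version: '1.0.0' });
    // ... register tools, resources, prompts ...
    await server.connect(transport);
  } else {
    res.status(400).json({
      jsonrpc: '2.0',
      error: { code: -32000, message: 'Bad Request: no valid session ID' },
      id: null
    });
    return;
  }

  await transport.handleRequest(req, res, req.body);
});

// GET (server notifications) and DELETE (session termination) handlers are
// omitted for brevity; see the SDK documentation for the full pattern.

app.listen(3000);
```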
|
||||
13
.github/policies/resourceManagement.yml
vendored
@@ -236,22 +236,11 @@ configuration:
|
||||
- if:
|
||||
- payloadType: Issue_Comment
|
||||
- commentContains:
|
||||
pattern: "I(( would|'d) (like|love|be happy)| want) (to help|helping|to contribute|contributing|to implement|implementing|to fix|fixing)"
|
||||
pattern: 'I would [like|love] [to help|helping|to contribute|contributing|to implement|implementing|to fix|fixing]'
|
||||
isRegex: True
|
||||
then:
|
||||
- addReply:
|
||||
reply: Hi! Your last comment indicates to our system, that you might want to contribute to this feature/fix this bug. Thank you! Please make us aware on our ["Would you like to contribute to PowerToys?" thread](https://github.com/microsoft/PowerToys/issues/28769), as we don't see all the comments. <br /><br />_I'm a bot (beep!) so please excuse any mistakes I may make_
|
||||
description:
|
||||
- if:
|
||||
- payloadType: Issues
|
||||
- isAction:
|
||||
action: Opened
|
||||
- bodyContains:
|
||||
pattern: 'Area\(s\) with issue\?\s*\nWorkspaces'
|
||||
isRegex: True
|
||||
then:
|
||||
- addLabel:
|
||||
label: Product-Workspaces
|
||||
description:
|
||||
onFailure:
|
||||
onSuccess:
|
||||
|
||||
50
.github/prompts/create-commit-title.prompt.md
vendored
@@ -1,50 +0,0 @@
|
||||
---
|
||||
agent: 'agent'
|
||||
model: 'GPT-5.1-Codex-Max'
|
||||
description: 'Generate an 80-character git commit title for the local diff'
|
||||
---
|
||||
|
||||
# Generate Commit Title
|
||||
|
||||
## Purpose
|
||||
Provide a single-line, ready-to-paste git commit title (<= 80 characters) that reflects the most important local changes since `HEAD`.
|
||||
|
||||
## Input to collect
|
||||
- Run exactly one command to view the local diff:
|
||||
```@terminal
|
||||
git diff HEAD
|
||||
```
|
||||
|
||||
## How to decide the title
|
||||
1. From the diff, find the dominant area (e.g., `src/modules/*`, `doc/devdocs/**`) and the change type (bug fix, docs update, config tweak).
|
||||
2. Draft an imperative, plain-ASCII title that:
|
||||
- Mentions the primary component when obvious (e.g., `FancyZones:` or `Docs:`)
|
||||
- Stays within 80 characters and has no trailing punctuation (a small validation sketch follows this list)
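
A hypothetical check of those constraints, in TypeScript purely for illustration (nothing here is part of the repository):

```typescript
// Illustrative only: validates the title rules above before replying with it.
function isValidCommitTitle(title: string): boolean {
  const singleLine = !title.includes('\n');
  const plainAscii = /^[\x20-\x7E]+$/.test(title);        // printable ASCII only
  const withinLimit = title.length <= 80;                  // hard 80-character cap
  const noTrailingPunctuation = !/[.,;:!?]$/.test(title);
  return singleLine && plainAscii && withinLimit && noTrailingPunctuation;
}

// isValidCommitTitle('FancyZones: fix layout flicker on monitor change') === true
```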
|
||||
|
||||
## Final output
|
||||
- Reply with only the commit title on a single line—no extra text.
|
||||
|
||||
## PR title convention (when asked)
|
||||
Use Conventional Commits style:
|
||||
|
||||
`<type>(<scope>): <summary>`
|
||||
|
||||
**Allowed types**
|
||||
- feat, fix, docs, refactor, perf, test, build, ci, chore
|
||||
|
||||
**Scope rules**
|
||||
- Use a short, PowerToys-focused scope (one word preferred). Common scopes:
|
||||
- Core: `runner`, `settings-ui`, `common`, `docs`, `build`, `ci`, `installer`, `gpo`, `dsc`
|
||||
- Modules: `fancyzones`, `powerrename`, `awake`, `colorpicker`, `imageresizer`, `keyboardmanager`, `mouseutils`, `peek`, `hosts`, `file-locksmith`, `screen-ruler`, `text-extractor`, `cropandlock`, `paste`, `powerlauncher`
|
||||
- If unclear, pick the closest module or subsystem; omit only if unavoidable
|
||||
|
||||
**Summary rules**
|
||||
- Imperative, present tense (“add”, “update”, “remove”, “fix”)
|
||||
- Keep it <= 72 characters when possible; be specific, avoid “misc changes”
|
||||
|
||||
**Examples**
|
||||
- `feat(fancyzones): add canvas template duplication`
|
||||
- `fix(mouseutils): guard crosshair toggle when dpi info missing`
|
||||
- `docs(runner): document tray icon states`
|
||||
- `build(installer): align wix v5 suffix flag`
|
||||
- `ci(ci): cache pipeline artifacts for x64`
|
||||
25
.github/prompts/create-pr-summary.prompt.md
vendored
@@ -1,25 +0,0 @@
|
||||
---
|
||||
agent: 'agent'
|
||||
model: 'GPT-5.1-Codex-Max'
|
||||
description: 'Generate a PowerToys-ready pull request description from the local diff'
|
||||
---
|
||||
|
||||
# Generate PR Summary
|
||||
|
||||
**Goal:** Produce a ready-to-paste PR title and description that follows PowerToys conventions by comparing the current branch against a user-selected target branch.
|
||||
|
||||
**Repo guardrails:**
|
||||
- Treat `.github/pull_request_template.md` as the single source of truth; load it at runtime instead of embedding hardcoded content in this prompt.
|
||||
- Preserve section order from the template but only surface checklist lines that are relevant for the detected changes, filling them with `[x]`/`[ ]` as appropriate.
|
||||
- Cite touched paths with inline backticks, matching the guidance in `.github/copilot-instructions.md`.
|
||||
- Call out test coverage explicitly: list automated tests run (unit/UI) or state why they are not applicable.
|
||||
|
||||
**Workflow:**
|
||||
1. Determine the target branch from user context; default to `main` when no branch is supplied.
|
||||
2. Run `git status --short` once to surface uncommitted files that may influence the summary.
|
||||
3. Run `git diff <target-branch>...HEAD` a single time to review the detailed changes. Only when confidence remains low should you dig deeper with focused calls such as `git diff <target-branch>...HEAD -- <path>`.
|
||||
4. From the diff, capture impacted areas, key file changes, behavioral risks, migrations, and noteworthy edge cases.
|
||||
5. Confirm validation: list tests executed with results or state why tests were skipped in line with repo guidance.
|
||||
6. Load `.github/pull_request_template.md`, mirror its section order, and populate it with the gathered facts. Include only relevant checklist entries, marking them `[x]/[ ]` and noting any intentional omissions as "N/A".
|
||||
7. Present the filled template inside a fenced ```markdown code block with no extra commentary so it is ready to paste into a PR, clearly flagging any placeholders that still need user input.
|
||||
8. Prepend the PR title above the filled template, applying the Conventional Commit type/scope rules from `.github/prompts/create-commit-title.prompt.md`; pick the dominant component from the diff and keep the title concise and imperative.
|
||||
73
.github/prompts/fix-issue.prompt.md
vendored
@@ -1,73 +0,0 @@
|
||||
---
|
||||
agent: 'agent'
|
||||
model: 'GPT-5.1-Codex-Max'
|
||||
description: 'Execute the fix for a GitHub issue using the previously generated implementation plan'
|
||||
---
|
||||
|
||||
# Fix GitHub Issue
|
||||
|
||||
## Dependencies
|
||||
Source review prompt (for generating the implementation plan if missing):
|
||||
- .github/prompts/review-issue.prompt.md
|
||||
|
||||
Required plan file (single source of truth):
|
||||
- Generated Files/issueReview/{{issue_number}}/implementation-plan.md
|
||||
|
||||
## Dependency Handling
|
||||
1) If `implementation-plan.md` exists → proceed.
|
||||
2) If missing → run the review prompt:
|
||||
- Invoke: `.github/prompts/review-issue.prompt.md`
|
||||
- Pass: `issue_number={{issue_number}}`
|
||||
- Then re-check for `implementation-plan.md`.
|
||||
3) If still missing → stop and generate:
|
||||
- `Generated Files/issueFix/{{issue_number}}/manual-steps.md` containing:
|
||||
“implementation-plan.md not found; please run .github/prompts/review-issue.prompt.md for #{{issue_number}}.”
|
||||
|
||||
# GOAL
|
||||
For **#{{issue_number}}**:
|
||||
- Use implementation-plan.md as the single authority.
|
||||
- Apply code and test changes directly in the repository.
|
||||
- Produce a PR-ready description.
|
||||
|
||||
# OUTPUT FILES
|
||||
1) Generated Files/issueFix/{{issue_number}}/pr-description.md
|
||||
2) Generated Files/issueFix/{{issue_number}}/manual-steps.md # only if human interaction or external setup is required
|
||||
|
||||
# EXECUTION RULES
|
||||
1) Read implementation-plan.md and execute:
|
||||
- Layers & Files → edit/create as listed
|
||||
- Pattern Choices → follow repository conventions
|
||||
- Fundamentals (perf, security, compatibility, accessibility)
|
||||
- Logging & Exceptions
|
||||
- Telemetry (only if explicitly included in the plan)
|
||||
- Risks & Mitigations
|
||||
- Tests to Add
|
||||
2) Locate affected files via `rg` or `git grep`.
|
||||
3) Add/update tests to enforce the fixed behavior.
|
||||
4) If any ambiguity exists, add:
|
||||
// TODO(Human input needed): <clarification needed>
|
||||
5) Verify locally: build & tests run successfully.
|
||||
|
||||
# pr-description.md should include:
|
||||
- Title: `Fix: <short summary> (#{{issue_number}})`
|
||||
- What changed and why the fix works
|
||||
- Files or modules touched
|
||||
- Risks & mitigations (implemented)
|
||||
- Tests added/updated and how to run them
|
||||
- Telemetry behavior (if applicable)
|
||||
- Validation / reproduction steps
|
||||
- `Closes #{{issue_number}}`
|
||||
|
||||
# manual-steps.md (only if needed)
|
||||
- List required human actions: secrets, config, approvals, missing info, or code comments requiring human decisions.
|
||||
|
||||
# IMPORTANT
|
||||
- Apply code and tests directly; do not produce patch files.
|
||||
- Follow implementation-plan.md as the source of truth.
|
||||
- Insert comments for human review where a decision or input is required.
|
||||
- Use repository conventions and deterministic, minimal changes.
|
||||
|
||||
# FINALIZE
|
||||
- Write pr-description.md
|
||||
- Write manual-steps.md only if needed
|
||||
- Print concise success message or note items requiring human interaction
|
||||
25
.github/prompts/fix-spelling.prompt.md
vendored
@@ -1,25 +0,0 @@
|
||||
---
|
||||
agent: 'agent'
|
||||
model: 'GPT-5.1-Codex-Max'
|
||||
description: 'Resolve Code scanning / check-spelling comments on the active PR'
|
||||
---
|
||||
|
||||
# Fix Spelling Comments
|
||||
|
||||
**Goal:** Clear every outstanding GitHub pull request comment created by the `Code scanning / check-spelling` workflow by explicitly allowing intentional terms.
|
||||
|
||||
**Guardrails:**
|
||||
- Update only discussion threads authored by `github-actions` or `github-actions[bot]` that mention `Code scanning results / check-spelling`.
|
||||
- Prefer improving the wording in the originally flagged file when it clarifies intent without changing meaning; if the wording is already clear/standard for the context, handle it via `.github/actions/spell-check/expect.txt` and reuse existing entries.
|
||||
- Limit edits to the flagged text and `.github/actions/spell-check/expect.txt`; leave all other files and topics untouched.
|
||||
|
||||
**Prerequisites:**
|
||||
- Install GitHub CLI if it is not present: `winget install GitHub.cli`.
|
||||
- Run `gh auth login` once before the first CLI use.
|
||||
|
||||
**Workflow:**
|
||||
1. Determine the active pull request with a single `gh pr view --json number` call (default to the current branch).
|
||||
2. Fetch all PR discussion data once via `gh pr view --json comments,reviews` and filter to check-spelling comments authored by `github-actions` or `github-actions[bot]` that are not minimized; when several remain, process only the most recent comment body.
|
||||
3. For each flagged token, first consider tightening or rephrasing the original text to avoid the false positive while keeping the meaning intact; if the existing wording is already normal and professional for the context, proceed to allowlisting instead of changing it.
|
||||
4. When allowlisting, review `.github/actions/spell-check/expect.txt` for an equivalent term (for example an existing lowercase variant); when found, reuse that normalized term rather than adding a new entry, even if the flagged token differs only by casing. Only add a new entry after confirming no equivalent already exists.
|
||||
5. Add any remaining missing token to `.github/actions/spell-check/expect.txt`, keeping surrounding formatting intact.
|
||||
166
.github/prompts/review-issue.prompt.md
vendored
@@ -1,166 +0,0 @@
|
||||
---
|
||||
agent: 'agent'
|
||||
model: 'GPT-5.1-Codex-Max'
|
||||
description: 'Review a GitHub issue, score it (0-100), and generate an implementation plan'
|
||||
---
|
||||
|
||||
# Review GitHub Issue
|
||||
|
||||
## Goal
|
||||
For **#{{issue_number}}** produce:
|
||||
1) `Generated Files/issueReview/{{issue_number}}/overview.md`
|
||||
2) `Generated Files/issueReview/{{issue_number}}/implementation-plan.md`
|
||||
|
||||
## Inputs
|
||||
Determine the required input `{{issue_number}}` from the invocation context; if anything is missing, ask for the value or note it as a gap.
|
||||
|
||||
# CONTEXT (brief)
|
||||
Ground the evidence using `gh issue view {{issue_number}} --json number,title,body,author,createdAt,updatedAt,state,labels,milestone,reactions,comments,linkedPullRequests`, and download images via the MCP `github_issue_images` tool to better understand the issue context. Then use the MCP `github_issue_attachments` tool to download logs, passing `extractFolder` as `Generated Files/issueReview/{{issue_number}}/logs`, and analyze the downloaded logs (if available) to identify relevant issues. Locate the source code in the current workspace (use `rg`/`git grep` as needed). Link related issues and PRs.
|
||||
|
||||
## When to call MCP tools
|
||||
If the following MCP "github-artifacts" tools are available in the environment, use them:
|
||||
- `github_issue_images`: use when the issue/PR likely contains screenshots or other visual evidence (UI bugs, glitches, design problems).
|
||||
- `github_issue_attachments`: use when the issue/PR mentions attached ZIPs (PowerToysReport_*.zip, logs.zip, debug.zip) or asks to analyze logs/diagnostics. Always provide `extractFolder` as `Generated Files/issueReview/{{issue_number}}/logs`
|
||||
|
||||
If these tools are not available (not listed by the runtime), start the MCP server "github-artifacts" first.
|
||||
|
||||
# OVERVIEW.MD
|
||||
## Summary
|
||||
Issue, state, milestone, labels. **Signals**: 👍/❤️/👎, comment count, last activity, linked PRs.
|
||||
|
||||
## At-a-Glance Score Table
|
||||
Present all ratings in a compact table for quick scanning:
|
||||
|
||||
| Dimension | Score | Assessment | Key Drivers |
|
||||
|-----------|-------|------------|-------------|
|
||||
| **A) Business Importance** | X/100 | Low/Medium/High | Top 2 factors with scores |
|
||||
| **B) Community Excitement** | X/100 | Low/Medium/High | Top 2 factors with scores |
|
||||
| **C) Technical Feasibility** | X/100 | Low/Medium/High | Top 2 factors with scores |
|
||||
| **D) Requirement Clarity** | X/100 | Low/Medium/High | Top 2 factors with scores |
|
||||
| **Overall Priority** | X/100 | Low/Medium/High/Critical | Average or weighted summary |
|
||||
| **Effort Estimate** | X days (T-shirt) | XS/S/M/L/XL/XXL/Epic | Type: bug/feature/chore |
|
||||
| **Similar Issues Found** | X open, Y closed | — | Quick reference to related work |
|
||||
| **Potential Assignees** | @username, @username | — | Top contributors to module |
|
||||
|
||||
**Assessment bands**: 0-25 Low, 26-50 Medium, 51-75 High, 76-100 Critical
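
For illustration, a minimal sketch (TypeScript, hypothetical; this prompt does not mandate specific weights) of turning the four dimension scores into the overall priority and its band:

```typescript
// Hypothetical helper: turns the four dimension scores into the overall
// priority score and band. Weights are illustrative, not mandated by this prompt.
type Band = 'Low' | 'Medium' | 'High' | 'Critical';

function toBand(score: number): Band {
  if (score <= 25) return 'Low';
  if (score <= 50) return 'Medium';
  if (score <= 75) return 'High';
  return 'Critical';
}

function overallPriority(s: { business: number; excitement: number; feasibility: number; clarity: number }): { score: number; band: Band } {
  // Equal weights by default; shift weight toward a dimension if it should dominate.
  const score = Math.round(0.25 * s.business + 0.25 * s.excitement + 0.25 * s.feasibility + 0.25 * s.clarity);
  return { score, band: toBand(score) };
}

// overallPriority({ business: 70, excitement: 40, feasibility: 80, clarity: 55 })
// -> { score: 61, band: 'High' }
```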
|
||||
|
||||
## Ratings (0–100) — add evidence & short rationale
|
||||
### A) Business Importance
|
||||
- Labels (priority/security/regression): **≤35**
|
||||
- Milestone/roadmap: **≤25**
|
||||
- Customer/contract impact: **≤20**
|
||||
- Unblocks/platform leverage: **≤20**
|
||||
### B) Community Excitement
|
||||
- 👍+❤️ normalized: **≤45**
|
||||
- Comment volume & unique participants: **≤25**
|
||||
- Recent activity (≤30d): **≤15**
|
||||
- Duplicates/related issues: **≤15**
|
||||
### C) Technical Feasibility
|
||||
- Contained surface/clear seams: **≤30**
|
||||
- Existing patterns/utilities: **≤25**
|
||||
- Risk (perf/sec/compat) manageable: **≤25**
|
||||
- Testability & CI support: **≤20**
|
||||
### D) Requirement Clarity
|
||||
- Behavior/repro/constraints: **≤60**
|
||||
- Non-functionals (perf/sec/i18n/a11y): **≤25**
|
||||
- Decision owners/acceptance signals: **≤15**
|
||||
|
||||
## Effort
|
||||
Days + **T-shirt** (XS 0.5–1d, S 1–2, M 2–4, L 4–7, XL 7–14, XXL 14–30, Epic >30).
|
||||
Type/level: bug/feature/chore/docs/refactor/test-only; severity/value tier.
|
||||
|
||||
## Suggested Actions
|
||||
Provide actionable recommendations for issue triage and assignment:
|
||||
|
||||
### A) Requirement Clarification (if Clarity score <50)
|
||||
**When Requirement Clarity (Dimension D) is Medium or Low:**
|
||||
- Identify specific gaps in issue description: missing repro steps, unclear expected behavior, undefined acceptance criteria, missing non-functional requirements
|
||||
- Draft 3-5 clarifying questions to post as issue comment
|
||||
- Suggest additional information needed: screenshots, logs, environment details, OS version, PowerToys version, error messages
|
||||
- If behavior is ambiguous, propose 2-3 interpretation scenarios and ask reporter to confirm
|
||||
- Example questions:
|
||||
- "Can you provide exact steps to reproduce this issue?"
|
||||
- "What is the expected behavior vs. what you're actually seeing?"
|
||||
- "Does this happen on Windows 10, 11, or both?"
|
||||
- "Can you attach a screenshot or screen recording?"
|
||||
|
||||
### B) Correct Label Suggestions
|
||||
- Analyze issue type, module, and severity to suggest missing or incorrect labels
|
||||
- Recommend labels from: `Issue-Bug`, `Issue-Feature`, `Issue-Docs`, `Issue-Task`, `Priority-High`, `Priority-Medium`, `Priority-Low`, `Needs-Triage`, `Needs-Author-Feedback`, `Product-<ModuleName>`, etc.
|
||||
- If Requirement Clarity is low (<50), add `Needs-Author-Feedback` label
|
||||
- If current labels are incorrect or incomplete, provide specific label changes with rationale
|
||||
|
||||
### C) Find Similar Issues & Past Fixes
|
||||
- Search for similar issues using `gh issue list --search "keywords" --state all --json number,title,state,closedAt`
|
||||
- Identify patterns: duplicate issues, related bugs, or similar feature requests
|
||||
- For closed issues, find linked PRs that fixed them: check `linkedPullRequests` in issue data
|
||||
- Provide 3-5 examples of similar issues with format: `#<number> - <title> (closed by PR #<pr>)` or `(still open)`
|
||||
|
||||
### D) Identify Subject Matter Experts
|
||||
- Use git blame/log to find who fixed similar issues in the past
|
||||
- Search for PR authors who touched relevant files: `git log --all --format='%aN' -- <file_paths> | sort | uniq -c | sort -rn | head -5`
|
||||
- Check issue/PR history for frequent contributors to the affected module
|
||||
- Suggest 2-3 potential assignees with context: `@<username> - <reason>` (e.g., "fixed similar rendering bug in #12345", "maintains FancyZones module")
|
||||
|
||||
### E) Semantic Search for Related Work
|
||||
- Use semantic_search tool to find similar issues, code patterns, or past discussions
|
||||
- Search queries should include: issue keywords, module names, error messages, feature descriptions
|
||||
- Cross-reference semantic results with GitHub issue search for comprehensive coverage
|
||||
|
||||
**Output format for Suggested Actions section in overview.md:**
|
||||
```markdown
|
||||
## Suggested Actions
|
||||
|
||||
### Clarifying Questions (if Clarity <50)
|
||||
Post these questions as issue comment to gather missing information:
|
||||
1. <question>
|
||||
2. <question>
|
||||
3. <question>
|
||||
|
||||
**Recommended label**: `Needs-Author-Feedback`
|
||||
|
||||
### Label Recommendations
|
||||
- Add: `<label>` - <reason>
|
||||
- Remove: `<label>` - <reason>
|
||||
- Current labels are appropriate ✓
|
||||
|
||||
### Similar Issues Found
|
||||
1. #<number> - <title> (<state>, closed by PR #<pr> on <date>)
|
||||
2. #<number> - <title> (<state>)
|
||||
...
|
||||
|
||||
### Potential Assignees
|
||||
- @<username> - <reason>
|
||||
- @<username> - <reason>
|
||||
|
||||
### Related Code/Discussions
|
||||
- <semantic search findings>
|
||||
```
|
||||
|
||||
# IMPLEMENTATION-PLAN.MD
|
||||
1) **Problem Framing** — restate problem; current vs expected; scope boundaries.
|
||||
2) **Layers & Files** — layers (UI/domain/data/infra/build). For each, list **files/dirs to modify** and **new files** (exact paths + why). Prefer repo patterns; cite examples/PRs.
|
||||
3) **Pattern Choices** — reuse existing; if new, justify trade-offs & transition.
|
||||
4) **Fundamentals** (brief plan or N/A + reason):
|
||||
- Performance (hot paths, allocs, caching/streaming)
|
||||
- Security (validation, authN/Z, secrets, SSRF/XSS/CSRF)
|
||||
- G11N/L10N (resources, number/date, pluralization)
|
||||
- Compatibility (public APIs, formats, OS/runtime/toolchain)
|
||||
- Extensibility (DI seams, options/flags, plugin points)
|
||||
- Accessibility (roles, labels, focus, keyboard, contrast)
|
||||
- SOLID & repo conventions (naming, folders, dependency direction)
|
||||
5) **Logging & Exception Handling**
|
||||
- Where to log; levels; structured fields; correlation/traces.
|
||||
- What to catch vs rethrow; retries/backoff; user-visible errors.
|
||||
- **Privacy**: never log secrets/PII; redaction policy.
|
||||
6) **Telemetry (optional — business metrics only)**
|
||||
- Events/metrics (name, when, props); success signal; privacy/sampling; dashboards/alerts.
|
||||
7) **Risks & Mitigations** — flags/canary/shadow-write/config guards.
|
||||
8) **Task Breakdown (agent-ready)** — table (leave a blank line before the header so Markdown renders correctly):
|
||||
|
||||
| Task | Intent | Files/Areas | Steps | Tests (brief) | Owner (Agent/Human) | Human interaction needed? (why) |
|
||||
|---|---|---|---|---|---|---|
|
||||
|
||||
9) **Tests to Add (only)**
|
||||
- **Unit**: targets, cases (success/edge/error), mocks/fixtures, path, notes.
|
||||
- **UI** (if applicable): flows, locator strategy, env/data/flags, path, flake mitigation.
|
||||
199
.github/prompts/review-pr.prompt.md
vendored
@@ -1,199 +0,0 @@
|
||||
---
|
||||
agent: 'agent'
|
||||
model: 'GPT-5.1-Codex-Max'
|
||||
description: 'Perform a comprehensive PR review with per-step Markdown and machine-readable outputs'
|
||||
---
|
||||
|
||||
# Review Pull Request
|
||||
|
||||
**Goal**: Given `{{pr_number}}`, run a *one-topic-per-step* review. Write files to `Generated Files/prReview/{{pr_number}}/` (replace `{{pr_number}}` with the integer). Emit machine‑readable blocks for a GitHub MCP to post review comments.
|
||||
|
||||
## PR selection
|
||||
Resolve the target PR using these fallbacks in order:
|
||||
1. Parse the invocation text for an explicit identifier (the first integer following a leading `#` or the text `PR:` followed by digits); see the parsing sketch after this list.
|
||||
2. If no PR is found yet, locate the newest `Generated Files/prReview/_batch/batch-overview-*.md` file (highest timestamp in filename, fallback newest mtime) and take the first entry in its `## PRs` list whose review folder is missing `00-OVERVIEW.md` or contains `__error.flag`.
|
||||
3. If the batch file has no pending PRs, query assignments with `gh pr list --assignee @me --state open --json number,updatedAt --limit 20` and pick the most recently updated PR that does not already have a completed review folder.
|
||||
4. If still unknown, run `gh pr view --json number` in the current branch and use that result when it is unambiguous.
|
||||
5. If every step above fails, prompt the user for a PR number before proceeding.
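
A hypothetical sketch of fallback 1, extracting a PR number from free-form invocation text (TypeScript, illustration only):

```typescript
// Hypothetical illustration of fallback 1: pull a PR number out of free-form
// invocation text such as "review #42374" or "PR: 42374".
function parsePrNumber(invocation: string): number | undefined {
  const match = invocation.match(/(?:#|\bPR:?\s*)(\d+)/i);
  return match ? Number(match[1]) : undefined;
}

// parsePrNumber('please review #42374')   -> 42374
// parsePrNumber('PR: 41999, incremental') -> 41999
// parsePrNumber('no identifier here')     -> undefined
```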
|
||||
|
||||
## Fetch PR data with `gh`
|
||||
- `gh pr view {{pr_number}} --json number,baseRefName,headRefName,baseRefOid,headRefOid,changedFiles,files`
|
||||
- `gh api repos/:owner/:repo/pulls/{{pr_number}}/files?per_page=250` # patches for line mapping
|
||||
|
||||
### Incremental review workflow
|
||||
1. **Check for existing review**: Read `Generated Files/prReview/{{pr_number}}/00-OVERVIEW.md`
|
||||
2. **Extract state**: Parse `Last reviewed SHA:` from review metadata section
|
||||
3. **Detect changes**: Run `Get-PrIncrementalChanges.ps1 -PullRequestNumber {{pr_number}} -LastReviewedCommitSha {{sha}}`
|
||||
4. **Analyze result**:
|
||||
- `NeedFullReview: true` → Review all files in the PR
|
||||
- `NeedFullReview: false` and `IsIncremental: true` → Review only files in `ChangedFiles` array
|
||||
- `ChangedFiles` is empty → No changes, skip review (update iteration history with "No changes since last review")
|
||||
5. **Apply smart filtering**: Use the file patterns in smart step filtering table to skip irrelevant steps
|
||||
6. **Update metadata**: After completing review, save current `headRefOid` as `Last reviewed SHA:` in `00-OVERVIEW.md`
|
||||
|
||||
### Reusable PowerShell scripts
|
||||
Scripts live in `.github/review-tools/` to avoid repeated manual approvals during PR reviews:
|
||||
|
||||
| Script | Usage |
|
||||
| --- | --- |
|
||||
| `.github/review-tools/Get-GitHubRawFile.ps1` | Download a repository file at a given ref, optionally with line numbers. |
|
||||
| `.github/review-tools/Get-GitHubPrFilePatch.ps1` | Fetch the unified diff for a specific file within a pull request via `gh api`. |
|
||||
| `.github/review-tools/Get-PrIncrementalChanges.ps1` | Compare last reviewed SHA with current PR head to identify incremental changes. Returns JSON with changed files, new commits, and whether full review is needed. |
|
||||
| `.github/review-tools/Test-IncrementalReview.ps1` | Test helper to preview incremental review detection for a PR. Use before running full review to see what changed. |
|
||||
|
||||
Always prefer these scripts (or new ones added under `.github/review-tools/`) over raw `gh api` or similar shell commands so the review flow does not trigger interactive approval prompts.
|
||||
|
||||
## Output files
|
||||
Folder: `Generated Files/prReview/{{pr_number}}/`
|
||||
Files: `00-OVERVIEW.md`, `01-functionality.md`, `02-compatibility.md`, `03-performance.md`, `04-accessibility.md`, `05-security.md`, `06-localization.md`, `07-globalization.md`, `08-extensibility.md`, `09-solid-design.md`, `10-repo-patterns.md`, `11-docs-automation.md`, `12-code-comments.md`, `13-copilot-guidance.md` *(only if guidance md exists).*
|
||||
- **Write-after-step rule:** Immediately after completing each TODO step, persist that step's markdown file before proceeding to the next. Generate `00-OVERVIEW.md` only after every step file has been refreshed for the current run.
|
||||
|
||||
## Iteration management
|
||||
- Determine the current review iteration by reading `00-OVERVIEW.md` (look for `Review iteration:`). If missing, assume iteration `1`.
|
||||
- Extract the last reviewed SHA from `00-OVERVIEW.md` (look for `Last reviewed SHA:` in the review metadata section). If missing, this is iteration 1.
|
||||
- **Incremental review detection**:
|
||||
1. Call `.github/review-tools/Get-PrIncrementalChanges.ps1 -PullRequestNumber {{pr_number}} -LastReviewedCommitSha {{last_sha}}` to get delta analysis.
|
||||
2. Parse the JSON result to determine if incremental review is possible (`IsIncremental: true`, `NeedFullReview: false`).
|
||||
3. If force-push detected or first review, proceed with full review of all changed files.
|
||||
4. If incremental, review only the files listed in `ChangedFiles` array and apply smart step filtering (see below).
|
||||
- Increment the iteration for each review run and propagate the new value to all step files and the overview.
|
||||
- Preserve prior iteration notes by keeping/expanding an `## Iteration history` section in each markdown file, appending the newest summary under `### Iteration <N>`.
|
||||
- Summaries should capture key deltas since the previous iteration so reruns can pick up context quickly.
|
||||
- **After review completion**, update `Last reviewed SHA:` in `00-OVERVIEW.md` with the current `headRefOid` and update the timestamp.
|
||||
|
||||
### Smart step filtering (incremental reviews only)
|
||||
When performing incremental review, skip steps that are irrelevant based on changed file types:
|
||||
|
||||
| File pattern | Required steps | Skippable steps |
|
||||
| --- | --- | --- |
|
||||
| `**/*.cs`, `**/*.cpp`, `**/*.h` | Functionality, Compatibility, Performance, Security, SOLID, Repo patterns, Code comments | (depends on files) |
|
||||
| `**/*.resx`, `**/Resources/*.xaml` | Localization, Globalization | Most others |
|
||||
| `**/*.md` (docs) | Docs & automation | Most others (unless copilot guidance) |
|
||||
| `**/*copilot*.md`, `.github/prompts/*.md` | Copilot guidance, Docs & automation | Most others |
|
||||
| `**/*.csproj`, `**/*.vcxproj`, `**/packages.config` | Compatibility, Security, Repo patterns | Localization, Globalization, Accessibility |
|
||||
| `**/UI/**`, `**/*View.xaml` | Accessibility, Localization | Performance (unless perf-sensitive controls) |
|
||||
|
||||
**Default**: If uncertain or files span multiple categories, run all applicable steps. When in doubt, be conservative and review more rather than less.
|
||||
|
||||
## TODO steps (one concern each)
|
||||
1) Functionality
|
||||
2) Compatibility
|
||||
3) Performance
|
||||
4) Accessibility
|
||||
5) Security
|
||||
6) Localization
|
||||
7) Globalization
|
||||
8) Extensibility
|
||||
9) SOLID principles
|
||||
10) Repo patterns
|
||||
11) Docs & automation coverage for the changes
|
||||
12) Code comments
|
||||
13) Copilot guidance (conditional): if changed folders contain `*copilot*.md` or `.github/prompts/*.md`, review diffs **against** that guidance and write `13-copilot-guidance.md` (omit if none).
|
||||
|
||||
## Per-step file template (use verbatim)
|
||||
```md
|
||||
# <STEP TITLE>
|
||||
**PR:** (populate with PR identifier) — Base:<baseRefName> Head:<headRefName>
|
||||
**Review iteration:** ITERATION
|
||||
|
||||
## Iteration history
|
||||
- Maintain subsections titled `### Iteration N` in reverse chronological order (add the latest at the top) with 2–4 bullet highlights.
|
||||
|
||||
### Iteration ITERATION
|
||||
- <Latest key point 1>
|
||||
- <Latest key point 2>
|
||||
|
||||
## Checks executed
|
||||
- List the concrete checks for *this step only* (5–10 bullets).
|
||||
|
||||
## Findings
|
||||
(If none, write **None**. Otherwise, include one or more blocks like the following:)
|
||||
|
||||
```mcp-review-comment
|
||||
{"file":"relative/path.ext","start_line":123,"end_line":125,"severity":"high|medium|low|info","tags":["<step-slug>","pr-tag-here"],"related_files":["optional/other/file1"],"body":"Problem → Why it matters → Concrete fix. If spans multiple files, name them here."}
|
||||
```
|
||||
Use the second tag to encode the PR number.
|
||||
|
||||
```
|
||||
## Overview file (`00-OVERVIEW.md`) template
|
||||
```md
|
||||
# PR Review Overview — (populate with PR identifier)
|
||||
**Review iteration:** ITERATION
|
||||
**Changed files:** <n> | **High severity issues:** <count>
|
||||
|
||||
## Review metadata
|
||||
**Last reviewed SHA:** <headRefOid from gh pr view>
|
||||
**Last review timestamp:** <ISO8601 timestamp>
|
||||
**Review mode:** <Full|Incremental (N files changed since iteration X)>
|
||||
**Base ref:** <baseRefName>
|
||||
**Head ref:** <headRefName>
|
||||
|
||||
## Step results
|
||||
Write lines like: `01 Functionality — <OK|Issues|Skipped> (see 01-functionality.md)` … through step 13.
|
||||
Mark steps as "Skipped" when using incremental review smart filtering.
|
||||
|
||||
## Iteration history
|
||||
- Maintain subsections titled `### Iteration N` mirroring the per-step convention with concise deltas and cross-links to the relevant step files.
|
||||
- For incremental reviews, list the specific files that changed and which commits were added.
|
||||
```
|
||||
|
||||
## Line numbers & multi‑file issues
|
||||
- Map head‑side lines from `patch` hunks (`@@ -a,b +c,d @@` → new lines `+c..+c+d-1`); see the sketch after this list.
|
||||
- For cross‑file issues: set the primary `"file"`, list others in `"related_files"`, and name them in `"body"`.
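
A minimal sketch of that hunk-header mapping (TypeScript, illustration only):

```typescript
// Sketch of the mapping above: "@@ -a,b +c,d @@" covers head-side lines c .. c + d - 1.
function headLineRange(hunkHeader: string): { start: number; end: number } | undefined {
  const match = hunkHeader.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);
  if (!match) return undefined;
  const start = Number(match[1]);
  const count = match[2] !== undefined ? Number(match[2]) : 1; // an omitted count means one line
  return { start, end: start + count - 1 };
}

// headLineRange('@@ -236,22 +236,11 @@ configuration:') -> { start: 236, end: 246 }
```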
|
||||
|
||||
## Posting (for MCP)
|
||||
- Parse all ```mcp-review-comment``` blocks across step files and post as PR review comments (a parsing sketch follows this list).
|
||||
- If posting isn’t available, still write all files.
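
A hypothetical sketch of collecting those machine-readable findings from a step file (TypeScript, illustration only; it assumes one JSON object per fenced block, as in the template above):

```typescript
// Hypothetical sketch: collect the JSON findings from a step markdown file so an
// MCP (or any other poster) can turn them into PR review comments.
interface ReviewComment {
  file: string;
  start_line: number;
  end_line: number;
  severity: 'high' | 'medium' | 'low' | 'info';
  tags: string[];
  related_files?: string[];
  body: string;
}

function extractReviewComments(stepMarkdown: string): ReviewComment[] {
  // Matches each fenced mcp-review-comment block and parses its single JSON object.
  const pattern = /```mcp-review-comment\s*\n([\s\S]*?)\n```/g;
  const comments: ReviewComment[] = [];
  for (const match of stepMarkdown.matchAll(pattern)) {
    comments.push(JSON.parse(match[1]) as ReviewComment);
  }
  return comments;
}
```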
|
||||
|
||||
## Constraint
|
||||
Read/analyze only; don't modify code. Keep comments small, specific, and fix‑oriented.
|
||||
|
||||
**Testing**: Use `.github/review-tools/Test-IncrementalReview.ps1 -PullRequestNumber 42374` to preview incremental detection before running full review.
|
||||
|
||||
## Scratch cache for large PRs
|
||||
|
||||
Create a local scratch workspace to progressively summarize diffs and reload state across runs.
|
||||
|
||||
### Paths
|
||||
- Root: `Generated Files/prReview/{{pr_number}}/__tmp/`
|
||||
- Files:
|
||||
- `index.jsonl` — append-only JSON Lines index of artifacts.
|
||||
- `todo-queue.json` — pending items (files/chunks/steps).
|
||||
- `rollup-<step>-v<N>.md` — iterative per-step aggregates.
|
||||
- `file-<hash>.txt` — optional saved chunk text (when needed).
|
||||
|
||||
### JSON schema (per line in `index.jsonl`)
|
||||
```json
|
||||
{"type":"chunk|summary|issue|crosslink",
|
||||
"path":"relative/file.ext","chunk_id":"f-12","step":"functionality|compatibility|...",
|
||||
"base_sha":"...", "head_sha":"...", "range":[start,end], "version":1,
|
||||
"notes":"short text or key:value map", "created_utc":"ISO8601"}
|
||||
```
|
||||
|
||||
### Phases (stateful; resume-safe)
|
||||
0. **Discover** PR + SHAs: `gh pr view <PR> --json baseRefName,headRefName,baseRefOid,headRefOid,files`.
|
||||
1. **Chunk** each changed file (head): split into ~300–600 LOC or ~4k chars; stable `chunk_id` = hash(path+start) (see the hashing sketch after this list).
|
||||
- Save `chunk` records. Optionally write `file-<hash>.txt` for expensive chunks.
|
||||
2. **Summarize** per chunk: intent, APIs, risks per TODO step; emit `summary` records (≤600 tokens each).
|
||||
3. **Issues**: convert findings to machine-readable blocks and emit `issue` records (later rendered to step MD).
|
||||
4. **Rollups**: build/update `rollup-<step>-v<N>.md` from `summary`+`issue`. Keep prior versions.
|
||||
5. **Finalize**: write per-step files + `00-OVERVIEW.md` from rollups. Post comments via MCP if available.
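
A small sketch of the stable `chunk_id` from phase 1 (TypeScript, illustration only; any deterministic digest works):

```typescript
import { createHash } from 'node:crypto';

// Illustration of the stable chunk_id from phase 1: a deterministic digest of path + start.
function chunkId(path: string, startLine: number): string {
  return 'f-' + createHash('sha1').update(`${path}:${startLine}`).digest('hex').slice(0, 8);
}

// chunkId('src/runner/main.cpp', 300) is identical across runs, so chunks whose
// head_sha and range are unchanged can be skipped on the next pass.
```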
|
||||
|
||||
### Re-use & token limits
|
||||
- Always **reload** `index.jsonl` first; skip chunks with same `head_sha` and `range`.
|
||||
- **Incremental review optimization**: When `Get-PrIncrementalChanges.ps1` returns a subset of changed files, load only chunks from those files. Reuse existing chunks/summaries for unchanged files.
|
||||
- Prefer re-summarizing only changed chunks; merge chunk summaries → file summaries → step rollups.
|
||||
- When context is tight, load only the minimal chunk text (or its saved `file-<hash>.txt`) needed for a comment.
|
||||
|
||||
### Original vs diff
|
||||
- Fetch base content when needed: prefer `git show <baseRefName>:<path>`; fallback `gh api repos/:owner/:repo/contents/<path>?ref=<base_sha>` (base64).
|
||||
- Use patch hunks from `gh api .../pulls/<PR>/files` to compute **head** line numbers.
|
||||
|
||||
### Queue-driven loop
|
||||
- Seed `todo-queue.json` with all changed files.
|
||||
- Process: chunk → summarize → detect issues → roll up.
|
||||
- Append to `index.jsonl` after each step; never rewrite previous lines (append-only).
|
||||
|
||||
### Hygiene
|
||||
- `__tmp/` is an implementation detail; do not include it in final artifacts.
|
||||
- It is safe to delete to force a clean pass; the next run rebuilds it.
|
||||
9
.github/pull_request_template.md
vendored
@@ -4,11 +4,10 @@
|
||||
<!-- Please review the items on the PR checklist before submitting-->
|
||||
## PR Checklist
|
||||
|
||||
- [ ] Closes: #xxx
|
||||
<!-- - [ ] Closes: #yyy (add separate lines for additional resolved issues) -->
|
||||
- [ ] **Communication:** I've discussed this with core contributors already. If the work hasn't been agreed, this work might be rejected
|
||||
- [ ] **Closes:** #xxx
|
||||
- [ ] **Communication:** I've discussed this with core contributors already. If work hasn't been agreed, this work might be rejected
|
||||
- [ ] **Tests:** Added/updated and all pass
|
||||
- [ ] **Localization:** All end-user-facing strings can be localized
|
||||
- [ ] **Localization:** All end user facing strings can be localized
|
||||
- [ ] **Dev docs:** Added/updated
|
||||
- [ ] **New binaries:** Added on the required places
|
||||
- [ ] [JSON for signing](https://github.com/microsoft/PowerToys/blob/main/.pipelines/ESRPSigning_core.json) for new binaries
|
||||
@@ -17,7 +16,7 @@
|
||||
- [ ] [YML for signed pipeline](https://github.com/microsoft/PowerToys/blob/main/.pipelines/release.yml)
|
||||
- [ ] **Documentation updated:** If checked, please file a pull request on [our docs repo](https://github.com/MicrosoftDocs/windows-uwp/tree/docs/hub/powertoys) and link it here: #xxx
|
||||
|
||||
<!-- Provide a more detailed description of the PR, other things fixed, or any additional comments/features here -->
|
||||
<!-- Provide a more detailed description of the PR, other things fixed or any additional comments/features here -->
|
||||
## Detailed Description of the Pull Request / Additional comments
|
||||
|
||||
<!-- Describe how you validated the behavior. Add automated tests wherever possible, but list manual validation steps taken as well -->
|
||||
|
||||
79
.github/review-tools/Get-GitHubPrFilePatch.ps1
vendored
@@ -1,79 +0,0 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Retrieves the unified diff patch for a specific file in a GitHub pull request.
|
||||
|
||||
.DESCRIPTION
|
||||
This script fetches the patch content (unified diff format) for a specified file
|
||||
within a pull request. It uses the GitHub CLI (gh) to query the GitHub API and
|
||||
retrieve file change information.
|
||||
|
||||
.PARAMETER PullRequestNumber
|
||||
The pull request number to query.
|
||||
|
||||
.PARAMETER FilePath
|
||||
The relative path to the file in the repository (e.g., "src/modules/main.cpp").
|
||||
|
||||
.PARAMETER RepositoryOwner
|
||||
The GitHub repository owner. Defaults to "microsoft".
|
||||
|
||||
.PARAMETER RepositoryName
|
||||
The GitHub repository name. Defaults to "PowerToys".
|
||||
|
||||
.EXAMPLE
|
||||
.\Get-GitHubPrFilePatch.ps1 -PullRequestNumber 42374 -FilePath "src/modules/cmdpal/main.cpp"
|
||||
Retrieves the patch for main.cpp in PR #42374.
|
||||
|
||||
.EXAMPLE
|
||||
.\Get-GitHubPrFilePatch.ps1 -PullRequestNumber 42374 -FilePath "README.md" -RepositoryOwner "myorg" -RepositoryName "myrepo"
|
||||
Retrieves the patch from a different repository.
|
||||
|
||||
.NOTES
|
||||
Requires GitHub CLI (gh) to be installed and authenticated.
|
||||
Run 'gh auth login' if not already authenticated.
|
||||
|
||||
.LINK
|
||||
https://cli.github.com/
|
||||
#>
|
||||
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true, HelpMessage = "Pull request number")]
|
||||
[int]$PullRequestNumber,
|
||||
|
||||
[Parameter(Mandatory = $true, HelpMessage = "Relative path to the file in the repository")]
|
||||
[string]$FilePath,
|
||||
|
||||
[Parameter(Mandatory = $false, HelpMessage = "Repository owner")]
|
||||
[string]$RepositoryOwner = "microsoft",
|
||||
|
||||
[Parameter(Mandatory = $false, HelpMessage = "Repository name")]
|
||||
[string]$RepositoryName = "PowerToys"
|
||||
)
|
||||
|
||||
# Construct GitHub API path for pull request files
|
||||
$apiPath = "repos/$RepositoryOwner/$RepositoryName/pulls/$PullRequestNumber/files?per_page=250"
|
||||
|
||||
# Query GitHub API to get all files in the pull request
|
||||
try {
|
||||
$pullRequestFiles = gh api $apiPath | ConvertFrom-Json
|
||||
} catch {
|
||||
Write-Error "Failed to query GitHub API for PR #$PullRequestNumber. Ensure gh CLI is authenticated. Details: $_"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Find the matching file in the pull request
|
||||
$matchedFile = $pullRequestFiles | Where-Object { $_.filename -eq $FilePath }
|
||||
|
||||
if (-not $matchedFile) {
|
||||
Write-Error "File '$FilePath' not found in PR #$PullRequestNumber."
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Check if patch content exists
|
||||
if (-not $matchedFile.patch) {
|
||||
Write-Warning "File '$FilePath' has no patch content (possibly binary or too large)."
|
||||
return
|
||||
}
|
||||
|
||||
# Output the patch content
|
||||
$matchedFile.patch
|
||||
91
.github/review-tools/Get-GitHubRawFile.ps1
vendored
@@ -1,91 +0,0 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Downloads and displays the content of a file from a GitHub repository at a specific git reference.
|
||||
|
||||
.DESCRIPTION
|
||||
This script fetches the raw content of a file from a GitHub repository using GitHub's raw content API.
|
||||
It can optionally display line numbers and supports any valid git reference (branch, tag, or commit SHA).
|
||||
|
||||
.PARAMETER FilePath
|
||||
The relative path to the file in the repository (e.g., "src/modules/main.cpp").
|
||||
|
||||
.PARAMETER GitReference
|
||||
The git reference (branch name, tag, or commit SHA) to fetch the file from. Defaults to "main".
|
||||
|
||||
.PARAMETER RepositoryOwner
|
||||
The GitHub repository owner. Defaults to "microsoft".
|
||||
|
||||
.PARAMETER RepositoryName
|
||||
The GitHub repository name. Defaults to "PowerToys".
|
||||
|
||||
.PARAMETER ShowLineNumbers
|
||||
When specified, displays line numbers before each line of content.
|
||||
|
||||
.PARAMETER StartLineNumber
|
||||
The starting line number to use when ShowLineNumbers is enabled. Defaults to 1.
|
||||
|
||||
.EXAMPLE
|
||||
.\Get-GitHubRawFile.ps1 -FilePath "README.md" -GitReference "main"
|
||||
Downloads and displays the README.md file from the main branch.
|
||||
|
||||
.EXAMPLE
|
||||
.\Get-GitHubRawFile.ps1 -FilePath "src/runner/main.cpp" -GitReference "dev/feature-branch" -ShowLineNumbers
|
||||
Downloads main.cpp from a feature branch and displays it with line numbers.
|
||||
|
||||
.EXAMPLE
|
||||
.\Get-GitHubRawFile.ps1 -FilePath "LICENSE" -GitReference "abc123def" -ShowLineNumbers -StartLineNumber 10
|
||||
Downloads the LICENSE file from a specific commit and displays it with line numbers starting at 10.
|
||||
|
||||
.NOTES
|
||||
Requires internet connectivity to access GitHub's raw content API.
|
||||
Does not require GitHub CLI authentication for public repositories.
|
||||
|
||||
.LINK
|
||||
https://docs.github.com/en/rest/repos/contents
|
||||
#>
|
||||
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true, HelpMessage = "Relative path to the file in the repository")]
|
||||
[string]$FilePath,
|
||||
|
||||
[Parameter(Mandatory = $false, HelpMessage = "Git reference (branch, tag, or commit SHA)")]
|
||||
[string]$GitReference = "main",
|
||||
|
||||
[Parameter(Mandatory = $false, HelpMessage = "Repository owner")]
|
||||
[string]$RepositoryOwner = "microsoft",
|
||||
|
||||
[Parameter(Mandatory = $false, HelpMessage = "Repository name")]
|
||||
[string]$RepositoryName = "PowerToys",
|
||||
|
||||
[Parameter(Mandatory = $false, HelpMessage = "Display line numbers before each line")]
|
||||
[switch]$ShowLineNumbers,
|
||||
|
||||
[Parameter(Mandatory = $false, HelpMessage = "Starting line number for display")]
|
||||
[int]$StartLineNumber = 1
|
||||
)
|
||||
|
||||
# Construct the raw content URL
|
||||
$rawContentUrl = "https://raw.githubusercontent.com/$RepositoryOwner/$RepositoryName/$GitReference/$FilePath"
|
||||
|
||||
# Fetch the file content from GitHub
|
||||
try {
|
||||
$response = Invoke-WebRequest -UseBasicParsing -Uri $rawContentUrl
|
||||
} catch {
|
||||
Write-Error "Failed to fetch file from $rawContentUrl. Details: $_"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Split content into individual lines
|
||||
$contentLines = $response.Content -split "`n"
|
||||
|
||||
# Display the content with or without line numbers
|
||||
if ($ShowLineNumbers) {
|
||||
$currentLineNumber = $StartLineNumber
|
||||
foreach ($line in $contentLines) {
|
||||
Write-Output ("{0:d4}: {1}" -f $currentLineNumber, $line)
|
||||
$currentLineNumber++
|
||||
}
|
||||
} else {
|
||||
$contentLines | ForEach-Object { Write-Output $_ }
|
||||
}
|
||||
173
.github/review-tools/Get-PrIncrementalChanges.ps1
vendored
@@ -1,173 +0,0 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Detects changes between the last reviewed commit and current head of a pull request.
|
||||
|
||||
.DESCRIPTION
|
||||
This script compares a previously reviewed commit SHA with the current head of a pull request
|
||||
to determine what has changed. It helps enable incremental reviews by identifying new commits
|
||||
and modified files since the last review iteration.
|
||||
|
||||
The script handles several scenarios:
|
||||
- First review (no previous SHA provided)
|
||||
- No changes (current SHA matches last reviewed SHA)
|
||||
- Force-push detected (last reviewed SHA no longer in history)
|
||||
- Incremental changes (new commits added since last review)
|
||||
|
||||
.PARAMETER PullRequestNumber
|
||||
The pull request number to analyze.
|
||||
|
||||
.PARAMETER LastReviewedCommitSha
|
||||
The commit SHA that was last reviewed. If omitted, this is treated as a first review.
|
||||
|
||||
.PARAMETER RepositoryOwner
|
||||
The GitHub repository owner. Defaults to "microsoft".
|
||||
|
||||
.PARAMETER RepositoryName
|
||||
The GitHub repository name. Defaults to "PowerToys".
|
||||
|
||||
.OUTPUTS
|
||||
JSON object containing:
|
||||
- PullRequestNumber: The PR number being analyzed
|
||||
- CurrentHeadSha: The current head commit SHA
|
||||
- LastReviewedSha: The last reviewed commit SHA (if provided)
|
||||
- BaseRefName: Base branch name
|
||||
- HeadRefName: Head branch name
|
||||
- IsIncremental: Boolean indicating if incremental review is possible
|
||||
- NeedFullReview: Boolean indicating if a full review is required
|
||||
- ChangedFiles: Array of files that changed (filename, status, additions, deletions)
|
||||
- NewCommits: Array of commits added since last review (sha, message, author, date)
|
||||
- Summary: Human-readable description of changes
|
||||
|
||||
.EXAMPLE
|
||||
.\Get-PrIncrementalChanges.ps1 -PullRequestNumber 42374
|
||||
Analyzes PR #42374 with no previous review (first review scenario).
|
||||
|
||||
.EXAMPLE
|
||||
.\Get-PrIncrementalChanges.ps1 -PullRequestNumber 42374 -LastReviewedCommitSha "abc123def456"
|
||||
Compares current PR state against the last reviewed commit to identify incremental changes.
|
||||
|
||||
.EXAMPLE
|
||||
$changes = .\Get-PrIncrementalChanges.ps1 -PullRequestNumber 42374 -LastReviewedCommitSha "abc123" | ConvertFrom-Json
|
||||
if ($changes.IsIncremental) { Write-Host "Can perform incremental review" }
|
||||
Captures the output as a PowerShell object for further processing.
|
||||
|
||||
.NOTES
|
||||
Requires GitHub CLI (gh) to be installed and authenticated.
|
||||
Run 'gh auth login' if not already authenticated.
|
||||
|
||||
.LINK
|
||||
https://cli.github.com/
|
||||
#>
|
||||
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true, HelpMessage = "Pull request number")]
|
||||
[int]$PullRequestNumber,
|
||||
|
||||
[Parameter(Mandatory = $false, HelpMessage = "Commit SHA that was last reviewed")]
|
||||
[string]$LastReviewedCommitSha,
|
||||
|
||||
[Parameter(Mandatory = $false, HelpMessage = "Repository owner")]
|
||||
[string]$RepositoryOwner = "microsoft",
|
||||
|
||||
[Parameter(Mandatory = $false, HelpMessage = "Repository name")]
|
||||
[string]$RepositoryName = "PowerToys"
|
||||
)
|
||||
|
||||
# Fetch current pull request state from GitHub
|
||||
try {
|
||||
$pullRequestData = gh pr view $PullRequestNumber --json headRefOid,headRefName,baseRefName,baseRefOid | ConvertFrom-Json
|
||||
} catch {
|
||||
Write-Error "Failed to fetch PR #$PullRequestNumber details. Details: $_"
|
||||
exit 1
|
||||
}
|
||||
|
||||
$currentHeadSha = $pullRequestData.headRefOid
|
||||
$baseRefName = $pullRequestData.baseRefName
|
||||
$headRefName = $pullRequestData.headRefName
|
||||
|
||||
# Initialize result object
|
||||
$analysisResult = @{
|
||||
PullRequestNumber = $PullRequestNumber
|
||||
CurrentHeadSha = $currentHeadSha
|
||||
BaseRefName = $baseRefName
|
||||
HeadRefName = $headRefName
|
||||
LastReviewedSha = $LastReviewedCommitSha
|
||||
IsIncremental = $false
|
||||
NeedFullReview = $true
|
||||
ChangedFiles = @()
|
||||
NewCommits = @()
|
||||
Summary = ""
|
||||
}
|
||||
|
||||
# Scenario 1: First review (no previous SHA provided)
|
||||
if ([string]::IsNullOrWhiteSpace($LastReviewedCommitSha)) {
|
||||
$analysisResult.Summary = "Initial review - no previous iteration found"
|
||||
$analysisResult.NeedFullReview = $true
|
||||
return $analysisResult | ConvertTo-Json -Depth 10
|
||||
}
|
||||
|
||||
# Scenario 2: No changes since last review
|
||||
if ($currentHeadSha -eq $LastReviewedCommitSha) {
|
||||
$analysisResult.Summary = "No changes since last review (SHA: $currentHeadSha)"
|
||||
$analysisResult.NeedFullReview = $false
|
||||
$analysisResult.IsIncremental = $true
|
||||
return $analysisResult | ConvertTo-Json -Depth 10
|
||||
}
|
||||
|
||||
# Scenario 3: Check for force-push (last reviewed SHA no longer exists in history)
|
||||
try {
|
||||
$null = gh api "repos/$RepositoryOwner/$RepositoryName/commits/$LastReviewedCommitSha" 2>&1
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
# SHA not found - likely force-push or branch rewrite
|
||||
$analysisResult.Summary = "Force-push detected - last reviewed SHA $LastReviewedCommitSha no longer exists. Full review required."
|
||||
$analysisResult.NeedFullReview = $true
|
||||
return $analysisResult | ConvertTo-Json -Depth 10
|
||||
}
|
||||
} catch {
|
||||
$analysisResult.Summary = "Cannot verify last reviewed SHA $LastReviewedCommitSha - assuming force-push. Full review required."
|
||||
$analysisResult.NeedFullReview = $true
|
||||
return $analysisResult | ConvertTo-Json -Depth 10
|
||||
}
|
||||
|
||||
# Scenario 4: Get incremental changes between last reviewed SHA and current head
|
||||
try {
|
||||
$compareApiPath = "repos/$RepositoryOwner/$RepositoryName/compare/$LastReviewedCommitSha...$currentHeadSha"
|
||||
$comparisonData = gh api $compareApiPath | ConvertFrom-Json
|
||||
|
||||
# Extract new commits information
|
||||
$analysisResult.NewCommits = $comparisonData.commits | ForEach-Object {
|
||||
@{
|
||||
Sha = $_.sha.Substring(0, 7)
|
||||
Message = $_.commit.message.Split("`n")[0] # First line only
|
||||
Author = $_.commit.author.name
|
||||
Date = $_.commit.author.date
|
||||
}
|
||||
}
|
||||
|
||||
# Extract changed files information
|
||||
$analysisResult.ChangedFiles = $comparisonData.files | ForEach-Object {
|
||||
@{
|
||||
Filename = $_.filename
|
||||
Status = $_.status # added, modified, removed, renamed
|
||||
Additions = $_.additions
|
||||
Deletions = $_.deletions
|
||||
Changes = $_.changes
|
||||
}
|
||||
}
|
||||
|
||||
$fileCount = $analysisResult.ChangedFiles.Count
|
||||
$commitCount = $analysisResult.NewCommits.Count
|
||||
|
||||
$analysisResult.IsIncremental = $true
|
||||
$analysisResult.NeedFullReview = $false
|
||||
$analysisResult.Summary = "Incremental review: $commitCount new commit(s), $fileCount file(s) changed since SHA $($LastReviewedCommitSha.Substring(0, 7))"
|
||||
|
||||
} catch {
|
||||
Write-Error "Failed to compare commits. Details: $_"
|
||||
$analysisResult.Summary = "Error comparing commits - defaulting to full review"
|
||||
$analysisResult.NeedFullReview = $true
|
||||
}
|
||||
|
||||
# Return the analysis result as JSON
|
||||
return $analysisResult | ConvertTo-Json -Depth 10
|
||||
.github/review-tools/Test-IncrementalReview.ps1 (vendored, 170 lines)
@@ -1,170 +0,0 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Tests and previews incremental review detection for a pull request.
|
||||
|
||||
.DESCRIPTION
|
||||
This helper script validates the incremental review detection logic by analyzing an existing
|
||||
PR review folder. It reads the last reviewed SHA from the overview file, compares it with
|
||||
the current PR state, and displays detailed information about what has changed.
|
||||
|
||||
This is useful for:
|
||||
- Testing the incremental review system before running a full review
|
||||
- Understanding what changed since the last review iteration
|
||||
- Verifying that review metadata was properly recorded
|
||||
|
||||
.PARAMETER PullRequestNumber
|
||||
The pull request number to test incremental review detection for.
|
||||
|
||||
.PARAMETER RepositoryOwner
|
||||
The GitHub repository owner. Defaults to "microsoft".
|
||||
|
||||
.PARAMETER RepositoryName
|
||||
The GitHub repository name. Defaults to "PowerToys".
|
||||
|
||||
.OUTPUTS
|
||||
Colored console output displaying:
|
||||
- Current and last reviewed commit SHAs
|
||||
- Whether incremental review is possible
|
||||
- List of new commits since last review
|
||||
- List of changed files with status indicators
|
||||
- Recommended review strategy
|
||||
|
||||
.EXAMPLE
|
||||
.\Test-IncrementalReview.ps1 -PullRequestNumber 42374
|
||||
Tests incremental review detection for PR #42374.
|
||||
|
||||
.EXAMPLE
|
||||
.\Test-IncrementalReview.ps1 -PullRequestNumber 42374 -RepositoryOwner "myorg" -RepositoryName "myrepo"
|
||||
Tests incremental review for a PR in a different repository.
|
||||
|
||||
.NOTES
|
||||
Requires GitHub CLI (gh) to be installed and authenticated.
|
||||
Run 'gh auth login' if not already authenticated.
|
||||
|
||||
Prerequisites:
|
||||
- PR review folder must exist at "Generated Files\prReview\{PRNumber}"
|
||||
- 00-OVERVIEW.md must exist in the review folder
|
||||
- For incremental detection, overview must contain "Last reviewed SHA" metadata
|
||||
|
||||
.LINK
|
||||
https://cli.github.com/
|
||||
#>
|
||||
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true, HelpMessage = "Pull request number to test")]
|
||||
[int]$PullRequestNumber,
|
||||
|
||||
[Parameter(Mandatory = $false, HelpMessage = "Repository owner")]
|
||||
[string]$RepositoryOwner = "microsoft",
|
||||
|
||||
[Parameter(Mandatory = $false, HelpMessage = "Repository name")]
|
||||
[string]$RepositoryName = "PowerToys"
|
||||
)
|
||||
|
||||
# Resolve paths to review folder and overview file
|
||||
$repositoryRoot = Split-Path (Split-Path $PSScriptRoot -Parent) -Parent
|
||||
$reviewFolderPath = Join-Path $repositoryRoot "Generated Files\prReview\$PullRequestNumber"
|
||||
$overviewFilePath = Join-Path $reviewFolderPath "00-OVERVIEW.md"
|
||||
|
||||
Write-Host "=== Testing Incremental Review for PR #$PullRequestNumber ===" -ForegroundColor Cyan
|
||||
Write-Host ""
|
||||
|
||||
# Check if review folder exists
|
||||
if (-not (Test-Path $reviewFolderPath)) {
|
||||
Write-Host "❌ Review folder not found: $reviewFolderPath" -ForegroundColor Red
|
||||
Write-Host "This appears to be a new review (iteration 1)" -ForegroundColor Yellow
|
||||
exit 0
|
||||
}
|
||||
|
||||
# Check if overview file exists
|
||||
if (-not (Test-Path $overviewFilePath)) {
|
||||
Write-Host "❌ Overview file not found: $overviewFilePath" -ForegroundColor Red
|
||||
Write-Host "This appears to be an incomplete review" -ForegroundColor Yellow
|
||||
exit 0
|
||||
}
|
||||
|
||||
# Read overview file and extract last reviewed SHA
|
||||
Write-Host "📄 Reading overview file..." -ForegroundColor Green
|
||||
$overviewFileContent = Get-Content $overviewFilePath -Raw
|
||||
|
||||
if ($overviewFileContent -match '\*\*Last reviewed SHA:\*\*\s+(\w+)') {
|
||||
$lastReviewedSha = $Matches[1]
|
||||
Write-Host "✅ Found last reviewed SHA: $lastReviewedSha" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Host "⚠️ No 'Last reviewed SHA' found in overview - this may be an old format" -ForegroundColor Yellow
|
||||
Write-Host "Proceeding without incremental detection (full review will be needed)" -ForegroundColor Yellow
|
||||
exit 0
|
||||
}
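# Note: the '-match' above expects the overview metadata to contain a line shaped like
#   **Last reviewed SHA:** abc123def456
# (illustrative format; the review prompt writes this metadata into 00-OVERVIEW.md)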
|
||||
|
||||
Write-Host ""
|
||||
Write-Host "🔍 Running incremental change detection..." -ForegroundColor Cyan
|
||||
|
||||
# Call the incremental changes detection script
|
||||
$incrementalChangesScriptPath = Join-Path $PSScriptRoot "Get-PrIncrementalChanges.ps1"
|
||||
if (-not (Test-Path $incrementalChangesScriptPath)) {
|
||||
Write-Host "❌ Script not found: $incrementalChangesScriptPath" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
|
||||
try {
|
||||
$analysisResult = & $incrementalChangesScriptPath `
|
||||
-PullRequestNumber $PullRequestNumber `
|
||||
-LastReviewedCommitSha $lastReviewedSha `
|
||||
-RepositoryOwner $RepositoryOwner `
|
||||
-RepositoryName $RepositoryName | ConvertFrom-Json
|
||||
|
||||
# Display analysis results
|
||||
Write-Host ""
|
||||
Write-Host "=== Incremental Review Analysis ===" -ForegroundColor Cyan
|
||||
Write-Host "Current HEAD SHA: $($analysisResult.CurrentHeadSha)" -ForegroundColor White
|
||||
Write-Host "Last reviewed SHA: $($analysisResult.LastReviewedSha)" -ForegroundColor White
|
||||
Write-Host "Base branch: $($analysisResult.BaseRefName)" -ForegroundColor White
|
||||
Write-Host "Head branch: $($analysisResult.HeadRefName)" -ForegroundColor White
|
||||
Write-Host ""
|
||||
Write-Host "Is incremental? $($analysisResult.IsIncremental)" -ForegroundColor $(if ($analysisResult.IsIncremental) { "Green" } else { "Yellow" })
|
||||
Write-Host "Need full review? $($analysisResult.NeedFullReview)" -ForegroundColor $(if ($analysisResult.NeedFullReview) { "Yellow" } else { "Green" })
|
||||
Write-Host ""
|
||||
Write-Host "Summary: $($analysisResult.Summary)" -ForegroundColor Cyan
|
||||
Write-Host ""
|
||||
|
||||
# Display new commits if any
|
||||
if ($analysisResult.NewCommits -and $analysisResult.NewCommits.Count -gt 0) {
|
||||
Write-Host "📝 New commits ($($analysisResult.NewCommits.Count)):" -ForegroundColor Green
|
||||
foreach ($commit in $analysisResult.NewCommits) {
|
||||
Write-Host " - $($commit.Sha): $($commit.Message)" -ForegroundColor Gray
|
||||
}
|
||||
Write-Host ""
|
||||
}
|
||||
|
||||
# Display changed files if any
|
||||
if ($analysisResult.ChangedFiles -and $analysisResult.ChangedFiles.Count -gt 0) {
|
||||
Write-Host "📁 Changed files ($($analysisResult.ChangedFiles.Count)):" -ForegroundColor Green
|
||||
foreach ($file in $analysisResult.ChangedFiles) {
|
||||
$statusDisplayColor = switch ($file.Status) {
|
||||
"added" { "Green" }
|
||||
"removed" { "Red" }
|
||||
"modified" { "Yellow" }
|
||||
"renamed" { "Cyan" }
|
||||
default { "White" }
|
||||
}
|
||||
Write-Host " - [$($file.Status)] $($file.Filename) (+$($file.Additions)/-$($file.Deletions))" -ForegroundColor $statusDisplayColor
|
||||
}
|
||||
Write-Host ""
|
||||
}
|
||||
|
||||
# Suggest review strategy based on analysis
|
||||
Write-Host "=== Recommended Review Strategy ===" -ForegroundColor Cyan
|
||||
if ($analysisResult.NeedFullReview) {
|
||||
Write-Host "🔄 Full review recommended" -ForegroundColor Yellow
|
||||
} elseif ($analysisResult.IsIncremental -and ($analysisResult.ChangedFiles.Count -eq 0)) {
|
||||
Write-Host "✅ No changes detected - no review needed" -ForegroundColor Green
|
||||
} elseif ($analysisResult.IsIncremental) {
|
||||
Write-Host "⚡ Incremental review possible - review only changed files" -ForegroundColor Green
|
||||
Write-Host "💡 Consider applying smart step filtering based on file types" -ForegroundColor Cyan
|
||||
}
|
||||
|
||||
} catch {
|
||||
Write-Host "❌ Error running incremental change detection: $_" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
.github/review-tools/review-tools.instructions.md (vendored, 313 lines)
@@ -1,313 +0,0 @@
|
||||
---
|
||||
description: PowerShell scripts for efficient PR reviews in PowerToys repository
|
||||
applyTo: '**'
|
||||
---
|
||||
|
||||
# PR Review Tools - Reference Guide
|
||||
|
||||
PowerShell scripts to support efficient and incremental pull request reviews in the PowerToys repository.
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Prerequisites
|
||||
- PowerShell 7+ (or Windows PowerShell 5.1+)
|
||||
- GitHub CLI (`gh`) installed and authenticated (`gh auth login`)
|
||||
- Access to the PowerToys repository
|
||||
|
||||
### Testing Your Setup
|
||||
|
||||
Run the full test suite (recommended):
|
||||
```powershell
|
||||
cd "d:\PowerToys-00c1\.github\review-tools"
|
||||
.\Run-ReviewToolsTests.ps1
|
||||
```
|
||||
|
||||
Expected: 9-10 tests passing
|
||||
|
||||
### Individual Script Tests
|
||||
|
||||
**Test incremental change detection:**
|
||||
```powershell
|
||||
.\Get-PrIncrementalChanges.ps1 -PullRequestNumber 42374
|
||||
```
|
||||
Expected: JSON output showing review analysis
|
||||
|
||||
**Preview incremental review:**
|
||||
```powershell
|
||||
.\Test-IncrementalReview.ps1 -PullRequestNumber 42374
|
||||
```
|
||||
Expected: Analysis showing current vs last reviewed SHA
|
||||
|
||||
**Fetch file content:**
|
||||
```powershell
|
||||
.\Get-GitHubRawFile.ps1 -FilePath "README.md" -GitReference "main"
|
||||
```
|
||||
Expected: README content displayed
|
||||
|
||||
**Get PR file patch:**
|
||||
```powershell
|
||||
.\Get-GitHubPrFilePatch.ps1 -PullRequestNumber 42374 -FilePath ".github/actions/spell-check/expect.txt"
|
||||
```
|
||||
Expected: Unified diff output
|
||||
|
||||
## Available Scripts
|
||||
|
||||
### Get-GitHubRawFile.ps1
|
||||
|
||||
Downloads and displays file content from a GitHub repository at a specific git reference.
|
||||
|
||||
**Purpose:** Retrieve baseline file content for comparison during PR reviews.
|
||||
|
||||
**Parameters:**
|
||||
- `FilePath` (required): Relative path to file in repository
|
||||
- `GitReference` (optional): Git ref (branch, tag, SHA). Default: "main"
|
||||
- `RepositoryOwner` (optional): Repository owner. Default: "microsoft"
|
||||
- `RepositoryName` (optional): Repository name. Default: "PowerToys"
|
||||
- `ShowLineNumbers` (switch): Prefix each line with line number
|
||||
- `StartLineNumber` (optional): Starting line number when using `-ShowLineNumbers`. Default: 1
|
||||
|
||||
**Usage:**
|
||||
```powershell
|
||||
.\Get-GitHubRawFile.ps1 -FilePath "src/runner/main.cpp" -GitReference "main" -ShowLineNumbers
|
||||
```
|
||||
|
||||
### Get-GitHubPrFilePatch.ps1
|
||||
|
||||
Fetches the unified diff (patch) for a specific file in a pull request.
|
||||
|
||||
**Purpose:** Get the exact changes made to a file in a PR for detailed review.
|
||||
|
||||
**Parameters:**
|
||||
- `PullRequestNumber` (required): Pull request number
|
||||
- `FilePath` (required): Relative path to file in the PR
|
||||
- `RepositoryOwner` (optional): Repository owner. Default: "microsoft"
|
||||
- `RepositoryName` (optional): Repository name. Default: "PowerToys"
|
||||
|
||||
**Usage:**
|
||||
```powershell
|
||||
.\Get-GitHubPrFilePatch.ps1 -PullRequestNumber 42374 -FilePath "src/modules/cmdpal/main.cpp"
|
||||
```
|
||||
|
||||
**Output:** Unified diff showing changes made to the file.
|
||||
|
||||
### Get-PrIncrementalChanges.ps1
|
||||
|
||||
Compares the last reviewed commit with the current PR head to identify incremental changes.
|
||||
|
||||
**Purpose:** Enable efficient incremental reviews by detecting what changed since the last review iteration.
|
||||
|
||||
**Parameters:**
|
||||
- `PullRequestNumber` (required): Pull request number
|
||||
- `LastReviewedCommitSha` (optional): SHA of the commit that was last reviewed. If omitted, assumes first review.
|
||||
- `RepositoryOwner` (optional): Repository owner. Default: "microsoft"
|
||||
- `RepositoryName` (optional): Repository name. Default: "PowerToys"
|
||||
|
||||
**Usage:**
|
||||
```powershell
|
||||
.\Get-PrIncrementalChanges.ps1 -PullRequestNumber 42374 -LastReviewedCommitSha "abc123def456"
|
||||
```
|
||||
|
||||
**Output:** JSON object with detailed change analysis:
|
||||
```json
|
||||
{
|
||||
"PullRequestNumber": 42374,
|
||||
"CurrentHeadSha": "xyz789abc123",
|
||||
"LastReviewedSha": "abc123def456",
|
||||
"IsIncremental": true,
|
||||
"NeedFullReview": false,
|
||||
"ChangedFiles": [
|
||||
{
|
||||
"Filename": "src/modules/cmdpal/main.cpp",
|
||||
"Status": "modified",
|
||||
"Additions": 15,
|
||||
"Deletions": 8,
|
||||
"Changes": 23
|
||||
}
|
||||
],
|
||||
"NewCommits": [
|
||||
{
|
||||
"Sha": "def456",
|
||||
"Message": "Fix memory leak",
|
||||
"Author": "John Doe",
|
||||
"Date": "2025-11-07T10:30:00Z"
|
||||
}
|
||||
],
|
||||
"Summary": "Incremental review: 1 new commit(s), 1 file(s) changed since SHA abc123d"
|
||||
}
|
||||
```
|
||||
|
||||
**Scenarios Handled:**
|
||||
- **No LastReviewedCommitSha**: Returns `NeedFullReview: true` (first review)
|
||||
- **SHA matches current HEAD**: Returns empty `ChangedFiles` (no changes)
|
||||
- **Force-push detected**: Returns `NeedFullReview: true` (SHA not in history)
|
||||
- **Incremental changes**: Returns list of changed files and new commits
|
||||
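A minimal consumer that branches on these scenarios could look like this (a sketch; the property names match the JSON output shown above):

```powershell
$analysis = .\Get-PrIncrementalChanges.ps1 -PullRequestNumber 42374 -LastReviewedCommitSha "abc123" |
    ConvertFrom-Json

if ($analysis.NeedFullReview) {
    Write-Host "Full review required: $($analysis.Summary)"
} elseif (@($analysis.ChangedFiles).Count -eq 0) {
    Write-Host "No changes since the last review - nothing to do"
} else {
    Write-Host "Incremental review of $(@($analysis.ChangedFiles).Count) file(s) across $(@($analysis.NewCommits).Count) new commit(s)"
}
```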
|
||||
### Test-IncrementalReview.ps1
|
||||
|
||||
Helper script to test and preview incremental review detection before running the full review.
|
||||
|
||||
**Purpose:** Validate incremental review functionality and preview what changed.
|
||||
|
||||
**Parameters:**
|
||||
- `PullRequestNumber` (required): Pull request number
|
||||
- `RepositoryOwner` (optional): Repository owner. Default: "microsoft"
|
||||
- `RepositoryName` (optional): Repository name. Default: "PowerToys"
|
||||
|
||||
**Usage:**
|
||||
```powershell
|
||||
.\Test-IncrementalReview.ps1 -PullRequestNumber 42374
|
||||
```
|
||||
|
||||
**Output:** Colored console output showing:
|
||||
- Current and last reviewed SHAs
|
||||
- Whether incremental review is possible
|
||||
- List of new commits and changed files
|
||||
- Recommended review strategy
|
||||
|
||||
## Workflow Integration
|
||||
|
||||
These scripts integrate with the PR review prompt (`.github/prompts/review-pr.prompt.md`).
|
||||
|
||||
### Typical Review Flow
|
||||
|
||||
1. **Initial Review (Iteration 1)**
|
||||
- Review prompt processes the PR
|
||||
- Creates `Generated Files/prReview/{PR}/00-OVERVIEW.md`
|
||||
- Includes review metadata section with current HEAD SHA
|
||||
|
||||
2. **Subsequent Reviews (Iteration 2+)**
|
||||
- Review prompt reads `00-OVERVIEW.md` to get last reviewed SHA
|
||||
- Calls `Get-PrIncrementalChanges.ps1` to detect what changed
|
||||
- If incremental:
|
||||
- Reviews only changed files
|
||||
- Skips irrelevant review steps (e.g., skip Localization if no `.resx` files changed)
|
||||
- Uses `Get-GitHubPrFilePatch.ps1` to get patches for changed files
|
||||
- Updates `00-OVERVIEW.md` with new SHA and iteration number
|
||||
|
||||
### Manual Testing Workflow
|
||||
|
||||
Preview changes before review:
|
||||
```powershell
|
||||
# Check what changed in PR #42374 since last review
|
||||
.\Test-IncrementalReview.ps1 -PullRequestNumber 42374
|
||||
|
||||
# Get incremental changes programmatically
|
||||
$changes = .\Get-PrIncrementalChanges.ps1 -PullRequestNumber 42374 -LastReviewedCommitSha "abc123" | ConvertFrom-Json
|
||||
|
||||
if (-not $changes.NeedFullReview) {
|
||||
Write-Host "Only need to review $($changes.ChangedFiles.Count) files"
|
||||
|
||||
# Review each changed file
|
||||
foreach ($file in $changes.ChangedFiles) {
|
||||
Write-Host "Reviewing $($file.Filename)..."
|
||||
.\Get-GitHubPrFilePatch.ps1 -PullRequestNumber 42374 -FilePath $file.Filename
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Error Handling and Troubleshooting
|
||||
|
||||
### Common Requirements
|
||||
|
||||
All scripts:
|
||||
- Exit with code 1 on error
|
||||
- Write detailed error messages to stderr
|
||||
- Require `gh` CLI to be installed and authenticated
|
||||
|
||||
### Common Issues
|
||||
|
||||
**Error: "gh not found"**
|
||||
- **Solution**: Install GitHub CLI from https://cli.github.com/ and run `gh auth login`
|
||||
|
||||
**Error: "Failed to query GitHub API"**
|
||||
- **Solution**: Verify `gh` authentication with `gh auth status`
|
||||
- **Solution**: Check PR number exists and you have repository access
|
||||
|
||||
**Error: "PR not found"**
|
||||
- **Solution**: Verify the PR number is correct and still exists
|
||||
- **Solution**: Ensure repository owner and name are correct
|
||||
|
||||
**Error: "SHA not found" or "Force-push detected"**
|
||||
- **Explanation**: Last reviewed SHA no longer exists in branch history (force-push occurred)
|
||||
- **Solution**: A full review is required; incremental review not possible
|
||||
|
||||
**Tests show "FAIL" but functionality works**
|
||||
- **Explanation**: Some tests may show exit code failures even when logic is correct
|
||||
- **Solution**: Check test output message - if it says "Correctly detected", functionality is working
|
||||
|
||||
**Error: "Could not find insertion point"**
|
||||
- **Explanation**: Overview file doesn't have expected "**Changed files:**" line
|
||||
- **Solution**: Verify overview file format is correct or regenerate it
|
||||
|
||||
### Verification Checklist
|
||||
|
||||
After setup, verify:
|
||||
- [ ] `Run-ReviewToolsTests.ps1` shows 9+ tests passing
|
||||
- [ ] `Get-PrIncrementalChanges.ps1` returns valid JSON
|
||||
- [ ] `Test-IncrementalReview.ps1` analyzes a PR without errors
|
||||
- [ ] `Get-GitHubRawFile.ps1` downloads files correctly
|
||||
- [ ] `Get-GitHubPrFilePatch.ps1` retrieves patches correctly
|
||||
|
||||
## Best Practices
|
||||
|
||||
### For Review Authors
|
||||
|
||||
1. **Test before full review**: Use `Test-IncrementalReview.ps1` to preview changes
|
||||
2. **Check for force-push**: Review the analysis output - force-pushes require full reviews
|
||||
3. **Smart step filtering**: Skip review steps for file types that didn't change
|
||||
|
||||
### For Script Users
|
||||
|
||||
1. **Use absolute paths**: When specifying folders, use absolute paths to avoid ambiguity
|
||||
2. **Check exit codes**: Scripts exit with code 1 on error - check `$LASTEXITCODE` in automation
|
||||
3. **Parse JSON output**: Use `ConvertFrom-Json` to work with structured output from `Get-PrIncrementalChanges.ps1`
|
||||
4. **Handle empty results**: Check `ChangedFiles.Count` before iterating
|
||||
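Items 2 through 4 above combine naturally in automation; a minimal sketch (not part of the shipped tooling) might look like:

```powershell
$json = .\Get-PrIncrementalChanges.ps1 -PullRequestNumber 42374 -LastReviewedCommitSha "abc123"
if ($LASTEXITCODE -ne 0) {
    Write-Error "Incremental change detection failed; stopping."
    return
}

$changes = $json | ConvertFrom-Json
if (-not $changes.ChangedFiles -or @($changes.ChangedFiles).Count -eq 0) {
    Write-Host "No changed files to review."
    return
}

foreach ($file in $changes.ChangedFiles) {
    Write-Host "Queue for review: $($file.Filename)"
}
```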
|
||||
### Performance Tips
|
||||
|
||||
1. **Batch operations**: When reviewing multiple PRs, collect all PR numbers and process in batch
|
||||
2. **Cache raw files**: Download baseline files once and reuse for multiple comparisons
|
||||
3. **Filter early**: Use incremental detection to skip unnecessary file reviews
|
||||
4. **Parallel processing**: Consider processing independent PRs in parallel
|
||||
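The second tip can look like this in practice (a sketch; the cache location and file names are illustrative):

```powershell
# Cache a baseline file once per review session and reuse it for later comparisons
$cacheDir = Join-Path $env:TEMP 'pr-review-cache'
New-Item -ItemType Directory -Path $cacheDir -Force | Out-Null

$baselinePath = Join-Path $cacheDir 'main.cpp.baseline.txt'
if (-not (Test-Path $baselinePath)) {
    .\Get-GitHubRawFile.ps1 -FilePath 'src/runner/main.cpp' -GitReference 'main' |
        Set-Content -Path $baselinePath
}
```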
|
||||
## Integration with AI Review Systems
|
||||
|
||||
These tools are designed to work with AI-powered review systems:
|
||||
|
||||
1. **Copilot Instructions**: This file serves as reference documentation for GitHub Copilot
|
||||
2. **Structured Output**: JSON output from scripts is easily parsed by AI systems
|
||||
3. **Incremental Intelligence**: AI can focus on changed files for more efficient reviews
|
||||
4. **Metadata Tracking**: Review iterations are tracked for context-aware suggestions
|
||||
|
||||
### Example AI Integration
|
||||
|
||||
```powershell
|
||||
# Get incremental changes
|
||||
$analysis = .\Get-PrIncrementalChanges.ps1 -PullRequestNumber $PR | ConvertFrom-Json
|
||||
|
||||
# Feed to AI review system
|
||||
$reviewPrompt = @"
|
||||
Review the following changed files in PR #$PR:
|
||||
$($analysis.ChangedFiles | ForEach-Object { "- $($_.Filename) ($($_.Status))" } | Out-String)
|
||||
|
||||
Focus on incremental changes only. Previous review was at SHA $($analysis.LastReviewedSha).
|
||||
"@
|
||||
|
||||
# Execute AI review with context
|
||||
Invoke-AIReview -Prompt $reviewPrompt -Files $analysis.ChangedFiles
|
||||
```
|
||||
|
||||
## Support and Further Information
|
||||
|
||||
For detailed script documentation, use PowerShell's help system:
|
||||
```powershell
|
||||
Get-Help .\Get-PrIncrementalChanges.ps1 -Full
|
||||
Get-Help .\Test-IncrementalReview.ps1 -Detailed
|
||||
```
|
||||
|
||||
Related documentation:
|
||||
- `.github/prompts/review-pr.prompt.md` - Complete review workflow guide
|
||||
- `doc/devdocs/` - PowerToys development documentation
|
||||
- GitHub CLI documentation: https://cli.github.com/manual/
|
||||
|
||||
For issues or questions, refer to the PowerToys contribution guidelines.
|
||||
.github/skills/release-note-generation/LICENSE.txt (vendored, 201 lines)
@@ -1,201 +0,0 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to the Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2026 Microsoft Corporation
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
.github/skills/release-note-generation/SKILL.md (vendored, 132 lines)
@@ -1,132 +0,0 @@
|
||||
---
|
||||
name: release-note-generation
|
||||
description: Toolkit for generating PowerToys release notes from GitHub milestone PRs or commit ranges. Use when asked to create release notes, summarize milestone PRs, generate changelog, prepare release documentation, request Copilot reviews for PRs, update README for a new release, manage PR milestones, or collect PRs between commits/tags. Supports PR collection by milestone or commit range, milestone assignment, grouping by label, summarization with external contributor attribution, and README version bumping.
|
||||
license: Complete terms in LICENSE.txt
|
||||
---
|
||||
|
||||
# Release Note Generation Skill
|
||||
|
||||
Generate professional release notes for PowerToys milestones by collecting merged PRs, requesting Copilot code reviews, grouping by label, and producing user-facing summaries.
|
||||
|
||||
## Output Directory
|
||||
|
||||
All generated artifacts are placed under `Generated Files/ReleaseNotes/` at the repository root (gitignored).
|
||||
|
||||
```
|
||||
Generated Files/ReleaseNotes/
|
||||
├── milestone_prs.json # Raw PR data from GitHub
|
||||
├── sorted_prs.csv # Sorted PR list with Copilot summaries
|
||||
├── prs_with_milestone.csv # Milestone assignment tracking
|
||||
├── grouped_csv/ # PRs grouped by label (one CSV per label)
|
||||
├── grouped_md/ # Generated markdown summaries per label
|
||||
└── v{VERSION}-release-notes.md # Final consolidated release notes
|
||||
```
|
||||
|
||||
## When to Use This Skill
|
||||
|
||||
- Generate release notes for a milestone
|
||||
- Summarize PRs merged in a release
|
||||
- Request Copilot reviews for milestone PRs
|
||||
- Assign milestones to PRs missing them
|
||||
- Collect PRs between two commits/tags
|
||||
- Update README.md for a new version
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- GitHub CLI (`gh`) installed and authenticated
|
||||
- MCP Server: github-mcp-server installed
|
||||
- GitHub Copilot code review enabled for the org/repo
|
||||
|
||||
## Required Variables
|
||||
|
||||
⚠️ **Before starting**, confirm `{{ReleaseVersion}}` with the user. If not provided, **ASK**: "What release version are we generating notes for? (e.g., 0.98)"
|
||||
|
||||
| Variable | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `{{ReleaseVersion}}` | Target release version | `0.98` |
|
||||
|
||||
## Workflow Overview
|
||||
|
||||
```
|
||||
┌────────────────────────────────┐
|
||||
│ 1.1 Collect PRs (stable range) │
|
||||
└────────────────────────────────┘
|
||||
↓
|
||||
┌────────────────────────────────┐
|
||||
│ 1.2 Assign Milestones │
|
||||
└────────────────────────────────┘
|
||||
↓
|
||||
┌────────────────────────────────┐
|
||||
│ 2.1–2.4 Label PRs (auto+human) │
|
||||
└────────────────────────────────┘
|
||||
↓
|
||||
┌────────────────────────────────┐
|
||||
│ 3.1 Request Reviews (Copilot) │
|
||||
└────────────────────────────────┘
|
||||
↓
|
||||
┌────────────────────────────────┐
|
||||
│ 3.2 Refresh PR data │
|
||||
│ (CopilotSummary) │
|
||||
└────────────────────────────────┘
|
||||
↓
|
||||
┌────────────────────────────────┐
|
||||
│ 3.3 Group by label │
|
||||
│ (grouped_csv) │
|
||||
└────────────────────────────────┘
|
||||
↓
|
||||
┌────────────────────────────────┐
|
||||
│ 4.1 Summarize (grouped_md) │
|
||||
└────────────────────────────────┘
|
||||
↓
|
||||
┌────────────────────────────────┐
|
||||
│ 4.2 Final notes (v{VERSION}.md) │
|
||||
└────────────────────────────────┘
|
||||
```
|
||||
|
||||
| Step | Action | Details |
|
||||
|------|--------|---------|
|
||||
| 1.1 | Collect PRs | From previous release tag on `stable` branch → `sorted_prs.csv` |
|
||||
| 1.2 | Assign Milestones | Ensure all PRs have correct milestone |
|
||||
| 2.1–2.4 | Label PRs | Auto-suggest + human label low-confidence |
|
||||
| 3.1–3.3 | Reviews & Grouping | Request Copilot reviews → refresh → group by label |
|
||||
| 4.1–4.2 | Summaries & Final | Generate grouped summaries, then consolidate |
|
||||
|
||||
## Detailed workflow docs
|
||||
|
||||
Do not read all steps at once—only read the step you are executing.
|
||||
|
||||
- [Step 1: Collection & Milestones](./references/step1-collection.md)
|
||||
- [Step 2: Labeling PRs](./references/step2-labeling.md)
|
||||
- [Step 3: Reviews & Grouping](./references/step3-review-grouping.md)
|
||||
- [Step 4: Summarization](./references/step4-summarization.md)
|
||||
|
||||
|
||||
## Available Scripts
|
||||
|
||||
| Script | Purpose |
|
||||
|--------|---------|
|
||||
| [dump-prs-since-commit.ps1](./scripts/dump-prs-since-commit.ps1) | Fetch PRs between commits/tags |
|
||||
| [group-prs-by-label.ps1](./scripts/group-prs-by-label.ps1) | Group PRs into CSVs |
|
||||
| [collect-or-apply-milestones.ps1](./scripts/collect-or-apply-milestones.ps1) | Assign milestones |
|
||||
| [diff_prs.ps1](./scripts/diff_prs.ps1) | Incremental PR diff |
|
||||
|
||||
## References
|
||||
|
||||
- [Sample Output](./references/SampleOutput.md) - Example summary formatting
|
||||
- [Detailed Instructions](./references/Instruction.md) - Legacy full documentation
|
||||
|
||||
## Conventions
|
||||
|
||||
- **Terminal usage**: Disabled by default; only run scripts when user explicitly requests
|
||||
- **Batch generation**: Generate ALL grouped_md files in one pass, then human reviews
|
||||
- **PR order**: Preserve order from `sorted_prs.csv` in all outputs
|
||||
- **Label filtering**: Keeps `Product-*`, `Area-*`, `GitHub*`, `*Plugin`, `Issue-*`
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
| Issue | Solution |
|
||||
|-------|----------|
|
||||
| `gh` command not found | Install GitHub CLI and add to PATH |
|
||||
| No PRs returned | Verify milestone title matches exactly |
|
||||
| Empty CopilotSummary | Request Copilot reviews first, then re-run dump |
|
||||
| Many unlabeled PRs | Return to labeling step before grouping |
|
||||
@@ -1,9 +0,0 @@
- Added mouse button actions so you can choose what left, right, or middle click does. Thanks [@PesBandi](https://github.com/PesBandi)!

- Aligned window styling with current Windows theme for a cleaner look. Thanks [@sadirano](https://github.com/sadirano)!

- Ensured screen readers are notified when the selected item in the list changes for better accessibility.

- Implemented a configurable UI test pipeline that can use pre-built official releases instead of building everything from scratch, cutting test execution time down from more than 2 hours.

- Fixed Alt+Left Arrow navigation not working when the search box contains text. Thanks [@jiripolasek](https://github.com/jiripolasek)!
@@ -1,143 +0,0 @@
|
||||
# Step 1: Collection and Milestones
|
||||
|
||||
## 1.0 To-do
|
||||
- 1.0.1 Generate MemberList.md (REQUIRED)
|
||||
- 1.1 Collect PRs
|
||||
- 1.2 Assign Milestones (REQUIRED)
|
||||
|
||||
## Required Variables
|
||||
|
||||
⚠️ **Before starting**, confirm these values with the user:
|
||||
|
||||
| Variable | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `{{ReleaseVersion}}` | Target release version | `0.97` |
|
||||
| `{{PreviousReleaseTag}}` | Previous release tag from releases page | `v0.96.1` |
|
||||
|
||||
**If user hasn't specified `{{ReleaseVersion}}`, ASK:** "What release version are we generating notes for? (e.g., 0.97)"
|
||||
|
||||
**`{{PreviousReleaseTag}}` is derived from the releases page, not user input.** Use the latest published release tag (top of the page). You will use its tag name and tag commit SHA in Step 1.
|
||||
|
||||
---
|
||||
|
||||
## 1.0.1 Generate MemberList.md (REQUIRED)
|
||||
|
||||
Create `Generated Files/ReleaseNotes/MemberList.md` from the **PowerToys core team** section in [COMMUNITY.md](../../../COMMUNITY.md).
|
||||
|
||||
Rules:
|
||||
- One GitHub username per line, **no** `@` prefix.
|
||||
- Use the usernames exactly as listed in the core team section.
|
||||
- Do not include former team members or other sections.
|
||||
|
||||
Example (format only):
|
||||
```
|
||||
example-user
|
||||
another-user
|
||||
```
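A possible way to script this is sketched below. It assumes the core team section heading contains the text "PowerToys core team" and that members appear as GitHub profile links; verify both against the actual COMMUNITY.md before relying on it:

```powershell
# Assumption: a "PowerToys core team" heading followed by github.com/<username> links
$communityText = Get-Content ./COMMUNITY.md -Raw
if ($communityText -match '(?s)#+\s*PowerToys core team(.*?)(\n#+\s|\z)') {
    [regex]::Matches($Matches[1], 'github\.com/([A-Za-z0-9-]+)') |
        ForEach-Object { $_.Groups[1].Value } |
        Select-Object -Unique |
        Set-Content 'Generated Files/ReleaseNotes/MemberList.md'
}
```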
|
||||
|
||||
---
|
||||
|
||||
## 1.1 Collect PRs
|
||||
|
||||
### 1.1.1 Get the previous release commit
|
||||
|
||||
1. Open the [PowerToys releases page](https://github.com/microsoft/PowerToys/releases/)
|
||||
2. Find the latest release (e.g., v0.96.1, which should be at the top)
|
||||
3. Set `{{PreviousReleaseTag}}` to that tag name (e.g., `v0.96.1`)
|
||||
4. Copy the full tag commit SHA as `{{SHALastRelease}}`
|
||||
|
||||
|
||||
**If the release SHA is not in your branch history:** Use the helper script to find an equivalent commit on the target branch by matching the commit title:
|
||||
|
||||
```powershell
|
||||
pwsh ./.github/skills/release-note-generation/scripts/find-commit-by-title.ps1 `
|
||||
-Commit '{{SHALastRelease}}' `
|
||||
-Branch 'stable'
|
||||
```
|
||||
|
||||
### 1.1.2 Run collection script against stable branch
|
||||
|
||||
```powershell
|
||||
# Collect PRs from previous release to current HEAD of stable branch
|
||||
pwsh ./.github/skills/release-note-generation/scripts/dump-prs-since-commit.ps1 `
|
||||
-StartCommit '{{SHALastRelease}}' `
|
||||
-Branch 'stable' `
|
||||
-OutputDir 'Generated Files/ReleaseNotes'
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `-StartCommit` - Previous release tag or commit SHA (exclusive)
|
||||
- `-Branch` - Always use `stable` branch, not `main` (script uses `origin/stable` as the end ref)
|
||||
- `-EndCommit` - Optional override if you need a custom end ref
|
||||
- `-OutputDir` - Output directory for generated files
|
||||
|
||||
**Reliability check:** If the script reports “No commits found”, the stable branch has not moved since the last release. In that case, either:
|
||||
- Confirm this is expected and stop (no new release notes), or
|
||||
- Re-run against `main` to gather pending changes for the next release cycle.
|
||||
|
||||
The script detects both merge commits (`Merge pull request #12345`) and squash commits (`Feature (#12345)`).
|
||||
|
||||
**Output** (in `Generated Files/ReleaseNotes/`):
|
||||
- `milestone_prs.json` - raw PR data
|
||||
- `sorted_prs.csv` - sorted PR list with columns: Id, Title, Labels, Author, Url, Body, CopilotSummary, NeedThanks
|
||||
|
||||
---
|
||||
|
||||
## 1.2 Assign Milestones (REQUIRED)
|
||||
|
||||
**Before generating release notes**, ensure all collected PRs have the correct milestone assigned.
|
||||
|
||||
⚠️ **CRITICAL:** Do NOT proceed to labeling until all PRs have milestones assigned.
|
||||
|
||||
### 1.2.1 Check current milestone status (dry run)
|
||||
|
||||
```powershell
|
||||
# Dry run first to see what would be changed:
|
||||
pwsh ./.github/skills/release-note-generation/scripts/collect-or-apply-milestones.ps1 `
|
||||
-InputCsv 'Generated Files/ReleaseNotes/sorted_prs.csv' `
|
||||
-OutputCsv 'Generated Files/ReleaseNotes/prs_with_milestone.csv' `
|
||||
-DefaultMilestone 'PowerToys {{ReleaseVersion}}' `
|
||||
-ApplyMissing -WhatIf
|
||||
```
|
||||
|
||||
This queries GitHub for each PR's current milestone and shows which PRs would be updated.
|
||||
|
||||
### 1.2.2 Apply milestones to PRs missing them
|
||||
|
||||
```powershell
|
||||
# Apply for real:
|
||||
pwsh ./.github/skills/release-note-generation/scripts/collect-or-apply-milestones.ps1 `
|
||||
-InputCsv 'Generated Files/ReleaseNotes/sorted_prs.csv' `
|
||||
-OutputCsv 'Generated Files/ReleaseNotes/prs_with_milestone.csv' `
|
||||
-DefaultMilestone 'PowerToys {{ReleaseVersion}}' `
|
||||
-ApplyMissing
|
||||
```
|
||||
|
||||
**Script Behavior:**
|
||||
- Queries each PR's current milestone from GitHub
|
||||
- PRs that already have a milestone are **skipped** (not overwritten)
|
||||
- PRs missing a milestone get the default milestone applied
|
||||
- Outputs `prs_with_milestone.csv` with (Id, Milestone) columns
|
||||
- Produces summary: `Updated=X Skipped=Y Failed=Z`
|
||||
|
||||
**Validation:** After assignment, all PRs in `prs_with_milestone.csv` should have the target milestone.
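A quick spot check of the output can confirm this (a sketch; it relies on the Id and Milestone columns described above):

```powershell
$rows  = Import-Csv 'Generated Files/ReleaseNotes/prs_with_milestone.csv'
$wrong = $rows | Where-Object { $_.Milestone -ne 'PowerToys {{ReleaseVersion}}' }
if (@($wrong).Count -gt 0) {
    Write-Warning "$(@($wrong).Count) PR(s) do not carry the target milestone:"
    $wrong | ForEach-Object { Write-Host " - #$($_.Id): '$($_.Milestone)'" }
} else {
    Write-Host 'All collected PRs carry the target milestone.'
}
```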
|
||||
|
||||
---
|
||||
|
||||
## Additional Commands
|
||||
|
||||
### Collect milestones only (no changes to GitHub)
|
||||
```powershell
|
||||
pwsh ./.github/skills/release-note-generation/scripts/collect-or-apply-milestones.ps1 `
|
||||
-InputCsv 'Generated Files/ReleaseNotes/sorted_prs.csv' `
|
||||
-OutputCsv 'Generated Files/ReleaseNotes/prs_with_milestone.csv'
|
||||
```
|
||||
|
||||
### Local assignment only (fill blanks in CSV, no GitHub changes)
|
||||
```powershell
|
||||
pwsh ./.github/skills/release-note-generation/scripts/collect-or-apply-milestones.ps1 `
|
||||
-InputCsv 'Generated Files/ReleaseNotes/sorted_prs.csv' `
|
||||
-OutputCsv 'Generated Files/ReleaseNotes/prs_with_milestone.csv' `
|
||||
-DefaultMilestone 'PowerToys {{ReleaseVersion}}' `
|
||||
-LocalAssign
|
||||
```
|
||||
@@ -1,131 +0,0 @@
|
||||
# Step 2: Label Unlabeled PRs
|
||||
|
||||
## 2.0 To-do
|
||||
- 2.1 Identify unlabeled PRs (Agent Mode)
|
||||
- 2.2 Suggest labels (Agent Mode)
|
||||
- 2.3 Human label low-confidence PRs
|
||||
- 2.4 Recheck labels, delete Unlabeled.csv, and re-collect
|
||||
|
||||
**Before grouping**, ensure all PRs have appropriate labels for categorization.
|
||||
|
||||
⚠️ **CRITICAL:** Do NOT proceed to grouping until all PRs have labels assigned. PRs without labels will end up in `Unlabeled.csv` and won't appear in the correct release note sections.
|
||||
|
||||
## 2.1 Identify unlabeled PRs (Agent Mode)
|
||||
|
||||
Read `sorted_prs.csv` and identify PRs with empty or missing `Labels` column.
|
||||
|
||||
For each unlabeled PR, analyze:
|
||||
- **Title** - Often contains module name or feature
|
||||
- **Body** - PR description with context
|
||||
- **CopilotSummary** - AI-generated summary of changes
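One way to pull that list programmatically (a sketch; the column names follow the `sorted_prs.csv` schema from Step 1):

```powershell
$prs = Import-Csv 'Generated Files/ReleaseNotes/sorted_prs.csv'
$unlabeled = $prs | Where-Object { [string]::IsNullOrWhiteSpace($_.Labels) }
Write-Host "Unlabeled PRs: $(@($unlabeled).Count)"
$unlabeled | Select-Object Id, Title, Url | Format-Table -AutoSize
```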
|
||||
|
||||
## 2.2 Suggest labels (Agent Mode)
|
||||
|
||||
For each unlabeled PR, suggest an appropriate label based on the content analysis.
|
||||
|
||||
**Output:** Create `Generated Files/ReleaseNotes/prs_label_review.md` with the following format:
|
||||
|
||||
```markdown
|
||||
# PR Label Review
|
||||
|
||||
Generated: YYYY-MM-DD HH:mm:ss
|
||||
|
||||
## Summary
|
||||
- Total unlabeled PRs: X
|
||||
- High confidence: X
|
||||
- Medium confidence: X
|
||||
- Low confidence: X
|
||||
|
||||
---
|
||||
|
||||
## PRs Needing Review (sorted by confidence, low first)
|
||||
|
||||
| PR | Title | Suggested Label | Confidence | Reason |
|
||||
|----|-------|-----------------|------------|--------|
|
||||
| [#12347](url) | Some generic fix | ??? | Low | Unclear from content |
|
||||
| [#12346](url) | Update dependencies | `Area-Build` | Medium | Body mentions NuGet packages |
|
||||
```
|
||||
|
||||
Sort by confidence (low first) so the human reviewer addresses the uncertain ones first.
|
||||
|
||||
After writing `prs_label_review.md`, **generate `prs_to_label.csv`, apply labels, and re-run collection** so the CSV/labels stay in sync:
|
||||
|
||||
```powershell
|
||||
# Generate CSV from suggestions (agent)
|
||||
# Apply labels
|
||||
pwsh ./.github/skills/release-note-generation/scripts/apply-labels.ps1 `
|
||||
-InputCsv 'Generated Files/ReleaseNotes/prs_to_label.csv'
|
||||
|
||||
# Refresh collection
|
||||
pwsh ./.github/skills/release-note-generation/scripts/dump-prs-since-commit.ps1 `
|
||||
-StartCommit '{{PreviousReleaseTag}}' -Branch 'stable' `
|
||||
-OutputDir 'Generated Files/ReleaseNotes'
|
||||
```
|
||||
|
||||
## 2.3 Human label low-confidence PRs
|
||||
|
||||
Ask the human to label **low-confidence** PRs directly (in GitHub). Skip any they decide not to label.
|
||||
|
||||
## 2.4 Recheck labels, delete Unlabeled.csv, and re-collect
|
||||
|
||||
Recheck that all PRs now have labels. Delete `Unlabeled.csv` (if present), then re-run the collection script to update `sorted_prs.csv`:
|
||||
|
||||
```powershell
|
||||
# Remove stale unlabeled output if it exists
|
||||
Remove-Item 'Generated Files/ReleaseNotes/Unlabeled.csv' -ErrorAction SilentlyContinue
|
||||
```
|
||||
|
||||
```powershell
|
||||
pwsh ./.github/skills/release-note-generation/scripts/dump-prs-since-commit.ps1 `
|
||||
-StartCommit '{{PreviousReleaseTag}}' -Branch 'stable' `
|
||||
-OutputDir 'Generated Files/ReleaseNotes'
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common Label Mappings
|
||||
|
||||
| Keywords/Patterns | Suggested Label |
|
||||
| ----------------- | --------------- |
|
||||
| Advanced Paste, AP, clipboard, paste | `Product-Advanced Paste` |
|
||||
| CmdPal, Command Palette, cmdpal | `Product-Command Palette` |
|
||||
| FancyZones, zones, layout | `Product-FancyZones` |
|
||||
| ZoomIt, zoom, screen annotation | `Product-ZoomIt` |
|
||||
| Settings, settings-ui, Quick Access, flyout | `Product-Settings` |
|
||||
| Installer, setup, MSI, MSIX, WiX | `Area-Setup/Install` |
|
||||
| Build, pipeline, CI/CD, msbuild | `Area-Build` |
|
||||
| Test, unit test, UI test, fuzz | `Area-Tests` |
|
||||
| Localization, loc, translation, resw | `Area-Localization` |
|
||||
| Foundry, AI, LLM | `Product-Advanced Paste` (AI features) |
|
||||
| Mouse Without Borders, MWB | `Product-Mouse Without Borders` |
|
||||
| PowerRename, rename, regex | `Product-PowerRename` |
|
||||
| Peek, preview, file preview | `Product-Peek` |
|
||||
| Image Resizer, resize | `Product-Image Resizer` |
|
||||
| LightSwitch, theme, dark mode | `Product-LightSwitch` |
|
||||
| Quick Accent, accent, diacritics | `Product-Quick Accent` |
|
||||
| Awake, keep awake, caffeine | `Product-Awake` |
|
||||
| ColorPicker, color picker, eyedropper | `Product-ColorPicker` |
|
||||
| Hosts, hosts file | `Product-Hosts` |
|
||||
| Keyboard Manager, remap | `Product-Keyboard Manager` |
|
||||
| Mouse Highlighter | `Product-Mouse Highlighter` |
|
||||
| Mouse Jump | `Product-Mouse Jump` |
|
||||
| Find My Mouse | `Product-Find My Mouse` |
|
||||
| Mouse Pointer Crosshairs | `Product-Mouse Pointer Crosshairs` |
|
||||
| Shortcut Guide | `Product-Shortcut Guide` |
|
||||
| Text Extractor, OCR, PowerOCR | `Product-Text Extractor` |
|
||||
| Workspaces | `Product-Workspaces` |
|
||||
| File Locksmith | `Product-File Locksmith` |
|
||||
| Crop And Lock | `Product-CropAndLock` |
|
||||
| Environment Variables | `Product-Environment Variables` |
|
||||
| New+ | `Product-New+` |
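
As an illustration, a first-pass suggestion could be generated from a small keyword map like the sketch below (it covers only a few rows of the table above; the final label decision still belongs to the agent and the human reviewer):

```powershell
# Minimal keyword-to-label heuristic (subset of the mapping table above)
$labelMap = [ordered]@{
    'FancyZones|zones|layout'       = 'Product-FancyZones'
    'CmdPal|Command Palette'        = 'Product-Command Palette'
    'installer|setup|MSI|MSIX|WiX'  = 'Area-Setup/Install'
    'localization|translation|resw' = 'Area-Localization'
}

function Get-SuggestedLabel([string]$Title) {
    foreach ($pattern in $labelMap.Keys) {
        if ($Title -match $pattern) { return $labelMap[$pattern] }
    }
    return $null  # no match: low confidence, needs human review
}

Get-SuggestedLabel 'FancyZones: fix layout reset on monitor change'  # -> Product-FancyZones
```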
|
||||
|
||||
## Label Filtering Rules
|
||||
|
||||
The grouping script keeps labels matching these patterns:
|
||||
- `Product-*`
|
||||
- `Area-*`
|
||||
- `GitHub*`
|
||||
- `*Plugin`
|
||||
- `Issue-*`
|
||||
|
||||
Other labels are ignored for grouping purposes.
|
||||
@@ -1,37 +0,0 @@
# Step 3: Copilot Reviews and Grouping

## 3.0 To-do

- 3.1 Request Copilot Reviews (Agent Mode)
- 3.2 Refresh PR Data
- 3.3 Group PRs by Label

## 3.1 Request Copilot Reviews (Agent Mode)

Use MCP tools to request Copilot reviews for all PRs in `Generated Files/ReleaseNotes/sorted_prs.csv`:

- Use `mcp_github_request_copilot_review` for each PR ID
- Do NOT generate or run scripts for this step

---

## 3.2 Refresh PR Data

Re-run the collection script to capture Copilot review summaries into the `CopilotSummary` column:

```powershell
pwsh ./.github/skills/release-note-generation/scripts/dump-prs-since-commit.ps1 `
  -StartCommit '{{PreviousReleaseTag}}' -Branch 'stable' `
  -OutputDir 'Generated Files/ReleaseNotes'
```

---

## 3.3 Group PRs by Label

```powershell
pwsh ./.github/skills/release-note-generation/scripts/group-prs-by-label.ps1 `
  -CsvPath 'Generated Files/ReleaseNotes/sorted_prs.csv' `
  -OutDir 'Generated Files/ReleaseNotes/grouped_csv'
```

Creates `Generated Files/ReleaseNotes/grouped_csv/` with one CSV per label combination.

**Validation:** The `Unlabeled.csv` file should be minimal (ideally empty). If many PRs remain unlabeled, return to Step 2 (see [step2-labeling.md](./step2-labeling.md)).
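As a quick check before moving on, a small snippet like this can report how many rows ended up unlabeled. This is a sketch that assumes `Unlabeled.csv` lands in the grouped output directory from 3.3; adjust the path if yours is written elsewhere.

```powershell
$unlabeledCsv = 'Generated Files/ReleaseNotes/grouped_csv/Unlabeled.csv'
if (Test-Path -LiteralPath $unlabeledCsv) {
    # Wrap in @() so a single row still reports Count = 1.
    $count = @(Import-Csv -LiteralPath $unlabeledCsv).Count
    Write-Host "Unlabeled PRs remaining: $count"
} else {
    Write-Host "No Unlabeled.csv found - every PR was grouped under a label."
}
```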
@@ -1,88 +0,0 @@
# Step 4: Summaries and Final Release Notes

## 4.0 To-do

- 4.1 Generate Summary Markdown (Agent Mode)
- 4.2 Produce Final Release Notes File

## 4.1 Generate Summary Markdown (Agent Mode)

For each CSV in `Generated Files/ReleaseNotes/grouped_csv/`, create a markdown file in `Generated Files/ReleaseNotes/grouped_md/`.

⚠️ **IMPORTANT:** Generate **ALL** markdown files first. Do NOT pause between files or ask for feedback during generation. Complete the entire batch; the human reviews afterwards.

### Structure per file

**1. Bullet list** - one concise, user-facing line per PR:
- Use the "Verb-ed + Scenario + Impact" sentence structure so readers think "That's exactly what I need" or "Yes, that's an awesome fix." The impact can be end-user focused (written to convey user excitement) or technical (performance/stability) when the user-facing impact is minimal.
- If the impact is unclear or unremarkable, mark the line as needing a human summary
- Source from Title, Body, and CopilotSummary (prefer CopilotSummary when available)
- If the `NeedThanks` column in the CSV is `True`, append: `Thanks [@Author](https://github.com/Author)!`
- Do NOT include PR numbers in bullet lines
- Do NOT mention "security" or "privacy" issues, since disclosing them could help attackers target earlier versions. Instead, describe the user-facing scenario, usage, or impact.
- If confidence < 70%, write: `Human Summary Needed: <PR full link>`

**See [SampleOutput.md](./SampleOutput.md) for examples of well-written bullet summaries.**

**2. Three-column table** (same PR order):
- Column 1: Concise summary (same as bullet)
- Column 2: PR link `[#ID](URL)`
- Column 3: Confidence level (High/Medium/Low)
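For illustration only (a made-up PR and author, not taken from SampleOutput.md), a generated `grouped_md` file pairs each bullet with its table row like this:

```markdown
- Fixed zone layouts resetting after a monitor hot-plug, so multi-monitor setups keep their layout across dock/undock cycles. Thanks [@exampleuser](https://github.com/exampleuser)!

| Summary | PR | Confidence |
| ------- | -- | ---------- |
| Fixed zone layouts resetting after a monitor hot-plug, so multi-monitor setups keep their layout across dock/undock cycles. Thanks [@exampleuser](https://github.com/exampleuser)! | [#12345](https://github.com/microsoft/PowerToys/pull/12345) | High |
```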
### Review Process (AFTER all files generated)

- Human reviews each `grouped_md/*.md` file and requests rewrites as needed
- Human may say "rewrite Product-X" or "combine these bullets"; apply changes to that specific file
- Do NOT interrupt generation to ask for feedback

---

## 4.2 Produce Final Release Notes File

Once all `grouped_md/*.md` files are reviewed and approved, consolidate into a single release notes file.

**Output:** `Generated Files/ReleaseNotes/v{{ReleaseVersion}}-release-notes.md`

### Structure

**1. Highlights section** (top):
- 8-12 bullets covering the most user-visible features and impactful fixes
- Pattern: `**Module**: brief description`
- Avoid internal refactors; focus on what users will notice

**2. Module sections** (alphabetical order):
- One section per product (Advanced Paste, Awake, Command Palette, etc.)
- Migrate bullet summaries from the approved `grouped_md/Product-*.md` files
- Add a single 'Development' section for the remaining summaries from the approved `grouped_md/Area-*.md` files
- Re-review end to end, group release improvements by section, and move the most important items to the top of each section. Some items in the Development section may overlap with a product area; move those to the module section where they fit better.

### Example Final Structure

```markdown
# PowerToys v{{ReleaseVersion}} Release Notes

## Highlights

- **Command Palette**: Added theme customization and drag-and-drop support
- **Advanced Paste**: Image input for AI, color detection in clipboard history
- **FancyZones**: New CLI tool for command-line layout management
...

---

## Advanced Paste

- Wrapped paste option lists in a single ScrollViewer
- Added image input handling for AI-powered transformations
...

## Awake

- Fixed timed mode expiration. Thanks [@daverayment](https://github.com/daverayment)!
...

---

## Development
...
```
@@ -1,90 +0,0 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Apply labels to PRs from a CSV file.
|
||||
|
||||
.DESCRIPTION
|
||||
Reads a CSV with Id and Label columns and applies the specified label to each PR via GitHub CLI.
|
||||
Supports dry-run mode to preview changes before applying.
|
||||
|
||||
.PARAMETER InputCsv
|
||||
CSV file with Id and Label columns. Default: prs_to_label.csv
|
||||
|
||||
.PARAMETER Repo
|
||||
GitHub repository (owner/name). Default: microsoft/PowerToys
|
||||
|
||||
.PARAMETER WhatIf
|
||||
Dry run - show what would be applied without making changes.
|
||||
|
||||
.EXAMPLE
|
||||
pwsh ./apply-labels.ps1 -InputCsv 'Generated Files/ReleaseNotes/prs_to_label.csv'
|
||||
|
||||
.EXAMPLE
|
||||
pwsh ./apply-labels.ps1 -InputCsv 'Generated Files/ReleaseNotes/prs_to_label.csv' -WhatIf
|
||||
|
||||
.NOTES
|
||||
Requires: gh CLI authenticated with repo write access.
|
||||
|
||||
Input CSV format:
|
||||
Id,Label
|
||||
12345,Product-Advanced Paste
|
||||
12346,Product-Settings
|
||||
#>
|
||||
[CmdletBinding()] param(
|
||||
[Parameter(Mandatory=$false)][string]$InputCsv = 'prs_to_label.csv',
|
||||
[Parameter(Mandatory=$false)][string]$Repo = 'microsoft/PowerToys',
|
||||
[switch]$WhatIf
|
||||
)
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
function Write-Info($m){ Write-Host "[info] $m" -ForegroundColor Cyan }
|
||||
function Write-Warn($m){ Write-Host "[warn] $m" -ForegroundColor Yellow }
|
||||
function Write-Err($m){ Write-Host "[error] $m" -ForegroundColor Red }
|
||||
function Write-OK($m){ Write-Host "[ok] $m" -ForegroundColor Green }
|
||||
|
||||
if (-not (Get-Command gh -ErrorAction SilentlyContinue)) { Write-Err "GitHub CLI 'gh' not found in PATH"; exit 1 }
|
||||
if (-not (Test-Path -LiteralPath $InputCsv)) { Write-Err "Input CSV not found: $InputCsv"; exit 1 }
|
||||
|
||||
$rows = Import-Csv -LiteralPath $InputCsv
|
||||
if (-not $rows) { Write-Info "No rows in CSV."; exit 0 }
|
||||
|
||||
$firstCols = $rows[0].PSObject.Properties.Name
|
||||
if (-not ($firstCols -contains 'Id' -and $firstCols -contains 'Label')) {
|
||||
Write-Err "CSV must contain 'Id' and 'Label' columns"; exit 1
|
||||
}
|
||||
|
||||
Write-Info "Processing $($rows.Count) label assignments..."
|
||||
if ($WhatIf) { Write-Warn "DRY RUN - no changes will be made" }
|
||||
|
||||
$applied = 0
|
||||
$skipped = 0
|
||||
$failed = 0
|
||||
|
||||
foreach ($row in $rows) {
|
||||
$id = $row.Id
|
||||
$label = $row.Label
|
||||
|
||||
if ([string]::IsNullOrWhiteSpace($id) -or [string]::IsNullOrWhiteSpace($label)) {
|
||||
Write-Warn "Skipping row with empty Id or Label"
|
||||
$skipped++
|
||||
continue
|
||||
}
|
||||
|
||||
if ($WhatIf) {
|
||||
Write-Info "Would apply label '$label' to PR #$id"
|
||||
$applied++
|
||||
continue
|
||||
}
|
||||
|
||||
try {
|
||||
gh pr edit $id --repo $Repo --add-label $label 2>&1 | Out-Null
|
||||
Write-OK "Applied '$label' to PR #$id"
|
||||
$applied++
|
||||
} catch {
|
||||
Write-Warn "Failed to apply label to PR #${id}: $_"
|
||||
$failed++
|
||||
}
|
||||
}
|
||||
|
||||
Write-Info ""
|
||||
Write-Info "Summary: Applied=$applied Skipped=$skipped Failed=$failed"
|
||||
@@ -1,172 +0,0 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Collect existing PR milestones or (optionally) assign/apply a milestone to missing PRs in one script.
|
||||
|
||||
.DESCRIPTION
|
||||
This unified script merges the behaviors of the previous add-milestone-column (collector) and
|
||||
set-milestones-missing (remote updater) scripts.
|
||||
|
||||
Modes (controlled by switches):
|
||||
1. Collect (default) – For each PR Id in the input CSV, queries GitHub for the current milestone and
|
||||
outputs a two-column CSV (Id,Milestone) leaving blanks where none are set.
|
||||
2. LocalAssign – Same as Collect, but for rows that end up blank assigns the value of -DefaultMilestone
|
||||
in memory (does NOT touch GitHub). Useful for quickly preparing a fully populated CSV.
|
||||
3. ApplyMissing – After determining which PRs have no milestone, call GitHub API to set their milestone
|
||||
to -DefaultMilestone. Requires milestone to already exist (open). Network + write.
|
||||
|
||||
You can combine LocalAssign and ApplyMissing: the remote update uses the existing live state; LocalAssign only
|
||||
affects the output CSV/pipeline objects.
|
||||
|
||||
.PARAMETER InputCsv
|
||||
Source CSV with at least an Id column. Default: sorted_prs.csv
|
||||
|
||||
.PARAMETER OutputCsv
|
||||
Destination CSV for collected (and optionally locally assigned) milestones. Default: prs_with_milestone.csv
|
||||
|
||||
.PARAMETER Repo
|
||||
GitHub repository (owner/name). Default: microsoft/PowerToys
|
||||
|
||||
.PARAMETER DefaultMilestone
|
||||
Milestone title used when -LocalAssign or -ApplyMissing is specified. Default: 'PowerToys 0.97'
|
||||
|
||||
.PARAMETER Offline
|
||||
Skip ALL GitHub lookups / updates. Implies Collect-only with all Milestone cells blank (unless LocalAssign).
|
||||
|
||||
.PARAMETER LocalAssign
|
||||
Populate empty Milestone cells in the output with -DefaultMilestone (does not modify GitHub).
|
||||
|
||||
.PARAMETER ApplyMissing
|
||||
For PRs which currently have no milestone (live on GitHub), set them to -DefaultMilestone via Issues API.
|
||||
|
||||
.PARAMETER WhatIf
|
||||
Dry run for ApplyMissing: show intended remote changes without performing PATCH requests.
|
||||
|
||||
.EXAMPLE
|
||||
# Collect only
|
||||
pwsh ./collect-or-apply-milestones.ps1
|
||||
|
||||
.EXAMPLE
|
||||
# Collect and fill blanks locally in the output only
|
||||
pwsh ./collect-or-apply-milestones.ps1 -LocalAssign
|
||||
|
||||
.EXAMPLE
|
||||
# Collect and remotely apply milestone to missing PRs
|
||||
pwsh ./collect-or-apply-milestones.ps1 -ApplyMissing
|
||||
|
||||
.EXAMPLE
|
||||
# Dry run remote application
|
||||
pwsh ./collect-or-apply-milestones.ps1 -ApplyMissing -WhatIf
|
||||
|
||||
.EXAMPLE
|
||||
# Offline local assignment
|
||||
pwsh ./collect-or-apply-milestones.ps1 -Offline -LocalAssign -DefaultMilestone 'PowerToys 0.96'
|
||||
|
||||
.NOTES
|
||||
Requires the gh CLI unless -Offline is specified; -ApplyMissing cannot be combined with -Offline.
|
||||
Remote apply path queries milestones to resolve numeric ID.
|
||||
#>
|
||||
[CmdletBinding()] param(
|
||||
[Parameter(Mandatory=$false)][string]$InputCsv = 'sorted_prs.csv',
|
||||
[Parameter(Mandatory=$false)][string]$OutputCsv = 'prs_with_milestone.csv',
|
||||
[Parameter(Mandatory=$false)][string]$Repo = 'microsoft/PowerToys',
|
||||
[Parameter(Mandatory=$false)][string]$DefaultMilestone = 'PowerToys 0.97',
|
||||
[switch]$Offline,
|
||||
[switch]$LocalAssign,
|
||||
[switch]$ApplyMissing,
|
||||
[switch]$WhatIf
|
||||
)
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
function Write-Info($m){ Write-Host "[info] $m" -ForegroundColor Cyan }
|
||||
function Write-Warn($m){ Write-Host "[warn] $m" -ForegroundColor Yellow }
|
||||
function Write-Err($m){ Write-Host "[error] $m" -ForegroundColor Red }
|
||||
|
||||
if (-not (Test-Path -LiteralPath $InputCsv)) { Write-Err "Input CSV not found: $InputCsv"; exit 1 }
|
||||
$rows = Import-Csv -LiteralPath $InputCsv
|
||||
if (-not $rows) { Write-Warn "Input CSV has no rows."; @() | Export-Csv -NoTypeInformation -LiteralPath $OutputCsv; exit 0 }
|
||||
if (-not ($rows[0].PSObject.Properties.Name -contains 'Id')) { Write-Err "Input CSV missing 'Id' column."; exit 1 }
|
||||
|
||||
$needGh = -not $Offline   # gh CLI is needed for any non-offline operation (collect or apply)
|
||||
if ($needGh -and -not (Get-Command gh -ErrorAction SilentlyContinue)) { Write-Err "GitHub CLI 'gh' not found. Use -Offline or install gh."; exit 1 }
|
||||
|
||||
# Step 1: Collect current milestone titles
|
||||
$milestoneCache = @{}
|
||||
$collected = New-Object System.Collections.Generic.List[object]
|
||||
$idx = 0
|
||||
foreach ($row in $rows) {
|
||||
$idx++
|
||||
$id = $row.Id
|
||||
if (-not $id) { Write-Warn "Row $idx missing Id; skipping"; continue }
|
||||
$ms = ''
|
||||
if (-not $Offline) {
|
||||
if ($milestoneCache.ContainsKey($id)) { $ms = $milestoneCache[$id] }
|
||||
else {
|
||||
try {
|
||||
$json = gh pr view $id --repo $Repo --json milestone 2>$null | ConvertFrom-Json
|
||||
if ($json -and $json.milestone -and $json.milestone.title) { $ms = $json.milestone.title }
|
||||
} catch {
|
||||
Write-Warn "Failed to fetch PR #$id milestone: $_"
|
||||
}
|
||||
$milestoneCache[$id] = $ms
|
||||
}
|
||||
}
|
||||
$collected.Add([PSCustomObject]@{ Id = $id; Milestone = $ms }) | Out-Null
|
||||
}
|
||||
|
||||
# Step 2: Remote apply (if requested)
|
||||
$applySummary = @()
|
||||
if ($ApplyMissing) {
|
||||
if ($Offline) { Write-Err "Cannot use -ApplyMissing with -Offline."; exit 1 }
|
||||
Write-Info "Resolving milestone id for '$DefaultMilestone' ..."
|
||||
$milestonesRaw = gh api repos/$Repo/milestones --paginate --jq '.[] | {number,title,state}'
|
||||
$msObj = $milestonesRaw | ConvertFrom-Json | Where-Object { $_.title -eq $DefaultMilestone -and $_.state -eq 'open' } | Select-Object -First 1
|
||||
if (-not $msObj) { Write-Err "Milestone '$DefaultMilestone' not found/open."; exit 1 }
|
||||
$msNumber = $msObj.number
|
||||
$targets = $collected | Where-Object { [string]::IsNullOrWhiteSpace($_.Milestone) }
|
||||
Write-Info ("ApplyMissing: {0} PR(s) without milestone." -f $targets.Count)
|
||||
foreach ($t in $targets) {
|
||||
$id = $t.Id
|
||||
try {
|
||||
# Verify still missing live
|
||||
$current = gh pr view $id --repo $Repo --json milestone --jq '.milestone.title // ""'
|
||||
if ($current) {
|
||||
$applySummary += [PSCustomObject]@{ Id=$id; Action='Skip (already has)'; Milestone=$current; Status='OK' }
|
||||
continue
|
||||
}
|
||||
if ($WhatIf) {
|
||||
$applySummary += [PSCustomObject]@{ Id=$id; Action='Would set'; Milestone=$DefaultMilestone; Status='DRY RUN' }
|
||||
continue
|
||||
}
|
||||
gh api -X PATCH -H 'Accept: application/vnd.github+json' repos/$Repo/issues/$id -f milestone=$msNumber | Out-Null
|
||||
$applySummary += [PSCustomObject]@{ Id=$id; Action='Set'; Milestone=$DefaultMilestone; Status='OK' }
|
||||
# Reflect in collected object for CSV output if not LocalAssign already doing so
|
||||
$t.Milestone = $DefaultMilestone
|
||||
} catch {
|
||||
$errText = $_ | Out-String
|
||||
$applySummary += [PSCustomObject]@{ Id=$id; Action='Failed'; Milestone=$DefaultMilestone; Status=$errText.Trim() }
|
||||
Write-Warn ("Failed to set milestone for PR #{0}: {1}" -f $id, ($errText.Trim()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Step 3: Local assignment (purely for output) AFTER remote so remote actual result not overwritten accidentally
|
||||
if ($LocalAssign) {
|
||||
foreach ($item in $collected) {
|
||||
if ([string]::IsNullOrWhiteSpace($item.Milestone)) { $item.Milestone = $DefaultMilestone }
|
||||
}
|
||||
}
|
||||
|
||||
# Step 4: Export CSV
|
||||
$collected | Export-Csv -LiteralPath $OutputCsv -NoTypeInformation -Encoding UTF8
|
||||
Write-Info ("Wrote collected CSV -> {0}" -f (Resolve-Path -LiteralPath $OutputCsv))
|
||||
|
||||
# Step 5: Summaries
|
||||
if ($ApplyMissing) {
|
||||
$updated = ($applySummary | Where-Object { $_.Action -eq 'Set' }).Count
|
||||
$skipped = ($applySummary | Where-Object { $_.Action -like 'Skip*' }).Count
|
||||
$failed = ($applySummary | Where-Object { $_.Action -eq 'Failed' }).Count
|
||||
Write-Info ("ApplyMissing summary: Updated={0} Skipped={1} Failed={2}" -f $updated, $skipped, $failed)
|
||||
}
|
||||
|
||||
# Emit objects (final collected set)
|
||||
return $collected
|
||||
@@ -1,100 +0,0 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Produce an incremental PR CSV containing rows present in a newer full export but absent from a baseline export.
|
||||
|
||||
.DESCRIPTION
|
||||
Compares two previously generated sorted PR CSV files (same schema). Any row whose key column value
|
||||
(defaults to 'Number') does not exist in the baseline file is emitted to a new incremental CSV, preserving
|
||||
the original column order. If no new rows are found, an empty CSV (with headers when determinable) is written.
|
||||
|
||||
.PARAMETER BaseCsv
|
||||
Path to the baseline (earlier) PR CSV.
|
||||
|
||||
.PARAMETER AllCsv
|
||||
Path to the newer full PR CSV containing superset (or equal set) of rows.
|
||||
|
||||
.PARAMETER OutCsv
|
||||
Path to write the incremental CSV containing only new rows.
|
||||
|
||||
.PARAMETER Key
|
||||
Column name used as unique identifier (defaults to 'Number'). Must exist in both CSVs.
|
||||
|
||||
.EXAMPLE
|
||||
pwsh ./diff_prs.ps1 -BaseCsv sorted_prs_prev.csv -AllCsv sorted_prs.csv -OutCsv sorted_prs_incremental.csv
|
||||
|
||||
.NOTES
|
||||
Requires: PowerShell 7+, both CSVs with identical column schemas.
|
||||
Exit code 0 on success (even if zero incremental rows). Throws on missing files.
|
||||
#>
|
||||
|
||||
[CmdletBinding()] param(
|
||||
[Parameter(Mandatory=$false)][string]$BaseCsv = "./sorted_prs_93_round1.csv",
|
||||
[Parameter(Mandatory=$false)][string]$AllCsv = "./sorted_prs.csv",
|
||||
[Parameter(Mandatory=$false)][string]$OutCsv = "./sorted_prs_93_incremental.csv",
|
||||
[Parameter(Mandatory=$false)][string]$Key = "Number"
|
||||
)
|
||||
|
||||
Set-StrictMode -Version Latest
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
function Write-Info($m) { Write-Host "[info] $m" -ForegroundColor Cyan }
|
||||
function Write-Warn($m) { Write-Host "[warn] $m" -ForegroundColor Yellow }
|
||||
|
||||
if (-not (Test-Path -LiteralPath $BaseCsv)) { throw "Base CSV not found: $BaseCsv" }
|
||||
if (-not (Test-Path -LiteralPath $AllCsv)) { throw "All CSV not found: $AllCsv" }
|
||||
|
||||
# Load CSVs
|
||||
$baseRows = Import-Csv -LiteralPath $BaseCsv
|
||||
$allRows = Import-Csv -LiteralPath $AllCsv
|
||||
|
||||
if (-not $baseRows) { Write-Warn "Base CSV has no rows." }
|
||||
if (-not $allRows) { Write-Warn "All CSV has no rows." }
|
||||
|
||||
# Validate key presence
|
||||
if ($baseRows -and -not ($baseRows[0].PSObject.Properties.Name -contains $Key)) { throw "Key column '$Key' not found in base CSV." }
|
||||
if ($allRows -and -not ($allRows[0].PSObject.Properties.Name -contains $Key)) { throw "Key column '$Key' not found in all CSV." }
|
||||
|
||||
# Build a set of existing keys from base
|
||||
$set = New-Object 'System.Collections.Generic.HashSet[string]'
|
||||
foreach ($row in $baseRows) {
|
||||
$val = [string]($row.$Key)
|
||||
if ($null -ne $val) { [void]$set.Add($val) }
|
||||
}
|
||||
|
||||
# Filter rows in AllCsv whose key is not in base (these are the new / incremental rows)
|
||||
$incremental = @()
|
||||
foreach ($row in $allRows) {
|
||||
$val = [string]($row.$Key)
|
||||
if (-not $set.Contains($val)) { $incremental += $row }
|
||||
}
|
||||
|
||||
# Preserve column order from the All CSV
|
||||
$columns = @()
|
||||
if ($allRows.Count -gt 0) {
|
||||
$columns = $allRows[0].PSObject.Properties.Name
|
||||
}
|
||||
|
||||
try {
|
||||
if ($incremental.Count -gt 0) {
|
||||
if ($columns.Count -gt 0) {
|
||||
$incremental | Select-Object -Property $columns | Export-Csv -LiteralPath $OutCsv -NoTypeInformation -Encoding UTF8
|
||||
} else {
|
||||
$incremental | Export-Csv -LiteralPath $OutCsv -NoTypeInformation -Encoding UTF8
|
||||
}
|
||||
} else {
|
||||
# Write an empty CSV with headers if we know them (facilitates downstream tooling expecting header row)
|
||||
if ($columns.Count -gt 0) {
|
||||
$obj = [PSCustomObject]@{}
|
||||
foreach ($c in $columns) { $obj | Add-Member -NotePropertyName $c -NotePropertyValue $null }
|
||||
$obj | Select-Object -Property $columns | Export-Csv -LiteralPath $OutCsv -NoTypeInformation -Encoding UTF8
|
||||
} else {
|
||||
'' | Out-File -LiteralPath $OutCsv -Encoding UTF8
|
||||
}
|
||||
}
|
||||
Write-Info ("Incremental rows: {0}" -f $incremental.Count)
|
||||
Write-Info ("Output: {0}" -f (Resolve-Path -LiteralPath $OutCsv))
|
||||
}
|
||||
catch {
|
||||
Write-Host "[error] Failed writing output CSV: $_" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
@@ -1,344 +0,0 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Export merged PR metadata between two commits (exclusive start, inclusive end) to JSON and CSV.
|
||||
|
||||
.DESCRIPTION
|
||||
Identifies merge/squash commits reachable from EndCommit but not StartCommit, extracts PR numbers,
|
||||
queries GitHub for metadata plus (optionally) Copilot review/comment summaries, filters labels, then
|
||||
emits a JSON artifact and a sorted CSV (first label alphabetical).
|
||||
|
||||
.PARAMETER StartCommit
|
||||
Exclusive starting commit (SHA, tag, or ref). Commits AFTER this one are considered.
|
||||
|
||||
.PARAMETER EndCommit
|
||||
Inclusive ending commit (SHA, tag, or ref). If not provided, uses origin/<Branch> when Branch is set; otherwise uses HEAD.
|
||||
|
||||
.PARAMETER Repo
|
||||
GitHub repository (owner/name). Default: microsoft/PowerToys.
|
||||
|
||||
.PARAMETER OutputCsv
|
||||
Destination CSV path. Default: sorted_prs.csv.
|
||||
|
||||
.PARAMETER OutputJson
|
||||
Destination JSON path containing raw PR objects. Default: milestone_prs.json.
|
||||
|
||||
.EXAMPLE
|
||||
pwsh ./dump-prs-since-commit.ps1 -StartCommit 0123abcd -Branch stable
|
||||
|
||||
.EXAMPLE
|
||||
pwsh ./dump-prs-since-commit.ps1 -StartCommit 0123abcd -EndCommit 89ef7654 -OutputCsv delta.csv
|
||||
|
||||
.NOTES
|
||||
Requires: git, gh (authenticated). No Set-StrictMode to keep parity with existing release scripts.
|
||||
#>
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true)][string]$StartCommit, # exclusive start (commits AFTER this one)
|
||||
[string]$EndCommit,
|
||||
[string]$Branch,
|
||||
[string]$Repo = "microsoft/PowerToys",
|
||||
[string]$OutputDir,
|
||||
[string]$OutputCsv = "sorted_prs.csv",
|
||||
[string]$OutputJson = "milestone_prs.json"
|
||||
)
|
||||
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Dump merged PR information whose merge commits are reachable from EndCommit but not from StartCommit.
|
||||
.DESCRIPTION
|
||||
Uses git rev-list to compute commits in the (StartCommit, EndCommit] range, extracts PR numbers from merge commit messages,
|
||||
queries GitHub (gh CLI) for details, then outputs a CSV.
|
||||
|
||||
PR merge commit messages in PowerToys generally contain patterns like:
|
||||
Merge pull request #12345 from ...
|
||||
|
||||
.EXAMPLE
|
||||
pwsh ./dump-prs-since-commit.ps1 -StartCommit 0123abcd -Branch stable
|
||||
|
||||
.EXAMPLE
|
||||
pwsh ./dump-prs-since-commit.ps1 -StartCommit 0123abcd -EndCommit 89ef7654 -OutputCsv changes.csv
|
||||
|
||||
.NOTES
|
||||
Requires: gh CLI authenticated; git available in working directory (must be inside PowerToys repo clone).
|
||||
CopilotSummary behavior:
|
||||
- Attempts to locate the latest GitHub Copilot authored review (preferred).
|
||||
- If no review is found, lazily fetches PR comments to look for a Copilot-authored comment.
|
||||
- Normalizes whitespace and strips newlines. Empty when no Copilot activity detected.
|
||||
- Run with -Verbose to see whether the summary came from a 'review' or 'comment' source.
|
||||
#>
|
||||
|
||||
function Write-Info($msg) { Write-Host $msg -ForegroundColor Cyan }
|
||||
function Write-Warn($msg) { Write-Host $msg -ForegroundColor Yellow }
|
||||
function Write-Err($msg) { Write-Host $msg -ForegroundColor Red }
|
||||
function Write-DebugMsg($msg) { if ($PSBoundParameters.ContainsKey('Verbose') -or $VerbosePreference -eq 'Continue') { Write-Host "[VERBOSE] $msg" -ForegroundColor DarkGray } }
|
||||
|
||||
# Load member list from Generated Files/ReleaseNotes/MemberList.md (internal team - no thanks needed)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
|
||||
$repoRoot = Resolve-Path (Join-Path $scriptDir "..\..\..\..")
|
||||
$defaultMemberListPath = Join-Path $repoRoot "Generated Files\ReleaseNotes\MemberList.md"
|
||||
$memberListPath = $defaultMemberListPath
|
||||
if ($OutputDir) {
|
||||
$memberListFromOutputDir = Join-Path $OutputDir "MemberList.md"
|
||||
if (Test-Path $memberListFromOutputDir) {
|
||||
$memberListPath = $memberListFromOutputDir
|
||||
}
|
||||
}
|
||||
$memberList = @()
|
||||
if (Test-Path $memberListPath) {
|
||||
$memberListContent = Get-Content $memberListPath -Raw
|
||||
# Extract usernames - skip markdown code fence lines, get all non-empty lines
|
||||
$memberList = ($memberListContent -split "`n") | Where-Object { $_ -notmatch '^\s*```' -and $_.Trim() -ne '' } | ForEach-Object { $_.Trim() }
|
||||
if (-not $memberList -or $memberList.Count -eq 0) {
|
||||
Write-Err "MemberList.md is empty at $memberListPath"
|
||||
exit 1
|
||||
}
|
||||
Write-DebugMsg "Loaded $($memberList.Count) members from MemberList.md"
|
||||
} else {
|
||||
Write-Err "MemberList.md not found at $memberListPath"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Validate we are in a git repo
|
||||
#if (-not (Test-Path .git)) {
|
||||
# Write-Err "Current directory does not appear to be the root of a git repository."
|
||||
# exit 1
|
||||
#}
|
||||
|
||||
# Resolve output directory (if specified)
|
||||
if ($OutputDir) {
|
||||
if (-not (Test-Path $OutputDir)) {
|
||||
New-Item -ItemType Directory -Path $OutputDir -Force | Out-Null
|
||||
}
|
||||
if (-not [System.IO.Path]::IsPathRooted($OutputCsv)) {
|
||||
$OutputCsv = Join-Path $OutputDir $OutputCsv
|
||||
}
|
||||
if (-not [System.IO.Path]::IsPathRooted($OutputJson)) {
|
||||
$OutputJson = Join-Path $OutputDir $OutputJson
|
||||
}
|
||||
}
|
||||
|
||||
# Resolve commits
|
||||
try {
|
||||
if ($Branch) {
|
||||
Write-Info "Fetching latest '$Branch' from origin (with tags)..."
|
||||
git fetch origin $Branch --tags | Out-Null
|
||||
if ($LASTEXITCODE -ne 0) { throw "git fetch origin $Branch --tags failed" }
|
||||
}
|
||||
|
||||
$startSha = (git rev-parse --verify $StartCommit) 2>$null
|
||||
if (-not $startSha) { throw "StartCommit '$StartCommit' not found" }
|
||||
if ($Branch) {
|
||||
$branchRef = $Branch
|
||||
$branchSha = (git rev-parse --verify $branchRef) 2>$null
|
||||
if (-not $branchSha) {
|
||||
$branchRef = "origin/$Branch"
|
||||
$branchSha = (git rev-parse --verify $branchRef) 2>$null
|
||||
}
|
||||
if (-not $branchSha) { throw "Branch '$Branch' not found" }
|
||||
if (-not $PSBoundParameters.ContainsKey('EndCommit') -or [string]::IsNullOrWhiteSpace($EndCommit)) {
|
||||
$EndCommit = $branchRef
|
||||
}
|
||||
}
|
||||
if (-not $PSBoundParameters.ContainsKey('EndCommit') -or [string]::IsNullOrWhiteSpace($EndCommit)) {
|
||||
$EndCommit = "HEAD"
|
||||
}
|
||||
$endSha = (git rev-parse --verify $EndCommit) 2>$null
|
||||
if (-not $endSha) { throw "EndCommit '$EndCommit' not found" }
|
||||
}
|
||||
catch {
|
||||
Write-Err $_
|
||||
exit 1
|
||||
}
|
||||
|
||||
Write-Info "Collecting commits between $startSha..$endSha (excluding start, including end)."
|
||||
# Get list of commits reachable from end but not from start.
|
||||
# IMPORTANT: In PowerShell, the .. operator creates a numeric/char range. If $startSha and $endSha look like hex strings,
|
||||
# `$startSha..$endSha` must be passed as a single string argument.
|
||||
$rangeArg = "$startSha..$endSha"
|
||||
$commitList = git rev-list $rangeArg
|
||||
|
||||
# Normalize list (filter out empty strings)
|
||||
$normalizedCommits = $commitList | Where-Object { $_ -and $_.Trim() -ne '' }
|
||||
$commitCount = ($normalizedCommits | Measure-Object).Count
|
||||
Write-DebugMsg ("Raw commitList length (including blanks): {0}" -f (($commitList | Measure-Object).Count))
|
||||
Write-DebugMsg ("Normalized commit count: {0}" -f $commitCount)
|
||||
if ($commitCount -eq 0) {
|
||||
Write-Warn "No commits found in specified range ($startSha..$endSha)."; exit 0
|
||||
}
|
||||
Write-DebugMsg ("First 5 commits: {0}" -f (($normalizedCommits | Select-Object -First 5) -join ', '))
|
||||
|
||||
<#
|
||||
Extract PR numbers from commits.
|
||||
Patterns handled:
|
||||
1. Merge commits: 'Merge pull request #12345 from ...'
|
||||
2. Squash commits: 'Some feature change (#12345)' (GitHub default squash format)
|
||||
We collect both. If a commit matches both (unlikely), it's deduped later.
|
||||
#>
|
||||
# Extract PR numbers from merge or squash commits
|
||||
$mergeCommits = @()
|
||||
foreach ($c in $normalizedCommits) {
|
||||
$subject = git show -s --format=%s $c
|
||||
$matched = $false
|
||||
# Pattern 1: Traditional merge commit
|
||||
if ($subject -match 'Merge pull request #([0-9]+) ') {
|
||||
$prNumber = [int]$matches[1]
|
||||
$mergeCommits += [PSCustomObject]@{ Sha = $c; Pr = $prNumber; Subject = $subject; Pattern = 'merge' }
|
||||
Write-DebugMsg "Matched merge PR #$prNumber in commit $c"
|
||||
$matched = $true
|
||||
}
|
||||
# Pattern 2: Squash merge subject line with ' (#12345)' at end (allow possible whitespace before paren)
|
||||
if ($subject -match '\(#([0-9]+)\)$') {
|
||||
$prNumber2 = [int]$matches[1]
|
||||
# Avoid duplicate object if pattern 1 already captured same number for same commit
|
||||
if (-not ($mergeCommits | Where-Object { $_.Sha -eq $c -and $_.Pr -eq $prNumber2 })) {
|
||||
$mergeCommits += [PSCustomObject]@{ Sha = $c; Pr = $prNumber2; Subject = $subject; Pattern = 'squash' }
|
||||
Write-DebugMsg "Matched squash PR #$prNumber2 in commit $c"
|
||||
}
|
||||
$matched = $true
|
||||
}
|
||||
if (-not $matched) {
|
||||
Write-DebugMsg "No PR pattern in commit $c : $subject"
|
||||
}
|
||||
}
|
||||
|
||||
if (-not $mergeCommits -or $mergeCommits.Count -eq 0) {
|
||||
Write-Warn "No merge commits with PR numbers found in range."; exit 0
|
||||
}
|
||||
|
||||
# Deduplicate PR numbers (in case of revert or merges across branches)
|
||||
$prNumbers = $mergeCommits | Select-Object -ExpandProperty Pr -Unique | Sort-Object
|
||||
Write-Info ("Found {0} unique PRs: {1}" -f $prNumbers.Count, ($prNumbers -join ', '))
|
||||
Write-DebugMsg ("Total merge commits examined: {0}" -f $mergeCommits.Count)
|
||||
|
||||
# Query GitHub for each PR
|
||||
$prDetails = @()
|
||||
function Get-CopilotSummaryFromPrJson {
|
||||
param(
|
||||
[Parameter(Mandatory=$true)]$PrJson,
|
||||
[switch]$VerboseMode
|
||||
)
|
||||
# Returns a hashtable with Summary and Source keys.
|
||||
$result = @{ Summary = ""; Source = "" }
|
||||
if (-not $PrJson) { return $result }
|
||||
|
||||
$candidateAuthors = @(
|
||||
'github-copilot[bot]', 'github-copilot', 'copilot'
|
||||
)
|
||||
|
||||
# 1. Reviews (preferred) – pick the LONGEST valid Copilot body, not the most recent
|
||||
$reviews = $PrJson.reviews
|
||||
if ($reviews) {
|
||||
$copilotReviews = $reviews | Where-Object {
|
||||
($candidateAuthors -contains $_.author.login -or $_.author.login -like '*copilot*') -and $_.body -and $_.body.Trim() -ne ''
|
||||
}
|
||||
if ($copilotReviews) {
|
||||
$longest = $copilotReviews | Sort-Object { $_.body.Length } -Descending | Select-Object -First 1
|
||||
if ($longest) {
|
||||
$body = $longest.body
|
||||
$norm = ($body -replace "`r", '') -replace "`n", ' '
|
||||
$norm = $norm -replace '\s+', ' '
|
||||
$result.Summary = $norm
|
||||
$result.Source = 'review'
|
||||
if ($VerboseMode) { Write-DebugMsg "Selected Copilot review length=$($body.Length) (longest)." }
|
||||
return $result
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# 2. Comments fallback (some repos surface Copilot summaries as PR comments rather than review objects)
|
||||
if ($null -eq $PrJson.comments) {
|
||||
try {
|
||||
# Lazy fetch comments only if needed
|
||||
$commentsJson = gh pr view $PrJson.number --repo $Repo --json comments 2>$null | ConvertFrom-Json
|
||||
if ($commentsJson -and $commentsJson.comments) {
|
||||
$PrJson | Add-Member -NotePropertyName comments -NotePropertyValue $commentsJson.comments -Force
|
||||
}
|
||||
} catch {
|
||||
if ($VerboseMode) { Write-DebugMsg "Failed to fetch comments for PR #$($PrJson.number): $_" }
|
||||
}
|
||||
}
|
||||
if ($PrJson.comments) {
|
||||
$copilotComments = $PrJson.comments | Where-Object {
|
||||
($candidateAuthors -contains $_.author.login -or $_.author.login -like '*copilot*') -and $_.body -and $_.body.Trim() -ne ''
|
||||
}
|
||||
if ($copilotComments) {
|
||||
$longestC = $copilotComments | Sort-Object { $_.body.Length } -Descending | Select-Object -First 1
|
||||
if ($longestC) {
|
||||
$body = $longestC.body
|
||||
$norm = ($body -replace "`r", '') -replace "`n", ' '
|
||||
$norm = $norm -replace '\s+', ' '
|
||||
$result.Summary = $norm
|
||||
$result.Source = 'comment'
|
||||
if ($VerboseMode) { Write-DebugMsg "Selected Copilot comment length=$($body.Length) (longest)." }
|
||||
return $result
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return $result
|
||||
}
|
||||
|
||||
foreach ($pr in $prNumbers) {
|
||||
Write-Info "Fetching PR #$pr ..."
|
||||
try {
|
||||
# Include comments only if Verbose asked; if not, we lazily pull when reviews are missing
|
||||
$fields = 'number,title,labels,author,url,body,reviews'
|
||||
if ($PSBoundParameters.ContainsKey('Verbose')) { $fields += ',comments' }
|
||||
$json = gh pr view $pr --repo $Repo --json $fields 2>$null | ConvertFrom-Json
|
||||
if ($null -eq $json) { throw "Empty response" }
|
||||
|
||||
$copilot = Get-CopilotSummaryFromPrJson -PrJson $json -VerboseMode:($PSBoundParameters.ContainsKey('Verbose'))
|
||||
if ($copilot.Summary -and $copilot.Source -and $PSBoundParameters.ContainsKey('Verbose')) {
|
||||
Write-DebugMsg "Copilot summary source=$($copilot.Source) chars=$($copilot.Summary.Length)"
|
||||
} elseif (-not $copilot.Summary) {
|
||||
Write-DebugMsg "No Copilot summary found for PR #$pr"
|
||||
}
|
||||
|
||||
# Filter labels
|
||||
$filteredLabels = $json.labels | Where-Object {
|
||||
($_.name -like "Product-*") -or
|
||||
($_.name -like "Area-*") -or
|
||||
($_.name -like "GitHub*") -or
|
||||
($_.name -like "*Plugin") -or
|
||||
($_.name -like "Issue-*")
|
||||
}
|
||||
$labelNames = ($filteredLabels | ForEach-Object { $_.name }) -join ", "
|
||||
|
||||
$bodyValue = if ($json.body) { ($json.body -replace "`r", '') -replace "`n", ' ' } else { '' }
|
||||
$bodyValue = $bodyValue -replace '\s+', ' '
|
||||
|
||||
# Determine if author needs thanks (not in member list)
|
||||
$authorLogin = $json.author.login
|
||||
$needThanks = $true
|
||||
if ($memberList.Count -gt 0 -and $authorLogin) {
|
||||
$needThanks = -not ($memberList -contains $authorLogin)
|
||||
}
|
||||
|
||||
$prDetails += [PSCustomObject]@{
|
||||
Id = $json.number
|
||||
Title = $json.title
|
||||
Labels = $labelNames
|
||||
Author = $authorLogin
|
||||
Url = $json.url
|
||||
Body = $bodyValue
|
||||
CopilotSummary = $copilot.Summary
|
||||
NeedThanks = $needThanks
|
||||
}
|
||||
}
|
||||
catch {
|
||||
$err = $_
|
||||
Write-Warn ("Failed to fetch PR #{0}: {1}" -f $pr, $err)
|
||||
}
|
||||
}
|
||||
|
||||
if (-not $prDetails) { Write-Warn "No PR details fetched."; exit 0 }
|
||||
|
||||
# Sort by Labels like original script (first label alphabetical)
|
||||
$sorted = $prDetails | Sort-Object { ($_.Labels -split ',')[0] }
|
||||
|
||||
# Output JSON raw (optional)
|
||||
$sorted | ConvertTo-Json -Depth 6 | Out-File -Encoding UTF8 $OutputJson
|
||||
|
||||
Write-Info "Saving CSV to $OutputCsv ..."
|
||||
$sorted | Export-Csv $OutputCsv -NoTypeInformation
|
||||
Write-Host "✅ Done. Generated $($prDetails.Count) PR rows." -ForegroundColor Green
|
||||
@@ -1,80 +0,0 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Find a commit on a branch that has the same subject line as a reference commit.
|
||||
|
||||
.DESCRIPTION
|
||||
Given a commit SHA (often from a release tag) and a branch name, this script
|
||||
resolves the reference commit's subject, then searches the branch history for
|
||||
commits with the exact same subject line. Useful when the release tag commit
|
||||
is not reachable from your current branch history.
|
||||
|
||||
.PARAMETER Commit
|
||||
The reference commit SHA or ref (e.g., v0.96.1 or a full SHA).
|
||||
|
||||
.PARAMETER Branch
|
||||
The branch to search (e.g., stable or main). Defaults to stable.
|
||||
|
||||
.PARAMETER RepoPath
|
||||
Path to the local repo. Defaults to current directory.
|
||||
|
||||
.EXAMPLE
|
||||
pwsh ./find-commit-by-title.ps1 -Commit b62f6421845f7e5c92b8186868d98f46720db442 -Branch stable
|
||||
#>
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true)][string]$Commit,
|
||||
[string]$Branch = "stable",
|
||||
[string]$RepoPath = "."
|
||||
)
|
||||
|
||||
function Write-Info($msg) { Write-Host $msg -ForegroundColor Cyan }
|
||||
function Write-Err($msg) { Write-Host $msg -ForegroundColor Red }
|
||||
|
||||
Push-Location $RepoPath
|
||||
try {
|
||||
Write-Info "Fetching latest '$Branch' from origin (with tags)..."
|
||||
git fetch origin $Branch --tags | Out-Null
|
||||
if ($LASTEXITCODE -ne 0) { throw "git fetch origin $Branch --tags failed" }
|
||||
|
||||
$commitSha = (git rev-parse --verify $Commit) 2>$null
|
||||
if (-not $commitSha) { throw "Commit '$Commit' not found" }
|
||||
|
||||
$subject = (git show -s --format=%s $commitSha) 2>$null
|
||||
if (-not $subject) { throw "Unable to read subject for '$commitSha'" }
|
||||
|
||||
$branchRef = $Branch
|
||||
$branchSha = (git rev-parse --verify $branchRef) 2>$null
|
||||
if (-not $branchSha) {
|
||||
$branchRef = "origin/$Branch"
|
||||
$branchSha = (git rev-parse --verify $branchRef) 2>$null
|
||||
}
|
||||
if (-not $branchSha) { throw "Branch '$Branch' not found" }
|
||||
|
||||
Write-Info "Reference commit: $commitSha"
|
||||
Write-Info "Reference title: $subject"
|
||||
Write-Info "Searching branch: $branchRef"
|
||||
|
||||
$matches = git log $branchRef --format="%H|%s" | Where-Object { $_ -match '\|' }
|
||||
$results = @()
|
||||
foreach ($line in $matches) {
|
||||
$parts = $line -split '\|', 2
|
||||
if ($parts.Count -eq 2 -and $parts[1] -eq $subject) {
|
||||
$results += [PSCustomObject]@{ Sha = $parts[0]; Title = $parts[1] }
|
||||
}
|
||||
}
|
||||
|
||||
if (-not $results -or $results.Count -eq 0) {
|
||||
Write-Info "No matching commit found on $branchRef for the given title."
|
||||
exit 0
|
||||
}
|
||||
|
||||
Write-Info ("Found {0} matching commit(s):" -f $results.Count)
|
||||
$results | ForEach-Object { Write-Host ("{0} {1}" -f $_.Sha, $_.Title) }
|
||||
}
|
||||
catch {
|
||||
Write-Err $_
|
||||
exit 1
|
||||
}
|
||||
finally {
|
||||
Pop-Location
|
||||
}
|
||||
@@ -1,85 +0,0 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Group PR rows by their Labels column and emit per-label CSV files.
|
||||
|
||||
.DESCRIPTION
|
||||
Reads a milestone PR CSV (usually produced by dump-prs-information / dump-prs-since-commit scripts),
|
||||
splits rows by label list, normalizes/sorts individual labels, and writes one CSV per unique label combination.
|
||||
Each output preserves the original row ordering within that subset and column order from the source.
|
||||
|
||||
.PARAMETER CsvPath
|
||||
Input CSV containing PR rows with a 'Labels' column (comma-separated list).
|
||||
|
||||
.PARAMETER OutDir
|
||||
Output directory to place grouped CSVs (created if missing). Default: 'grouped_csv'.
|
||||
|
||||
.NOTES
|
||||
Label combinations are joined using ' | ' when multiple labels present. Filenames are sanitized (invalid characters,
|
||||
whitespace collapsed) and truncated to <= 120 characters.
|
||||
#>
|
||||
param(
|
||||
[string]$CsvPath = "sorted_prs.csv",
|
||||
[string]$OutDir = "grouped_csv"
|
||||
)
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
function Write-Info($msg) { Write-Host "[info] $msg" -ForegroundColor Cyan }
|
||||
function Write-Warn($msg) { Write-Host "[warn] $msg" -ForegroundColor Yellow }
|
||||
|
||||
if (-not (Test-Path -LiteralPath $CsvPath)) { throw "CSV not found: $CsvPath" }
|
||||
|
||||
Write-Info "Reading CSV: $CsvPath"
|
||||
$rows = Import-Csv -LiteralPath $CsvPath
|
||||
Write-Info ("Loaded {0} rows" -f $rows.Count)
|
||||
|
||||
function ConvertTo-SafeFileName {
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory=$true)][string]$Name
|
||||
)
|
||||
if ([string]::IsNullOrWhiteSpace($Name)) { return 'Unnamed' }
|
||||
$s = $Name -replace '[<>:"/\\|?*]', '-' # invalid path chars
|
||||
$s = $s -replace '\s+', '-' # spaces to dashes
|
||||
$s = $s -replace '-{2,}', '-' # collapse dashes
|
||||
$s = $s.Trim('-')
|
||||
if ($s.Length -gt 120) { $s = $s.Substring(0,120).Trim('-') }
|
||||
if ([string]::IsNullOrWhiteSpace($s)) { return 'Unnamed' }
|
||||
return $s
|
||||
}
|
||||
|
||||
# Build groups keyed by normalized, sorted label combinations. Preserve original CSV row order.
|
||||
$groups = @{}
|
||||
foreach ($row in $rows) {
|
||||
$labelsRaw = $row.Labels
|
||||
if ([string]::IsNullOrWhiteSpace($labelsRaw)) {
|
||||
$labelParts = @('Unlabeled')
|
||||
} else {
|
||||
$parts = $labelsRaw -split ',' | ForEach-Object { $_.Trim() } | Where-Object { $_ }
|
||||
if (-not $parts -or $parts.Count -eq 0) { $labelParts = @('Unlabeled') }
|
||||
else { $labelParts = $parts | Sort-Object }
|
||||
}
|
||||
|
||||
$key = ($labelParts -join ' | ')
|
||||
if (-not $groups.ContainsKey($key)) { $groups[$key] = New-Object System.Collections.ArrayList }
|
||||
[void]$groups[$key].Add($row)
|
||||
}
|
||||
|
||||
if (-not (Test-Path -LiteralPath $OutDir)) {
|
||||
Write-Info "Creating output directory: $OutDir"
|
||||
New-Item -ItemType Directory -Path $OutDir | Out-Null
|
||||
}
|
||||
|
||||
Write-Info ("Generating {0} grouped CSV file(s) into: {1}" -f $groups.Count, $OutDir)
|
||||
|
||||
foreach ($key in $groups.Keys) {
|
||||
$labelParts = if ($key -eq 'Unlabeled') { @('Unlabeled') } else { $key -split '\s\|\s' }
|
||||
$safeName = ($labelParts | ForEach-Object { ConvertTo-SafeFileName -Name $_ }) -join '-'
|
||||
$filePath = Join-Path $OutDir ("$safeName.csv")
|
||||
|
||||
# Keep same columns and order
|
||||
$groups[$key] | Export-Csv -LiteralPath $filePath -NoTypeInformation -Encoding UTF8
|
||||
}
|
||||
|
||||
Write-Info "Done. Sample output files:"
|
||||
Get-ChildItem -LiteralPath $OutDir | Select-Object -First 10 Name | Format-Table -HideTableHeaders
|
||||
@@ -1,19 +0,0 @@
|
||||
name: Automatic New Issue Deduplication
|
||||
on:
|
||||
issues:
|
||||
types: [opened, reopened]
|
||||
permissions:
|
||||
models: read
|
||||
issues: write
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.issue.number }}
|
||||
cancel-in-progress: true
|
||||
jobs:
|
||||
deduplicate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Run Deduplicate Action
|
||||
uses: pelikhan/action-genai-issue-dedup@v0
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
label_as_duplicate: true
|
||||
.github/workflows/dependency-review.yml
@@ -1,26 +0,0 @@
|
||||
# Dependency Review Action
|
||||
#
|
||||
# This Action will scan dependency manifest files that change as part of a Pull Request,
|
||||
# surfacing known-vulnerable versions of the packages declared or updated in the PR.
|
||||
# Once installed, if the workflow run is marked as required,
|
||||
# PRs introducing known-vulnerable packages will be blocked from merging.
|
||||
#
|
||||
# As recommended by Microsoft's security guidelines (https://docs.opensource.microsoft.com/security/tsg/actions/#requirements-for-security-hardening-your-own-github-actions),
|
||||
# 3rd-party actions should be pinned to a specific commit hash to prevent supply chain attacks.
|
||||
# This update aligns with best practices; 1st/2nd-party actions are not required to use hash pinning.
|
||||
#
|
||||
# Source repository: https://github.com/actions/dependency-review-action
|
||||
name: 'Dependency Review'
|
||||
on: [pull_request]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
dependency-review:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: 'Checkout Repository'
|
||||
uses: actions/checkout@v6
|
||||
- name: 'Dependency Review'
|
||||
uses: actions/dependency-review-action@v4
|
||||
@@ -1,38 +0,0 @@
|
||||
name: Manual Batch Issue Deduplication
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
issue_numbers:
|
||||
description: "JSON array of issue numbers to deduplicate (e.g. [101,102,103])"
|
||||
required: true
|
||||
since:
|
||||
description: "Only compare against issues created after this date (ISO 8601, e.g. 2019-05-05T00:00:00Z)"
|
||||
required: false
|
||||
default: "2019-05-05T00:00:00Z"
|
||||
label_as_duplicate:
|
||||
description: "Apply duplicate label if duplicates are found (true/false)"
|
||||
required: false
|
||||
default: "true"
|
||||
|
||||
permissions:
|
||||
models: read
|
||||
issues: write
|
||||
|
||||
jobs:
|
||||
deduplicate:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
issue: ${{ fromJson(github.event.inputs.issue_numbers) }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Run GenAI Issue Deduplicator
|
||||
uses: pelikhan/action-genai-issue-dedup@v0
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
github_issue: ${{ matrix.issue }}
|
||||
label_as_duplicate: ${{ github.event.inputs.label_as_duplicate }}
|
||||
|
||||
.github/workflows/msstore-submissions.yml
@@ -17,7 +17,7 @@ jobs:
|
||||
steps:
|
||||
- name: BODGY - Set up Gnome Keyring for future Cert Auth
|
||||
run: |-
|
||||
sudo apt-get update && sudo apt-get install -y gnome-keyring
|
||||
sudo apt-get install -y gnome-keyring
|
||||
export $(dbus-launch --sh-syntax)
|
||||
export $(echo 'anypass_just_to_unlock' | gnome-keyring-daemon --unlock)
|
||||
export $(echo 'anypass_just_to_unlock' | gnome-keyring-daemon --start --components=gpg,pkcs11,secrets,ssh)
|
||||
@@ -39,11 +39,6 @@ jobs:
|
||||
echo powerToysInstallerX64Url=$(jq -n "$powerToysSetup" | jq -r '[.[]|select(.name | contains("x64"))][0].browser_download_url') >> $GITHUB_OUTPUT
|
||||
echo powerToysInstallerArm64Url=$(jq -n "$powerToysSetup" | jq -r '[.[]|select(.name | contains("arm64"))][0].browser_download_url') >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Setup .NET 9.0
|
||||
uses: actions/setup-dotnet@v5
|
||||
with:
|
||||
dotnet-version: '9.0.x'
|
||||
|
||||
- uses: microsoft/setup-msstore-cli@v1
|
||||
|
||||
- name: Fetch Store Credential
|
||||
|
||||
.github/workflows/package-submissions.yml
@@ -1,4 +1,5 @@
|
||||
name: WinGet submission on release
|
||||
# based off of https://github.com/nushell/nushell/blob/main/.github/workflows/winget-submission.yml
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
@@ -8,31 +9,23 @@ on:
|
||||
jobs:
|
||||
winget:
|
||||
name: Publish winget package
|
||||
|
||||
# winget-create is only supported on Windows
|
||||
runs-on: windows-latest
|
||||
|
||||
# winget-create will read the following environment variable to access the GitHub token needed for submitting a PR
|
||||
# See https://aka.ms/winget-create-token
|
||||
env:
|
||||
WINGET_CREATE_GITHUB_TOKEN: ${{ secrets.PT_WINGET }}
|
||||
|
||||
# Only submit stable releases
|
||||
if: ${{ !github.event.release.prerelease }}
|
||||
steps:
|
||||
- name: Submit Microsoft.PowerToys package to Windows Package Manager Community Repository
|
||||
run: |
|
||||
# Get installer info from GitHub release event
|
||||
$assets = '${{ toJSON(github.event.release.assets) }}' | ConvertFrom-Json
|
||||
$x64UserInstallerUrl = $assets | Where-Object -Property name -match 'PowerToysUserSetup.*x64' | Select -ExpandProperty browser_download_url
|
||||
$x64MachineInstallerUrl = $assets | Where-Object -Property name -match 'PowerToysSetup.*x64' | Select -ExpandProperty browser_download_url
|
||||
$arm64UserInstallerUrl = $assets | Where-Object -Property name -match 'PowerToysUserSetup.*arm64' | Select -ExpandProperty browser_download_url
|
||||
$arm64MachineInstallerUrl = $assets | Where-Object -Property name -match 'PowerToysSetup.*arm64' | Select -ExpandProperty browser_download_url
|
||||
$packageVersion = (${{ toJSON(github.event.release.tag_name) }}).Trim('v')
|
||||
|
||||
# Update package using wingetcreate
|
||||
curl.exe -JLO https://aka.ms/wingetcreate/latest
|
||||
.\wingetcreate.exe update Microsoft.PowerToys `
|
||||
--version $packageVersion `
|
||||
--urls "$x64UserInstallerUrl|user" "$x64MachineInstallerUrl|machine" "$arm64UserInstallerUrl|user" "$arm64MachineInstallerUrl|machine" `
|
||||
--submit
|
||||
$wingetPackage = "Microsoft.PowerToys"
|
||||
$gitToken = "${{ secrets.PT_WINGET }}"
|
||||
|
||||
$github = Invoke-RestMethod -uri "https://api.github.com/repos/Microsoft/PowerToys/releases"
|
||||
|
||||
$targetRelease = $github | Where-Object -Property name -match 'Release'| Select -First 1
|
||||
$installerUserX64Url = $targetRelease | Select -ExpandProperty assets -First 1 | Where-Object -Property name -match 'PowerToysUserSetup.*x64' | Select -ExpandProperty browser_download_url
|
||||
$installerMachineX64Url = $targetRelease | Select -ExpandProperty assets -First 1 | Where-Object -Property name -match 'PowerToysSetup.*x64' | Select -ExpandProperty browser_download_url
|
||||
$installerUserArmUrl = $targetRelease | Select -ExpandProperty assets -First 1 | Where-Object -Property name -match 'PowerToysUserSetup.*arm64' | Select -ExpandProperty browser_download_url
|
||||
$installerMachineArmUrl = $targetRelease | Select -ExpandProperty assets -First 1 | Where-Object -Property name -match 'PowerToysSetup.*arm64' | Select -ExpandProperty browser_download_url
|
||||
$ver = $targetRelease.tag_name -ireplace '^v'
|
||||
|
||||
# getting latest wingetcreate file
|
||||
iwr https://aka.ms/wingetcreate/latest -OutFile wingetcreate.exe
|
||||
.\wingetcreate.exe update $wingetPackage -s -v $ver -u "$installerUserX64Url|user" "$installerMachineX64Url|machine" "$installerUserArmUrl|user" "$installerMachineArmUrl|machine" -t $gitToken
|
||||
|
||||
.github/workflows/spelling2.yml
@@ -34,14 +34,14 @@ name: Spell checking
|
||||
#
|
||||
# For background, see: https://github.com/check-spelling/check-spelling/wiki/Feature:-Update-with-deploy-key
|
||||
|
||||
# SARIF reporting
|
||||
# Sarif reporting
|
||||
#
|
||||
# Access to SARIF reports is generally restricted (by GitHub) to members of the repository.
|
||||
# Access to Sarif reports is generally restricted (by GitHub) to members of the repository.
|
||||
#
|
||||
# Requires enabling `security-events: write`
|
||||
# and configuring the action with `use_sarif: 1`
|
||||
#
|
||||
# For information on the feature, see: https://github.com/check-spelling/check-spelling/wiki/Feature:-SARIF-output
|
||||
# For information on the feature, see: https://github.com/check-spelling/check-spelling/wiki/Feature:-Sarif-output
|
||||
|
||||
# Minimal workflow structure:
|
||||
#
|
||||
@@ -60,23 +60,23 @@ name: Spell checking
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "**"
|
||||
- "**"
|
||||
tags-ignore:
|
||||
- "**"
|
||||
- "**"
|
||||
pull_request_target:
|
||||
branches:
|
||||
- "**"
|
||||
- "**"
|
||||
types:
|
||||
- "opened"
|
||||
- "reopened"
|
||||
- "synchronize"
|
||||
- 'opened'
|
||||
- 'reopened'
|
||||
- 'synchronize'
|
||||
issue_comment:
|
||||
types:
|
||||
- "created"
|
||||
- 'created'
|
||||
|
||||
jobs:
|
||||
spelling:
|
||||
name: Check Spelling
|
||||
name: Spell checking
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
@@ -91,50 +91,52 @@ jobs:
|
||||
# note: If you use only_check_changed_files, you do not want cancel-in-progress
|
||||
cancel-in-progress: true
|
||||
steps:
|
||||
- name: check-spelling
|
||||
id: spelling
|
||||
uses: check-spelling/check-spelling@c635c2f3f714eec2fcf27b643a1919b9a811ef2e # v0.0.25
|
||||
with:
|
||||
config: .github/actions/spell-check
|
||||
suppress_push_for_open_pull_request: ${{ github.actor != 'dependabot[bot]' && 1 }}
|
||||
checkout: true
|
||||
check_file_names: 1
|
||||
spell_check_this: microsoft/PowerToys@main
|
||||
post_comment: 0
|
||||
use_magic_file: 1
|
||||
report-timing: 1
|
||||
warnings: bad-regex,binary-file,deprecated-feature,ignored-expect-variant,large-file,limited-references,no-newline-at-eof,noisy-file,non-alpha-in-dictionary,token-is-substring,unexpected-line-ending,whitespace-in-dictionary,minified-file,unsupported-configuration,no-files-to-check,unclosed-block-ignore-begin,unclosed-block-ignore-end
experimental_apply_changes_via_bot: 1
use_sarif: 1
check_extra_dictionaries: ""
dictionary_source_prefixes: >
{
"cspell": "https://raw.githubusercontent.com/check-spelling/cspell-dicts/v20241114/dictionaries/"
}
extra_dictionaries: |
cspell:software-terms/softwareTerms.txt
cspell:cpp/stdlib-c.txt
cspell:cpp/stdlib-cpp.txt
cspell:filetypes/filetypes.txt
cspell:php/php.txt
cspell:dart/dart.txt
cspell:dotnet/dotnet.txt
cspell:powershell/powershell.txt
cspell:csharp/csharp.txt
cspell:python/python/python-lib.txt
cspell:node/node.txt
cspell:golang/go.txt
cspell:npm/npm.txt
cspell:fullstack/fullstack.txt
cspell:css/css.txt
cspell:java/java.txt
cspell:typescript/typescript.txt
cspell:html/html.txt
cspell:r/r.txt
cspell:aws/aws.txt
cspell:cpp/compiler-msvc.txt
cspell:python/common/extra.txt
cspell:scala/scala.txt
- name: check-spelling
id: spelling
uses: check-spelling/check-spelling@v0.0.22
with:
config: .github/actions/spell-check
suppress_push_for_open_pull_request: ${{ github.actor != 'dependabot[bot]' && 1 }}
checkout: true
check_file_names: 1
spell_check_this: microsoft/PowerToys@main
post_comment: 0
use_magic_file: 1
warnings: bad-regex,binary-file,deprecated-feature,ignored-expect-variant,large-file,limited-references,no-newline-at-eof,noisy-file,non-alpha-in-dictionary,token-is-substring,unexpected-line-ending,whitespace-in-dictionary,minified-file,unsupported-configuration,no-files-to-check
experimental_apply_changes_via_bot: ${{ github.repository_owner != 'microsoft' && 1 }}
use_sarif: ${{ (!github.event.pull_request || (github.event.pull_request.head.repo.full_name == github.repository)) && 1 }}
extra_dictionary_limit: 20
extra_dictionaries:
cspell:aws/aws.txt
cspell:cpp/src/compiler-clang-attributes.txt
cspell:cpp/src/compiler-msvc.txt
cspell:cpp/src/lang-keywords.txt
cspell:cpp/src/stdlib-c.txt
cspell:cpp/src/stdlib-cmath.txt
cspell:cpp/src/stdlib-cpp.txt
cspell:csharp/csharp.txt
cspell:css/dict/css.txt
cspell:django/dict/django.txt
cspell:dotnet/dict/dotnet.txt
cspell:filetypes/filetypes.txt
cspell:fullstack/dict/fullstack.txt
cspell:golang/dict/go.txt
cspell:html/dict/html.txt
cspell:java/src/java.txt
cspell:java/src/java-terms.txt
cspell:k8s/dict/k8s.txt
cspell:lorem-ipsum/dictionary.txt
cspell:monkeyc/src/monkeyc_keywords.txt
cspell:node/dict/node.txt
cspell:php/dict/php.txt
cspell:powershell/dict/powershell.txt
cspell:python/src/common/extra.txt
cspell:python/src/python/python.txt
cspell:python/src/python/python-lib.txt
cspell:scala/dict/scala.txt
cspell:software-terms/dict/softwareTerms.txt
cspell:swift/src/swift.txt
cspell:typescript/dict/typescript.txt

comment-push:
name: Report (Push)
@@ -142,17 +144,16 @@ jobs:
runs-on: ubuntu-latest
needs: spelling
permissions:
actions: read
contents: write
if: (success() || failure()) && needs.spelling.outputs.followup && github.event_name == 'push'
steps:
- name: comment
uses: check-spelling/check-spelling@c635c2f3f714eec2fcf27b643a1919b9a811ef2e # v0.0.25
with:
config: .github/actions/spell-check
checkout: true
spell_check_this: microsoft/PowerToys@main
task: ${{ needs.spelling.outputs.followup }}
- name: comment
uses: check-spelling/check-spelling@v0.0.22
with:
config: .github/actions/spell-check
checkout: true
spell_check_this: microsoft/PowerToys@main
task: ${{ needs.spelling.outputs.followup }}

comment-pr:
name: Report (PR)
@@ -160,19 +161,18 @@ jobs:
runs-on: ubuntu-latest
needs: spelling
permissions:
actions: read
contents: read
pull-requests: write
if: (success() || failure()) && needs.spelling.outputs.followup && contains(github.event_name, 'pull_request')
steps:
- name: comment
uses: check-spelling/check-spelling@c635c2f3f714eec2fcf27b643a1919b9a811ef2e # v0.0.25
with:
config: .github/actions/spell-check
checkout: true
spell_check_this: check-spelling/spell-check-this@prerelease
task: ${{ needs.spelling.outputs.followup }}
experimental_apply_changes_via_bot: ${{ github.repository_owner != 'microsoft' && 1 }}
- name: comment
uses: check-spelling/check-spelling@v0.0.22
with:
config: .github/actions/spell-check
checkout: true
spell_check_this: check-spelling/spell-check-this@prerelease
task: ${{ needs.spelling.outputs.followup }}
experimental_apply_changes_via_bot: ${{ github.repository_owner != 'microsoft' && 1 }}

update:
name: Update PR
@@ -182,19 +182,18 @@ jobs:
actions: read
runs-on: ubuntu-latest
if: ${{
github.repository_owner != 'microsoft' &&
github.event_name == 'issue_comment' &&
github.event.issue.pull_request &&
contains(github.event.comment.body, '@check-spelling-bot apply') &&
contains(github.event.comment.body, 'https://')
github.repository_owner != 'microsoft' &&
github.event_name == 'issue_comment' &&
github.event.issue.pull_request &&
contains(github.event.comment.body, '@check-spelling-bot apply')
}}
concurrency:
group: spelling-update-${{ github.event.issue.number }}
cancel-in-progress: false
steps:
- name: apply spelling updates
uses: check-spelling/check-spelling@c635c2f3f714eec2fcf27b643a1919b9a811ef2e # v0.0.25
with:
experimental_apply_changes_via_bot: ${{ github.repository_owner != 'microsoft' && 1 }}
checkout: true
ssh_key: "${{ secrets.CHECK_SPELLING }}"
- name: apply spelling updates
uses: check-spelling/check-spelling@v0.0.22
with:
experimental_apply_changes_via_bot: ${{ github.repository_owner != 'microsoft' && 1 }}
checkout: true
ssh_key: "${{ secrets.CHECK_SPELLING }}"
14 .gitignore vendored
@@ -224,7 +224,7 @@ ClientBin/
*.publishsettings
orleans.codegen.cs

# Including strong name files can present a security risk
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk

@@ -322,7 +322,7 @@ ImageResizer/tools/**
# OpenCover UI analysis results
OpenCover/

# Azure Stream Analytics local run output
# Azure Stream Analytics local run output
ASALocalRun/

# MSBuild Binary and Structured Log
@@ -331,7 +331,7 @@ ASALocalRun/
# NVidia Nsight GPU debugger configuration file
*.nvuser

# MFractors (Xamarin productivity tool) working folder
# MFractors (Xamarin productivity tool) working folder
.mfractor/

# Temp build files
@@ -349,13 +349,7 @@ src/common/Telemetry/*.etl
/src/modules/powerrename/ui/RCb24464

# Generated installer file for Monaco source files.
/installer/PowerToysSetupVNext/MonacoSRC.wxs
/installer/PowerToysSetup/MonacoSRC.wxs

# MSBuildCache
/MSBuildCacheLogs/

# PowerToys Settings generated search index (legacy location) and obj outputs
/src/settings-ui/Settings.UI/Assets/Settings/search.index.json

# PowerToysInstaller Build Temp Files
installer/*/*.wxs.bk

3 .gitmodules vendored
@@ -4,3 +4,6 @@
[submodule "deps/expected-lite"]
path = deps/expected-lite
url = https://github.com/martinmoene/expected-lite.git
[submodule "deps/cziplib"]
path = deps/cziplib
url = https://github.com/kuba--/zip.git
Binary file not shown.
52 .pipelines/ESRPSigning_abstracted_utils_dll.json Normal file
@@ -0,0 +1,52 @@
{
"Version": "1.0.0",
"UseMinimatch": false,
"SignBatches": [
{
"MatchedPath": [
"PowerToys.HostsUILib.dll",
"PowerToys.EnvironmentVariablesUILib.dll",
"PowerToys.RegistryPreviewUILib.dll"
],
"SigningInfo": {
"Operations": [
{
"KeyCode": "CP-230012",
"OperationSetCode": "SigntoolSign",
"Parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"ToolName": "sign",
"ToolVersion": "1.0"
},
{
"KeyCode": "CP-230012",
"OperationSetCode": "SigntoolVerify",
"Parameters": [],
"ToolName": "sign",
"ToolVersion": "1.0"
}
]
}
}
]
}
@@ -3,260 +3,216 @@
|
||||
"UseMinimatch": false,
|
||||
"SignBatches": [
|
||||
{
|
||||
"MatchedPath": [
|
||||
"*.resources.dll",
|
||||
"WinUI3Apps\\Assets\\Settings\\Scripts\\*.ps1",
|
||||
"MatchedPath": [
|
||||
"*.resources.dll",
|
||||
|
||||
"WinUI3Apps\\Assets\\Settings\\Scripts\\*.ps1",
|
||||
|
||||
"PowerToys.ActionRunner.exe",
|
||||
"PowerToys.Update.exe",
|
||||
"PowerToys.BackgroundActivatorDLL.dll",
|
||||
"PowerToys.ActionRunner.exe",
|
||||
"PowerToys.Update.exe",
|
||||
"PowerToys.BackgroundActivatorDLL.dll",
|
||||
"Notifications.dll",
|
||||
"os-detection.dll",
|
||||
"PowerToys.exe",
|
||||
"PowerToys.FilePreviewCommon.dll",
|
||||
"PowerToys.Interop.dll",
|
||||
"Tools\\PowerToys.BugReportTool.exe",
|
||||
"WebcamReportTool\\PowerToys.WebcamReportTool.exe",
|
||||
"StylesReportTool\\PowerToys.StylesReportTool.exe",
|
||||
"Telemetry.dll",
|
||||
"PowerToys.ManagedTelemetry.dll",
|
||||
"PowerToys.ManagedCommon.dll",
|
||||
"PowerToys.Common.UI.dll",
|
||||
"PowerToys.Settings.UI.Lib.dll",
|
||||
"PowerToys.GPOWrapper.dll",
|
||||
"PowerToys.GPOWrapperProjection.dll",
|
||||
"PowerToys.AllExperiments.dll",
|
||||
|
||||
"PowerToys.exe",
|
||||
"PowerToys.FilePreviewCommon.dll",
|
||||
"PowerToys.Interop.dll",
|
||||
"Tools\\PowerToys.BugReportTool.exe",
|
||||
"StylesReportTool\\PowerToys.StylesReportTool.exe",
|
||||
"PowerToys.AlwaysOnTop.exe",
|
||||
"PowerToys.AlwaysOnTopModuleInterface.dll",
|
||||
|
||||
"CalculatorEngineCommon.dll",
|
||||
"PowerToys.ManagedTelemetry.dll",
|
||||
"PowerToys.ManagedCommon.dll",
|
||||
"PowerToys.ManagedCsWin32.dll",
|
||||
"PowerToys.Common.UI.dll",
|
||||
"PowerToys.Settings.UI.Lib.dll",
|
||||
"PowerToys.GPOWrapper.dll",
|
||||
"PowerToys.GPOWrapperProjection.dll",
|
||||
"PowerToys.AllExperiments.dll",
|
||||
"LanguageModelProvider.dll",
|
||||
"PowerToys.CmdNotFoundModuleInterface.dll",
|
||||
"PowerToys.CmdNotFound.dll",
|
||||
|
||||
"Common.Search.dll",
|
||||
"PowerToys.ColorPicker.dll",
|
||||
"PowerToys.ColorPickerUI.dll",
|
||||
"PowerToys.ColorPickerUI.exe",
|
||||
|
||||
"PowerToys.AlwaysOnTop.exe",
|
||||
"PowerToys.AlwaysOnTopModuleInterface.dll",
|
||||
"PowerToys.CropAndLockModuleInterface.dll",
|
||||
"PowerToys.CropAndLock.exe",
|
||||
|
||||
"PowerToys.CmdNotFoundModuleInterface.dll",
|
||||
"PowerToys.PowerOCRModuleInterface.dll",
|
||||
"PowerToys.PowerOCR.dll",
|
||||
"PowerToys.PowerOCR.exe",
|
||||
|
||||
"PowerToys.AdvancedPasteModuleInterface.dll",
|
||||
"WinUI3Apps\\PowerToys.AdvancedPaste.exe",
|
||||
"WinUI3Apps\\PowerToys.AdvancedPaste.dll",
|
||||
|
||||
"PowerToys.ColorPicker.dll",
|
||||
"PowerToys.ColorPickerUI.dll",
|
||||
"PowerToys.ColorPickerUI.exe",
|
||||
"PowerToys.AwakeModuleInterface.dll",
|
||||
"PowerToys.Awake.exe",
|
||||
"PowerToys.Awake.dll",
|
||||
|
||||
"PowerToys.CropAndLockModuleInterface.dll",
|
||||
"PowerToys.CropAndLock.exe",
|
||||
"fancyzones.dll",
|
||||
"PowerToys.FancyZonesEditor.exe",
|
||||
"PowerToys.FancyZonesEditor.dll",
|
||||
"PowerToys.FancyZonesEditorCommon.dll",
|
||||
"PowerToys.FancyZonesModuleInterface.dll",
|
||||
"PowerToys.FancyZones.exe",
|
||||
|
||||
"PowerToys.PowerOCRModuleInterface.dll",
|
||||
"PowerToys.PowerOCR.dll",
|
||||
"PowerToys.PowerOCR.exe",
|
||||
"PowerToys.GcodePreviewHandler.dll",
|
||||
"PowerToys.GcodePreviewHandler.exe",
|
||||
"PowerToys.GcodePreviewHandlerCpp.dll",
|
||||
"PowerToys.GcodeThumbnailProvider.dll",
|
||||
"PowerToys.GcodeThumbnailProvider.exe",
|
||||
"PowerToys.GcodeThumbnailProviderCpp.dll",
|
||||
"PowerToys.ManagedTelemetry.dll",
|
||||
"PowerToys.MarkdownPreviewHandler.dll",
|
||||
"PowerToys.MarkdownPreviewHandler.exe",
|
||||
"PowerToys.MarkdownPreviewHandlerCpp.dll",
|
||||
"PowerToys.MonacoPreviewHandler.dll",
|
||||
"PowerToys.MonacoPreviewHandler.exe",
|
||||
"PowerToys.MonacoPreviewHandlerCpp.dll",
|
||||
"PowerToys.PdfPreviewHandler.dll",
|
||||
"PowerToys.PdfPreviewHandler.exe",
|
||||
"PowerToys.PdfPreviewHandlerCpp.dll",
|
||||
"PowerToys.PdfThumbnailProvider.dll",
|
||||
"PowerToys.PdfThumbnailProvider.exe",
|
||||
"PowerToys.PdfThumbnailProviderCpp.dll",
|
||||
"PowerToys.powerpreview.dll",
|
||||
"PowerToys.PreviewHandlerCommon.dll",
|
||||
"PowerToys.QoiPreviewHandler.dll",
|
||||
"PowerToys.QoiPreviewHandler.exe",
|
||||
"PowerToys.QoiPreviewHandlerCpp.dll",
|
||||
"PowerToys.QoiThumbnailProvider.dll",
|
||||
"PowerToys.QoiThumbnailProvider.exe",
|
||||
"PowerToys.QoiThumbnailProviderCpp.dll",
|
||||
"PowerToys.StlThumbnailProvider.dll",
|
||||
"PowerToys.StlThumbnailProvider.exe",
|
||||
"PowerToys.StlThumbnailProviderCpp.dll",
|
||||
"PowerToys.SvgPreviewHandler.dll",
|
||||
"PowerToys.SvgPreviewHandler.exe",
|
||||
"PowerToys.SvgPreviewHandlerCpp.dll",
|
||||
"PowerToys.SvgThumbnailProvider.dll",
|
||||
"PowerToys.SvgThumbnailProvider.exe",
|
||||
"PowerToys.SvgThumbnailProviderCpp.dll",
|
||||
|
||||
"PowerToys.AdvancedPasteModuleInterface.dll",
|
||||
"WinUI3Apps\\PowerToys.AdvancedPaste.exe",
|
||||
"WinUI3Apps\\PowerToys.AdvancedPaste.dll",
|
||||
"WinUI3Apps\\PowerToys.HostsModuleInterface.dll",
|
||||
"WinUI3Apps\\PowerToys.HostsUILib.dll",
|
||||
"WinUI3Apps\\PowerToys.Hosts.dll",
|
||||
"WinUI3Apps\\PowerToys.Hosts.exe",
|
||||
|
||||
"PowerToys.AwakeModuleInterface.dll",
|
||||
"PowerToys.Awake.exe",
|
||||
"PowerToys.Awake.dll",
|
||||
"WinUI3Apps\\PowerToys.FileLocksmithLib.Interop.dll",
|
||||
"WinUI3Apps\\PowerToys.FileLocksmithExt.dll",
|
||||
"WinUI3Apps\\PowerToys.FileLocksmithUI.exe",
|
||||
"WinUI3Apps\\PowerToys.FileLocksmithUI.dll",
|
||||
"WinUI3Apps\\PowerToys.FileLocksmithContextMenu.dll",
|
||||
"FileLocksmithContextMenuPackage.msix",
|
||||
|
||||
"PowerToys.FancyZonesEditor.exe",
|
||||
"PowerToys.FancyZonesEditor.dll",
|
||||
"PowerToys.FancyZonesEditorCommon.dll",
|
||||
"PowerToys.FancyZonesModuleInterface.dll",
|
||||
"PowerToys.FancyZones.exe",
|
||||
"FancyZonesCLI.exe",
|
||||
"FancyZonesCLI.dll",
|
||||
"WinUI3Apps\\Peek.Common.dll",
|
||||
"WinUI3Apps\\Peek.FilePreviewer.dll",
|
||||
"WinUI3Apps\\Powertoys.Peek.UI.dll",
|
||||
"WinUI3Apps\\Powertoys.Peek.UI.exe",
|
||||
"WinUI3Apps\\Powertoys.Peek.dll",
|
||||
|
||||
"PowerToys.GcodePreviewHandler.dll",
|
||||
"PowerToys.GcodePreviewHandler.exe",
|
||||
"PowerToys.GcodePreviewHandlerCpp.dll",
|
||||
"PowerToys.GcodeThumbnailProvider.dll",
|
||||
"PowerToys.GcodeThumbnailProvider.exe",
|
||||
"PowerToys.GcodeThumbnailProviderCpp.dll",
|
||||
"PowerToys.BgcodePreviewHandler.dll",
|
||||
"PowerToys.BgcodePreviewHandler.exe",
|
||||
"PowerToys.BgcodePreviewHandlerCpp.dll",
|
||||
"PowerToys.BgcodeThumbnailProvider.dll",
|
||||
"PowerToys.BgcodeThumbnailProvider.exe",
|
||||
"PowerToys.BgcodeThumbnailProviderCpp.dll",
|
||||
"PowerToys.ManagedTelemetry.dll",
|
||||
"PowerToys.MarkdownPreviewHandler.dll",
|
||||
"PowerToys.MarkdownPreviewHandler.exe",
|
||||
"PowerToys.MarkdownPreviewHandlerCpp.dll",
|
||||
"PowerToys.MonacoPreviewHandler.dll",
|
||||
"PowerToys.MonacoPreviewHandler.exe",
|
||||
"PowerToys.MonacoPreviewHandlerCpp.dll",
|
||||
"PowerToys.PdfPreviewHandler.dll",
|
||||
"PowerToys.PdfPreviewHandler.exe",
|
||||
"PowerToys.PdfPreviewHandlerCpp.dll",
|
||||
"PowerToys.PdfThumbnailProvider.dll",
|
||||
"PowerToys.PdfThumbnailProvider.exe",
|
||||
"PowerToys.PdfThumbnailProviderCpp.dll",
|
||||
"PowerToys.powerpreview.dll",
|
||||
"PowerToys.PreviewHandlerCommon.dll",
|
||||
"PowerToys.QoiPreviewHandler.dll",
|
||||
"PowerToys.QoiPreviewHandler.exe",
|
||||
"PowerToys.QoiPreviewHandlerCpp.dll",
|
||||
"PowerToys.QoiThumbnailProvider.dll",
|
||||
"PowerToys.QoiThumbnailProvider.exe",
|
||||
"PowerToys.QoiThumbnailProviderCpp.dll",
|
||||
"PowerToys.StlThumbnailProvider.dll",
|
||||
"PowerToys.StlThumbnailProvider.exe",
|
||||
"PowerToys.StlThumbnailProviderCpp.dll",
|
||||
"PowerToys.SvgPreviewHandler.dll",
|
||||
"PowerToys.SvgPreviewHandler.exe",
|
||||
"PowerToys.SvgPreviewHandlerCpp.dll",
|
||||
"PowerToys.SvgThumbnailProvider.dll",
|
||||
"PowerToys.SvgThumbnailProvider.exe",
|
||||
"PowerToys.SvgThumbnailProviderCpp.dll",
|
||||
"WinUI3Apps\\PowerToys.EnvironmentVariablesModuleInterface.dll",
|
||||
"WinUI3Apps\\PowerToys.EnvironmentVariablesUILib.dll",
|
||||
"WinUI3Apps\\PowerToys.EnvironmentVariables.dll",
|
||||
"WinUI3Apps\\PowerToys.EnvironmentVariables.exe",
|
||||
|
||||
"WinUI3Apps\\PowerToys.HostsModuleInterface.dll",
|
||||
"WinUI3Apps\\PowerToys.HostsUILib.dll",
|
||||
"WinUI3Apps\\PowerToys.Hosts.dll",
|
||||
"WinUI3Apps\\PowerToys.Hosts.exe",
|
||||
"PowerToys.ImageResizer.exe",
|
||||
"PowerToys.ImageResizer.dll",
|
||||
"PowerToys.ImageResizerExt.dll",
|
||||
"PowerToys.ImageResizerContextMenu.dll",
|
||||
"ImageResizerContextMenuPackage.msix",
|
||||
|
||||
"WinUI3Apps\\PowerToys.FileLocksmithLib.Interop.dll",
|
||||
"WinUI3Apps\\PowerToys.FileLocksmithExt.dll",
|
||||
"WinUI3Apps\\PowerToys.FileLocksmithUI.exe",
|
||||
"WinUI3Apps\\PowerToys.FileLocksmithUI.dll",
|
||||
"WinUI3Apps\\PowerToys.FileLocksmithContextMenu.dll",
|
||||
"FileLocksmithContextMenuPackage.msix",
|
||||
"FileLocksmithCLI.exe",
|
||||
"PowerToys.KeyboardManager.dll",
|
||||
"KeyboardManagerEditor\\PowerToys.KeyboardManagerEditor.exe",
|
||||
"KeyboardManagerEngine\\PowerToys.KeyboardManagerEngine.exe",
|
||||
|
||||
"WinUI3Apps\\Peek.Common.dll",
|
||||
"WinUI3Apps\\Peek.FilePreviewer.dll",
|
||||
"WinUI3Apps\\Powertoys.Peek.UI.dll",
|
||||
"WinUI3Apps\\Powertoys.Peek.UI.exe",
|
||||
"WinUI3Apps\\Powertoys.Peek.dll",
|
||||
"PowerToys.Launcher.dll",
|
||||
"PowerToys.PowerLauncher.dll",
|
||||
"PowerToys.PowerLauncher.exe",
|
||||
"PowerToys.PowerLauncher.Telemetry.dll",
|
||||
"Wox.dll",
|
||||
"Wox.Infrastructure.dll",
|
||||
"Wox.Plugin.dll",
|
||||
"RunPlugins\\Calculator\\Microsoft.PowerToys.Run.Plugin.Calculator.dll",
|
||||
"RunPlugins\\Folder\\Microsoft.Plugin.Folder.dll",
|
||||
"RunPlugins\\Indexer\\Microsoft.Plugin.Indexer.dll",
|
||||
"RunPlugins\\OneNote\\Microsoft.PowerToys.Run.Plugin.OneNote.dll",
|
||||
"RunPlugins\\History\\Microsoft.PowerToys.Run.Plugin.History.dll",
|
||||
"RunPlugins\\PowerToys\\Microsoft.PowerToys.Run.Plugin.PowerToys.dll",
|
||||
"RunPlugins\\Program\\Microsoft.Plugin.Program.dll",
|
||||
"RunPlugins\\Registry\\Microsoft.PowerToys.Run.Plugin.Registry.dll",
|
||||
"RunPlugins\\WindowsSettings\\Microsoft.PowerToys.Run.Plugin.WindowsSettings.dll",
|
||||
"RunPlugins\\Shell\\Microsoft.Plugin.Shell.dll",
|
||||
"RunPlugins\\Uri\\Microsoft.Plugin.Uri.dll",
|
||||
"RunPlugins\\WindowWalker\\Microsoft.Plugin.WindowWalker.dll",
|
||||
"RunPlugins\\UnitConverter\\Community.PowerToys.Run.Plugin.UnitConverter.dll",
|
||||
"RunPlugins\\VSCodeWorkspaces\\Community.PowerToys.Run.Plugin.VSCodeWorkspaces.dll",
|
||||
"RunPlugins\\Service\\Microsoft.PowerToys.Run.Plugin.Service.dll",
|
||||
"RunPlugins\\System\\Microsoft.PowerToys.Run.Plugin.System.dll",
|
||||
"RunPlugins\\TimeDate\\Microsoft.PowerToys.Run.Plugin.TimeDate.dll",
|
||||
"RunPlugins\\ValueGenerator\\Community.PowerToys.Run.Plugin.ValueGenerator.dll",
|
||||
"RunPlugins\\WebSearch\\Community.PowerToys.Run.Plugin.WebSearch.dll",
|
||||
"RunPlugins\\WindowsTerminal\\Microsoft.PowerToys.Run.Plugin.WindowsTerminal.dll",
|
||||
|
||||
"WinUI3Apps\\PowerToys.MeasureToolModuleInterface.dll",
|
||||
"WinUI3Apps\\PowerToys.MeasureToolCore.dll",
|
||||
"WinUI3Apps\\PowerToys.MeasureToolUI.dll",
|
||||
"WinUI3Apps\\PowerToys.MeasureToolUI.exe",
|
||||
|
||||
"WinUI3Apps\\PowerToys.QuickAccess.dll",
|
||||
"WinUI3Apps\\PowerToys.QuickAccess.exe",
|
||||
"WinUI3Apps\\PowerToys.Settings.UI.Controls.dll",
|
||||
"PowerToys.FindMyMouse.dll",
|
||||
"PowerToys.MouseHighlighter.dll",
|
||||
"PowerToys.MouseJump.dll",
|
||||
"PowerToys.MousePointerCrosshairs.dll",
|
||||
"PowerToys.MouseJumpUI.dll",
|
||||
"PowerToys.MouseJumpUI.exe",
|
||||
|
||||
"WinUI3Apps\\PowerToys.EnvironmentVariablesModuleInterface.dll",
|
||||
"WinUI3Apps\\PowerToys.EnvironmentVariablesUILib.dll",
|
||||
"WinUI3Apps\\PowerToys.EnvironmentVariables.dll",
|
||||
"WinUI3Apps\\PowerToys.EnvironmentVariables.exe",
|
||||
"PowerToys.MouseWithoutBorders.dll",
|
||||
"PowerToys.MouseWithoutBorders.exe",
|
||||
"PowerToys.MouseWithoutBordersModuleInterface.dll",
|
||||
"PowerToys.MouseWithoutBordersService.dll",
|
||||
"PowerToys.MouseWithoutBordersService.exe",
|
||||
"PowerToys.MouseWithoutBordersHelper.dll",
|
||||
"PowerToys.MouseWithoutBordersHelper.exe",
|
||||
|
||||
"PowerToys.ImageResizer.exe",
|
||||
"PowerToys.ImageResizer.dll",
|
||||
"WinUI3Apps\\PowerToys.ImageResizerCLI.exe",
|
||||
"WinUI3Apps\\PowerToys.ImageResizerCLI.dll",
|
||||
"PowerToys.ImageResizerExt.dll",
|
||||
"PowerToys.ImageResizerContextMenu.dll",
|
||||
"ImageResizerContextMenuPackage.msix",
|
||||
"PowerAccent.Core.dll",
|
||||
"PowerToys.PowerAccent.dll",
|
||||
"PowerToys.PowerAccent.exe",
|
||||
"PowerToys.PowerAccentModuleInterface.dll",
|
||||
"PowerToys.PowerAccentKeyboardService.dll",
|
||||
|
||||
"PowerToys.LightSwitchModuleInterface.dll",
|
||||
"LightSwitchService\\PowerToys.LightSwitchService.exe",
|
||||
"WinUI3Apps\\PowerToys.PowerRenameExt.dll",
|
||||
"WinUI3Apps\\PowerToys.PowerRename.exe",
|
||||
"WinUI3Apps\\PowerToys.PowerRenameContextMenu.dll",
|
||||
"WinUI3Apps\\PowerRenameContextMenuPackage.msix",
|
||||
|
||||
"PowerToys.KeyboardManager.dll",
|
||||
"KeyboardManagerEditor\\PowerToys.KeyboardManagerEditor.exe",
|
||||
"KeyboardManagerEngine\\PowerToys.KeyboardManagerEngine.exe",
|
||||
"PowerToys.KeyboardManagerEditorLibraryWrapper.dll",
|
||||
"PowerToys.WorkspacesSnapshotTool.exe",
|
||||
"PowerToys.WorkspacesLauncher.exe",
|
||||
"PowerToys.WorkspacesEditor.exe",
|
||||
"PowerToys.WorkspacesEditor.dll",
|
||||
"PowerToys.WorkspacesLauncherUI.exe",
|
||||
"PowerToys.WorkspacesLauncherUI.dll",
|
||||
"PowerToys.WorkspacesModuleInterface.dll",
|
||||
|
||||
"PowerToys.Launcher.dll",
|
||||
"PowerToys.PowerLauncher.dll",
|
||||
"PowerToys.PowerLauncher.exe",
|
||||
"PowerToys.PowerLauncher.Telemetry.dll",
|
||||
"WinUI3Apps\\PowerToys.RegistryPreviewExt.dll",
|
||||
"WinUI3Apps\\PowerToys.RegistryPreviewUILib.dll",
|
||||
"WinUI3Apps\\PowerToys.RegistryPreview.dll",
|
||||
"WinUI3Apps\\PowerToys.RegistryPreview.exe",
|
||||
|
||||
"Wox.Infrastructure.dll",
|
||||
"Wox.Plugin.dll",
|
||||
"RunPlugins\\Calculator\\Microsoft.PowerToys.Run.Plugin.Calculator.dll",
|
||||
"RunPlugins\\Folder\\Microsoft.Plugin.Folder.dll",
|
||||
"RunPlugins\\Indexer\\Microsoft.Plugin.Indexer.dll",
|
||||
"RunPlugins\\OneNote\\Microsoft.PowerToys.Run.Plugin.OneNote.dll",
|
||||
"RunPlugins\\History\\Microsoft.PowerToys.Run.Plugin.History.dll",
|
||||
"RunPlugins\\PowerToys\\Microsoft.PowerToys.Run.Plugin.PowerToys.dll",
|
||||
"RunPlugins\\Program\\Microsoft.Plugin.Program.dll",
|
||||
"RunPlugins\\Registry\\Microsoft.PowerToys.Run.Plugin.Registry.dll",
|
||||
"RunPlugins\\WindowsSettings\\Microsoft.PowerToys.Run.Plugin.WindowsSettings.dll",
|
||||
"RunPlugins\\Shell\\Microsoft.Plugin.Shell.dll",
|
||||
"RunPlugins\\Uri\\Microsoft.Plugin.Uri.dll",
|
||||
"RunPlugins\\WindowWalker\\Microsoft.Plugin.WindowWalker.dll",
|
||||
"RunPlugins\\UnitConverter\\Community.PowerToys.Run.Plugin.UnitConverter.dll",
|
||||
"RunPlugins\\VSCodeWorkspaces\\Community.PowerToys.Run.Plugin.VSCodeWorkspaces.dll",
|
||||
"RunPlugins\\Service\\Microsoft.PowerToys.Run.Plugin.Service.dll",
|
||||
"RunPlugins\\System\\Microsoft.PowerToys.Run.Plugin.System.dll",
|
||||
"RunPlugins\\TimeDate\\Microsoft.PowerToys.Run.Plugin.TimeDate.dll",
|
||||
"RunPlugins\\ValueGenerator\\Community.PowerToys.Run.Plugin.ValueGenerator.dll",
|
||||
"RunPlugins\\WebSearch\\Community.PowerToys.Run.Plugin.WebSearch.dll",
|
||||
"RunPlugins\\WindowsTerminal\\Microsoft.PowerToys.Run.Plugin.WindowsTerminal.dll",
|
||||
"PowerToys.ShortcutGuide.exe",
|
||||
"PowerToys.ShortcutGuideModuleInterface.dll",
|
||||
|
||||
"WinUI3Apps\\PowerToys.MeasureToolModuleInterface.dll",
|
||||
"WinUI3Apps\\PowerToys.MeasureToolCore.dll",
|
||||
"WinUI3Apps\\PowerToys.MeasureToolUI.dll",
|
||||
"WinUI3Apps\\PowerToys.MeasureToolUI.exe",
|
||||
"PowerToys.VideoConferenceModule.dll",
|
||||
"PowerToys.VideoConferenceProxyFilter_x86.dll",
|
||||
"PowerToys.VideoConferenceProxyFilter_x64.dll",
|
||||
"PowerToys.VideoConferenceProxyFilter_arm64.dll",
|
||||
|
||||
"PowerToys.FindMyMouse.dll",
|
||||
"PowerToys.MouseHighlighter.dll",
|
||||
"PowerToys.MouseJump.dll",
|
||||
"PowerToys.MouseJump.Common.dll",
|
||||
"PowerToys.MousePointerCrosshairs.dll",
|
||||
"PowerToys.MouseJumpUI.dll",
|
||||
"PowerToys.MouseJumpUI.exe",
|
||||
"PowerToys.CursorWrap.dll",
|
||||
|
||||
"PowerToys.MouseWithoutBorders.dll",
|
||||
"PowerToys.MouseWithoutBorders.exe",
|
||||
"PowerToys.MouseWithoutBordersModuleInterface.dll",
|
||||
"PowerToys.MouseWithoutBordersService.dll",
|
||||
"PowerToys.MouseWithoutBordersService.exe",
|
||||
"PowerToys.MouseWithoutBordersHelper.dll",
|
||||
"PowerToys.MouseWithoutBordersHelper.exe",
|
||||
|
||||
"WinUI3Apps\\PowerToys.NewPlus.ShellExtension.dll",
|
||||
"WinUI3Apps\\NewPlusPackage.msix",
|
||||
"WinUI3Apps\\PowerToys.NewPlus.ShellExtension.win10.dll",
|
||||
|
||||
"PowerAccent.Core.dll",
|
||||
"PowerToys.PowerAccent.dll",
|
||||
"PowerToys.PowerAccent.exe",
|
||||
"PowerToys.PowerAccentModuleInterface.dll",
|
||||
"PowerToys.PowerAccentKeyboardService.dll",
|
||||
|
||||
"WinUI3Apps\\PowerToys.PowerRenameExt.dll",
|
||||
"WinUI3Apps\\PowerToys.PowerRename.exe",
|
||||
"WinUI3Apps\\PowerToys.PowerRenameContextMenu.dll",
|
||||
"WinUI3Apps\\PowerRenameContextMenuPackage.msix",
|
||||
|
||||
"PowerToys.WorkspacesSnapshotTool.exe",
|
||||
"PowerToys.WorkspacesLauncher.exe",
|
||||
"PowerToys.WorkspacesWindowArranger.exe",
|
||||
"PowerToys.WorkspacesEditor.exe",
|
||||
"PowerToys.WorkspacesEditor.dll",
|
||||
"PowerToys.WorkspacesLauncherUI.exe",
|
||||
"PowerToys.WorkspacesLauncherUI.dll",
|
||||
"PowerToys.WorkspacesModuleInterface.dll",
|
||||
"PowerToys.WorkspacesCsharpLibrary.dll",
|
||||
|
||||
"WinUI3Apps\\PowerToys.RegistryPreviewExt.dll",
|
||||
"WinUI3Apps\\PowerToys.RegistryPreviewUILib.dll",
|
||||
"WinUI3Apps\\PowerToys.RegistryPreview.dll",
|
||||
"WinUI3Apps\\PowerToys.RegistryPreview.exe",
|
||||
|
||||
"PowerToys.ShortcutGuide.exe",
|
||||
"PowerToys.ShortcutGuideModuleInterface.dll",
|
||||
|
||||
"PowerToys.ZoomIt.exe",
|
||||
"PowerToys.ZoomItModuleInterface.dll",
|
||||
"PowerToys.ZoomItSettingsInterop.dll",
|
||||
|
||||
"WinUI3Apps\\PowerToys.Settings.dll",
|
||||
"WinUI3Apps\\PowerToys.Settings.exe",
|
||||
|
||||
"PowerToys.CmdPalModuleInterface.dll",
|
||||
"CmdPalKeyboardService.dll",
|
||||
"PowerToys.ModuleContracts.dll",
|
||||
"Awake.ModuleServices.dll",
|
||||
"ColorPicker.ModuleServices.dll",
|
||||
"Workspaces.ModuleServices.dll",
|
||||
"Microsoft.CommandPalette.Extensions.dll",
|
||||
"Microsoft.CommandPalette.Extensions.Toolkit.dll",
|
||||
"Microsoft.CmdPal.Ext.PowerToys.dll",
|
||||
"Microsoft.CmdPal.Ext.PowerToys.exe",
|
||||
"*Microsoft.CmdPal.UI_*.msix",
|
||||
|
||||
"PowerToys.DSC.dll",
|
||||
"PowerToys.DSC.exe",
|
||||
|
||||
"PowerToysSparse.msix"
|
||||
],
|
||||
"WinUI3Apps\\PowerToys.Settings.dll",
|
||||
"WinUI3Apps\\PowerToys.Settings.exe"
|
||||
],
|
||||
"SigningInfo": {
|
||||
"Operations": [
|
||||
{
|
||||
@@ -307,32 +263,41 @@
|
||||
"Mono.Cecil.Pdb.dll",
|
||||
"Mono.Cecil.Rocks.dll",
|
||||
"Newtonsoft.Json.dll",
|
||||
"CommunityToolkit.WinUI.Controls.TitleBar.dll",
|
||||
"CommunityToolkit.WinUI.Controls.OpacityMaskView.dll",
|
||||
|
||||
"Newtonsoft.Json.Bson.dll",
|
||||
"NLog.dll",
|
||||
"HtmlAgilityPack.dll",
|
||||
"Markdig.Signed.dll",
|
||||
"HelixToolkit.dll",
|
||||
"HelixToolkit.Core.Wpf.dll",
|
||||
"Mages.Core.dll",
|
||||
|
||||
"JetBrains.Annotations.dll",
|
||||
"NLog.Extensions.Logging.dll",
|
||||
|
||||
"getfilesiginforedist.dll",
|
||||
"concrt140_app.dll",
|
||||
"msvcp140_1_app.dll",
|
||||
"msvcp140_2_app.dll",
|
||||
"msvcp140_app.dll",
|
||||
"Namotion.Reflection.dll",
|
||||
"NJsonSchema.Annotations.dll",
|
||||
"NJsonSchema.dll",
|
||||
"vcamp140_app.dll",
|
||||
"vccorlib140_app.dll",
|
||||
"vcomp140_app.dll",
|
||||
"vcruntime140_1_app.dll",
|
||||
"vcruntime140_app.dll",
|
||||
|
||||
"WinUI3Apps\\CommunityToolkit.Labs.WinUI.SettingsControls.dll",
|
||||
"UnicodeInformation.dll",
|
||||
"Vanara.Core.dll",
|
||||
"Vanara.PInvoke.ComCtl32.dll",
|
||||
"Vanara.PInvoke.Cryptography.dll",
|
||||
"Vanara.PInvoke.Gdi32.dll",
|
||||
"Vanara.PInvoke.Kernel32.dll",
|
||||
"Vanara.PInvoke.Ole.dll",
|
||||
"Vanara.PInvoke.Rpc.dll",
|
||||
"Vanara.PInvoke.Security.dll",
|
||||
"Vanara.PInvoke.Shared.dll",
|
||||
"Vanara.PInvoke.Shell32.dll",
|
||||
"Vanara.PInvoke.ShlwApi.dll",
|
||||
"Vanara.PInvoke.User32.dll",
|
||||
"WinUI3Apps\\clrcompression.dll",
|
||||
"WinUI3Apps\\Microsoft.Graphics.Canvas.Interop.dll",
|
||||
"Microsoft.Web.WebView2.Core.dll",
|
||||
"Microsoft.Web.WebView2.WinForms.dll",
|
||||
"Microsoft.Web.WebView2.Wpf.dll",
|
||||
@@ -351,35 +316,11 @@
|
||||
"WinUI3Apps\\ReverseMarkdown.dll",
|
||||
"WinUI3Apps\\SharpCompress.dll",
|
||||
"WinUI3Apps\\ZstdSharp.dll",
|
||||
"CommunityToolkit.WinUI.Controls.MarkdownTextBlock.dll",
|
||||
"WinUI3Apps\\CommunityToolkit.WinUI.Controls.MarkdownTextBlock.dll",
|
||||
"Markdig.dll",
|
||||
"WinUI3Apps\\Markdig.dll",
|
||||
"RomanNumerals.dll",
|
||||
"WinUI3Apps\\RomanNumerals.dll",
|
||||
"TestableIO.System.IO.Abstractions.dll",
|
||||
"WinUI3Apps\\TestableIO.System.IO.Abstractions.dll",
|
||||
"TestableIO.System.IO.Abstractions.Wrappers.dll",
|
||||
"WinUI3Apps\\TestableIO.System.IO.Abstractions.Wrappers.dll",
|
||||
"WinUI3Apps\\OpenAI.dll",
|
||||
"Testably.Abstractions.FileSystem.Interface.dll",
|
||||
"WinUI3Apps\\Testably.Abstractions.FileSystem.Interface.dll",
|
||||
"ColorCode.Core.dll",
|
||||
"Microsoft.SemanticKernel.Connectors.Ollama.dll",
|
||||
"OllamaSharp.dll",
|
||||
|
||||
"boost_regex-vc143-mt-gd-x32-1_87.dll",
|
||||
"boost_regex-vc143-mt-gd-x64-1_87.dll",
|
||||
"boost_regex-vc143-mt-x32-1_87.dll",
|
||||
"boost_regex-vc143-mt-x64-1_87.dll",
|
||||
|
||||
"Microsoft.ML.OnnxRuntime.dll",
|
||||
|
||||
"ColorCode.UWP.dll",
|
||||
"UnitsNet.dll",
|
||||
"UtfUnknown.dll",
|
||||
"Wpf.Ui.dll",
|
||||
"Shmuelie.WinRTServer.dll",
|
||||
"ToolGood.Words.Pinyin.dll"
|
||||
"Wpf.Ui.dll"
|
||||
],
|
||||
"SigningInfo": {
|
||||
"Operations": [
|
||||
|
||||
52 .pipelines/ESRPSigning_installer.json Normal file
@@ -0,0 +1,52 @@
{
"Version": "1.0.0",
"UseMinimatch": false,
"SignBatches": [
{
"MatchedPath": [
"PowerToysSetupCustomActions.dll",
"PowerToys*Setup-*.exe",
"PowerToys*Setup-*.msi"
],
"SigningInfo": {
"Operations": [
{
"KeyCode": "CP-230012",
"OperationSetCode": "SigntoolSign",
"Parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"ToolName": "sign",
"ToolVersion": "1.0"
},
{
"KeyCode": "CP-230012",
"OperationSetCode": "SigntoolVerify",
"Parameters": [],
"ToolName": "sign",
"ToolVersion": "1.0"
}
]
}
}
]
}
@@ -1,108 +0,0 @@
{
"Version": "1.0.0",
"UseMinimatch": false,
"SignBatches": [
{
"MatchedPath": [
"Microsoft.CommandPalette.Extensions.Toolkit.dll"
],
"SigningInfo": {
"Operations": [
{
"KeyCode": "CP-233904-SN",
"OperationSetCode": "StrongNameSign",
"ToolName": "sign",
"ToolVersion": "1.0",
"Parameters": []
},
{
"KeyCode": "CP-233904-SN",
"OperationSetCode": "StrongNameVerify",
"ToolName": "sign",
"ToolVersion": "1.0",
"Parameters": []
},
{
"KeyCode": "CP-230012",
"OperationSetCode": "SigntoolSign",
"Parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"ToolName": "sign",
"ToolVersion": "1.0"
},
{
"KeyCode": "CP-230012",
"OperationSetCode": "SigntoolVerify",
"Parameters": [],
"ToolName": "sign",
"ToolVersion": "1.0"
}
]
}
},
{
"MatchedPath": [
"Microsoft.CommandPalette.Extensions.dll"
],
"SigningInfo": {
"Operations": [
{
"KeyCode": "CP-230012",
"OperationSetCode": "SigntoolSign",
"Parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"ToolName": "sign",
"ToolVersion": "1.0"
},
{
"KeyCode": "CP-230012",
"OperationSetCode": "SigntoolVerify",
"Parameters": [],
"ToolName": "sign",
"ToolVersion": "1.0"
}
]
}
}
]
}

@@ -4,8 +4,7 @@
"SignBatches": [
{
"MatchedPath": [
"*.dll",
"*.exe"
"PowerToys.VideoConferenceProxyFilter_x86.dll"
],
"SigningInfo": {
"Operations": [
@@ -1,248 +0,0 @@
Param(
# Default winAppSdkVersionNumber is 1.8; useExperimentalVersion defaults to false
[Parameter(Mandatory=$False,Position=1)]
[string]$winAppSdkVersionNumber = "1.8",

# When the pipeline calls the PS1 file, the passed parameters are converted to string type
[Parameter(Mandatory=$False,Position=2)]
[boolean]$useExperimentalVersion = $False,

# Root folder Path for processing
[Parameter(Mandatory=$False,Position=3)]
[string]$rootPath = $(Split-Path -Parent (Split-Path -Parent $MyInvocation.MyCommand.Path)),

# NuGet feed used to query and download the WindowsAppSDK packages
[Parameter(Mandatory=$False,Position=4)]
[string]$sourceLink = "https://microsoft.pkgs.visualstudio.com/ProjectReunion/_packaging/Project.Reunion.nuget.internal/nuget/v3/index.json"
)

function Read-FileWithEncoding {
param (
[string]$Path
)

$reader = New-Object System.IO.StreamReader($Path, $true) # auto-detect encoding
$content = $reader.ReadToEnd()
$encoding = $reader.CurrentEncoding
$reader.Close()

return [PSCustomObject]@{
Content = $content
Encoding = $encoding
}
}

function Write-FileWithEncoding {
param (
[string]$Path,
[string]$Content,
[System.Text.Encoding]$Encoding
)

$writer = New-Object System.IO.StreamWriter($Path, $false, $Encoding)
$writer.Write($Content)
$writer.Close()
}

function Add-NuGetSourceAndMapping {
param (
[xml]$Xml,
[string]$Key,
[string]$Value,
[string[]]$Patterns
)

# Ensure packageSources exists
if (-not $Xml.configuration.packageSources) {
$Xml.configuration.AppendChild($Xml.CreateElement("packageSources")) | Out-Null
}
$sources = $Xml.configuration.packageSources

# Add/Update Source
$sourceNode = $sources.SelectSingleNode("add[@key='$Key']")
if (-not $sourceNode) {
$sourceNode = $Xml.CreateElement("add")
$sourceNode.SetAttribute("key", $Key)
$sources.AppendChild($sourceNode) | Out-Null
}
$sourceNode.SetAttribute("value", $Value)

# Ensure packageSourceMapping exists
if (-not $Xml.configuration.packageSourceMapping) {
$Xml.configuration.AppendChild($Xml.CreateElement("packageSourceMapping")) | Out-Null
}
$mapping = $Xml.configuration.packageSourceMapping

# Remove invalid packageSource nodes (missing key or empty key)
$invalidNodes = $mapping.SelectNodes("packageSource[not(@key) or @key='']")
if ($invalidNodes) {
foreach ($node in $invalidNodes) {
$mapping.RemoveChild($node) | Out-Null
}
}

# Add/Update Mapping Source
$mappingSource = $mapping.SelectSingleNode("packageSource[@key='$Key']")
if (-not $mappingSource) {
$mappingSource = $Xml.CreateElement("packageSource")
$mappingSource.SetAttribute("key", $Key)
# Insert at top for priority
if ($mapping.HasChildNodes) {
$mapping.InsertBefore($mappingSource, $mapping.FirstChild) | Out-Null
} else {
$mapping.AppendChild($mappingSource) | Out-Null
}
}

# Double check and force attribute
if (-not $mappingSource.HasAttribute("key")) {
$mappingSource.SetAttribute("key", $Key)
}

# Update Patterns
# RemoveAll() removes all child nodes AND attributes, so we must re-set the key afterwards
$mappingSource.RemoveAll()
$mappingSource.SetAttribute("key", $Key)

foreach ($pattern in $Patterns) {
$pkg = $Xml.CreateElement("package")
$pkg.SetAttribute("pattern", $pattern)
$mappingSource.AppendChild($pkg) | Out-Null
}
}

function Resolve-WinAppSdkSplitDependencies {
Write-Host "Version $WinAppSDKVersion detected. Resolving split dependencies..."
$installDir = Join-Path $rootPath "localpackages\output"
New-Item -ItemType Directory -Path $installDir -Force | Out-Null

# Create a temporary nuget.config to avoid interference from the repo's config
$tempConfig = Join-Path $env:TEMP "nuget_$(Get-Random).config"
Set-Content -Path $tempConfig -Value "<?xml version='1.0' encoding='utf-8'?><configuration><packageSources><clear /><add key='TempSource' value='$sourceLink' /></packageSources></configuration>"

try {
# Extract BuildTools version from Directory.Packages.props to ensure we have the required version
$dirPackagesProps = Join-Path $rootPath "Directory.Packages.props"
if (Test-Path $dirPackagesProps) {
$propsContent = Get-Content $dirPackagesProps -Raw
if ($propsContent -match '<PackageVersion Include="Microsoft.Windows.SDK.BuildTools" Version="([^"]+)"') {
$buildToolsVersion = $Matches[1]
Write-Host "Downloading Microsoft.Windows.SDK.BuildTools version $buildToolsVersion..."
$nugetArgsBuildTools = "install Microsoft.Windows.SDK.BuildTools -Version $buildToolsVersion -ConfigFile $tempConfig -OutputDirectory $installDir -NonInteractive -NoCache"
Invoke-Expression "nuget $nugetArgsBuildTools" | Out-Null
}
}

# Download package to inspect nuspec and keep it for the build
$nugetArgs = "install Microsoft.WindowsAppSDK -Version $WinAppSDKVersion -ConfigFile $tempConfig -OutputDirectory $installDir -NonInteractive -NoCache"
Invoke-Expression "nuget $nugetArgs" | Out-Null

# Parse dependencies from the installed folders
# Folder structure is typically {PackageId}.{Version}
$directories = Get-ChildItem -Path $installDir -Directory
$allLocalPackages = @()
foreach ($dir in $directories) {
# Match any package pattern: PackageId.Version
if ($dir.Name -match "^(.+?)\.(\d+\..*)$") {
$pkgId = $Matches[1]
$pkgVer = $Matches[2]
$allLocalPackages += $pkgId

$packageVersions[$pkgId] = $pkgVer
Write-Host "Found dependency: $pkgId = $pkgVer"
}
}

# Update repo's nuget.config to use localpackages
$nugetConfig = Join-Path $rootPath "nuget.config"
$configData = Read-FileWithEncoding -Path $nugetConfig
[xml]$xml = $configData.Content

Add-NuGetSourceAndMapping -Xml $xml -Key "localpackages" -Value $installDir -Patterns $allLocalPackages

$xml.Save($nugetConfig)
Write-Host "Updated nuget.config with localpackages mapping."
} catch {
Write-Warning "Failed to resolve dependencies: $_"
} finally {
Remove-Item $tempConfig -Force -ErrorAction SilentlyContinue
}
}

# Execute nuget list and capture the output
if ($useExperimentalVersion) {
# Listing experimental versions takes much longer,
# so we skip -AllVersions to avoid wasting time.
# This only returns the latest experimental version.
Write-Host "Fetching WindowsAppSDK with experimental versions"
$nugetOutput = nuget list Microsoft.WindowsAppSDK `
-Source $sourceLink `
-Prerelease
# Filter versions based on the specified version prefix
$escapedVersionNumber = [regex]::Escape($winAppSdkVersionNumber)
$filteredVersions = $nugetOutput | Where-Object { $_ -match "Microsoft.WindowsAppSDK $escapedVersionNumber\." }
$latestVersions = $filteredVersions
} else {
Write-Host "Fetching stable WindowsAppSDK versions for $winAppSdkVersionNumber"
$nugetOutput = nuget list Microsoft.WindowsAppSDK `
-Source $sourceLink `
-AllVersions
# Filter versions based on the specified version prefix
$escapedVersionNumber = [regex]::Escape($winAppSdkVersionNumber)
$filteredVersions = $nugetOutput | Where-Object { $_ -match "Microsoft.WindowsAppSDK $escapedVersionNumber\." }
$latestVersions = $filteredVersions | Sort-Object { [version]($_ -split ' ')[1] } -Descending | Select-Object -First 1
}

Write-Host "Latest versions found: $latestVersions"
# Extract the latest version number from the output
$latestVersion = $latestVersions -split "`n" | `
Select-String -Pattern 'Microsoft.WindowsAppSDK\s*([0-9]+\.[0-9]+\.[0-9]+-*[a-zA-Z0-9]*)' | `
ForEach-Object { $_.Matches[0].Groups[1].Value } | `
Sort-Object -Descending | `
Select-Object -First 1

if ($latestVersion) {
$WinAppSDKVersion = $latestVersion
Write-Host "Extracted version: $WinAppSDKVersion"
Write-Host "##vso[task.setvariable variable=WinAppSDKVersion]$WinAppSDKVersion"
} else {
Write-Host "Failed to extract version number from nuget list output"
exit 1
}

# Resolve dependencies for 1.8+
$packageVersions = @{ "Microsoft.WindowsAppSDK" = $WinAppSDKVersion }

Resolve-WinAppSdkSplitDependencies

# Update Directory.Packages.props file
Get-ChildItem -Path $rootPath -Recurse "Directory.Packages.props" | ForEach-Object {
$file = Read-FileWithEncoding -Path $_.FullName
$content = $file.Content
$isModified = $false

foreach ($pkgId in $packageVersions.Keys) {
$ver = $packageVersions[$pkgId]
# Escape dots in package ID for regex
$pkgIdRegex = $pkgId -replace '\.', '\.'

$newVersionString = "<PackageVersion Include=""$pkgId"" Version=""$ver"" />"
$oldVersionString = "<PackageVersion Include=""$pkgIdRegex"" Version=""[-.0-9a-zA-Z]*"" />"

if ($content -match "<PackageVersion Include=""$pkgIdRegex""") {
# Update existing package
if ($content -notmatch [regex]::Escape($newVersionString)) {
$content = $content -replace $oldVersionString, $newVersionString
$isModified = $true
}
}
}

if ($isModified) {
Write-FileWithEncoding -Path $_.FullName -Content $content -Encoding $file.encoding
Write-Host "Modified " $_.FullName
}
}
@@ -41,9 +41,6 @@ Write-Output ""
Write-Output "Restoring dotnet tools..."
dotnet tool restore --disable-parallel --no-cache

# Use Regex syntax
$PathExcludes = "(\\obj\\)|(\\bin\\)|(\\x64\\)|(\\Generated Files\\PowerRenameXAML\\)|(\\RegistryPreviewUILib\\Controls\\HexBox\\)"

if (-not $Passive)
{
# Look for unstaged changed files by default
@@ -90,7 +87,7 @@ if (-not $Passive)
}

Write-Output "Running Git Diff: $gitDiffCommand"
$files = Invoke-Expression $gitDiffCommand | Select-String -Pattern "\.xaml$" | Where-Object { $_ -notmatch $PathExcludes }
$files = Invoke-Expression $gitDiffCommand | Select-String -Pattern "\.xaml$"

if (-not $Passive -and -not $Main -and -not $Unstaged -and -not $Staged -and -not $LastCommit)
{
@@ -100,7 +97,7 @@ if (-not $Passive)

if ($files.count -gt 0)
{
dotnet tool run xstyler -c "$PSScriptRoot\..\src\Settings.XamlStyler" -f $files
dotnet tool run xstyler -c "$PSScriptRoot\..\Settings.XamlStyler" -f $files
}
else
{
@@ -110,20 +107,17 @@ if (-not $Passive)
else
{
Write-Output "Checking all files (passively)"
$files = Get-ChildItem -Path "$PSScriptRoot\..\src\*.xaml" -Recurse | Select-Object -ExpandProperty FullName | Where-Object { $_ -notmatch $PathExcludes }
$files = Get-ChildItem -Path "$PSScriptRoot\..\src\*.xaml" -Recurse | Select-Object -ExpandProperty FullName | Where-Object { $_ -notmatch "(\\obj\\)|(\\bin\\)|(\\x64\\)|(\\Generated Files\\PowerRenameXAML\\)" }

if ($files.count -gt 0)
{
dotnet tool run xstyler -p -c "$PSScriptRoot\..\src\Settings.XamlStyler" -f $files
dotnet tool run xstyler -p -c "$PSScriptRoot\..\Settings.XamlStyler" -f $files

if ($lastExitCode -eq 1)
{
Write-Error 'XAML Styling is incorrect, please run `.\.pipelines\applyXamlStyling.ps1 -Main` locally.'
}
if ($lastExitCode -lt 0)
{
Write-Error "Error running dotnet tool run, with the exit code $lastExitCode. Please verify logs and running environment."
}

# Return XAML Styler Status
exit $lastExitCode
}
41 .pipelines/ci/caching.yml Normal file
@@ -0,0 +1,41 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/azure-pipelines-vscode/main/service-schema.json
trigger:
batch: true
branches:
include:
- main
- stable
paths:
exclude:
- doc/*
- temp/*
- tools/*
- '**.md'

pr:
branches:
include:
- main
- stable
paths:
exclude:
- '**.md'
- doc

# 0.0.yyMM.dd##
# 0.0.1904.0900
name: 0.0.$(Date:yyMM).$(Date:dd)$(Rev:rr)

variables:
EnablePipelineCache: true

jobs:
- template: ./templates/build-powertoys-precheck.yml
- template: ./templates/build-powertoys-ci.yml
parameters:
platform: x64
enableCaching: true
- template: ./templates/build-powertoys-ci.yml
parameters:
platform: arm64
enableCaching: true
46 .pipelines/ci/ci.yml Normal file
@@ -0,0 +1,46 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/azure-pipelines-vscode/main/service-schema.json
trigger:
batch: true
branches:
include:
- main
- stable
paths:
exclude:
- doc/*
- temp/*
- tools/*
- '**.md'

pr:
branches:
include:
- main
- stable
paths:
exclude:
- '**.md'
- doc

# 0.0.yyMM.dd##
# 0.0.1904.0900
name: 0.0.$(Date:yyMM).$(Date:dd)$(Rev:rr)

variables:
EnablePipelineCache: true

jobs:
- template: ./templates/build-powertoys-precheck.yml
- template: ./templates/build-powertoys-ci.yml
parameters:
platform: x64
${{ if eq(variables['System.PullRequest.IsFork'], 'False') }}:
enableCaching: true
- template: ./templates/build-powertoys-ci.yml
parameters:
platform: arm64
${{ if eq(variables['System.PullRequest.IsFork'], 'False') }}:
enableCaching: true
# - template: ./templates/run-ui-tests-ci.yml
# parameters:
# platform: x64
44 .pipelines/ci/templates/build-powertoys-ci.yml Normal file
@@ -0,0 +1,44 @@
parameters:
- name: configuration
type: string
default: 'Release'
- name: platform
type: string
default: 'x64'
- name: additionalBuildArguments
type: string
default: '-p:RestorePackagesConfig=true -m'
- name: enableCaching
type: boolean
default: false

jobs:
- job: Build${{ parameters.platform }}${{ parameters.configuration }}
displayName: Build ${{ parameters.platform }} ${{ parameters.configuration }}
dependsOn: Precheck
condition: and(succeeded(),ne(dependencies.Precheck.outputs['verifyBuildRequest.skipBuild'], 'Yes'))
variables:
BuildConfiguration: ${{ parameters.configuration }}
BuildPlatform: ${{ parameters.platform }}
NUGET_RESTORE_MSBUILD_ARGS: /p:Platform=${{ parameters.platform }} # Required for nuget to work due to self contained
NODE_OPTIONS: --max_old_space_size=16384
pool:
demands: ImageOverride -equals SHINE-VS17-Latest
${{ if eq(variables['System.CollectionId'], 'cb55739e-4afe-46a3-970f-1b49d8ee7564') }}:
name: SHINE-INT-L
${{ else }}:
name: SHINE-OSS-L
timeoutInMinutes: 120
strategy:
maxParallel: 10
steps:
- template: build-powertoys-steps.yml
parameters:
additionalBuildArguments: ${{ parameters.additionalBuildArguments }}
enableCaching: ${{ parameters.enableCaching }}

# It appears that the Component Governance build task that gets automatically injected stopped working
# when we renamed our main branch.
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
condition: and(succeededOrFailed(), not(eq(variables['Build.Reason'], 'PullRequest')))
20 .pipelines/ci/templates/build-powertoys-installer.yml Normal file
@@ -0,0 +1,20 @@
parameters:
configuration: 'Release'
platform: ''
additionalBuildArguments: '/p:RestorePackagesConfig=true -m'

jobs:
- job: Build${{ parameters.platform }}${{ parameters.configuration }}
displayName: Build ${{ parameters.platform }} ${{ parameters.configuration }}
variables:
BuildConfiguration: ${{ parameters.configuration }}
BuildPlatform: ${{ parameters.platform }}
pool:
name: SHINE-INT-L
timeoutInMinutes: 120
strategy:
maxParallel: 10
steps:
- template: build-powertoys-steps.yml
parameters:
additionalBuildArguments: ${{ parameters.additionalBuildArguments }}
38 .pipelines/ci/templates/build-powertoys-precheck.yml Normal file
@@ -0,0 +1,38 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/azure-pipelines-vscode/master/service-schema.json
jobs:
- job: Precheck
pool:
demands: ImageOverride -equals SHINE-VS17-Latest
${{ if eq(variables['System.CollectionId'], 'cb55739e-4afe-46a3-970f-1b49d8ee7564') }}:
name: SHINE-INT-L
${{ else }}:
name: SHINE-OSS-L
steps:
- checkout: none

- task: PowerShell@2
displayName: Verify Build Request
inputs:
targetType: 'inline'
script: |
try {
# Try based on pull request first
$pullRequestNumber = "$(system.pullRequest.pullRequestNumber)";
$gitHubPullRequest = Invoke-RestMethod -Method Get "https://api.github.com/repos/microsoft/PowerToys/pulls/$pullRequestNumber/files"
# If the only files updated are .md/.txt files, set the skipBuild variable
if(([array]($gitHubPullRequest.filename) -notmatch ".md|.txt").Length -eq 0) {
Write-Host '##vso[task.setvariable variable=skipBuild;isOutput=true]Yes'
Write-Host 'Skipping Build'
}
}
catch {
# Fall back to the latest commit otherwise.
$commit = "$(build.sourceVersion)";
$gitHubCommit = Invoke-RestMethod -Method Get "https://api.github.com/repos/microsoft/PowerToys/commits/$commit"
if(([array]($githubCommit.files.filename) -notmatch ".md|.txt").Length -eq 0) {
Write-Host '##vso[task.setvariable variable=skipBuild;isOutput=true]Yes'
Write-Host 'Skipping Build'
}
}
pwsh: true
name: verifyBuildRequest
297 .pipelines/ci/templates/build-powertoys-steps.yml Normal file
@@ -0,0 +1,297 @@
parameters:
- name: additionalBuildArguments
type: string
default: ''
- name: enableCaching
type: boolean
default: false

steps:
- checkout: self
fetchDepth: 1
submodules: true
clean: true

- task: UseDotNet@2
displayName: 'Use .NET 6 SDK'
inputs:
packageType: sdk
version: '6.x'

- task: PowerShell@2
displayName: Verify XAML formatting
inputs:
filePath: '$(build.sourcesdirectory)\.pipelines\applyXamlStyling.ps1'
arguments: -Passive
pwsh: true

- task: PowerShell@2
displayName: Verify Nuget package versions for PowerToys.sln
inputs:
filePath: '$(build.sourcesdirectory)\.pipelines\verifyNugetPackages.ps1'
arguments: -solution '$(build.sourcesdirectory)\PowerToys.sln'
pwsh: true

- task: PowerShell@2
displayName: Verify Arm64 configuration for PowerToys.sln
inputs:
filePath: '$(build.sourcesdirectory)\.pipelines\verifyArm64Configuration.ps1'
arguments: -solution '$(build.sourcesdirectory)\PowerToys.sln'
pwsh: true

- task: PowerShell@2
displayName: Verify Arm64 configuration for BugReportTool.sln
inputs:
filePath: '$(build.sourcesdirectory)\.pipelines\verifyArm64Configuration.ps1'
arguments: -solution '$(build.sourcesdirectory)\tools\BugReportTool\BugReportTool.sln'
pwsh: true

- task: PowerShell@2
displayName: Verify Arm64 configuration for WebcamReportTool.sln
inputs:
filePath: '$(build.sourcesdirectory)\.pipelines\verifyArm64Configuration.ps1'
arguments: -solution '$(build.sourcesdirectory)\tools\WebcamReportTool\WebcamReportTool.sln'
pwsh: true

- task: PowerShell@2
displayName: Verify Arm64 configuration for StylesReportTool.sln
inputs:
filePath: '$(build.sourcesdirectory)\.pipelines\verifyArm64Configuration.ps1'
arguments: -solution '$(build.sourcesdirectory)\tools\StylesReportTool\StylesReportTool.sln'
pwsh: true

- task: PowerShell@2
displayName: Verify Arm64 configuration for PowerToysSetup.sln
inputs:
filePath: '$(build.sourcesdirectory)\.pipelines\verifyArm64Configuration.ps1'
arguments: -solution '$(build.sourcesdirectory)\installer\PowerToysSetup.sln'
pwsh: true

- task: PowerShell@2
displayName: Verify and set latest VCToolsVersion usage
inputs:
filePath: '$(build.sourcesdirectory)\.pipelines\verifyAndSetLatestVCToolsVersion.ps1'
pwsh: true

- task: UseDotNet@2
displayName: 'Use .NET 8 SDK'
inputs:
packageType: sdk
version: '8.x'
includePreviewVersions: true

- task: VisualStudioTestPlatformInstaller@1
displayName: Ensure VSTest Platform

- task: Cache@2
displayName: 'Cache nuget packages (PackageReference)'
inputs:
key: '"PackageReference" | "$(Agent.OS)" | Directory.Packages.props'
restoreKeys: |
"PackageReference" | "$(Agent.OS)"
"PackageReference"
path: $(NUGET_PACKAGES)

- task: Cache@2
displayName: 'Cache nuget packages (packages.config)'
inputs:
key: '"packages.config" | "$(Agent.OS)" | **/packages.config'
restoreKeys: |
"packages.config" | "$(Agent.OS)"
"packages.config"
path: packages

- ${{ if eq(parameters.enableCaching, true) }}:
- task: NuGetToolInstaller@1
displayName: Install NuGet

- script: nuget restore packages.config -SolutionDirectory .
displayName: 'nuget restore packages.config'

- task: VSBuild@1
displayName: 'Build and Test PowerToys.sln'
inputs:
solution: '**\PowerToys.sln'
vsVersion: 17.0
platform: '$(BuildPlatform)'
configuration: '$(BuildConfiguration)'
${{ if eq(parameters.enableCaching, true) }}:
msbuildArgs: -restore ${{ parameters.additionalBuildArguments }} -t:Build;Test -graph -reportfileaccesses -p:MSBuildCacheEnabled=true -p:MSBuildCacheLogDirectory=$(Build.ArtifactStagingDirectory)\logs\MSBuildCache -bl:$(Build.ArtifactStagingDirectory)\logs\PowerToys.binlog -ds:false
${{ else }}:
msbuildArgs: -restore ${{ parameters.additionalBuildArguments }} -t:Build;Test -graph -bl:$(Build.ArtifactStagingDirectory)\logs\PowerToys.binlog -ds:false
msbuildArchitecture: x64
maximumCpuCount: true
${{ if eq(parameters.enableCaching, true) }}:
env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)

- task: VSBuild@1
displayName: 'Build BugReportTool.sln'
inputs:
solution: '**\BugReportTool.sln'
vsVersion: 17.0
platform: '$(BuildPlatform)'
configuration: '$(BuildConfiguration)'
msbuildArgs: -restore ${{ parameters.additionalBuildArguments }} -graph -bl:$(Build.ArtifactStagingDirectory)\logs\BugReportTool.binlog -ds:false
msbuildArchitecture: x64
maximumCpuCount: true

- task: VSBuild@1
displayName: 'Build WebcamReportTool.sln'
inputs:
solution: '**\WebcamReportTool.sln'
vsVersion: 17.0
platform: '$(BuildPlatform)'
configuration: '$(BuildConfiguration)'
msbuildArgs: -restore ${{ parameters.additionalBuildArguments }} -graph -bl:$(Build.ArtifactStagingDirectory)\logs\WebcamReportTool.binlog -ds:false
msbuildArchitecture: x64
maximumCpuCount: true

- task: VSBuild@1
displayName: 'Build StylesReportTool.sln'
inputs:
solution: '**\StylesReportTool.sln'
vsVersion: 17.0
platform: '$(BuildPlatform)'
configuration: '$(BuildConfiguration)'
msbuildArgs: -restore ${{ parameters.additionalBuildArguments }} -graph -bl:$(Build.ArtifactStagingDirectory)\logs\StylesReportTool.binlog -ds:false
msbuildArchitecture: x64
maximumCpuCount: true

- task: PowerShell@2
displayName: Download and install WiX 3.14 development build
inputs:
targetType: filePath
filePath: '$(build.sourcesdirectory)\.pipelines\installWiX.ps1'

- task: VSBuild@1
displayName: 'Build PowerToys per-machine MSI'
inputs:
solution: '**\installer\PowerToysSetup.sln'
vsVersion: 17.0
platform: '$(BuildPlatform)'
configuration: '$(BuildConfiguration)'
msbuildArgs: /t:PowerToysInstaller -restore ${{ parameters.additionalBuildArguments }} -bl:$(Build.ArtifactStagingDirectory)\logs\PowerToysSetup-PowerToysInstaller.binlog -ds:false
msbuildArchitecture: x64
maximumCpuCount: true

- task: VSBuild@1
displayName: 'Build PowerToys per-machine Bootstrapper'
inputs:
solution: '**\installer\PowerToysSetup.sln'
vsVersion: 17.0
platform: '$(BuildPlatform)'
configuration: '$(BuildConfiguration)'
msbuildArgs: /t:PowerToysBootstrapper ${{ parameters.additionalBuildArguments }} -bl:$(Build.ArtifactStagingDirectory)\logs\PowerToysSetup-PowerToysBootstrapper.binlog -ds:false
clean: false
msbuildArchitecture: x64
maximumCpuCount: true

- task: PowerShell@2
displayName: Clean installer dir before building per-user installer
inputs:
targetType: inline
script: git clean -xfd -e *exe -- .\installer\
pwsh: true

- task: VSBuild@1
displayName: 'Build PowerToys per-user MSI'
|
||||
inputs:
|
||||
solution: '**\installer\PowerToysSetup.sln'
|
||||
vsVersion: 17.0
|
||||
platform: '$(BuildPlatform)'
|
||||
configuration: '$(BuildConfiguration)'
|
||||
msbuildArgs: /t:PowerToysInstaller -restore ${{ parameters.additionalBuildArguments }} /p:PerUser=true -bl:$(Build.ArtifactStagingDirectory)\logs\PowerToysSetup-PowerToysInstaller-PerUser.binlog -ds:false
|
||||
msbuildArchitecture: x64
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: 'Build PowerToys per-user Bootstrapper'
|
||||
inputs:
|
||||
solution: '**\installer\PowerToysSetup.sln'
|
||||
vsVersion: 17.0
|
||||
platform: '$(BuildPlatform)'
|
||||
configuration: '$(BuildConfiguration)'
|
||||
msbuildArgs: /t:PowerToysBootstrapper ${{ parameters.additionalBuildArguments }} /p:PerUser=true -bl:$(Build.ArtifactStagingDirectory)\logs\PowerToysSetup-PowerToysBootstrapper-PerUser.binlog -ds:false
|
||||
clean: false
|
||||
msbuildArchitecture: x64
|
||||
maximumCpuCount: true
|
||||
|
||||
# Check if deps.json files don't reference different dll versions.
|
||||
- task: PowerShell@2
|
||||
displayName: Audit deps.json files for all applications
|
||||
inputs:
|
||||
filePath: '$(build.sourcesdirectory)\.pipelines\verifyDepsJsonLibraryVersions.ps1'
|
||||
arguments: -targetDir '$(build.sourcesdirectory)\$(BuildPlatform)\$(BuildConfiguration)'
|
||||
pwsh: true
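For context, a minimal sketch of the kind of check this audit performs: collect every library/version pair referenced by the deps.json files under the build output and fail when the same assembly shows up with more than one version. The snippet below is an illustration only, not the contents of verifyDepsJsonLibraryVersions.ps1; the parsing details and the example path are assumptions.
# Illustrative sketch (assumed logic, not the shipped script): flag assemblies that
# appear with more than one version across all deps.json files in the output folder.
$targetDir = "$env:BUILD_SOURCESDIRECTORY\x64\Release"   # example path only
$versions = @{}
Get-ChildItem -Path $targetDir -Recurse -Filter '*.deps.json' | ForEach-Object {
    $deps = Get-Content $_.FullName -Raw | ConvertFrom-Json
    foreach ($framework in $deps.targets.PSObject.Properties) {
        foreach ($libraryKey in $framework.Value.PSObject.Properties.Name) {
            $name, $version = $libraryKey -split '/', 2
            if (-not $versions.ContainsKey($name)) { $versions[$name] = @{} }
            $versions[$name][$version] = $true
        }
    }
}
$conflicts = $versions.GetEnumerator() | Where-Object { $_.Value.Count -gt 1 }
if ($conflicts) {
    throw "Conflicting library versions found for: $(($conflicts | ForEach-Object Key) -join ', ')"
}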
|
||||
|
||||
# Check if asset files on the main application paths are playing nice and avoiding basic conflicts.
|
||||
- task: PowerShell@2
|
||||
displayName: Audit base applications path asset conflicts
|
||||
inputs:
|
||||
filePath: '$(build.sourcesdirectory)\.pipelines\verifyPossibleAssetConflicts.ps1'
|
||||
arguments: -targetDir '$(build.sourcesdirectory)\$(BuildPlatform)\$(BuildConfiguration)'
|
||||
pwsh: true
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Audit WinAppSDK applications path asset conflicts
|
||||
inputs:
|
||||
filePath: '$(build.sourcesdirectory)\.pipelines\verifyPossibleAssetConflicts.ps1'
|
||||
arguments: -targetDir '$(build.sourcesdirectory)\$(BuildPlatform)\$(BuildConfiguration)\WinUI3Apps'
|
||||
pwsh: true
|
||||
|
||||
# Publish test results which ran in MSBuild
|
||||
- task: PublishTestResults@2
|
||||
displayName: 'Publish Test Results'
|
||||
inputs:
|
||||
testResultsFormat: VSTest
|
||||
testResultsFiles: '**/*.trx'
|
||||
condition: ne(variables['BuildPlatform'],'arm64')
|
||||
|
||||
# Native dlls
|
||||
- task: VSTest@2
|
||||
condition: ne(variables['BuildPlatform'],'arm64') # No arm64 agents to run the tests.
|
||||
displayName: 'Native Tests'
|
||||
inputs:
|
||||
platform: '$(BuildPlatform)'
|
||||
configuration: '$(BuildConfiguration)'
|
||||
testSelector: 'testAssemblies'
|
||||
testAssemblyVer2: |
|
||||
**\KeyboardManagerEngineTest.dll
|
||||
**\KeyboardManagerEditorTest.dll
|
||||
**\UnitTests-CommonLib.dll
|
||||
**\PowerRenameUnitTests.dll
|
||||
**\UnitTests-FancyZones.dll
|
||||
!**\obj\**
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Trigger dotnet welcome message so that it does not cause errors in other scripts
|
||||
inputs:
|
||||
targetType: 'inline'
|
||||
script: |
|
||||
dotnet list $(build.sourcesdirectory)\src\common\Common.UI\Common.UI.csproj package
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Verify Notice.md and Nuget packages match
|
||||
inputs:
|
||||
filePath: '$(build.sourcesdirectory)\.pipelines\verifyNoticeMdAgainstNugetPackages.ps1'
|
||||
arguments: -path '$(build.sourcesdirectory)\'
|
||||
pwsh: true
|
||||
|
||||
- publish: $(Build.ArtifactStagingDirectory)\logs
|
||||
displayName: Publish Logs
|
||||
artifact: '$(System.JobDisplayName) logs'
|
||||
condition: always()
|
||||
|
||||
- task: CopyFiles@2
|
||||
displayName: Copy Build Files
|
||||
condition: and(succeeded(), ne(variables['BuildPlatform'],'arm64'))
|
||||
inputs:
|
||||
sourceFolder: '$(Build.SourcesDirectory)'
|
||||
contents: '$(BuildPlatform)/$(BuildConfiguration)/**/*'
|
||||
targetFolder: '$(Build.ArtifactStagingDirectory)\$(BuildPlatform)\$(BuildConfiguration)'
|
||||
|
||||
- publish: $(Build.ArtifactStagingDirectory)\$(BuildPlatform)\$(BuildConfiguration)
|
||||
displayName: Publish Build Artifacts
|
||||
artifact: build-$(BuildPlatform)-$(BuildConfiguration)
|
||||
condition: and(succeeded(), ne(variables['BuildPlatform'],'arm64'))
|
||||
70
.pipelines/ci/templates/run-ui-tests-ci.yml
Normal file
@@ -0,0 +1,70 @@
|
||||
parameters:
|
||||
configuration: 'Release'
|
||||
platform: ''
|
||||
|
||||
jobs:
|
||||
- job: UITest
|
||||
displayName: UI Test ${{ parameters.platform }} ${{ parameters.configuration }}
|
||||
dependsOn: Build${{ parameters.platform }}${{ parameters.configuration }}
|
||||
variables:
|
||||
SrcPath: $(Build.Repository.LocalPath)
|
||||
pool:
|
||||
${{ if eq(variables['System.CollectionId'], 'cb55739e-4afe-46a3-970f-1b49d8ee7564') }}:
|
||||
name: SHINE-INT-Testing-x64
|
||||
${{ else }}:
|
||||
name: SHINE-OSS-Testing-x64
|
||||
steps:
|
||||
- checkout: self
|
||||
fetchDepth: 1
|
||||
submodules: false
|
||||
clean: true
|
||||
fetchTags: false
|
||||
|
||||
- download: current
|
||||
displayName: Download artifacts
|
||||
artifact: build-${{ parameters.platform }}-${{ parameters.configuration }}
|
||||
|
||||
- task: UseDotNet@2
|
||||
displayName: 'Use .NET 6 SDK'
|
||||
inputs:
|
||||
packageType: sdk
|
||||
version: '6.x'
|
||||
|
||||
- task: UseDotNet@2
|
||||
displayName: 'Use .NET 8 SDK'
|
||||
inputs:
|
||||
packageType: sdk
|
||||
version: '8.x'
|
||||
includePreviewVersions: true
|
||||
|
||||
- task: VisualStudioTestPlatformInstaller@1
|
||||
displayName: Ensure VSTest Platform
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Download and install WinAppDriver
|
||||
inputs:
|
||||
targetType: filePath
|
||||
filePath: '$(build.sourcesdirectory)\.pipelines\InstallWinAppDriver.ps1'
|
||||
|
||||
- task: ScreenResolutionUtility@1
|
||||
inputs:
|
||||
displaySettings: 'optimal'
|
||||
|
||||
- task: VSTest@2
|
||||
displayName: 'UI Tests'
|
||||
condition: and(succeeded(), ne(variables['BuildPlatform'],'arm64')) # No arm64 agents to run the tests.
|
||||
inputs:
|
||||
platform: '$(BuildPlatform)'
|
||||
configuration: '$(BuildConfiguration)'
|
||||
testSelector: 'testAssemblies'
|
||||
searchFolder: '$(Pipeline.Workspace)\build-${{ parameters.platform }}-${{ parameters.configuration }}'
|
||||
vstestLocationMethod: 'location' # otherwise fails to find vstest.console.exe
|
||||
#vstestLocation: '$(Agent.ToolsDirectory)\VsTest\**\${{ parameters.platform }}\tools\net462\Common7\IDE\Extensions\TestPlatform'
|
||||
vstestLocation: '$(Agent.ToolsDirectory)\VsTest\17.10.0\x64\tools\net462\Common7\IDE\Extensions\TestPlatform'
|
||||
uiTests: true
|
||||
rerunFailedTests: true
|
||||
testAssemblyVer2: |
|
||||
**\UITests-FancyZones.dll
|
||||
**\UITests-FancyZonesEditor.dll
|
||||
!**\obj\**
|
||||
!**\ref\**
|
||||
@@ -1,95 +0,0 @@
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$BuildPlatform,
|
||||
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$BuildConfiguration,
|
||||
|
||||
[Parameter()]
|
||||
[string]$RepoRoot = (Get-Location).Path
|
||||
)
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
function Resolve-PlatformDirectory {
|
||||
param(
|
||||
[string]$Root,
|
||||
[string]$Platform
|
||||
)
|
||||
|
||||
$normalized = $Platform.Trim()
|
||||
$candidates = @()
|
||||
$candidates += Join-Path $Root $normalized
|
||||
$candidates += Join-Path $Root ($normalized.ToUpperInvariant())
|
||||
$candidates += Join-Path $Root ($normalized.ToLowerInvariant())
|
||||
$candidates = $candidates | Where-Object { -not [string]::IsNullOrWhiteSpace($_) } | Select-Object -Unique
|
||||
|
||||
foreach ($candidate in $candidates) {
|
||||
if (Test-Path $candidate) {
|
||||
return $candidate
|
||||
}
|
||||
}
|
||||
|
||||
return $candidates[0]
|
||||
}
|
||||
|
||||
Write-Host "Repo root: $RepoRoot"
|
||||
Write-Host "Requested build platform: $BuildPlatform"
|
||||
Write-Host "Requested configuration: $BuildConfiguration"
|
||||
|
||||
# Always use x64 PowerToys.DSC.exe since CI/CD machines are x64
|
||||
$exePlatform = 'x64'
|
||||
$exeRoot = Resolve-PlatformDirectory -Root $RepoRoot -Platform $exePlatform
|
||||
$exeOutputDir = Join-Path $exeRoot $BuildConfiguration
|
||||
$exePath = Join-Path $exeOutputDir 'PowerToys.DSC.exe'
|
||||
|
||||
Write-Host "Using x64 PowerToys.DSC.exe to generate DSC manifests for $BuildPlatform build"
|
||||
|
||||
if (-not (Test-Path $exePath)) {
|
||||
throw "PowerToys.DSC.exe not found at '$exePath'. Make sure it has been built first."
|
||||
}
|
||||
|
||||
Write-Host "Using PowerToys.DSC.exe at '$exePath'."
|
||||
|
||||
# Output DSC manifests to the target build platform directory (x64, ARM64, etc.)
|
||||
$outputRoot = Resolve-PlatformDirectory -Root $RepoRoot -Platform $BuildPlatform
|
||||
if (-not (Test-Path $outputRoot)) {
|
||||
Write-Host "Creating missing platform output root at '$outputRoot'."
|
||||
New-Item -Path $outputRoot -ItemType Directory -Force | Out-Null
|
||||
}
|
||||
|
||||
$outputDir = Join-Path $outputRoot $BuildConfiguration
|
||||
if (-not (Test-Path $outputDir)) {
|
||||
Write-Host "Creating missing configuration output directory at '$outputDir'."
|
||||
New-Item -Path $outputDir -ItemType Directory -Force | Out-Null
|
||||
}
|
||||
|
||||
# DSC v3 manifests go to DSCModules subfolder
|
||||
$dscOutputDir = Join-Path $outputDir 'DSCModules'
|
||||
if (-not (Test-Path $dscOutputDir)) {
|
||||
Write-Host "Creating DSCModules subfolder at '$dscOutputDir'."
|
||||
New-Item -Path $dscOutputDir -ItemType Directory -Force | Out-Null
|
||||
}
|
||||
|
||||
Write-Host "DSC manifests will be generated to: '$dscOutputDir'"
|
||||
|
||||
Write-Host "Cleaning previously generated DSC manifest files from '$dscOutputDir'."
|
||||
Get-ChildItem -Path $dscOutputDir -Filter 'microsoft.powertoys.*.settings.dsc.resource.json' -ErrorAction SilentlyContinue | Remove-Item -Force
|
||||
|
||||
$arguments = @('manifest', '--resource', 'settings', '--outputDir', $dscOutputDir)
|
||||
Write-Host "Invoking DSC manifest generator: '$exePath' $($arguments -join ' ')"
|
||||
& $exePath @arguments
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
throw "PowerToys.DSC.exe exited with code $LASTEXITCODE"
|
||||
}
|
||||
|
||||
$generatedFiles = Get-ChildItem -Path $dscOutputDir -Filter 'microsoft.powertoys.*.settings.dsc.resource.json' -ErrorAction Stop
|
||||
if ($generatedFiles.Count -eq 0) {
|
||||
throw "No DSC manifest files were generated in '$dscOutputDir'."
|
||||
}
|
||||
|
||||
Write-Host "Generated $($generatedFiles.Count) DSC manifest file(s):"
|
||||
foreach ($file in $generatedFiles) {
|
||||
Write-Host " - $($file.FullName)"
|
||||
}
|
||||
@@ -1,46 +0,0 @@
|
||||
param(
|
||||
[Parameter()]
|
||||
[ValidateSet("Machine", "PerUser")]
|
||||
[string]$InstallMode = "Machine"
|
||||
)
|
||||
|
||||
$ProgressPreference = 'SilentlyContinue'
|
||||
|
||||
# Get artifact path
|
||||
$ArtifactPath = $ENV:BUILD_ARTIFACTSTAGINGDIRECTORY
|
||||
if (-not $ArtifactPath) {
|
||||
throw "BUILD_ARTIFACTSTAGINGDIRECTORY environment variable not set"
|
||||
}
|
||||
|
||||
# Since we only download PowerToysSetup-*.exe files, we can directly find it
|
||||
$Installer = Get-ChildItem -Path $ArtifactPath -Filter 'PowerToys*.exe' | Select-Object -First 1
|
||||
|
||||
if (-not $Installer) {
|
||||
throw "PowerToys installer not found"
|
||||
}
|
||||
|
||||
Write-Host "Installing PowerToys: $($Installer.Name)"
|
||||
|
||||
# Install PowerToys
|
||||
$Process = Start-Process -Wait -FilePath $Installer.FullName -ArgumentList "/passive", "/norestart" -PassThru -NoNewWindow
|
||||
|
||||
if ($Process.ExitCode -eq 0 -or $Process.ExitCode -eq 3010) {
|
||||
Write-Host "✅ PowerToys installation completed successfully"
|
||||
} else {
|
||||
throw "PowerToys installation failed with exit code: $($Process.ExitCode)"
|
||||
}
|
||||
|
||||
# Verify installation
|
||||
if ($InstallMode -eq "PerUser") {
|
||||
if (Test-Path "${env:LOCALAPPDATA}\PowerToys\PowerToys.exe") {
|
||||
Write-Host "✅ PowerToys verified at: ${env:LOCALAPPDATA}\PowerToys\PowerToys.exe"
|
||||
} else {
|
||||
throw "PowerToys installation verification failed"
|
||||
}
|
||||
} else {
|
||||
if (Test-Path "${env:ProgramFiles}\PowerToys\PowerToys.exe") {
|
||||
Write-Host "✅ PowerToys verified at: ${env:ProgramFiles}\PowerToys\PowerToys.exe"
|
||||
} else {
|
||||
throw "PowerToys installation verification failed"
|
||||
}
|
||||
}
|
||||
26
.pipelines/installWiX.ps1
Normal file
@@ -0,0 +1,26 @@
$ProgressPreference = 'SilentlyContinue'

$WixDownloadUrl = "https://github.com/wixtoolset/wix3/releases/download/wix3141rtm/wix314.exe"
$WixBinariesDownloadUrl = "https://github.com/wixtoolset/wix3/releases/download/wix3141rtm/wix314-binaries.zip"

# Download WiX binaries and verify their hash sums
Invoke-WebRequest -Uri $WixDownloadUrl -OutFile "$($ENV:Temp)\wix314.exe"
$Hash = (Get-FileHash -Algorithm SHA256 "$($ENV:Temp)\wix314.exe").Hash
if ($Hash -ne '6BF6D03D6923D9EF827AE1D943B90B42B8EBB1B0F68EF6D55F868FA34C738A29')
{
    Write-Error "$Hash"
    throw "wix314.exe has unexpected SHA256 hash: $Hash"
}
Invoke-WebRequest -Uri $WixBinariesDownloadUrl -OutFile "$($ENV:Temp)\wix314-binaries.zip"
$Hash = (Get-FileHash -Algorithm SHA256 "$($ENV:Temp)\wix314-binaries.zip").Hash
if ($Hash -ne '6AC824E1642D6F7277D0ED7EA09411A508F6116BA6FAE0AA5F2C7DAA2FF43D31')
{
    throw "wix314-binaries.zip has unexpected SHA256 hash: $Hash"
}

# Install WiX
Start-Process -Wait -FilePath "$($ENV:Temp)\wix314.exe" -ArgumentList "/install /quiet"

# Extract WiX binaries and copy wix.targets to the installed dir
Expand-Archive -Path "$($ENV:Temp)\wix314-binaries.zip" -Force -DestinationPath "$($ENV:Temp)"
Copy-Item -Path "$($ENV:Temp)\wix.targets" -Destination "C:\Program Files (x86)\WiX Toolset v3.14\"
|
||||
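The script above repeats the same download-then-verify pattern for both WiX artifacts. As a side note, that pattern can be captured in a small helper like the sketch below; the function name and parameters are invented for illustration, and this is not a proposed change to installWiX.ps1.
# Hypothetical helper capturing the download-and-verify pattern used above.
function Get-VerifiedFile {
    param(
        [Parameter(Mandatory)] [string]$Uri,
        [Parameter(Mandatory)] [string]$OutFile,
        [Parameter(Mandatory)] [string]$ExpectedSha256
    )
    Invoke-WebRequest -Uri $Uri -OutFile $OutFile
    $actual = (Get-FileHash -Algorithm SHA256 $OutFile).Hash
    if ($actual -ne $ExpectedSha256) {
        throw "'$OutFile' has unexpected SHA256 hash: $actual (expected $ExpectedSha256)"
    }
}

# Example usage with the same URL and hash as above:
Get-VerifiedFile -Uri $WixDownloadUrl -OutFile "$($ENV:Temp)\wix314.exe" -ExpectedSha256 '6BF6D03D6923D9EF827AE1D943B90B42B8EBB1B0F68EF6D55F868FA34C738A29'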
152
.pipelines/installer-steps.yml
Normal file
@@ -0,0 +1,152 @@
|
||||
parameters:
|
||||
- name: versionNumber
|
||||
type: string
|
||||
default: "0.0.1"
|
||||
- name: perUserArg
|
||||
type: string
|
||||
default: "false"
|
||||
- name: buildSubDir
|
||||
type: string
|
||||
default: "MachineSetup"
|
||||
- name: installerPrefix
|
||||
type: string
|
||||
default: "PowerToysSetup"
|
||||
- name: signingParameters
|
||||
type: object
|
||||
default: {}
|
||||
|
||||
steps:
|
||||
- task: VSBuild@1
|
||||
displayName: Build PowerToysSetupCustomActions DLL # This DLL needs to be built and signed before building the MSI.
|
||||
inputs:
|
||||
solution: "**/installer/PowerToysSetup.sln"
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: -restore /p:RestorePackagesConfig=true /p:RestoreConfigFile="$(Build.SourcesDirectory)\.pipelines\release-nuget.config" /p:CIBuild=true /bl:$(Build.SourcesDirectory)\msbuild.binlog /t:PowerToysSetupCustomActions /p:RunBuildEvents=true /p:PerUser=${{parameters.perUserArg}}
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
clean: true
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5
|
||||
displayName: Sign PowerToysSetupCustomActions DLL
|
||||
inputs:
|
||||
${{ insert }}: ${{ parameters.signingParameters }}
|
||||
FolderPath: 'installer/PowerToysSetupCustomActions/$(BuildPlatform)\$(BuildConfiguration)\${{parameters.buildSubDir}}'
|
||||
signType: batchSigning
|
||||
batchSignPolicyFile: '$(build.sourcesdirectory)\.pipelines\ESRPSigning_installer.json'
|
||||
ciPolicyFile: '$(build.sourcesdirectory)\.pipelines\CIPolicy.xml'
|
||||
|
||||
## INSTALLER START
|
||||
#### MSI BUILDING AND SIGNING
|
||||
- task: VSBuild@1
|
||||
displayName: Build MSI
|
||||
inputs:
|
||||
solution: "**/installer/PowerToysSetup.sln"
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: /p:CIBuild=true /p:BuildProjectReferences=false /target:PowerToysInstaller /bl:$(Build.SourcesDirectory)\msbuild.binlog /p:RunBuildEvents=false /p:PerUser=${{parameters.perUserArg}}
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
clean: false # don't undo our hard work above by deleting the CustomActions dll
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: CmdLine@2
|
||||
displayName: "Extracting MSI to verify contents"
|
||||
inputs:
|
||||
script: |
|
||||
"C:\Program Files (x86)\WiX Toolset v3.14\bin\dark.exe" -x $(build.sourcesdirectory)\extractedMsi installer\PowerToysSetup\$(BuildPlatform)\$(BuildConfiguration)\${{parameters.buildSubDir}}\${{parameters.installerPrefix}}-${{ parameters.versionNumber }}-$(BuildPlatform).msi
|
||||
dir $(build.sourcesdirectory)\extractedMsi
|
||||
|
||||
# Check if deps.json files don't reference different dll versions.
|
||||
- task: PowerShell@2
|
||||
displayName: Audit deps.json in MSI extracted files
|
||||
inputs:
|
||||
filePath: '.pipelines/verifyDepsJsonLibraryVersions.ps1'
|
||||
arguments: -targetDir '$(build.sourcesdirectory)\extractedMsi\File'
|
||||
pwsh: true
|
||||
|
||||
# Did we sign all files
|
||||
- task: PowerShell@1
|
||||
displayName: Verifying entire build is signed and version set
|
||||
inputs:
|
||||
scriptName: .pipelines/versionAndSignCheck.ps1
|
||||
arguments: -targetDir '$(build.sourcesdirectory)\extractedMsi\File'
|
||||
|
||||
- task: PowerShell@1
|
||||
displayName: Verifying MSI Custom Actions DLL is signed
|
||||
inputs:
|
||||
scriptName: .pipelines/versionAndSignCheck.ps1
|
||||
arguments: -targetDir '$(build.sourcesdirectory)\extractedMsi\Binary'
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5
|
||||
displayName: Sign MSI
|
||||
inputs:
|
||||
${{ insert }}: ${{ parameters.signingParameters }}
|
||||
FolderPath: 'installer/PowerToysSetup/$(BuildPlatform)\$(BuildConfiguration)\${{parameters.buildSubDir}}'
|
||||
signType: batchSigning
|
||||
batchSignPolicyFile: '$(build.sourcesdirectory)\.pipelines\ESRPSigning_installer.json'
|
||||
ciPolicyFile: '$(build.sourcesdirectory)\.pipelines\CIPolicy.xml'
|
||||
#### END MSI
|
||||
#### BOOTSTRAP BUILDING AND SIGNING
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Build Bootstrapper
|
||||
inputs:
|
||||
solution: "**/installer/PowerToysSetup.sln"
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: /p:CIBuild=true /bl:$(Build.SourcesDirectory)\msbuild.binlog /t:PowerToysBootstrapper /p:PerUser=${{parameters.perUserArg}}
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
clean: false # don't undo our hard work above by deleting the MSI
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: CmdLine@2
|
||||
displayName: "Insignia: Extract Engine from Bundle"
|
||||
inputs:
|
||||
script: '"C:\Program Files (x86)\WiX Toolset v3.14\bin\insignia.exe" -ib installer\PowerToysSetup\$(BuildPlatform)\$(BuildConfiguration)\${{parameters.buildSubDir}}\${{parameters.installerPrefix}}-${{ parameters.versionNumber }}-$(BuildPlatform).exe -o installer\engine.exe'
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5
|
||||
displayName: "ESRP CodeSigning (Engine)"
|
||||
inputs:
|
||||
${{ insert }}: ${{ parameters.signingParameters }}
|
||||
FolderPath: "installer"
|
||||
Pattern: engine.exe
|
||||
signConfigType: inlineSignParams
|
||||
inlineOperation: |
|
||||
[
|
||||
{
|
||||
"KeyCode": "CP-230012",
|
||||
"OperationCode": "SigntoolSign",
|
||||
"Parameters": {
|
||||
"OpusName": "Microsoft",
|
||||
"OpusInfo": "http://www.microsoft.com",
|
||||
"FileDigest": "/fd \"SHA256\"",
|
||||
"PageHash": "/NPH",
|
||||
"TimeStamp": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
|
||||
},
|
||||
"ToolName": "sign",
|
||||
"ToolVersion": "1.0"
|
||||
},
|
||||
{
|
||||
"KeyCode": "CP-230012",
|
||||
"OperationCode": "SigntoolVerify",
|
||||
"Parameters": {},
|
||||
"ToolName": "sign",
|
||||
"ToolVersion": "1.0"
|
||||
}
|
||||
]
|
||||
|
||||
- task: CmdLine@2
|
||||
displayName: "Insignia: Merge Engine into Bundle"
|
||||
inputs:
|
||||
script: '"C:\Program Files (x86)\WiX Toolset v3.14\bin\insignia.exe" -ab installer\engine.exe installer\PowerToysSetup\$(BuildPlatform)\$(BuildConfiguration)\${{parameters.buildSubDir}}\${{parameters.installerPrefix}}-${{ parameters.versionNumber }}-$(BuildPlatform).exe -o installer\PowerToysSetup\$(BuildPlatform)\$(BuildConfiguration)\${{parameters.buildSubDir}}\${{parameters.installerPrefix}}-${{ parameters.versionNumber }}-$(BuildPlatform).exe'
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5
|
||||
displayName: Sign Bootstrapper
|
||||
inputs:
|
||||
${{ insert }}: ${{ parameters.signingParameters }}
|
||||
FolderPath: 'installer/PowerToysSetup/$(BuildPlatform)\$(BuildConfiguration)\${{parameters.buildSubDir}}'
|
||||
signType: batchSigning
|
||||
batchSignPolicyFile: '$(build.sourcesdirectory)\.pipelines\ESRPSigning_installer.json'
|
||||
ciPolicyFile: '$(build.sourcesdirectory)\.pipelines\CIPolicy.xml'
|
||||
#### END BOOTSTRAP
|
||||
## END INSTALLER
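To recap the bundle-signing flow above: insignia detaches the Burn engine from the bootstrapper, the engine is signed, insignia reattaches it, and the reassembled bundle is signed last. Condensed into plain commands (the bundle file name is a placeholder; the actual signing is done by the ESRP tasks and is only indicated in comments), it looks roughly like this:
$insignia = "C:\Program Files (x86)\WiX Toolset v3.14\bin\insignia.exe"
# Detach the Burn engine from the (still unsigned) bundle.
& $insignia -ib .\PowerToysSetup-x64.exe -o .\engine.exe
# ... sign engine.exe (ESRP) ...
# Reattach the signed engine, overwriting the bundle in place.
& $insignia -ab .\engine.exe .\PowerToysSetup-x64.exe -o .\PowerToysSetup-x64.exe
# ... sign the reassembled bundle (ESRP) ...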
|
||||
@@ -25,16 +25,16 @@ steps:
|
||||
fetchDepth: 1 # Don't need a deep checkout for loc files!
|
||||
persistCredentials: true
|
||||
|
||||
- task: MicrosoftTDBuild.tdbuild-task.tdbuild-task.TouchdownBuildTask@5
|
||||
- task: MicrosoftTDBuild.tdbuild-task.tdbuild-task.TouchdownBuildTask@3
|
||||
displayName: 'Touchdown Build - 37400, PRODEXT'
|
||||
inputs:
|
||||
teamId: 37400
|
||||
FederatedIdentityTDBuildServiceConnection: $(TouchdownServiceConnection)
|
||||
authType: FederatedIdentityTDBuild
|
||||
TDBuildServiceConnection: $(TouchdownServiceConnection)
|
||||
authType: SubjectNameIssuer
|
||||
resourceFilePath: |
|
||||
src\**\Resources.resx
|
||||
src\**\Resource.resx
|
||||
src\**\Resources.resw
|
||||
**\Resources.resx
|
||||
**\Resource.resx
|
||||
**\Resources.resw
|
||||
outputDirectoryRoot: LocOutput
|
||||
appendRelativeDir: true
|
||||
pseudoSetting: Included
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<packages>
|
||||
<package id="Microsoft.PowerToys.Telemetry" version="2.0.4" />
|
||||
<package id="Microsoft.PowerToys.Telemetry" version="2.0" />
|
||||
</packages>
|
||||
|
||||
551
.pipelines/release.yml
Normal file
@@ -0,0 +1,551 @@
|
||||
# This build should never run as CI or against a pull request.
|
||||
name: $(BuildDefinitionName)_$(date:yyMM).$(date:dd)$(rev:rrr)
|
||||
trigger: none
|
||||
pr: none
|
||||
|
||||
resources:
|
||||
repositories:
|
||||
- repository: 1ESPipelineTemplates
|
||||
type: git
|
||||
name: 1ESPipelineTemplates/1ESPipelineTemplates
|
||||
ref: refs/tags/release
|
||||
|
||||
parameters:
|
||||
- name: buildConfigurations
|
||||
type: object
|
||||
default:
|
||||
- Release
|
||||
- name: buildPlatforms
|
||||
type: object
|
||||
default:
|
||||
- x64
|
||||
- arm64
|
||||
- name: versionNumber
|
||||
type: string
|
||||
default: '0.0.1'
|
||||
- name: signingParameters
|
||||
type: object
|
||||
default:
|
||||
ConnectedServiceName: $(SigningServiceName)
|
||||
AppRegistrationClientId: $(SigningAppId)
|
||||
AppRegistrationTenantId: $(SigningTenantId)
|
||||
AuthAKVName: $(SigningAKVName)
|
||||
AuthCertName: $(SigningAuthCertName)
|
||||
AuthSignCertName: $(SigningSignCertName)
|
||||
|
||||
extends:
|
||||
template: v1/1ES.Official.PipelineTemplate.yml@1ESPipelineTemplates
|
||||
parameters:
|
||||
customBuildTags:
|
||||
- 1ES.PT.ViaStartRight
|
||||
pool:
|
||||
name: SHINE-INT-S
|
||||
image: SHINE-VS17-Latest
|
||||
os: windows
|
||||
sdl:
|
||||
tsa:
|
||||
enabled: true
|
||||
configFile: '$(Build.SourcesDirectory)\.pipelines\tsa.json'
|
||||
|
||||
stages:
|
||||
- stage: build
|
||||
displayName: Build (Complete)
|
||||
pool:
|
||||
name: SHINE-INT-L
|
||||
image: SHINE-VS17-Latest
|
||||
os: windows
|
||||
jobs:
|
||||
- job: Build
|
||||
strategy:
|
||||
matrix:
|
||||
${{ each config in parameters.buildConfigurations }}:
|
||||
${{ each platform in parameters.buildPlatforms }}:
|
||||
${{ config }}_${{ platform }}:
|
||||
BuildConfiguration: ${{ config }}
|
||||
BuildPlatform: ${{ platform }}
|
||||
templateContext:
|
||||
outputs:
|
||||
- output: pipelineArtifact
|
||||
artifactName: setup-$(BuildPlatform)
|
||||
targetPath: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Build
|
||||
timeoutInMinutes: 240 # Some of the 1ES Pipeline stuff and Loc take a very long time
|
||||
cancelTimeoutInMinutes: 1
|
||||
variables:
|
||||
NUGET_RESTORE_MSBUILD_ARGS: /p:Platform=$(BuildPlatform) # Required for nuget to work due to self contained
|
||||
NODE_OPTIONS: --max_old_space_size=16384
|
||||
IsPipeline: 1 # The installer uses this to detect whether it should pick up localizations
|
||||
SkipCppCodeAnalysis: 1 # Skip the code analysis to speed up release CI. It runs on PR CI, anyway
|
||||
# IsExperimentationLive: 1 # The build and installer use this to turn on experimentation
|
||||
steps:
|
||||
- checkout: self
|
||||
clean: true
|
||||
submodules: true
|
||||
persistCredentials: True
|
||||
|
||||
# Sets versions for all PowerToy created DLLs
|
||||
- task: PowerShell@1
|
||||
displayName: Set Versions.Prop
|
||||
inputs:
|
||||
scriptName: .pipelines/versionSetting.ps1
|
||||
arguments: -versionNumber '${{ parameters.versionNumber }}' -DevEnvironment ''
|
||||
|
||||
# ESRP needs 'Microsoft.NETCore.App', version '6.0.0' (x64)
|
||||
- task: UseDotNet@2
|
||||
displayName: 'Use .NET 6 SDK'
|
||||
inputs:
|
||||
packageType: sdk
|
||||
version: '6.x'
|
||||
|
||||
- task: UseDotNet@2
|
||||
displayName: 'Use .NET 8 SDK'
|
||||
inputs:
|
||||
packageType: sdk
|
||||
version: '8.x'
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Verify and set latest VCToolsVersion usage
|
||||
inputs:
|
||||
filePath: '$(build.sourcesdirectory)\.pipelines\verifyAndSetLatestVCToolsVersion.ps1'
|
||||
pwsh: true
|
||||
|
||||
- task: NuGetAuthenticate@1
|
||||
|
||||
- task: NuGetToolInstaller@1
|
||||
displayName: Use NuGet Installer latest
|
||||
|
||||
# this will restore the following nugets:
|
||||
# - main solution
|
||||
# - Bug report tool
|
||||
# - Webcam report tool
|
||||
# - Installer
|
||||
# - Bootstrapper Installer
|
||||
- task: PowerShell@2
|
||||
displayName: Download and install WiX 3.14 development build
|
||||
inputs:
|
||||
targetType: filePath
|
||||
filePath: '$(build.sourcesdirectory)\.pipelines\installWiX.ps1'
|
||||
|
||||
- task: MicrosoftTDBuild.tdbuild-task.tdbuild-task.TouchdownBuildTask@3
|
||||
displayName: 'Download Localization Files -- PowerToys 37400'
|
||||
inputs:
|
||||
teamId: 37400
|
||||
TDBuildServiceConnection: $(TouchdownServiceConnection)
|
||||
authType: SubjectNameIssuer
|
||||
resourceFilePath: |
|
||||
**\Resources.resx
|
||||
**\Resource.resx
|
||||
**\Resources.resw
|
||||
appendRelativeDir: true
|
||||
localizationTarget: false
|
||||
# pseudoSetting: Included
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Move Loc files into correct locations
|
||||
inputs:
|
||||
targetType: inline
|
||||
script: >-
|
||||
$VerbosePreference = "Continue"
|
||||
|
||||
./tools/build/move-and-rename-resx.ps1
|
||||
|
||||
./tools/build/move-uwp-resw.ps1
|
||||
pwsh: true
|
||||
|
||||
- task: CmdLine@2
|
||||
displayName: Moving telem files
|
||||
inputs:
|
||||
script: |
|
||||
call nuget.exe restore -configFile .pipelines/release-nuget.config -PackagesDirectory . .pipelines/packages.config || exit /b 1
|
||||
move /Y "Microsoft.PowerToys.Telemetry.2.0.0\build\include\TraceLoggingDefines.h" "src\common\Telemetry\TraceLoggingDefines.h" || exit /b 1
|
||||
move /Y "Microsoft.PowerToys.Telemetry.2.0.0\build\include\TelemetryBase.cs" "src\common\Telemetry\TelemetryBase.cs" || exit /b 1
|
||||
|
||||
## ALL BUT INSTALLER BUILDING
|
||||
- task: VSBuild@1
|
||||
displayName: Build PowerToys main project
|
||||
inputs:
|
||||
solution: '**\PowerToys.sln'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: -restore -graph /p:RestorePackagesConfig=true /p:RestoreConfigFile="$(Build.SourcesDirectory)\.pipelines\release-nuget.config" /p:CIBuild=true /bl:$(Build.SourcesDirectory)\msbuild.binlog
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
clean: true
|
||||
maximumCpuCount: true
|
||||
|
||||
### BEGIN SECTION - build and sign nuget packages for abstracted UI utils
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5
|
||||
displayName: Sign Utilities libraries
|
||||
inputs:
|
||||
${{ insert }}: ${{ parameters.signingParameters }}
|
||||
FolderPath: 'src/modules'
|
||||
signType: batchSigning
|
||||
batchSignPolicyFile: '$(build.sourcesdirectory)\.pipelines\ESRPSigning_abstracted_utils_dll.json'
|
||||
ciPolicyFile: '$(build.sourcesdirectory)\.pipelines\CIPolicy.xml'
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Create Hosts File Editor package
|
||||
inputs:
|
||||
solution: '**\HostsUILib.csproj'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: /p:CIBuild=true -t:pack /bl:$(Build.SourcesDirectory)\msbuild.binlog
|
||||
configuration: $(BuildConfiguration)
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Create Environment Variables Editor package
|
||||
inputs:
|
||||
solution: '**\EnvironmentVariablesUILib.csproj'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: /p:CIBuild=true -t:pack /bl:$(Build.SourcesDirectory)\msbuild.binlog
|
||||
configuration: $(BuildConfiguration)
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Create Registry Preview package
|
||||
inputs:
|
||||
solution: '**\RegistryPreviewUILib.csproj'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: /p:CIBuild=true -t:pack /bl:$(Build.SourcesDirectory)\msbuild.binlog
|
||||
configuration: $(BuildConfiguration)
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: CopyFiles@2
|
||||
displayName: Copying nuget packages file over
|
||||
inputs:
|
||||
contents: "**/bin/Release/PowerToys*.nupkg"
|
||||
flattenFolders: True
|
||||
targetFolder: $(Build.ArtifactStagingDirectory)/nupkg
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5
|
||||
displayName: Submit *.nupkg to ESRP for code signing
|
||||
inputs:
|
||||
${{ insert }}: ${{ parameters.signingParameters }}
|
||||
FolderPath: $(Build.ArtifactStagingDirectory)/nupkg
|
||||
Pattern: '*.nupkg'
|
||||
UseMinimatch: true
|
||||
signConfigType: inlineSignParams
|
||||
inlineOperation: >-
|
||||
[
|
||||
{
|
||||
"KeyCode": "CP-401405",
|
||||
"OperationCode": "NuGetSign",
|
||||
"Parameters": {},
|
||||
"ToolName": "sign",
|
||||
"ToolVersion": "1.0"
|
||||
},
|
||||
{
|
||||
"KeyCode": "CP-401405",
|
||||
"OperationCode": "NuGetVerify",
|
||||
"Parameters": {},
|
||||
"ToolName": "sign",
|
||||
"ToolVersion": "1.0"
|
||||
}
|
||||
]
|
||||
### END SECTION - build and sign nuget packages for abstracted UI utils
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Build BugReportTool
|
||||
inputs:
|
||||
solution: '**/tools/BugReportTool/BugReportTool.sln'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: -restore -graph /p:RestorePackagesConfig=true /p:RestoreConfigFile="$(Build.SourcesDirectory)\.pipelines\release-nuget.config" /p:CIBuild=true /bl:$(Build.SourcesDirectory)\msbuild.binlog
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
clean: true
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Build WebcamReportTool
|
||||
inputs:
|
||||
solution: '**/tools/WebcamReportTool/WebcamReportTool.sln'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: -restore -graph /p:RestorePackagesConfig=true /p:RestoreConfigFile="$(Build.SourcesDirectory)\.pipelines\release-nuget.config" /p:CIBuild=true /bl:$(Build.SourcesDirectory)\msbuild.binlog
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
clean: true
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Build StylesReportTool
|
||||
inputs:
|
||||
solution: '**/tools/StylesReportTool/StylesReportTool.sln'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: -restore -graph /p:RestorePackagesConfig=true /p:RestoreConfigFile="$(Build.SourcesDirectory)\.pipelines\release-nuget.config" /p:CIBuild=true /bl:$(Build.SourcesDirectory)\msbuild.binlog
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
clean: true
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Publish Settings for Packaging
|
||||
inputs:
|
||||
solution: 'src/settings-ui/Settings.UI/PowerToys.Settings.csproj'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: >-
|
||||
/target:Publish
|
||||
/graph
|
||||
/p:Configuration=$(BuildConfiguration);Platform=$(BuildPlatform);AppxBundle=Never
|
||||
/p:VCRTForwarders-IncludeDebugCRT=false
|
||||
/p:PowerToysRoot=$(Build.SourcesDirectory)
|
||||
/p:PublishProfile=InstallationPublishProfile.pubxml
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Publish Launcher for Packaging
|
||||
inputs:
|
||||
solution: 'src/modules/launcher/PowerLauncher/PowerLauncher.csproj'
|
||||
vsVersion: 17.0
|
||||
# The arguments should be the same as the ones for Settings; make sure they are.
|
||||
msbuildArgs: >-
|
||||
/target:Publish
|
||||
/graph
|
||||
/p:Configuration=$(BuildConfiguration);Platform=$(BuildPlatform);AppxBundle=Never
|
||||
/p:VCRTForwarders-IncludeDebugCRT=false
|
||||
/p:PowerToysRoot=$(Build.SourcesDirectory)
|
||||
/p:PublishProfile=InstallationPublishProfile.pubxml
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Publish Monaco Preview Handler for Packaging
|
||||
inputs:
|
||||
solution: 'src/modules/previewpane/MonacoPreviewHandler/MonacoPreviewHandler.csproj'
|
||||
vsVersion: 17.0
|
||||
# The arguments should be the same as the ones for Settings; make sure they are.
|
||||
msbuildArgs: >-
|
||||
/target:Publish
|
||||
/graph
|
||||
/p:Configuration=$(BuildConfiguration);Platform=$(BuildPlatform);AppxBundle=Never
|
||||
/p:VCRTForwarders-IncludeDebugCRT=false
|
||||
/p:PowerToysRoot=$(Build.SourcesDirectory)
|
||||
/p:PublishProfile=InstallationPublishProfile.pubxml
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Publish Markdown Preview Handler for Packaging
|
||||
inputs:
|
||||
solution: 'src/modules/previewpane/MarkdownPreviewHandler/MarkdownPreviewHandler.csproj'
|
||||
vsVersion: 17.0
|
||||
# The arguments should be the same as the ones for Settings; make sure they are.
|
||||
msbuildArgs: >-
|
||||
/target:Publish
|
||||
/graph
|
||||
/p:Configuration=$(BuildConfiguration);Platform=$(BuildPlatform);AppxBundle=Never
|
||||
/p:VCRTForwarders-IncludeDebugCRT=false
|
||||
/p:PowerToysRoot=$(Build.SourcesDirectory)
|
||||
/p:PublishProfile=InstallationPublishProfile.pubxml
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Publish Svg Preview Handler for Packaging
|
||||
inputs:
|
||||
solution: 'src/modules/previewpane/SvgPreviewHandler/SvgPreviewHandler.csproj'
|
||||
vsVersion: 17.0
|
||||
# The arguments should be the same as the ones for Settings; make sure they are.
|
||||
msbuildArgs: >-
|
||||
/target:Publish
|
||||
/graph
|
||||
/p:Configuration=$(BuildConfiguration);Platform=$(BuildPlatform);AppxBundle=Never
|
||||
/p:VCRTForwarders-IncludeDebugCRT=false
|
||||
/p:PowerToysRoot=$(Build.SourcesDirectory)
|
||||
/p:PublishProfile=InstallationPublishProfile.pubxml
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Publish Svg Thumbnail Provider for Packaging
|
||||
inputs:
|
||||
solution: 'src/modules/previewpane/SvgThumbnailProvider/SvgThumbnailProvider.csproj'
|
||||
vsVersion: 17.0
|
||||
# The arguments should be the same as the ones for Settings; make sure they are.
|
||||
msbuildArgs: >-
|
||||
/target:Publish
|
||||
/graph
|
||||
/p:Configuration=$(BuildConfiguration);Platform=$(BuildPlatform);AppxBundle=Never
|
||||
/p:VCRTForwarders-IncludeDebugCRT=false
|
||||
/p:PowerToysRoot=$(Build.SourcesDirectory)
|
||||
/p:PublishProfile=InstallationPublishProfile.pubxml
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
maximumCpuCount: true
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Publish File Locksmith UI for Packaging
|
||||
inputs:
|
||||
solution: 'src/modules/FileLocksmith/FileLocksmithUI/FileLocksmithUI.csproj'
|
||||
vsVersion: 17.0
|
||||
# The arguments should be the same as the ones for Settings; make sure they are.
|
||||
msbuildArgs: >-
|
||||
/target:Publish
|
||||
/graph
|
||||
/p:Configuration=$(BuildConfiguration);Platform=$(BuildPlatform);AppxBundle=Never
|
||||
/p:VCRTForwarders-IncludeDebugCRT=false
|
||||
/p:PowerToysRoot=$(Build.SourcesDirectory)
|
||||
/p:PublishProfile=InstallationPublishProfile.pubxml
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
maximumCpuCount: true
|
||||
|
||||
# Check if deps.json files don't reference different dll versions.
|
||||
- task: PowerShell@2
|
||||
displayName: Audit deps.json files for all applications
|
||||
inputs:
|
||||
filePath: '.pipelines/verifyDepsJsonLibraryVersions.ps1'
|
||||
arguments: -targetDir '$(build.sourcesdirectory)\$(BuildPlatform)\$(BuildConfiguration)'
|
||||
pwsh: true
|
||||
|
||||
# Check if asset files on the main application paths are playing nice and avoiding basic conflicts.
|
||||
- task: PowerShell@2
|
||||
displayName: Audit base applications path asset conflicts
|
||||
inputs:
|
||||
filePath: '.pipelines/verifyPossibleAssetConflicts.ps1'
|
||||
arguments: -targetDir '$(build.sourcesdirectory)\$(BuildPlatform)\$(BuildConfiguration)'
|
||||
pwsh: true
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Audit WinAppSDK applications path asset conflicts
|
||||
inputs:
|
||||
filePath: '.pipelines/verifyPossibleAssetConflicts.ps1'
|
||||
arguments: -targetDir '$(build.sourcesdirectory)\$(BuildPlatform)\$(BuildConfiguration)\WinUI3Apps'
|
||||
pwsh: true
|
||||
|
||||
#### MAIN SIGNING AREA
|
||||
# reference https://dev.azure.com/microsoft/Dart/_git/AppDriver?path=/ESRPSigning.json&version=GBarm64-netcore&_a=contents for winappdriver
|
||||
# https://dev.azure.com/microsoft/Dart/_git/AppDriver?path=/CIPolicy.xml&version=GBarm64-netcore&_a=contents
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5
|
||||
displayName: Sign Core PT
|
||||
inputs:
|
||||
${{ insert }}: ${{ parameters.signingParameters }}
|
||||
FolderPath: '$(BuildPlatform)/$(BuildConfiguration)' # Video conf uses x86 and x64.
|
||||
signType: batchSigning
|
||||
batchSignPolicyFile: '$(build.sourcesdirectory)\.pipelines\ESRPSigning_core.json'
|
||||
ciPolicyFile: '$(build.sourcesdirectory)\.pipelines\CIPolicy.xml'
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5
|
||||
displayName: Sign DSC Powershell files
|
||||
inputs:
|
||||
${{ insert }}: ${{ parameters.signingParameters }}
|
||||
FolderPath: 'src/dsc/Microsoft.PowerToys.Configure'
|
||||
signType: batchSigning
|
||||
batchSignPolicyFile: '$(build.sourcesdirectory)\.pipelines\ESRPSigning_DSC.json'
|
||||
ciPolicyFile: '$(build.sourcesdirectory)\.pipelines\CIPolicy.xml'
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5
|
||||
displayName: Sign x86 directshow VCM
|
||||
inputs:
|
||||
${{ insert }}: ${{ parameters.signingParameters }}
|
||||
FolderPath: 'x86/$(BuildConfiguration)' # Video conf uses x86 and x64.
|
||||
signType: batchSigning
|
||||
batchSignPolicyFile: '$(build.sourcesdirectory)\.pipelines\ESRPSigning_vcm.json'
|
||||
ciPolicyFile: '$(build.sourcesdirectory)\.pipelines\CIPolicy.xml'
|
||||
#### END SIGNING
|
||||
## END MAIN
|
||||
|
||||
- pwsh: |-
|
||||
Move-Item msbuild.binlog "$(Build.ArtifactStagingDirectory)/"
|
||||
displayName: Stage binlog into artifact directory
|
||||
condition: always()
|
||||
|
||||
- task: ComponentGovernanceComponentDetection@0
|
||||
displayName: Component Detection
|
||||
|
||||
- task: CopyFiles@2
|
||||
displayName: Copying files for symbols
|
||||
inputs:
|
||||
contents: >-
|
||||
**/*.pdb
|
||||
flattenFolders: True
|
||||
targetFolder: $(Build.ArtifactStagingDirectory)/Symbols-$(BuildPlatform)/
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: 'Remove unneeded files from ArtifactStagingDirectory'
|
||||
inputs:
|
||||
targetType: 'inline'
|
||||
script: |
|
||||
cd $(Build.ArtifactStagingDirectory)/Symbols-$(BuildPlatform)/
|
||||
Remove-Item vc143.pdb
|
||||
Remove-Item *test*
|
||||
|
||||
- task: PublishSymbols@2
|
||||
displayName: Publish symbols path
|
||||
continueOnError: True
|
||||
inputs:
|
||||
SearchPattern: |
|
||||
$(Build.ArtifactStagingDirectory)/Symbols-$(BuildPlatform)/**/*.*
|
||||
IndexSources: false
|
||||
SymbolServerType: TeamServices
|
||||
|
||||
- template: .pipelines/installer-steps.yml@self
|
||||
parameters:
|
||||
signingParameters: ${{ parameters.signingParameters }}
|
||||
versionNumber: ${{ parameters.versionNumber }}
|
||||
perUserArg: "false"
|
||||
buildSubDir: "MachineSetup"
|
||||
installerPrefix: "PowerToysSetup"
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Clean installer dir before building per-user installer
|
||||
inputs:
|
||||
targetType: inline
|
||||
script: git clean -xfd -e *exe -- .\installer\
|
||||
pwsh: true
|
||||
|
||||
- template: .pipelines/installer-steps.yml@self
|
||||
parameters:
|
||||
signingParameters: ${{ parameters.signingParameters }}
|
||||
versionNumber: ${{ parameters.versionNumber }}
|
||||
perUserArg: "true"
|
||||
buildSubDir: "UserSetup"
|
||||
installerPrefix: "PowerToysUserSetup"
|
||||
|
||||
- task: CopyFiles@2
|
||||
displayName: Copying setup file over
|
||||
inputs:
|
||||
contents: "**/PowerToys*Setup-*.exe"
|
||||
flattenFolders: True
|
||||
targetFolder: $(Build.ArtifactStagingDirectory)
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: 'Calculating SHA256 hash'
|
||||
inputs:
|
||||
targetType: 'inline'
|
||||
script: |
|
||||
$p = "$(System.ArtifactsDirectory)\";
|
||||
$staging = "$(Build.ArtifactStagingDirectory)\"
|
||||
$userHash = ((get-item $p\PowerToysUserSetup*.exe | Get-FileHash).Hash);
|
||||
$machineHash = ((get-item $p\PowerToysSetup*.exe | Get-FileHash).Hash);
|
||||
$userPlat = "hash_user_$(BuildPlatform).txt";
|
||||
$machinePlat = "hash_machine_$(BuildPlatform).txt";
|
||||
$combinedUserPath = $staging + $userPlat;
|
||||
$combinedMachinePath = $staging + $machinePlat;
|
||||
|
||||
echo $p
|
||||
|
||||
echo $userPlat
|
||||
echo $userHash
|
||||
echo $combinedUserPath
|
||||
|
||||
echo $machinePlat
|
||||
echo $machineHash
|
||||
echo $combinedMachinePath
|
||||
|
||||
$userHash | out-file -filepath $combinedUserPath
|
||||
$machineHash | out-file -filepath $combinedMachinePath
|
||||
pwsh: true
|
||||
|
||||
# Publishing the GPO files
|
||||
- pwsh: |-
|
||||
New-Item "$(Build.ArtifactStagingDirectory)/gpo" -Type Directory
|
||||
Copy-Item src\gpo\assets\* "$(Build.ArtifactStagingDirectory)/gpo" -Recurse
|
||||
displayName: Stage the GPO files
|
||||
|
||||
...
|
||||
@@ -3,5 +3,5 @@
|
||||
"notificationAliases": ["powertoys@microsoft.com"],
|
||||
"instanceUrl": "https://microsoft.visualstudio.com",
|
||||
"projectName": "OS",
|
||||
"areaPath": "OS\\Windows Client and Services\\WinPD\\DFX-Developer Fundamentals and Experiences\\DEFT\\PowerToys"
|
||||
"areaPath": "OS\\Windows Client and Services\\ADEPT\\E4D-Engineered for Developers\\SHINE\\PowerToys"
|
||||
}
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
# .pipelines/v2/nightly-prewarm.yml
|
||||
# Nightly pre-warm that reuses your existing ci.yml as-is
|
||||
|
||||
trigger: none
|
||||
pr: none
|
||||
|
||||
# (18:00 UTC) — adjust as you like
|
||||
schedules:
|
||||
- cron: "0 18 * * *" # UTC
|
||||
displayName: Nightly pre-warm (main)
|
||||
branches:
|
||||
include:
|
||||
- main
|
||||
always: true
|
||||
|
||||
name: $(BuildDefinitionName)_$(date:yyMM).$(date:dd)$(rev:rrr)
|
||||
|
||||
parameters:
|
||||
- name: buildPlatforms
|
||||
type: object
|
||||
default:
|
||||
- x64
|
||||
- arm64
|
||||
- name: enableMsBuildCaching
|
||||
type: boolean
|
||||
displayName: "Enable MSBuild Caching"
|
||||
default: true
|
||||
- name: msBuildCacheIsReadOnly
|
||||
type: boolean
|
||||
displayName: "MSBuild Cache Read Only"
|
||||
default: false
|
||||
|
||||
extends:
|
||||
template: templates/pipeline-ci-build.yml
|
||||
parameters:
|
||||
buildPlatforms: ${{ parameters.buildPlatforms }}
|
||||
enableMsBuildCaching: ${{ parameters.enableMsBuildCaching }}
|
||||
msBuildCacheIsReadOnly: ${{ parameters.msBuildCacheIsReadOnly }}
|
||||
@@ -1,43 +0,0 @@
|
||||
trigger: none
|
||||
pr: none
|
||||
schedules:
|
||||
- cron: "0 0 * * *" # every day at midnight
|
||||
displayName: "Daily midnight Build"
|
||||
branches:
|
||||
include:
|
||||
- main
|
||||
always: false # only run if there's code changes!
|
||||
|
||||
name: $(BuildDefinitionName)_$(date:yyMM).$(date:dd)$(rev:rrr)
|
||||
|
||||
parameters:
|
||||
- name: buildPlatforms
|
||||
type: object
|
||||
default:
|
||||
- x64
|
||||
- arm64
|
||||
- name: enableMsBuildCaching
|
||||
type: boolean
|
||||
displayName: "Enable MSBuild Caching"
|
||||
default: true
|
||||
- name: runTests
|
||||
type: boolean
|
||||
displayName: "Run Tests"
|
||||
default: true
|
||||
- name: useVSPreview
|
||||
type: boolean
|
||||
displayName: "Build Using Visual Studio Preview"
|
||||
default: false
|
||||
- name: useLatestWebView2
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
extends:
|
||||
template: templates/pipeline-ci-build.yml
|
||||
parameters:
|
||||
buildPlatforms: ${{ parameters.buildPlatforms }}
|
||||
${{ if eq(variables['System.PullRequest.IsFork'], 'False') }}:
|
||||
enableMsBuildCaching: ${{ parameters.enableMsBuildCaching }}
|
||||
runTests: ${{ parameters.runTests }}
|
||||
useVSPreview: ${{ parameters.useVSPreview }}
|
||||
useLatestWebView2: ${{ parameters.useLatestWebView2 }}
|
||||
@@ -1,51 +0,0 @@
|
||||
trigger: none
|
||||
pr: none
|
||||
schedules:
|
||||
- cron: "0 0 * * *" # every day at midnight
|
||||
displayName: "Daily midnight Build"
|
||||
branches:
|
||||
include:
|
||||
- main
|
||||
always: false # only run if there's code changes!
|
||||
|
||||
name: $(BuildDefinitionName)_$(date:yyMM).$(date:dd)$(rev:rrr)
|
||||
|
||||
parameters:
|
||||
- name: buildPlatforms
|
||||
type: object
|
||||
default:
|
||||
- x64
|
||||
- arm64
|
||||
- name: enableMsBuildCaching
|
||||
type: boolean
|
||||
displayName: "Enable MSBuild Caching"
|
||||
default: false
|
||||
- name: runTests
|
||||
type: boolean
|
||||
displayName: "Run Tests"
|
||||
default: true
|
||||
- name: useVSPreview
|
||||
type: boolean
|
||||
displayName: "Build Using Visual Studio Preview"
|
||||
default: false
|
||||
- name: useLatestWinAppSDK
|
||||
type: boolean
|
||||
default: true
|
||||
- name: winAppSDKVersionNumber
|
||||
type: string
|
||||
default: 1.8
|
||||
- name: useExperimentalVersion
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
extends:
|
||||
template: templates/pipeline-ci-build.yml
|
||||
parameters:
|
||||
buildPlatforms: ${{ parameters.buildPlatforms }}
|
||||
${{ if eq(variables['System.PullRequest.IsFork'], 'False') }}:
|
||||
enableMsBuildCaching: ${{ parameters.enableMsBuildCaching }}
|
||||
runTests: ${{ parameters.runTests }}
|
||||
useVSPreview: ${{ parameters.useVSPreview }}
|
||||
useLatestWinAppSDK: ${{ parameters.useLatestWinAppSDK }}
|
||||
winAppSDKVersionNumber: ${{ parameters.winAppSDKVersionNumber }}
|
||||
useExperimentalVersion: ${{ parameters.useExperimentalVersion }}
|
||||
@@ -1,52 +0,0 @@
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- main
|
||||
- stable
|
||||
# paths:
|
||||
# exclude:
|
||||
# - doc/*
|
||||
# - temp/*
|
||||
# - tools/*
|
||||
# - '**.md'
|
||||
|
||||
pr:
|
||||
branches:
|
||||
include:
|
||||
- main
|
||||
- stable
|
||||
# paths:
|
||||
# exclude:
|
||||
# - '**.md'
|
||||
# - doc
|
||||
|
||||
name: $(BuildDefinitionName)_$(date:yyMM).$(date:dd)$(rev:rrr)
|
||||
|
||||
parameters:
|
||||
- name: buildPlatforms
|
||||
type: object
|
||||
default:
|
||||
- x64
|
||||
- arm64
|
||||
- name: enableMsBuildCaching
|
||||
type: boolean
|
||||
displayName: "Enable MSBuild Caching"
|
||||
default: false
|
||||
- name: runTests
|
||||
type: boolean
|
||||
displayName: "Run Tests"
|
||||
default: true
|
||||
- name: useVSPreview
|
||||
type: boolean
|
||||
displayName: "Build Using Visual Studio Preview"
|
||||
default: false
|
||||
|
||||
extends:
|
||||
template: templates/pipeline-ci-build.yml
|
||||
parameters:
|
||||
buildPlatforms: ${{ parameters.buildPlatforms }}
|
||||
${{ if eq(variables['System.PullRequest.IsFork'], 'False') }}:
|
||||
enableMsBuildCaching: ${{ parameters.enableMsBuildCaching }}
|
||||
runTests: ${{ parameters.runTests }}
|
||||
useVSPreview: ${{ parameters.useVSPreview }}
|
||||
@@ -1,56 +0,0 @@
|
||||
pr: none
|
||||
trigger: none
|
||||
|
||||
schedules:
|
||||
- cron: "0 0 * * 1"
|
||||
displayName: Weekly fuzzing submission
|
||||
branches:
|
||||
include:
|
||||
- main
|
||||
always: true
|
||||
name: $(BuildDefinitionName)_$(date:yyMM).$(date:dd)$(rev:rrr)
|
||||
|
||||
parameters:
|
||||
- name: platform
|
||||
type: string
|
||||
default: x64 # for fuzzing, we only use x64 for now
|
||||
- name: enableMsBuildCaching
|
||||
type: boolean
|
||||
displayName: "Enable MSBuild Caching"
|
||||
default: false
|
||||
- name: useVSPreview
|
||||
type: boolean
|
||||
displayName: "Build Using Visual Studio Preview"
|
||||
default: false
|
||||
|
||||
stages:
|
||||
- stage: Build_${{ parameters.platform }}
|
||||
displayName: Build ${{ parameters.platform }}
|
||||
jobs:
|
||||
- template: templates/job-build-project.yml
|
||||
parameters:
|
||||
pool:
|
||||
${{ if eq(variables['System.CollectionId'], 'cb55739e-4afe-46a3-970f-1b49d8ee7564') }}:
|
||||
name: SHINE-INT-L
|
||||
${{ else }}:
|
||||
name: SHINE-OSS-L
|
||||
${{ if eq(parameters.useVSPreview, true) }}:
|
||||
demands: ImageOverride -equals SHINE-VS17-Preview
|
||||
buildPlatforms:
|
||||
- ${{ parameters.platform }}
|
||||
buildConfigurations: [Release]
|
||||
enablePackageCaching: true
|
||||
enableMsBuildCaching: ${{ parameters.enableMsBuildCaching }}
|
||||
runTests: true
|
||||
useVSPreview: ${{ parameters.useVSPreview }}
|
||||
timeoutInMinutes: 90
|
||||
|
||||
- stage: OneFuzz
|
||||
displayName: Fuzz ${{ parameters.platform }}
|
||||
dependsOn:
|
||||
- Build_${{parameters.platform}}
|
||||
jobs:
|
||||
- template: templates/job-fuzz.yml
|
||||
parameters:
|
||||
platform: ${{ parameters.platform }}
|
||||
configuration: Release
|
||||
@@ -1,150 +0,0 @@
|
||||
trigger: none
|
||||
pr: none
|
||||
|
||||
resources:
|
||||
repositories:
|
||||
- repository: 1ESPipelineTemplates
|
||||
type: git
|
||||
name: 1ESPipelineTemplates/1ESPipelineTemplates
|
||||
ref: refs/tags/release
|
||||
|
||||
# Expose all of these parameters for user configuration.
|
||||
parameters:
|
||||
- name: publishSymbolsToPublic
|
||||
displayName: "Publish Symbols to **PUBLIC** (use only for Final Builds)"
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
- name: versionNumber
|
||||
displayName: "Version Number"
|
||||
type: string
|
||||
default: '0.0.1'
|
||||
|
||||
- name: buildConfigurations
|
||||
displayName: "Build Configurations"
|
||||
type: object
|
||||
default:
|
||||
- Release
|
||||
|
||||
- name: buildPlatforms
|
||||
displayName: "Build Platforms"
|
||||
type: object
|
||||
default:
|
||||
- x64
|
||||
- arm64
|
||||
|
||||
- name: useVSPreview
|
||||
type: boolean
|
||||
displayName: "Build Using Visual Studio Preview"
|
||||
default: false
|
||||
|
||||
name: $(BuildDefinitionName)_$(date:yyMM).$(date:dd)$(rev:rrr)
|
||||
|
||||
variables:
|
||||
- template: templates/variables-nuget-package-version.yml
|
||||
|
||||
extends:
|
||||
template: v1/1ES.Official.PipelineTemplate.yml@1ESPipelineTemplates
|
||||
parameters:
|
||||
customBuildTags:
|
||||
- 1ES.PT.ViaStartRight
|
||||
pool:
|
||||
name: SHINE-INT-S
|
||||
${{ if eq(parameters.useVSPreview, true) }}:
|
||||
demands: ImageOverride -equals SHINE-VS17-Preview
|
||||
os: windows
|
||||
sdl:
|
||||
tsa:
|
||||
enabled: true
|
||||
configFile: '$(Build.SourcesDirectory)\.pipelines\tsa.json'
|
||||
binskim:
|
||||
enabled: true
|
||||
# Exclude every dll/exe in tests/*, as well as all msdia*, covrun* and vcruntime*
|
||||
analyzeTargetGlob: +:file|$(Build.ArtifactStagingDirectory)/**/*.dll;+:file|$(Build.ArtifactStagingDirectory)/**/*.exe;-:file:regex|tests.*\.(dll|exe)$;-:file:regex|(covrun.*)\.dll$;-:file:regex|(msdia.*)\.dll$;-:file:regex|(vcruntime.*)\.dll$
|
||||
|
||||
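The binskim analyzeTargetGlob above mixes include globs with regex exclusions, which is easy to get subtly wrong. A rough local sanity check of just the exclusion regexes (not a pipeline step; the sample file names below are made up) could look like this:

```powershell
# Exercise the binskim exclusion regexes from analyzeTargetGlob against sample names.
$samples = @(
    'PowerToys.Settings.dll',          # should be scanned
    'tests\UnitTests-FancyZones.dll',  # excluded: under tests
    'msdia140.dll',                    # excluded
    'covrun64.dll',                    # excluded
    'vcruntime140.dll'                 # excluded
)
$excludePatterns = 'tests.*\.(dll|exe)$', '(covrun.*)\.dll$', '(msdia.*)\.dll$', '(vcruntime.*)\.dll$'

foreach ($file in $samples) {
    $excluded = [bool]($excludePatterns | Where-Object { $file -match $_ })
    '{0,-35} excluded={1}' -f $file, $excluded
}
```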
stages:
|
||||
- stage: Build
|
||||
displayName: Build
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: .pipelines/v2/templates/job-build-project.yml@self
|
||||
parameters:
|
||||
pool:
|
||||
name: SHINE-INT-L
|
||||
demands:
|
||||
# Our INT agents have a large disk mounted at P:\
|
||||
- ${{ if eq(parameters.useVSPreview, true) }}:
|
||||
- ImageOverride -equals SHINE-VS17-Preview
|
||||
os: windows
|
||||
variables:
|
||||
IsPipeline: 1 # The installer uses this to detect whether it should pick up localizations
|
||||
SkipCppCodeAnalysis: 1 # Skip the code analysis to speed up release CI. It runs on PR CI, anyway
|
||||
# IsExperimentationLive: 1 # The build and installer use this to turn on experimentation
|
||||
buildPlatforms: ${{ parameters.buildPlatforms }}
|
||||
buildConfigurations: ${{ parameters.buildConfigurations }}
|
||||
versionNumber: ${{ parameters.versionNumber }}
|
||||
publishArtifacts: false # 1ES PT handles publication for us.
|
||||
official: true
|
||||
codeSign: true
|
||||
runTests: false
|
||||
signingIdentity:
|
||||
serviceName: $(SigningServiceName)
|
||||
appId: $(SigningAppId)
|
||||
tenantId: $(SigningTenantId)
|
||||
akvName: $(SigningAKVName)
|
||||
authCertName: $(SigningAuthCertName)
|
||||
signCertName: $(SigningSignCertName)
|
||||
useManagedIdentity: $(SigningUseManagedIdentity)
|
||||
clientId: $(SigningOriginalClientId)
|
||||
# Have msbuild use the release nuget config profile
|
||||
additionalBuildOptions: /p:RestoreConfigFile="$(Build.SourcesDirectory)\.pipelines\release-nuget.config" /p:EnableCmdPalAOT=true
|
||||
beforeBuildSteps:
|
||||
# Sets versions for all PowerToy created DLLs
|
||||
- pwsh: |-
|
||||
.pipelines/versionSetting.ps1 -versionNumber '${{ parameters.versionNumber }}' -DevEnvironment ''
|
||||
displayName: Prepare versioning
|
||||
|
||||
# Prepare the localizations and telemetry config before the release build
|
||||
- template: .pipelines/v2/templates/steps-fetch-and-prepare-localizations.yml@self
|
||||
|
||||
- pwsh: |-
|
||||
$ErrorActionPreference = 'Stop'
|
||||
$PSNativeCommandUseErrorActionPreference = $true
|
||||
& nuget.exe restore -configFile .pipelines/release-nuget.config -PackagesDirectory . .pipelines/packages.config
|
||||
Move-Item -Force -Verbose "Microsoft.PowerToys.Telemetry.*\build\include\TraceLoggingDefines.h" "src\common\Telemetry\TraceLoggingDefines.h"
|
||||
Move-Item -Force -Verbose "Microsoft.PowerToys.Telemetry.*\build\include\TelemetryBase.cs" "src\common\Telemetry\TelemetryBase.cs"
|
||||
displayName: Emplace telemetry files
|
||||
|
||||
- stage: Build_SDK
|
||||
displayName: Build SDK
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: .pipelines/v2/templates/job-build-sdk.yml@self
|
||||
parameters:
|
||||
pool:
|
||||
name: SHINE-INT-L
|
||||
os: windows
|
||||
official: true
|
||||
codeSign: true
|
||||
signingIdentity:
|
||||
serviceName: $(SigningServiceName)
|
||||
appId: $(SigningAppId)
|
||||
tenantId: $(SigningTenantId)
|
||||
akvName: $(SigningAKVName)
|
||||
authCertName: $(SigningAuthCertName)
|
||||
signCertName: $(SigningSignCertName)
|
||||
useManagedIdentity: $(SigningUseManagedIdentity)
|
||||
clientId: $(SigningOriginalClientId)
|
||||
|
||||
- stage: Publish
|
||||
displayName: Publish
|
||||
dependsOn: [Build]
|
||||
jobs:
|
||||
- template: .pipelines/v2/templates/job-publish-symbols-using-symbolrequestprod-api.yml@self
|
||||
parameters:
|
||||
versionNumber: ${{ parameters.versionNumber }}
|
||||
includePublicSymbolServer: ${{ parameters.publishSymbolsToPublic }}
|
||||
${{ if ne(parameters.publishSymbolsToPublic, true) }}:
|
||||
symbolExpiryTime: 10 # For private builds, expire symbols within 10 days. The default is 100 years.
|
||||
subscription: $(SymbolPublishingServiceConnection)
|
||||
symbolProject: $(SymbolPublishingProject)
|
||||
@@ -1,627 +0,0 @@
|
||||
parameters:
|
||||
- name: additionalBuildOptions
|
||||
type: string
|
||||
default: ''
|
||||
- name: buildConfigurations
|
||||
type: object
|
||||
default:
|
||||
- Release
|
||||
- name: buildPlatforms
|
||||
type: object
|
||||
default:
|
||||
- x64
|
||||
- arm64
|
||||
- name: official
|
||||
type: boolean
|
||||
default: false
|
||||
- name: codeSign
|
||||
type: boolean
|
||||
default: false
|
||||
- name: artifactStem
|
||||
type: string
|
||||
default: ''
|
||||
- name: jobName
|
||||
type: string
|
||||
default: 'Build'
|
||||
- name: condition
|
||||
type: string
|
||||
default: ''
|
||||
- name: dependsOn
|
||||
type: object
|
||||
default: []
|
||||
- name: pool
|
||||
type: object
|
||||
default: []
|
||||
- name: beforeBuildSteps
|
||||
type: stepList
|
||||
default: []
|
||||
- name: variables
|
||||
type: object
|
||||
default: {}
|
||||
- name: publishArtifacts
|
||||
type: boolean
|
||||
default: true
|
||||
- name: signingIdentity
|
||||
type: object
|
||||
default: {}
|
||||
- name: enablePackageCaching
|
||||
type: boolean
|
||||
default: false
|
||||
- name: enableMsBuildCaching
|
||||
type: boolean
|
||||
default: false
|
||||
- name: msBuildCacheIsReadOnly
|
||||
type: boolean
|
||||
default: true
|
||||
- name: runTests
|
||||
type: boolean
|
||||
default: true
|
||||
- name: buildTests
|
||||
type: boolean
|
||||
default: true
|
||||
- name: useVSPreview
|
||||
type: boolean
|
||||
default: false
|
||||
- name: versionNumber
|
||||
type: string
|
||||
default: '0.0.1'
|
||||
- name: useLatestWinAppSDK
|
||||
type: boolean
|
||||
default: false
|
||||
- name: winAppSDKVersionNumber
|
||||
type: string
|
||||
default: 1.6
|
||||
- name: useExperimentalVersion
|
||||
type: boolean
|
||||
default: false
|
||||
- name: csProjectsToPublish
|
||||
type: object
|
||||
default:
|
||||
- 'src/settings-ui/Settings.UI/PowerToys.Settings.csproj'
|
||||
- 'src/modules/launcher/PowerLauncher/PowerLauncher.csproj'
|
||||
- 'src/modules/previewpane/MonacoPreviewHandler/MonacoPreviewHandler.csproj'
|
||||
- 'src/modules/previewpane/MarkdownPreviewHandler/MarkdownPreviewHandler.csproj'
|
||||
- 'src/modules/previewpane/SvgPreviewHandler/SvgPreviewHandler.csproj'
|
||||
- 'src/modules/previewpane/SvgThumbnailProvider/SvgThumbnailProvider.csproj'
|
||||
- 'src/modules/FileLocksmith/FileLocksmithUI/FileLocksmithUI.csproj'
|
||||
- name: timeoutInMinutes
|
||||
type: number
|
||||
default: 240
|
||||
- name: cancelTimeoutInMinutes
|
||||
type: number
|
||||
default: 1
|
||||
|
||||
jobs:
|
||||
- job: ${{ parameters.jobName }}
|
||||
${{ if ne(length(parameters.pool), 0) }}:
|
||||
pool: ${{ parameters.pool }}
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
condition: ${{ parameters.condition }}
|
||||
strategy:
|
||||
matrix:
|
||||
${{ each config in parameters.buildConfigurations }}:
|
||||
${{ each platform in parameters.buildPlatforms }}:
|
||||
${{ config }}_${{ platform }}:
|
||||
BuildConfiguration: ${{ config }}
|
||||
BuildPlatform: ${{ platform }}
|
||||
${{ if eq(platform, 'x86') }}:
|
||||
OutputBuildPlatform: Win32
|
||||
${{ elseif eq(platform, 'Any CPU') }}:
|
||||
OutputBuildPlatform: AnyCPU
|
||||
${{ else }}:
|
||||
OutputBuildPlatform: ${{ platform }}
|
||||
variables:
|
||||
NUGET_PACKAGES: 'C:\NuGetPackages' # Some of our build steps cache these here... and it was apparently part of the global environment
|
||||
MakeAppxPath: 'C:\Program Files (x86)\Windows Kits\10\bin\10.0.26100.0\x86\MakeAppx.exe'
|
||||
# Azure DevOps abhors a vacuum
|
||||
# If these are blank, expansion will fail later on... which will result in direct substitution of the variable *names*
|
||||
# later on. We'll just... set them to a single space and if we need to, check IsNullOrWhiteSpace.
|
||||
# Yup.
|
||||
MSBuildCacheParameters: ' '
|
||||
JobOutputDirectory: $(Build.ArtifactStagingDirectory)
|
||||
LogOutputDirectory: $(Build.ArtifactStagingDirectory)\logs
|
||||
JobOutputArtifactName: build-$(BuildPlatform)-$(BuildConfiguration)${{ parameters.artifactStem }}
|
||||
NUGET_RESTORE_MSBUILD_ARGS: /p:Platform=$(BuildPlatform) # Required for nuget to work due to self contained
|
||||
NODE_OPTIONS: --max_old_space_size=16384
|
||||
${{ if or(eq(parameters.runTests, true), eq(parameters.buildTests, true)) }}:
|
||||
MSBuildMainBuildTargets: Build;Test
|
||||
${{ else }}:
|
||||
MSBuildMainBuildTargets: Build
|
||||
${{ insert }}: ${{ parameters.variables }}
|
||||
${{ if eq(parameters.useLatestWinAppSDK, true) }}:
|
||||
RestoreAdditionalProjectSourcesArg: '/p:RestoreAdditionalProjectSources="$(Build.SourcesDirectory)\localpackages\NugetPackages" /p:IgnoreExperimentalWarnings=true'
|
||||
${{ else }}:
|
||||
RestoreAdditionalProjectSourcesArg: ''
|
||||
displayName: Build
|
||||
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
|
||||
cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
|
||||
templateContext: # Required when this template is hosted in 1ES PT
|
||||
outputs:
|
||||
- output: pipelineArtifact
|
||||
artifactName: $(JobOutputArtifactName)
|
||||
targetPath: $(Build.ArtifactStagingDirectory)
|
||||
- output: pipelineArtifact
|
||||
artifactName: $(JobOutputArtifactName)-failure-$(System.JobAttempt)
|
||||
targetPath: $(LogOutputDirectory)
|
||||
condition: or(failed(), canceled())
|
||||
steps:
|
||||
- checkout: self
|
||||
clean: true
|
||||
submodules: true
|
||||
persistCredentials: True
|
||||
fetchTags: false
|
||||
fetchDepth: 1
|
||||
|
||||
- ${{ if eq(parameters.enableMsBuildCaching, true) }}:
|
||||
- pwsh: |-
|
||||
$MSBuildCacheParameters = ""
|
||||
$MSBuildCacheParameters += " -graph"
|
||||
$MSBuildCacheParameters += " -reportfileaccesses"
|
||||
$MSBuildCacheParameters += " -p:MSBuildCacheEnabled=true"
|
||||
$MSBuildCacheParameters += " -p:MSBuildCacheLogDirectory=$(LogOutputDirectory)\MSBuildCacheLogs"
|
||||
# Cache read-only policy controlled by parameter
|
||||
$cacheIsReadOnly = "${{ parameters.msBuildCacheIsReadOnly }}"
|
||||
if ($cacheIsReadOnly -eq "True") {
|
||||
$MSBuildCacheParameters += " /p:MSBuildCacheRemoteCacheIsReadOnly=true"
|
||||
}
|
||||
Write-Host "MSBuildCacheParameters: $MSBuildCacheParameters"
|
||||
Write-Host "##vso[task.setvariable variable=MSBuildCacheParameters]$MSBuildCacheParameters"
|
||||
displayName: Prepare MSBuildCache variables
|
||||
|
||||
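For reference, with caching enabled and the default read-only remote cache, the step above leaves $(MSBuildCacheParameters) holding roughly the following single string (reconstructed from the script; the log path depends on LogOutputDirectory):

```powershell
# Illustration only: effective MSBuildCacheParameters value when
# enableMsBuildCaching=true and msBuildCacheIsReadOnly=true.
' -graph -reportfileaccesses -p:MSBuildCacheEnabled=true' +
' -p:MSBuildCacheLogDirectory=<LogOutputDirectory>\MSBuildCacheLogs' +
' /p:MSBuildCacheRemoteCacheIsReadOnly=true'
```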
- template: steps-ensure-dotnet-version.yml
|
||||
parameters:
|
||||
sdk: true
|
||||
version: '6.0' # .NET 6.0 is required in CI for ESRP code signing tasks. Please do not remove.
|
||||
|
||||
- template: steps-ensure-dotnet-version.yml
|
||||
parameters:
|
||||
sdk: true
|
||||
version: '8.0'
|
||||
|
||||
- template: steps-ensure-dotnet-version.yml
|
||||
parameters:
|
||||
sdk: true
|
||||
version: '9.0'
|
||||
|
||||
- ${{ if eq(parameters.runTests, true) }}:
|
||||
- task: VisualStudioTestPlatformInstaller@1
|
||||
displayName: Ensure VSTest Platform
|
||||
|
||||
- pwsh: |-
|
||||
& '.pipelines/applyXamlStyling.ps1' -Passive
|
||||
displayName: Verify XAML formatting
|
||||
|
||||
- pwsh: |-
|
||||
& '.pipelines/verifyNugetPackages.ps1' -solution '$(build.sourcesdirectory)\PowerToys.slnx'
|
||||
displayName: Verify Nuget package versions for PowerToys.slnx
|
||||
|
||||
- pwsh: |-
|
||||
& '.pipelines/verifyArm64Configuration.ps1' -solution '$(build.sourcesdirectory)\PowerToys.slnx'
|
||||
& '.pipelines/verifyArm64Configuration.ps1' -solution '$(build.sourcesdirectory)\tools\BugReportTool\BugReportTool.sln'
|
||||
& '.pipelines/verifyArm64Configuration.ps1' -solution '$(build.sourcesdirectory)\tools\StylesReportTool\StylesReportTool.sln'
|
||||
& '.pipelines/verifyArm64Configuration.ps1' -solution '$(build.sourcesdirectory)\installer\PowerToysSetup.slnx'
|
||||
displayName: Verify ARM64 configurations
|
||||
|
||||
- ${{ if eq(parameters.enablePackageCaching, true) }}:
|
||||
- task: Cache@2
|
||||
displayName: 'Cache nuget packages (PackageReference)'
|
||||
inputs:
|
||||
key: '"PackageReference" | "$(Agent.OS)" | Directory.Packages.props'
|
||||
restoreKeys: |
|
||||
"PackageReference" | "$(Agent.OS)"
|
||||
"PackageReference"
|
||||
path: $(NUGET_PACKAGES)
|
||||
|
||||
- task: Cache@2
|
||||
displayName: 'Cache nuget packages (packages.config)'
|
||||
inputs:
|
||||
key: '"packages.config" | "$(Agent.OS)" | **/packages.config'
|
||||
restoreKeys: |
|
||||
"packages.config" | "$(Agent.OS)"
|
||||
"packages.config"
|
||||
path: packages
|
||||
|
||||
- ${{ if eq(parameters.useLatestWinAppSDK, true)}}:
|
||||
- template: .\steps-update-winappsdk-and-restore-nuget.yml
|
||||
parameters:
|
||||
versionNumber: ${{ parameters.winAppSDKVersionNumber }}
|
||||
useExperimentalVersion: ${{ parameters.useExperimentalVersion }}
|
||||
|
||||
- ${{ if eq(parameters.useLatestWinAppSDK, false)}}:
|
||||
- template: .\steps-restore-nuget.yml
|
||||
|
||||
- pwsh: |-
|
||||
& "$(build.sourcesdirectory)\.pipelines\verifyAndSetLatestVCToolsVersion.ps1"
|
||||
displayName: Work around DD-1541167 (VCToolsVersion)
|
||||
${{ if eq(parameters.useVSPreview, true) }}:
|
||||
env:
|
||||
VCWhereExtraVersionTarget: '-prerelease'
|
||||
|
||||
- ${{ if eq(parameters.official, true) }}:
|
||||
- template: .\steps-setup-versioning.yml
|
||||
parameters:
|
||||
directory: $(build.sourcesdirectory)\src\modules\cmdpal
|
||||
|
||||
|
||||
|
||||
- ${{ parameters.beforeBuildSteps }}
|
||||
|
||||
- task: VSBuild@1
|
||||
${{ if eq(parameters.runTests, true) }}:
|
||||
displayName: Build and Test PowerToys main project
|
||||
${{ else }}:
|
||||
displayName: Build PowerToys main project
|
||||
inputs:
|
||||
solution: 'PowerToys.slnx'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: >-
|
||||
-restore -graph
|
||||
/p:RestorePackagesConfig=true
|
||||
/p:CIBuild=true
|
||||
/bl:$(LogOutputDirectory)\build-0-main.binlog
|
||||
${{ parameters.additionalBuildOptions }}
|
||||
$(MSBuildCacheParameters)
|
||||
/t:$(MSBuildMainBuildTargets)
|
||||
$(RestoreAdditionalProjectSourcesArg)
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
msbuildArchitecture: x64
|
||||
maximumCpuCount: true
|
||||
${{ if eq(parameters.enableMsBuildCaching, true) }}:
|
||||
env:
|
||||
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Generate DSC artifacts for ARM64
|
||||
condition: and(succeeded(), eq(variables['BuildPlatform'], 'arm64'))
|
||||
inputs:
|
||||
solution: PowerToys.slnx
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: >-
|
||||
-restore
|
||||
/p:Configuration=$(BuildConfiguration)
|
||||
/p:Platform=x64
|
||||
/t:DSC\PowerToys_Settings_DSC_Schema_Generator
|
||||
/bl:$(LogOutputDirectory)\build-dsc-generator.binlog
|
||||
${{ parameters.additionalBuildOptions }}
|
||||
$(MSBuildCacheParameters)
|
||||
$(RestoreAdditionalProjectSourcesArg)
|
||||
platform: x64
|
||||
configuration: $(BuildConfiguration)
|
||||
msbuildArchitecture: x64
|
||||
maximumCpuCount: true
|
||||
|
||||
# Build PowerToys.DSC.exe for ARM64 (x64 uses existing binary from previous build)
|
||||
- task: VSBuild@1
|
||||
displayName: Build PowerToys.DSC.exe (x64 for generating manifests)
|
||||
condition: and(succeeded(), ne(variables['BuildPlatform'], 'x64'))
|
||||
inputs:
|
||||
solution: src/dsc/v3/PowerToys.DSC/PowerToys.DSC.csproj
|
||||
msbuildArgs: /t:Build /m /restore
|
||||
platform: x64
|
||||
configuration: $(BuildConfiguration)
|
||||
msbuildArchitecture: x64
|
||||
maximumCpuCount: true
|
||||
|
||||
# Generate DSC manifests using PowerToys.DSC.exe
|
||||
- pwsh: |-
|
||||
& '.pipelines/generateDscManifests.ps1' -BuildPlatform '$(BuildPlatform)' -BuildConfiguration '$(BuildConfiguration)' -RepoRoot '$(Build.SourcesDirectory)'
|
||||
displayName: Generate DSC manifests
|
||||
|
||||
- task: CopyFiles@2
|
||||
displayName: Stage SDK/build
|
||||
inputs:
|
||||
contents: |-
|
||||
"**/cmdpal/extensionsdk/nuget/Microsoft.CommandPalette.Extensions.SDK.props"
|
||||
"**/cmdpal/extensionsdk/nuget/Microsoft.CommandPalette.Extensions.SDK.targets"
|
||||
flattenFolders: True
|
||||
targetFolder: $(JobOutputDirectory)/sdk/build
|
||||
|
||||
- task: CopyFiles@2
|
||||
displayName: Stage SDK/lib
|
||||
inputs:
|
||||
contents: |-
|
||||
"**/Microsoft.CommandPalette.Extensions.Toolkit/$(BuildPlatform)/release/WinUI3Apps/CmdPal/Microsoft.CommandPalette.Extensions.Toolkit.dll"
|
||||
"**/Microsoft.CommandPalette.Extensions.Toolkit/$(BuildPlatform)/release/WinUI3Apps/CmdPal/Microsoft.CommandPalette.Extensions.Toolkit.deps.json"
|
||||
flattenFolders: True
|
||||
targetFolder: $(JobOutputDirectory)/sdk/lib/net8.0-windows10.0.19041.0
|
||||
|
||||
- task: CopyFiles@2
|
||||
displayName: Stage SDK/winmd
|
||||
inputs:
|
||||
contents: |-
|
||||
"**/Microsoft.CommandPalette.Extensions/$(BuildPlatform)/release/Microsoft.CommandPalette.Extensions/Microsoft.CommandPalette.Extensions.winmd"
|
||||
flattenFolders: True
|
||||
targetFolder: $(JobOutputDirectory)/sdk/winmd
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Build BugReportTool
|
||||
inputs:
|
||||
solution: '**/tools/BugReportTool/BugReportTool.sln'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: >-
|
||||
-restore -graph
|
||||
/p:RestorePackagesConfig=true
|
||||
/p:CIBuild=true
|
||||
/bl:$(LogOutputDirectory)\build-bug-report.binlog
|
||||
${{ parameters.additionalBuildOptions }}
|
||||
$(MSBuildCacheParameters)
|
||||
$(RestoreAdditionalProjectSourcesArg)
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
msbuildArchitecture: x64
|
||||
maximumCpuCount: true
|
||||
${{ if eq(parameters.enableMsBuildCaching, true) }}:
|
||||
env:
|
||||
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
|
||||
|
||||
- task: VSBuild@1
|
||||
displayName: Build StylesReportTool
|
||||
inputs:
|
||||
solution: '**/tools/StylesReportTool/StylesReportTool.sln'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: >-
|
||||
-restore -graph
|
||||
/p:RestorePackagesConfig=true
|
||||
/p:CIBuild=true
|
||||
/bl:$(LogOutputDirectory)\build-styles-report.binlog
|
||||
${{ parameters.additionalBuildOptions }}
|
||||
$(MSBuildCacheParameters)
|
||||
$(RestoreAdditionalProjectSourcesArg)
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
msbuildArchitecture: x64
|
||||
maximumCpuCount: true
|
||||
${{ if eq(parameters.enableMsBuildCaching, true) }}:
|
||||
env:
|
||||
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
|
||||
|
||||
- ${{ each project in parameters.csProjectsToPublish }}:
|
||||
- task: VSBuild@1
|
||||
displayName: Publish ${{ project }} for Packaging
|
||||
inputs:
|
||||
solution: ${{ project }}
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: >-
|
||||
/target:Publish
|
||||
/graph
|
||||
/p:Configuration=$(BuildConfiguration);Platform=$(BuildPlatform);AppxBundle=Never
|
||||
/p:VCRTForwarders-IncludeDebugCRT=false
|
||||
/p:PowerToysRoot=$(Build.SourcesDirectory)
|
||||
/p:PublishProfile=InstallationPublishProfile.pubxml
|
||||
/p:TargetFramework=net9.0-windows10.0.26100.0
|
||||
/bl:$(LogOutputDirectory)\publish-${{ join('_',split(project, '/')) }}.binlog
|
||||
$(RestoreAdditionalProjectSourcesArg)
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
msbuildArchitecture: x64
|
||||
maximumCpuCount: true
|
||||
|
||||
### HACK: On ARM64 builds, building an app with Windows App SDK copies the x64 WebView2 dll instead of the ARM64 one. This task makes sure the right dll is used.
|
||||
- task: CopyFiles@2
|
||||
displayName: HACK Copy core WebView2 ARM64 dll to output directory
|
||||
condition: and(succeeded(), eq(variables['BuildPlatform'], 'arm64'))
|
||||
inputs:
|
||||
contents: packages/Microsoft.Web.WebView2.1.0.2903.40/runtimes/win-ARM64/native_uap/Microsoft.Web.WebView2.Core.dll
|
||||
targetFolder: $(Build.SourcesDirectory)/ARM64/Release/WinUI3Apps/
|
||||
flattenFolders: True
|
||||
OverWrite: True
|
||||
|
||||
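If the hack above is suspected of not taking effect, the architecture of the copied DLL can be confirmed by reading its PE header directly. This is a local diagnostics sketch, not part of the template; the path assumes the ARM64 Release output layout used here, and Get-PEMachine is a helper defined only for this example.

```powershell
# Read the Machine field of a PE file: 0xAA64 = ARM64, 0x8664 = x64.
function Get-PEMachine([string]$Path) {
    $bytes    = [System.IO.File]::ReadAllBytes($Path)
    $peOffset = [BitConverter]::ToInt32($bytes, 0x3C)   # e_lfanew points at the "PE\0\0" signature
    [BitConverter]::ToUInt16($bytes, $peOffset + 4)     # IMAGE_FILE_HEADER.Machine follows the signature
}

$dll = 'ARM64\Release\WinUI3Apps\Microsoft.Web.WebView2.Core.dll'   # assumed output path
'{0:X4}' -f (Get-PEMachine $dll)   # expect AA64 after the copy above
```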
# Check if all projects (located in src sub-folder) import common props
|
||||
- pwsh: |-
|
||||
& '.pipelines/verifyCommonProps.ps1' -sourceDir '$(build.sourcesdirectory)\src'
|
||||
displayName: Audit shared common props for CSharp projects in src sub-folder
|
||||
|
||||
# Check if deps.json files don't reference different dll versions.
|
||||
- pwsh: |-
|
||||
& '.pipelines/verifyDepsJsonLibraryVersions.ps1' -targetDir '$(build.sourcesdirectory)\$(BuildPlatform)\$(BuildConfiguration)'
|
||||
displayName: Audit deps.json files for all applications
|
||||
|
||||
# Check if asset files on the main application paths are playing nice and avoiding basic conflicts.
|
||||
- pwsh: |-
|
||||
& '.pipelines/verifyPossibleAssetConflicts.ps1' -targetDir '$(build.sourcesdirectory)\$(BuildPlatform)\$(BuildConfiguration)'
|
||||
displayName: Audit base applications path asset conflicts
|
||||
|
||||
- pwsh: |-
|
||||
& '.pipelines/verifyPossibleAssetConflicts.ps1' -targetDir '$(build.sourcesdirectory)\$(BuildPlatform)\$(BuildConfiguration)\WinUI3Apps'
|
||||
displayName: Audit WinAppSDK applications path asset conflicts
|
||||
|
||||
# To streamline the pipeline and prevent errors, skip this step during compatibility tests with the latest WinAppSDK.
|
||||
- ${{ if eq(parameters.useLatestWinAppSDK, false) }}:
|
||||
- pwsh: |-
|
||||
& '.pipelines/verifyNoticeMdAgainstNugetPackages.ps1' -path '$(build.sourcesdirectory)\'
|
||||
displayName: Verify NOTICE.md and NuGet packages match
|
||||
|
||||
- ${{ if eq(parameters.runTests, true) }}:
|
||||
# Publish test results which ran in MSBuild
|
||||
- task: PublishTestResults@2
|
||||
displayName: 'Publish Test Results'
|
||||
inputs:
|
||||
testResultsFormat: VSTest
|
||||
testResultsFiles: '**/*.trx'
|
||||
condition: and(succeeded(), ne(variables['BuildPlatform'], 'arm64'))
|
||||
|
||||
# Native dlls
|
||||
- task: VSTest@2
|
||||
condition: and(succeeded(), ne(variables['BuildPlatform'], 'arm64')) # No arm64 agents to run the tests.
|
||||
displayName: 'Native Tests'
|
||||
inputs:
|
||||
platform: '$(BuildPlatform)'
|
||||
configuration: '$(BuildConfiguration)'
|
||||
testSelector: 'testAssemblies'
|
||||
testAssemblyVer2: |
|
||||
**\KeyboardManagerEngineTest.dll
|
||||
**\KeyboardManagerEditorTest.dll
|
||||
**\*UnitTest*.dll
|
||||
!**\obj\**
|
||||
|
||||
- pwsh: |-
|
||||
$Packages = Get-ChildItem -Recurse -Filter "Microsoft.CmdPal.UI_*.msix"
|
||||
Write-Host "Found $($Packages.Count) CmdPal MSIX package(s):"
|
||||
foreach ($pkg in $Packages) {
|
||||
Write-Host " - $($pkg.FullName)"
|
||||
}
|
||||
|
||||
if ($Packages.Count -gt 0) {
|
||||
# Priority: Look for platform-specific MSIX (x64/arm64) first, then fall back to any
|
||||
$PlatformPackage = $Packages | Where-Object { $_.Name -match "Microsoft\.CmdPal\.UI_.*_(x64|arm64)\.msix$" } | Select-Object -First 1
|
||||
if ($PlatformPackage) {
|
||||
$Package = $PlatformPackage
|
||||
Write-Host "Using platform-specific package: $($Package.FullName)"
|
||||
} else {
|
||||
$Package = $Packages | Select-Object -First 1
|
||||
Write-Host "Using first available package: $($Package.FullName)"
|
||||
}
|
||||
|
||||
$PackageFilename = $Package.FullName
|
||||
Write-Host "##vso[task.setvariable variable=CmdPalPackagePath]${PackageFilename}"
|
||||
} else {
|
||||
Write-Warning "No CmdPal MSIX packages found!"
|
||||
}
|
||||
displayName: Locate the CmdPal MSIX
|
||||
|
||||
- ${{ if eq(parameters.codeSign, true) }}:
|
||||
- pwsh: |-
|
||||
& "$(MakeAppxPath)" unpack /p "$(CmdPalPackagePath)" /d "$(JobOutputDirectory)/CmdPalPackageContents"
|
||||
displayName: Unpack the MSIX for signing
|
||||
|
||||
- template: steps-esrp-signing.yml
|
||||
parameters:
|
||||
displayName: Sign CmdPal MSIX content
|
||||
signingIdentity: ${{ parameters.signingIdentity }}
|
||||
inputs:
|
||||
FolderPath: '$(JobOutputDirectory)/CmdPalPackageContents'
|
||||
signType: batchSigning
|
||||
batchSignPolicyFile: '$(build.sourcesdirectory)\.pipelines\ESRPSigning_cmdpal_msix_content.json'
|
||||
ciPolicyFile: '$(build.sourcesdirectory)\.pipelines\CIPolicy.xml'
|
||||
|
||||
- pwsh: |-
|
||||
$outDir = New-Item -Type Directory "$(JobOutputDirectory)/_appx" -ErrorAction:Ignore
|
||||
$PackageFilename = Join-Path $outDir.FullName (Split-Path -Leaf "$(CmdPalPackagePath)")
|
||||
& "$(MakeAppxPath)" pack /h SHA256 /o /p $PackageFilename /d "$(JobOutputDirectory)/CmdPalPackageContents"
|
||||
Copy-Item -Force $PackageFilename "$(CmdPalPackagePath)"
|
||||
Remove-Item -Force -Recurse "$(JobOutputDirectory)/CmdPalPackageContents" -ErrorAction:Ignore
|
||||
Remove-Item -Force -Recurse "$(JobOutputDirectory)/_appx" -ErrorAction:Ignore
|
||||
displayName: Re-pack the new CmdPal package after signing
|
||||
|
||||
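After the re-pack, the signature on the final package can be spot-checked before it is staged. A minimal sketch, assuming Get-AuthenticodeSignature can read MSIX signatures on the agent image (signtool verify /pa is the usual alternative):

```powershell
# Spot-check the signature on the repacked CmdPal package (illustration only).
$sig = Get-AuthenticodeSignature -FilePath "$env:CmdPalPackagePath"
'{0}: {1}' -f $sig.Status, $sig.SignerCertificate.Subject
```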
- template: steps-esrp-signing.yml
|
||||
parameters:
|
||||
displayName: Sign Core PowerToys
|
||||
signingIdentity: ${{ parameters.signingIdentity }}
|
||||
inputs:
|
||||
FolderPath: '$(BuildPlatform)/$(BuildConfiguration)'
|
||||
signType: batchSigning
|
||||
batchSignPolicyFile: '$(build.sourcesdirectory)\.pipelines\ESRPSigning_core.json'
|
||||
ciPolicyFile: '$(build.sourcesdirectory)\.pipelines\CIPolicy.xml'
|
||||
|
||||
- template: steps-esrp-signing.yml
|
||||
parameters:
|
||||
displayName: Sign DSC files
|
||||
signingIdentity: ${{ parameters.signingIdentity }}
|
||||
inputs:
|
||||
FolderPath: 'src/dsc/Microsoft.PowerToys.Configure'
|
||||
signType: batchSigning
|
||||
batchSignPolicyFile: '$(build.sourcesdirectory)\.pipelines\ESRPSigning_DSC.json'
|
||||
ciPolicyFile: '$(build.sourcesdirectory)\.pipelines\CIPolicy.xml'
|
||||
|
||||
- pwsh: |-
|
||||
Copy-Item -Verbose -Force "$(CmdPalPackagePath)" "$(JobOutputDirectory)"
|
||||
displayName: Stage the final CmdPal package
|
||||
|
||||
|
||||
|
||||
- template: steps-build-installer-vnext.yml
|
||||
parameters:
|
||||
codeSign: ${{ parameters.codeSign }}
|
||||
signingIdentity: ${{ parameters.signingIdentity }}
|
||||
versionNumber: ${{ parameters.versionNumber }}
|
||||
additionalBuildOptions: ${{ parameters.additionalBuildOptions }}
|
||||
|
||||
# This saves ~1GiB per architecture. We won't need these later.
|
||||
# Removes:
|
||||
# - All .pdb files for static libraries (.lib), which were only used during linking
|
||||
- pwsh: |-
|
||||
$binDir = '$(Build.SourcesDirectory)'
|
||||
$ImportLibs = Get-ChildItem $binDir -Recurse -File -Filter '*.exp' | ForEach-Object { $_.FullName -Replace "exp$","lib" }
|
||||
$StaticLibs = Get-ChildItem $binDir -Recurse -File -Filter '*.lib' | Where-Object FullName -NotIn $ImportLibs
|
||||
|
||||
$Items = @()
|
||||
$Items += Get-Item ($StaticLibs.FullName -Replace "lib$","pdb") -ErrorAction:Ignore
|
||||
|
||||
$Items | Remove-Item -Recurse -Force -Verbose -ErrorAction:Ignore
|
||||
displayName: Clean up static libs PDBs
|
||||
errorActionPreference: silentlyContinue # It's OK if this silently fails
|
||||
|
||||
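Since the cleanup above is destructive and intentionally tolerant of errors, it can help to dry-run the same selection logic when adjusting it. This sketch only lists the PDBs that would be removed:

```powershell
# Dry run of the static-lib PDB cleanup above: list candidates, delete nothing.
$binDir     = '.'   # locally; the pipeline uses $(Build.SourcesDirectory)
$ImportLibs = Get-ChildItem $binDir -Recurse -File -Filter '*.exp' |
    ForEach-Object { $_.FullName -Replace 'exp$', 'lib' }
$StaticLibs = Get-ChildItem $binDir -Recurse -File -Filter '*.lib' |
    Where-Object FullName -NotIn $ImportLibs

Get-Item ($StaticLibs.FullName -Replace 'lib$', 'pdb') -ErrorAction:Ignore |
    Select-Object -ExpandProperty FullName
```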
- task: CopyFiles@2
|
||||
displayName: Stage Installers
|
||||
inputs:
|
||||
contents: |-
|
||||
**/PowerToys*Setup-*.exe
|
||||
!**/PowerToysSetupVNext/obj/**
|
||||
flattenFolders: True
|
||||
targetFolder: $(JobOutputDirectory)
|
||||
|
||||
- pwsh: |-
|
||||
$Symbols = Get-ChildItem "$(BuildPlatform)" -Recurse -Filter *.pdb -Exclude "vc143.pdb","*test*.pdb" |
|
||||
Group-Object Name | ForEach-Object { $_.Group[0] }
|
||||
$OutDir = "$(JobOutputDirectory)/symbols-$(BuildPlatform)"
|
||||
New-Item -Type Directory $OutDir -EA:Ignore
|
||||
Write-Host "Linking $($Symbols.Length) symbols into place at $OutDir"
|
||||
ForEach($s in $Symbols) {
|
||||
New-Item -Type HardLink -Target $s.FullName (Join-Path $OutDir $s.Name)
|
||||
}
|
||||
displayName: Stage Unique Symbols (as hard links)
|
||||
|
||||
- pwsh: |-
|
||||
$p = "$(JobOutputDirectory)\"
|
||||
|
||||
# Calculate hashes for installers
|
||||
$userSetupFiles = Get-ChildItem -Path $p -Filter "PowerToysUserSetup*.exe"
|
||||
$machineSetupFiles = Get-ChildItem -Path $p -Filter "PowerToysSetup*.exe" | Where-Object { $_.Name -notmatch "PowerToysUserSetup" }
|
||||
|
||||
if ($userSetupFiles.Count -gt 0) {
|
||||
$userHash = ($userSetupFiles[0] | Get-FileHash).Hash;
|
||||
$userPlat = "hash_user_$(BuildPlatform).txt";
|
||||
$combinedUserPath = $p + $userPlat;
|
||||
echo "User: $userHash"
|
||||
$userHash | out-file -filepath $combinedUserPath
|
||||
}
|
||||
|
||||
if ($machineSetupFiles.Count -gt 0) {
|
||||
$machineHash = ($machineSetupFiles[0] | Get-FileHash).Hash;
|
||||
$machinePlat = "hash_machine_$(BuildPlatform).txt";
|
||||
$combinedMachinePath = $p + $machinePlat;
|
||||
echo "Machine: $machineHash"
|
||||
$machineHash | out-file -filepath $combinedMachinePath
|
||||
}
|
||||
displayName: Calculate file hashes for all installers
|
||||
|
||||
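The hash_*.txt files staged above contain a bare SHA256 string per installer flavor. A consumer of the artifact could verify a downloaded installer against them like this (file names are illustrative):

```powershell
# Verify a downloaded installer against its staged hash file (illustration only).
$expected = (Get-Content 'hash_machine_x64.txt' -Raw).Trim()
$actual   = (Get-FileHash 'PowerToysSetup-0.0.1-x64.exe' -Algorithm SHA256).Hash
if ($actual -ne $expected) { throw "Installer hash mismatch: $actual != $expected" }
"Hash OK: $actual"
```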
# Publishing the GPO files
|
||||
- pwsh: |-
|
||||
$GpoArchive = "$(JobOutputDirectory)\GroupPolicyObjectFiles-${{ parameters.versionNumber }}.zip"
|
||||
tar -c -v --format=zip -C .\src\gpo\assets -f $GpoArchive *
|
||||
displayName: Stage GPO files
|
||||
|
||||
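The GPO step above relies on the bsdtar shipped with Windows to write a zip. Assuming the same tar is on PATH, the staged archive can be listed afterwards to confirm the policy templates made it in (the version number here is an example):

```powershell
# List the staged GPO archive contents (illustration only).
tar -t -f 'GroupPolicyObjectFiles-0.0.1.zip'
```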
- ${{ if or(eq(parameters.runTests, true), eq(parameters.buildTests, true)) }}:
|
||||
# Running the tests may result in future jobs consuming artifacts out of this build
|
||||
# Instead of running an expensive file copy step, move everything over since the build is totally done.
|
||||
- pwsh: |-
|
||||
# It seems weird, but this is for compatibility. Our artifacts historically contained the folder x64/Release/x64/Release (for example).
|
||||
$FinalOutputRoot = "$(JobOutputDirectory)\$(BuildPlatform)\$(BuildConfiguration)\$(BuildPlatform)"
|
||||
$ProjectBuildRoot = "$(Build.SourcesDirectory)\$(BuildPlatform)"
|
||||
$ProjectBuildDirectory = "$ProjectBuildRoot\$(BuildConfiguration)"
|
||||
|
||||
New-Item -Type Directory $FinalOutputRoot -EA:Ignore
|
||||
Move-Item $ProjectBuildDirectory $FinalOutputRoot
|
||||
displayName: Move entire output directory into artifacts
|
||||
|
||||
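The move above is what yields the historical doubled folder layout mentioned in the comment, and later templates assume it when pointing at the artifact. An illustration for an x64 Release build:

```powershell
# Illustration: layout after the move, for BuildPlatform=x64, BuildConfiguration=Release.
#   $(JobOutputDirectory)\x64\Release\x64\Release\...   (full build output, including \tests)
# The same shape is what the OneFuzz drop directory later expects:
Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY 'x64\Release\x64\Release\tests'
```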
- ${{ if eq(parameters.publishArtifacts, true) }}:
|
||||
- publish: $(JobOutputDirectory)
|
||||
artifact: $(JobOutputArtifactName)
|
||||
displayName: Publish all outputs
|
||||
condition: succeeded()
|
||||
|
||||
- publish: $(JobOutputDirectory)
|
||||
artifact: $(JobOutputArtifactName)-failure-$(System.JobAttempt)
|
||||
displayName: Publish failure logs
|
||||
condition: or(failed(), canceled())
|
||||
@@ -1,100 +0,0 @@
|
||||
parameters:
|
||||
- name: buildConfigurations
|
||||
type: object
|
||||
default:
|
||||
- Release
|
||||
- name: official
|
||||
type: boolean
|
||||
default: false
|
||||
- name: codeSign
|
||||
type: boolean
|
||||
default: false
|
||||
- name: pool
|
||||
type: object
|
||||
default: []
|
||||
- name: signingIdentity
|
||||
type: object
|
||||
default: {}
|
||||
|
||||
jobs:
|
||||
- job: "BuildSDK"
|
||||
${{ if ne(length(parameters.pool), 0) }}:
|
||||
pool: ${{ parameters.pool }}
|
||||
displayName: Build SDK
|
||||
timeoutInMinutes: 240
|
||||
cancelTimeoutInMinutes: 1
|
||||
templateContext: # Required when this template is hosted in 1ES PT
|
||||
outputs:
|
||||
- output: pipelineArtifact
|
||||
artifactName: SDK
|
||||
targetPath: $(Build.ArtifactStagingDirectory)
|
||||
steps:
|
||||
- checkout: self
|
||||
clean: true
|
||||
submodules: true
|
||||
persistCredentials: True
|
||||
fetchTags: false
|
||||
fetchDepth: 1
|
||||
|
||||
- template: .\steps-ensure-nuget-version.yml
|
||||
|
||||
- task: NuGetAuthenticate@1
|
||||
|
||||
- ${{ if eq(parameters.official, true) }}:
|
||||
- template: .\steps-setup-versioning.yml
|
||||
parameters:
|
||||
directory: $(build.sourcesdirectory)\src\modules\cmdpal
|
||||
|
||||
- pwsh: |-
|
||||
& "$(build.sourcesdirectory)\src\modules\cmdpal\extensionsdk\nuget\BuildSDKHelper.ps1" -Configuration "Release" -BuildStep "build" -IsAzurePipelineBuild
|
||||
displayName: Build SDK
|
||||
|
||||
- ${{ if eq(parameters.codeSign, true) }}:
|
||||
- template: steps-esrp-signing.yml
|
||||
parameters:
|
||||
displayName: Sign SDK
|
||||
signingIdentity: ${{ parameters.signingIdentity }}
|
||||
inputs:
|
||||
FolderPath: 'src/modules'
|
||||
signType: batchSigning
|
||||
batchSignPolicyFile: '$(build.sourcesdirectory)\.pipelines\ESRPSigning_sdk.json'
|
||||
ciPolicyFile: '$(build.sourcesdirectory)\.pipelines\CIPolicy.xml'
|
||||
|
||||
- pwsh: |-
|
||||
& "$(build.sourcesdirectory)\src\modules\cmdpal\extensionsdk\nuget\BuildSDKHelper.ps1" -Configuration "Release" -BuildStep "pack" -IsAzurePipelineBuild
|
||||
displayName: Pack SDK
|
||||
|
||||
- task: CopyFiles@2
|
||||
displayName: Copy Nuget to Artifact Staging
|
||||
inputs:
|
||||
sourceFolder: "$(build.sourcesdirectory)/src/modules/cmdpal/extensionsdk/_build"
|
||||
contents: '*.nupkg'
|
||||
targetFolder: '$(Build.ArtifactStagingDirectory)'
|
||||
|
||||
- ${{ if eq(parameters.codeSign, true) }}:
|
||||
- template: steps-esrp-signing.yml
|
||||
parameters:
|
||||
displayName: Sign NuGet packages
|
||||
signingIdentity: ${{ parameters.signingIdentity }}
|
||||
inputs:
|
||||
FolderPath: $(Build.ArtifactStagingDirectory)
|
||||
Pattern: '*.nupkg'
|
||||
UseMinimatch: true
|
||||
signConfigType: inlineSignParams
|
||||
inlineOperation: >-
|
||||
[
|
||||
{
|
||||
"KeyCode": "CP-401405",
|
||||
"OperationCode": "NuGetSign",
|
||||
"Parameters": {},
|
||||
"ToolName": "sign",
|
||||
"ToolVersion": "1.0"
|
||||
},
|
||||
{
|
||||
"KeyCode": "CP-401405",
|
||||
"OperationCode": "NuGetVerify",
|
||||
"Parameters": {},
|
||||
"ToolName": "sign",
|
||||
"ToolVersion": "1.0"
|
||||
}
|
||||
]
|
||||
@@ -1,131 +0,0 @@
|
||||
# Minimal UI Tests Build Template
|
||||
# This template only builds UI test projects and stages their test DLLs for consumption by test pipelines
|
||||
|
||||
parameters:
|
||||
- name: buildConfigurations
|
||||
type: object
|
||||
default:
|
||||
- Release
|
||||
- name: buildPlatforms
|
||||
type: object
|
||||
default:
|
||||
- x64
|
||||
- name: condition
|
||||
type: string
|
||||
default: ''
|
||||
- name: dependsOn
|
||||
type: object
|
||||
default: []
|
||||
- name: pool
|
||||
type: object
|
||||
default: []
|
||||
- name: variables
|
||||
type: object
|
||||
default: {}
|
||||
- name: uiTestModules
|
||||
type: object
|
||||
default: []
|
||||
|
||||
jobs:
|
||||
- job: BuildUITests
|
||||
${{ if ne(length(parameters.pool), 0) }}:
|
||||
pool: ${{ parameters.pool }}
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
condition: ${{ parameters.condition }}
|
||||
strategy:
|
||||
matrix:
|
||||
${{ each config in parameters.buildConfigurations }}:
|
||||
${{ each platform in parameters.buildPlatforms }}:
|
||||
${{ config }}_${{ platform }}:
|
||||
BuildConfiguration: ${{ config }}
|
||||
BuildPlatform: ${{ platform }}
|
||||
variables:
|
||||
JobOutputDirectory: $(Build.ArtifactStagingDirectory)
|
||||
LogOutputDirectory: $(Build.ArtifactStagingDirectory)\logs
|
||||
JobOutputArtifactName: build-$(BuildPlatform)-$(BuildConfiguration)
|
||||
NUGET_RESTORE_MSBUILD_ARGS: /p:Platform=$(BuildPlatform)
|
||||
${{ insert }}: ${{ parameters.variables }}
|
||||
displayName: Build UI Tests Only
|
||||
timeoutInMinutes: 60
|
||||
cancelTimeoutInMinutes: 1
|
||||
templateContext:
|
||||
outputs:
|
||||
- output: pipelineArtifact
|
||||
artifactName: $(JobOutputArtifactName)
|
||||
targetPath: $(Build.ArtifactStagingDirectory)
|
||||
steps:
|
||||
- checkout: self
|
||||
clean: true
|
||||
submodules: true
|
||||
persistCredentials: True
|
||||
fetchTags: false
|
||||
fetchDepth: 1
|
||||
|
||||
- template: steps-ensure-dotnet-version.yml
|
||||
parameters:
|
||||
sdk: true
|
||||
version: '9.0'
|
||||
|
||||
- template: .\steps-restore-nuget.yml
|
||||
|
||||
- task: MSBuild@1
|
||||
displayName: Restore solution-level NuGet packages
|
||||
inputs:
|
||||
solution: PowerToys.slnx
|
||||
msbuildArguments: '/t:restore /p:RestorePackagesConfig=true'
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
|
||||
# Build all UI test projects if no specific modules are specified
|
||||
- ${{ if eq(length(parameters.uiTestModules), 0) }}:
|
||||
- task: VSBuild@1
|
||||
displayName: Build UI Test Projects
|
||||
inputs:
|
||||
solution: '**/*UITest*.csproj'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: >-
|
||||
-restore
|
||||
-graph
|
||||
/p:RestorePackagesConfig=true
|
||||
/p:BuildProjectReferences=true
|
||||
/p:CIBuild=true
|
||||
/bl:$(LogOutputDirectory)\build-all-uitests.binlog
|
||||
$(NUGET_RESTORE_MSBUILD_ARGS)
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
msbuildArchitecture: x64
|
||||
maximumCpuCount: true
|
||||
|
||||
# Build specific UI test modules
|
||||
- ${{ if ne(length(parameters.uiTestModules), 0) }}:
|
||||
- ${{ each module in parameters.uiTestModules }}:
|
||||
- task: VSBuild@1
|
||||
displayName: 'Build UI Test Module: ${{ module }}'
|
||||
inputs:
|
||||
solution: '**/*${{ module }}*.csproj'
|
||||
vsVersion: 17.0
|
||||
msbuildArgs: >-
|
||||
-restore
|
||||
-graph
|
||||
/p:RestorePackagesConfig=true
|
||||
/p:BuildProjectReferences=true
|
||||
/p:CIBuild=true
|
||||
/bl:$(LogOutputDirectory)\build-${{ module }}.binlog
|
||||
$(NUGET_RESTORE_MSBUILD_ARGS)
|
||||
platform: $(BuildPlatform)
|
||||
configuration: $(BuildConfiguration)
|
||||
msbuildArchitecture: x64
|
||||
maximumCpuCount: true
|
||||
|
||||
# Stage test project outputs with directory structure
|
||||
- task: CopyFiles@2
|
||||
displayName: Stage UI Test Build Outputs
|
||||
inputs:
|
||||
sourceFolder: '$(Build.SourcesDirectory)'
|
||||
contents: '**/$(BuildPlatform)/$(BuildConfiguration)/tests/**/*'
|
||||
targetFolder: '$(JobOutputDirectory)\$(BuildPlatform)\$(BuildConfiguration)'
|
||||
|
||||
- publish: $(JobOutputDirectory)
|
||||
artifact: $(JobOutputArtifactName)
|
||||
displayName: Publish UI Test artifacts
|
||||
condition: always()
|
||||
@@ -1,36 +0,0 @@
|
||||
parameters:
|
||||
- name: configuration
|
||||
type: string
|
||||
default: "Release"
|
||||
- name: platform
|
||||
type: string
|
||||
default: ""
|
||||
- name: inputArtifactStem
|
||||
type: string
|
||||
default: ""
|
||||
|
||||
jobs:
|
||||
- job: OneFuzz
|
||||
pool:
|
||||
vmImage: windows-2022
|
||||
variables:
|
||||
ArtifactName: build-${{ parameters.platform }}-${{ parameters.configuration }}${{ parameters.inputArtifactStem }}
|
||||
steps:
|
||||
- checkout: self
|
||||
submodules: false
|
||||
clean: true
|
||||
fetchDepth: 1
|
||||
fetchTags: false
|
||||
|
||||
- download: current
|
||||
displayName: Download artifacts
|
||||
artifact: $(ArtifactName)
|
||||
patterns: |-
|
||||
**/tests/*.FuzzTests/**
|
||||
|
||||
- task: onefuzz-task@0
|
||||
inputs:
|
||||
onefuzzOSes: Windows
|
||||
env:
|
||||
onefuzzDropDirectory: $(Pipeline.Workspace)\$(ArtifactName)\x64\Release\x64\Release\tests
|
||||
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
|
||||
@@ -1,116 +0,0 @@
|
||||
parameters:
|
||||
- name: includePublicSymbolServer
|
||||
type: boolean
|
||||
default: false
|
||||
- name: pool
|
||||
type: object
|
||||
default: []
|
||||
- name: dependsOn
|
||||
type: object
|
||||
default: null
|
||||
- name: versionNumber
|
||||
type: string
|
||||
default: '0.0.1'
|
||||
- name: artifactStem
|
||||
type: string
|
||||
default: ''
|
||||
- name: jobName
|
||||
type: string
|
||||
default: PublishSymbols
|
||||
- name: symbolExpiryTime
|
||||
type: string
|
||||
default: 36530 # This is the default from PublishSymbols@2
|
||||
- name: variables
|
||||
type: object
|
||||
default: {}
|
||||
- name: subscription
|
||||
type: string
|
||||
- name: symbolProject
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
- job: ${{ parameters.jobName }}
|
||||
${{ if ne(length(parameters.pool), 0) }}:
|
||||
pool: ${{ parameters.pool }}
|
||||
${{ if eq(parameters.includePublicSymbolServer, true) }}:
|
||||
displayName: Publish Symbols to Internal and MSDL
|
||||
${{ else }}:
|
||||
displayName: Publish Symbols Internally
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
variables:
|
||||
${{ insert }}: ${{ parameters.variables }}
|
||||
SymbolsArtifactName: "PowerToys_${{parameters.versionNumber}}_$(Build.BuildNumber)"
|
||||
steps:
|
||||
- checkout: self
|
||||
clean: true
|
||||
fetchDepth: 1
|
||||
fetchTags: false # Tags still result in depth > 1 fetch; we don't need them here
|
||||
submodules: true
|
||||
persistCredentials: True
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
displayName: Download all PDBs from all prior build phases
|
||||
inputs:
|
||||
itemPattern: '**/*.pdb'
|
||||
targetPath: '$(Build.SourcesDirectory)/symbolStaging'
|
||||
|
||||
- powershell: |-
|
||||
Get-PackageProvider -Name NuGet -ForceBootstrap
|
||||
Install-Module -Verbose -AllowClobber -Force Az.Accounts, Az.Storage, Az.Network, Az.Resources, Az.Compute
|
||||
displayName: Install Azure Module Dependencies
|
||||
|
||||
# Transit the Azure token from the Service Connection into a secret variable for the rest of the pipeline to use.
|
||||
- task: AzurePowerShell@5
|
||||
displayName: Generate an Azure Token
|
||||
inputs:
|
||||
azureSubscription: ${{ parameters.subscription }}
|
||||
azurePowerShellVersion: LatestVersion
|
||||
pwsh: true
|
||||
ScriptType: InlineScript
|
||||
Inline: |-
|
||||
$AzToken = (Get-AzAccessToken -AsSecureString -ResourceUrl api://30471ccf-0966-45b9-a979-065dbedb24c1).Token | ConvertFrom-SecureString -AsPlainText
|
||||
Write-Host "##vso[task.setvariable variable=SymbolAccessToken;issecret=true]$AzToken"
|
||||
|
||||
|
||||
- task: PublishSymbols@2
|
||||
displayName: Publish Symbols (to current Azure DevOps tenant)
|
||||
continueOnError: True
|
||||
inputs:
|
||||
SymbolsFolder: '$(Build.SourcesDirectory)/symbolStaging'
|
||||
SearchPattern: '**/*.pdb'
|
||||
IndexSources: false
|
||||
DetailedLog: true
|
||||
SymbolsMaximumWaitTime: 30
|
||||
SymbolServerType: 'TeamServices'
|
||||
SymbolsProduct: 'PowerToys Converged Symbols'
|
||||
SymbolsVersion: '${{ parameters.versionNumber }}'
|
||||
SymbolsArtifactName: $(SymbolsArtifactName)
|
||||
SymbolExpirationInDays: ${{ parameters.symbolExpiryTime }}
|
||||
env:
|
||||
LIB: $(Build.SourcesDirectory)
|
||||
|
||||
- pwsh: |-
|
||||
# Prepare the defaults for IRM
|
||||
$PSDefaultParameterValues['Invoke-RestMethod:Headers'] = @{ Authorization = "Bearer $(SymbolAccessToken)" }
|
||||
$PSDefaultParameterValues['Invoke-RestMethod:ContentType'] = "application/json"
|
||||
$PSDefaultParameterValues['Invoke-RestMethod:Method'] = "POST"
|
||||
|
||||
$BaseUri = "https://symbolrequestprod.trafficmanager.net/projects/${{ parameters.symbolProject }}/requests"
|
||||
|
||||
# Prepare the request
|
||||
$expiration = (Get-Date).Add([TimeSpan]::FromDays(${{ parameters.symbolExpiryTime }}))
|
||||
$createRequestBody = @{
|
||||
requestName = "$(SymbolsArtifactName)";
|
||||
expirationTime = $expiration.ToString();
|
||||
}
|
||||
Write-Host "##[debug]Starting request $($createRequestBody.requestName) with expiration date of $($createRequestBody.expirationTime)"
|
||||
Invoke-RestMethod -Uri "$BaseUri" -Body ($createRequestBody | ConvertTo-Json -Compress) -Verbose
|
||||
|
||||
# Request symbol publication
|
||||
$publishRequestBody = @{
|
||||
publishToInternalServer = $true;
|
||||
publishToPublicServer = $${{ parameters.includePublicSymbolServer }};
|
||||
}
|
||||
Write-Host "##[debug]Submitting request $($createRequestBody.requestName) ($($publishRequestBody | ConvertTo-Json -Compress))"
|
||||
Invoke-RestMethod -Uri "$BaseUri/$($createRequestBody.requestName)" -Body ($publishRequestBody | ConvertTo-Json -Compress) -Verbose
|
||||
displayName: Publish Symbols using internal REST API
|
||||
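One subtle detail in the script above: $${{ parameters.includePublicSymbolServer }} is expanded at template time, so with the parameter set to true the script sees the PowerShell literal $True. The effective publish request body is then (illustration only):

```powershell
# After template expansion with includePublicSymbolServer=true:
$publishRequestBody = @{
    publishToInternalServer = $true;
    publishToPublicServer   = $true;
}
$publishRequestBody | ConvertTo-Json -Compress
# -> {"publishToInternalServer":true,"publishToPublicServer":true}
```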
@@ -1,197 +0,0 @@
|
||||
parameters:
|
||||
- name: configuration
|
||||
type: string
|
||||
default: "Release"
|
||||
- name: platform
|
||||
type: string
|
||||
default: ""
|
||||
- name: inputArtifactStem
|
||||
type: string
|
||||
default: ""
|
||||
- name: useLatestWebView2
|
||||
type: boolean
|
||||
default: false
|
||||
- name: buildSource
|
||||
type: string
|
||||
default: "latestMainOfficialBuild"
|
||||
displayName: "Build Source"
|
||||
- name: specificBuildId
|
||||
type: string
|
||||
default: "xxxx"
|
||||
displayName: "Build ID (for specific builds)"
|
||||
- name: uiTestModules
|
||||
type: object
|
||||
default: []
|
||||
- name: installMode
|
||||
type: string
|
||||
default: 'machine'
|
||||
values:
|
||||
- 'machine'
|
||||
- 'peruser'
|
||||
- name: jobSuffix
|
||||
type: string
|
||||
default: ''
|
||||
|
||||
jobs:
|
||||
- job: Test${{ parameters.platform }}${{ parameters.configuration }}${{ parameters.jobSuffix }}
|
||||
displayName: Test ${{ parameters.platform }} ${{ parameters.configuration }}${{ parameters.jobSuffix }}
|
||||
timeoutInMinutes: 300
|
||||
variables:
|
||||
${{ if or(eq(parameters.platform, 'x64Win10'), eq(parameters.platform, 'x64Win11')) }}:
|
||||
BuildPlatform: x64
|
||||
${{ else }}:
|
||||
BuildPlatform: ${{ parameters.platform }}
|
||||
TestPlatform: ${{ parameters.platform }}
|
||||
BuildConfiguration: ${{ parameters.configuration }}
|
||||
SrcPath: $(Build.Repository.LocalPath)
|
||||
TestArtifactsName: build-${{ variables.BuildPlatform }}-${{ parameters.configuration }}${{ parameters.inputArtifactStem }}
|
||||
pool:
|
||||
${{ if eq(variables['System.CollectionId'], 'cb55739e-4afe-46a3-970f-1b49d8ee7564') }}:
|
||||
${{ if ne(parameters.platform, 'ARM64') }}:
|
||||
name: SHINE-INT-Testing-x64
|
||||
${{ if eq(parameters.platform, 'x64Win11') }}:
|
||||
demands: ImageOverride -equals SHINE-W11-Testing
|
||||
${{ else }}:
|
||||
name: SHINE-INT-Testing-arm64
|
||||
${{ else }}:
|
||||
${{ if ne(parameters.platform, 'ARM64') }}:
|
||||
name: SHINE-OSS-Testing-x64
|
||||
${{ if eq(parameters.platform, 'x64Win11') }}:
|
||||
demands: ImageOverride -equals SHINE-W11-Testing
|
||||
${{ else }}:
|
||||
name: SHINE-OSS-Testing-arm64
|
||||
steps:
|
||||
- checkout: self
|
||||
submodules: false
|
||||
clean: true
|
||||
fetchDepth: 1
|
||||
fetchTags: false
|
||||
|
||||
- ${{ if eq(parameters.useLatestWebView2, true) }}:
|
||||
- powershell: |
|
||||
$edge_url = 'https://go.microsoft.com/fwlink/?linkid=2084649&Channel=Canary&language=en'
|
||||
$timeout = New-TimeSpan -Minutes 6
|
||||
$timeoutSeconds = [int]$timeout.TotalSeconds
|
||||
$command = {
|
||||
Invoke-WebRequest -Uri $using:edge_url -OutFile $(Pipeline.Workspace)\MicrosoftEdgeSetup.exe
|
||||
Write-Host "##[command]Installing Canary channel of Microsoft Edge"
|
||||
Start-Process $(Pipeline.Workspace)\MicrosoftEdgeSetup.exe -ArgumentList '/silent /install' -Wait
|
||||
}
|
||||
|
||||
$job = Start-Job -ScriptBlock $command
|
||||
Wait-Job $job -Timeout $timeoutSeconds
|
||||
if ($job.State -eq "Running") {
|
||||
Stop-Job $job
|
||||
Write-Host "##[warning]The job was stopped because it exceeded the time limit."
|
||||
}
|
||||
displayName: "Install the latest MSEdge Canary"
|
||||
|
||||
- script:
|
||||
reg add "HKLM\Software\Policies\Microsoft\Edge\WebView2\ReleaseChannels" /v PowerToys.exe /t REG_SZ /d "3"
|
||||
displayName: "Enable WebView2 Canary Channel"
|
||||
|
||||
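The reg add above pins PowerToys.exe to WebView2 release channel 3 (Canary). If a test run appears to use the wrong runtime, the override can be read back to rule this step out; a diagnostics sketch:

```powershell
# Confirm the WebView2 channel override written above (3 = Canary).
Get-ItemProperty 'HKLM:\Software\Policies\Microsoft\Edge\WebView2\ReleaseChannels' |
    Select-Object 'PowerToys.exe'
```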
- ${{ if ne(parameters.platform, 'arm64') }}:
|
||||
- download: current
|
||||
displayName: Download artifacts
|
||||
artifact: $(TestArtifactsName)
|
||||
patterns: |-
|
||||
**
|
||||
!**\*.pdb
|
||||
!**\*.lib
|
||||
- ${{ else }}:
|
||||
- template: steps-download-artifacts-with-azure-cli.yml
|
||||
parameters:
|
||||
artifactName: $(TestArtifactsName)
|
||||
|
||||
- template: steps-ensure-dotnet-version.yml
|
||||
parameters:
|
||||
sdk: true
|
||||
version: '9.0'
|
||||
|
||||
- task: VisualStudioTestPlatformInstaller@1
|
||||
displayName: Ensure VSTest Platform
|
||||
|
||||
- pwsh: |-
|
||||
& '$(build.sourcesdirectory)\.pipelines\InstallWinAppDriver.ps1'
|
||||
displayName: Download and install WinAppDriver
|
||||
|
||||
- ${{ if ne(parameters.buildSource, 'buildNow') }}:
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
buildType: 'specific'
|
||||
project: 'Dart'
|
||||
definition: '76541'
|
||||
${{ if eq(parameters.buildSource, 'specificBuildId') }}:
|
||||
buildVersionToDownload: 'specific'
|
||||
buildId: '${{ parameters.specificBuildId }}'
|
||||
${{ else }}:
|
||||
buildVersionToDownload: 'latestFromBranch'
|
||||
branchName: 'refs/heads/main'
|
||||
artifactName: 'build-$(BuildPlatform)-Release'
|
||||
targetPath: '$(Build.ArtifactStagingDirectory)'
|
||||
${{ if eq(parameters.installMode, 'peruser') }}:
|
||||
patterns: |
|
||||
**/PowerToysUserSetup*.exe
|
||||
${{ else }}:
|
||||
patterns: |
|
||||
**/PowerToysSetup*.exe
|
||||
|
||||
- ${{ if ne(parameters.buildSource, 'buildNow') }}:
|
||||
- ${{ if eq(parameters.installMode, 'peruser') }}:
|
||||
- pwsh: |-
|
||||
& "$(build.sourcesdirectory)\.pipelines\installPowerToys.ps1" -InstallMode "PerUser"
|
||||
displayName: Install PowerToys (Per-User)
|
||||
|
||||
- ${{ if eq(parameters.installMode, 'machine') }}:
|
||||
- pwsh: |-
|
||||
& "$(build.sourcesdirectory)\.pipelines\installPowerToys.ps1" -InstallMode "Machine"
|
||||
displayName: Install PowerToys (Machine-Level)
|
||||
|
||||
- ${{ if ne(parameters.platform, 'arm64') }}:
|
||||
- task: ScreenResolutionUtility@1
|
||||
inputs:
|
||||
displaySettings: 'optimal'
|
||||
|
||||
- ${{ if eq(length(parameters.uiTestModules), 0) }}:
|
||||
- task: VSTest@3
|
||||
displayName: Run UI Tests
|
||||
inputs:
|
||||
platform: '$(BuildPlatform)'
|
||||
configuration: '$(BuildConfiguration)'
|
||||
testSelector: 'testAssemblies'
|
||||
searchFolder: '$(Pipeline.Workspace)\$(TestArtifactsName)'
|
||||
vsTestVersion: 'toolsInstaller'
|
||||
uiTests: true
|
||||
rerunFailedTests: true
|
||||
testRunTitle: 'UITests_${{ parameters.platform }}_${{ parameters.installMode }}'
|
||||
# UITests-FancyZonesEditor.dll is generated by both UITests-FancyZonesEditor and UITests-FancyZones, so exclude one copy to avoid duplicate test runs
|
||||
testAssemblyVer2: |
|
||||
**\*UITest*.dll
|
||||
!**\obj\**
|
||||
!**\ref\**
|
||||
!**\UITests-FancyZones\**\UITests-FancyZonesEditor.dll
|
||||
env:
|
||||
platform: '$(TestPlatform)'
|
||||
useInstallerForTest: ${{ ne(parameters.buildSource, 'buildNow') }}
|
||||
|
||||
- ${{ if ne(length(parameters.uiTestModules), 0) }}:
|
||||
- ${{ each module in parameters.uiTestModules }}:
|
||||
- task: VSTest@3
|
||||
displayName: Run UI Test - ${{ module }}
|
||||
inputs:
|
||||
platform: '$(BuildPlatform)'
|
||||
configuration: '$(BuildConfiguration)'
|
||||
testSelector: 'testAssemblies'
|
||||
searchFolder: '$(Pipeline.Workspace)\$(TestArtifactsName)'
|
||||
vsTestVersion: 'toolsInstaller'
|
||||
uiTests: true
|
||||
rerunFailedTests: true
|
||||
testRunTitle: 'UITests_${{ parameters.platform }}_${{ parameters.installMode }}'
|
||||
testAssemblyVer2: |
|
||||
**\*${{ module }}*.dll
|
||||
!**\obj\**
|
||||
!**\ref\**
|
||||
!**\UITests-FancyZones\**\UITests-FancyZonesEditor.dll
|
||||
env:
|
||||
platform: '$(TestPlatform)'
|
||||
useInstallerForTest: ${{ ne(parameters.buildSource, 'buildNow') }}
|
||||
@@ -1,82 +0,0 @@
|
||||
variables:
|
||||
- name: runCodesignValidationInjectionBG
|
||||
value: false
|
||||
- name: EnablePipelineCache
|
||||
value: true
|
||||
|
||||
parameters:
|
||||
- name: buildPlatforms
|
||||
type: object
|
||||
default:
|
||||
- x64
|
||||
- arm64
|
||||
- name: enableMsBuildCaching
|
||||
type: boolean
|
||||
default: false
|
||||
- name: msBuildCacheIsReadOnly
|
||||
type: boolean
|
||||
default: true
|
||||
- name: runTests
|
||||
type: boolean
|
||||
default: true
|
||||
- name: useVSPreview
|
||||
type: boolean
|
||||
default: false
|
||||
- name: useLatestWebView2
|
||||
type: boolean
|
||||
default: false
|
||||
- name: useLatestWinAppSDK
|
||||
type: boolean
|
||||
default: false
|
||||
- name: winAppSDKVersionNumber
|
||||
type: string
|
||||
default: 1.6
|
||||
- name: useExperimentalVersion
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
stages:
|
||||
- ${{ each platform in parameters.buildPlatforms }}:
|
||||
- stage: Build_${{ platform }}
|
||||
displayName: Build ${{ platform }}
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: job-build-project.yml
|
||||
parameters:
|
||||
pool:
|
||||
${{ if eq(variables['System.CollectionId'], 'cb55739e-4afe-46a3-970f-1b49d8ee7564') }}:
|
||||
name: SHINE-INT-L
|
||||
${{ else }}:
|
||||
name: SHINE-OSS-L
|
||||
${{ if eq(parameters.useVSPreview, true) }}:
|
||||
demands: ImageOverride -equals SHINE-VS17-Preview
|
||||
buildPlatforms:
|
||||
- ${{ platform }}
|
||||
buildConfigurations: [Release]
|
||||
enablePackageCaching: true
|
||||
enableMsBuildCaching: ${{ parameters.enableMsBuildCaching }}
|
||||
msBuildCacheIsReadOnly: ${{ parameters.msBuildCacheIsReadOnly }}
|
||||
runTests: ${{ parameters.runTests }}
|
||||
useVSPreview: ${{ parameters.useVSPreview }}
|
||||
useLatestWinAppSDK: ${{ parameters.useLatestWinAppSDK }}
|
||||
${{ if eq(parameters.useLatestWinAppSDK, true) }}:
|
||||
winAppSDKVersionNumber: ${{ parameters.winAppSDKVersionNumber }}
|
||||
useExperimentalVersion: ${{ parameters.useExperimentalVersion }}
|
||||
timeoutInMinutes: 90
|
||||
|
||||
- stage: Build_SDK
|
||||
displayName: Build Command Palette Toolkit SDK
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: job-build-sdk.yml
|
||||
parameters:
|
||||
pool:
|
||||
${{ if eq(variables['System.CollectionId'], 'cb55739e-4afe-46a3-970f-1b49d8ee7564') }}:
|
||||
name: SHINE-INT-L
|
||||
${{ else }}:
|
||||
name: SHINE-OSS-L
|
||||
${{ if eq(parameters.useVSPreview, true) }}:
|
||||
demands: ImageOverride -equals SHINE-VS17-Preview
|
||||
buildConfigurations: [Release]
|
||||
official: false
|
||||
codeSign: false
|
||||
@@ -1,58 +0,0 @@
|
||||
variables:
|
||||
- name: runCodesignValidationInjectionBG
|
||||
value: false
|
||||
- name: EnablePipelineCache
|
||||
value: true
|
||||
|
||||
parameters:
|
||||
- name: buildPlatforms
|
||||
type: object
|
||||
default:
|
||||
- x64
|
||||
- arm64
|
||||
- name: enableMsBuildCaching
|
||||
type: boolean
|
||||
default: false
|
||||
- name: useVSPreview
|
||||
type: boolean
|
||||
default: false
|
||||
- name: useLatestWebView2
|
||||
type: boolean
|
||||
default: false
|
||||
- name: buildSource
|
||||
type: string
|
||||
default: "latestMainOfficialBuild"
|
||||
displayName: "Build Source"
|
||||
values:
|
||||
- latestMainOfficialBuild
|
||||
- buildNow
|
||||
- specificBuildId
|
||||
- name: specificBuildId
|
||||
type: string
|
||||
default: 'xxxx'
|
||||
displayName: "Build ID (only used when Build Source = specificBuildId)"
|
||||
- name: uiTestModules
|
||||
type: object
|
||||
default: []
|
||||
|
||||
stages:
|
||||
- ${{ each platform in parameters.buildPlatforms }}:
|
||||
# Full build path: build PowerToys + UI tests + run tests
|
||||
- ${{ if eq(parameters.buildSource, 'buildNow') }}:
|
||||
- template: pipeline-ui-tests-full-build.yml
|
||||
parameters:
|
||||
platform: ${{ platform }}
|
||||
enableMsBuildCaching: ${{ parameters.enableMsBuildCaching }}
|
||||
useVSPreview: ${{ parameters.useVSPreview }}
|
||||
useLatestWebView2: ${{ parameters.useLatestWebView2 }}
|
||||
uiTestModules: ${{ parameters.uiTestModules }}
|
||||
|
||||
# Official build path: build UI tests only + download official build + run tests
|
||||
- ${{ if ne(parameters.buildSource, 'buildNow') }}:
|
||||
- template: pipeline-ui-tests-official-build.yml
|
||||
parameters:
|
||||
platform: ${{ platform }}
|
||||
buildSource: ${{ parameters.buildSource }}
|
||||
specificBuildId: ${{ parameters.specificBuildId }}
|
||||
useLatestWebView2: ${{ parameters.useLatestWebView2 }}
|
||||
uiTestModules: ${{ parameters.uiTestModules }}
|
||||
@@ -1,80 +0,0 @@
|
||||
# Template for full build path: Build PowerToys + Build UI Tests + Run Tests
|
||||
parameters:
|
||||
- name: platform
|
||||
type: string
|
||||
- name: enableMsBuildCaching
|
||||
type: boolean
|
||||
default: false
|
||||
- name: useVSPreview
|
||||
type: boolean
|
||||
default: false
|
||||
- name: useLatestWebView2
|
||||
type: boolean
|
||||
default: false
|
||||
- name: uiTestModules
|
||||
type: object
|
||||
default: []
|
||||
|
||||
stages:
|
||||
# Stage 1: Build full PowerToys project
|
||||
- stage: Build_${{ parameters.platform }}
|
||||
displayName: Build PowerToys ${{ parameters.platform }}
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: job-build-project.yml
|
||||
parameters:
|
||||
pool:
|
||||
${{ if eq(variables['System.CollectionId'], 'cb55739e-4afe-46a3-970f-1b49d8ee7564') }}:
|
||||
name: SHINE-INT-L
|
||||
${{ else }}:
|
||||
name: SHINE-OSS-L
|
||||
${{ if eq(parameters.useVSPreview, true) }}:
|
||||
demands: ImageOverride -equals SHINE-VS17-Preview
|
||||
buildPlatforms:
|
||||
- ${{ parameters.platform }}
|
||||
buildConfigurations: [Release]
|
||||
enablePackageCaching: true
|
||||
enableMsBuildCaching: ${{ parameters.enableMsBuildCaching }}
|
||||
runTests: false
|
||||
buildTests: true
|
||||
useVSPreview: ${{ parameters.useVSPreview }}
|
||||
timeoutInMinutes: 90
|
||||
|
||||
# Stage 2: Run UI Tests
|
||||
- ${{ if eq(parameters.platform, 'x64') }}:
|
||||
- stage: Test_x64Win10_FullBuild
|
||||
displayName: Test x64Win10 (Full Build)
|
||||
dependsOn: Build_${{ parameters.platform }}
|
||||
jobs:
|
||||
- template: job-test-project.yml
|
||||
parameters:
|
||||
platform: x64Win10
|
||||
configuration: Release
|
||||
useLatestWebView2: ${{ parameters.useLatestWebView2 }}
|
||||
buildSource: 'buildNow'
|
||||
uiTestModules: ${{ parameters.uiTestModules }}
|
||||
|
||||
- stage: Test_x64Win11_FullBuild
|
||||
displayName: Test x64Win11 (Full Build)
|
||||
dependsOn: Build_${{ parameters.platform }}
|
||||
jobs:
|
||||
- template: job-test-project.yml
|
||||
parameters:
|
||||
platform: x64Win11
|
||||
configuration: Release
|
||||
useLatestWebView2: ${{ parameters.useLatestWebView2 }}
|
||||
buildSource: 'buildNow'
|
||||
uiTestModules: ${{ parameters.uiTestModules }}
|
||||
|
||||
- ${{ if ne(parameters.platform, 'x64') }}:
|
||||
- stage: Test_${{ parameters.platform }}_FullBuild
|
||||
displayName: Test ${{ parameters.platform }} (Full Build)
|
||||
dependsOn: Build_${{ parameters.platform }}
|
||||
jobs:
|
||||
- template: job-test-project.yml
|
||||
parameters:
|
||||
platform: ${{ parameters.platform }}
|
||||
configuration: Release
|
||||
useLatestWebView2: ${{ parameters.useLatestWebView2 }}
|
||||
buildSource: 'buildNow'
|
||||
uiTestModules: ${{ parameters.uiTestModules }}
|
||||
Some files were not shown because too many files have changed in this diff.