Compare commits
414 Commits
@@ -187,6 +187,27 @@
      "contributions": [
        "doc"
      ]
    },
    {
      "login": "V0lantis",
      "name": "Arthur",
      "avatar_url": "https://avatars.githubusercontent.com/u/37664438?v=4",
      "profile": "https://arthurvolant.com",
      "contributions": [
        "bug",
        "code"
      ]
    },
    {
      "login": "rodrigorfk",
      "name": "Rodrigo Fior Kuntzer",
      "avatar_url": "https://avatars.githubusercontent.com/u/1995033?v=4",
      "profile": "https://github.com/rodrigorfk",
      "contributions": [
        "code",
        "test",
        "bug"
      ]
    }
  ],
  "contributorsPerLine": 7,
@@ -195,5 +216,6 @@
  "repoType": "github",
  "repoHost": "https://github.com",
  "skipCi": true,
  "commitConvention": "angular"
  "commitConvention": "angular",
  "commitType": "docs"
}
.codacy.yml (4 changes, new file)
@@ -0,0 +1,4 @@
---
exclude_paths:
  - "*.md"
  - "dist/**"
@@ -1,4 +1,5 @@
dist/
lib/
node_modules/
jest.config.js
jest.config.js
coverage/
@@ -5,7 +5,8 @@
    "github"
  ],
  "extends": [
    "plugin:github/recommended"
    "plugin:github/recommended",
    "plugin:prettier/recommended"
  ],
  "parser": "@typescript-eslint/parser",
  "parserOptions": {
.github/FUNDING.yml (12 changes, vendored)
@@ -1,12 +0,0 @@
# These are supported funding model platforms

github: jackton1
patreon: # Replace with a single Patreon username
open_collective: tj-actions
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: []
.github/ISSUE_TEMPLATE/bug_report.yaml (98 changes, vendored)
@@ -1,98 +0,0 @@
name: 🐞 Bug
description: Create a report to help us improve
title: "[BUG] <title>"
labels: [bug, needs triage]

body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      description: Please search to see if an issue already exists for the bug you encountered.
      options:
        - label: I have searched the existing issues
          required: true
  - type: checkboxes
    attributes:
      label: Does this issue exist in the latest version?
      description: Please view all releases to confirm that this issue hasn't already been fixed.
      options:
        - label: I'm using the latest release
          required: true
  - type: textarea
    id: what-happened
    attributes:
      label: Describe the bug?
      description: A clear and concise description of what the bug is
      placeholder: Tell us what you see!
    validations:
      required: true
  - type: textarea
    id: reproduce
    attributes:
      label: To Reproduce
      description: Steps to reproduce the behavior?
      placeholder: |
        1. In this environment...
        2. With this config...
        3. Run '...'
        4. See error...
    validations:
      required: true
  - type: dropdown
    id: os
    attributes:
      label: What OS are you seeing the problem on?
      multiple: true
      options:
        - all
        - ubuntu-latest or ubuntu-20.04
        - ubuntu-18.04
        - macos-latest or macos-10.15
        - macos-11
        - windows-latest or windows-2019
        - windows-2016
    validations:
      required: true
  - type: textarea
    id: expected
    attributes:
      label: Expected behavior?
      description: A clear and concise description of what you expected to happen.
      placeholder: Tell us what you expected!
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: Please copy and paste any relevant log output which is obtained after enabling debug logging. This will be automatically formatted into code, so no need for backticks.
      placeholder: |
        1. Re-running the workflow with debug logging enabled.
        2. Copy or download the log archive.
        3. Paste the contents here or upload the file in a subsequent comment.
      render: shell
    validations:
      required: true
  - type: textarea
    attributes:
      label: Anything else?
      description: |
        Links? or References?

        Anything that will give us more context about the issue you are encountering!

        Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
    validations:
      required: false
  - type: checkboxes
    id: terms
    attributes:
      label: Code of Conduct
      description: By submitting this issue, you agree to follow our [Code of Conduct](../blob/main/CODE_OF_CONDUCT.md)
      options:
        - label: I agree to follow this project's Code of Conduct
          required: true
.github/ISSUE_TEMPLATE/feature_request.yaml (59 changes, vendored)
@@ -1,59 +0,0 @@
name: Feature request
description: Suggest an idea for this project
title: "[Feature] <title>"
labels: [enhancement]

body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this feature request!
  - type: checkboxes
    attributes:
      label: Is this feature missing in the latest version?
      description: Please upgrade to the latest version to verify that this feature is still missing.
      options:
        - label: I'm using the latest release
          required: true
  - type: textarea
    id: what-happened
    attributes:
      label: Is your feature request related to a problem? Please describe.
      description: |
        A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
      placeholder: Tell us what you see!
    validations:
      required: true
  - type: textarea
    id: requests
    attributes:
      label: Describe the solution you'd like?
      description: A clear and concise description of what you want to happen.
    validations:
      required: true
  - type: textarea
    id: alternative
    attributes:
      label: Describe alternatives you've considered?
      description: A clear and concise description of any alternative solutions or features you've considered.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Anything else?
      description: |
        Links? or References?

        Add any other context or screenshots about the feature request here.

        Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
    validations:
      required: false
  - type: checkboxes
    id: terms
    attributes:
      label: Code of Conduct
      description: By submitting this issue, you agree to follow our [Code of Conduct](../blob/main/CODE_OF_CONDUCT.md)
      options:
        - label: I agree to follow this project's Code of Conduct
          required: true
.github/dependabot.yml (7 changes, vendored)
@@ -15,3 +15,10 @@ updates:
    open-pull-requests-limit: 10
    labels:
      - "merge when passing"
  - package-ecosystem: gitsubmodule
    directory: /
    schedule:
      interval: daily
    open-pull-requests-limit: 10
    labels:
      - "merge when passing"
.github/workflows/auto-approve.yml (33 changes, vendored)
@@ -1,33 +0,0 @@
name: Auto approve

on:
  pull_request_target

jobs:
  auto-approve:
    runs-on: ubuntu-latest
    steps:
      - uses: hmarr/auto-approve-action@v3
        if: |
          (
            github.event.pull_request.user.login == 'dependabot[bot]' ||
            github.event.pull_request.user.login == 'dependabot' ||
            github.event.pull_request.user.login == 'dependabot-preview[bot]' ||
            github.event.pull_request.user.login == 'dependabot-preview' ||
            github.event.pull_request.user.login == 'renovate[bot]' ||
            github.event.pull_request.user.login == 'renovate' ||
            github.event.pull_request.user.login == 'github-actions[bot]'
          )
          &&
          (
            github.actor == 'dependabot[bot]' ||
            github.actor == 'dependabot' ||
            github.actor == 'dependabot-preview[bot]' ||
            github.actor == 'dependabot-preview' ||
            github.actor == 'renovate[bot]' ||
            github.actor == 'renovate' ||
            github.actor == 'github-actions[bot]'
          )
        with:
          github-token: ${{ secrets.PAT_TOKEN }}
.github/workflows/codacy-analysis.yml (4 changes, vendored)
@@ -29,7 +29,7 @@ jobs:
    steps:
      # Checkout the repository to the GitHub Actions runner
      - name: Checkout code
        uses: actions/checkout@v3
        uses: actions/checkout@v4

      # Execute Codacy Analysis CLI and generate a SARIF output with the security issues identified during the analysis
      - name: Run Codacy Analysis CLI
@@ -51,6 +51,6 @@ jobs:
      # Upload the SARIF file generated in the previous step
      - name: Upload SARIF results file
        continue-on-error: true
        uses: github/codeql-action/upload-sarif@v2
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: results.sarif
.github/workflows/codeql.yml (8 changes, vendored)
@@ -38,11 +38,11 @@ jobs:

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
@@ -56,7 +56,7 @@ jobs:
      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2
        uses: github/codeql-action/autobuild@v3

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -69,6 +69,6 @@ jobs:
      # ./location_of_script_within_repo/buildscript.sh

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/greetings.yml (13 changes, vendored)
@@ -1,13 +0,0 @@
name: Greetings

on: [pull_request_target, issues]

jobs:
  greeting:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/first-interaction@v1
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          issue-message: "Thanks for reporting this issue, don't forget to star this project if you haven't already to help us reach a wider audience."
          pr-message: "Thanks for implementing a fix, could you ensure that the test covers your changes if applicable."
.github/workflows/issue-comment-test.yml (174 changes, vendored, new file)
@@ -0,0 +1,174 @@
name: Issue Comment Test
on:
  issue_comment:

jobs:
  pr_commented:
    # This job only runs for pull request comments
    name: PR comment
    if: ${{ github.event.issue.pull_request }}
    runs-on: ubuntu-latest
    steps:
      - run: |
          echo A comment on PR $NUMBER
        env:
          NUMBER: ${{ github.event.issue.number }}

      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0

      - name: Dump GitHub context
        env:
          GITHUB_CONTEXT: ${{ toJson(github) }}
        run: echo "$GITHUB_CONTEXT"

      - name: Run changed-files with defaults
        id: changed-files
        uses: ./

      - name: Show output
        run: |
          echo '${{ toJSON(steps.changed-files.outputs) }}'
        shell:
          bash

      - name: Run changed-files for old new filenames test rename
        id: changed-files-all-old-new-renamed-files
        uses: ./
        with:
          base_sha: d1c0ee4
          sha: 4d04215
          fetch_depth: 60000
          include_all_old_new_renamed_files: true

      - name: Show output
        run: |
          echo '${{ toJSON(steps.changed-files-all-old-new-renamed-files.outputs) }}'
        shell:
          bash

      - name: Show output
        run: |
          echo '${{ toJSON(steps.changed-files-all-old-new-renamed-files.outputs) }}'
        shell:
          bash

      - name: Check all_old_new_renamed_files output on non windows platform
        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files, 'test/test rename 1.txt,test/test rename-1.txt') && runner.os != 'Windows'"
        run: |
          echo "Invalid output: Expected to include (test/test rename 1.txt,test/test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files }})"
          exit 1
        shell:
          bash

      - name: Check all_old_new_renamed_files output on windows platform
        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files, 'test\\test rename 1.txt,test\\test rename-1.txt') && runner.os == 'Windows'"
        run: |
          echo "Invalid output: Expected to not include (test\\test rename 1.txt,test\\test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files }})"
          exit 1
        shell:
          bash

      - name: Check the renamed_files output on non windows platform
        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.renamed_files, 'test/test rename-1.txt') && runner.os != 'Windows'"
        run: |
          echo "Invalid output: Expected to include (test/test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.renamed_files }})"
          exit 1
        shell:
          bash

      - name: Check the renamed_files output on windows platform
        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.renamed_files, 'test\\test rename-1.txt') && runner.os == 'Windows'"
        run: |
          echo "Invalid output: Expected to not include (test\\test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.renamed_files }})"
          exit 1
        shell:
          bash

  issue_commented:
    # This job only runs for issue comments
    name: Issue comment
    if: ${{ !github.event.issue.pull_request }}
    runs-on: ubuntu-latest
    steps:
      - run: |
          echo A comment on issue $NUMBER
        env:
          NUMBER: ${{ github.event.issue.number }}

      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0

      - name: Dump GitHub context
        env:
          GITHUB_CONTEXT: ${{ toJson(github) }}
        run: echo "$GITHUB_CONTEXT"

      - name: Run changed-files with defaults
        id: changed-files
        uses: ./

      - name: Show output
        run: |
          echo '${{ toJSON(steps.changed-files.outputs) }}'
        shell:
          bash

      - name: Run changed-files for old new filenames test rename
        id: changed-files-all-old-new-renamed-files
        uses: ./
        with:
          base_sha: d1c0ee4
          sha: 4d04215
          fetch_depth: 60000
          include_all_old_new_renamed_files: true

      - name: Show output
        run: |
          echo '${{ toJSON(steps.changed-files-all-old-new-renamed-files.outputs) }}'
        shell:
          bash

      - name: Show output
        run: |
          echo '${{ toJSON(steps.changed-files-all-old-new-renamed-files.outputs) }}'
        shell:
          bash

      - name: Check all_old_new_renamed_files output on non windows platform
        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files, 'test/test rename 1.txt,test/test rename-1.txt') && runner.os != 'Windows'"
        run: |
          echo "Invalid output: Expected to include (test/test rename 1.txt,test/test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files }})"
          exit 1
        shell:
          bash

      - name: Check all_old_new_renamed_files output on windows platform
        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files, 'test\\test rename 1.txt,test\\test rename-1.txt') && runner.os == 'Windows'"
        run: |
          echo "Invalid output: Expected to not include (test\\test rename 1.txt,test\\test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files }})"
          exit 1
        shell:
          bash

      - name: Check the renamed_files output on non windows platform
        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.renamed_files, 'test/test rename-1.txt') && runner.os != 'Windows'"
        run: |
          echo "Invalid output: Expected to include (test/test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.renamed_files }})"
          exit 1
        shell:
          bash

      - name: Check the renamed_files output on windows platform
        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.renamed_files, 'test\\test rename-1.txt') && runner.os == 'Windows'"
        run: |
          echo "Invalid output: Expected to not include (test\\test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.renamed_files }})"
          exit 1
        shell:
          bash
.github/workflows/manual-test.yml (2 changes, vendored)
@@ -16,7 +16,7 @@ jobs:

    steps:
      - name: Checkout
        uses: actions/checkout@v3
        uses: actions/checkout@v4
        with:
          submodules: true
          fetch-depth: 0
.github/workflows/matrix-test.yml (14 changes, vendored)
@@ -8,13 +8,13 @@ on:

jobs:
  changed-files:
    name: Get changes
    name: Get changed files
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
      matrix: ${{ steps.changed-files.outputs.all_changed_files }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Get changed files
@@ -23,22 +23,22 @@ jobs:
        with:
          json: true
          quotepath: false
          escape_json: false
      - name: List all changed files
        run: echo '${{ steps.changed-files.outputs.all_changed_files }}'
      - id: set-matrix
        run: echo "matrix={\"files\":${{ steps.changed-files.outputs.all_changed_files }}}" >> "$GITHUB_OUTPUT"

  matrix-job:
    name: Run Matrix Job
    runs-on: ubuntu-latest
    needs: [changed-files]
    strategy:
      matrix: ${{ fromJSON(needs.changed-files.outputs.matrix) }}
      matrix:
        files: ${{ fromJSON(needs.changed-files.outputs.matrix) }}
      max-parallel: 4
      fail-fast: false
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        uses: actions/checkout@v4
      - name: Test
        run: |
          echo ${{ matrix.files }}
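For reference, a minimal sketch of the pattern the updated matrix-test.yml now follows: the first job exposes the action's JSON output directly, and the second job fans a matrix out over it. The `@v40` version tag and the job/step names here are illustrative assumptions, not part of the diff.

```yaml
jobs:
  changed-files:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.changed-files.outputs.all_changed_files }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Get changed files as a JSON array
        id: changed-files
        uses: tj-actions/changed-files@v40  # version tag assumed for illustration
        with:
          json: true
          escape_json: false
  matrix-job:
    needs: [changed-files]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        files: ${{ fromJSON(needs.changed-files.outputs.matrix) }}
      fail-fast: false
    steps:
      - run: echo "${{ matrix.files }}"  # one changed file per matrix leg
```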
.github/workflows/submodule-sync.yml (29 changes, vendored)
@@ -1,29 +0,0 @@
on:
  workflow_dispatch:

jobs:
  sync:
    name: Submodule Sync
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
          submodules: recursive

      - name: Git Sumbodule Update
        run: |
          git pull --recurse-submodules
          git submodule update --remote --recursive

      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v5.0.2
        with:
          title: "Updated submodule"
          labels: "merge when passing"
          branch: "chore/update-submodule"
          commit-message: "Updated submodule"
          body: "Updated submodule"
          token: ${{ secrets.PAT_TOKEN }}
.github/workflows/sync-release-version.yml (12 changes, vendored)
@@ -8,11 +8,11 @@ jobs:
  update-version:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Run release-tagger
        uses: tj-actions/release-tagger@v3
        uses: tj-actions/release-tagger@v4
      - name: Sync release version.
        uses: tj-actions/sync-release-version@v13
        id: sync-release-version
@@ -21,6 +21,14 @@ jobs:
          only_major: true
          paths: |
            README.md
      - name: Sync release package version.
        uses: tj-actions/sync-release-version@v13
        id: sync-release-package-version
        with:
          pattern: '"version": "'
          strip_prefix: "v"
          paths: |
            package.json
      - name: Run git-cliff
        uses: tj-actions/git-cliff@v1
      - name: Create Pull Request
.github/workflows/test.yml (762 changes, vendored): file diff suppressed because it is too large.
.github/workflows/update-readme.yml (7 changes, vendored)
@@ -9,12 +9,15 @@ jobs:
  sync-assets:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Run auto-doc
        uses: tj-actions/auto-doc@v2
        uses: tj-actions/auto-doc@v3
        with:
          use_code_blocks: true
          use_major_version: true

      - name: Run remark
        uses: tj-actions/remark@v3
HISTORY.md (1065 changes): file diff suppressed because it is too large.
SECURITY.md (32 changes, new file)
@@ -0,0 +1,32 @@
# Security Policy

## Proactive Security Measures

To proactively detect and address security vulnerabilities, we utilize several robust tools and processes:

- **Dependency Updates:** We use [Renovate](https://renovatebot.com) and [Dependabot](https://docs.github.com/en/code-security/dependabot/dependabot-security-updates/about-dependabot-security-updates) to keep our dependencies updated and promptly patch detected vulnerabilities through automated PRs.
- **[GitHub's Security Features](https://github.com/features/security):** Our repository and dependencies are continuously monitored via GitHub's security features, which include:
  - **Code Scanning:** Using GitHub's CodeQL, all pull requests are scanned to identify potential vulnerabilities in our source code.
  - **Automated Alerts:** Dependabot identifies vulnerabilities based on the GitHub Advisory Database and opens PRs with patches, while automated [secret scanning](https://docs.github.com/en/enterprise-cloud@latest/code-security/secret-scanning/about-secret-scanning#about-secret-scanning-for-partner-patterns) provides alerts for detected secrets.
- **[GitGuardian Security Checks](https://www.gitguardian.com/):** We employ GitGuardian to ensure security checks are performed on the codebase, enhancing the overall security of our project.
- **Code Analysis and Security Scanning:** With the help of [Codacy Static Code Analysis](https://www.codacy.com/) and [Codacy Security Scan](https://security.codacy.com/), we conduct thorough analyses and scans of our code for potential security risks.

## Reporting Security Vulnerabilities

Despite our best efforts to deliver secure software, we acknowledge the invaluable role of the community in identifying security breaches.

### Private Vulnerability Disclosures

We request all suspected vulnerabilities to be responsibly and privately disclosed by sending an email to [support@tj-actions.online](mailto:support@tj-actions.online).

### Public Vulnerability Disclosures

For publicly disclosed security vulnerabilities, please **IMMEDIATELY** email [support@tj-actions.online](mailto:support@tj-actions.online) with the details for prompt action.

Upon confirmation of a breach, reporters will receive full credit and recognition for their contribution. Please note, that we do not offer monetary compensation for reporting vulnerabilities.

## Communication of Security Breaches

We will utilize the [GitHub Security Advisory](https://github.com/tj-actions/changed-files/security/advisories) to communicate any security breaches. The advisory will be made public once a patch has been released to rectify the issue.

We appreciate your cooperation and contribution to maintaining the security of our software. Remember, a secure community is a strong community.
action.yml (87 changes)
@@ -4,11 +4,11 @@ author: tj-actions

inputs:
  separator:
    description: "Split character for output strings"
    description: "Split character for output strings."
    required: false
    default: " "
  include_all_old_new_renamed_files:
    description: "Include `all_old_new_renamed_files` output. Note this can generate a large output See: [#501](https://github.com/tj-actions/changed-files/issues/501)."
    description: "Include `all_old_new_renamed_files` output. Note this can generate a large output See: #501."
    required: false
    default: "false"
  old_new_separator:
@@ -24,11 +24,13 @@ inputs:
    required: false
    default: ""
  files_from_source_file_separator:
    description: 'Separator used to split the `files_from_source_file` input'
    description: "Separator used to split the `files_from_source_file` input."
    default: "\n"
    required: false
  files:
    description: "File and directory patterns used to detect changes (Defaults to the entire repo if unset) **NOTE:** Multiline file/directory patterns should not include quotes."
    description: |
      File and directory patterns used to detect changes (Defaults to the entire repo if unset).
      NOTE: Multiline file/directory patterns should not include quotes.
    required: false
    default: ""
  files_separator:
@@ -40,7 +42,7 @@ inputs:
    required: false
    default: ""
  files_yaml_from_source_file:
    description: "Source file(s) used to populate the `files_yaml` input. [Example](https://github.com/tj-actions/changed-files/blob/main/test/changed-files.yml)"
    description: "Source file(s) used to populate the `files_yaml` input. Example: https://github.com/tj-actions/changed-files/blob/main/test/changed-files.yml"
    required: false
    default: ""
  files_yaml_from_source_file_separator:
@@ -52,7 +54,7 @@ inputs:
    required: false
    default: ""
  files_ignore_yaml_from_source_file:
    description: "Source file(s) used to populate the `files_ignore_yaml` input. [Example](https://github.com/tj-actions/changed-files/blob/main/test/changed-files.yml)"
    description: "Source file(s) used to populate the `files_ignore_yaml` input. Example: https://github.com/tj-actions/changed-files/blob/main/test/changed-files.yml"
    required: false
    default: ""
  files_ignore_yaml_from_source_file_separator:
@@ -60,7 +62,7 @@ inputs:
    default: "\n"
    required: false
  files_ignore:
    description: "Ignore changes to these file(s) **NOTE:** Multiline file/directory patterns should not include quotes."
    description: "Ignore changes to these file(s). NOTE: Multiline file/directory patterns should not include quotes."
    required: false
    default: ""
  files_ignore_separator:
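The `files_yaml*` inputs above group file patterns under named keys. A hedged sketch of one way to combine them with the `changed_keys` output documented later in this manifest follows; the `@v40` tag and the key names are assumptions for illustration only.

```yaml
- name: Detect changed file groups
  id: changed-files-yaml
  uses: tj-actions/changed-files@v40  # version tag assumed
  with:
    files_yaml: |
      docs:
        - '**.md'
      src:
        - src/**
- name: Rebuild docs only when a docs path changed
  if: contains(steps.changed-files-yaml.outputs.changed_keys, 'docs')
  run: echo "A file under the docs group changed"
```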
@@ -76,10 +78,10 @@ inputs:
    default: "\n"
    required: false
  sha:
    description: "Specify a different commit SHA used for comparing changes"
    description: "Specify a different commit SHA or branch used for comparing changes"
    required: false
  base_sha:
    description: "Specify a different base commit SHA used for comparing changes"
    description: "Specify a different base commit SHA or branch used for comparing changes"
    required: false
  since:
    description: "Get changed files for commits whose timestamp is older than the given time."
@@ -94,16 +96,16 @@ inputs:
    required: false
    default: "."
  quotepath:
    description: "Use non-ascii characters to match files and output the filenames completely verbatim by setting this to `false`"
    description: "Use non-ASCII characters to match files and output the filenames completely verbatim by setting this to `false`"
    default: "true"
    required: false
  diff_relative:
    description: "Exclude changes outside the current directory and show path names relative to it. **NOTE:** This requires you to specify the top level directory via the `path` input."
    description: "Exclude changes outside the current directory and show path names relative to it. NOTE: This requires you to specify the top-level directory via the `path` input."
    required: false
    default: "true"
  dir_names:
    default: "false"
    description: "Output unique changed directories instead of filenames. **NOTE:** This returns `.` for changed files located in the current working directory which defaults to `$GITHUB_WORKSPACE`."
    description: "Output unique changed directories instead of filenames. NOTE: This returns `.` for changed files located in the current working directory which defaults to `$GITHUB_WORKSPACE`."
    required: false
  dir_names_max_depth:
    description: "Limit the directory output to a maximum depth e.g `test/test1/test2` with max depth of `2` returns `test/test1`."
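Since `sha` and `base_sha` now accept a branch as well as a commit SHA, a comparison between two refs can be expressed directly. A minimal sketch, where the refs and the `@v40` tag are assumptions:

```yaml
- name: Compare a feature branch against main
  id: changed-files
  uses: tj-actions/changed-files@v40  # version tag assumed
  with:
    base_sha: main            # base ref to compare from
    sha: feature/my-branch    # ref whose changes are reported
```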
@@ -112,8 +114,20 @@ inputs:
    description: "Exclude the current directory represented by `.` from the output when `dir_names` is set to `true`."
    required: false
    default: "false"
  dir_names_include_files:
    description: "File and directory patterns to include in the output when `dir_names` is set to `true`. NOTE: This returns only the matching files and also the directory names."
    required: false
    default: ""
  dir_names_include_files_separator:
    description: "Separator used to split the `dir_names_include_files` input"
    default: "\n"
    required: false
  dir_names_deleted_files_include_only_deleted_dirs:
    description: "Include only directories that have been deleted as opposed to directory names of files that have been deleted in the `deleted_files` output when `dir_names` is set to `true`."
    required: false
    default: "false"
  json:
    description: "Output list of changed files in a JSON formatted string which can be used for matrix jobs."
    description: "Output list of changed files in a JSON formatted string which can be used for matrix jobs. Example: https://github.com/tj-actions/changed-files/blob/main/.github/workflows/matrix-test.yml"
    required: false
    default: "false"
  escape_json:
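A short sketch combining the directory-related inputs described in this hunk; the inputs are the ones documented above, while the `@v40` tag and step id are illustrative assumptions.

```yaml
- name: List changed directories instead of files
  id: changed-dirs
  uses: tj-actions/changed-files@v40  # version tag assumed
  with:
    dir_names: true                      # report directories, not filenames
    dir_names_exclude_current_dir: true  # drop the "." entry
    dir_names_max_depth: 2               # truncate paths to two levels
- run: echo "${{ steps.changed-dirs.outputs.all_changed_files }}"
```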
@@ -121,11 +135,17 @@ inputs:
    required: false
    default: "true"
  fetch_depth:
    description: "Depth of additional branch history fetched. **NOTE**: This can be adjusted to resolve errors with insufficient history."
    description: "Depth of additional branch history fetched. NOTE: This can be adjusted to resolve errors with insufficient history."
    required: false
    default: "50"
  skip_initial_fetch:
    description: "Skip the initial fetch to improve performance for shallow repositories. **NOTE**: This could lead to errors with missing history and the intended use is limited to when you've fetched the history necessary to perform the diff."
    description: |
      Skip the initial fetch to improve performance for shallow repositories.
      NOTE: This could lead to errors with missing history and the intended use is limited to when you've fetched the history necessary to perform the diff.
    required: false
    default: "false"
  fetch_additional_submodule_history:
    description: "Fetch additional history for submodules."
    required: false
    default: "false"
  since_last_remote_commit:
@@ -133,7 +153,7 @@ inputs:
    required: false
    default: "false"
  write_output_files:
    description: "Write outputs to the `output_dir` defaults to `.github/outputs` folder. **NOTE:** This creates a `.txt` file by default and a `.json` file if `json` is set to `true`."
    description: "Write outputs to the `output_dir` defaults to `.github/outputs` folder. NOTE: This creates a `.txt` file by default and a `.json` file if `json` is set to `true`."
    required: false
    default: "false"
  output_dir:
@@ -153,7 +173,10 @@ inputs:
    required: false
    default: ""
  recover_files:
    description: "File and directory patterns used to recover deleted files, defaults to the patterns provided via the `files`, `files_from_source_file`, `files_ignore` and `files_ignore_from_source_file` inputs or all deleted files if no patterns are provided."
    description: |
      File and directory patterns used to recover deleted files,
      defaults to the patterns provided via the `files`, `files_from_source_file`, `files_ignore` and `files_ignore_from_source_file` inputs
      or all deleted files if no patterns are provided.
    required: false
    default: ""
  recover_files_separator:
@@ -169,13 +192,25 @@ inputs:
    default: "\n"
    required: false
  token:
    description: "Github token used to fetch changed files from Github's API."
    description: "GitHub token used to fetch changed files from Github's API."
    required: false
    default: ${{ github.token }}
  api_url:
    description: "Github API URL."
    required: false
    default: ${{ github.api_url }}
  fail_on_initial_diff_error:
    description: "Fail when the initial diff fails."
    required: false
    default: "false"
  fail_on_submodule_diff_error:
    description: "Fail when the submodule diff fails."
    required: false
    default: "false"
  negation_patterns_first:
    description: "Apply the negation patterns first. NOTE: This affects how changed files are matched."
    required: false
    default: "false"

outputs:
  added_files:
@@ -223,11 +258,11 @@ outputs:
  all_changed_files_count:
    description: "Returns the number of `all_changed_files`"
  any_changed:
    description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs has changed. i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
    description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs has changed. i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*."
  only_changed:
    description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs has changed. i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
    description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs has changed. i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*."
  other_changed_files:
    description: "Returns all other changed files not listed in the files input i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
    description: "Returns all other changed files not listed in the files input i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*."
  other_changed_files_count:
    description: "Returns the number of `other_changed_files`"
  all_modified_files:
@@ -235,11 +270,11 @@ outputs:
  all_modified_files_count:
    description: "Returns the number of `all_modified_files`"
  any_modified:
    description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs has been modified. i.e. *using a combination of all added, copied, modified, renamed, and deleted files (ACMRD)*."
    description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs has been modified. i.e. *includes a combination of all added, copied, modified, renamed, and deleted files (ACMRD)*."
  only_modified:
    description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs has been modified. (ACMRD)."
  other_modified_files:
    description: "Returns all other modified files not listed in the files input i.e. *a combination of all added, copied, modified, and deleted files (ACMRD)*"
    description: "Returns all other modified files not listed in the files input i.e. *a combination of all added, copied, modified, and deleted files (ACMRD)*"
  other_modified_files_count:
    description: "Returns the number of `other_modified_files`"
  any_deleted:
@@ -250,9 +285,13 @@ outputs:
    description: "Returns all other deleted files not listed in the files input i.e. *a combination of all deleted files (D)*"
  other_deleted_files_count:
    description: "Returns the number of `other_deleted_files`"
  modified_keys:
    description: "Returns all modified YAML keys when the `files_yaml` input is used. i.e. *key that contains any path that has either been added, copied, modified, and deleted (ACMRD)*"
  changed_keys:
    description: "Returns all changed YAML keys when the `files_yaml` input is used. i.e. *key that contains any path that has either been added, copied, modified, and renamed (ACMR)*"

runs:
  using: 'node16'
  using: 'node20'
  main: 'dist/index.js'

branding:
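Putting the inputs and outputs above together, a minimal hedged usage sketch of the action this manifest describes; the `@v40` tag and the file patterns are illustrative assumptions, while `files`, `any_changed` and `all_changed_files` are the inputs and outputs documented in the diff.

```yaml
steps:
  - uses: actions/checkout@v4
    with:
      fetch-depth: 0
  - name: Detect changed files
    id: changed-files
    uses: tj-actions/changed-files@v40  # version tag assumed
    with:
      files: |
        src/**
        package.json
  - name: Run only when a watched file changed
    if: steps.changed-files.outputs.any_changed == 'true'
    run: echo "Changed: ${{ steps.changed-files.outputs.all_changed_files }}"
```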
dist/index.js (34233 changes, generated, vendored): file diff suppressed because one or more lines are too long.
dist/index.js.map (2 changes, generated, vendored): file diff suppressed because one or more lines are too long.
dist/licenses.txt (152 changes, generated, vendored)
@@ -71,6 +71,28 @@ The above copyright notice and this permission notice shall be included in all c
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
@fastify/busboy
|
||||
MIT
|
||||
Copyright Brian White. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
|
||||
@octokit/auth-token
|
||||
MIT
|
||||
The MIT License
|
||||
@@ -243,16 +265,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
|
||||
@vercel/ncc
|
||||
MIT
|
||||
Copyright 2018 ZEIT, Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
before-after-hook
|
||||
Apache-2.0
|
||||
Apache License
|
||||
@@ -552,31 +564,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
|
||||
is-plain-object
|
||||
MIT
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014-2017, Jon Schlinkert.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
|
||||
lodash
|
||||
MIT
|
||||
Copyright OpenJS Foundation and other contributors <https://openjsf.org/>
|
||||
@@ -653,32 +640,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
|
||||
node-fetch
|
||||
MIT
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 David Frank
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
|
||||
|
||||
once
|
||||
ISC
|
||||
The ISC License
|
||||
@@ -748,9 +709,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
|
||||
tr46
|
||||
MIT
|
||||
|
||||
tunnel
|
||||
MIT
|
||||
The MIT License (MIT)
|
||||
@@ -776,6 +734,31 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
|
||||
undici
|
||||
MIT
|
||||
MIT License
|
||||
|
||||
Copyright (c) Matteo Collina and Undici contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
|
||||
universal-user-agent
|
||||
ISC
|
||||
# [ISC License](https://spdx.org/licenses/ISC)
|
||||
@@ -800,47 +783,6 @@ The above copyright notice and this permission notice shall be included in all c
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


webidl-conversions
BSD-2-Clause
# The BSD 2-Clause License

Copyright (c) 2014, Domenic Denicola
All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


whatwg-url
MIT
The MIT License (MIT)

Copyright (c) 2015–2016 Sebastian Mayr

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.


wrappy
ISC
The ISC License
17
package.json
@@ -1,6 +1,6 @@
{
"name": "@tj-actions/changed-files",
"version": "37.4.0",
"version": "40.2.2",
"description": "Github action to retrieve all (added, copied, modified, deleted, renamed, type changed, unmerged, unknown) files and directories.",
"main": "lib/main.js",
"publishConfig": {
@@ -8,10 +8,10 @@
},
"scripts": {
"build": "tsc",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"lint": "eslint src/**/*.ts",
"lint:fix": "eslint --fix src/**/*.ts",
"format": "prettier --write src/*.ts src/**/*.ts",
"format-check": "prettier --check src/*.ts src/**/*.ts",
"lint": "eslint src/*.ts src/**/*.ts --max-warnings 0",
"lint:fix": "eslint --fix src/*.ts src/**/*.ts",
"package": "ncc build lib/main.js --source-map --license licenses.txt",
"test": "jest --coverage",
"all": "yarn build && yarn format && yarn lint && yarn package && yarn test"
@@ -34,8 +34,8 @@
"dependencies": {
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/github": "^5.1.1",
"@octokit/rest": "^19.0.13",
"@actions/github": "^6.0.0",
"@octokit/rest": "^20.0.1",
"lodash": "^4.17.21",
"micromatch": "^4.0.5",
"yaml": "^2.3.1"
@@ -48,11 +48,12 @@
"@types/uuid": "^9.0.2",
"@typescript-eslint/eslint-plugin": "^6.0.0",
"@typescript-eslint/parser": "^6.0.0",
"@vercel/ncc": "^0.36.1",
"@vercel/ncc": "^0.38.0",
"eslint": "^8.43.0",
"eslint-plugin-github": "^4.8.0",
"eslint-plugin-jest": "^27.2.2",
"eslint-plugin-prettier": "^5.0.0-alpha.2",
"eslint-config-prettier": "^9.0.0",
"jest": "^29.5.0",
"prettier": "^3.0.0",
"ts-jest": "^29.1.0",

@@ -1,5 +0,0 @@
describe('main test', () => {
it('adds two numbers', async () => {
expect(1 + 1).toEqual(2)
})
})
@@ -74,6 +74,7 @@ describe('utils test', () => {
|
||||
|
||||
// Tests that the function returns the correct dirname when the relative path is a Windows drive root and excludeCurrentDir is true
|
||||
it('test_windows_drive_root_and_exclude_current_dir_is_true', () => {
|
||||
mockedPlatform('win32')
|
||||
const result = getDirnameMaxDepth({
|
||||
relativePath: 'C:\\',
|
||||
dirNamesMaxDepth: 1,
|
||||
@@ -107,6 +108,7 @@ describe('utils test', () => {
|
||||
|
||||
// Tests that getDirnameMaxDepth returns the correct output for a Windows UNC root path
|
||||
it('test_windows_unc_root', () => {
|
||||
mockedPlatform('win32')
|
||||
const input = {
|
||||
relativePath: '\\hello',
|
||||
dirNamesMaxDepth: 2,
|
||||
@@ -118,6 +120,7 @@ describe('utils test', () => {
|
||||
|
||||
// Tests that getDirnameMaxDepth returns an empty string when given a Windows UNC root and excludeCurrentDir is true
|
||||
it('test_windows_unc_root_exclude_current_dir', () => {
|
||||
mockedPlatform('win32')
|
||||
const relativePath = '\\hello'
|
||||
const result = getDirnameMaxDepth({
|
||||
relativePath,
|
||||
@@ -152,6 +155,7 @@ describe('utils test', () => {
|
||||
|
||||
// Tests that the function returns the correct dirname for a valid Windows UNC root path
|
||||
it('test windows unc root path', () => {
|
||||
mockedPlatform('win32')
|
||||
expect(getDirname('\\helloworld')).toEqual('.')
|
||||
})
|
||||
|
||||
@@ -162,6 +166,7 @@ describe('utils test', () => {
|
||||
|
||||
// Tests that the function returns the correct dirname for a Windows UNC root path with a trailing slash
|
||||
it('test windows unc root path with trailing slash', () => {
|
||||
mockedPlatform('win32')
|
||||
expect(getDirname('\\hello\\world\\')).toEqual('.')
|
||||
})
|
||||
|
||||
@@ -172,6 +177,7 @@ describe('utils test', () => {
|
||||
|
||||
// Tests that the function returns the correct dirname for a Windows UNC root path with multiple slashes
|
||||
it('test windows unc root path with multiple slashes', () => {
|
||||
mockedPlatform('win32')
|
||||
expect(getDirname('\\hello\\world')).toEqual('.')
|
||||
})
|
||||
})
|
||||
@@ -194,6 +200,15 @@ describe('utils test', () => {
|
||||
expect(actualOutput).toEqual(expectedOutput)
|
||||
})
|
||||
|
||||
// Tests that forward slashes are normalized on Windows
|
||||
it('test mixed slashes windows', () => {
|
||||
mockedPlatform('win32')
|
||||
const input = 'path/to/file'
|
||||
const expectedOutput = 'path\\to\\file'
|
||||
const actualOutput = normalizeSeparators(input)
|
||||
expect(actualOutput).toEqual(expectedOutput)
|
||||
})
|
||||
|
||||
// Tests that mixed slashes are normalized on Windows
|
||||
it('test mixed slashes windows', () => {
|
||||
mockedPlatform('win32')
|
||||
@@ -221,6 +236,7 @@ describe('utils test', () => {
|
||||
|
||||
// Tests that UNC format is preserved on Windows
|
||||
it('test unc format windows', () => {
|
||||
mockedPlatform('win32')
|
||||
const input = '\\\\hello\\world'
|
||||
const expectedOutput = '\\\\hello\\world'
|
||||
const actualOutput = normalizeSeparators(input)
|
||||
@@ -229,6 +245,7 @@ describe('utils test', () => {
|
||||
|
||||
// Tests that a drive root is preserved on Windows
|
||||
it('test drive root windows', () => {
|
||||
mockedPlatform('win32')
|
||||
const input = 'C:\\'
|
||||
const expectedOutput = 'C:\\'
|
||||
const actualOutput = normalizeSeparators(input)
|
||||
@@ -236,8 +253,6 @@ describe('utils test', () => {
|
||||
})
|
||||
})
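The normalizeSeparators tests above pin down three Windows behaviours: forward slashes are converted to backslashes, UNC prefixes (\\server\share) are preserved, and drive roots such as C:\ pass through unchanged. A minimal sketch of a helper with those properties follows; it is an illustration only, not the action's actual src/utils.ts implementation, and the isWindows flag stands in for the platform check the real code performs.

```ts
// Illustrative sketch only; the real normalizeSeparators in src/utils.ts may differ.
const normalizeSeparatorsSketch = (p: string, isWindows: boolean): string => {
  if (!isWindows) {
    // Assumption for this sketch: on POSIX platforms backslashes become forward slashes.
    return p.replace(/\\/g, '/')
  }
  // Preserve a leading UNC prefix (\\server\share) and convert the rest.
  const uncPrefix = p.startsWith('\\\\') ? '\\\\' : ''
  return uncPrefix + p.slice(uncPrefix.length).replace(/\//g, '\\')
}

normalizeSeparatorsSketch('path/to/file', true)     // => 'path\\to\\file'
normalizeSeparatorsSketch('\\\\hello\\world', true) // => '\\\\hello\\world' (UNC preserved)
normalizeSeparatorsSketch('C:\\', true)             // => 'C:\\' (drive root preserved)
```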
|
||||
|
||||
// Generated by CodiumAI
|
||||
|
||||
describe('getFilteredChangedFiles', () => {
|
||||
// Tests that the function returns an empty object when allDiffFiles and filePatterns are empty
|
||||
it('should return an empty object when allDiffFiles and filePatterns are empty', async () => {
|
||||
@@ -312,7 +327,7 @@ describe('utils test', () => {
|
||||
})
|
||||
})
|
||||
|
||||
// Tests that the function returns only the files that match the file patterns
|
||||
// Tests that the function returns only the files that match the file patterns on non windows platforms
|
||||
it('should return only the files that match the file patterns', async () => {
|
||||
const allDiffFiles = {
|
||||
[ChangeTypeEnum.Added]: [
|
||||
@@ -320,7 +335,7 @@ describe('utils test', () => {
|
||||
'file2.md',
|
||||
'file3.txt',
|
||||
'test/dir/file4.txt',
|
||||
'/test/dir/file5.txt',
|
||||
'test/dir/file5.txt',
|
||||
'dir/file6.md'
|
||||
],
|
||||
[ChangeTypeEnum.Copied]: [],
|
||||
@@ -347,7 +362,44 @@ describe('utils test', () => {
|
||||
})
|
||||
})
|
||||
|
||||
// Tests that the function returns only the files that match the file patterns with globstar
|
||||
// Tests that the function returns only the files that match the file patterns on windows
|
||||
it('should return only the files that match the file patterns on windows', async () => {
|
||||
mockedPlatform('win32')
|
||||
const allDiffFiles = {
|
||||
[ChangeTypeEnum.Added]: [
|
||||
'file1.txt',
|
||||
'file2.md',
|
||||
'file3.txt',
|
||||
'test\\dir\\file4.txt',
|
||||
'test\\dir\\file5.txt',
|
||||
'dir\\file6.md'
|
||||
],
|
||||
[ChangeTypeEnum.Copied]: [],
|
||||
[ChangeTypeEnum.Deleted]: [],
|
||||
[ChangeTypeEnum.Modified]: [],
|
||||
[ChangeTypeEnum.Renamed]: [],
|
||||
[ChangeTypeEnum.TypeChanged]: [],
|
||||
[ChangeTypeEnum.Unmerged]: [],
|
||||
[ChangeTypeEnum.Unknown]: []
|
||||
}
|
||||
const result = await getFilteredChangedFiles({
|
||||
allDiffFiles,
|
||||
filePatterns: ['*.txt']
|
||||
})
|
||||
|
||||
expect(result).toEqual({
|
||||
[ChangeTypeEnum.Added]: ['file1.txt', 'file3.txt'],
|
||||
[ChangeTypeEnum.Copied]: [],
|
||||
[ChangeTypeEnum.Deleted]: [],
|
||||
[ChangeTypeEnum.Modified]: [],
|
||||
[ChangeTypeEnum.Renamed]: [],
|
||||
[ChangeTypeEnum.TypeChanged]: [],
|
||||
[ChangeTypeEnum.Unmerged]: [],
|
||||
[ChangeTypeEnum.Unknown]: []
|
||||
})
|
||||
})
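The Windows variants of these getFilteredChangedFiles tests feed backslash-separated paths through the same file patterns. The matching itself is done with micromatch (declared as a dependency in package.json above); a small standalone example of the kind of matching involved is shown below. It assumes paths have already been normalized to forward slashes before matching, which is an assumption of this sketch rather than something the diff states.

```ts
import mm from 'micromatch'

// '*.txt' only matches top-level files, so nested paths are filtered out.
mm.isMatch('file1.txt', ['*.txt'])          // => true
mm.isMatch('test/dir/file4.txt', ['*.txt']) // => false

// A globstar pattern matches anything under the directory, including names with spaces.
mm.isMatch('test/test rename-1.txt', ['test/**']) // => true
```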
|
||||
|
||||
// Tests that the function returns only the files that match the file patterns with globstar on non windows platforms
|
||||
it('should return only the files that match the file patterns with globstar', async () => {
|
||||
const allDiffFiles = {
|
||||
[ChangeTypeEnum.Added]: [
|
||||
@@ -355,7 +407,7 @@ describe('utils test', () => {
|
||||
'file2.md',
|
||||
'file3.txt',
|
||||
'test/dir/file4.txt',
|
||||
'/test/dir/file5.txt',
|
||||
'test/dir/file5.txt',
|
||||
'dir/file6.md'
|
||||
],
|
||||
[ChangeTypeEnum.Copied]: [],
|
||||
@@ -375,7 +427,7 @@ describe('utils test', () => {
|
||||
'file1.txt',
|
||||
'file3.txt',
|
||||
'test/dir/file4.txt',
|
||||
'/test/dir/file5.txt'
|
||||
'test/dir/file5.txt'
|
||||
],
|
||||
[ChangeTypeEnum.Copied]: [],
|
||||
[ChangeTypeEnum.Deleted]: [],
|
||||
@@ -387,6 +439,35 @@ describe('utils test', () => {
|
||||
})
|
||||
})
|
||||
|
||||
// Tests that the function returns only the files that match the file patterns with globstar on windows
|
||||
it('should return only the files that match the file patterns with globstar on windows', async () => {
|
||||
mockedPlatform('win32')
|
||||
const allDiffFiles = {
|
||||
[ChangeTypeEnum.Added]: ['test\\test rename-1.txt'],
|
||||
[ChangeTypeEnum.Copied]: [],
|
||||
[ChangeTypeEnum.Deleted]: [],
|
||||
[ChangeTypeEnum.Modified]: [],
|
||||
[ChangeTypeEnum.Renamed]: [],
|
||||
[ChangeTypeEnum.TypeChanged]: [],
|
||||
[ChangeTypeEnum.Unmerged]: [],
|
||||
[ChangeTypeEnum.Unknown]: []
|
||||
}
|
||||
const result = await getFilteredChangedFiles({
|
||||
allDiffFiles,
|
||||
filePatterns: ['test/**']
|
||||
})
|
||||
expect(result).toEqual({
|
||||
[ChangeTypeEnum.Added]: ['test\\test rename-1.txt'],
|
||||
[ChangeTypeEnum.Copied]: [],
|
||||
[ChangeTypeEnum.Deleted]: [],
|
||||
[ChangeTypeEnum.Modified]: [],
|
||||
[ChangeTypeEnum.Renamed]: [],
|
||||
[ChangeTypeEnum.TypeChanged]: [],
|
||||
[ChangeTypeEnum.Unmerged]: [],
|
||||
[ChangeTypeEnum.Unknown]: []
|
||||
})
|
||||
})
|
||||
|
||||
// Tests that the function returns an empty object when there are no files that match the file patterns
|
||||
it('should return an empty object when there are no files that match the file patterns', async () => {
|
||||
const allDiffFiles = {
|
||||
|
||||
@@ -2,18 +2,122 @@ import * as core from '@actions/core'
import * as github from '@actions/github'
import type {RestEndpointMethodTypes} from '@octokit/rest'
import flatten from 'lodash/flatten'
import mm from 'micromatch'
import * as path from 'path'

import {setOutputsAndGetModifiedAndChangedFilesStatus} from './changedFilesOutput'
import {DiffResult} from './commitSha'
import {Inputs} from './inputs'
import {
canDiffCommits,
getAllChangedFiles,
getDirnameMaxDepth,
getDirNamesIncludeFilesPattern,
getFilteredChangedFiles,
gitRenamedFiles,
gitSubmoduleDiffSHA,
jsonOutput
isWindows,
jsonOutput,
setArrayOutput
} from './utils'

export const processChangedFiles = async ({
|
||||
filePatterns,
|
||||
allDiffFiles,
|
||||
inputs,
|
||||
yamlFilePatterns,
|
||||
workingDirectory
|
||||
}: {
|
||||
filePatterns: string[]
|
||||
allDiffFiles: ChangedFiles
|
||||
inputs: Inputs
|
||||
yamlFilePatterns: Record<string, string[]>
|
||||
workingDirectory?: string
|
||||
}): Promise<void> => {
|
||||
if (filePatterns.length > 0) {
|
||||
core.startGroup('changed-files-patterns')
|
||||
const allFilteredDiffFiles = await getFilteredChangedFiles({
|
||||
allDiffFiles,
|
||||
filePatterns
|
||||
})
|
||||
core.debug(
|
||||
`All filtered diff files: ${JSON.stringify(allFilteredDiffFiles)}`
|
||||
)
|
||||
await setOutputsAndGetModifiedAndChangedFilesStatus({
|
||||
allDiffFiles,
|
||||
allFilteredDiffFiles,
|
||||
inputs,
|
||||
filePatterns,
|
||||
workingDirectory
|
||||
})
|
||||
core.info('All Done!')
|
||||
core.endGroup()
|
||||
}
|
||||
|
||||
if (Object.keys(yamlFilePatterns).length > 0) {
|
||||
const modifiedKeys: string[] = []
|
||||
const changedKeys: string[] = []
|
||||
|
||||
for (const key of Object.keys(yamlFilePatterns)) {
|
||||
core.startGroup(`changed-files-yaml-${key}`)
|
||||
const allFilteredDiffFiles = await getFilteredChangedFiles({
|
||||
allDiffFiles,
|
||||
filePatterns: yamlFilePatterns[key]
|
||||
})
|
||||
core.debug(
|
||||
`All filtered diff files for ${key}: ${JSON.stringify(
|
||||
allFilteredDiffFiles
|
||||
)}`
|
||||
)
|
||||
const {anyChanged, anyModified} =
|
||||
await setOutputsAndGetModifiedAndChangedFilesStatus({
|
||||
allDiffFiles,
|
||||
allFilteredDiffFiles,
|
||||
inputs,
|
||||
filePatterns: yamlFilePatterns[key],
|
||||
outputPrefix: key,
|
||||
workingDirectory
|
||||
})
|
||||
if (anyModified) {
|
||||
modifiedKeys.push(key)
|
||||
}
|
||||
if (anyChanged) {
|
||||
changedKeys.push(key)
|
||||
}
|
||||
|
||||
core.info('All Done!')
|
||||
core.endGroup()
|
||||
}
|
||||
|
||||
if (modifiedKeys.length > 0) {
|
||||
await setArrayOutput({
|
||||
key: 'modified_keys',
|
||||
inputs,
|
||||
value: modifiedKeys
|
||||
})
|
||||
}
|
||||
|
||||
if (changedKeys.length > 0) {
|
||||
await setArrayOutput({
|
||||
key: 'changed_keys',
|
||||
inputs,
|
||||
value: changedKeys
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (filePatterns.length === 0 && Object.keys(yamlFilePatterns).length === 0) {
|
||||
core.startGroup('changed-files-all')
|
||||
await setOutputsAndGetModifiedAndChangedFilesStatus({
|
||||
allDiffFiles,
|
||||
allFilteredDiffFiles: allDiffFiles,
|
||||
inputs,
|
||||
workingDirectory
|
||||
})
|
||||
core.info('All Done!')
|
||||
core.endGroup()
|
||||
}
|
||||
}
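As the new processChangedFiles above shows, plain file patterns are handled in a single changed-files-patterns group, while each key of yamlFilePatterns gets its own group with outputs prefixed by that key, and keys with any modified or changed files are collected into the modified_keys and changed_keys array outputs. A hedged usage sketch follows; the pattern values and the wrapper function are hypothetical, not taken from the diff.

```ts
import {ChangedFiles, processChangedFiles} from './changedFiles'
import {Inputs} from './inputs'

// Hypothetical wiring; allDiffFiles and inputs would normally come from
// getAllDiffFiles()/getChangedFilesFromGithubAPI() and getInputs().
export async function runYamlGroups(
  allDiffFiles: ChangedFiles,
  inputs: Inputs
): Promise<void> {
  await processChangedFiles({
    filePatterns: [], // no flat patterns, so only the YAML groups run
    allDiffFiles,
    inputs,
    yamlFilePatterns: {
      src: ['src/**'],
      docs: ['docs/**', '*.md']
    },
    workingDirectory: '.'
  })
  // Produces outputs such as src_any_changed / docs_any_changed and, when at least
  // one key saw matching changes, the aggregated modified_keys / changed_keys outputs.
}
```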
|
||||
|
||||
export const getRenamedFiles = async ({
|
||||
inputs,
|
||||
workingDirectory,
|
||||
@@ -51,11 +155,29 @@ export const getRenamedFiles = async ({
|
||||
)
|
||||
|
||||
if (submoduleShaResult.currentSha && submoduleShaResult.previousSha) {
|
||||
let diff = '...'
|
||||
|
||||
if (
|
||||
!(await canDiffCommits({
|
||||
cwd: submoduleWorkingDirectory,
|
||||
sha1: submoduleShaResult.previousSha,
|
||||
sha2: submoduleShaResult.currentSha,
|
||||
diff
|
||||
}))
|
||||
) {
|
||||
let message = `Unable to use three dot diff for: ${submodulePath} submodule. Falling back to two dot diff. You can set 'fetch_additional_submodule_history: true' to fetch additional submodule history in order to use three dot diff`
|
||||
if (inputs.fetchSubmoduleHistory) {
|
||||
message = `To fetch additional submodule history for: ${submodulePath} you can increase history depth using 'fetch_depth' input`
|
||||
}
|
||||
core.info(message)
|
||||
diff = '..'
|
||||
}
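This block falls back from a three-dot diff ('...', changes since the merge base) to a two-dot diff ('..', a direct comparison of the two SHAs) when canDiffCommits reports that the submodule history is too shallow for the requested range. A sketch of how such a check could be performed with @actions/exec is shown below; the real canDiffCommits lives in src/utils.ts and may be implemented differently.

```ts
import {getExecOutput} from '@actions/exec'

// Sketch: returns true when `git diff sha1<diff>sha2` can be computed, i.e. when
// enough history is available for the requested range notation.
const canDiffCommitsSketch = async (
  cwd: string,
  sha1: string,
  sha2: string,
  diff: '..' | '...'
): Promise<boolean> => {
  const {exitCode} = await getExecOutput(
    'git',
    ['diff', '--quiet', `${sha1}${diff}${sha2}`],
    {cwd, ignoreReturnCode: true, silent: true}
  )
  // --quiet exits 0 when there are no changes and 1 when there are; anything else
  // (for example 128 for an unknown range) means the diff could not be computed.
  return exitCode === 0 || exitCode === 1
}
```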
|
||||
|
||||
const submoduleRenamedFiles = await gitRenamedFiles({
|
||||
cwd: submoduleWorkingDirectory,
|
||||
sha1: submoduleShaResult.previousSha,
|
||||
sha2: submoduleShaResult.currentSha,
|
||||
diff: diffResult.diff,
|
||||
diff,
|
||||
oldNewSeparator: inputs.oldNewSeparator,
|
||||
isSubmodule: true,
|
||||
parentDir: submodulePath
|
||||
@@ -98,20 +220,27 @@ export const getAllDiffFiles = async ({
|
||||
hasSubmodule,
|
||||
diffResult,
|
||||
submodulePaths,
|
||||
outputRenamedFilesAsDeletedAndAdded
|
||||
outputRenamedFilesAsDeletedAndAdded,
|
||||
fetchSubmoduleHistory,
|
||||
failOnInitialDiffError,
|
||||
failOnSubmoduleDiffError
|
||||
}: {
|
||||
workingDirectory: string
|
||||
hasSubmodule: boolean
|
||||
diffResult: DiffResult
|
||||
submodulePaths: string[]
|
||||
outputRenamedFilesAsDeletedAndAdded: boolean
|
||||
fetchSubmoduleHistory: boolean
|
||||
failOnInitialDiffError: boolean
|
||||
failOnSubmoduleDiffError: boolean
|
||||
}): Promise<ChangedFiles> => {
|
||||
const files = await getAllChangedFiles({
|
||||
cwd: workingDirectory,
|
||||
sha1: diffResult.previousSha,
|
||||
sha2: diffResult.currentSha,
|
||||
diff: diffResult.diff,
|
||||
outputRenamedFilesAsDeletedAndAdded
|
||||
outputRenamedFilesAsDeletedAndAdded,
|
||||
failOnInitialDiffError
|
||||
})
|
||||
|
||||
if (hasSubmodule) {
|
||||
@@ -130,14 +259,33 @@ export const getAllDiffFiles = async ({
|
||||
)
|
||||
|
||||
if (submoduleShaResult.currentSha && submoduleShaResult.previousSha) {
|
||||
let diff = '...'
|
||||
|
||||
if (
|
||||
!(await canDiffCommits({
|
||||
cwd: submoduleWorkingDirectory,
|
||||
sha1: submoduleShaResult.previousSha,
|
||||
sha2: submoduleShaResult.currentSha,
|
||||
diff
|
||||
}))
|
||||
) {
|
||||
let message = `Set 'fetch_additional_submodule_history: true' to fetch additional submodule history for: ${submodulePath}`
|
||||
if (fetchSubmoduleHistory) {
|
||||
message = `To fetch additional submodule history for: ${submodulePath} you can increase history depth using 'fetch_depth' input`
|
||||
}
|
||||
core.warning(message)
|
||||
diff = '..'
|
||||
}
|
||||
|
||||
const submoduleFiles = await getAllChangedFiles({
|
||||
cwd: submoduleWorkingDirectory,
|
||||
sha1: submoduleShaResult.previousSha,
|
||||
sha2: submoduleShaResult.currentSha,
|
||||
diff: diffResult.diff,
|
||||
diff,
|
||||
isSubmodule: true,
|
||||
parentDir: submodulePath,
|
||||
outputRenamedFilesAsDeletedAndAdded
|
||||
outputRenamedFilesAsDeletedAndAdded,
|
||||
failOnSubmoduleDiffError
|
||||
})
|
||||
|
||||
for (const changeType of Object.keys(
|
||||
@@ -155,6 +303,35 @@ export const getAllDiffFiles = async ({
|
||||
return files
|
||||
}
|
||||
|
||||
function* getFilePaths({
|
||||
inputs,
|
||||
filePaths,
|
||||
dirNamesIncludeFilePatterns
|
||||
}: {
|
||||
inputs: Inputs
|
||||
filePaths: string[]
|
||||
dirNamesIncludeFilePatterns: string[]
|
||||
}): Generator<string> {
|
||||
for (const filePath of filePaths) {
|
||||
if (inputs.dirNames) {
|
||||
if (dirNamesIncludeFilePatterns.length > 0) {
|
||||
const isWin = isWindows()
|
||||
const matchOptions = {dot: true, windows: isWin, noext: true}
|
||||
if (mm.isMatch(filePath, dirNamesIncludeFilePatterns, matchOptions)) {
|
||||
yield filePath
|
||||
}
|
||||
}
|
||||
yield getDirnameMaxDepth({
|
||||
relativePath: filePath,
|
||||
dirNamesMaxDepth: inputs.dirNamesMaxDepth,
|
||||
excludeCurrentDir: inputs.dirNamesExcludeCurrentDir
|
||||
})
|
||||
} else {
|
||||
yield filePath
|
||||
}
|
||||
}
|
||||
}
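The new getFilePaths generator centralises the dir_names handling: when dirNames is on and dir_names_include_files patterns are configured, a matching file path is yielded as-is in addition to its directory, and every path is otherwise reduced with getDirnameMaxDepth. The sketch below illustrates that yielding rule only; it simplifies getDirnameMaxDepth to a plain dirname call and omits the Windows and max-depth handling, so it is not the actual implementation.

```ts
import mm from 'micromatch'
import * as path from 'path'

// Simplified illustration of the yielding rule used by getFilePaths.
function* filePathsSketch(
  filePaths: string[],
  includePatterns: string[]
): Generator<string> {
  for (const filePath of filePaths) {
    if (includePatterns.length > 0 && mm.isMatch(filePath, includePatterns, {dot: true})) {
      yield filePath // keep the matching file itself
    }
    yield path.dirname(filePath) // and always yield its directory
  }
}

// [...filePathsSketch(['src/utils.ts', 'docs/README.md'], ['**/*.md'])]
// => ['src', 'docs/README.md', 'docs']
```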
|
||||
|
||||
function* getChangeTypeFilesGenerator({
|
||||
inputs,
|
||||
changedFiles,
|
||||
@@ -164,18 +341,21 @@ function* getChangeTypeFilesGenerator({
|
||||
changedFiles: ChangedFiles
|
||||
changeTypes: ChangeTypeEnum[]
|
||||
}): Generator<string> {
|
||||
const dirNamesIncludeFilePatterns = getDirNamesIncludeFilesPattern({inputs})
|
||||
core.debug(
|
||||
`Dir names include file patterns: ${JSON.stringify(
|
||||
dirNamesIncludeFilePatterns
|
||||
)}`
|
||||
)
|
||||
|
||||
for (const changeType of changeTypes) {
|
||||
const files = changedFiles[changeType] || []
|
||||
for (const filePath of files) {
|
||||
if (inputs.dirNames) {
|
||||
yield getDirnameMaxDepth({
|
||||
relativePath: filePath,
|
||||
dirNamesMaxDepth: inputs.dirNamesMaxDepth,
|
||||
excludeCurrentDir: inputs.dirNamesExcludeCurrentDir
|
||||
})
|
||||
} else {
|
||||
yield filePath
|
||||
}
|
||||
const filePaths = changedFiles[changeType] || []
|
||||
for (const filePath of getFilePaths({
|
||||
inputs,
|
||||
filePaths,
|
||||
dirNamesIncludeFilePatterns
|
||||
})) {
|
||||
yield filePath
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -188,20 +368,15 @@ export const getChangeTypeFiles = async ({
|
||||
inputs: Inputs
|
||||
changedFiles: ChangedFiles
|
||||
changeTypes: ChangeTypeEnum[]
|
||||
}): Promise<{paths: string; count: string}> => {
|
||||
}): Promise<{paths: string[] | string; count: string}> => {
|
||||
const files = [
|
||||
...new Set(getChangeTypeFilesGenerator({inputs, changedFiles, changeTypes}))
|
||||
].filter(Boolean)
|
||||
|
||||
if (inputs.json) {
|
||||
return {
|
||||
paths: jsonOutput({value: files, shouldEscape: inputs.escapeJson}),
|
||||
count: files.length.toString()
|
||||
}
|
||||
}
|
||||
const paths = inputs.json ? files : files.join(inputs.separator)
|
||||
|
||||
return {
|
||||
paths: files.join(inputs.separator),
|
||||
paths,
|
||||
count: files.length.toString()
|
||||
}
|
||||
}
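With this change, getChangeTypeFiles (and getAllChangeTypeFiles below) return paths as a string array when the json input is enabled and as a single separator-joined string otherwise, instead of always pre-serialising to JSON. A small illustration of the two shapes, with hypothetical file names:

```ts
// inputs.json === true
const jsonShape: {paths: string[] | string; count: string} = {
  paths: ['file1.txt', 'src/a.ts'],
  count: '2'
}

// inputs.json === false, inputs.separator === ' '
const joinedShape: {paths: string[] | string; count: string} = {
  paths: 'file1.txt src/a.ts',
  count: '2'
}
```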
|
||||
@@ -213,16 +388,21 @@ function* getAllChangeTypeFilesGenerator({
|
||||
inputs: Inputs
|
||||
changedFiles: ChangedFiles
|
||||
}): Generator<string> {
|
||||
for (const filePath of flatten(Object.values(changedFiles))) {
|
||||
if (inputs.dirNames) {
|
||||
yield getDirnameMaxDepth({
|
||||
relativePath: filePath,
|
||||
dirNamesMaxDepth: inputs.dirNamesMaxDepth,
|
||||
excludeCurrentDir: inputs.dirNamesExcludeCurrentDir
|
||||
})
|
||||
} else {
|
||||
yield filePath
|
||||
}
|
||||
const dirNamesIncludeFilePatterns = getDirNamesIncludeFilesPattern({inputs})
|
||||
core.debug(
|
||||
`Dir names include file patterns: ${JSON.stringify(
|
||||
dirNamesIncludeFilePatterns
|
||||
)}`
|
||||
)
|
||||
|
||||
const filePaths = flatten(Object.values(changedFiles))
|
||||
|
||||
for (const filePath of getFilePaths({
|
||||
inputs,
|
||||
filePaths,
|
||||
dirNamesIncludeFilePatterns
|
||||
})) {
|
||||
yield filePath
|
||||
}
|
||||
}
|
||||
|
||||
@@ -232,20 +412,15 @@ export const getAllChangeTypeFiles = async ({
|
||||
}: {
|
||||
inputs: Inputs
|
||||
changedFiles: ChangedFiles
|
||||
}): Promise<{paths: string; count: string}> => {
|
||||
}): Promise<{paths: string[] | string; count: string}> => {
|
||||
const files = [
|
||||
...new Set(getAllChangeTypeFilesGenerator({inputs, changedFiles}))
|
||||
].filter(Boolean)
|
||||
|
||||
if (inputs.json) {
|
||||
return {
|
||||
paths: jsonOutput({value: files, shouldEscape: inputs.escapeJson}),
|
||||
count: files.length.toString()
|
||||
}
|
||||
}
|
||||
const paths = inputs.json ? files : files.join(inputs.separator)
|
||||
|
||||
return {
|
||||
paths: files.join(inputs.separator),
|
||||
paths,
|
||||
count: files.length.toString()
|
||||
}
|
||||
}
|
||||
@@ -278,9 +453,10 @@ export const getChangedFilesFromGithubAPI = async ({
|
||||
per_page: 100
|
||||
})
|
||||
|
||||
const paginatedResponse = await octokit.paginate<
|
||||
RestEndpointMethodTypes['pulls']['listFiles']['response']['data'][0]
|
||||
>(options)
|
||||
const paginatedResponse =
|
||||
await octokit.paginate<
|
||||
RestEndpointMethodTypes['pulls']['listFiles']['response']['data'][0]
|
||||
>(options)
|
||||
|
||||
core.info(`Found ${paginatedResponse.length} changed files from GitHub API`)
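The reformatted call above uses Octokit pagination to collect every page of the pull request's file list before mapping each entry's status onto a ChangeTypeEnum. A standalone sketch of that pattern, with a hypothetical owner, repo, and pull number rather than the action's configured values, looks like this:

```ts
import * as github from '@actions/github'

// Standalone sketch with hypothetical repository values; the action builds its
// request from inputs and github.context instead.
async function listPullRequestFiles(token: string): Promise<void> {
  const octokit = github.getOctokit(token)
  // paginate() follows the Link headers and concatenates every page of results.
  const files = await octokit.paginate(octokit.rest.pulls.listFiles, {
    owner: 'octocat',
    repo: 'hello-world',
    pull_number: 123,
    per_page: 100
  })
  for (const file of files) {
    console.log(`${file.status}: ${file.filename}`)
  }
}
```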
|
||||
const statusMap: Record<string, ChangeTypeEnum> = {
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import * as core from '@actions/core'
|
||||
import path from 'path'
|
||||
import {
|
||||
ChangedFiles,
|
||||
ChangeTypeEnum,
|
||||
@@ -6,25 +7,30 @@ import {
|
||||
getChangeTypeFiles
|
||||
} from './changedFiles'
|
||||
import {Inputs} from './inputs'
|
||||
import {setOutput} from './utils'
|
||||
import {getOutputKey, setArrayOutput, setOutput, exists} from './utils'
|
||||
|
||||
const getOutputKey = (key: string, outputPrefix: string): string => {
|
||||
return outputPrefix ? `${outputPrefix}_${key}` : key
|
||||
const getArrayFromPaths = (
|
||||
paths: string | string[],
|
||||
inputs: Inputs
|
||||
): string[] => {
|
||||
return Array.isArray(paths) ? paths : paths.split(inputs.separator)
|
||||
}
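getArrayFromPaths is a small bridge between the two paths shapes described above: JSON mode already yields an array, while string mode has to be split on the configured separator before the other_*/only_* set arithmetic can be done on it. The snippet below restates both helpers as shown in this diff (getOutputKey now lives in ./utils), with the separator passed directly instead of the full Inputs object for brevity.

```ts
// Both helpers as shown in this diff, simplified for illustration.
const getOutputKey = (key: string, outputPrefix: string): string =>
  outputPrefix ? `${outputPrefix}_${key}` : key

const getArrayFromPathsSketch = (paths: string | string[], separator: string): string[] =>
  Array.isArray(paths) ? paths : paths.split(separator)

getOutputKey('any_changed', 'docs')              // => 'docs_any_changed'
getArrayFromPathsSketch('a.txt b.txt', ' ')      // => ['a.txt', 'b.txt']
getArrayFromPathsSketch(['a.txt', 'b.txt'], ' ') // => ['a.txt', 'b.txt'] (already an array)
```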
|
||||
|
||||
export const setChangedFilesOutput = async ({
|
||||
export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
|
||||
allDiffFiles,
|
||||
allFilteredDiffFiles,
|
||||
inputs,
|
||||
filePatterns = [],
|
||||
outputPrefix = ''
|
||||
outputPrefix = '',
|
||||
workingDirectory
|
||||
}: {
|
||||
allDiffFiles: ChangedFiles
|
||||
allFilteredDiffFiles: ChangedFiles
|
||||
inputs: Inputs
|
||||
filePatterns?: string[]
|
||||
outputPrefix?: string
|
||||
}): Promise<void> => {
|
||||
workingDirectory?: string
|
||||
}): Promise<{anyModified: boolean; anyChanged: boolean}> => {
|
||||
const addedFiles = await getChangeTypeFiles({
|
||||
inputs,
|
||||
changedFiles: allFilteredDiffFiles,
|
||||
@@ -34,12 +40,16 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('added_files', outputPrefix),
|
||||
value: addedFiles.paths,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json,
|
||||
shouldEscape: inputs.escapeJson
|
||||
})
|
||||
await setOutput({
|
||||
key: getOutputKey('added_files_count', outputPrefix),
|
||||
value: addedFiles.count,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
const copiedFiles = await getChangeTypeFiles({
|
||||
@@ -51,13 +61,17 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('copied_files', outputPrefix),
|
||||
value: copiedFiles.paths,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json,
|
||||
shouldEscape: inputs.escapeJson
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('copied_files_count', outputPrefix),
|
||||
value: copiedFiles.count,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
const modifiedFiles = await getChangeTypeFiles({
|
||||
@@ -69,13 +83,17 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('modified_files', outputPrefix),
|
||||
value: modifiedFiles.paths,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json,
|
||||
shouldEscape: inputs.escapeJson
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('modified_files_count', outputPrefix),
|
||||
value: modifiedFiles.count,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
const renamedFiles = await getChangeTypeFiles({
|
||||
@@ -87,13 +105,17 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('renamed_files', outputPrefix),
|
||||
value: renamedFiles.paths,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json,
|
||||
shouldEscape: inputs.escapeJson
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('renamed_files_count', outputPrefix),
|
||||
value: renamedFiles.count,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
const typeChangedFiles = await getChangeTypeFiles({
|
||||
@@ -105,13 +127,17 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('type_changed_files', outputPrefix),
|
||||
value: typeChangedFiles.paths,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json,
|
||||
shouldEscape: inputs.escapeJson
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('type_changed_files_count', outputPrefix),
|
||||
value: typeChangedFiles.count,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
const unmergedFiles = await getChangeTypeFiles({
|
||||
@@ -123,13 +149,17 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('unmerged_files', outputPrefix),
|
||||
value: unmergedFiles.paths,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json,
|
||||
shouldEscape: inputs.escapeJson
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('unmerged_files_count', outputPrefix),
|
||||
value: unmergedFiles.count,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
const unknownFiles = await getChangeTypeFiles({
|
||||
@@ -141,13 +171,17 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('unknown_files', outputPrefix),
|
||||
value: unknownFiles.paths,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json,
|
||||
shouldEscape: inputs.escapeJson
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('unknown_files_count', outputPrefix),
|
||||
value: unknownFiles.count,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
const allChangedAndModifiedFiles = await getAllChangeTypeFiles({
|
||||
@@ -162,13 +196,17 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('all_changed_and_modified_files', outputPrefix),
|
||||
value: allChangedAndModifiedFiles.paths,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json,
|
||||
shouldEscape: inputs.escapeJson
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('all_changed_and_modified_files_count', outputPrefix),
|
||||
value: allChangedAndModifiedFiles.count,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
const allChangedFiles = await getChangeTypeFiles({
|
||||
@@ -185,19 +223,25 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('all_changed_files', outputPrefix),
|
||||
value: allChangedFiles.paths,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json,
|
||||
shouldEscape: inputs.escapeJson
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('all_changed_files_count', outputPrefix),
|
||||
value: allChangedFiles.count,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('any_changed', outputPrefix),
|
||||
value: allChangedFiles.paths.length > 0 && filePatterns.length > 0,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json
|
||||
})
|
||||
|
||||
const allOtherChangedFiles = await getChangeTypeFiles({
|
||||
@@ -212,12 +256,18 @@ export const setChangedFilesOutput = async ({
|
||||
})
|
||||
core.debug(`All other changed files: ${JSON.stringify(allOtherChangedFiles)}`)
|
||||
|
||||
const otherChangedFiles = allOtherChangedFiles.paths
|
||||
.split(inputs.separator)
|
||||
.filter(
|
||||
(filePath: string) =>
|
||||
!allChangedFiles.paths.split(inputs.separator).includes(filePath)
|
||||
)
|
||||
const allOtherChangedFilesPaths: string[] = getArrayFromPaths(
|
||||
allOtherChangedFiles.paths,
|
||||
inputs
|
||||
)
|
||||
const allChangedFilesPaths: string[] = getArrayFromPaths(
|
||||
allChangedFiles.paths,
|
||||
inputs
|
||||
)
|
||||
|
||||
const otherChangedFiles = allOtherChangedFilesPaths.filter(
|
||||
(filePath: string) => !allChangedFilesPaths.includes(filePath)
|
||||
)
|
||||
|
||||
const onlyChanged =
|
||||
otherChangedFiles.length === 0 &&
|
||||
@@ -227,19 +277,23 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('only_changed', outputPrefix),
|
||||
value: onlyChanged,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('other_changed_files', outputPrefix),
|
||||
value: otherChangedFiles.join(inputs.separator),
|
||||
inputs
|
||||
await setArrayOutput({
|
||||
key: 'other_changed_files',
|
||||
inputs,
|
||||
value: otherChangedFiles,
|
||||
outputPrefix
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('other_changed_files_count', outputPrefix),
|
||||
value: otherChangedFiles.length.toString(),
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
const allModifiedFiles = await getChangeTypeFiles({
|
||||
@@ -257,19 +311,25 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('all_modified_files', outputPrefix),
|
||||
value: allModifiedFiles.paths,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json,
|
||||
shouldEscape: inputs.escapeJson
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('all_modified_files_count', outputPrefix),
|
||||
value: allModifiedFiles.count,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('any_modified', outputPrefix),
|
||||
value: allModifiedFiles.paths.length > 0 && filePatterns.length > 0,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json
|
||||
})
|
||||
|
||||
const allOtherModifiedFiles = await getChangeTypeFiles({
|
||||
@@ -284,12 +344,19 @@ export const setChangedFilesOutput = async ({
|
||||
]
|
||||
})
|
||||
|
||||
const otherModifiedFiles = allOtherModifiedFiles.paths
|
||||
.split(inputs.separator)
|
||||
.filter(
|
||||
(filePath: string) =>
|
||||
!allModifiedFiles.paths.split(inputs.separator).includes(filePath)
|
||||
)
|
||||
const allOtherModifiedFilesPaths: string[] = getArrayFromPaths(
|
||||
allOtherModifiedFiles.paths,
|
||||
inputs
|
||||
)
|
||||
|
||||
const allModifiedFilesPaths: string[] = getArrayFromPaths(
|
||||
allModifiedFiles.paths,
|
||||
inputs
|
||||
)
|
||||
|
||||
const otherModifiedFiles = allOtherModifiedFilesPaths.filter(
|
||||
(filePath: string) => !allModifiedFilesPaths.includes(filePath)
|
||||
)
|
||||
|
||||
const onlyModified =
|
||||
otherModifiedFiles.length === 0 &&
|
||||
@@ -299,19 +366,23 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('only_modified', outputPrefix),
|
||||
value: onlyModified,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('other_modified_files', outputPrefix),
|
||||
value: otherModifiedFiles.join(inputs.separator),
|
||||
inputs
|
||||
await setArrayOutput({
|
||||
key: 'other_modified_files',
|
||||
inputs,
|
||||
value: otherModifiedFiles,
|
||||
outputPrefix
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('other_modified_files_count', outputPrefix),
|
||||
value: otherModifiedFiles.length.toString(),
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
const deletedFiles = await getChangeTypeFiles({
|
||||
@@ -320,22 +391,50 @@ export const setChangedFilesOutput = async ({
|
||||
changeTypes: [ChangeTypeEnum.Deleted]
|
||||
})
|
||||
core.debug(`Deleted files: ${JSON.stringify(deletedFiles)}`)
|
||||
|
||||
if (
|
||||
inputs.dirNamesDeletedFilesIncludeOnlyDeletedDirs &&
|
||||
inputs.dirNames &&
|
||||
workingDirectory
|
||||
) {
|
||||
const newDeletedFilesPaths: string[] = []
|
||||
for (const deletedPath of getArrayFromPaths(deletedFiles.paths, inputs)) {
|
||||
const dirPath = path.join(workingDirectory, deletedPath)
|
||||
core.debug(`Checking if directory exists: ${dirPath}`)
|
||||
if (!(await exists(dirPath))) {
|
||||
core.debug(`Directory not found: ${dirPath}`)
|
||||
newDeletedFilesPaths.push(deletedPath)
|
||||
}
|
||||
}
|
||||
deletedFiles.paths = inputs.json
|
||||
? newDeletedFilesPaths
|
||||
: newDeletedFilesPaths.join(inputs.separator)
|
||||
deletedFiles.count = newDeletedFilesPaths.length.toString()
|
||||
core.debug(`New deleted files: ${JSON.stringify(deletedFiles)}`)
|
||||
}
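This new block keeps a deleted path only when the corresponding directory no longer exists under the working directory, which is what makes dir_names_deleted_files_include_only_deleted_dirs report whole deleted directories rather than directories that merely lost some files. The exists helper it imports from ./utils is not shown in this diff; a conventional implementation would look something like the following sketch.

```ts
import * as fs from 'fs'

// Sketch of an exists() helper; the actual src/utils.ts implementation may differ.
const existsSketch = async (filePath: string): Promise<boolean> => {
  try {
    await fs.promises.access(filePath)
    return true
  } catch {
    return false
  }
}
```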
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('deleted_files', outputPrefix),
|
||||
value: deletedFiles.paths,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json,
|
||||
shouldEscape: inputs.escapeJson
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('deleted_files_count', outputPrefix),
|
||||
value: deletedFiles.count,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('any_deleted', outputPrefix),
|
||||
value: deletedFiles.paths.length > 0 && filePatterns.length > 0,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json
|
||||
})
|
||||
|
||||
const allOtherDeletedFiles = await getChangeTypeFiles({
|
||||
@@ -344,11 +443,19 @@ export const setChangedFilesOutput = async ({
|
||||
changeTypes: [ChangeTypeEnum.Deleted]
|
||||
})
|
||||
|
||||
const otherDeletedFiles = allOtherDeletedFiles.paths
|
||||
.split(inputs.separator)
|
||||
.filter(
|
||||
filePath => !deletedFiles.paths.split(inputs.separator).includes(filePath)
|
||||
)
|
||||
const allOtherDeletedFilesPaths: string[] = getArrayFromPaths(
|
||||
allOtherDeletedFiles.paths,
|
||||
inputs
|
||||
)
|
||||
|
||||
const deletedFilesPaths: string[] = getArrayFromPaths(
|
||||
deletedFiles.paths,
|
||||
inputs
|
||||
)
|
||||
|
||||
const otherDeletedFiles = allOtherDeletedFilesPaths.filter(
|
||||
filePath => !deletedFilesPaths.includes(filePath)
|
||||
)
|
||||
|
||||
const onlyDeleted =
|
||||
otherDeletedFiles.length === 0 &&
|
||||
@@ -358,18 +465,27 @@ export const setChangedFilesOutput = async ({
|
||||
await setOutput({
|
||||
key: getOutputKey('only_deleted', outputPrefix),
|
||||
value: onlyDeleted,
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir,
|
||||
json: inputs.json
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('other_deleted_files', outputPrefix),
|
||||
value: otherDeletedFiles.join(inputs.separator),
|
||||
inputs
|
||||
await setArrayOutput({
|
||||
key: 'other_deleted_files',
|
||||
inputs,
|
||||
value: otherDeletedFiles,
|
||||
outputPrefix
|
||||
})
|
||||
|
||||
await setOutput({
|
||||
key: getOutputKey('other_deleted_files_count', outputPrefix),
|
||||
value: otherDeletedFiles.length.toString(),
|
||||
inputs
|
||||
writeOutputFiles: inputs.writeOutputFiles,
|
||||
outputDir: inputs.outputDir
|
||||
})
|
||||
|
||||
return {
|
||||
anyModified: allModifiedFiles.paths.length > 0 && filePatterns.length > 0,
|
||||
anyChanged: allChangedFiles.paths.length > 0 && filePatterns.length > 0
|
||||
}
|
||||
}
|
||||
|
||||
303
src/commitSha.ts
@@ -5,6 +5,8 @@ import {Env} from './env'
|
||||
import {Inputs} from './inputs'
|
||||
import {
|
||||
canDiffCommits,
|
||||
cleanShaInput,
|
||||
getCurrentBranchName,
|
||||
getHeadSha,
|
||||
getParentSha,
|
||||
getPreviousGitTag,
|
||||
@@ -22,7 +24,11 @@ const getCurrentSHA = async ({
|
||||
inputs: Inputs
|
||||
workingDirectory: string
|
||||
}): Promise<string> => {
|
||||
let currentSha = inputs.sha
|
||||
let currentSha = await cleanShaInput({
|
||||
sha: inputs.sha,
|
||||
cwd: workingDirectory,
|
||||
token: inputs.token
|
||||
})
|
||||
core.debug('Getting current SHA...')
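Both the sha and base_sha inputs are now passed through cleanShaInput before use, so short or otherwise abbreviated values are resolved up front. cleanShaInput itself is defined in src/utils.ts and not shown in this diff; a plausible sketch of resolving a short SHA locally with git rev-parse is given below (the real helper also takes the API token, presumably so it can fall back to the GitHub API when the commit is not available locally).

```ts
import {getExecOutput} from '@actions/exec'

// Sketch only: resolve a possibly-short SHA to the full 40-character form.
const cleanShaInputSketch = async (sha: string, cwd: string): Promise<string> => {
  if (!sha) {
    return sha // nothing to resolve; callers handle the empty case themselves
  }
  const {exitCode, stdout} = await getExecOutput(
    'git',
    ['rev-parse', `${sha}^{commit}`],
    {cwd, ignoreReturnCode: true, silent: true}
  )
  return exitCode === 0 ? stdout.trim() : sha
}
```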
|
||||
|
||||
if (inputs.until) {
|
||||
@@ -90,67 +96,95 @@ export const getSHAForNonPullRequestEvent = async (
|
||||
isTag: boolean
|
||||
): Promise<DiffResult> => {
|
||||
let targetBranch = env.GITHUB_REF_NAME
|
||||
const currentBranch = targetBranch
|
||||
let currentBranch = targetBranch
|
||||
let initialCommit = false
|
||||
|
||||
if (isShallow && !inputs.skipInitialFetch) {
|
||||
core.info('Repository is shallow, fetching more history...')
|
||||
if (!inputs.skipInitialFetch) {
|
||||
if (isShallow) {
|
||||
core.info('Repository is shallow, fetching more history...')
|
||||
|
||||
if (isTag) {
|
||||
let sourceBranch = ''
|
||||
if (isTag) {
|
||||
let sourceBranch = ''
|
||||
|
||||
if (github.context.payload.base_ref) {
|
||||
sourceBranch = github.context.payload.base_ref.replace(
|
||||
'refs/heads/',
|
||||
''
|
||||
)
|
||||
} else if (github.context.payload.release?.target_commitish) {
|
||||
sourceBranch = github.context.payload.release?.target_commitish
|
||||
if (github.context.payload.base_ref) {
|
||||
sourceBranch = github.context.payload.base_ref.replace(
|
||||
'refs/heads/',
|
||||
''
|
||||
)
|
||||
} else if (github.context.payload.release?.target_commitish) {
|
||||
sourceBranch = github.context.payload.release?.target_commitish
|
||||
}
|
||||
|
||||
await gitFetch({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
`--deepen=${inputs.fetchDepth}`,
|
||||
'origin',
|
||||
`+refs/heads/${sourceBranch}:refs/remotes/origin/${sourceBranch}`
|
||||
]
|
||||
})
|
||||
} else {
|
||||
await gitFetch({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
`--deepen=${inputs.fetchDepth}`,
|
||||
'origin',
|
||||
`+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
await gitFetch({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
`--deepen=${inputs.fetchDepth}`,
|
||||
'origin',
|
||||
`+refs/heads/${sourceBranch}:refs/remotes/origin/${sourceBranch}`
|
||||
]
|
||||
})
|
||||
if (hasSubmodule) {
|
||||
await gitFetchSubmodules({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
`--deepen=${inputs.fetchDepth}`
|
||||
]
|
||||
})
|
||||
}
|
||||
} else {
|
||||
await gitFetch({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
`--deepen=${inputs.fetchDepth}`,
|
||||
'origin',
|
||||
`+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
if (hasSubmodule) {
|
||||
await gitFetchSubmodules({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
`--deepen=${inputs.fetchDepth}`
|
||||
]
|
||||
})
|
||||
if (hasSubmodule && inputs.fetchSubmoduleHistory) {
|
||||
await gitFetchSubmodules({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
`--deepen=${inputs.fetchDepth}`
|
||||
]
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const currentSha = await getCurrentSHA({inputs, workingDirectory})
|
||||
let previousSha = inputs.baseSha
|
||||
let previousSha = await cleanShaInput({
|
||||
sha: inputs.baseSha,
|
||||
cwd: workingDirectory,
|
||||
token: inputs.token
|
||||
})
|
||||
const diff = '..'
|
||||
const currentBranchName = await getCurrentBranchName({cwd: workingDirectory})
|
||||
|
||||
if (previousSha && currentSha && currentBranch && targetBranch) {
|
||||
if (
|
||||
currentBranchName &&
|
||||
currentBranchName !== 'HEAD' &&
|
||||
(currentBranchName !== targetBranch || currentBranchName !== currentBranch)
|
||||
) {
|
||||
targetBranch = currentBranchName
|
||||
currentBranch = currentBranchName
|
||||
}
|
||||
|
||||
if (inputs.baseSha && inputs.sha && currentBranch && targetBranch) {
|
||||
if (previousSha === currentSha) {
|
||||
core.error(
|
||||
`Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
|
||||
@@ -161,7 +195,6 @@ export const getSHAForNonPullRequestEvent = async (
|
||||
throw new Error('Similar commit hashes detected.')
|
||||
}
|
||||
|
||||
await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
|
||||
core.debug(`Previous SHA: ${previousSha}`)
|
||||
|
||||
return {
|
||||
@@ -173,7 +206,7 @@ export const getSHAForNonPullRequestEvent = async (
|
||||
}
|
||||
}
|
||||
|
||||
if (!previousSha) {
|
||||
if (!previousSha || previousSha === currentSha) {
|
||||
core.debug('Getting previous SHA...')
|
||||
if (inputs.since) {
|
||||
core.debug(`Getting base SHA for '${inputs.since}'...`)
|
||||
@@ -288,55 +321,68 @@ export const getSHAForPullRequestEvent = async (
|
||||
targetBranch = currentBranch
|
||||
}
|
||||
|
||||
if (isShallow && !inputs.skipInitialFetch) {
|
||||
if (!inputs.skipInitialFetch) {
|
||||
core.info('Repository is shallow, fetching more history...')
|
||||
|
||||
let prFetchExitCode = await gitFetch({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
'origin',
|
||||
`pull/${github.context.payload.pull_request?.number}/head:${currentBranch}`
|
||||
]
|
||||
})
|
||||
|
||||
if (prFetchExitCode !== 0) {
|
||||
prFetchExitCode = await gitFetch({
|
||||
if (isShallow) {
|
||||
let prFetchExitCode = await gitFetch({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
`--deepen=${inputs.fetchDepth}`,
|
||||
'origin',
|
||||
`+refs/heads/${currentBranch}*:refs/remotes/origin/${currentBranch}*`
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
if (prFetchExitCode !== 0) {
|
||||
throw new Error(
|
||||
'Failed to fetch pull request branch. Please ensure "persist-credentials" is set to "true" when checking out the repository. See: https://github.com/actions/checkout#usage'
|
||||
)
|
||||
}
|
||||
|
||||
if (!inputs.sinceLastRemoteCommit) {
|
||||
core.debug('Fetching target branch...')
|
||||
await gitFetch({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
`--deepen=${inputs.fetchDepth}`,
|
||||
'origin',
|
||||
`+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
|
||||
`pull/${github.context.payload.pull_request?.number}/head:${currentBranch}`
|
||||
]
|
||||
})
|
||||
|
||||
if (hasSubmodule) {
|
||||
if (prFetchExitCode !== 0) {
|
||||
prFetchExitCode = await gitFetch({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
`--deepen=${inputs.fetchDepth}`,
|
||||
'origin',
|
||||
`+refs/heads/${currentBranch}*:refs/remotes/origin/${currentBranch}*`
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
if (prFetchExitCode !== 0) {
|
||||
throw new Error(
|
||||
'Failed to fetch pull request branch. Please ensure "persist-credentials" is set to "true" when checking out the repository. See: https://github.com/actions/checkout#usage'
|
||||
)
|
||||
}
|
||||
|
||||
if (!inputs.sinceLastRemoteCommit) {
|
||||
core.debug('Fetching target branch...')
|
||||
await gitFetch({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
`--deepen=${inputs.fetchDepth}`,
|
||||
'origin',
|
||||
`+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
|
||||
]
|
||||
})
|
||||
|
||||
if (hasSubmodule) {
|
||||
await gitFetchSubmodules({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
...gitFetchExtraArgs,
|
||||
'-u',
|
||||
'--progress',
|
||||
`--deepen=${inputs.fetchDepth}`
|
||||
]
|
||||
})
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (hasSubmodule && inputs.fetchSubmoduleHistory) {
|
||||
await gitFetchSubmodules({
|
||||
cwd: workingDirectory,
|
||||
args: [
|
||||
@@ -352,10 +398,14 @@ export const getSHAForPullRequestEvent = async (
|
||||
}
|
||||
|
||||
const currentSha = await getCurrentSHA({inputs, workingDirectory})
|
||||
let previousSha = inputs.baseSha
|
||||
let previousSha = await cleanShaInput({
|
||||
sha: inputs.baseSha,
|
||||
cwd: workingDirectory,
|
||||
token: inputs.token
|
||||
})
|
||||
let diff = '...'
|
||||
|
||||
if (previousSha && currentSha && currentBranch && targetBranch) {
|
||||
if (inputs.baseSha && inputs.sha && currentBranch && targetBranch) {
|
||||
if (previousSha === currentSha) {
|
||||
core.error(
|
||||
`Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
|
||||
@@ -366,7 +416,6 @@ export const getSHAForPullRequestEvent = async (
|
||||
throw new Error('Similar commit hashes detected.')
|
||||
}
|
||||
|
||||
await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
|
||||
core.debug(`Previous SHA: ${previousSha}`)
|
||||
|
||||
return {
|
||||
@@ -385,38 +434,70 @@ export const getSHAForPullRequestEvent = async (
|
||||
diff = '..'
|
||||
}
|
||||
|
||||
if (!previousSha) {
|
||||
if (!previousSha || previousSha === currentSha) {
|
||||
if (inputs.sinceLastRemoteCommit) {
|
||||
previousSha = github.context.payload.before
|
||||
|
||||
if (
|
||||
!previousSha ||
|
||||
(previousSha &&
|
||||
(await verifyCommitSha({sha: previousSha, cwd: workingDirectory})) !==
|
||||
0)
|
||||
(await verifyCommitSha({
|
||||
sha: previousSha,
|
||||
cwd: workingDirectory,
|
||||
showAsErrorMessage: false
|
||||
})) !== 0)
|
||||
) {
|
||||
core.warning(
|
||||
'Unable to locate the remote branch head sha. Falling back to the previous commit in the local history.'
|
||||
core.info(
|
||||
`Unable to locate the previous commit in the local history for ${github.context.eventName} (${github.context.payload.action}) event. Falling back to the previous commit in the local history.`
|
||||
)
|
||||
|
||||
previousSha = await getParentSha({
|
||||
cwd: workingDirectory
|
||||
})
|
||||
|
||||
if (!previousSha) {
|
||||
core.warning(
|
||||
'Unable to locate the previous commit in the local history. Falling back to the pull request base sha.'
|
||||
if (
|
||||
github.context.payload.action &&
|
||||
github.context.payload.action === 'synchronize' &&
|
||||
previousSha &&
|
||||
(!previousSha ||
|
||||
(previousSha &&
|
||||
(await verifyCommitSha({
|
||||
sha: previousSha,
|
||||
cwd: workingDirectory,
|
||||
showAsErrorMessage: false
|
||||
})) !== 0))
|
||||
) {
|
||||
throw new Error(
|
||||
'Unable to locate the previous commit in the local history. Please ensure to checkout pull request HEAD commit instead of the merge commit. See: https://github.com/actions/checkout/blob/main/README.md#checkout-pull-request-head-commit-instead-of-merge-commit'
|
||||
)
|
||||
}
|
||||
|
||||
if (
|
||||
!previousSha ||
|
||||
(previousSha &&
|
||||
(await verifyCommitSha({
|
||||
sha: previousSha,
|
||||
cwd: workingDirectory,
|
||||
showAsErrorMessage: false
|
||||
})) !== 0)
|
||||
) {
|
||||
throw new Error(
|
||||
'Unable to locate the previous commit in the local history. Please ensure to checkout pull request HEAD commit instead of the merge commit. See: https://github.com/actions/checkout/blob/main/README.md#checkout-pull-request-head-commit-instead-of-merge-commit'
|
||||
)
|
||||
previousSha = github.context.payload.pull_request?.base?.sha
|
||||
}
|
||||
}
|
||||
} else {
|
||||
previousSha = await getRemoteBranchHeadSha({
|
||||
cwd: workingDirectory,
|
||||
branch: targetBranch
|
||||
})
|
||||
|
||||
if (!previousSha) {
|
||||
if (github.context.payload.action === 'closed') {
|
||||
previousSha = github.context.payload.pull_request?.base?.sha
|
||||
} else {
|
||||
previousSha = await getRemoteBranchHeadSha({
|
||||
cwd: workingDirectory,
|
||||
branch: targetBranch
|
||||
})
|
||||
|
||||
if (!previousSha) {
|
||||
previousSha = github.context.payload.pull_request?.base?.sha
|
||||
}
|
||||
}
|
||||
|
||||
if (isShallow) {
|
||||
@@ -492,6 +573,12 @@ export const getSHAForPullRequestEvent = async (
|
||||
diff
|
||||
}))
|
||||
) {
|
||||
core.warning(
|
||||
'If this pull request is from a forked repository, please set the checkout action `repository` input to the same repository as the pull request.'
|
||||
)
|
||||
core.warning(
|
||||
'This can be done by setting actions/checkout `repository` to ${{ github.event.pull_request.head.repo.full_name }}'
|
||||
)
|
||||
throw new Error(
|
||||
`Unable to determine a difference between ${previousSha}${diff}${currentSha}`
|
||||
)
|
||||
|
||||
@@ -29,9 +29,13 @@ export type Inputs = {
dirNames: boolean
dirNamesMaxDepth?: number
dirNamesExcludeCurrentDir: boolean
dirNamesIncludeFiles: string
dirNamesIncludeFilesSeparator: string
dirNamesDeletedFilesIncludeOnlyDeletedDirs: boolean
json: boolean
escapeJson: boolean
fetchDepth?: number
fetchSubmoduleHistory: boolean
sinceLastRemoteCommit: boolean
writeOutputFiles: boolean
outputDir: string
@@ -45,6 +49,9 @@ export type Inputs = {
token: string
apiUrl: string
skipInitialFetch: boolean
failOnInitialDiffError: boolean
failOnSubmoduleDiffError: boolean
negationPatternsFirst: boolean
}

export const getInputs = (): Inputs => {
@@ -135,6 +142,16 @@ export const getInputs = (): Inputs => {
required: false
}
)
const dirNamesIncludeFiles = core.getInput('dir_names_include_files', {
required: false
})
const dirNamesIncludeFilesSeparator = core.getInput(
'dir_names_include_files_separator',
{
required: false,
trimWhitespace: false
}
)
const json = core.getBooleanInput('json', {required: false})
const escapeJson = core.getBooleanInput('escape_json', {required: false})
const fetchDepth = core.getInput('fetch_depth', {required: false})
@@ -177,6 +194,37 @@ export const getInputs = (): Inputs => {
const skipInitialFetch = core.getBooleanInput('skip_initial_fetch', {
required: false
})
const fetchSubmoduleHistory = core.getBooleanInput(
'fetch_additional_submodule_history',
{
required: false
}
)
const failOnInitialDiffError = core.getBooleanInput(
'fail_on_initial_diff_error',
{
required: false
}
)
const failOnSubmoduleDiffError = core.getBooleanInput(
'fail_on_submodule_diff_error',
{
required: false
}
)
const dirNamesDeletedFilesIncludeOnlyDeletedDirs = core.getBooleanInput(
'dir_names_deleted_files_include_only_deleted_dirs',
{
required: false
}
)

const negationPatternsFirst = core.getBooleanInput(
'negation_patterns_first',
{
required: false
}
)

const inputs: Inputs = {
files,
@@ -193,6 +241,8 @@ export const getInputs = (): Inputs => {
filesIgnoreYaml,
filesIgnoreYamlFromSourceFile,
filesIgnoreYamlFromSourceFileSeparator,
failOnInitialDiffError,
failOnSubmoduleDiffError,
separator,
// Not Supported via REST API
sha,
@@ -212,9 +262,14 @@ export const getInputs = (): Inputs => {
includeAllOldNewRenamedFiles,
oldNewSeparator,
oldNewFilesSeparator,
skipInitialFetch,
fetchSubmoduleHistory,
dirNamesDeletedFilesIncludeOnlyDeletedDirs,
// End Not Supported via REST API
dirNames,
dirNamesExcludeCurrentDir,
dirNamesIncludeFiles,
dirNamesIncludeFilesSeparator,
json,
escapeJson,
writeOutputFiles,
@@ -222,7 +277,7 @@ export const getInputs = (): Inputs => {
outputRenamedFilesAsDeletedAndAdded,
token,
apiUrl,
skipInitialFetch
negationPatternsFirst
}

if (fetchDepth) {
109 src/main.ts
@@ -2,23 +2,21 @@ import * as core from '@actions/core'
import * as github from '@actions/github'
import path from 'path'
import {
ChangedFiles,
processChangedFiles,
ChangeTypeEnum,
getAllDiffFiles,
getChangedFilesFromGithubAPI,
getRenamedFiles
} from './changedFiles'
import {setChangedFilesOutput} from './changedFilesOutput'
import {
DiffResult,
getSHAForPullRequestEvent,
getSHAForNonPullRequestEvent
getSHAForNonPullRequestEvent,
getSHAForPullRequestEvent
} from './commitSha'
import {Env, getEnv} from './env'
import {getInputs, Inputs} from './inputs'
import {
getFilePatterns,
getFilteredChangedFiles,
getRecoverFilePatterns,
getSubmodulePath,
getYamlFilePatterns,
@@ -31,73 +29,7 @@ import {
verifyMinimumGitVersion
} from './utils'

const changedFilesOutput = async ({
filePatterns,
allDiffFiles,
inputs,
yamlFilePatterns
}: {
filePatterns: string[]
allDiffFiles: ChangedFiles
inputs: Inputs
yamlFilePatterns: Record<string, string[]>
}): Promise<void> => {
if (filePatterns.length > 0) {
core.startGroup('changed-files-patterns')
const allFilteredDiffFiles = await getFilteredChangedFiles({
allDiffFiles,
filePatterns
})
core.debug(
`All filtered diff files: ${JSON.stringify(allFilteredDiffFiles)}`
)
await setChangedFilesOutput({
allDiffFiles,
allFilteredDiffFiles,
inputs,
filePatterns
})
core.info('All Done!')
core.endGroup()
}

if (Object.keys(yamlFilePatterns).length > 0) {
for (const key of Object.keys(yamlFilePatterns)) {
core.startGroup(`changed-files-yaml-${key}`)
const allFilteredDiffFiles = await getFilteredChangedFiles({
allDiffFiles,
filePatterns: yamlFilePatterns[key]
})
core.debug(
`All filtered diff files for ${key}: ${JSON.stringify(
allFilteredDiffFiles
)}`
)
await setChangedFilesOutput({
allDiffFiles,
allFilteredDiffFiles,
inputs,
filePatterns: yamlFilePatterns[key],
outputPrefix: key
})
core.info('All Done!')
core.endGroup()
}
}

if (filePatterns.length === 0 && Object.keys(yamlFilePatterns).length === 0) {
core.startGroup('changed-files-all')
await setChangedFilesOutput({
allDiffFiles,
allFilteredDiffFiles: allDiffFiles,
inputs
})
core.info('All Done!')
core.endGroup()
}
}

const getChangedFilesFromLocalGit = async ({
const getChangedFilesFromLocalGitHistory = async ({
inputs,
env,
workingDirectory,
@@ -190,7 +122,10 @@ const getChangedFilesFromLocalGit = async ({
hasSubmodule,
diffResult,
submodulePaths,
outputRenamedFilesAsDeletedAndAdded
outputRenamedFilesAsDeletedAndAdded,
fetchSubmoduleHistory: inputs.fetchSubmoduleHistory,
failOnInitialDiffError: inputs.failOnInitialDiffError,
failOnSubmoduleDiffError: inputs.failOnSubmoduleDiffError
})
core.debug(`All diff files: ${JSON.stringify(allDiffFiles)}`)
core.info('All Done!')
@@ -209,15 +144,18 @@ const getChangedFilesFromLocalGit = async ({
workingDirectory,
deletedFiles: allDiffFiles[ChangeTypeEnum.Deleted],
recoverPatterns,
sha: diffResult.previousSha
diffResult,
hasSubmodule,
submodulePaths
})
}

await changedFilesOutput({
await processChangedFiles({
filePatterns,
allDiffFiles,
inputs,
yamlFilePatterns
yamlFilePatterns,
workingDirectory
})

if (inputs.includeAllOldNewRenamedFiles) {
@@ -233,12 +171,16 @@ const getChangedFilesFromLocalGit = async ({
await setOutput({
key: 'all_old_new_renamed_files',
value: allOldNewRenamedFiles.paths,
inputs
writeOutputFiles: inputs.writeOutputFiles,
outputDir: inputs.outputDir,
json: inputs.json
})
await setOutput({
key: 'all_old_new_renamed_files_count',
value: allOldNewRenamedFiles.count,
inputs
writeOutputFiles: inputs.writeOutputFiles,
outputDir: inputs.outputDir,
json: inputs.json
})
core.info('All Done!')
core.endGroup()
@@ -260,7 +202,7 @@ const getChangedFilesFromRESTAPI = async ({
core.debug(`All diff files: ${JSON.stringify(allDiffFiles)}`)
core.info('All Done!')

await changedFilesOutput({
await processChangedFiles({
filePatterns,
allDiffFiles,
inputs,
@@ -314,7 +256,12 @@ export async function run(): Promise<void> {
'sinceLastRemoteCommit',
'recoverDeletedFiles',
'recoverDeletedFilesToDestination',
'includeAllOldNewRenamedFiles'
'recoverFiles',
'recoverFilesIgnore',
'includeAllOldNewRenamedFiles',
'skipInitialFetch',
'fetchSubmoduleHistory',
'dirNamesDeletedFilesIncludeOnlyDeletedDirs'
]

for (const input of unsupportedInputs) {
@@ -338,7 +285,7 @@ export async function run(): Promise<void> {
}

core.info('Using local .git directory')
await getChangedFilesFromLocalGit({
await getChangedFilesFromLocalGitHistory({
inputs,
env,
workingDirectory,
349 src/utils.ts
@@ -1,6 +1,7 @@
/*global AsyncIterableIterator*/
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as github from '@actions/github'
import {createReadStream, promises as fs} from 'fs'
import {readFile} from 'fs/promises'
import {flattenDeep} from 'lodash'
@@ -9,7 +10,7 @@ import * as path from 'path'
import {createInterface} from 'readline'
import {parseDocument} from 'yaml'
import {ChangedFiles, ChangeTypeEnum} from './changedFiles'

import {DiffResult} from './commitSha'
import {Inputs} from './inputs'

const MINIMUM_GIT_VERSION = '2.18.0'
@@ -38,15 +39,6 @@ export const normalizeSeparators = (p: string): string => {
return p.replace(/\/\/+/g, '/')
}

/**
* Normalize file path separators to '/' on all platforms
* @param p - file path
* @returns file path with normalized separators
*/
const normalizePath = (p: string): string => {
return p.replace(/\\/g, '/')
}

/**
* Trims unnecessary trailing slash from file path
* @param p - file path
@@ -149,7 +141,7 @@ export const verifyMinimumGitVersion = async (): Promise<void> => {
* @param filePath - path to check
* @returns path exists
*/
const exists = async (filePath: string): Promise<boolean> => {
export const exists = async (filePath: string): Promise<boolean> => {
try {
await fs.access(filePath)
return true
@@ -365,7 +357,7 @@ export const getSubmodulePath = async ({
return stdout
.trim()
.split('\n')
.map((line: string) => normalizePath(line.trim().split(' ')[1]))
.map((line: string) => normalizeSeparators(line.trim().split(' ')[1]))
}

/**
@@ -478,13 +470,15 @@ export const gitRenamedFiles = async ({
core.debug(`Renamed file: ${line}`)
const [, oldPath, newPath] = line.split('\t')
if (isSubmodule) {
return `${normalizePath(
return `${normalizeSeparators(
path.join(parentDir, oldPath)
)}${oldNewSeparator}${normalizePath(path.join(parentDir, newPath))}`
)}${oldNewSeparator}${normalizeSeparators(
path.join(parentDir, newPath)
)}`
}
return `${normalizePath(oldPath)}${oldNewSeparator}${normalizePath(
newPath
)}`
return `${normalizeSeparators(
oldPath
)}${oldNewSeparator}${normalizeSeparators(newPath)}`
})
}
@@ -497,6 +491,8 @@ export const gitRenamedFiles = async ({
* @param isSubmodule - is the repo a submodule
* @param parentDir - parent directory of the submodule
* @param outputRenamedFilesAsDeletedAndAdded - output renamed files as deleted and added
* @param failOnInitialDiffError - fail if the initial diff fails
* @param failOnSubmoduleDiffError - fail if the submodule diff fails
*/
export const getAllChangedFiles = async ({
cwd,
@@ -505,7 +501,9 @@ export const getAllChangedFiles = async ({
diff,
isSubmodule = false,
parentDir = '',
outputRenamedFilesAsDeletedAndAdded = false
outputRenamedFilesAsDeletedAndAdded = false,
failOnInitialDiffError = false,
failOnSubmoduleDiffError = false
}: {
cwd: string
sha1: string
@@ -514,6 +512,8 @@ export const getAllChangedFiles = async ({
isSubmodule?: boolean
parentDir?: string
outputRenamedFilesAsDeletedAndAdded?: boolean
failOnInitialDiffError?: boolean
failOnSubmoduleDiffError?: boolean
}): Promise<ChangedFiles> => {
const {exitCode, stdout, stderr} = await exec.getExecOutput(
'git',
@@ -541,6 +541,18 @@ export const getAllChangedFiles = async ({
[ChangeTypeEnum.Unknown]: []
}

if (exitCode !== 0) {
if (failOnInitialDiffError && !isSubmodule) {
throw new Error(
`Failed to get changed files between: ${sha1}${diff}${sha2}: ${stderr}`
)
} else if (failOnSubmoduleDiffError && isSubmodule) {
throw new Error(
`Failed to get changed files for submodule between: ${sha1}${diff}${sha2}: ${stderr}`
)
}
}

if (exitCode !== 0) {
if (isSubmodule) {
core.warning(
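
With the two new flags, a non-zero git diff exit code can be escalated to a hard failure instead of the previous warning-only behaviour. A hedged usage sketch based on the signature shown above; the SHAs and the diff range operator are placeholders:

// Illustration only: opt in to failing when the top-level diff cannot be computed.
try {
  const changedFiles = await getAllChangedFiles({
    cwd: '.',
    sha1: 'HEAD~1', // placeholder
    sha2: 'HEAD',   // placeholder
    diff: '..',
    failOnInitialDiffError: true
  })
  core.debug(`Deleted files: ${changedFiles[ChangeTypeEnum.Deleted].join(', ')}`)
} catch (error) {
  // Reached when the initial diff exits non-zero and the flag is set.
  core.setFailed((error as Error).message)
}
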
@@ -564,11 +576,11 @@ export const getAllChangedFiles = async ({
for (const line of lines) {
const [changeType, filePath, newPath = ''] = line.split('\t')
const normalizedFilePath = isSubmodule
? normalizePath(path.join(parentDir, filePath))
: normalizePath(filePath)
? normalizeSeparators(path.join(parentDir, filePath))
: normalizeSeparators(filePath)
const normalizedNewPath = isSubmodule
? normalizePath(path.join(parentDir, newPath))
: normalizePath(newPath)
? normalizeSeparators(path.join(parentDir, newPath))
: normalizeSeparators(newPath)

if (changeType.startsWith('R')) {
if (outputRenamedFilesAsDeletedAndAdded) {
@@ -607,15 +619,16 @@ export const getFilteredChangedFiles = async ({
[ChangeTypeEnum.Unknown]: []
}
const hasFilePatterns = filePatterns.length > 0
const isWin = isWindows()

for (const changeType of Object.keys(allDiffFiles)) {
const files = allDiffFiles[changeType as ChangeTypeEnum]
if (hasFilePatterns) {
changedFiles[changeType as ChangeTypeEnum] = mm(files, filePatterns, {
dot: true,
windows: isWindows(),
windows: isWin,
noext: true
})
}).map(normalizeSeparators)
} else {
changedFiles[changeType as ChangeTypeEnum] = files
}
@@ -685,6 +698,28 @@ export const getRemoteBranchHeadSha = async ({
return stdout.trim()
}

export const getCurrentBranchName = async ({
cwd
}: {
cwd: string
}): Promise<string> => {
const {stdout, exitCode} = await exec.getExecOutput(
'git',
['rev-parse', '--abbrev-ref', 'HEAD'],
{
cwd,
ignoreReturnCode: true,
silent: !core.isDebug()
}
)

if (exitCode !== 0) {
return ''
}

return stdout.trim()
}

export const getParentSha = async ({cwd}: {cwd: string}): Promise<string> => {
const {stdout, exitCode} = await exec.getExecOutput(
'git',
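
The new getCurrentBranchName helper mirrors the other git wrappers above and returns an empty string when git rev-parse exits non-zero. A trivial hedged usage sketch; workingDirectory is a placeholder variable:

// Illustration only.
const currentBranch = await getCurrentBranchName({cwd: workingDirectory})
if (!currentBranch) {
  core.warning('Could not determine the current branch name.')
}
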
@@ -738,6 +773,56 @@ export const verifyCommitSha = async ({
return exitCode
}

/**
* Clean the sha from the input which could be a branch name or a commit sha.
*
* If the input is a valid commit sha, return it as is.
*
* If the input is a branch name, get the HEAD sha of that branch and return it.
*
* @param sha The input string, which could be a branch name or a commit sha.
* @param cwd The working directory.
* @param token The GitHub token.
* @returns The cleaned SHA string.
*/
export const cleanShaInput = async ({
sha,
cwd,
token
}: {
sha: string
cwd: string
token: string
}): Promise<string> => {
// Check if the input is a valid commit sha
if (!sha) {
return sha
}
// Check if the input is a valid commit sha
const {stdout, exitCode} = await exec.getExecOutput(
'git',
['rev-parse', '--verify', `${sha}^{commit}`],
{
cwd,
ignoreReturnCode: true,
silent: !core.isDebug()
}
)

if (exitCode !== 0) {
const octokit = github.getOctokit(token)
// If it's not a valid commit sha, assume it's a branch name and get the HEAD sha
const {data: refData} = await octokit.rest.git.getRef({
owner: github.context.repo.owner,
repo: github.context.repo.repo,
ref: `heads/${sha}`
})

return refData.object.sha
}

return stdout.trim()
}
export const getPreviousGitTag = async ({
cwd
}: {
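
cleanShaInput accepts either a commit SHA or a branch name: a SHA is verified locally via git rev-parse --verify and returned, while anything else is assumed to be a branch and resolved to its HEAD SHA through the GitHub API. A hedged call sketch; the SHA, branch name, and token lookup are placeholders:

// Illustration only.
// A commit SHA is verified locally and returned.
const fromSha = await cleanShaInput({
  sha: '35ad6787f5ad6787f5ad6787f5ad6787f5ad6787', // placeholder SHA
  cwd: workingDirectory,
  token: core.getInput('token')
})

// A branch name is resolved via the API as heads/<branch>.
const fromBranch = await cleanShaInput({
  sha: 'main', // placeholder branch
  cwd: workingDirectory,
  token: core.getInput('token')
})
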
@@ -745,7 +830,7 @@ export const getPreviousGitTag = async ({
}): Promise<{tag: string; sha: string}> => {
const {stdout} = await exec.getExecOutput(
'git',
['tag', '--sort=-version:refname'],
['tag', '--sort=-creatordate'],
{
cwd,
silent: !core.isDebug()
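
The tag listing now sorts by creation date instead of version name, which changes which tag is treated as the most recent when version order and creation order disagree. A short hedged illustration with hypothetical tags:

// Illustration only, with hypothetical tags.
// Tags created in this order: v1.9.0, v1.10.0, then a v1.9.1 backport.
//   --sort=-version:refname -> v1.10.0, v1.9.1, v1.9.0  (version order)
//   --sort=-creatordate     -> v1.9.1, v1.10.0, v1.9.0  (newest creation first)
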
@@ -873,14 +958,14 @@ export const getDirnameMaxDepth = ({
return ''
}

return normalizePath(output)
return normalizeSeparators(output)
}

export const jsonOutput = ({
value,
shouldEscape
}: {
value: string | string[]
value: string | string[] | boolean
shouldEscape: boolean
}): string => {
const result = JSON.stringify(value)
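
jsonOutput now also accepts string arrays and booleans. A hedged sketch of the expected results, following the plain JSON.stringify-plus-escape logic shown above:

// Illustration only.
jsonOutput({value: ['src/a.ts', 'src/b.ts'], shouldEscape: false})
// -> '["src/a.ts","src/b.ts"]'

jsonOutput({value: ['src/a.ts', 'src/b.ts'], shouldEscape: true})
// -> '[\"src/a.ts\",\"src/b.ts\"]'

jsonOutput({value: true, shouldEscape: false})
// -> 'true'
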
@@ -888,6 +973,16 @@
return shouldEscape ? result.replace(/"/g, '\\"') : result
}

export const getDirNamesIncludeFilesPattern = ({
inputs
}: {
inputs: Inputs
}): string[] => {
return inputs.dirNamesIncludeFiles
.split(inputs.dirNamesIncludeFilesSeparator)
.filter(Boolean)
}

export const getFilePatterns = async ({
inputs,
workingDirectory
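
The new helper simply splits the dir_names_include_files input on its separator and drops empty entries. A hedged usage sketch with placeholder input values:

// Illustration only: inputs spread with placeholder values.
const patterns = getDirNamesIncludeFilesPattern({
  inputs: {
    ...inputs,
    dirNamesIncludeFiles: 'package.json\n**/*.md',
    dirNamesIncludeFilesSeparator: '\n'
  }
})
// -> ['package.json', '**/*.md']
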
@@ -895,93 +990,99 @@
inputs: Inputs
workingDirectory: string
}): Promise<string[]> => {
let filePatterns = inputs.files
.split(inputs.filesSeparator)
.filter(p => p !== '')
.join('\n')
let cleanedFilePatterns: string[] = []

if (inputs.files) {
const filesPatterns = inputs.files
.split(inputs.filesSeparator)
.filter(Boolean)

cleanedFilePatterns.push(...filesPatterns)

core.debug(`files patterns: ${filesPatterns.join('\n')}`)
}

if (inputs.filesFromSourceFile !== '') {
const inputFilesFromSourceFile = inputs.filesFromSourceFile
.split(inputs.filesFromSourceFileSeparator)
.filter(p => p !== '')
.filter(Boolean)
.map(p => path.join(workingDirectory, p))

core.debug(`files from source file: ${inputFilesFromSourceFile}`)

const filesFromSourceFiles = (
await getFilesFromSourceFile({filePaths: inputFilesFromSourceFile})
).join('\n')
const filesFromSourceFiles = await getFilesFromSourceFile({
filePaths: inputFilesFromSourceFile
})

core.debug(`files from source files patterns: ${filesFromSourceFiles}`)
core.debug(
`files from source files patterns: ${filesFromSourceFiles.join('\n')}`
)

filePatterns = filePatterns.concat('\n', filesFromSourceFiles)
cleanedFilePatterns.push(...filesFromSourceFiles)
}

if (inputs.filesIgnore) {
const filesIgnorePatterns = inputs.filesIgnore
.split(inputs.filesIgnoreSeparator)
.filter(p => p !== '')
.filter(Boolean)
.map(p => {
if (!p.startsWith('!')) {
p = `!${p}`
}
return p
})
.join('\n')

core.debug(`files ignore patterns: ${filesIgnorePatterns}`)
core.debug(`files ignore patterns: ${filesIgnorePatterns.join('\n')}`)

filePatterns = filePatterns.concat('\n', filesIgnorePatterns)
cleanedFilePatterns.push(...filesIgnorePatterns)
}

if (inputs.filesIgnoreFromSourceFile) {
const inputFilesIgnoreFromSourceFile = inputs.filesIgnoreFromSourceFile
.split(inputs.filesIgnoreFromSourceFileSeparator)
.filter(p => p !== '')
.filter(Boolean)
.map(p => path.join(workingDirectory, p))

core.debug(
`files ignore from source file: ${inputFilesIgnoreFromSourceFile}`
)

const filesIgnoreFromSourceFiles = (
await getFilesFromSourceFile({
filePaths: inputFilesIgnoreFromSourceFile,
excludedFiles: true
})
).join('\n')
const filesIgnoreFromSourceFiles = await getFilesFromSourceFile({
filePaths: inputFilesIgnoreFromSourceFile,
excludedFiles: true
})

core.debug(
`files ignore from source files patterns: ${filesIgnoreFromSourceFiles}`
`files ignore from source files patterns: ${filesIgnoreFromSourceFiles.join(
'\n'
)}`
)

filePatterns = filePatterns.concat('\n', filesIgnoreFromSourceFiles)
cleanedFilePatterns.push(...filesIgnoreFromSourceFiles)
}

if (inputs.negationPatternsFirst) {
cleanedFilePatterns.sort((a, b) => {
return a.startsWith('!') ? -1 : b.startsWith('!') ? 1 : 0
})
}

// Reorder file patterns '**' should come first
if (cleanedFilePatterns.includes('**')) {
cleanedFilePatterns.sort((a, b) => {
return a === '**' ? -1 : b === '**' ? 1 : 0
})
}

if (isWindows()) {
filePatterns = filePatterns.replace(/\r\n/g, '\n')
filePatterns = filePatterns.replace(/\r/g, '\n')
cleanedFilePatterns = cleanedFilePatterns.map(pattern =>
pattern.replace(/\r\n/g, '\n').replace(/\r/g, '\n')
)
}

core.debug(`file patterns: ${filePatterns}`)
core.debug(`Input file patterns: \n${cleanedFilePatterns.join('\n')}`)

return filePatterns
.trim()
.split('\n')
.filter(Boolean)
.map(pattern => {
if (pattern.endsWith('/')) {
return `${pattern}**`
} else {
const pathParts = pattern.split(path.sep)
const lastPart = pathParts[pathParts.length - 1]
if (!lastPart.includes('.')) {
return `${pattern}${path.sep}**`
} else {
return pattern
}
}
})
return cleanedFilePatterns
}

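
The refactored getFilePatterns keeps patterns as an array and only reorders it: when negation_patterns_first is set, negated patterns are moved to the front, and a bare '**' pattern is then hoisted to the front (per the comment in the hunk above). A hedged illustration of the ordering, assuming the two sort comparators shown:

// Illustration only.
// Input patterns (after splitting and cleaning):
//   ['src/**', '!src/vendor/**', '**']
//
// With negation_patterns_first: true, the negated pattern sorts first:
//   ['!src/vendor/**', 'src/**', '**']
//
// Then, because '**' is present, it is moved to the front:
//   ['**', '!src/vendor/**', 'src/**']
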
// Example YAML input:
@@ -1201,21 +1302,58 @@ export const getRecoverFilePatterns = ({
return filePatterns.filter(Boolean)
}

export const getOutputKey = (key: string, outputPrefix: string): string => {
return outputPrefix ? `${outputPrefix}_${key}` : key
}

export const setArrayOutput = async ({
key,
inputs,
value,
outputPrefix
}: {
key: string
inputs: Inputs
value: string[]
outputPrefix?: string
}): Promise<void> => {
core.debug(`${key}: ${JSON.stringify(value)}`)
await setOutput({
key: outputPrefix ? getOutputKey(key, outputPrefix) : key,
value: inputs.json ? value : value.join(inputs.separator),
writeOutputFiles: inputs.writeOutputFiles,
outputDir: inputs.outputDir,
json: inputs.json,
shouldEscape: inputs.escapeJson
})
}

export const setOutput = async ({
key,
value,
inputs
writeOutputFiles,
outputDir,
json = false,
shouldEscape = false
}: {
key: string
value: string | boolean
inputs: Inputs
value: string | string[] | boolean
writeOutputFiles: boolean
outputDir: string
json?: boolean
shouldEscape?: boolean
}): Promise<void> => {
const cleanedValue = value.toString().trim()
let cleanedValue
if (json) {
cleanedValue = jsonOutput({value, shouldEscape})
} else {
cleanedValue = value.toString().trim()
}

core.setOutput(key, cleanedValue)

if (inputs.writeOutputFiles) {
const outputDir = inputs.outputDir
const extension = inputs.json ? 'json' : 'txt'
if (writeOutputFiles) {
const extension = json ? 'json' : 'txt'
const outputFilePath = path.join(outputDir, `${key}.${extension}`)

if (!(await exists(outputDir))) {
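
setOutput no longer takes the whole Inputs object; callers now pass writeOutputFiles, outputDir, and the optional json/shouldEscape flags explicitly, and array values can be serialized directly. A hedged call sketch with placeholder values:

// Illustration only.
await setOutput({
  key: 'added_files', // placeholder output key
  value: ['src/a.ts', 'src/b.ts'],
  writeOutputFiles: inputs.writeOutputFiles,
  outputDir: inputs.outputDir,
  json: true, // emit '["src/a.ts","src/b.ts"]'; written to added_files.json when writeOutputFiles is true
  shouldEscape: inputs.escapeJson
})
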
@@ -1258,13 +1396,17 @@ export const recoverDeletedFiles = async ({
workingDirectory,
deletedFiles,
recoverPatterns,
sha
diffResult,
hasSubmodule,
submodulePaths
}: {
inputs: Inputs
workingDirectory: string
deletedFiles: string[]
recoverPatterns: string[]
sha: string
diffResult: DiffResult
hasSubmodule: boolean
submodulePaths: string[]
}): Promise<void> => {
let recoverableDeletedFiles = deletedFiles
core.debug(`recoverable deleted files: ${recoverableDeletedFiles}`)
@@ -1289,16 +1431,55 @@ export const recoverDeletedFiles = async ({
)
}

const deletedFileContents = await getDeletedFileContents({
cwd: workingDirectory,
filePath: deletedFile,
sha
})
let deletedFileContents: string

const submodulePath = submodulePaths.find(p => deletedFile.startsWith(p))

if (hasSubmodule && submodulePath) {
const submoduleShaResult = await gitSubmoduleDiffSHA({
cwd: workingDirectory,
parentSha1: diffResult.previousSha,
parentSha2: diffResult.currentSha,
submodulePath,
diff: diffResult.diff
})

if (submoduleShaResult.previousSha) {
core.debug(
`recovering deleted file "${deletedFile}" from submodule ${submodulePath} from ${submoduleShaResult.previousSha}`
)
deletedFileContents = await getDeletedFileContents({
cwd: path.join(workingDirectory, submodulePath),
// E.g. submodulePath = test/demo and deletedFile = test/demo/.github/README.md => filePath => .github/README.md
filePath: deletedFile.replace(submodulePath, '').substring(1),
sha: submoduleShaResult.previousSha
})
} else {
core.warning(
`Unable to recover deleted file "${deletedFile}" from submodule ${submodulePath} from ${submoduleShaResult.previousSha}`
)
continue
}
} else {
core.debug(
`recovering deleted file "${deletedFile}" from ${diffResult.previousSha}`
)
deletedFileContents = await getDeletedFileContents({
cwd: workingDirectory,
filePath: deletedFile,
sha: diffResult.previousSha
})
}

core.debug(`recovered deleted file "${deletedFile}"`)

if (!(await exists(path.dirname(target)))) {
core.debug(`creating directory "${path.dirname(target)}"`)
await fs.mkdir(path.dirname(target), {recursive: true})
}
core.debug(`writing file "${target}"`)
await fs.writeFile(target, deletedFileContents)
core.debug(`wrote file "${target}"`)
}
}

Submodule test/demo updated: e168fac86c...5dfac2e9a7