Mirror of https://gitea.com/actions/dorny-paths-filter.git (synced 2025-12-23 05:48:41 +00:00)

Compare commits: v2.5.0...668c092af3 (135 commits)
Commit SHA1s:

668c092af3  209e61402d  de90cc6fb3  cf89abdbae  f90d5265d6
ebc4d7e9eb  45f16f1875  5da0e4c086  1441771bbf  0bc4621a31
7267a8516b  e36f1124bf  2f74457227  67617953b4  a35d8d6a33
b5a5203f8b  3c49e64ca2  8ec7be4734  100a1198b2  96be2b61c4
f5071954e8  4067d88573  fc3b4e8a61  fbb4d78dec  245527a2ef
4512585405  5f5fe18015  e12ca0e584  ffe0943825  c763b521be
9e7258bb2a  baa26e3237  513ea69ce3  027a82c128  e0f036e43d
737cb1986a  0ef5f0d812  248cda557c  b0e6c31fb8  ce8f47aa7f
829abbf5d3  c232e225e7  e7dd821189  375cc8a558  a458940404
ff17951ef9  8c7f485a57  17e486d015  5266f0ac59  bdd8d7ab6c
38e0a049f6  6b40481b02  b55f63c13c  816eb040ab  1ec7035ff5
74cfa7995e  a0e43af4ae  b7a9db5c9b  b2feaf19c2  71d51d8208
af2564d3e0  78ab00f877  f3d3fc848b  aae9c5619e  e59743163d
78b1672eeb  f1c461fccf  0b18612ac3  87375a4a68  24a74833cc
d0507d9a8a  f093f3520b  8d029eb508  07d6abdb9c  e8f370c197
58ed00ec48  02eeef4973  37a6d38b2d  208adf42c8  ad1ae68cd0
5d414b88ab  a6989ad592  6d8169070c  3d4a25053b  e59197f91b
ca8fa4002c  c64be944bf  138368ff4f  a301a0ad83  0c0d1a854a
0aa1597c2b  46d2898cef  c90ecaa5a1  49abb091ed  8801c887e9
68792bf56a  31c576896e  3be8c93277  1cdd3bbdf6  e5b96fe4da
a339507743  febe8330ca  b5fa2d5c02  e2bed85912  7c0f15b688
cbc3287af3  a2730492f0  c2766acabb  363576b9ea  b1a097ef7b
2c79a825c0  4e7fcc37b4  c506bed1ae  9b7572ffb2  9e8c9af501
84e1697bff  e4d886f503  ada1eee648  44ac6d8e25  3c5b7d242c
eb75a1edc1  181b35e268  1934d574ce  d599443ba5  eb8fe2c24b
dec8b8030e  785a14adbe  e84bc6af29  b4eabb6049  550eb4925d
5282566eab  804ec66d7a  b37d4e9e86  7b5334ddb5  75cbfb4be9
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"plugins": ["jest", "@typescript-eslint"],
|
||||
"extends": ["plugin:github/es6"],
|
||||
"extends": ["plugin:github/internal"],
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 9,
|
||||
@@ -16,13 +16,10 @@
|
||||
"@typescript-eslint/no-require-imports": "error",
|
||||
"@typescript-eslint/array-type": "error",
|
||||
"@typescript-eslint/await-thenable": "error",
|
||||
"@typescript-eslint/ban-ts-ignore": "error",
|
||||
"camelcase": "off",
|
||||
"@typescript-eslint/camelcase": "off",
|
||||
"@typescript-eslint/class-name-casing": "error",
|
||||
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
|
||||
"@typescript-eslint/func-call-spacing": ["error", "never"],
|
||||
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
|
||||
"@typescript-eslint/no-array-constructor": "error",
|
||||
"@typescript-eslint/no-empty-interface": "error",
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
@@ -32,7 +29,6 @@
|
||||
"@typescript-eslint/no-misused-new": "error",
|
||||
"@typescript-eslint/no-namespace": "error",
|
||||
"@typescript-eslint/no-non-null-assertion": "warn",
|
||||
"@typescript-eslint/no-object-literal-type-assertion": "error",
|
||||
"@typescript-eslint/no-unnecessary-qualifier": "error",
|
||||
"@typescript-eslint/no-unnecessary-type-assertion": "error",
|
||||
"@typescript-eslint/no-useless-constructor": "error",
|
||||
@@ -40,7 +36,6 @@
|
||||
"@typescript-eslint/prefer-for-of": "warn",
|
||||
"@typescript-eslint/prefer-function-type": "warn",
|
||||
"@typescript-eslint/prefer-includes": "error",
|
||||
"@typescript-eslint/prefer-interface": "error",
|
||||
"@typescript-eslint/prefer-string-starts-ends-with": "error",
|
||||
"@typescript-eslint/promise-function-async": ["error", { "allowAny": true }],
|
||||
"@typescript-eslint/require-array-sort-compare": "error",
|
||||
|
||||
.github/workflows/build.yml (10 changed lines, vendored)
@@ -1,6 +1,8 @@
|
||||
name: "Build"
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore: [ '*.md' ]
|
||||
branches:
|
||||
- master
|
||||
|
||||
@@ -8,7 +10,11 @@ jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'npm'
|
||||
- run: |
|
||||
npm install
|
||||
npm run all
|
||||
@@ -16,7 +22,7 @@ jobs:
|
||||
self-test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
- uses: ./
|
||||
id: filter
|
||||
with:
|
||||
|
||||
.github/workflows/pull-request-verification.yml (55 changed lines, vendored)
@@ -1,23 +1,30 @@
|
||||
name: "Pull Request Verification"
|
||||
on:
|
||||
pull_request:
|
||||
paths-ignore: [ '*.md' ]
|
||||
branches:
|
||||
- master
|
||||
- develop
|
||||
- '**'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'npm'
|
||||
- run: |
|
||||
npm install
|
||||
npm run all
|
||||
|
||||
test-inline:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: read
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
- uses: ./
|
||||
id: filter
|
||||
with:
|
||||
@@ -29,11 +36,16 @@ jobs:
|
||||
- name: filter-test
|
||||
if: steps.filter.outputs.any != 'true' || steps.filter.outputs.error == 'true'
|
||||
run: exit 1
|
||||
- name: changes-test
|
||||
if: contains(fromJSON(steps.filter.outputs.changes), 'error') || !contains(fromJSON(steps.filter.outputs.changes), 'any')
|
||||
run: exit 1
|
||||
|
||||
test-external:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: read
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
- uses: ./
|
||||
id: filter
|
||||
with:
|
||||
@@ -45,7 +57,7 @@ jobs:
|
||||
test-without-token:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
- uses: ./
|
||||
id: filter
|
||||
with:
|
||||
@@ -58,7 +70,7 @@ jobs:
|
||||
test-wd-without-token:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
path: somewhere
|
||||
- uses: ./somewhere
|
||||
@@ -71,10 +83,30 @@ jobs:
|
||||
if: steps.filter.outputs.any != 'true' || steps.filter.outputs.error == 'true'
|
||||
run: exit 1
|
||||
|
||||
test-local-changes:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- run: echo "NEW FILE" > local
|
||||
- run: git add local
|
||||
- uses: ./
|
||||
id: filter
|
||||
with:
|
||||
base: HEAD
|
||||
filters: |
|
||||
local:
|
||||
- local
|
||||
- name: filter-test
|
||||
if: steps.filter.outputs.local != 'true'
|
||||
run: exit 1
|
||||
- name: count-test
|
||||
if: steps.filter.outputs.local_count != 1
|
||||
run: exit 1
|
||||
|
||||
test-change-type:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
- name: configure GIT user
|
||||
run: git config user.email "john@nowhere.local" && git config user.name "John Doe"
|
||||
- name: modify working tree
|
||||
@@ -102,15 +134,12 @@ jobs:
|
||||
- name: Print 'deleted_files'
|
||||
run: echo ${{steps.filter.outputs.deleted_files}}
|
||||
- name: filter-test
|
||||
# only single quotes are supported in GH action literal
|
||||
# single quote needs to be escaped with single quote
|
||||
# '''add.txt''' resolves to string 'add.txt'
|
||||
if: |
|
||||
steps.filter.outputs.added != 'true'
|
||||
|| steps.filter.outputs.deleted != 'true'
|
||||
|| steps.filter.outputs.modified != 'true'
|
||||
|| steps.filter.outputs.any != 'true'
|
||||
|| steps.filter.outputs.added_files != '''add.txt'''
|
||||
|| steps.filter.outputs.modified_files != '''LICENSE'''
|
||||
|| steps.filter.outputs.deleted_files != '''README.md'''
|
||||
|| steps.filter.outputs.added_files != 'add.txt'
|
||||
|| steps.filter.outputs.modified_files != 'LICENSE'
|
||||
|| steps.filter.outputs.deleted_files != 'README.md'
|
||||
run: exit 1
|
||||
|
||||
CHANGELOG.md (69 changed lines)
@@ -1,5 +1,74 @@
|
||||
# Changelog
|
||||
|
||||
## v3.0.2
|
||||
- [Add config parameter for predicate quantifier](https://github.com/dorny/paths-filter/pull/224)
|
||||
|
||||
## v3.0.1
|
||||
- [Compare base and ref when token is empty](https://github.com/dorny/paths-filter/pull/133)
|
||||
|
||||
## v3.0.0
|
||||
- [Update to Node.js 20](https://github.com/dorny/paths-filter/pull/210)
|
||||
- [Update all dependencies](https://github.com/dorny/paths-filter/pull/215)
|
||||
|
||||
## v2.11.1
|
||||
- [Update @actions/core to v1.10.0 - Fixes warning about deprecated set-output](https://github.com/dorny/paths-filter/pull/167)
|
||||
- [Document need for pull-requests: read permission](https://github.com/dorny/paths-filter/pull/168)
|
||||
- [Updating to actions/checkout@v3](https://github.com/dorny/paths-filter/pull/164)
|
||||
|
||||
## v2.11.0
|
||||
- [Set list-files input parameter as not required](https://github.com/dorny/paths-filter/pull/157)
|
||||
- [Update Node.js](https://github.com/dorny/paths-filter/pull/161)
|
||||
- [Fix incorrect handling of Unicode characters in exec()](https://github.com/dorny/paths-filter/pull/162)
|
||||
- [Use Octokit pagination](https://github.com/dorny/paths-filter/pull/163)
|
||||
- [Updates real world links](https://github.com/dorny/paths-filter/pull/160)
|
||||
|
||||
## v2.10.2
|
||||
- [Fix getLocalRef() returns wrong ref](https://github.com/dorny/paths-filter/pull/91)
|
||||
|
||||
## v2.10.1
|
||||
- [Improve robustness of change detection](https://github.com/dorny/paths-filter/pull/85)
|
||||
|
||||
## v2.10.0
|
||||
- [Add ref input parameter](https://github.com/dorny/paths-filter/pull/82)
|
||||
- [Fix change detection in PR when pullRequest.changed_files is incorrect](https://github.com/dorny/paths-filter/pull/83)
|
||||
|
||||
## v2.9.3
|
||||
- [Fix change detection when base is a tag](https://github.com/dorny/paths-filter/pull/78)
|
||||
|
||||
## v2.9.2
|
||||
- [Fix fetching git history](https://github.com/dorny/paths-filter/pull/75)
|
||||
|
||||
## v2.9.1
|
||||
- [Fix fetching git history + fallback to unshallow repo](https://github.com/dorny/paths-filter/pull/74)
|
||||
|
||||
## v2.9.0
|
||||
- [Add list-files: csv format](https://github.com/dorny/paths-filter/pull/68)
|
||||
|
||||
## v2.8.0
|
||||
- [Add count output variable](https://github.com/dorny/paths-filter/pull/65)
|
||||
- [Fix log grouping of changes](https://github.com/dorny/paths-filter/pull/61)
|
||||
|
||||
## v2.7.0
|
||||
- [Add "changes" output variable to support matrix job configuration](https://github.com/dorny/paths-filter/pull/59)
|
||||
- [Improved listing of matching files with `list-files: shell` and `list-files: escape` options](https://github.com/dorny/paths-filter/pull/58)
|
||||
|
||||
## v2.6.0
|
||||
- [Support local changes](https://github.com/dorny/paths-filter/pull/53)
|
||||
|
||||
## v2.5.3
|
||||
- [Fixed mapping of removed/deleted change status from github API](https://github.com/dorny/paths-filter/pull/51)
|
||||
- [Fixed retrieval of all changes via Github API when there are 100+ changes](https://github.com/dorny/paths-filter/pull/50)
|
||||
|
||||
## v2.5.2
|
||||
- [Add support for multiple patterns when using file status](https://github.com/dorny/paths-filter/pull/48)
|
||||
- [Use picomatch directly instead of micromatch wrapper](https://github.com/dorny/paths-filter/pull/49)
|
||||
|
||||
## v2.5.1
|
||||
- [Improved path matching with micromatch](https://github.com/dorny/paths-filter/pull/46)
|
||||
|
||||
## v2.5.0
|
||||
- [Support workflows triggered by any event](https://github.com/dorny/paths-filter/pull/44)
|
||||
|
||||
## v2.4.2
|
||||
- [Fixed compatibility with older (<2.23) versions of git](https://github.com/dorny/paths-filter/pull/42)
|
||||
|
||||
|
||||
README.md (364 changed lines)
@@ -1,31 +1,52 @@
|
||||
# Paths Changes Filter
|
||||
|
||||
This [Github Action](https://github.com/features/actions) enables conditional execution of workflow steps and jobs,
|
||||
based on the files modified by pull request, feature branch or in pushed commits.
|
||||
[GitHub Action](https://github.com/features/actions) that enables conditional execution of workflow steps and jobs, based on the files modified by pull request, on a feature
|
||||
branch, or by the recently pushed commits.
|
||||
|
||||
It saves time and resources especially in monorepo setups, where you can run slow tasks (e.g. integration tests or deployments) only for changed components.
|
||||
Github workflows built-in [path filters](https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#onpushpull_requestpaths)
|
||||
doesn't allow this because they doesn't work on a level of individual jobs or steps.
|
||||
Run slow tasks like integration tests or deployments only for changed components. It saves time and resources, especially in monorepo setups.
|
||||
GitHub workflows built-in [path filters](https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#onpushpull_requestpaths)
|
||||
don't allow this because they don't work on a level of individual jobs or steps.
|
||||
|
||||
**Real world usage examples:**
|
||||
|
||||
## Supported workflows:
|
||||
- Pull requests:
|
||||
- Action triggered by **[pull_request](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#pull_request)**
|
||||
- [sentry.io](https://sentry.io/) - [backend.yml](https://github.com/getsentry/sentry/blob/2ebe01feab863d89aa7564e6d243b6d80c230ddc/.github/workflows/backend.yml#L36)
|
||||
- [GoogleChrome/web.dev](https://web.dev/) - [lint-workflow.yml](https://github.com/GoogleChrome/web.dev/blob/3a57b721e7df6fc52172f676ca68d16153bda6a3/.github/workflows/lint-workflow.yml#L26)
|
||||
- [blog post Configuring python linting to be part of CI/CD using GitHub actions](https://dev.to/freshbooks/configuring-python-linting-to-be-part-of-cicd-using-github-actions-1731#what-files-does-it-run-against) - [py_linter.yml](https://github.com/iamtodor/demo-github-actions-python-linter-configuration/blob/main/.github/workflows/py_linter.yml#L31)
|
||||
|
||||
## Supported workflows
|
||||
|
||||
- **Pull requests:**
|
||||
- Workflow triggered by **[pull_request](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#pull_request)**
|
||||
or **[pull_request_target](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#pull_request_target)** event
|
||||
- Changes are detected against the pull request base branch
|
||||
- Uses Github REST API to fetch list of modified files
|
||||
- Feature branches:
|
||||
- Action triggered by **[push](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#push)** event
|
||||
- Changes are detected against the merge-base with configured base branch
|
||||
- Uses GitHub REST API to fetch a list of modified files
|
||||
- Requires [pull-requests: read](https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs) permission
|
||||
- **Feature branches:**
|
||||
- Workflow triggered by **[push](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#push)**
|
||||
or any other **[event](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows)**
|
||||
- The `base` input parameter must not be the same as the branch that triggered the workflow
|
||||
- Changes are detected against the merge-base with the configured base branch or the default branch
|
||||
- Uses git commands to detect changes - repository must be already [checked out](https://github.com/actions/checkout)
|
||||
- Master, Release or other long-lived branches:
|
||||
- Action triggered by **[push](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#push)** event
|
||||
- Changes are detected against the most recent commit on the same branch before the push
|
||||
- **Master, Release, or other long-lived branches:**
|
||||
- Workflow triggered by **[push](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#push)** event
|
||||
when `base` input parameter is the same as the branch that triggered the workflow:
|
||||
- Changes are detected against the most recent commit on the same branch before the push
|
||||
- Workflow triggered by any other **[event](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows)**
|
||||
when `base` input parameter is commit SHA:
|
||||
- Changes are detected against the provided `base` commit
|
||||
- Workflow triggered by any other **[event](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows)**
|
||||
when `base` input parameter is the same as the branch that triggered the workflow:
|
||||
- Changes are detected from the last commit
|
||||
- Uses git commands to detect changes - repository must be already [checked out](https://github.com/actions/checkout)
|
||||
- **Local changes**
|
||||
- Workflow triggered by any event when `base` input parameter is set to `HEAD`
|
||||
- Changes are detected against the current HEAD
|
||||
- Untracked files are ignored
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: changes
|
||||
with:
|
||||
filters: |
|
||||
@@ -33,110 +54,135 @@ doesn't allow this because they doesn't work on a level of individual jobs or st
|
||||
- 'src/**'
|
||||
|
||||
# run only if some file in 'src' folder was changed
|
||||
if: steps.changes.outputs.src == 'true'
|
||||
- if: steps.changes.outputs.src == 'true'
|
||||
run: ...
|
||||
```
|
||||
|
||||
For more scenarios see [examples](#examples) section.
|
||||
|
||||
## Notes:
|
||||
- Paths expressions are evaluated using [minimatch](https://github.com/isaacs/minimatch) library.
|
||||
Documentation for path expression format can be found on project github page.
|
||||
- Minimatch [dot](https://www.npmjs.com/package/minimatch#dot) option is set to true.
|
||||
Globbing will match also paths where file or folder name starts with a dot.
|
||||
- It's recommended to quote your path expressions with `'` or `"`. Otherwise you will get an error if it starts with `*`.
|
||||
## Notes
|
||||
|
||||
- Paths expressions are evaluated using [picomatch](https://github.com/micromatch/picomatch) library.
|
||||
Documentation for path expression format can be found on the project GitHub page.
|
||||
- Picomatch [dot](https://github.com/micromatch/picomatch#options) option is set to true.
|
||||
Globbing will also match paths where file or folder name starts with a dot.
|
||||
- It's recommended to quote your path expressions with `'` or `"`. Otherwise, you will get an error if it starts with `*`.
|
||||
- Local execution with [act](https://github.com/nektos/act) works only with alternative runner image. Default runner doesn't have `git` binary.
|
||||
- Use: `act -P ubuntu-latest=nektos/act-environments-ubuntu:18.04`
|
||||
|
||||
## What's New
|
||||
|
||||
# What's New
|
||||
- Fixed compatibility with older (<2.23) versions of git
|
||||
- Support for tag pushes and tags as a base reference
|
||||
- Fixes for various edge cases when event payload is incomplete
|
||||
- Supports local execution with [act](https://github.com/nektos/act)
|
||||
- Fixed behavior of feature branch workflow:
|
||||
- Detects only changes introduced by feature branch. Later modifications on base branch are ignored.
|
||||
- Filter by type of file change:
|
||||
- Optionally consider if file was added, modified or deleted
|
||||
- Custom processing of changed files:
|
||||
- Optionally export paths of all files matching the filter
|
||||
- Output can be space-delimited or in JSON format
|
||||
- New major release `v3` after update to Node 20 [Breaking change]
|
||||
- Add `ref` input parameter
|
||||
- Add `list-files: csv` format
|
||||
- Configure matrix job to run for each folder with changes using `changes` output
|
||||
- Improved listing of matching files with `list-files: shell` and `list-files: escape` options
|
||||
- Paths expressions are now evaluated using [picomatch](https://github.com/micromatch/picomatch) library
|
||||
|
||||
For more information see [CHANGELOG](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
|
||||
For more information, see [CHANGELOG](https://github.com/dorny/paths-filter/blob/master/CHANGELOG.md)
|
||||
|
||||
# Usage
|
||||
## Usage
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
- uses: dorny/paths-filter@v3
|
||||
with:
|
||||
# Defines filters applied to detected changed files.
|
||||
# Each filter has a name and list of rules.
|
||||
# Each filter has a name and a list of rules.
|
||||
# Rule is a glob expression - paths of all changed
|
||||
# files are matched against it.
|
||||
# Rule can optionally specify if the file
|
||||
# should be added, modified or deleted.
|
||||
# For each filter there will be corresponding output variable to
|
||||
# should be added, modified, or deleted.
|
||||
# For each filter, there will be a corresponding output variable to
|
||||
# indicate if there's a changed file matching any of the rules.
|
||||
# Optionally there can be a second output variable
|
||||
# Optionally, there can be a second output variable
|
||||
# set to list of all files matching the filter.
|
||||
# Filters can be provided inline as a string (containing valid YAML document)
|
||||
# or as a relative path to separate file (e.g.: .github/filters.yaml).
|
||||
# Multiline string is evaluated as embedded filter definition,
|
||||
# single line string is evaluated as relative path to separate file.
|
||||
# Filters can be provided inline as a string (containing valid YAML document),
|
||||
# or as a relative path to a file (e.g.: .github/filters.yaml).
|
||||
# Filters syntax is documented by example - see examples section.
|
||||
filters: ''
|
||||
|
||||
# Branch or tag against which the changes will be detected.
|
||||
# If it references same branch it was pushed to,
|
||||
# Branch, tag, or commit SHA against which the changes will be detected.
|
||||
# If it references the same branch it was pushed to,
|
||||
# changes are detected against the most recent commit before the push.
|
||||
# Otherwise it uses git merge-base to find best common ancestor between
|
||||
# Otherwise, it uses git merge-base to find the best common ancestor between
|
||||
# current branch (HEAD) and base.
|
||||
# When merge-base is found, it's used for change detection - only changes
|
||||
# introduced by current branch are considered.
|
||||
# introduced by the current branch are considered.
|
||||
# All files are considered as added if there is no common ancestor with
|
||||
# base branch or no previous commit.
|
||||
# This option is ignored if action is triggered by pull_request event.
|
||||
# Default: repository default branch (e.g. master)
|
||||
base: ''
|
||||
|
||||
# How many commits are initially fetched from base branch.
|
||||
# Git reference (e.g. branch name) from which the changes will be detected.
|
||||
# Useful when workflow can be triggered only on the default branch (e.g. repository_dispatch event)
|
||||
# but you want to get changes on a different branch.
|
||||
# This option is ignored if action is triggered by pull_request event.
|
||||
# default: ${{ github.ref }}
|
||||
ref:
|
||||
|
||||
# How many commits are initially fetched from the base branch.
|
||||
# If needed, each subsequent fetch doubles the
|
||||
# previously requested number of commits until the merge-base
|
||||
# is found or there are no more commits in the history.
|
||||
# is found, or there are no more commits in the history.
|
||||
# This option takes effect only when changes are detected
|
||||
# using git against base branch (feature branch workflow).
|
||||
# Default: 20
|
||||
# Default: 100
|
||||
initial-fetch-depth: ''
|
||||
|
||||
# Enables listing of files matching the filter:
|
||||
# 'none' - Disables listing of matching files (default).
|
||||
# 'json' - Matching files paths are formatted as JSON array.
|
||||
# 'shell' - Matching files paths are escaped and space-delimited.
|
||||
# Output is usable as command line argument list in linux shell.
|
||||
# 'csv' - Coma separated list of filenames.
|
||||
# If needed, it uses double quotes to wrap filename with unsafe characters.
|
||||
# 'json' - File paths are formatted as JSON array.
|
||||
# 'shell' - Space delimited list usable as command-line argument list in Linux shell.
|
||||
# If needed, it uses single or double quotes to wrap filename with unsafe characters.
|
||||
# 'escape'- Space delimited list usable as command-line argument list in Linux shell.
|
||||
# Backslash escapes every potentially unsafe character.
|
||||
# Default: none
|
||||
list-files: ''
|
||||
|
||||
# Relative path under $GITHUB_WORKSPACE where the repository was checked out.
|
||||
working-directory: ''
|
||||
|
||||
# Personal access token used to fetch list of changed files
|
||||
# from Github REST API.
|
||||
# It's used only if action is triggered by pull request event.
|
||||
# Github token from workflow context is used as default value.
|
||||
# If empty string is provided, action falls back to detect
|
||||
# Personal access token used to fetch a list of changed files
|
||||
# from GitHub REST API.
|
||||
# It's only used if action is triggered by a pull request event.
|
||||
# GitHub token from workflow context is used as default value.
|
||||
# If an empty string is provided, the action falls back to detect
|
||||
# changes using git commands.
|
||||
# Default: ${{ github.token }}
|
||||
token: ''
|
||||
|
||||
# Optional parameter to override the default behavior of file matching algorithm.
|
||||
# By default files that match at least one pattern defined by the filters will be included.
|
||||
# This parameter allows to override the "at least one pattern" behavior to make it so that
|
||||
# all of the patterns have to match or otherwise the file is excluded.
|
||||
# An example scenario where this is useful if you would like to match all
|
||||
# .ts files in a sub-directory but not .md files.
|
||||
# The filters below will match markdown files despite the exclusion syntax UNLESS
|
||||
# you specify 'every' as the predicate-quantifier parameter. When you do that,
|
||||
# it will only match the .ts files in the subdirectory as expected.
|
||||
#
|
||||
# backend:
|
||||
# - 'pkg/a/b/c/**'
|
||||
# - '!**/*.jpeg'
|
||||
# - '!**/*.md'
|
||||
predicate-quantifier: 'some'
|
||||
```
|
||||
|
||||
## Outputs
|
||||
- For each filter it sets output variable named by the filter to the text:
|
||||
- `'true'` - if **any** of changed files matches any of filter rules
|
||||
- `'false'` - if **none** of changed files matches any of filter rules
|
||||
- If enabled, for each filter it sets output variable with name `${FILTER_NAME}_files`. It will contain list of all files matching the filter.
|
||||
|
||||
# Examples
|
||||
- For each filter, it sets output variable named by the filter to the text:
|
||||
- `'true'` - if **any** of changed files matches any of filter rules
|
||||
- `'false'` - if **none** of changed files matches any of filter rules
|
||||
- For each filter, it sets an output variable with the name `${FILTER_NAME}_count` to the count of matching files.
|
||||
- If enabled, for each filter it sets an output variable with the name `${FILTER_NAME}_files`. It will contain a list of all files matching the filter.
|
||||
- `changes` - JSON array with names of all filters matching any of the changed files.
|
||||
|
||||
## Conditional execution
|
||||
## Examples
|
||||
|
||||
### Conditional execution
|
||||
|
||||
<details>
|
||||
<summary>Execute <b>step</b> in a workflow job only if some file in a subfolder is changed</summary>
|
||||
@@ -146,8 +192,8 @@ jobs:
|
||||
tests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: dorny/paths-filter@v2
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
filters: |
|
||||
@@ -171,6 +217,7 @@ jobs:
|
||||
if: steps.filter.outputs.backend == 'true' || steps.filter.outputs.frontend == 'true'
|
||||
run: ...
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
@@ -181,13 +228,16 @@ jobs:
|
||||
# JOB to run change detection
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
# Required permissions
|
||||
permissions:
|
||||
pull-requests: read
|
||||
# Set job outputs to values from filter step
|
||||
outputs:
|
||||
backend: ${{ steps.filter.outputs.backend }}
|
||||
frontend: ${{ steps.filter.outputs.frontend }}
|
||||
steps:
|
||||
# For pull requests it's not necessary to checkout the code
|
||||
- uses: dorny/paths-filter@v2
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
filters: |
|
||||
@@ -202,7 +252,7 @@ jobs:
|
||||
if: ${{ needs.changes.outputs.backend == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
- ...
|
||||
|
||||
# JOB to build and test frontend code
|
||||
@@ -211,12 +261,52 @@ jobs:
|
||||
if: ${{ needs.changes.outputs.frontend == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
- ...
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
## Change detection workflows
|
||||
<details>
|
||||
<summary>Use change detection to configure matrix job</summary>
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
# JOB to run change detection
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
# Required permissions
|
||||
permissions:
|
||||
pull-requests: read
|
||||
outputs:
|
||||
# Expose matched filters as job 'packages' output variable
|
||||
packages: ${{ steps.filter.outputs.changes }}
|
||||
steps:
|
||||
# For pull requests it's not necessary to checkout the code
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
filters: |
|
||||
package1: src/package1
|
||||
package2: src/package2
|
||||
|
||||
# JOB to build and test each of modified packages
|
||||
build:
|
||||
needs: changes
|
||||
strategy:
|
||||
matrix:
|
||||
# Parse JSON array containing names of all filters matching any of changed files
|
||||
# e.g. ['package1', 'package2'] if both package folders contains changes
|
||||
package: ${{ fromJSON(needs.changes.outputs.packages) }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- ...
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Change detection workflows
|
||||
|
||||
<details>
|
||||
<summary><b>Pull requests:</b> Detect changes against PR base branch</summary>
|
||||
@@ -224,19 +314,23 @@ jobs:
|
||||
```yaml
|
||||
on:
|
||||
pull_request:
|
||||
branches: # PRs to following branches will trigger the workflow
|
||||
branches: # PRs to the following branches will trigger the workflow
|
||||
- master
|
||||
- develop
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
# Required permissions
|
||||
permissions:
|
||||
pull-requests: read
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: dorny/paths-filter@v2
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
filters: ... # Configure your filters
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
@@ -251,22 +345,51 @@ jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
# This may save additional git fetch roundtrip if
|
||||
# merge-base is found within latest 20 commits
|
||||
fetch-depth: 20
|
||||
- uses: dorny/paths-filter@v2
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
base: develop # Change detection against merge-base with this branch
|
||||
filters: ... # Configure your filters
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Long lived branches:</b> Detect changes against the most recent commit on the same branch before the push</summary>
|
||||
|
||||
```yaml
|
||||
on:
|
||||
push:
|
||||
branches: # Push to the following branches will trigger the workflow
|
||||
- master
|
||||
- develop
|
||||
- release/**
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
# Use context to get the branch where commits were pushed.
|
||||
# If there is only one long-lived branch (e.g. master),
|
||||
# you can specify it directly.
|
||||
# If it's not configured, the repository default branch is used.
|
||||
base: ${{ github.ref }}
|
||||
filters: ... # Configure your filters
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Local changes:</b> Detect staged and unstaged local changes</summary>
|
||||
|
||||
```yaml
|
||||
on:
|
||||
push:
|
||||
@@ -278,38 +401,42 @@ jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: dorny/paths-filter@v2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# Some action that modifies files tracked by git (e.g. code linter)
|
||||
- uses: johndoe/some-action@v1
|
||||
|
||||
# Filter to detect which files were modified
|
||||
# Changes could be, for example, automatically committed
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
# Use context to get branch where commits were pushed.
|
||||
# If there is only one long lived branch (e.g. master),
|
||||
# you can specify it directly.
|
||||
# If it's not configured, the repository default branch is used.
|
||||
base: ${{ github.ref }}
|
||||
base: HEAD
|
||||
filters: ... # Configure your filters
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
## Advanced options
|
||||
### Advanced options
|
||||
|
||||
<details>
|
||||
<summary>Define filter rules in own file</summary>
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
# Path to file where filters are defined
|
||||
filters: .github/filters.yaml
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Use YAML anchors to reuse path expression(s) inside another rule</summary>
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
# &shared is YAML anchor,
|
||||
@@ -323,42 +450,74 @@ jobs:
|
||||
- *shared
|
||||
- src/**
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Consider if file was added, modified or deleted</summary>
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
# Changed file can be 'added', 'modified', or 'deleted'.
|
||||
# By default the type of change is not considered.
|
||||
# Optionally it's possible to specify it using nested
|
||||
# dictionary, where type(s) of change composes the key.
|
||||
# Multiple change types can be specified using `|` as delimiter.
|
||||
# By default, the type of change is not considered.
|
||||
# Optionally, it's possible to specify it using nested
|
||||
# dictionary, where the type of change composes the key.
|
||||
# Multiple change types can be specified using `|` as the delimiter.
|
||||
filters: |
|
||||
shared: &shared
|
||||
- common/**
|
||||
- config/**
|
||||
addedOrModified:
|
||||
- added|modified: '**'
|
||||
allChanges:
|
||||
- added|deleted|modified: '**'
|
||||
addedOrModifiedAnchors:
|
||||
- added|modified: *shared
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Detect changes in folder only for some file extensions</summary>
|
||||
|
||||
## Custom processing of changed files
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
# This makes it so that all the patterns have to match a file for it to be
|
||||
# considered changed. Because we have the exclusions for .jpeg and .md files
|
||||
# the end result is that if those files are changed they will be ignored
|
||||
# because they don't match the respective rules excluding them.
|
||||
#
|
||||
# This can be leveraged to ensure that you only build & test software changes
|
||||
# that have real impact on the behavior of the code, e.g. you can set up your
|
||||
# build to run when Typescript/Rust/etc. files are changed but markdown
|
||||
# changes in the diff will be ignored and you consume less resources to build.
|
||||
predicate-quantifier: 'every'
|
||||
filters: |
|
||||
backend:
|
||||
- 'pkg/a/b/c/**'
|
||||
- '!**/*.jpeg'
|
||||
- '!**/*.md'
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Custom processing of changed files
|
||||
|
||||
<details>
|
||||
<summary>Passing list of modified files as command line args in Linux shell</summary>
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
# Enable listing of files matching each filter.
|
||||
# Paths to files will be available in `${FILTER_NAME}_files` output variable.
|
||||
# Paths will be escaped and space-delimited.
|
||||
# Output is usable as command line argument list in linux shell
|
||||
# Output is usable as command-line argument list in Linux shell
|
||||
list-files: shell
|
||||
|
||||
# In this example changed files will be checked by linter.
|
||||
@@ -371,13 +530,14 @@ jobs:
|
||||
if: ${{ steps.filter.outputs.markdown == 'true' }}
|
||||
run: npx textlint ${{ steps.filter.outputs.markdown_files }}
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Passing list of modified files as JSON array to another action</summary>
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
- uses: dorny/paths-filter@v3
|
||||
id: filter
|
||||
with:
|
||||
# Enable listing of files matching each filter.
|
||||
@@ -385,7 +545,7 @@ jobs:
|
||||
# Paths will be formatted as JSON array
|
||||
list-files: json
|
||||
|
||||
# In this example all changed files are passed to following action to do
|
||||
# In this example all changed files are passed to the following action to do
|
||||
# some custom processing.
|
||||
filters: |
|
||||
changed:
|
||||
@@ -393,11 +553,15 @@ jobs:
|
||||
- name: Lint Markdown
|
||||
uses: johndoe/some-action@v1
|
||||
with:
|
||||
files: ${{ steps.filter.changed_files }}
|
||||
files: ${{ steps.filter.outputs.changed_files }}
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
## See also
|
||||
|
||||
# License
|
||||
- [test-reporter](https://github.com/dorny/test-reporter) - Displays test results from popular testing frameworks directly in GitHub
|
||||
|
||||
## License
|
||||
|
||||
The scripts and documentation in this project are released under the [MIT License](https://github.com/dorny/paths-filter/blob/master/LICENSE)
|
||||
|
||||
__tests__/csv-escape.test.ts (23 changed lines, new file)
@@ -0,0 +1,23 @@
|
||||
import {csvEscape} from '../src/list-format/csv-escape'
|
||||
|
||||
describe('csvEscape() backslash escapes every character except subset of definitely safe characters', () => {
|
||||
test('simple filename should not be modified', () => {
|
||||
expect(csvEscape('file.txt')).toBe('file.txt')
|
||||
})
|
||||
|
||||
test('directory separator should be preserved and not escaped', () => {
|
||||
expect(csvEscape('path/to/file.txt')).toBe('path/to/file.txt')
|
||||
})
|
||||
|
||||
test('filename with spaces should be quoted', () => {
|
||||
expect(csvEscape('file with space')).toBe('"file with space"')
|
||||
})
|
||||
|
||||
test('filename with "," should be quoted', () => {
|
||||
expect(csvEscape('file, with coma')).toBe('"file, with coma"')
|
||||
})
|
||||
|
||||
test('Double quote should be escaped by another double quote', () => {
|
||||
expect(csvEscape('file " with double quote')).toBe('"file "" with double quote"')
|
||||
})
|
||||
})
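These tests pin down the expected `csvEscape` behaviour. A minimal sketch that satisfies them (an illustrative assumption; the actual `src/list-format/csv-escape.ts` may choose a different safe-character set):

```ts
// Sketch: quote RFC 4180-style when the value contains anything outside a
// conservative safe set, doubling any embedded double quotes.
export function csvEscape(value: string): string {
  if (/^[a-zA-Z0-9._+:@%/-]*$/.test(value)) {
    return value // only definitely safe characters - no quoting needed
  }
  return `"${value.replace(/"/g, '""')}"`
}
```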
|
||||
@@ -1,4 +1,4 @@
|
||||
import {Filter} from '../src/filter'
|
||||
import {Filter, FilterConfig, PredicateQuantifier} from '../src/filter'
|
||||
import {File, ChangeStatus} from '../src/file'
|
||||
|
||||
describe('yaml filter parsing tests', () => {
|
||||
@@ -98,6 +98,56 @@ describe('matching tests', () => {
|
||||
expect(match.dot).toEqual(files)
|
||||
})
|
||||
|
||||
test('matches all except tsx and less files (negate a group with or-ed parts)', () => {
|
||||
const yaml = `
|
||||
backend:
|
||||
- '!(**/*.tsx|**/*.less)'
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const tsxFiles = modified(['src/ui.tsx'])
|
||||
const lessFiles = modified(['src/ui.less'])
|
||||
const pyFiles = modified(['src/server.py'])
|
||||
|
||||
const tsxMatch = filter.match(tsxFiles)
|
||||
const lessMatch = filter.match(lessFiles)
|
||||
const pyMatch = filter.match(pyFiles)
|
||||
|
||||
expect(tsxMatch.backend).toEqual([])
|
||||
expect(lessMatch.backend).toEqual([])
|
||||
expect(pyMatch.backend).toEqual(pyFiles)
|
||||
})
|
||||
|
||||
test('matches only files that are matching EVERY pattern when set to PredicateQuantifier.EVERY', () => {
|
||||
const yaml = `
|
||||
backend:
|
||||
- 'pkg/a/b/c/**'
|
||||
- '!**/*.jpeg'
|
||||
- '!**/*.md'
|
||||
`
|
||||
const filterConfig: FilterConfig = {predicateQuantifier: PredicateQuantifier.EVERY}
|
||||
const filter = new Filter(yaml, filterConfig)
|
||||
|
||||
const typescriptFiles = modified(['pkg/a/b/c/some-class.ts', 'pkg/a/b/c/src/main/some-class.ts'])
|
||||
const otherPkgTypescriptFiles = modified(['pkg/x/y/z/some-class.ts', 'pkg/x/y/z/src/main/some-class.ts'])
|
||||
const otherPkgJpegFiles = modified(['pkg/x/y/z/some-pic.jpeg', 'pkg/x/y/z/src/main/jpeg/some-pic.jpeg'])
|
||||
const docsFiles = modified([
|
||||
'pkg/a/b/c/some-pics.jpeg',
|
||||
'pkg/a/b/c/src/main/jpeg/some-pic.jpeg',
|
||||
'pkg/a/b/c/src/main/some-docs.md',
|
||||
'pkg/a/b/c/some-docs.md'
|
||||
])
|
||||
|
||||
const typescriptMatch = filter.match(typescriptFiles)
|
||||
const otherPkgTypescriptMatch = filter.match(otherPkgTypescriptFiles)
|
||||
const docsMatch = filter.match(docsFiles)
|
||||
const otherPkgJpegMatch = filter.match(otherPkgJpegFiles)
|
||||
|
||||
expect(typescriptMatch.backend).toEqual(typescriptFiles)
|
||||
expect(otherPkgTypescriptMatch.backend).toEqual([])
|
||||
expect(docsMatch.backend).toEqual([])
|
||||
expect(otherPkgJpegMatch.backend).toEqual([])
|
||||
})
|
||||
|
||||
test('matches path based on rules included using YAML anchor', () => {
|
||||
const yaml = `
|
||||
shared: &shared
|
||||
@@ -146,6 +196,20 @@ describe('matching specific change status', () => {
|
||||
const match = filter.match(files)
|
||||
expect(match.addOrModify).toEqual(files)
|
||||
})
|
||||
|
||||
test('matches when using an anchor', () => {
|
||||
const yaml = `
|
||||
shared: &shared
|
||||
- common/**/*
|
||||
- config/**/*
|
||||
src:
|
||||
- modified: *shared
|
||||
`
|
||||
let filter = new Filter(yaml)
|
||||
const files = modified(['config/file.js', 'common/anotherFile.js'])
|
||||
const match = filter.match(files)
|
||||
expect(match.src).toEqual(files)
|
||||
})
|
||||
})
|
||||
|
||||
function modified(paths: string[]): File[] {
|
||||
@@ -153,3 +217,9 @@ function modified(paths: string[]): File[] {
|
||||
return {filename, status: ChangeStatus.Modified}
|
||||
})
|
||||
}
|
||||
|
||||
function renamed(paths: string[]): File[] {
|
||||
return paths.map(filename => {
|
||||
return {filename, status: ChangeStatus.Renamed}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -26,4 +26,10 @@ describe('git utility function tests (those not invoking git)', () => {
|
||||
expect(git.getShortName('tags/v1')).toBe('tags/v1')
|
||||
expect(git.getShortName('v1')).toBe('v1')
|
||||
})
|
||||
|
||||
test('isGitSha(ref) returns true only for 40 characters of a-z and 0-9', () => {
|
||||
expect(git.isGitSha('8b399ed1681b9efd6b1e048ca1c5cba47edf3855')).toBeTruthy()
|
||||
expect(git.isGitSha('This_is_very_long_name_for_a_branch_1111')).toBeFalsy()
|
||||
expect(git.isGitSha('master')).toBeFalsy()
|
||||
})
|
||||
})
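As the test name spells out, `isGitSha` only needs to recognise a full 40-character object name. A plausible one-liner (an assumption for illustration, not necessarily the exact source):

```ts
// Sketch: a full SHA-1 object name is exactly 40 characters of [a-z0-9].
export function isGitSha(ref: string): boolean {
  return /^[a-z0-9]{40}$/.test(ref)
}
```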
|
||||
|
||||
@@ -1,16 +1,57 @@
|
||||
import shellEscape from '../src/shell-escape'
|
||||
import {backslashEscape, shellEscape} from '../src/list-format/shell-escape'
|
||||
|
||||
test('simple path escaped', () => {
|
||||
expect(shellEscape('file')).toBe("'file'")
|
||||
describe('escape() backslash escapes every character except subset of definitely safe characters', () => {
|
||||
test('simple filename should not be modified', () => {
|
||||
expect(backslashEscape('file.txt')).toBe('file.txt')
|
||||
})
|
||||
|
||||
test('directory separator should be preserved and not escaped', () => {
|
||||
expect(backslashEscape('path/to/file.txt')).toBe('path/to/file.txt')
|
||||
})
|
||||
|
||||
test('spaces should be escaped with backslash', () => {
|
||||
expect(backslashEscape('file with space')).toBe('file\\ with\\ space')
|
||||
})
|
||||
|
||||
test('quotes should be escaped with backslash', () => {
|
||||
expect(backslashEscape('file\'with quote"')).toBe('file\\\'with\\ quote\\"')
|
||||
})
|
||||
|
||||
test('$variables should be escaped', () => {
|
||||
expect(backslashEscape('$var')).toBe('\\$var')
|
||||
})
|
||||
})
|
||||
|
||||
test('path with space is wrapped with single quotes', () => {
|
||||
expect(shellEscape('file with space')).toBe("'file with space'")
|
||||
})
|
||||
describe('shellEscape() returns human readable filenames with as few escaping applied as possible', () => {
|
||||
test('simple filename should not be modified', () => {
|
||||
expect(shellEscape('file.txt')).toBe('file.txt')
|
||||
})
|
||||
|
||||
test('path with quote is divided into quoted segments and escaped quote', () => {
|
||||
expect(shellEscape("file'with quote")).toBe("'file'\\''with quote'")
|
||||
})
|
||||
test('path with leading quote does not have double quotes at beginning', () => {
|
||||
expect(shellEscape("'file-leading-quote")).toBe("\\''file-leading-quote'")
|
||||
test('directory separator should be preserved and not escaped', () => {
|
||||
expect(shellEscape('path/to/file.txt')).toBe('path/to/file.txt')
|
||||
})
|
||||
|
||||
test('filename with spaces should be quoted', () => {
|
||||
expect(shellEscape('file with space')).toBe("'file with space'")
|
||||
})
|
||||
|
||||
test('filename with spaces should be quoted', () => {
|
||||
expect(shellEscape('file with space')).toBe("'file with space'")
|
||||
})
|
||||
|
||||
test('filename with $ should be quoted', () => {
|
||||
expect(shellEscape('$var')).toBe("'$var'")
|
||||
})
|
||||
|
||||
test('filename with " should be quoted', () => {
|
||||
expect(shellEscape('file"name')).toBe("'file\"name'")
|
||||
})
|
||||
|
||||
test('filename with single quote should be wrapped in double quotes', () => {
|
||||
expect(shellEscape("file'with quote")).toBe('"file\'with quote"')
|
||||
})
|
||||
|
||||
test('filename with single quote and special characters is split and quoted/escaped as needed', () => {
|
||||
expect(shellEscape("file'with $quote")).toBe("file\\''with $quote'")
|
||||
})
|
||||
})
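The `backslashEscape` group above describes the `escape` list format: every character outside a small safe set gets a leading backslash, while `shellEscape` (second group) prefers quoting for readability. A sketch of `backslashEscape` consistent with these tests (the safe set is an assumption; the real `src/list-format/shell-escape.ts` may differ):

```ts
// Sketch: backslash-escape everything that is not a letter, digit, or one of
// a few clearly safe punctuation characters (including the '/' separator).
export function backslashEscape(value: string): string {
  return value.replace(/([^a-zA-Z0-9,._+:@%/-])/g, '\\$1')
}
```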
|
||||
|
||||
action.yml (28 changed lines)
@@ -9,6 +9,11 @@ inputs:
|
||||
working-directory:
|
||||
description: 'Relative path under $GITHUB_WORKSPACE where the repository was checked out.'
|
||||
required: false
|
||||
ref:
|
||||
description: |
|
||||
Git reference (e.g. branch name) from which the changes will be detected.
|
||||
This option is ignored if action is triggered by pull_request event.
|
||||
required: false
|
||||
base:
|
||||
description: |
|
||||
Git reference (e.g. branch name) against which the changes will be detected. Defaults to repository default branch (e.g. master).
|
||||
@@ -22,9 +27,14 @@ inputs:
|
||||
description: |
|
||||
Enables listing of files matching the filter:
|
||||
'none' - Disables listing of matching files (default).
|
||||
'json' - Matching files paths are serialized as JSON array.
|
||||
'shell' - Matching files paths are escaped and space-delimited. Output is usable as command line argument list in linux shell.
|
||||
required: true
|
||||
'csv' - Coma separated list of filenames.
|
||||
If needed it uses double quotes to wrap filename with unsafe characters.
|
||||
'json' - Serialized as JSON array.
|
||||
'shell' - Space delimited list usable as command line argument list in linux shell.
|
||||
If needed it uses single or double quotes to wrap filename with unsafe characters.
|
||||
'escape'- Space delimited list usable as command line argument list in linux shell.
|
||||
Backslash escapes every potentially unsafe character.
|
||||
required: false
|
||||
default: none
|
||||
initial-fetch-depth:
|
||||
description: |
|
||||
@@ -33,9 +43,17 @@ inputs:
|
||||
until the merge-base is found or there are no more commits in the history.
|
||||
This option takes effect only when changes are detected using git against different base branch.
|
||||
required: false
|
||||
default: '10'
|
||||
default: '100'
|
||||
predicate-quantifier:
|
||||
description: |
|
||||
allows to override the "at least one pattern" behavior to make it so that all of the patterns have to match or otherwise the file is excluded.
|
||||
required: false
|
||||
default: 'some'
|
||||
outputs:
|
||||
changes:
|
||||
description: JSON array with names of all filters matching any of changed files
|
||||
runs:
|
||||
using: 'node12'
|
||||
using: 'node20'
|
||||
main: 'dist/index.js'
|
||||
branding:
|
||||
color: blue
|
||||
|
||||
dist/index.js (66603 changed lines, vendored)
File diff suppressed because one or more lines are too long
package-lock.json (19758 changed lines, generated)
File diff suppressed because it is too large
package.json (47 changed lines)
@@ -1,6 +1,9 @@
|
||||
{
|
||||
"name": "paths-filter",
|
||||
"version": "1.0.0",
|
||||
"engines": {
|
||||
"node": ">= 20"
|
||||
},
|
||||
"private": true,
|
||||
"description": "Execute your workflow steps only if relevant files are modified.",
|
||||
"main": "lib/main.js",
|
||||
@@ -25,30 +28,28 @@
|
||||
"author": "YourNameOrOrganization",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.2.4",
|
||||
"@actions/exec": "^1.0.4",
|
||||
"@actions/github": "^2.2.0",
|
||||
"@octokit/webhooks": "^7.6.2",
|
||||
"minimatch": "^3.0.4"
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/exec": "^1.1.1",
|
||||
"@actions/github": "6.0.0",
|
||||
"picomatch": "^2.3.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/jest": "^25.2.3",
|
||||
"@types/js-yaml": "^3.12.4",
|
||||
"@types/minimatch": "^3.0.3",
|
||||
"@types/node": "^14.0.5",
|
||||
"@typescript-eslint/parser": "^3.3.0",
|
||||
"@zeit/ncc": "^0.22.3",
|
||||
"eslint": "^7.3.0",
|
||||
"eslint-plugin-github": "^2.0.0",
|
||||
"eslint-plugin-jest": "^22.21.0",
|
||||
"jest": "^26.0.1",
|
||||
"jest-circus": "^26.0.1",
|
||||
"js-yaml": "^3.14.0",
|
||||
"prettier": "^2.0.5",
|
||||
"ts-jest": "^26.0.0",
|
||||
"typescript": "^3.9.3"
|
||||
},
|
||||
"jest": {
|
||||
"testEnvironment": "node"
|
||||
"@octokit/webhooks-types": "^7.3.1",
|
||||
"@types/jest": "^29.5.11",
|
||||
"@types/js-yaml": "^4.0.9",
|
||||
"@types/node": "^20.11.6",
|
||||
"@types/picomatch": "^2.3.3",
|
||||
"@typescript-eslint/eslint-plugin": "^6.19.1",
|
||||
"@typescript-eslint/parser": "^6.19.1",
|
||||
"@vercel/ncc": "^0.38.1",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-plugin-github": "^4.10.1",
|
||||
"eslint-plugin-jest": "^27.6.3",
|
||||
"jest": "^29.7.0",
|
||||
"jest-circus": "^29.7.0",
|
||||
"js-yaml": "^4.1.0",
|
||||
"prettier": "^2.8.8",
|
||||
"ts-jest": "^29.1.2",
|
||||
"typescript": "^5.3.3"
|
||||
}
|
||||
}
|
||||
|
||||
src/exec.ts (21 changed lines)
@@ -1,21 +0,0 @@
|
||||
import {exec as execImpl, ExecOptions} from '@actions/exec'
|
||||
|
||||
// Wraps original exec() function
|
||||
// Returns exit code and whole stdout/stderr
|
||||
export default async function exec(commandLine: string, args?: string[], options?: ExecOptions): Promise<ExecResult> {
|
||||
options = options || {}
|
||||
let stdout = ''
|
||||
let stderr = ''
|
||||
options.listeners = {
|
||||
stdout: (data: Buffer) => (stdout += data.toString()),
|
||||
stderr: (data: Buffer) => (stderr += data.toString())
|
||||
}
|
||||
const code = await execImpl(commandLine, args, options)
|
||||
return {code, stdout, stderr}
|
||||
}
|
||||
|
||||
export interface ExecResult {
|
||||
code: number
|
||||
stdout: string
|
||||
stderr: string
|
||||
}
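The wrapper deleted here is superseded by `getExecOutput` from `@actions/exec`, which already resolves to the exit code plus the captured stdout/stderr (see the `src/git.ts` change below). A minimal usage sketch with a hypothetical command, not taken from the repository:

```ts
import {getExecOutput} from '@actions/exec'

async function gitStatus(): Promise<string> {
  // Resolves to {exitCode, stdout, stderr}; ignoreReturnCode lets the caller
  // inspect a non-zero exit code instead of having the call throw.
  const {exitCode, stdout} = await getExecOutput('git', ['status', '--porcelain'], {
    ignoreReturnCode: true
  })
  if (exitCode !== 0) {
    throw new Error(`git exited with code ${exitCode}`)
  }
  return stdout
}
```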
|
||||
@@ -1,5 +1,5 @@
|
||||
import * as jsyaml from 'js-yaml'
|
||||
import * as minimatch from 'minimatch'
|
||||
import picomatch from 'picomatch'
|
||||
import {File, ChangeStatus} from './file'
|
||||
|
||||
// Type definition of object we expect to load from YAML
|
||||
@@ -8,11 +8,11 @@ interface FilterYaml {
|
||||
}
|
||||
type FilterItemYaml =
|
||||
| string // Filename pattern, e.g. "path/to/*.js"
|
||||
| {[changeTypes: string]: string} // Change status and filename, e.g. added|modified: "path/to/*.js"
|
||||
| {[changeTypes: string]: string | string[]} // Change status and filename, e.g. added|modified: "path/to/*.js"
|
||||
| FilterItemYaml[] // Supports referencing another rule via YAML anchor
|
||||
|
||||
// Minimatch options used in all matchers
|
||||
const MinimatchOptions: minimatch.IOptions = {
|
||||
const MatchOptions = {
|
||||
dot: true
|
||||
}
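For context on this change: `picomatch(pattern, options)` compiles the glob once and returns a plain `(path: string) => boolean` predicate, which is why the rule item below stores an `isMatch` function instead of a `minimatch.IMinimatch` object. A small illustration with hypothetical paths:

```ts
import picomatch from 'picomatch'

// With {dot: true} the glob also matches dotfiles such as '.github/...'.
const isMatch = picomatch('src/**', {dot: true})

isMatch('src/.internal/util.ts') // true
isMatch('docs/README.md') // false
```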
|
||||
|
||||
@@ -20,7 +20,49 @@ const MinimatchOptions: minimatch.IOptions = {
|
||||
// Created as simplified form of data in FilterItemYaml
|
||||
interface FilterRuleItem {
|
||||
status?: ChangeStatus[] // Required change status of the matched files
|
||||
matcher: minimatch.IMinimatch // Matches the filename
|
||||
isMatch: (str: string) => boolean // Matches the filename
|
||||
}
|
||||
|
||||
/**
|
||||
* Enumerates the possible logic quantifiers that can be used when determining
|
||||
* if a file is a match or not with multiple patterns.
|
||||
*
|
||||
* The YAML configuration property that is parsed into one of these values is
|
||||
* 'predicate-quantifier' on the top level of the configuration object of the
|
||||
* action.
|
||||
*
|
||||
* The default is to use 'some' which used to be the hardcoded behavior prior to
|
||||
* the introduction of the new mechanism.
|
||||
*
|
||||
* @see https://en.wikipedia.org/wiki/Quantifier_(logic)
|
||||
*/
|
||||
export enum PredicateQuantifier {
|
||||
/**
|
||||
* When choosing 'every' in the config it means that files will only get matched
|
||||
* if all the patterns are satisfied by the path of the file, not just at least one of them.
|
||||
*/
|
||||
EVERY = 'every',
|
||||
/**
|
||||
* When choosing 'some' in the config it means that files will get matched as long as there is
|
||||
* at least one pattern that matches them. This is the default behavior if you don't
|
||||
* specify anything as a predicate quantifier.
|
||||
*/
|
||||
SOME = 'some'
|
||||
}
|
||||
|
||||
/**
|
||||
* Used to define customizations for how the file filtering should work at runtime.
|
||||
*/
|
||||
export type FilterConfig = {readonly predicateQuantifier: PredicateQuantifier}
|
||||
|
||||
/**
|
||||
* An array of strings (at runtime) that contains the valid/accepted values for
|
||||
* the configuration parameter 'predicate-quantifier'.
|
||||
*/
|
||||
export const SUPPORTED_PREDICATE_QUANTIFIERS = Object.values(PredicateQuantifier)
|
||||
|
||||
export function isPredicateQuantifier(x: unknown): x is PredicateQuantifier {
|
||||
return SUPPORTED_PREDICATE_QUANTIFIERS.includes(x as PredicateQuantifier)
|
||||
}
|
||||
|
||||
export interface FilterResults {
|
||||
@@ -31,7 +73,7 @@ export class Filter {
|
||||
rules: {[key: string]: FilterRuleItem[]} = {}
|
||||
|
||||
// Creates instance of Filter and load rules from YAML if it's provided
|
||||
constructor(yaml?: string) {
|
||||
constructor(yaml?: string, readonly filterConfig?: FilterConfig) {
|
||||
if (yaml) {
|
||||
this.load(yaml)
|
||||
}
|
||||
@@ -43,7 +85,7 @@ export class Filter {
|
||||
return
|
||||
}
|
||||
|
||||
const doc = jsyaml.safeLoad(yaml) as FilterYaml
|
||||
const doc = jsyaml.load(yaml) as FilterYaml
|
||||
if (typeof doc !== 'object') {
|
||||
this.throwInvalidFormatError('Root element is not an object')
|
||||
}
|
||||
@@ -62,9 +104,14 @@ export class Filter {
|
||||
}
|
||||
|
||||
private isMatch(file: File, patterns: FilterRuleItem[]): boolean {
|
||||
return patterns.some(
|
||||
rule => (rule.status === undefined || rule.status.includes(file.status)) && rule.matcher.match(file.filename)
|
||||
)
|
||||
const aPredicate = (rule: Readonly<FilterRuleItem>): boolean => {
|
||||
return (rule.status === undefined || rule.status.includes(file.status)) && rule.isMatch(file.filename)
|
||||
}
|
||||
if (this.filterConfig?.predicateQuantifier === 'every') {
|
||||
return patterns.every(aPredicate)
|
||||
} else {
|
||||
return patterns.some(aPredicate)
|
||||
}
|
||||
}
|
||||
|
||||
private parseFilterItemYaml(item: FilterItemYaml): FilterRuleItem[] {
|
||||
@@ -73,14 +120,14 @@ export class Filter {
|
||||
}
|
||||
|
||||
if (typeof item === 'string') {
|
||||
return [{status: undefined, matcher: new minimatch.Minimatch(item, MinimatchOptions)}]
|
||||
return [{status: undefined, isMatch: picomatch(item, MatchOptions)}]
|
||||
}
|
||||
|
||||
if (typeof item === 'object') {
|
||||
return Object.entries(item).map(([key, pattern]) => {
|
||||
if (typeof key !== 'string' || typeof pattern !== 'string') {
|
||||
if (typeof key !== 'string' || (typeof pattern !== 'string' && !Array.isArray(pattern))) {
|
||||
this.throwInvalidFormatError(
|
||||
`Expected [key:string]= pattern:string, but [${key}:${typeof key}]= ${pattern}:${typeof pattern} found`
|
||||
`Expected [key:string]= pattern:string | string[], but [${key}:${typeof key}]= ${pattern}:${typeof pattern} found`
|
||||
)
|
||||
}
|
||||
return {
|
||||
@@ -89,7 +136,7 @@ export class Filter {
|
||||
.map(x => x.trim())
|
||||
.filter(x => x.length > 0)
|
||||
.map(x => x.toLowerCase()) as ChangeStatus[],
|
||||
matcher: new minimatch.Minimatch(pattern, MinimatchOptions)
|
||||
isMatch: picomatch(pattern, MatchOptions)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
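To make the new predicate-quantifier option concrete, here is an illustrative sketch (not part of the diff; the filter name and patterns are made up) of how a Filter built with PredicateQuantifier.EVERY requires a file to satisfy all patterns of a rule, which enables exclusion-style patterns:

// Illustrative only: constructing a Filter with the new second constructor argument.
import {Filter, PredicateQuantifier} from './filter'
import {ChangeStatus} from './file'

const yaml = `
src-without-tests:
  - 'src/**'
  - '!src/**/*.test.ts'
`
const filter = new Filter(yaml, {predicateQuantifier: PredicateQuantifier.EVERY})
const results = filter.match([
  {filename: 'src/app.ts', status: ChangeStatus.Modified}, // satisfies both patterns -> matched
  {filename: 'src/app.test.ts', status: ChangeStatus.Modified} // fails the negated pattern -> not matched
])
// With the default SOME, matching any single pattern would be enough for a file to count.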
220 src/git.ts
@@ -1,14 +1,15 @@
import exec from './exec'
import {getExecOutput} from '@actions/exec'
import * as core from '@actions/core'
import {File, ChangeStatus} from './file'

export const NULL_SHA = '0000000000000000000000000000000000000000'
export const HEAD = 'HEAD'

export async function getChangesInLastCommit(): Promise<File[]> {
  core.startGroup(`Change detection in last commit`)
  let output = ''
  try {
    output = (await exec('git', ['log', '--format=', '--no-renames', '--name-status', '-z', '-n', '1'])).stdout
    output = (await getExecOutput('git', ['log', '--format=', '--no-renames', '--name-status', '-z', '-n', '1'])).stdout
  } finally {
    fixStdOutNullTermination()
    core.endGroup()
@@ -17,20 +18,17 @@ export async function getChangesInLastCommit(): Promise<File[]> {
  return parseGitDiffOutput(output)
}

export async function getChanges(ref: string): Promise<File[]> {
  if (!(await hasCommit(ref))) {
    // Fetch single commit
    core.startGroup(`Fetching ${ref} from origin`)
    await exec('git', ['fetch', '--depth=1', '--no-tags', 'origin', ref])
    core.endGroup()
  }
export async function getChanges(base: string, head: string): Promise<File[]> {
  const baseRef = await ensureRefAvailable(base)
  const headRef = await ensureRefAvailable(head)

  // Get differences between ref and HEAD
  core.startGroup(`Change detection ${ref}..HEAD`)
  core.startGroup(`Change detection ${base}..${head}`)
  let output = ''
  try {
    // Two dots '..' change detection - directly compares two versions
    output = (await exec('git', ['diff', '--no-renames', '--name-status', '-z', `${ref}..HEAD`])).stdout
    output = (await getExecOutput('git', ['diff', '--no-renames', '--name-status', '-z', `${baseRef}..${headRef}`]))
      .stdout
  } finally {
    fixStdOutNullTermination()
    core.endGroup()
@@ -39,50 +37,93 @@ export async function getChanges(ref: string): Promise<File[]> {
  return parseGitDiffOutput(output)
}

export async function getChangesSinceMergeBase(ref: string, initialFetchDepth: number): Promise<File[]> {
  if (!(await hasCommit(ref))) {
    // Fetch and add base branch
    core.startGroup(`Fetching ${ref}`)
    try {
      await exec('git', ['fetch', `--depth=${initialFetchDepth}`, '--no-tags', 'origin', `${ref}:${ref}`])
    } finally {
      core.endGroup()
    }
  }

  async function hasMergeBase(): Promise<boolean> {
    return (await exec('git', ['merge-base', ref, 'HEAD'], {ignoreReturnCode: true})).code === 0
  }

  async function countCommits(): Promise<number> {
    return (await getNumberOfCommits('HEAD')) + (await getNumberOfCommits(ref))
  }

  core.startGroup(`Searching for merge-base with ${ref}`)
  // Fetch more commits until merge-base is found
  if (!(await hasMergeBase())) {
    let deepen = initialFetchDepth
    let lastCommitsCount = await countCommits()
    do {
      await exec('git', ['fetch', `--deepen=${deepen}`, '--no-tags'])
      const count = await countCommits()
      if (count <= lastCommitsCount) {
        core.info('No merge base found - all files will be listed as added')
        core.endGroup()
        return await listAllFilesAsAdded()
      }
      lastCommitsCount = count
      deepen = Math.min(deepen * 2, Number.MAX_SAFE_INTEGER)
    } while (!(await hasMergeBase()))
  }
  core.endGroup()

  // Get changes introduced on HEAD compared to ref
  core.startGroup(`Change detection ${ref}...HEAD`)
export async function getChangesOnHead(): Promise<File[]> {
  // Get current changes - both staged and unstaged
  core.startGroup(`Change detection on HEAD`)
  let output = ''
  try {
    // Three dots '...' change detection - finds merge-base and compares against it
    output = (await exec('git', ['diff', '--no-renames', '--name-status', '-z', `${ref}...HEAD`])).stdout
    output = (await getExecOutput('git', ['diff', '--no-renames', '--name-status', '-z', 'HEAD'])).stdout
  } finally {
    fixStdOutNullTermination()
    core.endGroup()
  }

  return parseGitDiffOutput(output)
}

export async function getChangesSinceMergeBase(base: string, head: string, initialFetchDepth: number): Promise<File[]> {
  let baseRef: string | undefined
  let headRef: string | undefined
  async function hasMergeBase(): Promise<boolean> {
    if (baseRef === undefined || headRef === undefined) {
      return false
    }
    return (await getExecOutput('git', ['merge-base', baseRef, headRef], {ignoreReturnCode: true})).exitCode === 0
  }

  let noMergeBase = false
  core.startGroup(`Searching for merge-base ${base}...${head}`)
  try {
    baseRef = await getLocalRef(base)
    headRef = await getLocalRef(head)
    if (!(await hasMergeBase())) {
      await getExecOutput('git', ['fetch', '--no-tags', `--depth=${initialFetchDepth}`, 'origin', base, head])
      if (baseRef === undefined || headRef === undefined) {
        baseRef = baseRef ?? (await getLocalRef(base))
        headRef = headRef ?? (await getLocalRef(head))
        if (baseRef === undefined || headRef === undefined) {
          await getExecOutput('git', ['fetch', '--tags', '--depth=1', 'origin', base, head], {
            ignoreReturnCode: true // returns exit code 1 if tags on remote were updated - we can safely ignore it
          })
          baseRef = baseRef ?? (await getLocalRef(base))
          headRef = headRef ?? (await getLocalRef(head))
          if (baseRef === undefined) {
            throw new Error(
              `Could not determine what is ${base} - fetch works but it's not a branch, tag or commit SHA`
            )
          }
          if (headRef === undefined) {
            throw new Error(
              `Could not determine what is ${head} - fetch works but it's not a branch, tag or commit SHA`
            )
          }
        }
      }

      let depth = initialFetchDepth
      let lastCommitCount = await getCommitCount()
      while (!(await hasMergeBase())) {
        depth = Math.min(depth * 2, Number.MAX_SAFE_INTEGER)
        await getExecOutput('git', ['fetch', `--deepen=${depth}`, 'origin', base, head])
        const commitCount = await getCommitCount()
        if (commitCount === lastCommitCount) {
          core.info('No more commits were fetched')
          core.info('Last attempt will be to fetch full history')
          await getExecOutput('git', ['fetch'])
          if (!(await hasMergeBase())) {
            noMergeBase = true
          }
          break
        }
        lastCommitCount = commitCount
      }
    }
  } finally {
    core.endGroup()
  }

  // Three dots '...' change detection - finds merge-base and compares against it
  let diffArg = `${baseRef}...${headRef}`
  if (noMergeBase) {
    core.warning('No merge base found - change detection will use direct <commit>..<commit> comparison')
    diffArg = `${baseRef}..${headRef}`
  }

  // Get changes introduced on ref compared to base
  core.startGroup(`Change detection ${diffArg}`)
  let output = ''
  try {
    output = (await getExecOutput('git', ['diff', '--no-renames', '--name-status', '-z', diffArg])).stdout
  } finally {
    fixStdOutNullTermination()
    core.endGroup()
@@ -107,7 +148,7 @@ export async function listAllFilesAsAdded(): Promise<File[]> {
  core.startGroup('Listing all files tracked by git')
  let output = ''
  try {
    output = (await exec('git', ['ls-files', '-z'])).stdout
    output = (await getExecOutput('git', ['ls-files', '-z'])).stdout
  } finally {
    fixStdOutNullTermination()
    core.endGroup()
@@ -123,19 +164,19 @@ export async function listAllFilesAsAdded(): Promise<File[]> {
}

export async function getCurrentRef(): Promise<string> {
  core.startGroup(`Determining current ref`)
  core.startGroup(`Get current git ref`)
  try {
    const branch = (await exec('git', ['branch', '--show-current'])).stdout.trim()
    const branch = (await getExecOutput('git', ['branch', '--show-current'])).stdout.trim()
    if (branch) {
      return branch
    }

    const describe = await exec('git', ['describe', '--tags', '--exact-match'], {ignoreReturnCode: true})
    if (describe.code === 0) {
    const describe = await getExecOutput('git', ['describe', '--tags', '--exact-match'], {ignoreReturnCode: true})
    if (describe.exitCode === 0) {
      return describe.stdout.trim()
    }

    return (await exec('git', ['rev-parse', 'HEAD'])).stdout.trim()
    return (await getExecOutput('git', ['rev-parse', HEAD])).stdout.trim()
  } finally {
    core.endGroup()
  }
@@ -153,21 +194,66 @@ export function getShortName(ref: string): string {
  return ref
}

export function isGitSha(ref: string): boolean {
  return /^[a-z0-9]{40}$/.test(ref)
}

async function hasCommit(ref: string): Promise<boolean> {
  core.startGroup(`Checking if commit for ${ref} is locally available`)
  return (await getExecOutput('git', ['cat-file', '-e', `${ref}^{commit}`], {ignoreReturnCode: true})).exitCode === 0
}

async function getCommitCount(): Promise<number> {
  const output = (await getExecOutput('git', ['rev-list', '--count', '--all'])).stdout
  const count = parseInt(output)
  return isNaN(count) ? 0 : count
}

async function getLocalRef(shortName: string): Promise<string | undefined> {
  if (isGitSha(shortName)) {
    return (await hasCommit(shortName)) ? shortName : undefined
  }

  const output = (await getExecOutput('git', ['show-ref', shortName], {ignoreReturnCode: true})).stdout
  const refs = output
    .split(/\r?\n/g)
    .map(l => l.match(/refs\/(?:(?:heads)|(?:tags)|(?:remotes\/origin))\/(.*)$/))
    .filter(match => match !== null && match[1] === shortName)
    .map(match => match?.[0] ?? '') // match can't be null here but compiler doesn't understand that

  if (refs.length === 0) {
    return undefined
  }

  const remoteRef = refs.find(ref => ref.startsWith('refs/remotes/origin/'))
  if (remoteRef) {
    return remoteRef
  }

  return refs[0]
}

async function ensureRefAvailable(name: string): Promise<string> {
  core.startGroup(`Ensuring ${name} is fetched from origin`)
  try {
    return (await exec('git', ['cat-file', '-e', `${ref}^{commit}`], {ignoreReturnCode: true})).code === 0
    let ref = await getLocalRef(name)
    if (ref === undefined) {
      await getExecOutput('git', ['fetch', '--depth=1', '--no-tags', 'origin', name])
      ref = await getLocalRef(name)
      if (ref === undefined) {
        await getExecOutput('git', ['fetch', '--depth=1', '--tags', 'origin', name])
        ref = await getLocalRef(name)
        if (ref === undefined) {
          throw new Error(`Could not determine what is ${name} - fetch works but it's not a branch, tag or commit SHA`)
        }
      }
    }

    return ref
  } finally {
    core.endGroup()
  }
}

async function getNumberOfCommits(ref: string): Promise<number> {
  const output = (await exec('git', ['rev-list', `--count`, ref])).stdout
  const count = parseInt(output)
  return isNaN(count) ? 0 : count
}

function fixStdOutNullTermination(): void {
  // Previous command uses NULL as delimiters and output is printed to stdout.
  // We have to make sure next thing written to stdout will start on new line.
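As a rough sketch of how these reworked helpers fit together (illustrative only; 'main' and the fetch depth of 100 are arbitrary example values, not defaults taken from the diff):

// Illustrative only: detect files changed on the current ref relative to a base branch.
import * as git from './git'

async function changedAgainstMain(): Promise<void> {
  const head = await git.getCurrentRef()
  // Fetches base and head as needed and deepens history until a merge-base is found.
  const files = await git.getChangesSinceMergeBase('main', head, 100)
  for (const file of files) {
    console.log(`${file.status}: ${file.filename}`)
  }
}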
16 src/list-format/csv-escape.ts (new file)
@@ -0,0 +1,16 @@
// Returns filename escaped for CSV
// Wraps file name into "..." only when it contains some potentially unsafe character
export function csvEscape(value: string): string {
  if (value === '') return value

  // Only safe characters
  if (/^[a-zA-Z0-9._+:@%/-]+$/m.test(value)) {
    return value
  }

  // https://tools.ietf.org/html/rfc4180
  // If double-quotes are used to enclose fields, then a double-quote
  // appearing inside a field must be escaped by preceding it with
  // another double quote
  return `"${value.replace(/"/g, '""')}"`
}
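Illustrative expected outputs for the escaping rules above (the file names are made up):

import {csvEscape} from './list-format/csv-escape'

csvEscape('src/index.ts') // -> src/index.ts (only safe characters, returned as-is)
csvEscape('docs/release notes.md') // -> "docs/release notes.md" (space triggers quoting)
csvEscape('weird"name.txt') // -> "weird""name.txt" (embedded quote doubled per RFC 4180)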
28 src/list-format/shell-escape.ts (new file)
@@ -0,0 +1,28 @@
// Backslash escape every character except small subset of definitely safe characters
export function backslashEscape(value: string): string {
  return value.replace(/([^a-zA-Z0-9,._+:@%/-])/gm, '\\$1')
}

// Returns filename escaped for usage as shell argument.
// Applies "human readable" approach with as few escaping applied as possible
export function shellEscape(value: string): string {
  if (value === '') return value

  // Only safe characters
  if (/^[a-zA-Z0-9,._+:@%/-]+$/m.test(value)) {
    return value
  }

  if (value.includes("'")) {
    // Only safe characters, single quotes and white-spaces
    if (/^[a-zA-Z0-9,._+:@%/'\s-]+$/m.test(value)) {
      return `"${value}"`
    }

    // Split by single quote and apply escaping recursively
    return value.split("'").map(shellEscape).join("\\'")
  }

  // Contains some unsafe characters but no single quote
  return `'${value}'`
}
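Illustrative expected outputs for the two helpers above (file names are made up):

import {backslashEscape, shellEscape} from './list-format/shell-escape'

backslashEscape('my file.txt') // -> my\ file.txt (every unsafe character backslash-escaped)
shellEscape('src/index.ts') // -> src/index.ts (safe characters only, returned as-is)
shellEscape('my file.txt') // -> 'my file.txt' (unsafe characters but no single quote)
shellEscape("it's here.txt") // -> "it's here.txt" (contains a single quote, otherwise safe characters and whitespace)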
247 src/main.ts
@@ -1,14 +1,23 @@
import * as fs from 'fs'
import * as core from '@actions/core'
import * as github from '@actions/github'
import {Webhooks} from '@octokit/webhooks'
import {GetResponseDataTypeFromEndpointMethod} from '@octokit/types'
import {PushEvent, PullRequestEvent} from '@octokit/webhooks-types'

import {Filter, FilterResults} from './filter'
import {
  isPredicateQuantifier,
  Filter,
  FilterConfig,
  FilterResults,
  PredicateQuantifier,
  SUPPORTED_PREDICATE_QUANTIFIERS
} from './filter'
import {File, ChangeStatus} from './file'
import * as git from './git'
import shellEscape from './shell-escape'
import {backslashEscape, shellEscape} from './list-format/shell-escape'
import {csvEscape} from './list-format/csv-escape'

type ExportFormat = 'none' | 'json' | 'shell'
type ExportFormat = 'none' | 'csv' | 'json' | 'shell' | 'escape'

async function run(): Promise<void> {
  try {
@@ -18,28 +27,39 @@ async function run(): Promise<void> {
    }

    const token = core.getInput('token', {required: false})
    const ref = core.getInput('ref', {required: false})
    const base = core.getInput('base', {required: false})
    const filtersInput = core.getInput('filters', {required: true})
    const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput
    const listFiles = core.getInput('list-files', {required: false}).toLowerCase() || 'none'
    const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth', {required: false})) || 10
    const predicateQuantifier = core.getInput('predicate-quantifier', {required: false}) || PredicateQuantifier.SOME

    if (!isExportFormat(listFiles)) {
      core.setFailed(`Input parameter 'list-files' is set to invalid value '${listFiles}'`)
      return
    }

    const filter = new Filter(filtersYaml)
    const files = await getChangedFiles(token, base, initialFetchDepth)
    if (!isPredicateQuantifier(predicateQuantifier)) {
      const predicateQuantifierInvalidErrorMsg =
        `Input parameter 'predicate-quantifier' is set to invalid value ` +
        `'${predicateQuantifier}'. Valid values: ${SUPPORTED_PREDICATE_QUANTIFIERS.join(', ')}`
      throw new Error(predicateQuantifierInvalidErrorMsg)
    }
    const filterConfig: FilterConfig = {predicateQuantifier}

    const filter = new Filter(filtersYaml, filterConfig)
    const files = await getChangedFiles(token, base, ref, initialFetchDepth)
    core.info(`Detected ${files.length} changed files`)
    const results = filter.match(files)
    exportResults(results, listFiles)
  } catch (error) {
    core.setFailed(error.message)
    core.setFailed(getErrorMessage(error))
  }
}

function isPathInput(text: string): boolean {
  return !text.includes('\n')
  return !(text.includes('\n') || text.includes(':'))
}

function getConfigFileContent(configPath: string): string {
@@ -54,115 +74,168 @@ function getConfigFileContent(configPath: string): string {
  return fs.readFileSync(configPath, {encoding: 'utf8'})
}

async function getChangedFiles(token: string, base: string, initialFetchDepth: number): Promise<File[]> {
  if (github.context.eventName === 'pull_request' || github.context.eventName === 'pull_request_target') {
    const pr = github.context.payload.pull_request as Webhooks.WebhookPayloadPullRequestPullRequest
async function getChangedFiles(token: string, base: string, ref: string, initialFetchDepth: number): Promise<File[]> {
  // if base is 'HEAD' only local uncommitted changes will be detected
  // This is the simplest case as we don't need to fetch more commits or evaluate current/before refs
  if (base === git.HEAD) {
    if (ref) {
      core.warning(`'ref' input parameter is ignored when 'base' is set to HEAD`)
    }
    return await git.getChangesOnHead()
  }

  const prEvents = ['pull_request', 'pull_request_review', 'pull_request_review_comment', 'pull_request_target']
  if (prEvents.includes(github.context.eventName)) {
    if (ref) {
      core.warning(`'ref' input parameter is ignored when 'base' is set to HEAD`)
    }
    if (base) {
      core.warning(`'base' input parameter is ignored when action is triggered by pull request event`)
    }
    const pr = github.context.payload.pull_request as PullRequestEvent
    if (token) {
      return await getChangedFilesFromApi(token, pr)
    }
    core.info('Github token is not available - changes will be detected from PRs merge commit')
    return await git.getChangesInLastCommit()
  } else if (github.context.eventName === 'push') {
    return getChangedFilesFromPush(base, initialFetchDepth)
    if (github.context.eventName === 'pull_request_target') {
      // pull_request_target is executed in context of base branch and GITHUB_SHA points to last commit in base branch
      // Therefor it's not possible to look at changes in last commit
      // At the same time we don't want to fetch any code from forked repository
      throw new Error(`'token' input parameter is required if action is triggered by 'pull_request_target' event`)
    }
    core.info('Github token is not available - changes will be detected using git diff')
    const baseSha = github.context.payload.pull_request?.base.sha
    const defaultBranch = github.context.payload.repository?.default_branch
    const currentRef = await git.getCurrentRef()
    return await git.getChanges(base || baseSha || defaultBranch, currentRef)
  } else {
    throw new Error('This action can be triggered only by pull_request, pull_request_target or push event')
    return getChangedFilesFromGit(base, ref, initialFetchDepth)
  }
}

async function getChangedFilesFromPush(base: string, initialFetchDepth: number): Promise<File[]> {
  const push = github.context.payload as Webhooks.WebhookPayloadPush
  const defaultRef = push.repository?.default_branch
async function getChangedFilesFromGit(base: string, head: string, initialFetchDepth: number): Promise<File[]> {
  const defaultBranch = github.context.payload.repository?.default_branch

  const pushRef =
    git.getShortName(push.ref) ||
    (core.warning(`'ref' field is missing in PUSH event payload - using current branch, tag or commit SHA`),
    await git.getCurrentRef())
  const beforeSha = github.context.eventName === 'push' ? (github.context.payload as PushEvent).before : null

  const baseRef = git.getShortName(base) || defaultRef
  if (!baseRef) {
  const currentRef = await git.getCurrentRef()

  head = git.getShortName(head || github.context.ref || currentRef)
  base = git.getShortName(base || defaultBranch)

  if (!head) {
    throw new Error(
      "This action requires 'head' input to be configured, 'ref' to be set in the event payload or branch/tag checked out in current git repository"
    )
  }

  if (!base) {
    throw new Error(
      "This action requires 'base' input to be configured or 'repository.default_branch' to be set in the event payload"
    )
  }

  // If base references same branch it was pushed to,
  // we will do comparison against the previously pushed commit
  if (baseRef === pushRef) {
    if (!push.before) {
      core.warning(`'before' field is missing in PUSH event payload - changes will be detected from last commit`)
  const isBaseSha = git.isGitSha(base)
  const isBaseSameAsHead = base === head

  // If base is commit SHA we will do comparison against the referenced commit
  // Or if base references same branch it was pushed to, we will do comparison against the previously pushed commit
  if (isBaseSha || isBaseSameAsHead) {
    const baseSha = isBaseSha ? base : beforeSha
    if (!baseSha) {
      core.warning(`'before' field is missing in event payload - changes will be detected from last commit`)
      if (head !== currentRef) {
        core.warning(`Ref ${head} is not checked out - results might be incorrect!`)
      }
      return await git.getChangesInLastCommit()
    }

    // If there is no previously pushed commit,
    // we will do comparison against the default branch or return all as added
    if (push.before === git.NULL_SHA) {
      if (defaultRef && baseRef !== defaultRef) {
        core.info(`First push of a branch detected - changes will be detected against the default branch ${defaultRef}`)
        return await git.getChangesSinceMergeBase(defaultRef, initialFetchDepth)
    if (baseSha === git.NULL_SHA) {
      if (defaultBranch && base !== defaultBranch) {
        core.info(
          `First push of a branch detected - changes will be detected against the default branch ${defaultBranch}`
        )
        return await git.getChangesSinceMergeBase(defaultBranch, head, initialFetchDepth)
      } else {
        core.info('Initial push detected - all files will be listed as added')
        if (head !== currentRef) {
          core.warning(`Ref ${head} is not checked out - results might be incorrect!`)
        }
        return await git.listAllFilesAsAdded()
      }
    }

    core.info(`Changes will be detected against the last previously pushed commit on same branch (${pushRef})`)
    return await git.getChanges(push.before)
    core.info(`Changes will be detected between ${baseSha} and ${head}`)
    return await git.getChanges(baseSha, head)
  }

  // Changes introduced by current branch against the base branch
  core.info(`Changes will be detected against the branch ${baseRef}`)
  return await git.getChangesSinceMergeBase(baseRef, initialFetchDepth)
  core.info(`Changes will be detected between ${base} and ${head}`)
  return await git.getChangesSinceMergeBase(base, head, initialFetchDepth)
}

// Uses github REST api to get list of files changed in PR
async function getChangedFilesFromApi(
  token: string,
  pullRequest: Webhooks.WebhookPayloadPullRequestPullRequest
): Promise<File[]> {
  core.info(`Fetching list of changed files for PR#${pullRequest.number} from Github API`)
  const client = new github.GitHub(token)
  const pageSize = 100
  const files: File[] = []
  for (let page = 0; page * pageSize < pullRequest.changed_files; page++) {
    const response = await client.pulls.listFiles({
      owner: github.context.repo.owner,
      repo: github.context.repo.repo,
      pull_number: pullRequest.number,
      page,
      per_page: pageSize
    })
    for (const row of response.data) {
      // There's no obvious use-case for detection of renames
      // Therefore we treat it as if rename detection in git diff was turned off.
      // Rename is replaced by delete of original filename and add of new filename
      if (row.status === ChangeStatus.Renamed) {
        files.push({
          filename: row.filename,
          status: ChangeStatus.Added
        })
        files.push({
          // 'previous_filename' for some unknown reason isn't in the type definition or documentation
          filename: (<any>row).previous_filename as string,
          status: ChangeStatus.Deleted
        })
      } else {
        files.push({
          filename: row.filename,
          status: row.status as ChangeStatus
        })
async function getChangedFilesFromApi(token: string, pullRequest: PullRequestEvent): Promise<File[]> {
  core.startGroup(`Fetching list of changed files for PR#${pullRequest.number} from Github API`)
  try {
    const client = github.getOctokit(token)
    const per_page = 100
    const files: File[] = []

    core.info(`Invoking listFiles(pull_number: ${pullRequest.number}, per_page: ${per_page})`)
    for await (const response of client.paginate.iterator(
      client.rest.pulls.listFiles.endpoint.merge({
        owner: github.context.repo.owner,
        repo: github.context.repo.repo,
        pull_number: pullRequest.number,
        per_page
      })
    )) {
      if (response.status !== 200) {
        throw new Error(`Fetching list of changed files from GitHub API failed with error code ${response.status}`)
      }
      core.info(`Received ${response.data.length} items`)

      for (const row of response.data as GetResponseDataTypeFromEndpointMethod<typeof client.rest.pulls.listFiles>) {
        core.info(`[${row.status}] ${row.filename}`)
        // There's no obvious use-case for detection of renames
        // Therefore we treat it as if rename detection in git diff was turned off.
        // Rename is replaced by delete of original filename and add of new filename
        if (row.status === ChangeStatus.Renamed) {
          files.push({
            filename: row.filename,
            status: ChangeStatus.Added
          })
          files.push({
            // 'previous_filename' for some unknown reason isn't in the type definition or documentation
            filename: (<any>row).previous_filename as string,
            status: ChangeStatus.Deleted
          })
        } else {
          // Github status and git status variants are same except for deleted files
          const status = row.status === 'removed' ? ChangeStatus.Deleted : (row.status as ChangeStatus)
          files.push({
            filename: row.filename,
            status
          })
        }
      }
    }
  }

  return files
    return files
  } finally {
    core.endGroup()
  }
}

function exportResults(results: FilterResults, format: ExportFormat): void {
  core.info('Results:')
  const changes = []
  for (const [key, files] of Object.entries(results)) {
    const value = files.length > 0
    core.startGroup(`Filter ${key} = ${value}`)
    if (files.length > 0) {
      changes.push(key)
      core.info('Matching files:')
      for (const file of files) {
        core.info(`${file.filename} [${file.status}]`)
@@ -172,19 +245,32 @@ function exportResults(results: FilterResults, format: ExportFormat): void {
    }

    core.setOutput(key, value)
    core.setOutput(`${key}_count`, files.length)
    if (format !== 'none') {
      const filesValue = serializeExport(files, format)
      core.setOutput(`${key}_files`, filesValue)
    }
    core.endGroup()
  }

  if (results['changes'] === undefined) {
    const changesJson = JSON.stringify(changes)
    core.info(`Changes output set to ${changesJson}`)
    core.setOutput('changes', changesJson)
  } else {
    core.info('Cannot set changes output variable - name already used by filter output')
  }
  core.endGroup()
}

function serializeExport(files: File[], format: ExportFormat): string {
  const fileNames = files.map(file => file.filename)
  switch (format) {
    case 'csv':
      return fileNames.map(csvEscape).join(',')
    case 'json':
      return JSON.stringify(fileNames)
    case 'escape':
      return fileNames.map(backslashEscape).join(' ')
    case 'shell':
      return fileNames.map(shellEscape).join(' ')
    default:
@@ -193,7 +279,12 @@ function serializeExport(files: File[], format: ExportFormat): string {
}

function isExportFormat(value: string): value is ExportFormat {
  return value === 'none' || value === 'shell' || value === 'json'
  return ['none', 'csv', 'shell', 'json', 'escape'].includes(value)
}

function getErrorMessage(error: unknown): string {
  if (error instanceof Error) return error.message
  return String(error)
}

run()
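For comparison, an illustrative sketch (not part of the diff; the file names are made up) of what each 'list-files' format produces for the same two files, mirroring serializeExport() above:

import {backslashEscape, shellEscape} from './list-format/shell-escape'
import {csvEscape} from './list-format/csv-escape'

const names = ['path/to/file1.txt', "path/to/file'2.txt"]
console.log(JSON.stringify(names)) // json:   ["path/to/file1.txt","path/to/file'2.txt"]
console.log(names.map(csvEscape).join(',')) // csv:    path/to/file1.txt,"path/to/file'2.txt"
console.log(names.map(shellEscape).join(' ')) // shell:  path/to/file1.txt "path/to/file'2.txt"
console.log(names.map(backslashEscape).join(' ')) // escape: path/to/file1.txt path/to/file\'2.txt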
src/shell-escape.ts (deleted)
@@ -1,7 +0,0 @@
// Credits to https://github.com/xxorax/node-shell-escape

export default function shellEscape(value: string): string {
  return `'${value.replace(/'/g, "'\\''")}'`
    .replace(/^(?:'')+/g, '') // unduplicate single-quote at the beginning
    .replace(/\\'''/g, "\\'") // remove non-escaped single-quote if there are enclosed between 2 escaped
}