mirror of
https://gitea.com/actions/dorny-paths-filter.git
synced 2025-12-25 16:38:20 +00:00
Compare commits
67 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
78ab00f877 | ||
|
|
f3d3fc848b | ||
|
|
aae9c5619e | ||
|
|
e59743163d | ||
|
|
78b1672eeb | ||
|
|
f1c461fccf | ||
|
|
0b18612ac3 | ||
|
|
87375a4a68 | ||
|
|
24a74833cc | ||
|
|
d0507d9a8a | ||
|
|
f093f3520b | ||
|
|
8d029eb508 | ||
|
|
07d6abdb9c | ||
|
|
e8f370c197 | ||
|
|
58ed00ec48 | ||
|
|
02eeef4973 | ||
|
|
37a6d38b2d | ||
|
|
208adf42c8 | ||
|
|
ad1ae68cd0 | ||
|
|
5d414b88ab | ||
|
|
a6989ad592 | ||
|
|
6d8169070c | ||
|
|
3d4a25053b | ||
|
|
e59197f91b | ||
|
|
ca8fa4002c | ||
|
|
c64be944bf | ||
|
|
138368ff4f | ||
|
|
a301a0ad83 | ||
|
|
0c0d1a854a | ||
|
|
0aa1597c2b | ||
|
|
46d2898cef | ||
|
|
c90ecaa5a1 | ||
|
|
49abb091ed | ||
|
|
8801c887e9 | ||
|
|
68792bf56a | ||
|
|
31c576896e | ||
|
|
3be8c93277 | ||
|
|
1cdd3bbdf6 | ||
|
|
e5b96fe4da | ||
|
|
a339507743 | ||
|
|
febe8330ca | ||
|
|
b5fa2d5c02 | ||
|
|
e2bed85912 | ||
|
|
7c0f15b688 | ||
|
|
cbc3287af3 | ||
|
|
a2730492f0 | ||
|
|
c2766acabb | ||
|
|
363576b9ea | ||
|
|
b1a097ef7b | ||
|
|
2c79a825c0 | ||
|
|
4e7fcc37b4 | ||
|
|
c506bed1ae | ||
|
|
9b7572ffb2 | ||
|
|
9e8c9af501 | ||
|
|
84e1697bff | ||
|
|
e4d886f503 | ||
|
|
ada1eee648 | ||
|
|
44ac6d8e25 | ||
|
|
3c5b7d242c | ||
|
|
eb75a1edc1 | ||
|
|
181b35e268 | ||
|
|
1934d574ce | ||
|
|
d599443ba5 | ||
|
|
eb8fe2c24b | ||
|
|
dec8b8030e | ||
|
|
785a14adbe | ||
|
|
e84bc6af29 |
1
.github/workflows/build.yml
vendored
1
.github/workflows/build.yml
vendored
@@ -1,6 +1,7 @@
|
||||
name: "Build"
|
||||
on:
|
||||
push:
|
||||
paths-ignore: [ '*.md' ]
|
||||
branches:
|
||||
- master
|
||||
|
||||
|
||||
35
.github/workflows/pull-request-verification.yml
vendored
35
.github/workflows/pull-request-verification.yml
vendored
@@ -1,9 +1,10 @@
|
||||
name: "Pull Request Verification"
|
||||
on:
|
||||
pull_request:
|
||||
paths-ignore: [ '*.md' ]
|
||||
branches:
|
||||
- master
|
||||
- develop
|
||||
- '**'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@@ -29,6 +30,9 @@ jobs:
|
||||
- name: filter-test
|
||||
if: steps.filter.outputs.any != 'true' || steps.filter.outputs.error == 'true'
|
||||
run: exit 1
|
||||
- name: changes-test
|
||||
if: contains(fromJSON(steps.filter.outputs.changes), 'error') || !contains(fromJSON(steps.filter.outputs.changes), 'any')
|
||||
run: exit 1
|
||||
|
||||
test-external:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -71,6 +75,26 @@ jobs:
|
||||
if: steps.filter.outputs.any != 'true' || steps.filter.outputs.error == 'true'
|
||||
run: exit 1
|
||||
|
||||
test-local-changes:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: echo "NEW FILE" > local
|
||||
- run: git add local
|
||||
- uses: ./
|
||||
id: filter
|
||||
with:
|
||||
base: HEAD
|
||||
filters: |
|
||||
local:
|
||||
- local
|
||||
- name: filter-test
|
||||
if: steps.filter.outputs.local != 'true'
|
||||
run: exit 1
|
||||
- name: count-test
|
||||
if: steps.filter.outputs.local_count != 1
|
||||
run: exit 1
|
||||
|
||||
test-change-type:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
@@ -102,15 +126,12 @@ jobs:
|
||||
- name: Print 'deleted_files'
|
||||
run: echo ${{steps.filter.outputs.deleted_files}}
|
||||
- name: filter-test
|
||||
# only single quotes are supported in GH action literal
|
||||
# single quote needs to be escaped with single quote
|
||||
# '''add.txt''' resolves to string 'add.txt'
|
||||
if: |
|
||||
steps.filter.outputs.added != 'true'
|
||||
|| steps.filter.outputs.deleted != 'true'
|
||||
|| steps.filter.outputs.modified != 'true'
|
||||
|| steps.filter.outputs.any != 'true'
|
||||
|| steps.filter.outputs.added_files != '''add.txt'''
|
||||
|| steps.filter.outputs.modified_files != '''LICENSE'''
|
||||
|| steps.filter.outputs.deleted_files != '''README.md'''
|
||||
|| steps.filter.outputs.added_files != 'add.txt'
|
||||
|| steps.filter.outputs.modified_files != 'LICENSE'
|
||||
|| steps.filter.outputs.deleted_files != 'README.md'
|
||||
run: exit 1
|
||||
|
||||
34
CHANGELOG.md
34
CHANGELOG.md
@@ -1,5 +1,39 @@
|
||||
# Changelog
|
||||
|
||||
## v2.10.1
|
||||
- [Improve robustness of change detection](https://github.com/dorny/paths-filter/pull/85)
|
||||
|
||||
## v2.10.0
|
||||
- [Add ref input parameter](https://github.com/dorny/paths-filter/pull/82)
|
||||
- [Fix change detection in PR when pullRequest.changed_files is incorrect](https://github.com/dorny/paths-filter/pull/83)
|
||||
|
||||
## v2.9.3
|
||||
- [Fix change detection when base is a tag](https://github.com/dorny/paths-filter/pull/78)
|
||||
|
||||
## v2.9.2
|
||||
- [Fix fetching git history](https://github.com/dorny/paths-filter/pull/75)
|
||||
|
||||
## v2.9.1
|
||||
- [Fix fetching git history + fallback to unshallow repo](https://github.com/dorny/paths-filter/pull/74)
|
||||
|
||||
## v2.9.0
|
||||
- [Add list-files: csv format](https://github.com/dorny/paths-filter/pull/68)
|
||||
|
||||
## v2.8.0
|
||||
- [Add count output variable](https://github.com/dorny/paths-filter/pull/65)
|
||||
- [Fix log grouping of changes](https://github.com/dorny/paths-filter/pull/61)
|
||||
|
||||
## v2.7.0
|
||||
- [Add "changes" output variable to support matrix job configuration](https://github.com/dorny/paths-filter/pull/59)
|
||||
- [Improved listing of matching files with `list-files: shell` and `list-files: escape` options](https://github.com/dorny/paths-filter/pull/58)
|
||||
|
||||
## v2.6.0
|
||||
- [Support local changes](https://github.com/dorny/paths-filter/pull/53)
|
||||
|
||||
## v2.5.3
|
||||
- [Fixed mapping of removed/deleted change status from github API](https://github.com/dorny/paths-filter/pull/51)
|
||||
- [Fixed retrieval of all changes via Github API when there are 100+ changes](https://github.com/dorny/paths-filter/pull/50)
|
||||
|
||||
## v2.5.2
|
||||
- [Add support for multiple patterns when using file status](https://github.com/dorny/paths-filter/pull/48)
|
||||
- [Use picomatch directly instead of micromatch wrapper](https://github.com/dorny/paths-filter/pull/49)
|
||||
|
||||
190
README.md
190
README.md
@@ -1,11 +1,11 @@
|
||||
# Paths Changes Filter
|
||||
|
||||
This [Github Action](https://github.com/features/actions) enables conditional execution of workflow steps and jobs,
|
||||
based on the files modified by pull request, feature branch or in pushed commits.
|
||||
[Github Action](https://github.com/features/actions) that enables conditional execution of workflow steps and jobs, based on the files modified by pull request, on a feature
|
||||
branch, or by the recently pushed commits.
|
||||
|
||||
It saves time and resources especially in monorepo setups, where you can run slow tasks (e.g. integration tests or deployments) only for changed components.
|
||||
Run slow tasks like integration tests or deployments only for changed components. It saves time and resources, especially in monorepo setups.
|
||||
Github workflows built-in [path filters](https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#onpushpull_requestpaths)
|
||||
doesn't allow this because they doesn't work on a level of individual jobs or steps.
|
||||
don't allow this because they don't work on a level of individual jobs or steps.
|
||||
|
||||
**Real world usage examples:**
|
||||
- [sentry.io](https://sentry.io/) - [backend-test-py3.6.yml](https://github.com/getsentry/sentry/blob/ca0e43dc5602a9ab2e06d3f6397cc48fb5a78541/.github/workflows/backend-test-py3.6.yml#L32)
|
||||
@@ -17,24 +17,28 @@ doesn't allow this because they doesn't work on a level of individual jobs or st
|
||||
- Workflow triggered by **[pull_request](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#pull_request)**
|
||||
or **[pull_request_target](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#pull_request_target)** event
|
||||
- Changes are detected against the pull request base branch
|
||||
- Uses Github REST API to fetch list of modified files
|
||||
- Uses Github REST API to fetch a list of modified files
|
||||
- **Feature branches:**
|
||||
- Workflow triggered by **[push](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#push)**
|
||||
or any other **[event](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows)**
|
||||
- The `base` input parameter must not be the same as the branch that triggered the workflow
|
||||
- Changes are detected against the merge-base with configured base branch or default branch
|
||||
- Changes are detected against the merge-base with the configured base branch or the default branch
|
||||
- Uses git commands to detect changes - repository must be already [checked out](https://github.com/actions/checkout)
|
||||
- **Master, Release or other long-lived branches:**
|
||||
- **Master, Release, or other long-lived branches:**
|
||||
- Workflow triggered by **[push](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#push)** event
|
||||
when `base` input parameter is same as the branch that triggered the workflow:
|
||||
when `base` input parameter is the same as the branch that triggered the workflow:
|
||||
- Changes are detected against the most recent commit on the same branch before the push
|
||||
- Workflow triggered by any other **[event](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows)**
|
||||
when `base` input parameter is commit SHA:
|
||||
- Changes are detected against the provided `base` commit
|
||||
- Workflow triggered by any other **[event](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows)**
|
||||
when `base` input parameter is same as the branch that triggered the workflow:
|
||||
- Changes are detected from last commit
|
||||
when `base` input parameter is the same as the branch that triggered the workflow:
|
||||
- Changes are detected from the last commit
|
||||
- Uses git commands to detect changes - repository must be already [checked out](https://github.com/actions/checkout)
|
||||
- **Local changes**
|
||||
- Workflow triggered by any event when `base` input parameter is set to `HEAD`
|
||||
- Changes are detected against the current HEAD
|
||||
- Untracked files are ignored
|
||||
|
||||
## Example
|
||||
```yaml
|
||||
@@ -53,27 +57,22 @@ For more scenarios see [examples](#examples) section.
|
||||
|
||||
## Notes:
|
||||
- Paths expressions are evaluated using [picomatch](https://github.com/micromatch/picomatch) library.
|
||||
Documentation for path expression format can be found on project github page.
|
||||
- Micromatch [dot](https://github.com/micromatch/picomatch#options) option is set to true.
|
||||
Globbing will match also paths where file or folder name starts with a dot.
|
||||
- It's recommended to quote your path expressions with `'` or `"`. Otherwise you will get an error if it starts with `*`.
|
||||
Documentation for path expression format can be found on the project GitHub page.
|
||||
- Picomatch [dot](https://github.com/micromatch/picomatch#options) option is set to true.
|
||||
Globbing will also match paths where file or folder name starts with a dot.
|
||||
- It's recommended to quote your path expressions with `'` or `"`. Otherwise, you will get an error if it starts with `*`.
|
||||
- Local execution with [act](https://github.com/nektos/act) works only with alternative runner image. Default runner doesn't have `git` binary.
|
||||
- Use: `act -P ubuntu-latest=nektos/act-environments-ubuntu:18.04`
|
||||
|
||||
|
||||
# What's New
|
||||
- Add `ref` input parameter
|
||||
- Add `list-files: csv` format
|
||||
- Configure matrix job to run for each folder with changes using `changes` output
|
||||
- Improved listing of matching files with `list-files: shell` and `list-files: escape` options
|
||||
- Paths expressions are now evaluated using [picomatch](https://github.com/micromatch/picomatch) library
|
||||
- Support workflows triggered by any event
|
||||
- Fixed compatibility with older (<2.23) versions of git
|
||||
- Support for tag pushes and tags as a base reference
|
||||
- Fixes for various edge cases when event payload is incomplete
|
||||
- Supports local execution with [act](https://github.com/nektos/act)
|
||||
- Fixed behavior of feature branch workflow:
|
||||
- Detects only changes introduced by feature branch. Later modifications on base branch are ignored
|
||||
- Filter by type of file change:
|
||||
- Optionally consider if file was added, modified or deleted
|
||||
|
||||
For more information see [CHANGELOG](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
|
||||
For more information, see [CHANGELOG](https://github.com/dorny/paths-filter/blob/master/CHANGELOG.md)
|
||||
|
||||
# Usage
|
||||
|
||||
@@ -81,70 +80,81 @@ For more information see [CHANGELOG](https://github.com/actions/checkout/blob/ma
|
||||
- uses: dorny/paths-filter@v2
|
||||
with:
|
||||
# Defines filters applied to detected changed files.
|
||||
# Each filter has a name and list of rules.
|
||||
# Each filter has a name and a list of rules.
|
||||
# Rule is a glob expression - paths of all changed
|
||||
# files are matched against it.
|
||||
# Rule can optionally specify if the file
|
||||
# should be added, modified or deleted.
|
||||
# For each filter there will be corresponding output variable to
|
||||
# should be added, modified, or deleted.
|
||||
# For each filter, there will be a corresponding output variable to
|
||||
# indicate if there's a changed file matching any of the rules.
|
||||
# Optionally there can be a second output variable
|
||||
# Optionally, there can be a second output variable
|
||||
# set to list of all files matching the filter.
|
||||
# Filters can be provided inline as a string (containing valid YAML document)
|
||||
# or as a relative path to separate file (e.g.: .github/filters.yaml).
|
||||
# Multiline string is evaluated as embedded filter definition,
|
||||
# single line string is evaluated as relative path to separate file.
|
||||
# Filters can be provided inline as a string (containing valid YAML document),
|
||||
# or as a relative path to a file (e.g.: .github/filters.yaml).
|
||||
# Filters syntax is documented by example - see examples section.
|
||||
filters: ''
|
||||
|
||||
# Branch, tag or commit SHA against which the changes will be detected.
|
||||
# If it references same branch it was pushed to,
|
||||
# Branch, tag, or commit SHA against which the changes will be detected.
|
||||
# If it references the same branch it was pushed to,
|
||||
# changes are detected against the most recent commit before the push.
|
||||
# Otherwise it uses git merge-base to find best common ancestor between
|
||||
# Otherwise, it uses git merge-base to find the best common ancestor between
|
||||
# current branch (HEAD) and base.
|
||||
# When merge-base is found, it's used for change detection - only changes
|
||||
# introduced by current branch are considered.
|
||||
# introduced by the current branch are considered.
|
||||
# All files are considered as added if there is no common ancestor with
|
||||
# base branch or no previous commit.
|
||||
# This option is ignored if action is triggered by pull_request event.
|
||||
# Default: repository default branch (e.g. master)
|
||||
base: ''
|
||||
|
||||
# How many commits are initially fetched from base branch.
|
||||
# Git reference (e.g. branch name) from which the changes will be detected.
|
||||
# Useful when workflow can be triggered only on the default branch (e.g. repository_dispatch event)
|
||||
# but you want to get changes on a different branch.
|
||||
# This option is ignored if action is triggered by pull_request event.
|
||||
# default: ${{ github.ref }}
|
||||
ref:
|
||||
|
||||
# How many commits are initially fetched from the base branch.
|
||||
# If needed, each subsequent fetch doubles the
|
||||
# previously requested number of commits until the merge-base
|
||||
# is found or there are no more commits in the history.
|
||||
# is found, or there are no more commits in the history.
|
||||
# This option takes effect only when changes are detected
|
||||
# using git against base branch (feature branch workflow).
|
||||
# Default: 20
|
||||
# Default: 100
|
||||
initial-fetch-depth: ''
|
||||
|
||||
# Enables listing of files matching the filter:
|
||||
# 'none' - Disables listing of matching files (default).
|
||||
# 'json' - Matching files paths are formatted as JSON array.
|
||||
# 'shell' - Matching files paths are escaped and space-delimited.
|
||||
# Output is usable as command line argument list in linux shell.
|
||||
# 'csv' - Coma separated list of filenames.
|
||||
# If needed, it uses double quotes to wrap filename with unsafe characters.
|
||||
# 'json' - File paths are formatted as JSON array.
|
||||
# 'shell' - Space delimited list usable as command-line argument list in Linux shell.
|
||||
# If needed, it uses single or double quotes to wrap filename with unsafe characters.
|
||||
# 'escape'- Space delimited list usable as command-line argument list in Linux shell.
|
||||
# Backslash escapes every potentially unsafe character.
|
||||
# Default: none
|
||||
list-files: ''
|
||||
|
||||
# Relative path under $GITHUB_WORKSPACE where the repository was checked out.
|
||||
working-directory: ''
|
||||
|
||||
# Personal access token used to fetch list of changed files
|
||||
# Personal access token used to fetch a list of changed files
|
||||
# from Github REST API.
|
||||
# It's used only if action is triggered by pull request event.
|
||||
# It's only used if action is triggered by a pull request event.
|
||||
# Github token from workflow context is used as default value.
|
||||
# If empty string is provided, action falls back to detect
|
||||
# If an empty string is provided, the action falls back to detect
|
||||
# changes using git commands.
|
||||
# Default: ${{ github.token }}
|
||||
token: ''
|
||||
```
|
||||
|
||||
## Outputs
|
||||
- For each filter it sets output variable named by the filter to the text:
|
||||
- For each filter, it sets output variable named by the filter to the text:
|
||||
- `'true'` - if **any** of changed files matches any of filter rules
|
||||
- `'false'` - if **none** of changed files matches any of filter rules
|
||||
- If enabled, for each filter it sets output variable with name `${FILTER_NAME}_files`. It will contain list of all files matching the filter.
|
||||
- For each filter, it sets an output variable with the name `${FILTER_NAME}_count` to the count of matching files.
|
||||
- If enabled, for each filter it sets an output variable with the name `${FILTER_NAME}_files`. It will contain a list of all files matching the filter.
|
||||
- `changes` - JSON array with names of all filters matching any of the changed files.
|
||||
|
||||
# Examples
|
||||
|
||||
@@ -228,6 +238,41 @@ jobs:
|
||||
```
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Use change detection to configure matrix job</summary>
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
# JOB to run change detection
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
# Expose matched filters as job 'packages' output variable
|
||||
packages: ${{ steps.filter.outputs.changes }}
|
||||
steps:
|
||||
# For pull requests it's not necessary to checkout the code
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
filters: |
|
||||
package1: src/package1
|
||||
package2: src/package2
|
||||
|
||||
# JOB to build and test each of modified packages
|
||||
build:
|
||||
needs: changes
|
||||
strategy:
|
||||
matrix:
|
||||
# Parse JSON array containing names of all filters matching any of changed files
|
||||
# e.g. ['package1', 'package2'] if both package folders contains changes
|
||||
package: ${{ fromJSON(needs.changes.outputs.packages) }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- ...
|
||||
```
|
||||
</details>
|
||||
|
||||
## Change detection workflows
|
||||
|
||||
<details>
|
||||
@@ -236,7 +281,7 @@ jobs:
|
||||
```yaml
|
||||
on:
|
||||
pull_request:
|
||||
branches: # PRs to following branches will trigger the workflow
|
||||
branches: # PRs to the following branches will trigger the workflow
|
||||
- master
|
||||
- develop
|
||||
jobs:
|
||||
@@ -282,7 +327,7 @@ jobs:
|
||||
```yaml
|
||||
on:
|
||||
push:
|
||||
branches: # Push to following branches will trigger the workflow
|
||||
branches: # Push to the following branches will trigger the workflow
|
||||
- master
|
||||
- develop
|
||||
- release/**
|
||||
@@ -294,8 +339,8 @@ jobs:
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
# Use context to get branch where commits were pushed.
|
||||
# If there is only one long lived branch (e.g. master),
|
||||
# Use context to get the branch where commits were pushed.
|
||||
# If there is only one long-lived branch (e.g. master),
|
||||
# you can specify it directly.
|
||||
# If it's not configured, the repository default branch is used.
|
||||
base: ${{ github.ref }}
|
||||
@@ -303,6 +348,35 @@ jobs:
|
||||
```
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Local changes:</b> Detect staged and unstaged local changes</summary>
|
||||
|
||||
```yaml
|
||||
on:
|
||||
push:
|
||||
branches: # Push to following branches will trigger the workflow
|
||||
- master
|
||||
- develop
|
||||
- release/**
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
# Some action that modifies files tracked by git (e.g. code linter)
|
||||
- uses: johndoe/some-action@v1
|
||||
|
||||
# Filter to detect which files were modified
|
||||
# Changes could be, for example, automatically committed
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
base: HEAD
|
||||
filters: ... # Configure your filters
|
||||
```
|
||||
</details>
|
||||
|
||||
## Advanced options
|
||||
|
||||
<details>
|
||||
@@ -345,10 +419,10 @@ jobs:
|
||||
id: filter
|
||||
with:
|
||||
# Changed file can be 'added', 'modified', or 'deleted'.
|
||||
# By default the type of change is not considered.
|
||||
# Optionally it's possible to specify it using nested
|
||||
# dictionary, where type(s) of change composes the key.
|
||||
# Multiple change types can be specified using `|` as delimiter.
|
||||
# By default, the type of change is not considered.
|
||||
# Optionally, it's possible to specify it using nested
|
||||
# dictionary, where the type of change composes the key.
|
||||
# Multiple change types can be specified using `|` as the delimiter.
|
||||
filters: |
|
||||
shared: &shared
|
||||
- common/**
|
||||
@@ -375,7 +449,7 @@ jobs:
|
||||
# Enable listing of files matching each filter.
|
||||
# Paths to files will be available in `${FILTER_NAME}_files` output variable.
|
||||
# Paths will be escaped and space-delimited.
|
||||
# Output is usable as command line argument list in linux shell
|
||||
# Output is usable as command-line argument list in Linux shell
|
||||
list-files: shell
|
||||
|
||||
# In this example changed files will be checked by linter.
|
||||
@@ -402,7 +476,7 @@ jobs:
|
||||
# Paths will be formatted as JSON array
|
||||
list-files: json
|
||||
|
||||
# In this example all changed files are passed to following action to do
|
||||
# In this example all changed files are passed to the following action to do
|
||||
# some custom processing.
|
||||
filters: |
|
||||
changed:
|
||||
@@ -414,6 +488,8 @@ jobs:
|
||||
```
|
||||
</details>
|
||||
|
||||
# See also
|
||||
- [test-reporter](https://github.com/dorny/test-reporter) - Displays test results from popular testing frameworks directly in GitHub
|
||||
|
||||
# License
|
||||
|
||||
|
||||
23
__tests__/csv-escape.test.ts
Normal file
23
__tests__/csv-escape.test.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import {csvEscape} from '../src/list-format/csv-escape'
|
||||
|
||||
describe('csvEscape() backslash escapes every character except subset of definitely safe characters', () => {
|
||||
test('simple filename should not be modified', () => {
|
||||
expect(csvEscape('file.txt')).toBe('file.txt')
|
||||
})
|
||||
|
||||
test('directory separator should be preserved and not escaped', () => {
|
||||
expect(csvEscape('path/to/file.txt')).toBe('path/to/file.txt')
|
||||
})
|
||||
|
||||
test('filename with spaces should be quoted', () => {
|
||||
expect(csvEscape('file with space')).toBe('"file with space"')
|
||||
})
|
||||
|
||||
test('filename with "," should be quoted', () => {
|
||||
expect(csvEscape('file, with coma')).toBe('"file, with coma"')
|
||||
})
|
||||
|
||||
test('Double quote should be escaped by another double quote', () => {
|
||||
expect(csvEscape('file " with double quote')).toBe('"file "" with double quote"')
|
||||
})
|
||||
})
|
||||
@@ -1,16 +1,57 @@
|
||||
import shellEscape from '../src/shell-escape'
|
||||
import {backslashEscape, shellEscape} from '../src/list-format/shell-escape'
|
||||
|
||||
test('simple path escaped', () => {
|
||||
expect(shellEscape('file')).toBe("'file'")
|
||||
describe('escape() backslash escapes every character except subset of definitely safe characters', () => {
|
||||
test('simple filename should not be modified', () => {
|
||||
expect(backslashEscape('file.txt')).toBe('file.txt')
|
||||
})
|
||||
|
||||
test('directory separator should be preserved and not escaped', () => {
|
||||
expect(backslashEscape('path/to/file.txt')).toBe('path/to/file.txt')
|
||||
})
|
||||
|
||||
test('spaces should be escaped with backslash', () => {
|
||||
expect(backslashEscape('file with space')).toBe('file\\ with\\ space')
|
||||
})
|
||||
|
||||
test('quotes should be escaped with backslash', () => {
|
||||
expect(backslashEscape('file\'with quote"')).toBe('file\\\'with\\ quote\\"')
|
||||
})
|
||||
|
||||
test('$variables should be escaped', () => {
|
||||
expect(backslashEscape('$var')).toBe('\\$var')
|
||||
})
|
||||
})
|
||||
|
||||
test('path with space is wrapped with single quotes', () => {
|
||||
expect(shellEscape('file with space')).toBe("'file with space'")
|
||||
})
|
||||
describe('shellEscape() returns human readable filenames with as few escaping applied as possible', () => {
|
||||
test('simple filename should not be modified', () => {
|
||||
expect(shellEscape('file.txt')).toBe('file.txt')
|
||||
})
|
||||
|
||||
test('path with quote is divided into quoted segments and escaped quote', () => {
|
||||
expect(shellEscape("file'with quote")).toBe("'file'\\''with quote'")
|
||||
})
|
||||
test('path with leading quote does not have double quotes at beginning', () => {
|
||||
expect(shellEscape("'file-leading-quote")).toBe("\\''file-leading-quote'")
|
||||
test('directory separator should be preserved and not escaped', () => {
|
||||
expect(shellEscape('path/to/file.txt')).toBe('path/to/file.txt')
|
||||
})
|
||||
|
||||
test('filename with spaces should be quoted', () => {
|
||||
expect(shellEscape('file with space')).toBe("'file with space'")
|
||||
})
|
||||
|
||||
test('filename with spaces should be quoted', () => {
|
||||
expect(shellEscape('file with space')).toBe("'file with space'")
|
||||
})
|
||||
|
||||
test('filename with $ should be quoted', () => {
|
||||
expect(shellEscape('$var')).toBe("'$var'")
|
||||
})
|
||||
|
||||
test('filename with " should be quoted', () => {
|
||||
expect(shellEscape('file"name')).toBe("'file\"name'")
|
||||
})
|
||||
|
||||
test('filename with single quote should be wrapped in double quotes', () => {
|
||||
expect(shellEscape("file'with quote")).toBe('"file\'with quote"')
|
||||
})
|
||||
|
||||
test('filename with single quote and special characters is split and quoted/escaped as needed', () => {
|
||||
expect(shellEscape("file'with $quote")).toBe("file\\''with $quote'")
|
||||
})
|
||||
})
|
||||
|
||||
19
action.yml
19
action.yml
@@ -9,6 +9,11 @@ inputs:
|
||||
working-directory:
|
||||
description: 'Relative path under $GITHUB_WORKSPACE where the repository was checked out.'
|
||||
required: false
|
||||
ref:
|
||||
description: |
|
||||
Git reference (e.g. branch name) from which the changes will be detected.
|
||||
This option is ignored if action is triggered by pull_request event.
|
||||
required: false
|
||||
base:
|
||||
description: |
|
||||
Git reference (e.g. branch name) against which the changes will be detected. Defaults to repository default branch (e.g. master).
|
||||
@@ -22,8 +27,13 @@ inputs:
|
||||
description: |
|
||||
Enables listing of files matching the filter:
|
||||
'none' - Disables listing of matching files (default).
|
||||
'json' - Matching files paths are serialized as JSON array.
|
||||
'shell' - Matching files paths are escaped and space-delimited. Output is usable as command line argument list in linux shell.
|
||||
'csv' - Coma separated list of filenames.
|
||||
If needed it uses double quotes to wrap filename with unsafe characters.
|
||||
'json' - Serialized as JSON array.
|
||||
'shell' - Space delimited list usable as command line argument list in linux shell.
|
||||
If needed it uses single or double quotes to wrap filename with unsafe characters.
|
||||
'escape'- Space delimited list usable as command line argument list in linux shell.
|
||||
Backslash escapes every potentially unsafe character.
|
||||
required: true
|
||||
default: none
|
||||
initial-fetch-depth:
|
||||
@@ -33,7 +43,10 @@ inputs:
|
||||
until the merge-base is found or there are no more commits in the history.
|
||||
This option takes effect only when changes are detected using git against different base branch.
|
||||
required: false
|
||||
default: '10'
|
||||
default: '100'
|
||||
outputs:
|
||||
changes:
|
||||
description: JSON array with names of all filters matching any of changed files
|
||||
runs:
|
||||
using: 'node12'
|
||||
main: 'dist/index.js'
|
||||
|
||||
462
dist/index.js
vendored
462
dist/index.js
vendored
@@ -3811,11 +3811,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.isGitSha = exports.getShortName = exports.getCurrentRef = exports.listAllFilesAsAdded = exports.parseGitDiffOutput = exports.getChangesSinceMergeBase = exports.getChanges = exports.getChangesInLastCommit = exports.NULL_SHA = void 0;
|
||||
exports.isGitSha = exports.getShortName = exports.getCurrentRef = exports.listAllFilesAsAdded = exports.parseGitDiffOutput = exports.getChangesSinceMergeBase = exports.getChangesOnHead = exports.getChanges = exports.getChangesInLastCommit = exports.HEAD = exports.NULL_SHA = void 0;
|
||||
const exec_1 = __importDefault(__webpack_require__(807));
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const file_1 = __webpack_require__(258);
|
||||
exports.NULL_SHA = '0000000000000000000000000000000000000000';
|
||||
exports.HEAD = 'HEAD';
|
||||
async function getChangesInLastCommit() {
|
||||
core.startGroup(`Change detection in last commit`);
|
||||
let output = '';
|
||||
@@ -3829,19 +3830,15 @@ async function getChangesInLastCommit() {
|
||||
return parseGitDiffOutput(output);
|
||||
}
|
||||
exports.getChangesInLastCommit = getChangesInLastCommit;
|
||||
async function getChanges(ref) {
|
||||
if (!(await hasCommit(ref))) {
|
||||
// Fetch single commit
|
||||
core.startGroup(`Fetching ${ref} from origin`);
|
||||
await exec_1.default('git', ['fetch', '--depth=1', '--no-tags', 'origin', ref]);
|
||||
core.endGroup();
|
||||
}
|
||||
async function getChanges(base, head) {
|
||||
const baseRef = await ensureRefAvailable(base);
|
||||
const headRef = await ensureRefAvailable(head);
|
||||
// Get differences between ref and HEAD
|
||||
core.startGroup(`Change detection ${ref}..HEAD`);
|
||||
core.startGroup(`Change detection ${base}..${head}`);
|
||||
let output = '';
|
||||
try {
|
||||
// Two dots '..' change detection - directly compares two versions
|
||||
output = (await exec_1.default('git', ['diff', '--no-renames', '--name-status', '-z', `${ref}..HEAD`])).stdout;
|
||||
output = (await exec_1.default('git', ['diff', '--no-renames', '--name-status', '-z', `${baseRef}..${headRef}`])).stdout;
|
||||
}
|
||||
finally {
|
||||
fixStdOutNullTermination();
|
||||
@@ -3850,47 +3847,86 @@ async function getChanges(ref) {
|
||||
return parseGitDiffOutput(output);
|
||||
}
|
||||
exports.getChanges = getChanges;
|
||||
async function getChangesSinceMergeBase(ref, initialFetchDepth) {
|
||||
if (!(await hasCommit(ref))) {
|
||||
// Fetch and add base branch
|
||||
core.startGroup(`Fetching ${ref}`);
|
||||
try {
|
||||
await exec_1.default('git', ['fetch', `--depth=${initialFetchDepth}`, '--no-tags', 'origin', `${ref}:${ref}`]);
|
||||
}
|
||||
finally {
|
||||
core.endGroup();
|
||||
}
|
||||
}
|
||||
async function hasMergeBase() {
|
||||
return (await exec_1.default('git', ['merge-base', ref, 'HEAD'], { ignoreReturnCode: true })).code === 0;
|
||||
}
|
||||
async function countCommits() {
|
||||
return (await getNumberOfCommits('HEAD')) + (await getNumberOfCommits(ref));
|
||||
}
|
||||
core.startGroup(`Searching for merge-base with ${ref}`);
|
||||
// Fetch more commits until merge-base is found
|
||||
if (!(await hasMergeBase())) {
|
||||
let deepen = initialFetchDepth;
|
||||
let lastCommitsCount = await countCommits();
|
||||
do {
|
||||
await exec_1.default('git', ['fetch', `--deepen=${deepen}`, '--no-tags']);
|
||||
const count = await countCommits();
|
||||
if (count <= lastCommitsCount) {
|
||||
core.info('No merge base found - all files will be listed as added');
|
||||
core.endGroup();
|
||||
return await listAllFilesAsAdded();
|
||||
}
|
||||
lastCommitsCount = count;
|
||||
deepen = Math.min(deepen * 2, Number.MAX_SAFE_INTEGER);
|
||||
} while (!(await hasMergeBase()));
|
||||
}
|
||||
core.endGroup();
|
||||
// Get changes introduced on HEAD compared to ref
|
||||
core.startGroup(`Change detection ${ref}...HEAD`);
|
||||
async function getChangesOnHead() {
|
||||
// Get current changes - both staged and unstaged
|
||||
core.startGroup(`Change detection on HEAD`);
|
||||
let output = '';
|
||||
try {
|
||||
// Three dots '...' change detection - finds merge-base and compares against it
|
||||
output = (await exec_1.default('git', ['diff', '--no-renames', '--name-status', '-z', `${ref}...HEAD`])).stdout;
|
||||
output = (await exec_1.default('git', ['diff', '--no-renames', '--name-status', '-z', 'HEAD'])).stdout;
|
||||
}
|
||||
finally {
|
||||
fixStdOutNullTermination();
|
||||
core.endGroup();
|
||||
}
|
||||
return parseGitDiffOutput(output);
|
||||
}
|
||||
exports.getChangesOnHead = getChangesOnHead;
|
||||
async function getChangesSinceMergeBase(base, head, initialFetchDepth) {
|
||||
let baseRef;
|
||||
let headRef;
|
||||
async function hasMergeBase() {
|
||||
if (baseRef === undefined || headRef === undefined) {
|
||||
return false;
|
||||
}
|
||||
return (await exec_1.default('git', ['merge-base', baseRef, headRef], { ignoreReturnCode: true })).code === 0;
|
||||
}
|
||||
let noMergeBase = false;
|
||||
core.startGroup(`Searching for merge-base ${base}...${head}`);
|
||||
try {
|
||||
baseRef = await getLocalRef(base);
|
||||
headRef = await getLocalRef(head);
|
||||
if (!(await hasMergeBase())) {
|
||||
await exec_1.default('git', ['fetch', '--no-tags', `--depth=${initialFetchDepth}`, 'origin', base, head]);
|
||||
if (baseRef === undefined || headRef === undefined) {
|
||||
baseRef = baseRef !== null && baseRef !== void 0 ? baseRef : (await getLocalRef(base));
|
||||
headRef = headRef !== null && headRef !== void 0 ? headRef : (await getLocalRef(head));
|
||||
if (baseRef === undefined || headRef === undefined) {
|
||||
await exec_1.default('git', ['fetch', '--tags', '--depth=1', 'origin', base, head], {
|
||||
ignoreReturnCode: true // returns exit code 1 if tags on remote were updated - we can safely ignore it
|
||||
});
|
||||
baseRef = baseRef !== null && baseRef !== void 0 ? baseRef : (await getLocalRef(base));
|
||||
headRef = headRef !== null && headRef !== void 0 ? headRef : (await getLocalRef(head));
|
||||
if (baseRef === undefined) {
|
||||
throw new Error(`Could not determine what is ${base} - fetch works but it's not a branch, tag or commit SHA`);
|
||||
}
|
||||
if (headRef === undefined) {
|
||||
throw new Error(`Could not determine what is ${head} - fetch works but it's not a branch, tag or commit SHA`);
|
||||
}
|
||||
}
|
||||
}
|
||||
let depth = initialFetchDepth;
|
||||
let lastCommitCount = await getCommitCount();
|
||||
while (!(await hasMergeBase())) {
|
||||
depth = Math.min(depth * 2, Number.MAX_SAFE_INTEGER);
|
||||
await exec_1.default('git', ['fetch', `--deepen=${depth}`, 'origin', base, head]);
|
||||
const commitCount = await getCommitCount();
|
||||
if (commitCount === lastCommitCount) {
|
||||
core.info('No more commits were fetched');
|
||||
core.info('Last attempt will be to fetch full history');
|
||||
await exec_1.default('git', ['fetch']);
|
||||
if (!(await hasMergeBase())) {
|
||||
noMergeBase = true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
lastCommitCount = commitCount;
|
||||
}
|
||||
}
|
||||
}
|
||||
finally {
|
||||
core.endGroup();
|
||||
}
|
||||
// Three dots '...' change detection - finds merge-base and compares against it
|
||||
let diffArg = `${baseRef}...${headRef}`;
|
||||
if (noMergeBase) {
|
||||
core.warning('No merge base found - change detection will use direct <commit>..<commit> comparison');
|
||||
diffArg = `${baseRef}..${headRef}`;
|
||||
}
|
||||
// Get changes introduced on ref compared to base
|
||||
core.startGroup(`Change detection ${diffArg}`);
|
||||
let output = '';
|
||||
try {
|
||||
output = (await exec_1.default('git', ['diff', '--no-renames', '--name-status', '-z', diffArg])).stdout;
|
||||
}
|
||||
finally {
|
||||
fixStdOutNullTermination();
|
||||
@@ -3931,7 +3967,7 @@ async function listAllFilesAsAdded() {
|
||||
}
|
||||
exports.listAllFilesAsAdded = listAllFilesAsAdded;
|
||||
async function getCurrentRef() {
|
||||
core.startGroup(`Determining current ref`);
|
||||
core.startGroup(`Get current git ref`);
|
||||
try {
|
||||
const branch = (await exec_1.default('git', ['branch', '--show-current'])).stdout.trim();
|
||||
if (branch) {
|
||||
@@ -3941,7 +3977,7 @@ async function getCurrentRef() {
|
||||
if (describe.code === 0) {
|
||||
return describe.stdout.trim();
|
||||
}
|
||||
return (await exec_1.default('git', ['rev-parse', 'HEAD'])).stdout.trim();
|
||||
return (await exec_1.default('git', ['rev-parse', exports.HEAD])).stdout.trim();
|
||||
}
|
||||
finally {
|
||||
core.endGroup();
|
||||
@@ -3965,19 +4001,52 @@ function isGitSha(ref) {
|
||||
}
|
||||
exports.isGitSha = isGitSha;
|
||||
async function hasCommit(ref) {
|
||||
core.startGroup(`Checking if commit for ${ref} is locally available`);
|
||||
return (await exec_1.default('git', ['cat-file', '-e', `${ref}^{commit}`], { ignoreReturnCode: true })).code === 0;
|
||||
}
|
||||
async function getCommitCount() {
|
||||
const output = (await exec_1.default('git', ['rev-list', '--count', '--all'])).stdout;
|
||||
const count = parseInt(output);
|
||||
return isNaN(count) ? 0 : count;
|
||||
}
|
||||
async function getLocalRef(shortName) {
|
||||
if (isGitSha(shortName)) {
|
||||
return (await hasCommit(shortName)) ? shortName : undefined;
|
||||
}
|
||||
const output = (await exec_1.default('git', ['show-ref', shortName], { ignoreReturnCode: true })).stdout;
|
||||
const refs = output
|
||||
.split(/\r?\n/g)
|
||||
.map(l => { var _a, _b; return (_b = (_a = l.match(/refs\/.*$/)) === null || _a === void 0 ? void 0 : _a[0]) !== null && _b !== void 0 ? _b : ''; })
|
||||
.filter(l => l !== '');
|
||||
if (refs.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
const remoteRef = refs.find(ref => ref.startsWith('refs/remotes/origin/'));
|
||||
if (remoteRef) {
|
||||
return remoteRef;
|
||||
}
|
||||
return refs[0];
|
||||
}
|
||||
async function ensureRefAvailable(name) {
|
||||
core.startGroup(`Ensuring ${name} is fetched from origin`);
|
||||
try {
|
||||
return (await exec_1.default('git', ['cat-file', '-e', `${ref}^{commit}`], { ignoreReturnCode: true })).code === 0;
|
||||
let ref = await getLocalRef(name);
|
||||
if (ref === undefined) {
|
||||
await exec_1.default('git', ['fetch', '--depth=1', '--no-tags', 'origin', name]);
|
||||
ref = await getLocalRef(name);
|
||||
if (ref === undefined) {
|
||||
await exec_1.default('git', ['fetch', '--depth=1', '--tags', 'origin', name]);
|
||||
ref = await getLocalRef(name);
|
||||
if (ref === undefined) {
|
||||
throw new Error(`Could not determine what is ${name} - fetch works but it's not a branch, tag or commit SHA`);
|
||||
}
|
||||
}
|
||||
}
|
||||
return ref;
|
||||
}
|
||||
finally {
|
||||
core.endGroup();
|
||||
}
|
||||
}
|
||||
async function getNumberOfCommits(ref) {
|
||||
const output = (await exec_1.default('git', ['rev-list', `--count`, ref])).stdout;
|
||||
const count = parseInt(output);
|
||||
return isNaN(count) ? 0 : count;
|
||||
}
|
||||
function fixStdOutNullTermination() {
|
||||
// Previous command uses NULL as delimiters and output is printed to stdout.
|
||||
// We have to make sure next thing written to stdout will start on new line.
|
||||
@@ -4626,9 +4695,6 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = __importStar(__webpack_require__(747));
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
@@ -4636,7 +4702,8 @@ const github = __importStar(__webpack_require__(469));
|
||||
const filter_1 = __webpack_require__(235);
|
||||
const file_1 = __webpack_require__(258);
|
||||
const git = __importStar(__webpack_require__(136));
|
||||
const shell_escape_1 = __importDefault(__webpack_require__(751));
|
||||
const shell_escape_1 = __webpack_require__(206);
|
||||
const csv_escape_1 = __webpack_require__(410);
|
||||
async function run() {
|
||||
try {
|
||||
const workingDirectory = core.getInput('working-directory', { required: false });
|
||||
@@ -4644,6 +4711,7 @@ async function run() {
|
||||
process.chdir(workingDirectory);
|
||||
}
|
||||
const token = core.getInput('token', { required: false });
|
||||
const ref = core.getInput('ref', { required: false });
|
||||
const base = core.getInput('base', { required: false });
|
||||
const filtersInput = core.getInput('filters', { required: true });
|
||||
const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput;
|
||||
@@ -4654,7 +4722,8 @@ async function run() {
|
||||
return;
|
||||
}
|
||||
const filter = new filter_1.Filter(filtersYaml);
|
||||
const files = await getChangedFiles(token, base, initialFetchDepth);
|
||||
const files = await getChangedFiles(token, base, ref, initialFetchDepth);
|
||||
core.info(`Detected ${files.length} changed files`);
|
||||
const results = filter.match(files);
|
||||
exportResults(results, listFiles);
|
||||
}
|
||||
@@ -4663,7 +4732,7 @@ async function run() {
|
||||
}
|
||||
}
|
||||
function isPathInput(text) {
|
||||
return !text.includes('\n');
|
||||
return !(text.includes('\n') || text.includes(':'));
|
||||
}
|
||||
function getConfigFileContent(configPath) {
|
||||
if (!fs.existsSync(configPath)) {
|
||||
@@ -4674,104 +4743,151 @@ function getConfigFileContent(configPath) {
|
||||
}
|
||||
return fs.readFileSync(configPath, { encoding: 'utf8' });
|
||||
}
|
||||
async function getChangedFiles(token, base, initialFetchDepth) {
|
||||
if (github.context.eventName === 'pull_request' || github.context.eventName === 'pull_request_target') {
|
||||
async function getChangedFiles(token, base, ref, initialFetchDepth) {
|
||||
// if base is 'HEAD' only local uncommitted changes will be detected
|
||||
// This is the simplest case as we don't need to fetch more commits or evaluate current/before refs
|
||||
if (base === git.HEAD) {
|
||||
if (ref) {
|
||||
core.warning(`'ref' input parameter is ignored when 'base' is set to HEAD`);
|
||||
}
|
||||
return await git.getChangesOnHead();
|
||||
}
|
||||
const prEvents = ['pull_request', 'pull_request_review', 'pull_request_review_comment', 'pull_request_target'];
|
||||
if (prEvents.includes(github.context.eventName)) {
|
||||
if (ref) {
|
||||
core.warning(`'ref' input parameter is ignored when 'base' is set to HEAD`);
|
||||
}
|
||||
if (base) {
|
||||
core.warning(`'base' input parameter is ignored when action is triggered by pull request event`);
|
||||
}
|
||||
const pr = github.context.payload.pull_request;
|
||||
if (token) {
|
||||
return await getChangedFilesFromApi(token, pr);
|
||||
}
|
||||
if (github.context.eventName === 'pull_request_target') {
|
||||
// pull_request_target is executed in context of base branch and GITHUB_SHA points to last commit in base branch
|
||||
// Therefor it's not possible to look at changes in last commit
|
||||
// At the same time we don't want to fetch any code from forked repository
|
||||
throw new Error(`'token' input parameter is required if action is triggered by 'pull_request_target' event`);
|
||||
}
|
||||
core.info('Github token is not available - changes will be detected from PRs merge commit');
|
||||
return await git.getChangesInLastCommit();
|
||||
}
|
||||
else {
|
||||
return getChangedFilesFromGit(base, initialFetchDepth);
|
||||
return getChangedFilesFromGit(base, ref, initialFetchDepth);
|
||||
}
|
||||
}
|
||||
async function getChangedFilesFromGit(base, initialFetchDepth) {
|
||||
async function getChangedFilesFromGit(base, head, initialFetchDepth) {
|
||||
var _a;
|
||||
const defaultRef = (_a = github.context.payload.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
|
||||
const defaultBranch = (_a = github.context.payload.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
|
||||
const beforeSha = github.context.eventName === 'push' ? github.context.payload.before : null;
|
||||
const pushRef = git.getShortName(github.context.ref) ||
|
||||
(core.warning(`'ref' field is missing in PUSH event payload - using current branch, tag or commit SHA`),
|
||||
await git.getCurrentRef());
|
||||
const baseRef = git.getShortName(base) || defaultRef;
|
||||
if (!baseRef) {
|
||||
const currentRef = await git.getCurrentRef();
|
||||
head = git.getShortName(head || github.context.ref || currentRef);
|
||||
base = git.getShortName(base || defaultBranch);
|
||||
if (!head) {
|
||||
throw new Error("This action requires 'head' input to be configured, 'ref' to be set in the event payload or branch/tag checked out in current git repository");
|
||||
}
|
||||
if (!base) {
|
||||
throw new Error("This action requires 'base' input to be configured or 'repository.default_branch' to be set in the event payload");
|
||||
}
|
||||
const isBaseRefSha = git.isGitSha(baseRef);
|
||||
const isBaseSameAsPush = baseRef === pushRef;
|
||||
// If base is commit SHA will do comparison against the referenced commit
|
||||
// Or If base references same branch it was pushed to, we will do comparison against the previously pushed commit
|
||||
if (isBaseRefSha || isBaseSameAsPush) {
|
||||
if (!isBaseRefSha && !beforeSha) {
|
||||
core.warning(`'before' field is missing in PUSH event payload - changes will be detected from last commit`);
|
||||
const isBaseSha = git.isGitSha(base);
|
||||
const isBaseSameAsHead = base === head;
|
||||
// If base is commit SHA we will do comparison against the referenced commit
|
||||
// Or if base references same branch it was pushed to, we will do comparison against the previously pushed commit
|
||||
if (isBaseSha || isBaseSameAsHead) {
|
||||
const baseSha = isBaseSha ? base : beforeSha;
|
||||
if (!baseSha) {
|
||||
core.warning(`'before' field is missing in event payload - changes will be detected from last commit`);
|
||||
if (head !== currentRef) {
|
||||
core.warning(`Ref ${head} is not checked out - results might be incorrect!`);
|
||||
}
|
||||
return await git.getChangesInLastCommit();
|
||||
}
|
||||
const baseSha = isBaseRefSha ? baseRef : beforeSha;
|
||||
// If there is no previously pushed commit,
|
||||
// we will do comparison against the default branch or return all as added
|
||||
if (baseSha === git.NULL_SHA) {
|
||||
if (defaultRef && baseRef !== defaultRef) {
|
||||
core.info(`First push of a branch detected - changes will be detected against the default branch ${defaultRef}`);
|
||||
return await git.getChangesSinceMergeBase(defaultRef, initialFetchDepth);
|
||||
if (defaultBranch && base !== defaultBranch) {
|
||||
core.info(`First push of a branch detected - changes will be detected against the default branch ${defaultBranch}`);
|
||||
return await git.getChangesSinceMergeBase(defaultBranch, head, initialFetchDepth);
|
||||
}
|
||||
else {
|
||||
core.info('Initial push detected - all files will be listed as added');
|
||||
if (head !== currentRef) {
|
||||
core.warning(`Ref ${head} is not checked out - results might be incorrect!`);
|
||||
}
|
||||
return await git.listAllFilesAsAdded();
|
||||
}
|
||||
}
|
||||
core.info(`Changes will be detected against commit (${baseSha})`);
|
||||
return await git.getChanges(baseSha);
|
||||
core.info(`Changes will be detected between ${baseSha} and ${head}`);
|
||||
return await git.getChanges(baseSha, head);
|
||||
}
|
||||
// Changes introduced by current branch against the base branch
|
||||
core.info(`Changes will be detected against the branch ${baseRef}`);
|
||||
return await git.getChangesSinceMergeBase(baseRef, initialFetchDepth);
|
||||
core.info(`Changes will be detected between ${base} and ${head}`);
|
||||
return await git.getChangesSinceMergeBase(base, head, initialFetchDepth);
|
||||
}
|
||||
// Uses github REST api to get list of files changed in PR
|
||||
async function getChangedFilesFromApi(token, pullRequest) {
|
||||
core.info(`Fetching list of changed files for PR#${pullRequest.number} from Github API`);
|
||||
const client = new github.GitHub(token);
|
||||
const pageSize = 100;
|
||||
const files = [];
|
||||
for (let page = 0; page * pageSize < pullRequest.changed_files; page++) {
|
||||
const response = await client.pulls.listFiles({
|
||||
owner: github.context.repo.owner,
|
||||
repo: github.context.repo.repo,
|
||||
pull_number: pullRequest.number,
|
||||
page,
|
||||
per_page: pageSize
|
||||
});
|
||||
for (const row of response.data) {
|
||||
// There's no obvious use-case for detection of renames
|
||||
// Therefore we treat it as if rename detection in git diff was turned off.
|
||||
// Rename is replaced by delete of original filename and add of new filename
|
||||
if (row.status === file_1.ChangeStatus.Renamed) {
|
||||
files.push({
|
||||
filename: row.filename,
|
||||
status: file_1.ChangeStatus.Added
|
||||
});
|
||||
files.push({
|
||||
// 'previous_filename' for some unknown reason isn't in the type definition or documentation
|
||||
filename: row.previous_filename,
|
||||
status: file_1.ChangeStatus.Deleted
|
||||
});
|
||||
async function getChangedFilesFromApi(token, prNumber) {
|
||||
core.startGroup(`Fetching list of changed files for PR#${prNumber.number} from Github API`);
|
||||
try {
|
||||
const client = new github.GitHub(token);
|
||||
const per_page = 100;
|
||||
const files = [];
|
||||
for (let page = 1;; page++) {
|
||||
core.info(`Invoking listFiles(pull_number: ${prNumber.number}, page: ${page}, per_page: ${per_page})`);
|
||||
const response = await client.pulls.listFiles({
|
||||
owner: github.context.repo.owner,
|
||||
repo: github.context.repo.repo,
|
||||
pull_number: prNumber.number,
|
||||
per_page,
|
||||
page
|
||||
});
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`Fetching list of changed files from GitHub API failed with error code ${response.status}`);
|
||||
}
|
||||
else {
|
||||
files.push({
|
||||
filename: row.filename,
|
||||
status: row.status
|
||||
});
|
||||
core.info(`Received ${response.data.length} items`);
|
||||
if (response.data.length === 0) {
|
||||
core.info('All changed files has been fetched from GitHub API');
|
||||
break;
|
||||
}
|
||||
for (const row of response.data) {
|
||||
core.info(`[${row.status}] ${row.filename}`);
|
||||
// There's no obvious use-case for detection of renames
|
||||
// Therefore we treat it as if rename detection in git diff was turned off.
|
||||
// Rename is replaced by delete of original filename and add of new filename
|
||||
if (row.status === file_1.ChangeStatus.Renamed) {
|
||||
files.push({
|
||||
filename: row.filename,
|
||||
status: file_1.ChangeStatus.Added
|
||||
});
|
||||
files.push({
|
||||
// 'previous_filename' for some unknown reason isn't in the type definition or documentation
|
||||
filename: row.previous_filename,
|
||||
status: file_1.ChangeStatus.Deleted
|
||||
});
|
||||
}
|
||||
else {
|
||||
// Github status and git status variants are same except for deleted files
|
||||
const status = row.status === 'removed' ? file_1.ChangeStatus.Deleted : row.status;
|
||||
files.push({
|
||||
filename: row.filename,
|
||||
status
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
return files;
|
||||
}
|
||||
finally {
|
||||
core.endGroup();
|
||||
}
|
||||
return files;
|
||||
}
|
||||
function exportResults(results, format) {
|
||||
core.info('Results:');
|
||||
const changes = [];
|
||||
for (const [key, files] of Object.entries(results)) {
|
||||
const value = files.length > 0;
|
||||
core.startGroup(`Filter ${key} = ${value}`);
|
||||
if (files.length > 0) {
|
||||
changes.push(key);
|
||||
core.info('Matching files:');
|
||||
for (const file of files) {
|
||||
core.info(`${file.filename} [${file.status}]`);
|
||||
@@ -4781,26 +4897,39 @@ function exportResults(results, format) {
|
||||
core.info('Matching files: none');
|
||||
}
|
||||
core.setOutput(key, value);
|
||||
core.setOutput(`${key}_count`, files.length);
|
||||
if (format !== 'none') {
|
||||
const filesValue = serializeExport(files, format);
|
||||
core.setOutput(`${key}_files`, filesValue);
|
||||
}
|
||||
core.endGroup();
|
||||
}
|
||||
if (results['changes'] === undefined) {
|
||||
const changesJson = JSON.stringify(changes);
|
||||
core.info(`Changes output set to ${changesJson}`);
|
||||
core.setOutput('changes', changesJson);
|
||||
}
|
||||
else {
|
||||
core.info('Cannot set changes output variable - name already used by filter output');
|
||||
}
|
||||
core.endGroup();
|
||||
}
|
||||
function serializeExport(files, format) {
|
||||
const fileNames = files.map(file => file.filename);
|
||||
switch (format) {
|
||||
case 'csv':
|
||||
return fileNames.map(csv_escape_1.csvEscape).join(',');
|
||||
case 'json':
|
||||
return JSON.stringify(fileNames);
|
||||
case 'escape':
|
||||
return fileNames.map(shell_escape_1.backslashEscape).join(' ');
|
||||
case 'shell':
|
||||
return fileNames.map(shell_escape_1.default).join(' ');
|
||||
return fileNames.map(shell_escape_1.shellEscape).join(' ');
|
||||
default:
|
||||
return '';
|
||||
}
|
||||
}
|
||||
function isExportFormat(value) {
|
||||
return value === 'none' || value === 'shell' || value === 'json';
|
||||
return ['none', 'csv', 'shell', 'json', 'escape'].includes(value);
|
||||
}
|
||||
run();
|
||||
|
||||
@@ -4992,6 +5121,43 @@ module.exports = {
};


/***/ }),

/***/ 206:
/***/ (function(__unusedmodule, exports) {

"use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.shellEscape = exports.backslashEscape = void 0;
// Backslash escape every character except small subset of definitely safe characters
function backslashEscape(value) {
return value.replace(/([^a-zA-Z0-9,._+:@%/-])/gm, '\\$1');
}
exports.backslashEscape = backslashEscape;
// Returns filename escaped for usage as shell argument.
// Applies "human readable" approach with as few escaping applied as possible
function shellEscape(value) {
if (value === '')
return value;
// Only safe characters
if (/^[a-zA-Z0-9,._+:@%/-]+$/m.test(value)) {
return value;
}
if (value.includes("'")) {
// Only safe characters, single quotes and white-spaces
if (/^[a-zA-Z0-9,._+:@%/'\s-]+$/m.test(value)) {
return `"${value}"`;
}
// Split by single quote and apply escaping recursively
return value.split("'").map(shellEscape).join("\\'");
}
// Contains some unsafe characters but no single quote
return `'${value}'`;
}
exports.shellEscape = shellEscape;


/***/ }),

/***/ 211:
@@ -8777,6 +8943,33 @@ function Octokit(plugins, options) {
}


/***/ }),

/***/ 410:
/***/ (function(__unusedmodule, exports) {

"use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.csvEscape = void 0;
// Returns filename escaped for CSV
// Wraps file name into "..." only when it contains some potentially unsafe character
function csvEscape(value) {
if (value === '')
return value;
// Only safe characters
if (/^[a-zA-Z0-9._+:@%/-]+$/m.test(value)) {
return value;
}
// https://tools.ietf.org/html/rfc4180
// If double-quotes are used to enclose fields, then a double-quote
// appearing inside a field must be escaped by preceding it with
// another double quote
return `"${value.replace(/"/g, '""')}"`;
}
exports.csvEscape = csvEscape;


/***/ }),

/***/ 413:
@@ -15189,23 +15382,6 @@ function sync (path, options) {

module.exports = require("fs");

/***/ }),

/***/ 751:
/***/ (function(__unusedmodule, exports) {

"use strict";

// Credits to https://github.com/xxorax/node-shell-escape
Object.defineProperty(exports, "__esModule", { value: true });
function shellEscape(value) {
return `'${value.replace(/'/g, "'\\''")}'`
.replace(/^(?:'')+/g, '') // unduplicate single-quote at the beginning
.replace(/\\'''/g, "\\'"); // remove non-escaped single-quote if there are enclosed between 2 escaped
}
exports.default = shellEscape;


/***/ }),

/***/ 753:
202
src/git.ts
@@ -3,6 +3,7 @@ import * as core from '@actions/core'
import {File, ChangeStatus} from './file'

export const NULL_SHA = '0000000000000000000000000000000000000000'
export const HEAD = 'HEAD'

export async function getChangesInLastCommit(): Promise<File[]> {
core.startGroup(`Change detection in last commit`)
@@ -17,20 +18,16 @@ export async function getChangesInLastCommit(): Promise<File[]> {
return parseGitDiffOutput(output)
}

export async function getChanges(ref: string): Promise<File[]> {
if (!(await hasCommit(ref))) {
// Fetch single commit
core.startGroup(`Fetching ${ref} from origin`)
await exec('git', ['fetch', '--depth=1', '--no-tags', 'origin', ref])
core.endGroup()
}
export async function getChanges(base: string, head: string): Promise<File[]> {
const baseRef = await ensureRefAvailable(base)
const headRef = await ensureRefAvailable(head)

// Get differences between ref and HEAD
core.startGroup(`Change detection ${ref}..HEAD`)
core.startGroup(`Change detection ${base}..${head}`)
let output = ''
try {
// Two dots '..' change detection - directly compares two versions
output = (await exec('git', ['diff', '--no-renames', '--name-status', '-z', `${ref}..HEAD`])).stdout
output = (await exec('git', ['diff', '--no-renames', '--name-status', '-z', `${baseRef}..${headRef}`])).stdout
} finally {
fixStdOutNullTermination()
core.endGroup()
@@ -39,50 +36,93 @@ export async function getChanges(ref: string): Promise<File[]> {
return parseGitDiffOutput(output)
}

export async function getChangesSinceMergeBase(ref: string, initialFetchDepth: number): Promise<File[]> {
if (!(await hasCommit(ref))) {
// Fetch and add base branch
core.startGroup(`Fetching ${ref}`)
try {
await exec('git', ['fetch', `--depth=${initialFetchDepth}`, '--no-tags', 'origin', `${ref}:${ref}`])
} finally {
core.endGroup()
}
}

async function hasMergeBase(): Promise<boolean> {
return (await exec('git', ['merge-base', ref, 'HEAD'], {ignoreReturnCode: true})).code === 0
}

async function countCommits(): Promise<number> {
return (await getNumberOfCommits('HEAD')) + (await getNumberOfCommits(ref))
}

core.startGroup(`Searching for merge-base with ${ref}`)
// Fetch more commits until merge-base is found
if (!(await hasMergeBase())) {
let deepen = initialFetchDepth
let lastCommitsCount = await countCommits()
do {
await exec('git', ['fetch', `--deepen=${deepen}`, '--no-tags'])
const count = await countCommits()
if (count <= lastCommitsCount) {
core.info('No merge base found - all files will be listed as added')
core.endGroup()
return await listAllFilesAsAdded()
}
lastCommitsCount = count
deepen = Math.min(deepen * 2, Number.MAX_SAFE_INTEGER)
} while (!(await hasMergeBase()))
}
core.endGroup()

// Get changes introduced on HEAD compared to ref
core.startGroup(`Change detection ${ref}...HEAD`)
export async function getChangesOnHead(): Promise<File[]> {
// Get current changes - both staged and unstaged
core.startGroup(`Change detection on HEAD`)
let output = ''
try {
// Three dots '...' change detection - finds merge-base and compares against it
output = (await exec('git', ['diff', '--no-renames', '--name-status', '-z', `${ref}...HEAD`])).stdout
output = (await exec('git', ['diff', '--no-renames', '--name-status', '-z', 'HEAD'])).stdout
} finally {
fixStdOutNullTermination()
core.endGroup()
}

return parseGitDiffOutput(output)
}

export async function getChangesSinceMergeBase(base: string, head: string, initialFetchDepth: number): Promise<File[]> {
let baseRef: string | undefined
let headRef: string | undefined
async function hasMergeBase(): Promise<boolean> {
if (baseRef === undefined || headRef === undefined) {
return false
}
return (await exec('git', ['merge-base', baseRef, headRef], {ignoreReturnCode: true})).code === 0
}

let noMergeBase = false
core.startGroup(`Searching for merge-base ${base}...${head}`)
try {
baseRef = await getLocalRef(base)
headRef = await getLocalRef(head)
if (!(await hasMergeBase())) {
await exec('git', ['fetch', '--no-tags', `--depth=${initialFetchDepth}`, 'origin', base, head])
if (baseRef === undefined || headRef === undefined) {
baseRef = baseRef ?? (await getLocalRef(base))
headRef = headRef ?? (await getLocalRef(head))
if (baseRef === undefined || headRef === undefined) {
await exec('git', ['fetch', '--tags', '--depth=1', 'origin', base, head], {
ignoreReturnCode: true // returns exit code 1 if tags on remote were updated - we can safely ignore it
})
baseRef = baseRef ?? (await getLocalRef(base))
headRef = headRef ?? (await getLocalRef(head))
if (baseRef === undefined) {
throw new Error(
`Could not determine what is ${base} - fetch works but it's not a branch, tag or commit SHA`
)
}
if (headRef === undefined) {
throw new Error(
`Could not determine what is ${head} - fetch works but it's not a branch, tag or commit SHA`
)
}
}
}

let depth = initialFetchDepth
let lastCommitCount = await getCommitCount()
while (!(await hasMergeBase())) {
depth = Math.min(depth * 2, Number.MAX_SAFE_INTEGER)
await exec('git', ['fetch', `--deepen=${depth}`, 'origin', base, head])
const commitCount = await getCommitCount()
if (commitCount === lastCommitCount) {
core.info('No more commits were fetched')
core.info('Last attempt will be to fetch full history')
await exec('git', ['fetch'])
if (!(await hasMergeBase())) {
noMergeBase = true
}
break
}
lastCommitCount = commitCount
}
}
} finally {
core.endGroup()
}

// Three dots '...' change detection - finds merge-base and compares against it
let diffArg = `${baseRef}...${headRef}`
if (noMergeBase) {
core.warning('No merge base found - change detection will use direct <commit>..<commit> comparison')
diffArg = `${baseRef}..${headRef}`
}

// Get changes introduced on ref compared to base
core.startGroup(`Change detection ${diffArg}`)
let output = ''
try {
output = (await exec('git', ['diff', '--no-renames', '--name-status', '-z', diffArg])).stdout
} finally {
fixStdOutNullTermination()
core.endGroup()
@@ -123,7 +163,7 @@ export async function listAllFilesAsAdded(): Promise<File[]> {
}

export async function getCurrentRef(): Promise<string> {
core.startGroup(`Determining current ref`)
core.startGroup(`Get current git ref`)
try {
const branch = (await exec('git', ['branch', '--show-current'])).stdout.trim()
if (branch) {
@@ -135,7 +175,7 @@ export async function getCurrentRef(): Promise<string> {
return describe.stdout.trim()
}

return (await exec('git', ['rev-parse', 'HEAD'])).stdout.trim()
return (await exec('git', ['rev-parse', HEAD])).stdout.trim()
} finally {
core.endGroup()
}
@@ -158,20 +198,60 @@ export function isGitSha(ref: string): boolean {
}

async function hasCommit(ref: string): Promise<boolean> {
core.startGroup(`Checking if commit for ${ref} is locally available`)
return (await exec('git', ['cat-file', '-e', `${ref}^{commit}`], {ignoreReturnCode: true})).code === 0
}

async function getCommitCount(): Promise<number> {
const output = (await exec('git', ['rev-list', '--count', '--all'])).stdout
const count = parseInt(output)
return isNaN(count) ? 0 : count
}

async function getLocalRef(shortName: string): Promise<string | undefined> {
if (isGitSha(shortName)) {
return (await hasCommit(shortName)) ? shortName : undefined
}

const output = (await exec('git', ['show-ref', shortName], {ignoreReturnCode: true})).stdout
const refs = output
.split(/\r?\n/g)
.map(l => l.match(/refs\/.*$/)?.[0] ?? '')
.filter(l => l !== '')

if (refs.length === 0) {
return undefined
}

const remoteRef = refs.find(ref => ref.startsWith('refs/remotes/origin/'))
if (remoteRef) {
return remoteRef
}

return refs[0]
}

async function ensureRefAvailable(name: string): Promise<string> {
core.startGroup(`Ensuring ${name} is fetched from origin`)
try {
return (await exec('git', ['cat-file', '-e', `${ref}^{commit}`], {ignoreReturnCode: true})).code === 0
let ref = await getLocalRef(name)
if (ref === undefined) {
await exec('git', ['fetch', '--depth=1', '--no-tags', 'origin', name])
ref = await getLocalRef(name)
if (ref === undefined) {
await exec('git', ['fetch', '--depth=1', '--tags', 'origin', name])
ref = await getLocalRef(name)
if (ref === undefined) {
throw new Error(`Could not determine what is ${name} - fetch works but it's not a branch, tag or commit SHA`)
}
}
}

return ref
} finally {
core.endGroup()
}
}

async function getNumberOfCommits(ref: string): Promise<number> {
const output = (await exec('git', ['rev-list', `--count`, ref])).stdout
const count = parseInt(output)
return isNaN(count) ? 0 : count
}

function fixStdOutNullTermination(): void {
// Previous command uses NULL as delimiters and output is printed to stdout.
// We have to make sure next thing written to stdout will start on new line.
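The merge-base search above keeps doubling the fetch depth until `git merge-base` succeeds or fetching stops producing new commits. A stripped-down sketch of that loop, reusing the exec and getCommitCount helpers shown in this file (names kept from the source, error handling and the final full-history fetch omitted), not the literal implementation:

async function deepenUntilMergeBase(baseRef: string, headRef: string, initialFetchDepth: number): Promise<boolean> {
  let depth = initialFetchDepth
  let lastCommitCount = await getCommitCount()
  while ((await exec('git', ['merge-base', baseRef, headRef], {ignoreReturnCode: true})).code !== 0) {
    // Double the history window each round, capped at a safe integer
    depth = Math.min(depth * 2, Number.MAX_SAFE_INTEGER)
    await exec('git', ['fetch', `--deepen=${depth}`, 'origin', baseRef, headRef])
    const commitCount = await getCommitCount()
    if (commitCount === lastCommitCount) {
      // Nothing new was fetched - the real code falls back to a direct '..' comparison here
      return false
    }
    lastCommitCount = commitCount
  }
  return true
}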
16
src/list-format/csv-escape.ts
Normal file
@@ -0,0 +1,16 @@
// Returns filename escaped for CSV
// Wraps file name into "..." only when it contains some potentially unsafe character
export function csvEscape(value: string): string {
if (value === '') return value

// Only safe characters
if (/^[a-zA-Z0-9._+:@%/-]+$/m.test(value)) {
return value
}

// https://tools.ietf.org/html/rfc4180
// If double-quotes are used to enclose fields, then a double-quote
// appearing inside a field must be escaped by preceding it with
// another double quote
return `"${value.replace(/"/g, '""')}"`
}
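To make the quoting rule concrete, a few illustrative inputs and their expected outputs, derived from the safe-character regex and the RFC 4180 rule above (file names are made up):

csvEscape('src/main.ts')    // => src/main.ts            (only safe characters, returned unchanged)
csvEscape('my file.txt')    // => "my file.txt"          (space is outside the safe set, so the value is wrapped)
csvEscape('say "hi".txt')   // => "say ""hi"".txt"       (inner double quotes are doubled per RFC 4180)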
28
src/list-format/shell-escape.ts
Normal file
@@ -0,0 +1,28 @@
// Backslash escape every character except small subset of definitely safe characters
export function backslashEscape(value: string): string {
return value.replace(/([^a-zA-Z0-9,._+:@%/-])/gm, '\\$1')
}

// Returns filename escaped for usage as shell argument.
// Applies "human readable" approach with as few escaping applied as possible
export function shellEscape(value: string): string {
if (value === '') return value

// Only safe characters
if (/^[a-zA-Z0-9,._+:@%/-]+$/m.test(value)) {
return value
}

if (value.includes("'")) {
// Only safe characters, single quotes and white-spaces
if (/^[a-zA-Z0-9,._+:@%/'\s-]+$/m.test(value)) {
return `"${value}"`
}

// Split by single quote and apply escaping recursively
return value.split("'").map(shellEscape).join("\\'")
}

// Contains some unsafe characters but no single quote
return `'${value}'`
}
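A few illustrative calls showing how the two helpers behave, derived from the branches above (file names are made up):

backslashEscape('my file.txt')    // => my\ file.txt          (each unsafe character gets a backslash)
shellEscape('src/main.ts')        // => src/main.ts           (safe characters only, returned unchanged)
shellEscape('my file.txt')        // => 'my file.txt'         (unsafe character but no single quote)
shellEscape("it's file.txt")      // => "it's file.txt"       (single quote plus otherwise safe characters and whitespace)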
206
src/main.ts
@@ -6,9 +6,10 @@ import {Webhooks} from '@octokit/webhooks'
import {Filter, FilterResults} from './filter'
import {File, ChangeStatus} from './file'
import * as git from './git'
import shellEscape from './shell-escape'
import {backslashEscape, shellEscape} from './list-format/shell-escape'
import {csvEscape} from './list-format/csv-escape'

type ExportFormat = 'none' | 'json' | 'shell'
type ExportFormat = 'none' | 'csv' | 'json' | 'shell' | 'escape'

async function run(): Promise<void> {
try {
@@ -18,6 +19,7 @@ async function run(): Promise<void> {
}

const token = core.getInput('token', {required: false})
const ref = core.getInput('ref', {required: false})
const base = core.getInput('base', {required: false})
const filtersInput = core.getInput('filters', {required: true})
const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput
@@ -30,7 +32,8 @@ async function run(): Promise<void> {
}

const filter = new Filter(filtersYaml)
const files = await getChangedFiles(token, base, initialFetchDepth)
const files = await getChangedFiles(token, base, ref, initialFetchDepth)
core.info(`Detected ${files.length} changed files`)
const results = filter.match(files)
exportResults(results, listFiles)
} catch (error) {
@@ -39,7 +42,7 @@ async function run(): Promise<void> {
}

function isPathInput(text: string): boolean {
return !text.includes('\n')
return !(text.includes('\n') || text.includes(':'))
}

function getConfigFileContent(configPath: string): string {
@@ -54,119 +57,175 @@ function getConfigFileContent(configPath: string): string {
return fs.readFileSync(configPath, {encoding: 'utf8'})
}

async function getChangedFiles(token: string, base: string, initialFetchDepth: number): Promise<File[]> {
if (github.context.eventName === 'pull_request' || github.context.eventName === 'pull_request_target') {
async function getChangedFiles(token: string, base: string, ref: string, initialFetchDepth: number): Promise<File[]> {
// if base is 'HEAD' only local uncommitted changes will be detected
// This is the simplest case as we don't need to fetch more commits or evaluate current/before refs
if (base === git.HEAD) {
if (ref) {
core.warning(`'ref' input parameter is ignored when 'base' is set to HEAD`)
}
return await git.getChangesOnHead()
}

const prEvents = ['pull_request', 'pull_request_review', 'pull_request_review_comment', 'pull_request_target']
if (prEvents.includes(github.context.eventName)) {
if (ref) {
core.warning(`'ref' input parameter is ignored when 'base' is set to HEAD`)
}
if (base) {
core.warning(`'base' input parameter is ignored when action is triggered by pull request event`)
}
const pr = github.context.payload.pull_request as Webhooks.WebhookPayloadPullRequestPullRequest
if (token) {
return await getChangedFilesFromApi(token, pr)
}
if (github.context.eventName === 'pull_request_target') {
// pull_request_target is executed in context of base branch and GITHUB_SHA points to last commit in base branch
// Therefor it's not possible to look at changes in last commit
// At the same time we don't want to fetch any code from forked repository
throw new Error(`'token' input parameter is required if action is triggered by 'pull_request_target' event`)
}
core.info('Github token is not available - changes will be detected from PRs merge commit')
return await git.getChangesInLastCommit()
} else {
return getChangedFilesFromGit(base, initialFetchDepth)
return getChangedFilesFromGit(base, ref, initialFetchDepth)
}
}

async function getChangedFilesFromGit(base: string, initialFetchDepth: number): Promise<File[]> {
const defaultRef = github.context.payload.repository?.default_branch
async function getChangedFilesFromGit(base: string, head: string, initialFetchDepth: number): Promise<File[]> {
const defaultBranch = github.context.payload.repository?.default_branch

const beforeSha =
github.context.eventName === 'push' ? (github.context.payload as Webhooks.WebhookPayloadPush).before : null

const pushRef =
git.getShortName(github.context.ref) ||
(core.warning(`'ref' field is missing in PUSH event payload - using current branch, tag or commit SHA`),
await git.getCurrentRef())
const currentRef = await git.getCurrentRef()

const baseRef = git.getShortName(base) || defaultRef
if (!baseRef) {
head = git.getShortName(head || github.context.ref || currentRef)
base = git.getShortName(base || defaultBranch)

if (!head) {
throw new Error(
"This action requires 'head' input to be configured, 'ref' to be set in the event payload or branch/tag checked out in current git repository"
)
}

if (!base) {
throw new Error(
"This action requires 'base' input to be configured or 'repository.default_branch' to be set in the event payload"
)
}

const isBaseRefSha = git.isGitSha(baseRef)
const isBaseSameAsPush = baseRef === pushRef
const isBaseSha = git.isGitSha(base)
const isBaseSameAsHead = base === head

// If base is commit SHA will do comparison against the referenced commit
// Or If base references same branch it was pushed to, we will do comparison against the previously pushed commit
if (isBaseRefSha || isBaseSameAsPush) {
if (!isBaseRefSha && !beforeSha) {
core.warning(`'before' field is missing in PUSH event payload - changes will be detected from last commit`)
// If base is commit SHA we will do comparison against the referenced commit
// Or if base references same branch it was pushed to, we will do comparison against the previously pushed commit
if (isBaseSha || isBaseSameAsHead) {
const baseSha = isBaseSha ? base : beforeSha
if (!baseSha) {
core.warning(`'before' field is missing in event payload - changes will be detected from last commit`)
if (head !== currentRef) {
core.warning(`Ref ${head} is not checked out - results might be incorrect!`)
}
return await git.getChangesInLastCommit()
}

const baseSha = isBaseRefSha ? baseRef : beforeSha
// If there is no previously pushed commit,
// we will do comparison against the default branch or return all as added
if (baseSha === git.NULL_SHA) {
if (defaultRef && baseRef !== defaultRef) {
core.info(`First push of a branch detected - changes will be detected against the default branch ${defaultRef}`)
return await git.getChangesSinceMergeBase(defaultRef, initialFetchDepth)
if (defaultBranch && base !== defaultBranch) {
core.info(
`First push of a branch detected - changes will be detected against the default branch ${defaultBranch}`
)
return await git.getChangesSinceMergeBase(defaultBranch, head, initialFetchDepth)
} else {
core.info('Initial push detected - all files will be listed as added')
if (head !== currentRef) {
core.warning(`Ref ${head} is not checked out - results might be incorrect!`)
}
return await git.listAllFilesAsAdded()
}
}

core.info(`Changes will be detected against commit (${baseSha})`)
return await git.getChanges(baseSha)
core.info(`Changes will be detected between ${baseSha} and ${head}`)
return await git.getChanges(baseSha, head)
}

// Changes introduced by current branch against the base branch
core.info(`Changes will be detected against the branch ${baseRef}`)
return await git.getChangesSinceMergeBase(baseRef, initialFetchDepth)
core.info(`Changes will be detected between ${base} and ${head}`)
return await git.getChangesSinceMergeBase(base, head, initialFetchDepth)
}

// Uses github REST api to get list of files changed in PR
async function getChangedFilesFromApi(
token: string,
pullRequest: Webhooks.WebhookPayloadPullRequestPullRequest
prNumber: Webhooks.WebhookPayloadPullRequestPullRequest
): Promise<File[]> {
core.info(`Fetching list of changed files for PR#${pullRequest.number} from Github API`)
const client = new github.GitHub(token)
const pageSize = 100
const files: File[] = []
for (let page = 0; page * pageSize < pullRequest.changed_files; page++) {
const response = await client.pulls.listFiles({
owner: github.context.repo.owner,
repo: github.context.repo.repo,
pull_number: pullRequest.number,
page,
per_page: pageSize
})
for (const row of response.data) {
// There's no obvious use-case for detection of renames
// Therefore we treat it as if rename detection in git diff was turned off.
// Rename is replaced by delete of original filename and add of new filename
if (row.status === ChangeStatus.Renamed) {
files.push({
filename: row.filename,
status: ChangeStatus.Added
})
files.push({
// 'previous_filename' for some unknown reason isn't in the type definition or documentation
filename: (<any>row).previous_filename as string,
status: ChangeStatus.Deleted
})
} else {
files.push({
filename: row.filename,
status: row.status as ChangeStatus
})
core.startGroup(`Fetching list of changed files for PR#${prNumber.number} from Github API`)
try {
const client = new github.GitHub(token)
const per_page = 100
const files: File[] = []

for (let page = 1; ; page++) {
core.info(`Invoking listFiles(pull_number: ${prNumber.number}, page: ${page}, per_page: ${per_page})`)
const response = await client.pulls.listFiles({
owner: github.context.repo.owner,
repo: github.context.repo.repo,
pull_number: prNumber.number,
per_page,
page
})

if (response.status !== 200) {
throw new Error(`Fetching list of changed files from GitHub API failed with error code ${response.status}`)
}

core.info(`Received ${response.data.length} items`)
if (response.data.length === 0) {
core.info('All changed files has been fetched from GitHub API')
break
}

for (const row of response.data) {
core.info(`[${row.status}] ${row.filename}`)
// There's no obvious use-case for detection of renames
// Therefore we treat it as if rename detection in git diff was turned off.
// Rename is replaced by delete of original filename and add of new filename
if (row.status === ChangeStatus.Renamed) {
files.push({
filename: row.filename,
status: ChangeStatus.Added
})
files.push({
// 'previous_filename' for some unknown reason isn't in the type definition or documentation
filename: (<any>row).previous_filename as string,
status: ChangeStatus.Deleted
})
} else {
// Github status and git status variants are same except for deleted files
const status = row.status === 'removed' ? ChangeStatus.Deleted : (row.status as ChangeStatus)
files.push({
filename: row.filename,
status
})
}
}
}
}

return files
return files
} finally {
core.endGroup()
}
}

function exportResults(results: FilterResults, format: ExportFormat): void {
core.info('Results:')
const changes = []
for (const [key, files] of Object.entries(results)) {
const value = files.length > 0
core.startGroup(`Filter ${key} = ${value}`)
if (files.length > 0) {
changes.push(key)
core.info('Matching files:')
for (const file of files) {
core.info(`${file.filename} [${file.status}]`)
@@ -176,19 +235,32 @@ function exportResults(results: FilterResults, format: ExportFormat): void {
}

core.setOutput(key, value)
core.setOutput(`${key}_count`, files.length)
if (format !== 'none') {
const filesValue = serializeExport(files, format)
core.setOutput(`${key}_files`, filesValue)
}
core.endGroup()
}

if (results['changes'] === undefined) {
const changesJson = JSON.stringify(changes)
core.info(`Changes output set to ${changesJson}`)
core.setOutput('changes', changesJson)
} else {
core.info('Cannot set changes output variable - name already used by filter output')
}
core.endGroup()
}

function serializeExport(files: File[], format: ExportFormat): string {
const fileNames = files.map(file => file.filename)
switch (format) {
case 'csv':
return fileNames.map(csvEscape).join(',')
case 'json':
return JSON.stringify(fileNames)
case 'escape':
return fileNames.map(backslashEscape).join(' ')
case 'shell':
return fileNames.map(shellEscape).join(' ')
default:
@@ -197,7 +269,7 @@ function serializeExport(files: File[], format: ExportFormat): string {
}

function isExportFormat(value: string): value is ExportFormat {
return value === 'none' || value === 'shell' || value === 'json'
return ['none', 'csv', 'shell', 'json', 'escape'].includes(value)
}

run()
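Assuming the new list-format modules compile as shown above, a hypothetical check of how the serialization formats used by serializeExport differ for the same file list (expected values shown in the comments; the file names are made up):

import {csvEscape} from './list-format/csv-escape'
import {backslashEscape, shellEscape} from './list-format/shell-escape'

const names = ['src/main.ts', 'my file.txt']
console.log(names.map(csvEscape).join(','))        // src/main.ts,"my file.txt"
console.log(names.map(shellEscape).join(' '))      // src/main.ts 'my file.txt'
console.log(names.map(backslashEscape).join(' '))  // src/main.ts my\ file.txt
console.log(JSON.stringify(names))                 // ["src/main.ts","my file.txt"]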
@@ -1,7 +0,0 @@
// Credits to https://github.com/xxorax/node-shell-escape

export default function shellEscape(value: string): string {
return `'${value.replace(/'/g, "'\\''")}'`
.replace(/^(?:'')+/g, '') // unduplicate single-quote at the beginning
.replace(/\\'''/g, "\\'") // remove non-escaped single-quote if there are enclosed between 2 escaped
}
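For comparison, the practical difference between this removed default export and the new list-format/shell-escape helper on a simple name (illustrative only):

// Old default export (removed above): always wraps the value in single quotes
//   shellEscape('src/main.ts')  // => 'src/main.ts'
// New list-format/shell-escape: names made of safe characters pass through untouched
//   shellEscape('src/main.ts')  // => src/main.ts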