Mirror of https://gitea.com/actions/dorny-paths-filter.git, synced 2025-12-23 23:48:20 +00:00
Compare commits

61 commits (SHA1):

e5b96fe4da, a339507743, febe8330ca, b5fa2d5c02, e2bed85912, 7c0f15b688, cbc3287af3, a2730492f0,
c2766acabb, 363576b9ea, b1a097ef7b, 2c79a825c0, 4e7fcc37b4, c506bed1ae, 9b7572ffb2, 9e8c9af501,
84e1697bff, e4d886f503, ada1eee648, 44ac6d8e25, 3c5b7d242c, eb75a1edc1, 181b35e268, 1934d574ce,
d599443ba5, eb8fe2c24b, dec8b8030e, 785a14adbe, e84bc6af29, b4eabb6049, 550eb4925d, 5282566eab,
804ec66d7a, b37d4e9e86, 7b5334ddb5, 75cbfb4be9, 9bd03c0d68, 9553dabbd8, 8b399ed168, ff5bb057bf,
d9e86af7c0, adb239d623, 1f7b23edeb, bfc5803f5e, b4f32c4f08, 81c90ccae8, 3f845744aa, 483986d0a7,
3b817c9974, b365bd8768, f34047f516, 1ff702da35, caef9bef1f, 14dd70c742, e1ae9889cb, 83deb9f037,
7d201829e2, 4eb15bc267, 9ef7936e79, affb29871a, 910e8b1235
.editorconfig (new file, +9 lines)
@@ -0,0 +1,9 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
charset = utf-8
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
ESLint configuration (rule changes)
@@ -25,7 +25,7 @@
|
||||
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
|
||||
"@typescript-eslint/no-array-constructor": "error",
|
||||
"@typescript-eslint/no-empty-interface": "error",
|
||||
"@typescript-eslint/no-explicit-any": "error",
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"@typescript-eslint/no-extraneous-class": "error",
|
||||
"@typescript-eslint/no-for-in-array": "error",
|
||||
"@typescript-eslint/no-inferrable-types": "error",
|
||||
@@ -42,7 +42,7 @@
|
||||
"@typescript-eslint/prefer-includes": "error",
|
||||
"@typescript-eslint/prefer-interface": "error",
|
||||
"@typescript-eslint/prefer-string-starts-ends-with": "error",
|
||||
"@typescript-eslint/promise-function-async": "error",
|
||||
"@typescript-eslint/promise-function-async": ["error", { "allowAny": true }],
|
||||
"@typescript-eslint/require-array-sort-compare": "error",
|
||||
"@typescript-eslint/restrict-plus-operands": "error",
|
||||
"semi": "off",
|
||||
|
||||
.gitattributes (vendored, new file, +1 line)
@@ -0,0 +1 @@
|
||||
* text=auto eol=lf
|
||||
.github/workflows/build.yml (vendored, 13 lines changed)
@@ -1,6 +1,7 @@
|
||||
name: "Build"
|
||||
on:
|
||||
push:
|
||||
paths-ignore: [ '*.md' ]
|
||||
branches:
|
||||
- master
|
||||
|
||||
@@ -12,3 +13,15 @@ jobs:
|
||||
- run: |
|
||||
npm install
|
||||
npm run all
|
||||
|
||||
self-test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: ./
|
||||
id: filter
|
||||
with:
|
||||
filters: '.github/filters.yml'
|
||||
- name: filter-test
|
||||
if: steps.filter.outputs.any != 'true' || steps.filter.outputs.error == 'true'
|
||||
run: exit 1
|
||||
|
||||
.github/workflows/pull-request-verification.yml (vendored, 85 lines changed)
@@ -1,11 +1,10 @@
|
||||
name: "Pull Request Verification"
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
paths-ignore: [ '*.md' ]
|
||||
branches:
|
||||
- master
|
||||
- develop
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@@ -31,6 +30,9 @@ jobs:
|
||||
- name: filter-test
|
||||
if: steps.filter.outputs.any != 'true' || steps.filter.outputs.error == 'true'
|
||||
run: exit 1
|
||||
- name: changes-test
|
||||
if: contains(fromJSON(steps.filter.outputs.changes), 'error') || !contains(fromJSON(steps.filter.outputs.changes), 'any')
|
||||
run: exit 1
|
||||
|
||||
test-external:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -56,3 +58,80 @@ jobs:
|
||||
- name: filter-test
|
||||
if: steps.filter.outputs.any != 'true' || steps.filter.outputs.error == 'true'
|
||||
run: exit 1
|
||||
|
||||
test-wd-without-token:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
path: somewhere
|
||||
- uses: ./somewhere
|
||||
id: filter
|
||||
with:
|
||||
token: ''
|
||||
working-directory: somewhere
|
||||
filters: '.github/filters.yml'
|
||||
- name: filter-test
|
||||
if: steps.filter.outputs.any != 'true' || steps.filter.outputs.error == 'true'
|
||||
run: exit 1
|
||||
|
||||
test-local-changes:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: echo "NEW FILE" > local
|
||||
- run: git add local
|
||||
- uses: ./
|
||||
id: filter
|
||||
with:
|
||||
base: HEAD
|
||||
filters: |
|
||||
local:
|
||||
- local
|
||||
- name: filter-test
|
||||
if: steps.filter.outputs.local != 'true'
|
||||
run: exit 1
|
||||
- name: count-test
|
||||
if: steps.filter.outputs.local_count != 1
|
||||
run: exit 1
|
||||
|
||||
test-change-type:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: configure GIT user
|
||||
run: git config user.email "john@nowhere.local" && git config user.name "John Doe"
|
||||
- name: modify working tree
|
||||
run: touch add.txt && rm README.md && echo "TEST" > LICENSE
|
||||
- name: commit changes
|
||||
run: git add -A && git commit -a -m 'testing this action'
|
||||
- uses: ./
|
||||
id: filter
|
||||
with:
|
||||
token: ''
|
||||
list-files: shell
|
||||
filters: |
|
||||
added:
|
||||
- added: "add.txt"
|
||||
deleted:
|
||||
- deleted: "README.md"
|
||||
modified:
|
||||
- modified: "LICENSE"
|
||||
any:
|
||||
- added|deleted|modified: "*"
|
||||
- name: Print 'added_files'
|
||||
run: echo ${{steps.filter.outputs.added_files}}
|
||||
- name: Print 'modified_files'
|
||||
run: echo ${{steps.filter.outputs.modified_files}}
|
||||
- name: Print 'deleted_files'
|
||||
run: echo ${{steps.filter.outputs.deleted_files}}
|
||||
- name: filter-test
|
||||
if: |
|
||||
steps.filter.outputs.added != 'true'
|
||||
|| steps.filter.outputs.deleted != 'true'
|
||||
|| steps.filter.outputs.modified != 'true'
|
||||
|| steps.filter.outputs.any != 'true'
|
||||
|| steps.filter.outputs.added_files != 'add.txt'
|
||||
|| steps.filter.outputs.modified_files != 'LICENSE'
|
||||
|| steps.filter.outputs.deleted_files != 'README.md'
|
||||
run: exit 1
|
||||
|
||||
CHANGELOG.md (new file, +70 lines)
@@ -0,0 +1,70 @@
|
||||
# Changelog
|
||||
|
||||
## v2.9.0
|
||||
- [Add list-files: csv format](https://github.com/dorny/paths-filter/pull/68)
|
||||
|
||||
## v2.8.0
|
||||
- [Add count output variable](https://github.com/dorny/paths-filter/pull/65)
|
||||
- [Fix log grouping of changes](https://github.com/dorny/paths-filter/pull/61)
|
||||
|
||||
## v2.7.0
|
||||
- [Add "changes" output variable to support matrix job configuration](https://github.com/dorny/paths-filter/pull/59)
|
||||
- [Improved listing of matching files with `list-files: shell` and `list-files: escape` options](https://github.com/dorny/paths-filter/pull/58)
|
||||
|
||||
## v2.6.0
|
||||
- [Support local changes](https://github.com/dorny/paths-filter/pull/53)
|
||||
|
||||
## v2.5.3
|
||||
- [Fixed mapping of removed/deleted change status from github API](https://github.com/dorny/paths-filter/pull/51)
|
||||
- [Fixed retrieval of all changes via Github API when there are 100+ changes](https://github.com/dorny/paths-filter/pull/50)
|
||||
|
||||
## v2.5.2
|
||||
- [Add support for multiple patterns when using file status](https://github.com/dorny/paths-filter/pull/48)
|
||||
- [Use picomatch directly instead of micromatch wrapper](https://github.com/dorny/paths-filter/pull/49)
|
||||
|
||||
## v2.5.1
|
||||
- [Improved path matching with micromatch](https://github.com/dorny/paths-filter/pull/46)
|
||||
|
||||
## v2.5.0
|
||||
- [Support workflows triggered by any event](https://github.com/dorny/paths-filter/pull/44)
|
||||
|
||||
## v2.4.2
|
||||
- [Fixed compatibility with older (<2.23) versions of git](https://github.com/dorny/paths-filter/pull/42)
|
||||
|
||||
## v2.4.0
|
||||
- [Support pushes of tags or when tag is used as base](https://github.com/dorny/paths-filter/pull/40)
|
||||
- [Use git log to detect changes from PRs merge commit if token is not available](https://github.com/dorny/paths-filter/pull/40)
|
||||
- [Support local execution with act](https://github.com/dorny/paths-filter/pull/40)
|
||||
- [Improved processing of repository initial push](https://github.com/dorny/paths-filter/pull/40)
|
||||
- [Improved processing of first push of new branch](https://github.com/dorny/paths-filter/pull/40)
|
||||
|
||||
|
||||
## v2.3.0
|
||||
- [Improved documentation](https://github.com/dorny/paths-filter/pull/37)
|
||||
- [Change detection using git "three dot" diff](https://github.com/dorny/paths-filter/pull/35)
|
||||
- [Export files matching filter](https://github.com/dorny/paths-filter/pull/32)
|
||||
- [Extend filter syntax with optional specification of file status: add, modified, deleted](https://github.com/dorny/paths-filter/pull/22)
|
||||
- [Add working-directory input](https://github.com/dorny/paths-filter/pull/21)
|
||||
|
||||
## v2.2.1
|
||||
- [Add support for pull_request_target](https://github.com/dorny/paths-filter/pull/29)
|
||||
|
||||
## v2.2.0
|
||||
- [Improve change detection for feature branches](https://github.com/dorny/paths-filter/pull/16)
|
||||
|
||||
## v2.1.0
|
||||
- [Support reusable paths blocks with yaml anchors](https://github.com/dorny/paths-filter/pull/13)
|
||||
|
||||
## v2.0.0
|
||||
- [Added support for workflows triggered by push events](https://github.com/dorny/paths-filter/pull/10)
|
||||
- Action and repository renamed to paths-filter - original name doesn't make sense anymore
|
||||
|
||||
## v1.1.0
|
||||
- [Allows filters to be specified in own .yml file](https://github.com/dorny/paths-filter/pull/8)
|
||||
- [Adds alternative change detection using git fetch and git diff-index](https://github.com/dorny/paths-filter/pull/9)
|
||||
|
||||
## v1.0.1
|
||||
Updated dependencies - fixes github security alert
|
||||
|
||||
## v1.0.0
|
||||
First official release uploaded to marketplace.
|
||||
LICENSE (2 lines changed)
@@ -1,7 +1,7 @@
|
||||
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2018 GitHub, Inc. and contributors
|
||||
Copyright (c) 2020 Michal Dorner and contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
||||
README.md (521 lines changed)
@@ -1,72 +1,187 @@
|
||||
<p align="center">
|
||||
<a href="https://github.com/dorny/pr-changed-files-filter/actions"><img alt="typescript-action status" src="https://github.com/dorny/pr-changed-files-filter/workflows/Build/badge.svg"></a>
|
||||
</p>
|
||||
# Paths Changes Filter
|
||||
|
||||
**CAUTION**: This action can be only used in a workflow triggered by `pull_request` event.
|
||||
This [Github Action](https://github.com/features/actions) enables conditional execution of workflow steps and jobs,
|
||||
based on the files modified by a pull request, in a feature branch, or in pushed commits.
|
||||
|
||||
# Pull request changed files filter
|
||||
It saves time and resources especially in monorepo setups, where you can run slow tasks (e.g. integration tests or deployments) only for changed components.
|
||||
Github workflows built-in [path filters](https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#onpushpull_requestpaths)
|
||||
don't allow this because they don't work on a level of individual jobs or steps.
|
||||
|
||||
This [Github Action](https://github.com/features/actions) enables conditional execution of workflow job steps considering which files are modified by a pull request.
|
||||
|
||||
It saves time and resources especially in monorepo setups, where you can run slow tasks (e.g. integration tests) only for changed components.
|
||||
Github workflows built-in
|
||||
[path filters](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#onpushpull_requestpaths)
|
||||
don't allow this because they don't work on the level of individual jobs or steps.
|
||||
|
||||
## Usage
|
||||
|
||||
The action accepts filter rules in the YAML format.
|
||||
Each filter rule is a list of [glob expressions](https://github.com/isaacs/minimatch).
|
||||
Corresponding output variable will be created to indicate if there's a changed file matching any of the rule glob expressions.
|
||||
Output variables can be later used in the `if` clause to conditionally run specific steps.
|
||||
|
||||
### Inputs
|
||||
- **`token`**: GitHub Access Token - defaults to `${{ github.token }}`
|
||||
- **`filters`**: Path to the configuration file or directly embedded string in YAML format. Filter configuration is a dictionary, where keys specify rule names and values are lists of file path patterns.
|
||||
|
||||
### Outputs
|
||||
- For each rule it sets output variable named by the rule to text:
|
||||
- `'true'` - if **any** of changed files matches any of rule patterns
|
||||
- `'false'` - if **none** of changed files matches any of rule patterns
|
||||
**Real world usage examples:**
|
||||
- [sentry.io](https://sentry.io/) - [backend-test-py3.6.yml](https://github.com/getsentry/sentry/blob/ca0e43dc5602a9ab2e06d3f6397cc48fb5a78541/.github/workflows/backend-test-py3.6.yml#L32)
|
||||
- [GoogleChrome/web.dev](https://web.dev/) - [lint-and-test-workflow.yml](https://github.com/GoogleChrome/web.dev/blob/e1f0c28964e99ce6a996c1e3fd3ee1985a7a04f6/.github/workflows/lint-and-test-workflow.yml#L33)
|
||||
|
||||
|
||||
### Notes
|
||||
- minimatch [dot](https://www.npmjs.com/package/minimatch#dot) option is set to true - therefore
globbing will also match paths where a file or folder name starts with a dot.
|
||||
## Supported workflows:
|
||||
- **Pull requests:**
|
||||
- Workflow triggered by **[pull_request](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#pull_request)**
|
||||
or **[pull_request_target](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#pull_request_target)** event
|
||||
- Changes are detected against the pull request base branch
|
||||
- Uses Github REST API to fetch list of modified files
|
||||
- **Feature branches:**
|
||||
- Workflow triggered by **[push](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#push)**
|
||||
or any other **[event](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows)**
|
||||
- The `base` input parameter must not be the same as the branch that triggered the workflow
|
||||
- Changes are detected against the merge-base with configured base branch or default branch
|
||||
- Uses git commands to detect changes - repository must be already [checked out](https://github.com/actions/checkout)
|
||||
- **Master, Release or other long-lived branches:**
|
||||
- Workflow triggered by **[push](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#push)** event
|
||||
when the `base` input parameter is the same as the branch that triggered the workflow:
|
||||
- Changes are detected against the most recent commit on the same branch before the push
|
||||
- Workflow triggered by any other **[event](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows)**
|
||||
when the `base` input parameter is a commit SHA:
|
||||
- Changes are detected against the provided `base` commit
|
||||
- Workflow triggered by any other **[event](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows)**
|
||||
when the `base` input parameter is the same as the branch that triggered the workflow:
|
||||
- Changes are detected from last commit
|
||||
- Uses git commands to detect changes - repository must be already [checked out](https://github.com/actions/checkout)
|
||||
- **Local changes**
|
||||
- Workflow triggered by any event when `base` input parameter is set to `HEAD`
|
||||
- Changes are detected against current HEAD
|
||||
- Untracked files are ignored
|
||||
|
||||
### Sample workflow
|
||||
## Example
|
||||
```yaml
|
||||
name: Build verification
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: changes
|
||||
with:
|
||||
filters: |
|
||||
src:
|
||||
- 'src/**'
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
branches:
|
||||
- master
|
||||
# run only if some file in 'src' folder was changed
|
||||
if: steps.changes.outputs.src == 'true'
|
||||
run: ...
|
||||
```
|
||||
For more scenarios see [examples](#examples) section.
|
||||
|
||||
## Notes:
|
||||
- Paths expressions are evaluated using [picomatch](https://github.com/micromatch/picomatch) library.
|
||||
Documentation for path expression format can be found on project github page.
|
||||
- Picomatch [dot](https://github.com/micromatch/picomatch#options) option is set to true.
|
||||
Globbing will also match paths where a file or folder name starts with a dot.
|
||||
- It's recommended to quote your path expressions with `'` or `"`. Otherwise you will get an error if the expression starts with `*`.
|
||||
- Local execution with [act](https://github.com/nektos/act) works only with an alternative runner image, because the default image doesn't include the `git` binary.
|
||||
- Use: `act -P ubuntu-latest=nektos/act-environments-ubuntu:18.04`
|
||||
|
||||
|
||||
# What's New
|
||||
- Add `list-files: csv` format
|
||||
- Configure matrix job to run for each folder with changes using `changes` output
|
||||
- Improved listing of matching files with `list-files: shell` and `list-files: escape` options
|
||||
- Support local changes
|
||||
- Fixed retrieval of all changes via Github API when there are 100+ changes
|
||||
- Paths expressions are now evaluated using [picomatch](https://github.com/micromatch/picomatch) library
|
||||
- Support workflows triggered by any event
|
||||
|
||||
For more information see [CHANGELOG](https://github.com/dorny/paths-filter/blob/master/CHANGELOG.md)
|
||||
|
||||
# Usage
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
with:
|
||||
# Defines filters applied to detected changed files.
|
||||
# Each filter has a name and list of rules.
|
||||
# Rule is a glob expression - paths of all changed
|
||||
# files are matched against it.
|
||||
# Rule can optionally specify if the file
|
||||
# should be added, modified or deleted.
|
||||
# For each filter there will be corresponding output variable to
|
||||
# indicate if there's a changed file matching any of the rules.
|
||||
# Optionally there can be a second output variable
|
||||
# set to list of all files matching the filter.
|
||||
# Filters can be provided inline as a string (containing valid YAML document)
|
||||
# or as a relative path to separate file (e.g.: .github/filters.yaml).
|
||||
# Multiline string is evaluated as embedded filter definition,
|
||||
# single line string is evaluated as relative path to separate file.
|
||||
# Filters syntax is documented by example - see examples section.
|
||||
filters: ''
|
||||
|
||||
# Branch, tag or commit SHA against which the changes will be detected.
|
||||
# If it references same branch it was pushed to,
|
||||
# changes are detected against the most recent commit before the push.
|
||||
# Otherwise it uses git merge-base to find best common ancestor between
|
||||
# current branch (HEAD) and base.
|
||||
# When merge-base is found, it's used for change detection - only changes
|
||||
# introduced by current branch are considered.
|
||||
# All files are considered as added if there is no common ancestor with
|
||||
# base branch or no previous commit.
|
||||
# This option is ignored if action is triggered by pull_request event.
|
||||
# Default: repository default branch (e.g. master)
|
||||
base: ''
|
||||
|
||||
# How many commits are initially fetched from base branch.
|
||||
# If needed, each subsequent fetch doubles the
|
||||
# previously requested number of commits until the merge-base
|
||||
# is found or there are no more commits in the history.
|
||||
# This option takes effect only when changes are detected
|
||||
# using git against base branch (feature branch workflow).
|
||||
# Default: 20 (see the fetch-depth sketch after this code block)
|
||||
initial-fetch-depth: ''
|
||||
|
||||
# Enables listing of files matching the filter:
|
||||
# 'none' - Disables listing of matching files (default).
|
||||
# 'csv' - Comma-separated list of filenames.
|
||||
# If needed it uses double quotes to wrap filename with unsafe characters.
|
||||
# 'json' - Matching files paths are formatted as JSON array.
|
||||
# 'shell' - Space delimited list usable as command line argument list in Linux shell.
|
||||
# If needed it uses single or double quotes to wrap filename with unsafe characters.
|
||||
# 'escape'- Space delimited list usable as command line argument list in Linux shell.
|
||||
# Backslash escapes every potentially unsafe character.
|
||||
# Default: none
|
||||
list-files: ''
|
||||
|
||||
# Relative path under $GITHUB_WORKSPACE where the repository was checked out.
|
||||
working-directory: ''
|
||||
|
||||
# Personal access token used to fetch list of changed files
|
||||
# from Github REST API.
|
||||
# It's used only if action is triggered by pull request event.
|
||||
# Github token from workflow context is used as default value.
|
||||
# If empty string is provided, action falls back to detect
|
||||
# changes using git commands.
|
||||
# Default: ${{ github.token }}
|
||||
token: ''
|
||||
```
|
||||
|
||||
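The `initial-fetch-depth` doubling strategy described in the usage block above can be sketched as follows. This is a minimal illustration using plain `git` CLI calls via `execSync`; the `findMergeBase` helper and the exact commands are assumptions, not the action's actual implementation.

```ts
import {execSync} from 'child_process'

const run = (cmd: string): string => execSync(cmd).toString().trim()

// Fetch the base ref with increasing depth until a merge-base with HEAD is found,
// or the base branch history is exhausted.
function findMergeBase(baseRef: string, initialFetchDepth: number): string | null {
  let depth = initialFetchDepth
  let lastCount = -1
  for (;;) {
    run(`git fetch --depth=${depth} origin ${baseRef}`)
    try {
      // Succeeds once a common ancestor is present in the local history
      return run('git merge-base HEAD FETCH_HEAD')
    } catch {
      // Not found yet: stop if deepening no longer adds commits, otherwise double the depth
      const count = Number(run('git rev-list --count FETCH_HEAD'))
      if (count === lastCount) return null
      lastCount = count
      depth *= 2
    }
  }
}

// Example: findMergeBase('develop', 20)
```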
## Outputs
|
||||
- For each filter it sets an output variable named after the filter to the text:
|
||||
- `'true'` - if **any** of changed files matches any of filter rules
|
||||
- `'false'` - if **none** of changed files matches any of filter rules
|
||||
- For each filter it sets an output variable with the name `${FILTER_NAME}_count` to the count of matching files.
- If enabled, for each filter it sets an output variable with the name `${FILTER_NAME}_files` containing a list of all files matching the filter.
|
||||
- `changes` - JSON array with names of all filters matching any of changed files.
|
||||
|
||||
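A minimal sketch of how these outputs can be produced with `@actions/core`; the `FilterResults` shape and `setOutputs` helper are illustrative, not the action's real internals.

```ts
import * as core from '@actions/core'

interface FilterResults {
  [filterName: string]: string[] // files matching each filter
}

function setOutputs(results: FilterResults, listFiles: boolean): void {
  const matchingFilters: string[] = []
  for (const [name, files] of Object.entries(results)) {
    const matched = files.length > 0
    if (matched) matchingFilters.push(name)
    core.setOutput(name, String(matched))          // 'true' / 'false'
    core.setOutput(`${name}_count`, files.length)  // ${FILTER_NAME}_count
    // ${FILTER_NAME}_files - here space-delimited, as with `list-files: shell`
    if (listFiles) core.setOutput(`${name}_files`, files.join(' '))
  }
  // JSON array with names of all filters matching any changed file
  core.setOutput('changes', JSON.stringify(matchingFilters))
}
```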
# Examples
|
||||
|
||||
## Conditional execution
|
||||
|
||||
<details>
|
||||
<summary>Execute <b>step</b> in a workflow job only if some file in a subfolder is changed</summary>
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
tests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: dorny/pr-changed-files-filter@v1
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
# inline YAML or path to separate file (e.g.: .github/filters.yaml)
|
||||
filters: |
|
||||
backend:
|
||||
- 'backend/**/*'
|
||||
- 'backend/**'
|
||||
frontend:
|
||||
- 'frontend/**/*'
|
||||
- 'frontend/**'
|
||||
|
||||
# run only if 'backend' files were changed
|
||||
- name: backend unit tests
|
||||
- name: backend tests
|
||||
if: steps.filter.outputs.backend == 'true'
|
||||
run: ...
|
||||
|
||||
# run only if 'frontend' files were changed
|
||||
- name: frontend unit tests
|
||||
- name: frontend tests
|
||||
if: steps.filter.outputs.frontend == 'true'
|
||||
run: ...
|
||||
|
||||
@@ -75,26 +190,304 @@ jobs:
|
||||
if: steps.filter.outputs.backend == 'true' || steps.filter.outputs.frontend == 'true'
|
||||
run: ...
|
||||
```
|
||||
</details>
|
||||
|
||||
## How it works
|
||||
<details>
|
||||
<summary>Execute <b>job</b> in a workflow only if some file in a subfolder is changed</summary>
|
||||
|
||||
1. Required inputs are checked (`filters`)
|
||||
2. If token was provided, it's used to fetch list of changed files from Github API.
|
||||
3. If token was not provided, base branch is fetched and changed files are detected using `git diff-index` command.
|
||||
4. For each filter rule it checks if there is any matching file
|
||||
5. Output variables are set
|
||||
```yml
|
||||
jobs:
|
||||
# JOB to run change detection
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
# Set job outputs to values from filter step
|
||||
outputs:
|
||||
backend: ${{ steps.filter.outputs.backend }}
|
||||
frontend: ${{ steps.filter.outputs.frontend }}
|
||||
steps:
|
||||
# For pull requests it's not necessary to checkout the code
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
filters: |
|
||||
backend:
|
||||
- 'backend/**'
|
||||
frontend:
|
||||
- 'frontend/**'
|
||||
|
||||
## Difference from related projects:
|
||||
# JOB to build and test backend code
|
||||
backend:
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.backend == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- ...
|
||||
|
||||
- [Has Changed Path](https://github.com/MarceloPrado/has-changed-path)
|
||||
- detects changes from previous commit
|
||||
- you have to configure `checkout` action to fetch some number of previous commits
|
||||
- `git diff` is used for change detection
|
||||
- outputs only single `true` / `false` value if any of provided paths contains changes
|
||||
- [Changed Files Exporter](https://github.com/futuratrepadeira/changed-files)
|
||||
- outputs lists with paths of created, updated and deleted files
|
||||
- output is not directly usable in the `if` clause
|
||||
- [Changed File Filter](https://github.com/tony84727/changed-file-filter)
|
||||
- allows change detection between any refs or commits
|
||||
- fetches whole history of your git repository
|
||||
- might have negative performance impact on big repositories (github by default fetches only single commit)
|
||||
# JOB to build and test frontend code
|
||||
frontend:
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.frontend == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- ...
|
||||
```
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Use change detection to configure matrix job</summary>
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
# JOB to run change detection
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
# Expose matched filters as job 'packages' output variable
|
||||
packages: ${{ steps.filter.outputs.changes }}
|
||||
steps:
|
||||
# For pull requests it's not necessary to checkout the code
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
filters: |
|
||||
package1: src/package1
|
||||
package2: src/package2
|
||||
|
||||
# JOB to build and test each of modified packages
|
||||
build:
|
||||
needs: changes
|
||||
strategy:
|
||||
matrix:
|
||||
# Parse JSON array containing names of all filters matching any of changed files
|
||||
# e.g. ['package1', 'package2'] if both package folders contain changes
|
||||
package: ${{ fromJson(needs.changes.outputs.packages) }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- ...
|
||||
```
|
||||
</details>
|
||||
|
||||
## Change detection workflows
|
||||
|
||||
<details>
|
||||
<summary><b>Pull requests:</b> Detect changes against PR base branch</summary>
|
||||
|
||||
```yaml
|
||||
on:
|
||||
pull_request:
|
||||
branches: # PRs to following branches will trigger the workflow
|
||||
- master
|
||||
- develop
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
filters: ... # Configure your filters
|
||||
```
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Feature branch:</b> Detect changes against configured base branch</summary>
|
||||
|
||||
```yaml
|
||||
on:
|
||||
push:
|
||||
branches: # Push to following branches will trigger the workflow
|
||||
- feature/**
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
# This may save additional git fetch roundtrip if
|
||||
# merge-base is found within latest 20 commits
|
||||
fetch-depth: 20
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
base: develop # Change detection against merge-base with this branch
|
||||
filters: ... # Configure your filters
|
||||
```
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Long lived branches:</b> Detect changes against the most recent commit on the same branch before the push</summary>
|
||||
|
||||
```yaml
|
||||
on:
|
||||
push:
|
||||
branches: # Push to following branches will trigger the workflow
|
||||
- master
|
||||
- develop
|
||||
- release/**
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
# Use context to get branch where commits were pushed.
|
||||
# If there is only one long lived branch (e.g. master),
|
||||
# you can specify it directly.
|
||||
# If it's not configured, the repository default branch is used.
|
||||
base: ${{ github.ref }}
|
||||
filters: ... # Configure your filters
|
||||
```
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Local changes:</b> Detect staged and unstaged local changes</summary>
|
||||
|
||||
```yaml
|
||||
on:
|
||||
push:
|
||||
branches: # Push to following branches will trigger the workflow
|
||||
- master
|
||||
- develop
|
||||
- release/**
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
# Some action which modifies files tracked by git (e.g. code linter)
|
||||
- uses: johndoe/some-action@v1
|
||||
|
||||
# Filter to detect which files were modified
|
||||
# Changes could be for example automatically committed
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
base: HEAD
|
||||
filters: ... # Configure your filters
|
||||
```
|
||||
</details>
|
||||
|
||||
## Advanced options
|
||||
|
||||
<details>
|
||||
<summary>Define filter rules in own file</summary>
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
# Path to file where filters are defined
|
||||
filters: .github/filters.yaml
|
||||
```
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Use YAML anchors to reuse path expression(s) inside another rule</summary>
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
# &shared is YAML anchor,
|
||||
# *shared references previously defined anchor
|
||||
# src filter will match any path under common, config and src folders
|
||||
filters: |
|
||||
shared: &shared
|
||||
- common/**
|
||||
- config/**
|
||||
src:
|
||||
- *shared
|
||||
- src/**
|
||||
```
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Consider if file was added, modified or deleted</summary>
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
# Changed file can be 'added', 'modified', or 'deleted'.
|
||||
# By default the type of change is not considered.
|
||||
# Optionally it's possible to specify it using nested
|
||||
# dictionary, where the type(s) of change compose the key.
|
||||
# Multiple change types can be specified using `|` as delimiter.
|
||||
filters: |
|
||||
shared: &shared
|
||||
- common/**
|
||||
- config/**
|
||||
addedOrModified:
|
||||
- added|modified: '**'
|
||||
allChanges:
|
||||
- added|deleted|modified: '**'
|
||||
addedOrModifiedAnchors:
|
||||
- added|modified: *shared
|
||||
```
|
||||
</details>
|
||||
|
||||
|
||||
## Custom processing of changed files
|
||||
|
||||
<details>
|
||||
<summary>Passing list of modified files as command line args in Linux shell</summary>
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
# Enable listing of files matching each filter.
|
||||
# Paths to files will be available in `${FILTER_NAME}_files` output variable.
|
||||
# Paths will be escaped and space-delimited.
|
||||
# Output is usable as command line argument list in Linux shell
|
||||
list-files: shell
|
||||
|
||||
# In this example changed files will be checked by linter.
|
||||
# It doesn't make sense to lint deleted files.
|
||||
# Therefore we specify we are only interested in added or modified files.
|
||||
filters: |
|
||||
markdown:
|
||||
- added|modified: '*.md'
|
||||
- name: Lint Markdown
|
||||
if: ${{ steps.filter.outputs.markdown == 'true' }}
|
||||
run: npx textlint ${{ steps.filter.outputs.markdown_files }}
|
||||
```
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Passing list of modified files as JSON array to another action</summary>
|
||||
|
||||
```yaml
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
# Enable listing of files matching each filter.
|
||||
# Paths to files will be available in `${FILTER_NAME}_files` output variable.
|
||||
# Paths will be formatted as JSON array
|
||||
list-files: json
|
||||
|
||||
# In this example all changed files are passed to following action to do
|
||||
# some custom processing.
|
||||
filters: |
|
||||
changed:
|
||||
- '**'
|
||||
- name: Lint Markdown
|
||||
uses: johndoe/some-action@v1
|
||||
with:
|
||||
files: ${{ steps.filter.outputs.changed_files }}
|
||||
```
|
||||
</details>
|
||||
|
||||
# See also
|
||||
- [test-reporter](https://github.com/dorny/test-reporter) - Displays test results from popular testing frameworks directly in GitHub
|
||||
|
||||
# License
|
||||
|
||||
The scripts and documentation in this project are released under the [MIT License](https://github.com/dorny/paths-filter/blob/master/LICENSE)
|
||||
|
||||
__tests__/csv-escape.test.ts (new file, +23 lines)
@@ -0,0 +1,23 @@
|
||||
import {csvEscape} from '../src/list-format/csv-escape'
|
||||
|
||||
describe('csvEscape() wraps filenames containing unsafe characters in double quotes', () => {
|
||||
test('simple filename should not be modified', () => {
|
||||
expect(csvEscape('file.txt')).toBe('file.txt')
|
||||
})
|
||||
|
||||
test('directory separator should be preserved and not escaped', () => {
|
||||
expect(csvEscape('path/to/file.txt')).toBe('path/to/file.txt')
|
||||
})
|
||||
|
||||
test('filename with spaces should be quoted', () => {
|
||||
expect(csvEscape('file with space')).toBe('"file with space"')
|
||||
})
|
||||
|
||||
test('filename with "," should be quoted', () => {
|
||||
expect(csvEscape('file, with coma')).toBe('"file, with coma"')
|
||||
})
|
||||
|
||||
test('Double quote should be escaped by another double quote', () => {
|
||||
expect(csvEscape('file " with double quote')).toBe('"file "" with double quote"')
|
||||
})
|
||||
})
|
||||
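One possible `csvEscape` implementation consistent with the tests above; it is a sketch, not necessarily what `src/list-format/csv-escape.ts` actually does.

```ts
export function csvEscape(value: string): string {
  // Quote only when needed: comma, double quote, or whitespace in the value
  if (/[",\s]/.test(value)) {
    // RFC 4180 style: wrap in double quotes and double any embedded quote
    return `"${value.replace(/"/g, '""')}"`
  }
  return value
}

// csvEscape('file.txt')                 -> file.txt
// csvEscape('file, with coma')          -> "file, with coma"
// csvEscape('file " with double quote') -> "file "" with double quote"
```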
__tests__/filter.test.ts (new file, +188 lines)
@@ -0,0 +1,188 @@
|
||||
import {Filter} from '../src/filter'
|
||||
import {File, ChangeStatus} from '../src/file'
|
||||
|
||||
describe('yaml filter parsing tests', () => {
|
||||
test('throws if yaml is not a dictionary', () => {
|
||||
const yaml = 'not a dictionary'
|
||||
const t = () => new Filter(yaml)
|
||||
expect(t).toThrow(/^Invalid filter.*/)
|
||||
})
|
||||
test('throws if pattern is not a string', () => {
|
||||
const yaml = `
|
||||
src:
|
||||
- src/**/*.js
|
||||
- dict:
|
||||
some: value
|
||||
`
|
||||
const t = () => new Filter(yaml)
|
||||
expect(t).toThrow(/^Invalid filter.*/)
|
||||
})
|
||||
})
|
||||
|
||||
describe('matching tests', () => {
|
||||
test('matches single inline rule', () => {
|
||||
const yaml = `
|
||||
src: "src/**/*.js"
|
||||
`
|
||||
let filter = new Filter(yaml)
|
||||
const files = modified(['src/app/module/file.js'])
|
||||
const match = filter.match(files)
|
||||
expect(match.src).toEqual(files)
|
||||
})
|
||||
test('matches single rule in single group', () => {
|
||||
const yaml = `
|
||||
src:
|
||||
- src/**/*.js
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const files = modified(['src/app/module/file.js'])
|
||||
const match = filter.match(files)
|
||||
expect(match.src).toEqual(files)
|
||||
})
|
||||
|
||||
test('no match when file is in different folder', () => {
|
||||
const yaml = `
|
||||
src:
|
||||
- src/**/*.js
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const match = filter.match(modified(['not_src/other_file.js']))
|
||||
expect(match.src).toEqual([])
|
||||
})
|
||||
|
||||
test('match only within second groups ', () => {
|
||||
const yaml = `
|
||||
src:
|
||||
- src/**/*.js
|
||||
test:
|
||||
- test/**/*.js
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const files = modified(['test/test.js'])
|
||||
const match = filter.match(files)
|
||||
expect(match.src).toEqual([])
|
||||
expect(match.test).toEqual(files)
|
||||
})
|
||||
|
||||
test('match only withing second rule of single group', () => {
|
||||
const yaml = `
|
||||
src:
|
||||
- src/**/*.js
|
||||
- test/**/*.js
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const files = modified(['test/test.js'])
|
||||
const match = filter.match(files)
|
||||
expect(match.src).toEqual(files)
|
||||
})
|
||||
|
||||
test('matches anything', () => {
|
||||
const yaml = `
|
||||
any:
|
||||
- "**"
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const files = modified(['test/test.js'])
|
||||
const match = filter.match(files)
|
||||
expect(match.any).toEqual(files)
|
||||
})
|
||||
|
||||
test('globbing matches path where file or folder name starts with dot', () => {
|
||||
const yaml = `
|
||||
dot:
|
||||
- "**/*.js"
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const files = modified(['.test/.test.js'])
|
||||
const match = filter.match(files)
|
||||
expect(match.dot).toEqual(files)
|
||||
})
|
||||
|
||||
test('matches all except tsx and less files (negate a group with or-ed parts)', () => {
|
||||
const yaml = `
|
||||
backend:
|
||||
- '!(**/*.tsx|**/*.less)'
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const tsxFiles = modified(['src/ui.tsx'])
|
||||
const lessFiles = modified(['src/ui.less'])
|
||||
const pyFiles = modified(['src/server.py'])
|
||||
|
||||
const tsxMatch = filter.match(tsxFiles)
|
||||
const lessMatch = filter.match(lessFiles)
|
||||
const pyMatch = filter.match(pyFiles)
|
||||
|
||||
expect(tsxMatch.backend).toEqual([])
|
||||
expect(lessMatch.backend).toEqual([])
|
||||
expect(pyMatch.backend).toEqual(pyFiles)
|
||||
})
|
||||
|
||||
test('matches path based on rules included using YAML anchor', () => {
|
||||
const yaml = `
|
||||
shared: &shared
|
||||
- common/**/*
|
||||
- config/**/*
|
||||
src:
|
||||
- *shared
|
||||
- src/**/*
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const files = modified(['config/settings.yml'])
|
||||
const match = filter.match(files)
|
||||
expect(match.src).toEqual(files)
|
||||
})
|
||||
})
|
||||
|
||||
describe('matching specific change status', () => {
|
||||
test('does not match modified file as added', () => {
|
||||
const yaml = `
|
||||
add:
|
||||
- added: "**/*"
|
||||
`
|
||||
let filter = new Filter(yaml)
|
||||
const match = filter.match(modified(['file.js']))
|
||||
expect(match.add).toEqual([])
|
||||
})
|
||||
|
||||
test('match added file as added', () => {
|
||||
const yaml = `
|
||||
add:
|
||||
- added: "**/*"
|
||||
`
|
||||
let filter = new Filter(yaml)
|
||||
const files = [{status: ChangeStatus.Added, filename: 'file.js'}]
|
||||
const match = filter.match(files)
|
||||
expect(match.add).toEqual(files)
|
||||
})
|
||||
|
||||
test('matches when multiple statuses are configured', () => {
|
||||
const yaml = `
|
||||
addOrModify:
|
||||
- added|modified: "**/*"
|
||||
`
|
||||
let filter = new Filter(yaml)
|
||||
const files = [{status: ChangeStatus.Modified, filename: 'file.js'}]
|
||||
const match = filter.match(files)
|
||||
expect(match.addOrModify).toEqual(files)
|
||||
})
|
||||
|
||||
test('matches when using an anchor', () => {
|
||||
const yaml = `
|
||||
shared: &shared
|
||||
- common/**/*
|
||||
- config/**/*
|
||||
src:
|
||||
- modified: *shared
|
||||
`
|
||||
let filter = new Filter(yaml)
|
||||
const files = modified(['config/file.js', 'common/anotherFile.js'])
|
||||
const match = filter.match(files)
|
||||
expect(match.src).toEqual(files)
|
||||
})
|
||||
})
|
||||
|
||||
function modified(paths: string[]): File[] {
|
||||
return paths.map(filename => {
|
||||
return {filename, status: ChangeStatus.Modified}
|
||||
})
|
||||
}
|
||||
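The change-status rules exercised above (e.g. `added|modified: "**"`) can be evaluated with `picomatch` roughly as sketched below. The `File`, `ChangeStatus` and `FilterRule` shapes are simplified stand-ins for the types in `src/file.ts` and `src/filter.ts`, not the real implementation.

```ts
import picomatch from 'picomatch'

type ChangeStatus = 'added' | 'modified' | 'deleted'
interface File {
  filename: string
  status: ChangeStatus
}

// One parsed rule: optional set of change statuses plus a glob matcher
interface FilterRule {
  status?: ChangeStatus[] // e.g. ['added', 'modified'] for `added|modified:`
  isMatch: (path: string) => boolean
}

function ruleFrom(statusKey: string | undefined, patterns: string | string[]): FilterRule {
  return {
    status: statusKey?.split('|').map(s => s.trim() as ChangeStatus),
    // dot: true so paths containing dot-files or dot-folders are matched as well
    isMatch: picomatch(patterns, {dot: true})
  }
}

function matches(rule: FilterRule, file: File): boolean {
  const statusOk = rule.status === undefined || rule.status.includes(file.status)
  return statusOk && rule.isMatch(file.filename)
}

// Example: `addedOrModified: [ added|modified: "**" ]`
const rule = ruleFrom('added|modified', '**')
console.log(matches(rule, {filename: 'file.js', status: 'modified'})) // true
console.log(matches(rule, {filename: 'file.js', status: 'deleted'}))  // false
```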
__tests__/git.test.ts (new file, +35 lines)
@@ -0,0 +1,35 @@
|
||||
import * as git from '../src/git'
|
||||
import {ChangeStatus} from '../src/file'
|
||||
|
||||
describe('parsing output of the git diff command', () => {
|
||||
test('parseGitDiffOutput returns files with correct change status', async () => {
|
||||
const files = git.parseGitDiffOutput(
|
||||
'A\u0000LICENSE\u0000' + 'M\u0000src/index.ts\u0000' + 'D\u0000src/main.ts\u0000'
|
||||
)
|
||||
expect(files.length).toBe(3)
|
||||
expect(files[0].filename).toBe('LICENSE')
|
||||
expect(files[0].status).toBe(ChangeStatus.Added)
|
||||
expect(files[1].filename).toBe('src/index.ts')
|
||||
expect(files[1].status).toBe(ChangeStatus.Modified)
|
||||
expect(files[2].filename).toBe('src/main.ts')
|
||||
expect(files[2].status).toBe(ChangeStatus.Deleted)
|
||||
})
|
||||
})
|
||||
|
||||
describe('git utility function tests (those not invoking git)', () => {
|
||||
test('Trims "refs/" and "heads/" from ref', () => {
|
||||
expect(git.getShortName('refs/heads/master')).toBe('master')
|
||||
expect(git.getShortName('heads/master')).toBe('heads/master')
|
||||
expect(git.getShortName('master')).toBe('master')
|
||||
|
||||
expect(git.getShortName('refs/tags/v1')).toBe('v1')
|
||||
expect(git.getShortName('tags/v1')).toBe('tags/v1')
|
||||
expect(git.getShortName('v1')).toBe('v1')
|
||||
})
|
||||
|
||||
test('isGitSha(ref) returns true only for 40 characters of a-z and 0-9', () => {
|
||||
expect(git.isGitSha('8b399ed1681b9efd6b1e048ca1c5cba47edf3855')).toBeTruthy()
|
||||
expect(git.isGitSha('This_is_very_long_name_for_a_branch_1111')).toBeFalsy()
|
||||
expect(git.isGitSha('master')).toBeFalsy()
|
||||
})
|
||||
})
|
||||
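A minimal parser and SHA check consistent with the `parseGitDiffOutput` and `isGitSha` tests above; the real `src/git.ts` may differ, and the status mapping here covers only A/M/D.

```ts
type ChangeStatus = 'added' | 'modified' | 'deleted'
interface File {
  filename: string
  status: ChangeStatus
}

const statusMap: Record<string, ChangeStatus> = {
  A: 'added',
  M: 'modified',
  D: 'deleted'
}

// NUL-delimited output (e.g. from `git diff --name-status -z`):
// <status> NUL <filename> NUL <status> NUL <filename> NUL ...
export function parseGitDiffOutput(output: string): File[] {
  const tokens = output.split('\u0000').filter(s => s.length > 0)
  const files: File[] = []
  for (let i = 0; i + 1 < tokens.length; i += 2) {
    files.push({status: statusMap[tokens[i]], filename: tokens[i + 1]})
  }
  return files
}

// Matches the isGitSha() expectation: exactly 40 characters of a-z and 0-9
export function isGitSha(ref: string): boolean {
  return /^[a-z0-9]{40}$/.test(ref)
}
```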
Deleted file (old filter tests)
@@ -1,93 +0,0 @@
|
||||
import Filter from '../src/filter'
|
||||
|
||||
describe('yaml filter parsing tests', () => {
|
||||
test('throws if yaml is not a dictionary', () => {
|
||||
const yaml = 'not a dictionary'
|
||||
const t = () => new Filter(yaml)
|
||||
expect(t).toThrow(/^Invalid filter.*/)
|
||||
})
|
||||
test('throws on invalid yaml', () => {
|
||||
const yaml = `
|
||||
src:
|
||||
src/**/*.js
|
||||
`
|
||||
const t = () => new Filter(yaml)
|
||||
expect(t).toThrow(/^Invalid filter.*/)
|
||||
})
|
||||
test('throws if pattern is not a string', () => {
|
||||
const yaml = `
|
||||
src:
|
||||
- src/**/*.js
|
||||
- dict:
|
||||
some: value
|
||||
`
|
||||
const t = () => new Filter(yaml)
|
||||
expect(t).toThrow(/^Invalid filter.*/)
|
||||
})
|
||||
})
|
||||
|
||||
describe('matching tests', () => {
|
||||
test('matches single rule in single group', () => {
|
||||
const yaml = `
|
||||
src:
|
||||
- src/**/*.js
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const match = filter.match(['src/app/module/file.js'])
|
||||
expect(match.src).toBeTruthy()
|
||||
})
|
||||
|
||||
test('no match when file is in different folder', () => {
|
||||
const yaml = `
|
||||
src:
|
||||
- src/**/*.js
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const match = filter.match(['not_src/other_file.js'])
|
||||
expect(match.src).toBeFalsy()
|
||||
})
|
||||
|
||||
test('match only within second groups ', () => {
|
||||
const yaml = `
|
||||
src:
|
||||
- src/**/*.js
|
||||
test:
|
||||
- test/**/*.js
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const match = filter.match(['test/test.js'])
|
||||
expect(match.src).toBeFalsy()
|
||||
expect(match.test).toBeTruthy()
|
||||
})
|
||||
|
||||
test('match only withing second rule of single group', () => {
|
||||
const yaml = `
|
||||
src:
|
||||
- src/**/*.js
|
||||
- test/**/*.js
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const match = filter.match(['test/test.js'])
|
||||
expect(match.src).toBeTruthy()
|
||||
})
|
||||
|
||||
test('matches anything', () => {
|
||||
const yaml = `
|
||||
any:
|
||||
- "**/*"
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const match = filter.match(['test/test.js'])
|
||||
expect(match.any).toBeTruthy()
|
||||
})
|
||||
|
||||
test('globbing matches path where file or folder name starts with dot', () => {
|
||||
const yaml = `
|
||||
dot:
|
||||
- "**/*.js"
|
||||
`
|
||||
const filter = new Filter(yaml)
|
||||
const match = filter.match(['.test/.test.js'])
|
||||
expect(match.dot).toBeTruthy()
|
||||
})
|
||||
})
|
||||
__tests__/shell-escape.test.ts (new file, +57 lines)
@@ -0,0 +1,57 @@
|
||||
import {backslashEscape, shellEscape} from '../src/list-format/shell-escape'
|
||||
|
||||
describe('escape() backslash escapes every character except subset of definitely safe characters', () => {
|
||||
test('simple filename should not be modified', () => {
|
||||
expect(backslashEscape('file.txt')).toBe('file.txt')
|
||||
})
|
||||
|
||||
test('directory separator should be preserved and not escaped', () => {
|
||||
expect(backslashEscape('path/to/file.txt')).toBe('path/to/file.txt')
|
||||
})
|
||||
|
||||
test('spaces should be escaped with backslash', () => {
|
||||
expect(backslashEscape('file with space')).toBe('file\\ with\\ space')
|
||||
})
|
||||
|
||||
test('quotes should be escaped with backslash', () => {
|
||||
expect(backslashEscape('file\'with quote"')).toBe('file\\\'with\\ quote\\"')
|
||||
})
|
||||
|
||||
test('$variables should be escaped', () => {
|
||||
expect(backslashEscape('$var')).toBe('\\$var')
|
||||
})
|
||||
})
|
||||
|
||||
describe('shellEscape() returns human readable filenames with as few escaping applied as possible', () => {
|
||||
test('simple filename should not be modified', () => {
|
||||
expect(shellEscape('file.txt')).toBe('file.txt')
|
||||
})
|
||||
|
||||
test('directory separator should be preserved and not escaped', () => {
|
||||
expect(shellEscape('path/to/file.txt')).toBe('path/to/file.txt')
|
||||
})
|
||||
|
||||
test('filename with spaces should be quoted', () => {
|
||||
expect(shellEscape('file with space')).toBe("'file with space'")
|
||||
})
|
||||
|
||||
test('filename with spaces should be quoted', () => {
|
||||
expect(shellEscape('file with space')).toBe("'file with space'")
|
||||
})
|
||||
|
||||
test('filename with $ should be quoted', () => {
|
||||
expect(shellEscape('$var')).toBe("'$var'")
|
||||
})
|
||||
|
||||
test('filename with " should be quoted', () => {
|
||||
expect(shellEscape('file"name')).toBe("'file\"name'")
|
||||
})
|
||||
|
||||
test('filename with single quote should be wrapped in double quotes', () => {
|
||||
expect(shellEscape("file'with quote")).toBe('"file\'with quote"')
|
||||
})
|
||||
|
||||
test('filename with single quote and special characters is split and quoted/escaped as needed', () => {
|
||||
expect(shellEscape("file'with $quote")).toBe("file\\''with $quote'")
|
||||
})
|
||||
})
|
||||
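A sketch of escaping behaviour inferred from the tests above; the real `src/list-format/shell-escape.ts` may be implemented differently.

```ts
// Characters that never need quoting or escaping in a POSIX shell word
const SAFE = /^[a-zA-Z0-9,._+:@%/-]*$/

// backslashEscape: escape every character outside the safe set with a backslash
export function backslashEscape(value: string): string {
  return value.replace(/([^a-zA-Z0-9,._+:@%/-])/g, '\\$1')
}

// shellEscape: prefer readable quoting, escaping only as much as necessary
export function shellEscape(value: string): string {
  if (SAFE.test(value)) return value
  if (!value.includes("'")) return `'${value}'`
  // Contains a single quote: double quotes suffice if no $, `, " or \ is present
  if (!/[$`"\\]/.test(value)) return `"${value}"`
  // Otherwise split on single quotes, quote unsafe segments and escape the quotes
  return value
    .split("'")
    .map(part => (SAFE.test(part) ? part : `'${part}'`))
    .join("\\'")
}

// shellEscape('file with space')   -> 'file with space'
// shellEscape("file'with $quote")  -> file\''with $quote'
```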
action.yml (39 lines changed)
@@ -1,17 +1,50 @@
|
||||
name: 'Pull request changed files filter'
|
||||
description: 'Enables conditional execution of workflow job steps considering which files are modified by a pull request.'
|
||||
name: 'Paths Changes Filter'
|
||||
description: 'Execute your workflow steps only if relevant files are modified.'
|
||||
author: 'Michal Dorner <dorner.michal@gmail.com>'
|
||||
inputs:
|
||||
token:
|
||||
description: 'GitHub Access Token'
|
||||
required: false
|
||||
default: ${{ github.token }}
|
||||
working-directory:
|
||||
description: 'Relative path under $GITHUB_WORKSPACE where the repository was checked out.'
|
||||
required: false
|
||||
base:
|
||||
description: |
|
||||
Git reference (e.g. branch name) against which the changes will be detected. Defaults to repository default branch (e.g. master).
|
||||
If it references same branch it was pushed to, changes are detected against the most recent commit before the push.
|
||||
This option is ignored if action is triggered by pull_request event.
|
||||
required: false
|
||||
filters:
|
||||
description: 'Path to the configuration file or YAML string with filters definition'
|
||||
required: true
|
||||
list-files:
|
||||
description: |
|
||||
Enables listing of files matching the filter:
|
||||
'none' - Disables listing of matching files (default).
|
||||
'csv' - Comma-separated list of filenames.
|
||||
If needed it uses double quotes to wrap filename with unsafe characters.
|
||||
'json' - Serialized as JSON array.
|
||||
'shell' - Space delimited list usable as command line argument list in linux shell.
|
||||
If needed it uses single or double quotes to wrap filename with unsafe characters.
|
||||
'escape'- Space delimited list usable as command line argument list in linux shell.
|
||||
Backslash escapes every potentially unsafe character.
|
||||
required: true
|
||||
default: none
|
||||
initial-fetch-depth:
|
||||
description: |
|
||||
How many commits are initially fetched from base branch.
|
||||
If needed, each subsequent fetch doubles the previously requested number of commits
|
||||
until the merge-base is found or there are no more commits in the history.
|
||||
This option takes effect only when changes are detected using git against a different base branch.
|
||||
required: false
|
||||
default: '10'
|
||||
outputs:
|
||||
changes:
|
||||
description: JSON array with names of all filters matching any of changed files
|
||||
runs:
|
||||
using: 'node12'
|
||||
main: 'dist/index.js'
|
||||
branding:
|
||||
color: blue
|
||||
icon: filter
|
||||
icon: filter
|
||||
|
||||
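For reference, a sketch of how the inputs declared above are typically read with `@actions/core` at the start of the bundled entrypoint; variable names are illustrative and the rest of the entrypoint is omitted.

```ts
import * as core from '@actions/core'

const token = core.getInput('token', {required: false})
const workingDirectory = core.getInput('working-directory', {required: false})
const base = core.getInput('base', {required: false})
const filtersInput = core.getInput('filters', {required: true})
const listFiles = core.getInput('list-files', {required: false}).toLowerCase() || 'none'
const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth')) || 10

// Honour the working-directory input before running any git commands
if (workingDirectory) {
  process.chdir(workingDirectory)
}
```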
dist/index.js (vendored, 3998 lines changed; diff suppressed because it is too large)
package-lock.json (generated, 687 lines changed)
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"name": "pr-changed-files-filter",
|
||||
"name": "paths-filter",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 1,
|
||||
"requires": true,
|
||||
@@ -3808,6 +3808,12 @@
|
||||
"integrity": "sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/picomatch": {
|
||||
"version": "2.2.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/picomatch/-/picomatch-2.2.1.tgz",
|
||||
"integrity": "sha512-26/tQcDmJXYHiaWAAIjnTVL5nwrT+IVaqFZIbBImAuKk/r/j1r/1hmZ7uaOzG6IknqP3QHcNNQ6QO8Vp28lUoA==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/prettier": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.0.1.tgz",
|
||||
@@ -3897,33 +3903,33 @@
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/experimental-utils": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-3.0.0.tgz",
|
||||
"integrity": "sha512-BN0vmr9N79M9s2ctITtChRuP1+Dls0x/wlg0RXW1yQ7WJKPurg6X3Xirv61J2sjPif4F8SLsFMs5Nzte0WYoTQ==",
|
||||
"version": "3.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-3.3.0.tgz",
|
||||
"integrity": "sha512-d4pGIAbu/tYsrPrdHCQ5xfadJGvlkUxbeBB56nO/VGmEDi/sKmfa5fGty5t5veL1OyJBrUmSiRn1R1qfVDydrg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/json-schema": "^7.0.3",
|
||||
"@typescript-eslint/typescript-estree": "3.0.0",
|
||||
"@typescript-eslint/typescript-estree": "3.3.0",
|
||||
"eslint-scope": "^5.0.0",
|
||||
"eslint-utils": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/parser": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-3.0.0.tgz",
|
||||
"integrity": "sha512-8RRCA9KLxoFNO0mQlrLZA0reGPd/MsobxZS/yPFj+0/XgMdS8+mO8mF3BDj2ZYQj03rkayhSJtF1HAohQ3iylw==",
|
||||
"version": "3.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-3.3.0.tgz",
|
||||
"integrity": "sha512-a7S0Sqn/+RpOOWTcaLw6RD4obsharzxmgMfdK24l364VxuBODXjuJM7ImCkSXEN7oz52aiZbXSbc76+2EsE91w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/eslint-visitor-keys": "^1.0.0",
|
||||
"@typescript-eslint/experimental-utils": "3.0.0",
|
||||
"@typescript-eslint/typescript-estree": "3.0.0",
|
||||
"@typescript-eslint/experimental-utils": "3.3.0",
|
||||
"@typescript-eslint/typescript-estree": "3.3.0",
|
||||
"eslint-visitor-keys": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/typescript-estree": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-3.0.0.tgz",
|
||||
"integrity": "sha512-nevQvHyNghsfLrrByzVIH4ZG3NROgJ8LZlfh3ddwPPH4CH7W4GAiSx5qu+xHuX5pWsq6q/eqMc1io840ZhAnUg==",
|
||||
"version": "3.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-3.3.0.tgz",
|
||||
"integrity": "sha512-3SqxylENltEvJsjjMSDCUx/edZNSC7wAqifUU1Ywp//0OWEZwMZJfecJud9XxJ/40rAKEbJMKBOQzeOjrLJFzQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"debug": "^4.1.1",
|
||||
@@ -3956,9 +3962,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"acorn": {
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz",
|
||||
"integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==",
|
||||
"version": "7.3.1",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-7.3.1.tgz",
|
||||
"integrity": "sha512-tLc0wSnatxAQHVHUapaHdz72pi9KUyHjq5KyHjGg9Y8Ifdc79pTh2XvI6I1/chZbnM7QtNKzh66ooDogPZSleA==",
|
||||
"dev": true
|
||||
},
|
||||
"acorn-globals": {
|
||||
@@ -4003,6 +4009,12 @@
|
||||
"uri-js": "^4.2.2"
|
||||
}
|
||||
},
|
||||
"ansi-colors": {
|
||||
"version": "3.2.4",
|
||||
"resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.4.tgz",
|
||||
"integrity": "sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA==",
|
||||
"dev": true
|
||||
},
|
||||
"ansi-escapes": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz",
|
||||
@@ -4032,6 +4044,111 @@
|
||||
"requires": {
|
||||
"micromatch": "^3.1.4",
|
||||
"normalize-path": "^2.1.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"braces": {
|
||||
"version": "2.3.2",
|
||||
"resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz",
|
||||
"integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"arr-flatten": "^1.1.0",
|
||||
"array-unique": "^0.3.2",
|
||||
"extend-shallow": "^2.0.1",
|
||||
"fill-range": "^4.0.0",
|
||||
"isobject": "^3.0.1",
|
||||
"repeat-element": "^1.1.2",
|
||||
"snapdragon": "^0.8.1",
|
||||
"snapdragon-node": "^2.0.1",
|
||||
"split-string": "^3.0.2",
|
||||
"to-regex": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"extend-shallow": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz",
|
||||
"integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-extendable": "^0.1.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"fill-range": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz",
|
||||
"integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"extend-shallow": "^2.0.1",
|
||||
"is-number": "^3.0.0",
|
||||
"repeat-string": "^1.6.1",
|
||||
"to-regex-range": "^2.1.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"extend-shallow": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz",
|
||||
"integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-extendable": "^0.1.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"is-number": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz",
|
||||
"integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"kind-of": "^3.0.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"kind-of": {
|
||||
"version": "3.2.2",
|
||||
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz",
|
||||
"integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-buffer": "^1.1.5"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"micromatch": {
|
||||
"version": "3.1.10",
|
||||
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz",
|
||||
"integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"arr-diff": "^4.0.0",
|
||||
"array-unique": "^0.3.2",
|
||||
"braces": "^2.3.1",
|
||||
"define-property": "^2.0.2",
|
||||
"extend-shallow": "^3.0.2",
|
||||
"extglob": "^2.0.4",
|
||||
"fragment-cache": "^0.2.1",
|
||||
"kind-of": "^6.0.2",
|
||||
"nanomatch": "^1.2.9",
|
||||
"object.pick": "^1.3.0",
|
||||
"regex-not": "^1.0.0",
|
||||
"snapdragon": "^0.8.1",
|
||||
"to-regex": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"to-regex-range": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz",
|
||||
"integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-number": "^3.0.0",
|
||||
"repeat-string": "^1.6.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"argparse": {
|
||||
@@ -4772,7 +4889,8 @@
|
||||
"balanced-match": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
|
||||
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
|
||||
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
|
||||
"dev": true
|
||||
},
|
||||
"base": {
|
||||
"version": "0.11.2",
|
||||
@@ -4847,40 +4965,12 @@
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"balanced-match": "^1.0.0",
|
||||
"concat-map": "0.0.1"
|
||||
}
|
||||
},
|
||||
"braces": {
|
||||
"version": "2.3.2",
|
||||
"resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz",
|
||||
"integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"arr-flatten": "^1.1.0",
|
||||
"array-unique": "^0.3.2",
|
||||
"extend-shallow": "^2.0.1",
|
||||
"fill-range": "^4.0.0",
|
||||
"isobject": "^3.0.1",
|
||||
"repeat-element": "^1.1.2",
|
||||
"snapdragon": "^0.8.1",
|
||||
"snapdragon-node": "^2.0.1",
|
||||
"split-string": "^3.0.2",
|
||||
"to-regex": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"extend-shallow": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz",
|
||||
"integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-extendable": "^0.1.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"browser-process-hrtime": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz",
|
||||
@@ -5139,7 +5229,8 @@
|
||||
"concat-map": {
|
||||
"version": "0.0.1",
|
||||
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
|
||||
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
|
||||
"dev": true
|
||||
},
|
||||
"contains-path": {
|
||||
"version": "0.1.0",
|
||||
@@ -5419,6 +5510,15 @@
|
||||
"once": "^1.4.0"
|
||||
}
|
||||
},
|
||||
"enquirer": {
|
||||
"version": "2.3.5",
|
||||
"resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.5.tgz",
|
||||
"integrity": "sha512-BNT1C08P9XD0vNg3J475yIUG+mVdp9T6towYFHUv897X0KoHBjB1shyrNmhmtHWKP17iSWgo7Gqh7BBuzLZMSA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"ansi-colors": "^3.2.1"
|
||||
}
|
||||
},
|
||||
"error-ex": {
|
||||
"version": "1.3.2",
|
||||
"resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz",
|
||||
@@ -5487,79 +5587,214 @@
|
||||
}
|
||||
},
|
||||
"eslint": {
|
||||
"version": "5.16.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz",
|
||||
"integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==",
|
||||
"version": "7.3.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint/-/eslint-7.3.0.tgz",
|
||||
"integrity": "sha512-dJMVXwfU5PT1cj2Nv2VPPrKahKTGdX+5Dh0Q3YuKt+Y2UhdL2YbzsVaBMyG9HC0tBismlv/r1+eZqs6SMIV38Q==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@babel/code-frame": "^7.0.0",
|
||||
"ajv": "^6.9.1",
|
||||
"chalk": "^2.1.0",
|
||||
"cross-spawn": "^6.0.5",
|
||||
"ajv": "^6.10.0",
|
||||
"chalk": "^4.0.0",
|
||||
"cross-spawn": "^7.0.2",
|
||||
"debug": "^4.0.1",
|
||||
"doctrine": "^3.0.0",
|
||||
"eslint-scope": "^4.0.3",
|
||||
"eslint-utils": "^1.3.1",
|
||||
"eslint-visitor-keys": "^1.0.0",
|
||||
"espree": "^5.0.1",
|
||||
"esquery": "^1.0.1",
|
||||
"enquirer": "^2.3.5",
|
||||
"eslint-scope": "^5.1.0",
|
||||
"eslint-utils": "^2.0.0",
|
||||
"eslint-visitor-keys": "^1.2.0",
|
||||
"espree": "^7.1.0",
|
||||
"esquery": "^1.2.0",
|
||||
"esutils": "^2.0.2",
|
||||
"file-entry-cache": "^5.0.1",
|
||||
"functional-red-black-tree": "^1.0.1",
|
||||
"glob": "^7.1.2",
|
||||
"globals": "^11.7.0",
|
||||
"glob-parent": "^5.0.0",
|
||||
"globals": "^12.1.0",
|
||||
"ignore": "^4.0.6",
|
||||
"import-fresh": "^3.0.0",
|
||||
"imurmurhash": "^0.1.4",
|
||||
"inquirer": "^6.2.2",
|
||||
"js-yaml": "^3.13.0",
|
||||
"is-glob": "^4.0.0",
|
||||
"js-yaml": "^3.13.1",
|
||||
"json-stable-stringify-without-jsonify": "^1.0.1",
|
||||
"levn": "^0.3.0",
|
||||
"lodash": "^4.17.11",
|
||||
"levn": "^0.4.1",
|
||||
"lodash": "^4.17.14",
|
||||
"minimatch": "^3.0.4",
|
||||
"mkdirp": "^0.5.1",
|
||||
"natural-compare": "^1.4.0",
|
||||
"optionator": "^0.8.2",
|
||||
"path-is-inside": "^1.0.2",
|
||||
"optionator": "^0.9.1",
|
||||
"progress": "^2.0.0",
|
||||
"regexpp": "^2.0.1",
|
||||
"semver": "^5.5.1",
|
||||
"strip-ansi": "^4.0.0",
|
||||
"strip-json-comments": "^2.0.1",
|
||||
"regexpp": "^3.1.0",
|
||||
"semver": "^7.2.1",
|
||||
"strip-ansi": "^6.0.0",
|
||||
"strip-json-comments": "^3.1.0",
|
||||
"table": "^5.2.3",
|
||||
"text-table": "^0.2.0"
|
||||
"text-table": "^0.2.0",
|
||||
"v8-compile-cache": "^2.0.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"eslint-scope": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz",
|
||||
"integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==",
|
||||
"ansi-styles": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz",
|
||||
"integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"esrecurse": "^4.1.0",
|
||||
"estraverse": "^4.1.1"
|
||||
"@types/color-name": "^1.1.1",
|
||||
"color-convert": "^2.0.1"
|
||||
}
|
||||
},
|
||||
"eslint-utils": {
|
||||
"version": "1.4.3",
|
||||
"resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz",
|
||||
"integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==",
|
||||
"chalk": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz",
|
||||
"integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"eslint-visitor-keys": "^1.1.0"
|
||||
"ansi-styles": "^4.1.0",
|
||||
"supports-color": "^7.1.0"
|
||||
}
|
||||
},
|
||||
"color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"color-name": "~1.1.4"
|
||||
}
|
||||
},
|
||||
"color-name": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
||||
"dev": true
|
||||
},
|
||||
"cross-spawn": {
|
||||
"version": "7.0.3",
|
||||
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
|
||||
"integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"path-key": "^3.1.0",
|
||||
"shebang-command": "^2.0.0",
|
||||
"which": "^2.0.1"
|
||||
}
|
||||
},
|
||||
"eslint-visitor-keys": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz",
|
||||
"integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==",
|
||||
"dev": true
|
||||
},
|
||||
"globals": {
|
||||
"version": "12.4.0",
|
||||
"resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz",
|
||||
"integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"type-fest": "^0.8.1"
|
||||
}
|
||||
},
|
||||
"has-flag": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
|
||||
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
|
||||
"dev": true
|
||||
},
|
||||
"levn": {
|
||||
"version": "0.4.1",
|
||||
"resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
|
||||
"integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"prelude-ls": "^1.2.1",
|
||||
"type-check": "~0.4.0"
|
||||
}
|
||||
},
|
||||
"optionator": {
|
||||
"version": "0.9.1",
|
||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz",
|
||||
"integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"deep-is": "^0.1.3",
|
||||
"fast-levenshtein": "^2.0.6",
|
||||
"levn": "^0.4.1",
|
||||
"prelude-ls": "^1.2.1",
|
||||
"type-check": "^0.4.0",
|
||||
"word-wrap": "^1.2.3"
|
||||
}
|
||||
},
|
||||
"path-key": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
|
||||
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
|
||||
"dev": true
|
||||
},
|
||||
"prelude-ls": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
|
||||
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
|
||||
"dev": true
|
||||
},
|
||||
"regexpp": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz",
|
||||
"integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==",
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.1.0.tgz",
|
||||
"integrity": "sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q==",
|
||||
"dev": true
|
||||
},
|
||||
"semver": {
|
||||
"version": "5.7.1",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
|
||||
"integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
|
||||
"version": "7.3.2",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz",
|
||||
"integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==",
|
||||
"dev": true
|
||||
},
|
||||
"shebang-command": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
|
||||
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"shebang-regex": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"shebang-regex": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
|
||||
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
|
||||
"dev": true
|
||||
},
|
||||
"strip-ansi": {
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz",
|
||||
"integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"ansi-regex": "^5.0.0"
|
||||
}
|
||||
},
|
||||
"supports-color": {
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz",
|
||||
"integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"has-flag": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"type-check": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
|
||||
"integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"prelude-ls": "^1.2.1"
|
||||
}
|
||||
},
|
||||
"which": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
|
||||
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"isexe": "^2.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -5927,9 +6162,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"eslint-scope": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz",
|
||||
"integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==",
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.0.tgz",
|
||||
"integrity": "sha512-iiGRvtxWqgtx5m8EyQUJihBloE4EnYeGE/bz1wSPwJE6tZuJUtHlhqDM4Xj2ukE8Dyy1+HCZ4hE0fzIVMzb58w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"esrecurse": "^4.1.0",
|
||||
@@ -5937,9 +6172,9 @@
|
||||
}
|
||||
},
|
||||
"eslint-utils": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz",
|
||||
"integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==",
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz",
|
||||
"integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"eslint-visitor-keys": "^1.1.0"
|
||||
@@ -5952,14 +6187,22 @@
|
||||
"dev": true
|
||||
},
|
||||
"espree": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz",
|
||||
"integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==",
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/espree/-/espree-7.1.0.tgz",
|
||||
"integrity": "sha512-dcorZSyfmm4WTuTnE5Y7MEN1DyoPYy1ZR783QW1FJoenn7RailyWFsq/UL6ZAAA7uXurN9FIpYyUs3OfiIW+Qw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"acorn": "^6.0.7",
|
||||
"acorn-jsx": "^5.0.0",
|
||||
"eslint-visitor-keys": "^1.0.0"
|
||||
"acorn": "^7.2.0",
|
||||
"acorn-jsx": "^5.2.0",
|
||||
"eslint-visitor-keys": "^1.2.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"eslint-visitor-keys": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz",
|
||||
"integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"esprima": {
|
||||
@@ -6326,29 +6569,6 @@
|
||||
"flat-cache": "^2.0.1"
|
||||
}
|
||||
},
|
||||
"fill-range": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz",
|
||||
"integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"extend-shallow": "^2.0.1",
|
||||
"is-number": "^3.0.0",
|
||||
"repeat-string": "^1.6.1",
|
||||
"to-regex-range": "^2.1.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"extend-shallow": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz",
|
||||
"integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-extendable": "^0.1.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"find-up": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz",
|
||||
@@ -6486,6 +6706,15 @@
|
||||
"path-is-absolute": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"glob-parent": {
|
||||
"version": "5.1.1",
|
||||
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz",
|
||||
"integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-glob": "^4.0.1"
|
||||
}
|
||||
},
|
||||
"globals": {
|
||||
"version": "11.12.0",
|
||||
"resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz",
|
||||
@@ -6601,6 +6830,26 @@
|
||||
"kind-of": "^4.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"is-number": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz",
|
||||
"integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"kind-of": "^3.0.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"kind-of": {
|
||||
"version": "3.2.2",
|
||||
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz",
|
||||
"integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-buffer": "^1.1.5"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"kind-of": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz",
|
||||
@@ -6954,26 +7203,6 @@
|
||||
"is-extglob": "^2.1.1"
|
||||
}
|
||||
},
|
||||
"is-number": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz",
|
||||
"integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"kind-of": "^3.0.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"kind-of": {
|
||||
"version": "3.2.2",
|
||||
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz",
|
||||
"integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-buffer": "^1.1.5"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"is-plain-object": {
|
||||
"version": "2.0.4",
|
||||
"resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz",
|
||||
@@ -12158,9 +12387,9 @@
|
||||
}
|
||||
},
|
||||
"kind-of": {
|
||||
"version": "6.0.2",
|
||||
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.2.tgz",
|
||||
"integrity": "sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA==",
|
||||
"version": "6.0.3",
|
||||
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz",
|
||||
"integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==",
|
||||
"dev": true
|
||||
},
|
||||
"kleur": {
|
||||
@@ -12311,27 +12540,6 @@
|
||||
"integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==",
|
||||
"dev": true
|
||||
},
|
||||
"micromatch": {
|
||||
"version": "3.1.10",
|
||||
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz",
|
||||
"integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"arr-diff": "^4.0.0",
|
||||
"array-unique": "^0.3.2",
|
||||
"braces": "^2.3.1",
|
||||
"define-property": "^2.0.2",
|
||||
"extend-shallow": "^3.0.2",
|
||||
"extglob": "^2.0.4",
|
||||
"fragment-cache": "^0.2.1",
|
||||
"kind-of": "^6.0.2",
|
||||
"nanomatch": "^1.2.9",
|
||||
"object.pick": "^1.3.0",
|
||||
"regex-not": "^1.0.0",
|
||||
"snapdragon": "^0.8.1",
|
||||
"to-regex": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"mime-db": {
|
||||
"version": "1.44.0",
|
||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.44.0.tgz",
|
||||
@@ -12357,6 +12565,7 @@
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
|
||||
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"brace-expansion": "^1.1.7"
|
||||
}
|
||||
@@ -12781,12 +12990,6 @@
|
||||
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
|
||||
"dev": true
|
||||
},
|
||||
"path-is-inside": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz",
|
||||
"integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=",
|
||||
"dev": true
|
||||
},
|
||||
"path-key": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz",
|
||||
@@ -12816,8 +13019,7 @@
|
||||
"picomatch": {
|
||||
"version": "2.2.2",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz",
|
||||
"integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==",
|
||||
"dev": true
|
||||
"integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg=="
|
||||
},
|
||||
"pify": {
|
||||
"version": "2.3.0",
|
||||
@@ -13345,11 +13547,114 @@
|
||||
"walker": "~1.0.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"braces": {
|
||||
"version": "2.3.2",
|
||||
"resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz",
|
||||
"integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"arr-flatten": "^1.1.0",
|
||||
"array-unique": "^0.3.2",
|
||||
"extend-shallow": "^2.0.1",
|
||||
"fill-range": "^4.0.0",
|
||||
"isobject": "^3.0.1",
|
||||
"repeat-element": "^1.1.2",
|
||||
"snapdragon": "^0.8.1",
|
||||
"snapdragon-node": "^2.0.1",
|
||||
"split-string": "^3.0.2",
|
||||
"to-regex": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"extend-shallow": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz",
|
||||
"integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-extendable": "^0.1.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"fill-range": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz",
|
||||
"integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"extend-shallow": "^2.0.1",
|
||||
"is-number": "^3.0.0",
|
||||
"repeat-string": "^1.6.1",
|
||||
"to-regex-range": "^2.1.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"extend-shallow": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz",
|
||||
"integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-extendable": "^0.1.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"is-number": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz",
|
||||
"integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"kind-of": "^3.0.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"kind-of": {
|
||||
"version": "3.2.2",
|
||||
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz",
|
||||
"integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-buffer": "^1.1.5"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"micromatch": {
|
||||
"version": "3.1.10",
|
||||
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz",
|
||||
"integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"arr-diff": "^4.0.0",
|
||||
"array-unique": "^0.3.2",
|
||||
"braces": "^2.3.1",
|
||||
"define-property": "^2.0.2",
|
||||
"extend-shallow": "^3.0.2",
|
||||
"extglob": "^2.0.4",
|
||||
"fragment-cache": "^0.2.1",
|
||||
"kind-of": "^6.0.2",
|
||||
"nanomatch": "^1.2.9",
|
||||
"object.pick": "^1.3.0",
|
||||
"regex-not": "^1.0.0",
|
||||
"snapdragon": "^0.8.1",
|
||||
"to-regex": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"minimist": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
|
||||
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=",
|
||||
"dev": true
|
||||
},
|
||||
"to-regex-range": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz",
|
||||
"integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-number": "^3.0.0",
|
||||
"repeat-string": "^1.6.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -13584,12 +13889,12 @@
|
||||
"dev": true
|
||||
},
|
||||
"source-map-resolve": {
|
||||
"version": "0.5.2",
|
||||
"resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.2.tgz",
|
||||
"integrity": "sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA==",
|
||||
"version": "0.5.3",
|
||||
"resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz",
|
||||
"integrity": "sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"atob": "^2.1.1",
|
||||
"atob": "^2.1.2",
|
||||
"decode-uri-component": "^0.2.0",
|
||||
"resolve-url": "^0.2.1",
|
||||
"source-map-url": "^0.4.0",
|
||||
@@ -13862,9 +14167,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"strip-json-comments": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
|
||||
"integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=",
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.0.tgz",
|
||||
"integrity": "sha512-e6/d0eBu7gHtdCqFt0xJr642LdToM5/cN4Qb9DbHjVx1CP5RyeM+zH7pbecEmDv/lBqb0QH+6Uqq75rxFPkM0w==",
|
||||
"dev": true
|
||||
},
|
||||
"supports-color": {
|
||||
@@ -14053,16 +14358,6 @@
|
||||
"safe-regex": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"to-regex-range": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz",
|
||||
"integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"is-number": "^3.0.0",
|
||||
"repeat-string": "^1.6.1"
|
||||
}
|
||||
},
|
||||
"tough-cookie": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz",
|
||||
@@ -14317,6 +14612,12 @@
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"v8-compile-cache": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.1.tgz",
|
||||
"integrity": "sha512-8OQ9CL+VWyt3JStj7HX7/ciTL2V3Rl1Wf5OL+SNTm0yK1KvtReVulksyeRnCANHHuUxHlQig+JJDlUhBt1NQDQ==",
|
||||
"dev": true
|
||||
},
|
||||
"v8-to-istanbul": {
|
||||
"version": "4.1.4",
|
||||
"resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-4.1.4.tgz",
|
||||
|
||||
package.json
@@ -1,8 +1,8 @@
{
"name": "pr-changed-files-filter",
"name": "paths-filter",
"version": "1.0.0",
"private": true,
"description": "Enables conditional execution of workflow job steps considering which files are modified by a pull request.",
"description": "Execute your workflow steps only if relevant files are modified.",
"main": "lib/main.js",
"scripts": {
"build": "tsc",
@@ -29,16 +29,17 @@
"@actions/exec": "^1.0.4",
"@actions/github": "^2.2.0",
"@octokit/webhooks": "^7.6.2",
"minimatch": "^3.0.4"
"picomatch": "^2.2.2"
},
"devDependencies": {
"@types/jest": "^25.2.3",
"@types/js-yaml": "^3.12.4",
"@types/minimatch": "^3.0.3",
"@types/node": "^14.0.5",
"@typescript-eslint/parser": "^3.0.0",
"@types/picomatch": "^2.2.1",
"@typescript-eslint/parser": "^3.3.0",
"@zeit/ncc": "^0.22.3",
"eslint": "^5.16.0",
"eslint": "^7.3.0",
"eslint-plugin-github": "^2.0.0",
"eslint-plugin-jest": "^22.21.0",
"jest": "^26.0.1",
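The minimatch to picomatch swap above is what the rewritten filter relies on. A minimal sketch of the picomatch API as src/filter.ts uses it further down; the pattern and paths are illustrative:

import picomatch from 'picomatch'

// picomatch(pattern, options) compiles the glob once and returns a matcher function.
// The dot: true option makes '**' also cover dotfiles, e.g. paths under '.github/'.
const isSource = picomatch('src/**', {dot: true})
isSource('src/.internal/config.ts') // => true
isSource('docs/index.md')           // => false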
src/exec.ts (new file)
@@ -0,0 +1,21 @@
import {exec as execImpl, ExecOptions} from '@actions/exec'

// Wraps original exec() function
// Returns exit code and whole stdout/stderr
export default async function exec(commandLine: string, args?: string[], options?: ExecOptions): Promise<ExecResult> {
  options = options || {}
  let stdout = ''
  let stderr = ''
  options.listeners = {
    stdout: (data: Buffer) => (stdout += data.toString()),
    stderr: (data: Buffer) => (stderr += data.toString())
  }
  const code = await execImpl(commandLine, args, options)
  return {code, stdout, stderr}
}

export interface ExecResult {
  code: number
  stdout: string
  stderr: string
}
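A short usage sketch for the wrapper above; the helper and the git invocation are made up for illustration, only the ExecResult shape comes from the file itself:

import exec from './exec'

// Resolve the current commit SHA, or fail with the captured stderr
async function currentSha(): Promise<string> {
  const result = await exec('git', ['rev-parse', 'HEAD'], {ignoreReturnCode: true})
  if (result.code !== 0) {
    throw new Error(`git rev-parse failed: ${result.stderr}`)
  }
  return result.stdout.trim()
}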
src/file.ts (new file)
@@ -0,0 +1,13 @@
export interface File {
  filename: string
  status: ChangeStatus
}

export enum ChangeStatus {
  Added = 'added',
  Copied = 'copied',
  Deleted = 'deleted',
  Modified = 'modified',
  Renamed = 'renamed',
  Unmerged = 'unmerged'
}
src/filter.ts
@@ -1,42 +1,109 @@
import * as jsyaml from 'js-yaml'
import * as minimatch from 'minimatch'
import picomatch from 'picomatch'
import {File, ChangeStatus} from './file'

export default class Filter {
rules: {[key: string]: minimatch.IMinimatch[]} = {}
// Type definition of object we expect to load from YAML
interface FilterYaml {
[name: string]: FilterItemYaml
}
type FilterItemYaml =
| string // Filename pattern, e.g. "path/to/*.js"
| {[changeTypes: string]: string | string[]} // Change status and filename, e.g. added|modified: "path/to/*.js"
| FilterItemYaml[] // Supports referencing another rule via YAML anchor

constructor(yaml: string) {
const doc = jsyaml.safeLoad(yaml)
if (typeof doc !== 'object') {
this.throwInvalidFormatError()
}
// Minimatch options used in all matchers
const MatchOptions = {
dot: true
}

const opts: minimatch.IOptions = {
dot: true
}
// Internal representation of one item in named filter rule
// Created as simplified form of data in FilterItemYaml
interface FilterRuleItem {
status?: ChangeStatus[] // Required change status of the matched files
isMatch: (str: string) => boolean // Matches the filename
}

for (const name of Object.keys(doc)) {
const patterns = doc[name] as string[]
if (!Array.isArray(patterns)) {
this.throwInvalidFormatError()
}
if (!patterns.every(x => typeof x === 'string')) {
this.throwInvalidFormatError()
}
this.rules[name] = patterns.map(x => new minimatch.Minimatch(x, opts))
export interface FilterResults {
[key: string]: File[]
}

export class Filter {
rules: {[key: string]: FilterRuleItem[]} = {}

// Creates instance of Filter and load rules from YAML if it's provided
constructor(yaml?: string) {
if (yaml) {
this.load(yaml)
}
}

// Returns dictionary with match result per rules group
match(paths: string[]): {[key: string]: boolean} {
const result: {[key: string]: boolean} = {}
// Load rules from YAML string
load(yaml: string): void {
if (!yaml) {
return
}

const doc = jsyaml.safeLoad(yaml) as FilterYaml
if (typeof doc !== 'object') {
this.throwInvalidFormatError('Root element is not an object')
}

for (const [key, item] of Object.entries(doc)) {
this.rules[key] = this.parseFilterItemYaml(item)
}
}

match(files: File[]): FilterResults {
const result: FilterResults = {}
for (const [key, patterns] of Object.entries(this.rules)) {
const match = paths.some(fileName => patterns.some(rule => rule.match(fileName)))
result[key] = match
result[key] = files.filter(file => this.isMatch(file, patterns))
}
return result
}

private throwInvalidFormatError(): never {
throw new Error('Invalid filter YAML format: Expected dictionary of string arrays')
private isMatch(file: File, patterns: FilterRuleItem[]): boolean {
return patterns.some(
rule => (rule.status === undefined || rule.status.includes(file.status)) && rule.isMatch(file.filename)
)
}

private parseFilterItemYaml(item: FilterItemYaml): FilterRuleItem[] {
if (Array.isArray(item)) {
return flat(item.map(i => this.parseFilterItemYaml(i)))
}

if (typeof item === 'string') {
return [{status: undefined, isMatch: picomatch(item, MatchOptions)}]
}

if (typeof item === 'object') {
return Object.entries(item).map(([key, pattern]) => {
if (typeof key !== 'string' || (typeof pattern !== 'string' && !Array.isArray(pattern))) {
this.throwInvalidFormatError(
`Expected [key:string]= pattern:string | string[], but [${key}:${typeof key}]= ${pattern}:${typeof pattern} found`
)
}
return {
status: key
.split('|')
.map(x => x.trim())
.filter(x => x.length > 0)
.map(x => x.toLowerCase()) as ChangeStatus[],
isMatch: picomatch(pattern, MatchOptions)
}
})
}

this.throwInvalidFormatError(`Unexpected element type '${typeof item}'`)
}

private throwInvalidFormatError(message: string): never {
throw new Error(`Invalid filter YAML format: ${message}.`)
}
}

// Creates a new array with all sub-array elements concatenated
// In future could be replaced by Array.prototype.flat (supported on Node.js 11+)
function flat<T>(arr: T[][]): T[] {
return arr.reduce((acc, val) => acc.concat(val), [])
}

src/git.ts
@@ -1,26 +1,204 @@
import {exec} from '@actions/exec'
import exec from './exec'
import * as core from '@actions/core'
import {File, ChangeStatus} from './file'

export async function fetchBranch(base: string): Promise<void> {
const exitCode = await exec('git', ['fetch', '--depth=1', 'origin', base])
if (exitCode !== 0) {
throw new Error(`Fetching branch ${base} failed, exiting`)
export const NULL_SHA = '0000000000000000000000000000000000000000'
export const HEAD = 'HEAD'

export async function getChangesInLastCommit(): Promise<File[]> {
core.startGroup(`Change detection in last commit`)
let output = ''
try {
output = (await exec('git', ['log', '--format=', '--no-renames', '--name-status', '-z', '-n', '1'])).stdout
} finally {
fixStdOutNullTermination()
core.endGroup()
}

return parseGitDiffOutput(output)
}

export async function getChangedFiles(base: string): Promise<string[]> {
let output = ''
const exitCode = await exec('git', ['diff-index', '--name-only', base], {
listeners: {
stdout: (data: Buffer) => (output += data.toString())
}
})
export async function getChanges(ref: string): Promise<File[]> {
if (!(await hasCommit(ref))) {
// Fetch single commit
core.startGroup(`Fetching ${ref} from origin`)
await exec('git', ['fetch', '--depth=1', '--no-tags', 'origin', ref])
core.endGroup()
}

if (exitCode !== 0) {
throw new Error(`Couldn't determine changed files, exiting`)
// Get differences between ref and HEAD
core.startGroup(`Change detection ${ref}..HEAD`)
let output = ''
try {
// Two dots '..' change detection - directly compares two versions
output = (await exec('git', ['diff', '--no-renames', '--name-status', '-z', `${ref}..HEAD`])).stdout
} finally {
fixStdOutNullTermination()
core.endGroup()
}

return parseGitDiffOutput(output)
}

export async function getChangesOnHead(): Promise<File[]> {
// Get current changes - both staged and unstaged
core.startGroup(`Change detection on HEAD`)
let output = ''
try {
output = (await exec('git', ['diff', '--no-renames', '--name-status', '-z', 'HEAD'])).stdout
} finally {
fixStdOutNullTermination()
core.endGroup()
}

return parseGitDiffOutput(output)
}

export async function getChangesSinceMergeBase(ref: string, initialFetchDepth: number): Promise<File[]> {
if (!(await hasCommit(ref))) {
// Fetch and add base branch
core.startGroup(`Fetching ${ref}`)
try {
await exec('git', ['fetch', `--depth=${initialFetchDepth}`, '--no-tags', 'origin', `${ref}:${ref}`])
} finally {
core.endGroup()
}
}

async function hasMergeBase(): Promise<boolean> {
return (await exec('git', ['merge-base', ref, 'HEAD'], {ignoreReturnCode: true})).code === 0
}

async function countCommits(): Promise<number> {
return (await getNumberOfCommits('HEAD')) + (await getNumberOfCommits(ref))
}

core.startGroup(`Searching for merge-base with ${ref}`)
// Fetch more commits until merge-base is found
if (!(await hasMergeBase())) {
let deepen = initialFetchDepth
let lastCommitsCount = await countCommits()
do {
await exec('git', ['fetch', `--deepen=${deepen}`, '--no-tags'])
const count = await countCommits()
if (count <= lastCommitsCount) {
core.info('No merge base found - all files will be listed as added')
core.endGroup()
return await listAllFilesAsAdded()
}
lastCommitsCount = count
deepen = Math.min(deepen * 2, Number.MAX_SAFE_INTEGER)
} while (!(await hasMergeBase()))
}
core.endGroup()

// Get changes introduced on HEAD compared to ref
core.startGroup(`Change detection ${ref}...HEAD`)
let output = ''
try {
// Three dots '...' change detection - finds merge-base and compares against it
output = (await exec('git', ['diff', '--no-renames', '--name-status', '-z', `${ref}...HEAD`])).stdout
} finally {
fixStdOutNullTermination()
core.endGroup()
}

return parseGitDiffOutput(output)
}

export function parseGitDiffOutput(output: string): File[] {
const tokens = output.split('\u0000').filter(s => s.length > 0)
const files: File[] = []
for (let i = 0; i + 1 < tokens.length; i += 2) {
files.push({
status: statusMap[tokens[i]],
filename: tokens[i + 1]
})
}
return files
}

export async function listAllFilesAsAdded(): Promise<File[]> {
core.startGroup('Listing all files tracked by git')
let output = ''
try {
output = (await exec('git', ['ls-files', '-z'])).stdout
} finally {
fixStdOutNullTermination()
core.endGroup()
}

return output
.split('\n')
.map(s => s.trim())
.split('\u0000')
.filter(s => s.length > 0)
.map(path => ({
status: ChangeStatus.Added,
filename: path
}))
}

export async function getCurrentRef(): Promise<string> {
core.startGroup(`Determining current ref`)
try {
const branch = (await exec('git', ['branch', '--show-current'])).stdout.trim()
if (branch) {
return branch
}

const describe = await exec('git', ['describe', '--tags', '--exact-match'], {ignoreReturnCode: true})
if (describe.code === 0) {
return describe.stdout.trim()
}

return (await exec('git', ['rev-parse', 'HEAD'])).stdout.trim()
} finally {
core.endGroup()
}
}

export function getShortName(ref: string): string {
if (!ref) return ''

const heads = 'refs/heads/'
const tags = 'refs/tags/'

if (ref.startsWith(heads)) return ref.slice(heads.length)
if (ref.startsWith(tags)) return ref.slice(tags.length)

return ref
}

export function isGitSha(ref: string): boolean {
return /^[a-z0-9]{40}$/.test(ref)
}

async function hasCommit(ref: string): Promise<boolean> {
core.startGroup(`Checking if commit for ${ref} is locally available`)
try {
return (await exec('git', ['cat-file', '-e', `${ref}^{commit}`], {ignoreReturnCode: true})).code === 0
} finally {
core.endGroup()
}
}

async function getNumberOfCommits(ref: string): Promise<number> {
const output = (await exec('git', ['rev-list', `--count`, ref])).stdout
const count = parseInt(output)
return isNaN(count) ? 0 : count
}

function fixStdOutNullTermination(): void {
// Previous command uses NULL as delimiters and output is printed to stdout.
// We have to make sure next thing written to stdout will start on new line.
// Otherwise things like ::set-output wouldn't work.
core.info('')
}

const statusMap: {[char: string]: ChangeStatus} = {
A: ChangeStatus.Added,
C: ChangeStatus.Copied,
D: ChangeStatus.Deleted,
M: ChangeStatus.Modified,
R: ChangeStatus.Renamed,
U: ChangeStatus.Unmerged
}
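To make the new data flow concrete, here is a small sketch that ties the two files above together: parsing NUL-separated `git diff --name-status -z` output into File objects and running them through a filter. The diff string and the filter rules are invented for illustration; only the APIs come from src/git.ts and src/filter.ts.

import {Filter} from './filter'
import {parseGitDiffOutput} from './git'

// Simulated `git diff --no-renames --name-status -z` output:
// status letter and path alternate, separated by NUL characters
const diffOutput = 'M\u0000src/main.ts\u0000A\u0000docs/intro.md\u0000D\u0000old/legacy.ts\u0000'
const files = parseGitDiffOutput(diffOutput)
// => [{status: 'modified', filename: 'src/main.ts'},
//     {status: 'added', filename: 'docs/intro.md'},
//     {status: 'deleted', filename: 'old/legacy.ts'}]

// Two of the YAML forms the filter accepts: a plain pattern
// and a pattern qualified by change status
const filter = new Filter(`
src:
  - 'src/**'
docs:
  - added|modified: 'docs/**'
`)

const results = filter.match(files)
// results['src']  => [{status: 'modified', filename: 'src/main.ts'}]
// results['docs'] => [{status: 'added', filename: 'docs/intro.md'}]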
src/list-format/csv-escape.ts (new file)
@@ -0,0 +1,16 @@
// Returns filename escaped for CSV
// Wraps file name into "..." only when it contains some potentially unsafe character
export function csvEscape(value: string): string {
  if (value === '') return value

  // Only safe characters
  if (/^[a-zA-Z0-9._+:@%/-]+$/m.test(value)) {
    return value
  }

  // https://tools.ietf.org/html/rfc4180
  // If double-quotes are used to enclose fields, then a double-quote
  // appearing inside a field must be escaped by preceding it with
  // another double quote
  return `"${value.replace(/"/g, '""')}"`
}
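Behaviour of csvEscape on a few representative names; the file names are examples, expected values are shown as comments:

import {csvEscape} from './list-format/csv-escape'

csvEscape('src/main.ts')      // => src/main.ts         (only safe characters, returned as-is)
csvEscape('docs/my file.md')  // => "docs/my file.md"   (space forces quoting)
csvEscape('weird"name.txt')   // => "weird""name.txt"   (inner quote doubled per RFC 4180)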
src/list-format/shell-escape.ts (new file)
@@ -0,0 +1,28 @@
// Backslash escape every character except small subset of definitely safe characters
export function backslashEscape(value: string): string {
  return value.replace(/([^a-zA-Z0-9,._+:@%/-])/gm, '\\$1')
}

// Returns filename escaped for usage as shell argument.
// Applies "human readable" approach with as few escaping applied as possible
export function shellEscape(value: string): string {
  if (value === '') return value

  // Only safe characters
  if (/^[a-zA-Z0-9,._+:@%/-]+$/m.test(value)) {
    return value
  }

  if (value.includes("'")) {
    // Only safe characters, single quotes and white-spaces
    if (/^[a-zA-Z0-9,._+:@%/'\s-]+$/m.test(value)) {
      return `"${value}"`
    }

    // Split by single quote and apply escaping recursively
    return value.split("'").map(shellEscape).join("\\'")
  }

  // Contains some unsafe characters but no single quote
  return `'${value}'`
}
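The same kind of walkthrough for the shell-oriented escaping; file names are examples, expected values as comments:

import {backslashEscape, shellEscape} from './list-format/shell-escape'

backslashEscape('file with spaces.txt')  // => file\ with\ spaces.txt
shellEscape('src/main.ts')               // => src/main.ts            (safe, untouched)
shellEscape('file with spaces.txt')      // => 'file with spaces.txt' (single-quoted)
shellEscape("it's here.txt")             // => "it's here.txt"        (double-quoted because of the single quote)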
src/main.ts
@@ -3,28 +3,37 @@ import * as core from '@actions/core'
import * as github from '@actions/github'
import {Webhooks} from '@octokit/webhooks'

import Filter from './filter'
import {Filter, FilterResults} from './filter'
import {File, ChangeStatus} from './file'
import * as git from './git'
import {backslashEscape, shellEscape} from './list-format/shell-escape'
import {csvEscape} from './list-format/csv-escape'

type ExportFormat = 'none' | 'csv' | 'json' | 'shell' | 'escape'

async function run(): Promise<void> {
try {
const workingDirectory = core.getInput('working-directory', {required: false})
if (workingDirectory) {
process.chdir(workingDirectory)
}

const token = core.getInput('token', {required: false})
const base = core.getInput('base', {required: false})
const filtersInput = core.getInput('filters', {required: true})
const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput
const listFiles = core.getInput('list-files', {required: false}).toLowerCase() || 'none'
const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth', {required: false})) || 10

if (github.context.eventName !== 'pull_request') {
core.setFailed('This action can be triggered only by pull_request event')
if (!isExportFormat(listFiles)) {
core.setFailed(`Input parameter 'list-files' is set to invalid value '${listFiles}'`)
return
}

const pr = github.context.payload.pull_request as Webhooks.WebhookPayloadPullRequestPullRequest
const filter = new Filter(filtersYaml)
const files = token ? await getChangedFilesFromApi(token, pr) : await getChangedFilesFromGit(pr)

const result = filter.match(files)
for (const key in result) {
core.setOutput(key, String(result[key]))
}
const files = await getChangedFiles(token, base, initialFetchDepth)
const results = filter.match(files)
exportResults(results, listFiles)
} catch (error) {
core.setFailed(error.message)
}
@@ -46,24 +55,88 @@ function getConfigFileContent(configPath: string): string {
return fs.readFileSync(configPath, {encoding: 'utf8'})
}

// Fetch base branch and use `git diff` to determine changed files
async function getChangedFilesFromGit(pullRequest: Webhooks.WebhookPayloadPullRequestPullRequest): Promise<string[]> {
core.debug('Fetching base branch and using `git diff-index` to determine changed files')
const baseRef = pullRequest.base.ref
await git.fetchBranch(baseRef)
return await git.getChangedFiles(pullRequest.base.sha)
async function getChangedFiles(token: string, base: string, initialFetchDepth: number): Promise<File[]> {
// if base is 'HEAD' only local uncommitted changes will be detected
// This is the simplest case as we don't need to fetch more commits or evaluate current/before refs
if (base === git.HEAD) {
return await git.getChangesOnHead()
}

if (github.context.eventName === 'pull_request' || github.context.eventName === 'pull_request_target') {
const pr = github.context.payload.pull_request as Webhooks.WebhookPayloadPullRequestPullRequest
if (token) {
return await getChangedFilesFromApi(token, pr)
}
core.info('Github token is not available - changes will be detected from PRs merge commit')
return await git.getChangesInLastCommit()
} else {
return getChangedFilesFromGit(base, initialFetchDepth)
}
}

async function getChangedFilesFromGit(base: string, initialFetchDepth: number): Promise<File[]> {
const defaultRef = github.context.payload.repository?.default_branch

const beforeSha =
github.context.eventName === 'push' ? (github.context.payload as Webhooks.WebhookPayloadPush).before : null

const pushRef =
git.getShortName(github.context.ref) ||
(core.warning(`'ref' field is missing in event payload - using current branch, tag or commit SHA`),
await git.getCurrentRef())

const baseRef = git.getShortName(base) || defaultRef
if (!baseRef) {
throw new Error(
"This action requires 'base' input to be configured or 'repository.default_branch' to be set in the event payload"
)
}

const isBaseRefSha = git.isGitSha(baseRef)
const isBaseSameAsPush = baseRef === pushRef

// If base is commit SHA we will do comparison against the referenced commit
// Or if base references same branch it was pushed to, we will do comparison against the previously pushed commit
if (isBaseRefSha || isBaseSameAsPush) {
if (!isBaseRefSha && !beforeSha) {
core.warning(`'before' field is missing in event payload - changes will be detected from last commit`)
return await git.getChangesInLastCommit()
}

const baseSha = isBaseRefSha ? baseRef : beforeSha
// If there is no previously pushed commit,
// we will do comparison against the default branch or return all as added
if (baseSha === git.NULL_SHA) {
if (defaultRef && baseRef !== defaultRef) {
core.info(`First push of a branch detected - changes will be detected against the default branch ${defaultRef}`)
return await git.getChangesSinceMergeBase(defaultRef, initialFetchDepth)
} else {
core.info('Initial push detected - all files will be listed as added')
return await git.listAllFilesAsAdded()
}
}

core.info(`Changes will be detected against commit (${baseSha})`)
return await git.getChanges(baseSha)
}

// Changes introduced by current branch against the base branch
core.info(`Changes will be detected against the branch ${baseRef}`)
return await git.getChangesSinceMergeBase(baseRef, initialFetchDepth)
}

// Uses github REST api to get list of files changed in PR
async function getChangedFilesFromApi(
token: string,
pullRequest: Webhooks.WebhookPayloadPullRequestPullRequest
): Promise<string[]> {
core.debug('Fetching list of modified files from Github API')
): Promise<File[]> {
core.startGroup(`Fetching list of changed files for PR#${pullRequest.number} from Github API`)
core.info(`Number of changed_files is ${pullRequest.changed_files}`)
const client = new github.GitHub(token)
const pageSize = 100
const files: string[] = []
for (let page = 0; page * pageSize < pullRequest.changed_files; page++) {
const files: File[] = []
for (let page = 1; (page - 1) * pageSize < pullRequest.changed_files; page++) {
core.info(`Invoking listFiles(pull_number: ${pullRequest.number}, page: ${page}, per_page: ${pageSize})`)
const response = await client.pulls.listFiles({
owner: github.context.repo.owner,
repo: github.context.repo.repo,
@@ -72,11 +145,87 @@ async function getChangedFilesFromApi(
per_page: pageSize
})
for (const row of response.data) {
files.push(row.filename)
core.info(`[${row.status}] ${row.filename}`)
// There's no obvious use-case for detection of renames
// Therefore we treat it as if rename detection in git diff was turned off.
// Rename is replaced by delete of original filename and add of new filename
if (row.status === ChangeStatus.Renamed) {
files.push({
filename: row.filename,
status: ChangeStatus.Added
})
files.push({
// 'previous_filename' for some unknown reason isn't in the type definition or documentation
filename: (<any>row).previous_filename as string,
status: ChangeStatus.Deleted
})
} else {
// Github status and git status variants are same except for deleted files
const status = row.status === 'removed' ? ChangeStatus.Deleted : (row.status as ChangeStatus)
files.push({
filename: row.filename,
status
})
}
}
}

core.endGroup()
return files
}

function exportResults(results: FilterResults, format: ExportFormat): void {
core.info('Results:')
const changes = []
for (const [key, files] of Object.entries(results)) {
const value = files.length > 0
core.startGroup(`Filter ${key} = ${value}`)
if (files.length > 0) {
changes.push(key)
core.info('Matching files:')
for (const file of files) {
core.info(`${file.filename} [${file.status}]`)
}
} else {
core.info('Matching files: none')
}

core.setOutput(key, value)
core.setOutput(`${key}_count`, files.length)
if (format !== 'none') {
const filesValue = serializeExport(files, format)
core.setOutput(`${key}_files`, filesValue)
}
core.endGroup()
}

if (results['changes'] === undefined) {
const changesJson = JSON.stringify(changes)
core.info(`Changes output set to ${changesJson}`)
core.setOutput('changes', changesJson)
} else {
core.info('Cannot set changes output variable - name already used by filter output')
}
}

function serializeExport(files: File[], format: ExportFormat): string {
const fileNames = files.map(file => file.filename)
switch (format) {
case 'csv':
return fileNames.map(csvEscape).join(',')
case 'json':
return JSON.stringify(fileNames)
case 'escape':
return fileNames.map(backslashEscape).join(' ')
case 'shell':
return fileNames.map(shellEscape).join(' ')
default:
return ''
}
}

function isExportFormat(value: string): value is ExportFormat {
return ['none', 'csv', 'shell', 'json', 'escape'].includes(value)
}

run()
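Putting it together, here is a sketch of the outputs the action would set for a filter named 'src' that matched two files. serializeExport is internal to main.ts, so its results are shown as comments; the file names are invented.

import {File, ChangeStatus} from './file'

const matched: File[] = [
  {filename: 'src/main.ts', status: ChangeStatus.Modified},
  {filename: 'src/new file.ts', status: ChangeStatus.Added}
]
// For these matches under a filter named 'src', exportResults sets:
//   src        => true
//   src_count  => 2
//   src_files  => serializeExport(matched, format):
//     'json'   => ["src/main.ts","src/new file.ts"]
//     'csv'    => src/main.ts,"src/new file.ts"
//     'shell'  => src/main.ts 'src/new file.ts'
//     'escape' => src/main.ts src/new\ file.ts
// Unless one of the filters is itself named 'changes', a 'changes' output is
// also set: a JSON array of the names of all filters that matched.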
tsconfig.json
@@ -1,6 +1,6 @@
{
"compilerOptions": {
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"target": "es2019", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"outDir": "./lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */