Compare commits

..

2 Commits

Author | SHA1 | Message | Date
Bassem Dghaidi | a649471855 | Merge branch 'main' into alert-autofix-51 | 2026-01-29 11:04:18 +01:00
Bassem Dghaidi | af0f7d9495 | Potential fix for code scanning alert no. 51: Workflow does not contain permissions (Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com>) | 2026-01-29 11:02:50 +01:00
15 changed files with 146 additions and 480 deletions
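The code scanning alert named in the second commit ("Workflow does not contain permissions") fires when a workflow never restricts the `GITHUB_TOKEN`, so every job runs with the repository's default token grant. The usual remediation, which the workflow hunks below add or move around, is an explicit `permissions:` block at the workflow or job level. A minimal sketch of the workflow-level form (the workflow name and job are placeholders, not taken from this compare):

```yaml
name: Example CI

# Applies to every job in the workflow unless a job declares its own block.
permissions:
  contents: read

on:
  push:
    branches: [main]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: echo "GITHUB_TOKEN is limited to read-only repository contents here"
```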

View File

@@ -1,4 +1,4 @@
-name: Check dist content
+name: Check dist/
on:
push:
@@ -11,9 +11,6 @@ on:
- '**.md'
workflow_dispatch:
permissions:
contents: read
jobs:
call-check-dist:
name: Check dist/

View File

@@ -1,5 +1,4 @@
name: Close inactive issues
on:
schedule:
- cron: "30 8 * * *"

View File

@@ -1,4 +1,4 @@
-name: Code scanning
+name: "Code scanning - action"
on:
push:
@@ -6,14 +6,15 @@ on:
schedule:
- cron: '0 19 * * 0'
permissions:
contents: read
security-events: write
jobs:
CodeQL-Build:
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
runs-on: ubuntu-latest
permissions:
# required for all workflows
security-events: write
steps:
- name: Checkout repository
uses: actions/checkout@v5

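The two permissions placements in this hunk behave differently: a job-level `permissions:` block replaces the workflow-level one for that job rather than merging with it, so a job that uploads CodeQL results must restate everything it needs, including `security-events: write`. A sketch of that interaction (the job and step shown are illustrative, not copied from this diff):

```yaml
name: Code scanning example

permissions:
  contents: read            # default grant for every job in this workflow

jobs:
  CodeQL-Build:
    runs-on: ubuntu-latest
    permissions:
      contents: read          # must be restated; the job block overrides the workflow block
      security-events: write  # required to upload code scanning (SARIF) results
    steps:
      - uses: actions/checkout@v4
```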
View File

@@ -1,21 +1,16 @@
name: Assign issue
on:
issues:
types: [opened]
permissions:
issues: write
jobs:
run-action:
runs-on: ubuntu-latest
steps:
- name: Get current oncall
id: oncall
run: |
echo "CURRENT=$(curl --request GET 'https://api.pagerduty.com/oncalls?include[]=users&schedule_ids[]=P5VG2BX&earliest=true' --header 'Authorization: Token token=${{ secrets.PAGERDUTY_TOKEN }}' --header 'Accept: application/vnd.pagerduty+json;version=2' --header 'Content-Type: application/json' | jq -r '.oncalls[].user.name')" >> $GITHUB_OUTPUT
- name: add_assignees
run: |
curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.issue.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'

View File

@@ -1,4 +1,4 @@
-name: License check
+name: Licensed
on:
push:
@@ -9,9 +9,6 @@ on:
- main
workflow_dispatch:
permissions:
contents: read
jobs:
validate-cached-dependency-records:
runs-on: ubuntu-latest

View File

@@ -1,25 +1,20 @@
-name: Assign pull request reviewer
+name: Add Reviewer PR
on:
pull_request_target:
types: [opened]
permissions:
pull-requests: write
jobs:
run-action:
runs-on: ubuntu-latest
steps:
- name: Get current oncall
id: oncall
run: |
echo "CURRENT=$(curl --request GET 'https://api.pagerduty.com/oncalls?include[]=users&schedule_ids[]=P5VG2BX&earliest=true' --header 'Authorization: Token token=${{ secrets.PAGERDUTY_TOKEN }}' --header 'Accept: application/vnd.pagerduty+json;version=2' --header 'Content-Type: application/json' | jq -r '.oncalls[].user.name')" >> $GITHUB_OUTPUT
- name: Request Review
run: |
curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/pulls/${{ github.event.pull_request.number}}/requested_reviewers -d '{"reviewers":["${{steps.oncall.outputs.CURRENT}}"]}'
- name: Add Assignee
run: |
curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.pull_request.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'

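Both oncall workflows above call the GitHub REST API directly with `${{ secrets.GITHUB_TOKEN }}`: adding assignees to an issue needs `issues: write`, and requesting pull request reviewers needs `pull-requests: write`. A least-privilege sketch of the token scopes those curl calls rely on (a hedged illustration, not the exact block from either file):

```yaml
permissions:
  issues: write          # POST /repos/{owner}/{repo}/issues/{issue_number}/assignees
  pull-requests: write   # POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers
```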
View File

@@ -1,17 +1,17 @@
-name: Publish immutable action
+name: 'Publish Immutable Action Version'
on:
release:
types: [released]
permissions:
contents: read
id-token: write
packages: write
jobs:
publish:
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
packages: write
steps:
- name: Checking out
uses: actions/checkout@v5

View File

@@ -1,5 +1,4 @@
name: Release new action version
on:
release:
types: [released]
@@ -11,7 +10,6 @@ on:
env:
TAG_NAME: ${{ github.event.inputs.TAG_NAME || github.event.release.tag_name }}
permissions:
contents: write

View File

@@ -1,5 +1,8 @@
name: Tests
permissions:
contents: read
on:
pull_request:
branches:
@@ -10,9 +13,6 @@ on:
- main
- releases/**
permissions:
contents: read
jobs:
# Build and unit test
build:
@@ -60,7 +60,6 @@ jobs:
path: |
test-cache
~/test-cache
test-restore:
needs: test-save
strategy:
@@ -90,359 +89,44 @@ jobs:
runs-on: ubuntu-latest
container:
image: ubuntu:latest
-options: --privileged
+options: --dns 127.0.0.1
services:
squid-proxy:
-image: wernight/squid
+image: ubuntu/squid:latest
ports:
- 3128:3128
env:
https_proxy: http://squid-proxy:3128
steps:
- name: Checkout
uses: actions/checkout@v5
- name: Install dependencies
run: |
apt-get update
apt-get install -y iptables dnsutils curl jq ipset
- name: Fetch GitHub meta and configure firewall
run: |
# Fetch GitHub meta API to get all IP ranges
echo "Fetching GitHub meta API..."
curl -sS https://api.github.com/meta > /tmp/github-meta.json
# Wait for squid-proxy service to be resolvable and accepting connections
echo "Waiting for squid-proxy service..."
for i in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15; do
PROXY_IP=$(getent hosts squid-proxy | awk '{ print $1 }')
if [ -n "$PROXY_IP" ]; then
echo "squid-proxy resolved to: $PROXY_IP"
# Test that proxy is actually accepting connections
if curl --connect-timeout 2 --max-time 5 -x http://squid-proxy:3128 -sS https://api.github.com/zen 2>/dev/null; then
echo "Proxy is working!"
break
else
echo "Attempt $i: Proxy resolved but not ready yet, waiting..."
fi
else
echo "Attempt $i: squid-proxy not resolvable yet, waiting..."
fi
sleep 2
done
if [ -z "$PROXY_IP" ]; then
echo "ERROR: Could not resolve squid-proxy after 15 attempts"
exit 1
fi
# Verify proxy works before locking down firewall
echo "Final proxy connectivity test..."
if ! curl --connect-timeout 5 --max-time 10 -x http://squid-proxy:3128 -sS https://api.github.com/zen; then
echo "ERROR: Proxy is not working properly"
exit 1
fi
echo "Proxy verified working!"
# Allow established connections
iptables -A OUTPUT -m state --state ESTABLISHED,RELATED -j ACCEPT
# Allow loopback
iptables -A OUTPUT -o lo -j ACCEPT
# Allow connections to the proxy
iptables -A OUTPUT -d $PROXY_IP -p tcp --dport 3128 -j ACCEPT
# Allow DNS
iptables -A OUTPUT -p udp --dport 53 -j ACCEPT
iptables -A OUTPUT -p tcp --dport 53 -j ACCEPT
# Create ipset for GitHub IPs (more efficient than individual rules)
ipset create github-ips hash:net
# Add all GitHub IP ranges from meta API (hooks, web, api, git, actions, etc.)
# EXCLUDING blob storage which must go through proxy
for category in hooks web api git pages importer actions actions_macos codespaces copilot; do
echo "Adding IPs for category: $category"
jq -r ".${category}[]? // empty" /tmp/github-meta.json 2>/dev/null | while read cidr; do
# Skip IPv6 for now (iptables vs ip6tables) - use case for POSIX compatibility
case "$cidr" in
*:*) ;; # IPv6, skip
*) ipset add github-ips "$cidr" 2>/dev/null || true ;;
esac
done
done
# Allow all GitHub IPs
iptables -A OUTPUT -m set --match-set github-ips dst -p tcp --dport 443 -j ACCEPT
iptables -A OUTPUT -m set --match-set github-ips dst -p tcp --dport 80 -j ACCEPT
# CRITICAL: Block direct access to blob storage and results-receiver
# These MUST go through the proxy for cache operations
echo "Blocking direct access to cache-critical endpoints..."
# Block results-receiver.actions.githubusercontent.com
for ip in $(getent ahosts "results-receiver.actions.githubusercontent.com" 2>/dev/null | awk '{print $1}' | sort -u); do
echo "Blocking direct access to results-receiver: $ip"
iptables -I OUTPUT 1 -d "$ip" -p tcp --dport 443 -j REJECT
done
# Block blob.core.windows.net (Azure blob storage used for cache)
for host in productionresultssa0.blob.core.windows.net productionresultssa1.blob.core.windows.net productionresultssa2.blob.core.windows.net productionresultssa3.blob.core.windows.net; do
for ip in $(getent ahosts "$host" 2>/dev/null | awk '{print $1}' | sort -u); do
echo "Blocking direct access to blob storage ($host): $ip"
iptables -I OUTPUT 1 -d "$ip" -p tcp --dport 443 -j REJECT
done
done
# Block all other outbound HTTP/HTTPS traffic
iptables -A OUTPUT -p tcp --dport 80 -j REJECT
iptables -A OUTPUT -p tcp --dport 443 -j REJECT
echo "iptables rules applied:"
iptables -L OUTPUT -n -v
echo ""
echo "ipset github-ips contains $(ipset list github-ips | grep -c '^[0-9]') entries"
- name: Verify proxy enforcement
run: |
echo "=== Testing proxy enforcement ==="
# Test 1: Verify proxy is working by explicitly using it
echo "Test 1: Connection through proxy (should SUCCEED)"
if curl --connect-timeout 10 --max-time 15 -x http://squid-proxy:3128 -sS -o /dev/null -w "%{http_code}" https://api.github.com/zen; then
echo ""
echo "✓ Proxy connection works"
else
echo "✗ ERROR: Proxy is not working!"
exit 1
fi
# Test 2: Direct connection to blob storage should FAIL (blocked by iptables)
echo ""
echo "Test 2: Direct connection to blob storage (should FAIL - blocked by iptables)"
if curl --connect-timeout 5 --max-time 10 --noproxy '*' -sS https://productionresultssa0.blob.core.windows.net 2>/dev/null; then
echo "✗ ERROR: Direct blob storage connection succeeded but should have been blocked!"
exit 1
else
echo "✓ Direct blob storage correctly blocked by iptables"
fi
# Test 3: Connection to blob storage THROUGH proxy should work
echo ""
echo "Test 3: Connection through proxy to blob storage (should SUCCEED)"
HTTP_CODE=$(curl --connect-timeout 10 --max-time 15 -x http://squid-proxy:3128 -sS -o /dev/null -w "%{http_code}" https://productionresultssa0.blob.core.windows.net 2>&1) || true
echo "HTTP response code: $HTTP_CODE"
if [ "$HTTP_CODE" = "400" ] || [ "$HTTP_CODE" = "409" ] || [ "$HTTP_CODE" = "200" ]; then
echo "✓ Proxy successfully forwarded request to blob storage (got HTTP $HTTP_CODE)"
else
echo "✗ ERROR: Proxy failed to forward request (got: $HTTP_CODE)"
exit 1
fi
echo ""
echo "=== All proxy enforcement tests passed ==="
echo "The proxy is working. If cache operations fail, it's because the action doesn't use the proxy."
- name: Generate files
run: __tests__/create-cache-files.sh proxy test-cache
- name: Save cache
env:
http_proxy: http://squid-proxy:3128
https_proxy: http://squid-proxy:3128
uses: ./
with:
key: test-proxy-${{ github.run_id }}
path: test-cache
- name: Verify proxy setup
run: |
echo "## 🔒 Proxy Integration Test - Cache Save" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### ✅ Test Configuration" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Proxy**: squid-proxy:3128" >> $GITHUB_STEP_SUMMARY
echo "- **Firewall**: iptables blocking direct access to cache endpoints" >> $GITHUB_STEP_SUMMARY
echo "- **Test**: Cache save operation completed successfully through proxy" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "If the cache save step succeeded, it means:" >> $GITHUB_STEP_SUMMARY
echo "1. Direct access to results-receiver.actions.githubusercontent.com was blocked" >> $GITHUB_STEP_SUMMARY
echo "2. Direct access to *.blob.core.windows.net was blocked" >> $GITHUB_STEP_SUMMARY
echo "3. Cache operations were routed through the squid proxy" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "✅ **SUCCESS**: Proxy integration test passed!" >> $GITHUB_STEP_SUMMARY
test-proxy-restore:
needs: test-proxy-save
runs-on: ubuntu-latest
container:
image: ubuntu:latest
-options: --privileged
+options: --dns 127.0.0.1
services:
squid-proxy:
-image: wernight/squid
+image: ubuntu/squid:latest
ports:
- 3128:3128
env:
https_proxy: http://squid-proxy:3128
steps:
- name: Checkout
uses: actions/checkout@v5
- name: Install dependencies
run: |
apt-get update
apt-get install -y iptables dnsutils curl jq ipset
- name: Fetch GitHub meta and configure firewall
run: |
# Fetch GitHub meta API to get all IP ranges
echo "Fetching GitHub meta API..."
curl -sS https://api.github.com/meta > /tmp/github-meta.json
# Wait for squid-proxy service to be resolvable and accepting connections
echo "Waiting for squid-proxy service..."
for i in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15; do
PROXY_IP=$(getent hosts squid-proxy | awk '{ print $1 }')
if [ -n "$PROXY_IP" ]; then
echo "squid-proxy resolved to: $PROXY_IP"
# Test that proxy is actually accepting connections
if curl --connect-timeout 2 --max-time 5 -x http://squid-proxy:3128 -sS https://api.github.com/zen 2>/dev/null; then
echo "Proxy is working!"
break
else
echo "Attempt $i: Proxy resolved but not ready yet, waiting..."
fi
else
echo "Attempt $i: squid-proxy not resolvable yet, waiting..."
fi
sleep 2
done
if [ -z "$PROXY_IP" ]; then
echo "ERROR: Could not resolve squid-proxy after 15 attempts"
exit 1
fi
# Verify proxy works before locking down firewall
echo "Final proxy connectivity test..."
if ! curl --connect-timeout 5 --max-time 10 -x http://squid-proxy:3128 -sS https://api.github.com/zen; then
echo "ERROR: Proxy is not working properly"
exit 1
fi
echo "Proxy verified working!"
# Allow established connections
iptables -A OUTPUT -m state --state ESTABLISHED,RELATED -j ACCEPT
# Allow loopback
iptables -A OUTPUT -o lo -j ACCEPT
# Allow connections to the proxy
iptables -A OUTPUT -d $PROXY_IP -p tcp --dport 3128 -j ACCEPT
# Allow DNS
iptables -A OUTPUT -p udp --dport 53 -j ACCEPT
iptables -A OUTPUT -p tcp --dport 53 -j ACCEPT
# Create ipset for GitHub IPs (more efficient than individual rules)
ipset create github-ips hash:net
# Add all GitHub IP ranges from meta API (hooks, web, api, git, actions, etc.)
# EXCLUDING blob storage which must go through proxy
for category in hooks web api git pages importer actions actions_macos codespaces copilot; do
echo "Adding IPs for category: $category"
jq -r ".${category}[]? // empty" /tmp/github-meta.json 2>/dev/null | while read cidr; do
# Skip IPv6 for now (iptables vs ip6tables) - use case for POSIX compatibility
case "$cidr" in
*:*) ;; # IPv6, skip
*) ipset add github-ips "$cidr" 2>/dev/null || true ;;
esac
done
done
# Allow all GitHub IPs
iptables -A OUTPUT -m set --match-set github-ips dst -p tcp --dport 443 -j ACCEPT
iptables -A OUTPUT -m set --match-set github-ips dst -p tcp --dport 80 -j ACCEPT
# CRITICAL: Block direct access to blob storage and results-receiver
# These MUST go through the proxy for cache operations
echo "Blocking direct access to cache-critical endpoints..."
# Block results-receiver.actions.githubusercontent.com
for ip in $(getent ahosts "results-receiver.actions.githubusercontent.com" 2>/dev/null | awk '{print $1}' | sort -u); do
echo "Blocking direct access to results-receiver: $ip"
iptables -I OUTPUT 1 -d "$ip" -p tcp --dport 443 -j REJECT
done
# Block blob.core.windows.net (Azure blob storage used for cache)
for host in productionresultssa0.blob.core.windows.net productionresultssa1.blob.core.windows.net productionresultssa2.blob.core.windows.net productionresultssa3.blob.core.windows.net; do
for ip in $(getent ahosts "$host" 2>/dev/null | awk '{print $1}' | sort -u); do
echo "Blocking direct access to blob storage ($host): $ip"
iptables -I OUTPUT 1 -d "$ip" -p tcp --dport 443 -j REJECT
done
done
# Block all other outbound HTTP/HTTPS traffic
iptables -A OUTPUT -p tcp --dport 80 -j REJECT
iptables -A OUTPUT -p tcp --dport 443 -j REJECT
echo "iptables rules applied:"
iptables -L OUTPUT -n -v
echo ""
echo "ipset github-ips contains $(ipset list github-ips | grep -c '^[0-9]') entries"
- name: Verify proxy enforcement
run: |
echo "=== Testing proxy enforcement ==="
# Test 1: Verify proxy is working by explicitly using it
echo "Test 1: Connection through proxy (should SUCCEED)"
if curl --connect-timeout 10 --max-time 15 -x http://squid-proxy:3128 -sS -o /dev/null -w "%{http_code}" https://api.github.com/zen; then
echo ""
echo "✓ Proxy connection works"
else
echo "✗ ERROR: Proxy is not working!"
exit 1
fi
# Test 2: Direct connection to blob storage should FAIL (blocked by iptables)
echo ""
echo "Test 2: Direct connection to blob storage (should FAIL - blocked by iptables)"
if curl --connect-timeout 5 --max-time 10 --noproxy '*' -sS https://productionresultssa0.blob.core.windows.net 2>/dev/null; then
echo "✗ ERROR: Direct blob storage connection succeeded but should have been blocked!"
exit 1
else
echo "✓ Direct blob storage correctly blocked by iptables"
fi
# Test 3: Connection to blob storage THROUGH proxy should work
echo ""
echo "Test 3: Connection through proxy to blob storage (should SUCCEED)"
HTTP_CODE=$(curl --connect-timeout 10 --max-time 15 -x http://squid-proxy:3128 -sS -o /dev/null -w "%{http_code}" https://productionresultssa0.blob.core.windows.net 2>&1) || true
echo "HTTP response code: $HTTP_CODE"
if [ "$HTTP_CODE" = "400" ] || [ "$HTTP_CODE" = "409" ] || [ "$HTTP_CODE" = "200" ]; then
echo "✓ Proxy successfully forwarded request to blob storage (got HTTP $HTTP_CODE)"
else
echo "✗ ERROR: Proxy failed to forward request (got: $HTTP_CODE)"
exit 1
fi
echo ""
echo "=== All proxy enforcement tests passed ==="
echo "The proxy is working. If cache operations fail, it's because the action doesn't use the proxy."
- name: Restore cache
env:
http_proxy: http://squid-proxy:3128
https_proxy: http://squid-proxy:3128
uses: ./
with:
key: test-proxy-${{ github.run_id }}
path: test-cache
- name: Verify proxy setup
run: |
echo "## 🔒 Proxy Integration Test - Cache Restore" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### ✅ Test Configuration" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Proxy**: squid-proxy:3128" >> $GITHUB_STEP_SUMMARY
echo "- **Firewall**: iptables blocking direct access to cache endpoints" >> $GITHUB_STEP_SUMMARY
echo "- **Test**: Cache restore operation completed successfully through proxy" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "If the cache restore step succeeded, it means:" >> $GITHUB_STEP_SUMMARY
echo "1. Direct access to results-receiver.actions.githubusercontent.com was blocked" >> $GITHUB_STEP_SUMMARY
echo "2. Direct access to *.blob.core.windows.net was blocked" >> $GITHUB_STEP_SUMMARY
echo "3. Cache operations were routed through the squid proxy" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "✅ **SUCCESS**: Proxy integration test passed!" >> $GITHUB_STEP_SUMMARY
- name: Verify cache
run: __tests__/verify-cache-files.sh proxy test-cache

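The lines removed above belonged to an integration test that forced cache traffic through a Squid sidecar and used iptables to reject any direct route, proving the action honors standard proxy settings. The cache action's HTTP client reads `http_proxy`/`https_proxy` (and `NO_PROXY`) from the environment, so the enforced path looks roughly like this sketch, assuming a `squid-proxy` service listening on port 3128 as in the hunks above:

```yaml
- name: Save cache through the proxy
  uses: actions/cache/save@v4
  env:
    http_proxy: http://squid-proxy:3128
    https_proxy: http://squid-proxy:3128
    # NO_PROXY could exempt hosts that must be reached directly, if any.
  with:
    path: test-cache
    key: test-proxy-${{ github.run_id }}
```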
View File

@@ -123,11 +123,11 @@ jobs:
runs-on: ubuntu-latest
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
- name: Cache Primes
id: cache-primes
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: prime-numbers
key: ${{ runner.os }}-primes
@@ -154,11 +154,11 @@ jobs:
runs-on: ubuntu-latest
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
- name: Restore cached Primes
id: cache-primes-restore
-uses: actions/cache/restore@v5
+uses: actions/cache/restore@v4
with:
path: |
path/to/dependencies
@@ -169,7 +169,7 @@ jobs:
.
- name: Save Primes
id: cache-primes-save
-uses: actions/cache/save@v5
+uses: actions/cache/save@v4
with:
path: |
path/to/dependencies
@@ -224,7 +224,7 @@ A cache key can include any of the contexts, functions, literals, and operators
For example, using the [`hashFiles`](https://docs.github.com/en/actions/learn-github-actions/expressions#hashfiles) function allows you to create a new cache when dependencies change.
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
path/to/dependencies
@@ -242,7 +242,7 @@ Additionally, you can use arbitrary command output in a cache key, such as a dat
echo "date=$(/bin/date -u "+%Y%m%d")" >> $GITHUB_OUTPUT
shell: bash
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: path/to/dependencies
key: ${{ runner.os }}-${{ steps.get-date.outputs.date }}-${{ hashFiles('**/lockfiles') }}
@@ -262,9 +262,9 @@ Example:
```yaml
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
-- uses: actions/cache@v5
+- uses: actions/cache@v4
id: cache
with:
path: path/to/dependencies
@@ -292,11 +292,11 @@ jobs:
build-linux:
runs-on: ubuntu-latest
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
- name: Cache Primes
id: cache-primes
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: prime-numbers
key: primes
@@ -307,7 +307,7 @@ jobs:
- name: Cache Numbers
id: cache-numbers
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: numbers
key: primes
@@ -319,11 +319,11 @@ jobs:
build-windows:
runs-on: windows-latest
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
- name: Cache Primes
id: cache-primes
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: prime-numbers
key: primes

View File

@@ -12,7 +12,7 @@ This document lists some of the strategies (and example workflows if possible) w
jobs:
build:
runs-on: ubuntu-latest
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
key: ${{ some-metadata }}-cache
```
@@ -24,7 +24,7 @@ In your workflows, you can use different strategies to name your key depending o
One of the most common use case is to use hash for lockfile as key. This way, same cache will be restored for a lockfile until there's a change in dependencies listed in lockfile.
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
path/to/dependencies
@@ -37,7 +37,7 @@ One of the most common use case is to use hash for lockfile as key. This way, sa
If cache is not found matching the primary key, restore keys can be used to download the closest matching cache that was recently created. This ensures that the build/install step will need to additionally fetch just a handful of newer dependencies, and hence saving build time.
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
path/to/dependencies
@@ -54,7 +54,7 @@ The restore keys can be provided as a complete name, or a prefix, read more [her
In case of workflows with matrix running for multiple Operating Systems, the caches can be stored separately for each of them. This can be used in combination with hashfiles in case multiple caches are being generated per OS.
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
path/to/dependencies
@@ -73,7 +73,7 @@ Caches scoped to the particular workflow run id or run attempt can be stored and
On similar lines, commit sha can be used to create a very specialized and short lived cache.
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
path/to/dependencies
@@ -86,7 +86,7 @@ On similar lines, commit sha can be used to create a very specialized and short
Cache key can be formed by combination of more than one metadata, evaluated info.
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
path/to/dependencies
@@ -146,9 +146,9 @@ In case you are using a centralized job to create and save your cache that can b
```yaml
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
-- uses: actions/cache/restore@v5
+- uses: actions/cache/restore@v4
id: cache
with:
path: path/to/dependencies
@@ -171,9 +171,9 @@ You can use the output of this action to exit the workflow on cache miss. This w
```yaml
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
-- uses: actions/cache/restore@v5
+- uses: actions/cache/restore@v4
id: cache
with:
path: path/to/dependencies
@@ -194,7 +194,7 @@ steps:
If you want to avoid re-computing the cache key again in `save` action, the outputs from `restore` action can be used as input to the `save` action.
```yaml
-- uses: actions/cache/restore@v5
+- uses: actions/cache/restore@v4
id: restore-cache
with:
path: |
@@ -204,7 +204,7 @@ If you want to avoid re-computing the cache key again in `save` action, the outp
.
.
.
-- uses: actions/cache/save@v5
+- uses: actions/cache/save@v4
with:
path: |
path/to/dependencies
@@ -219,7 +219,7 @@ On the other hand, the key can also be explicitly re-computed while executing th
Let's say we have a restore step that computes key at runtime
```yaml
-uses: actions/cache/restore@v5
+uses: actions/cache/restore@v4
id: restore-cache
with:
key: cache-${{ hashFiles('**/lockfiles') }}
@@ -228,7 +228,7 @@ with:
Case 1: Where an user would want to reuse the key as it is
```yaml
-uses: actions/cache/save@v5
+uses: actions/cache/save@v4
with:
key: ${{ steps.restore-cache.outputs.cache-primary-key }}
```
@@ -236,7 +236,7 @@ with:
Case 2: Where the user would want to re-evaluate the key
```yaml
-uses: actions/cache/save@v5
+uses: actions/cache/save@v4
with:
key: npm-cache-${{hashfiles(package-lock.json)}}
```
@@ -253,12 +253,12 @@ In case of multi-module projects, where the built artifact of one project needs
```yaml
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
- name: Build
run: ./build-parent-module.sh
-- uses: actions/cache/save@v5
+- uses: actions/cache/save@v4
id: cache
with:
path: path/to/dependencies
@@ -269,9 +269,9 @@ steps:
```yaml
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
-- uses: actions/cache/restore@v5
+- uses: actions/cache/restore@v4
id: cache
with:
path: path/to/dependencies

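One of the hunks above mentions using the restore action's output to exit the workflow on a cache miss. The `actions/cache/restore` action exposes a `cache-hit` output for exactly this, so the guard can be a single conditional step; a short sketch under those assumptions (paths and keys are placeholders):

```yaml
steps:
  - uses: actions/checkout@v4
  - uses: actions/cache/restore@v4
    id: cache
    with:
      path: path/to/dependencies
      key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
  - name: Fail on cache miss
    if: steps.cache.outputs.cache-hit != 'true'
    run: |
      echo "No cache found for the primary key; stopping early."
      exit 1
```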
View File

@@ -45,7 +45,7 @@
## Bun
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
~/.bun/install/cache
@@ -55,7 +55,7 @@
### Windows
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
~\.bun
@@ -67,7 +67,7 @@
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: ~/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
@@ -76,10 +76,10 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa
```
Depending on the environment, huge packages might be pre-installed in the global cache folder.
-From `actions/cache@v3` onwards, you can now exclude unwanted packages with [exclude pattern](https://github.com/actions/toolkit/tree/main/packages/glob#exclude-patterns)
+With `actions/cache@v4` you can now exclude unwanted packages with [exclude pattern](https://github.com/actions/toolkit/tree/main/packages/glob#exclude-patterns)
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
~/.nuget/packages
@@ -96,7 +96,7 @@ Or you could move the cache folder like below.
env:
NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages
steps:
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: ${{ github.workspace }}/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
@@ -108,7 +108,7 @@ steps:
```yaml
- name: Cache lein project dependencies
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: ~/.m2/repository
key: ${{ runner.os }}-clojure-${{ hashFiles('**/project.clj') }}
@@ -122,7 +122,7 @@ steps:
### POSIX
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: ~/.dub
key: ${{ runner.os }}-dub-${{ hashFiles('**/dub.selections.json') }}
@@ -133,7 +133,7 @@ steps:
### Windows
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: ~\AppData\Local\dub
key: ${{ runner.os }}-dub-${{ hashFiles('**/dub.selections.json') }}
@@ -146,7 +146,7 @@ steps:
### Linux
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
~/.deno
@@ -157,7 +157,7 @@ steps:
### macOS
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
~/.deno
@@ -168,7 +168,7 @@ steps:
### Windows
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
~\.deno
@@ -179,7 +179,7 @@ steps:
## Elixir - Mix
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
deps
@@ -191,7 +191,7 @@ steps:
## Erlang - Rebar3
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v2
with:
path: |
~/.cache/rebar3
@@ -206,7 +206,7 @@ steps:
### Linux
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
~/.cache/go-build
@@ -219,7 +219,7 @@ steps:
### macOS
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
~/Library/Caches/go-build
@@ -232,7 +232,7 @@ steps:
### Windows
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
~\AppData\Local\go-build
@@ -248,7 +248,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
```yaml
- name: Cache ~/.cabal/packages, ~/.cabal/store and dist-newstyle
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: |
~/.cabal/packages
@@ -263,14 +263,14 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
### Linux or macOS
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
name: Cache ~/.stack
with:
path: ~/.stack
key: ${{ runner.os }}-stack-global-${{ hashFiles('stack.yaml') }}-${{ hashFiles('package.yaml') }}
restore-keys: |
${{ runner.os }}-stack-global-
-- uses: actions/cache@v5
+- uses: actions/cache@v4
name: Cache .stack-work
with:
path: .stack-work
@@ -282,7 +282,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
### Windows
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
name: Cache %APPDATA%\stack %LOCALAPPDATA%\Programs\stack
with:
path: |
@@ -291,7 +291,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
key: ${{ runner.os }}-stack-global-${{ hashFiles('stack.yaml') }}-${{ hashFiles('package.yaml') }}
restore-keys: |
${{ runner.os }}-stack-global-
-- uses: actions/cache@v5
+- uses: actions/cache@v4
name: Cache .stack-work
with:
path: .stack-work
@@ -305,7 +305,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
> **Note** Ensure no Gradle daemons are running anymore when your workflow completes. Creating the cache package might fail due to locks being held by Gradle. Refer to the [Gradle Daemon documentation](https://docs.gradle.org/current/userguide/gradle_daemon.html) on how to disable or stop the Gradle Daemons.
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
~/.gradle/caches
@@ -319,7 +319,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
```yaml
- name: Cache local Maven repository
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
@@ -355,7 +355,7 @@ After [deprecation](https://github.blog/changelog/2022-10-11-github-actions-depr
`Get npm cache directory` step can then be used with `actions/cache` as shown below
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
id: npm-cache # use this to check for `cache-hit` ==> if: steps.npm-cache.outputs.cache-hit != 'true'
with:
path: ${{ steps.npm-cache-dir.outputs.dir }}
@@ -368,7 +368,7 @@ After [deprecation](https://github.blog/changelog/2022-10-11-github-actions-depr
```yaml
- name: restore lerna
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: '**/node_modules'
key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock') }}
@@ -382,7 +382,7 @@ The yarn cache directory will depend on your operating system and version of `ya
id: yarn-cache-dir-path
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
-- uses: actions/cache@v5
+- uses: actions/cache@v4
id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
@@ -400,7 +400,7 @@ The yarn 2 cache directory will depend on your config. See https://yarnpkg.com/c
id: yarn-cache-dir-path
run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
-- uses: actions/cache@v5
+- uses: actions/cache@v4
id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
@@ -415,7 +415,7 @@ Esy allows you to export built dependencies and import pre-built dependencies.
```yaml
- name: Restore Cache
id: restore-cache
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: _export
key: ${{ runner.os }}-esy-${{ hashFiles('esy.lock/index.json') }}
@@ -444,7 +444,7 @@ Esy allows you to export built dependencies and import pre-built dependencies.
id: composer-cache
run: |
echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: ${{ steps.composer-cache.outputs.dir }}
key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
@@ -465,7 +465,7 @@ Locations:
### Simple example
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
@@ -478,7 +478,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
### Multiple OS's in a workflow
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
if: startsWith(runner.os, 'Linux')
with:
path: ~/.cache/pip
@@ -486,7 +486,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
restore-keys: |
${{ runner.os }}-pip-
-- uses: actions/cache@v5
+- uses: actions/cache@v4
if: startsWith(runner.os, 'macOS')
with:
path: ~/Library/Caches/pip
@@ -494,7 +494,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
restore-keys: |
${{ runner.os }}-pip-
-- uses: actions/cache@v5
+- uses: actions/cache@v4
if: startsWith(runner.os, 'Windows')
with:
path: ~\AppData\Local\pip\Cache
@@ -520,7 +520,7 @@ jobs:
- os: windows-latest
path: ~\AppData\Local\pip\Cache
steps:
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: ${{ matrix.path }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
@@ -539,7 +539,7 @@ jobs:
echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
@@ -553,11 +553,11 @@ jobs:
- name: Set up Python
# The actions/cache step below uses this id to get the exact python version
id: setup-python
-uses: actions/setup-python@v6
+uses: actions/setup-python@v2
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: ~/.local/share/virtualenvs
key: ${{ runner.os }}-python-${{ steps.setup-python.outputs.python-version }}-pipenv-${{ hashFiles('Pipfile.lock') }}
@@ -584,7 +584,7 @@ For renv, the cache directory will vary by OS. The `RENV_PATHS_ROOT` environment
cat("##[set-output name=r-version;]", R.Version()$version.string, sep = "")
shell: Rscript {0}
- name: Restore Renv package cache
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: ${{ env.RENV_PATHS_ROOT }}
key: ${{ steps.get-version.outputs.os-version }}-${{ steps.get-version.outputs.r-version }}-${{ inputs.cache-version }}-${{ hashFiles('renv.lock') }}
@@ -610,7 +610,7 @@ whenever possible:
## Rust - Cargo
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: |
~/.cargo/bin/
@@ -625,7 +625,7 @@ whenever possible:
```yaml
- name: Cache SBT
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: |
~/.ivy2/cache
@@ -636,7 +636,7 @@ whenever possible:
## Swift, Objective-C - Carthage
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: Carthage
key: ${{ runner.os }}-carthage-${{ hashFiles('**/Cartfile.resolved') }}
@@ -647,7 +647,7 @@ whenever possible:
## Swift, Objective-C - CocoaPods
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: Pods
key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
@@ -658,7 +658,7 @@ whenever possible:
## Swift - Swift Package Manager
```yaml
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: .build
key: ${{ runner.os }}-spm-${{ hashFiles('**/Package.resolved') }}
@@ -673,7 +673,7 @@ env:
MINT_PATH: .mint/lib
MINT_LINK_PATH: .mint/bin
steps:
-- uses: actions/cache@v5
+- uses: actions/cache@v4
with:
path: .mint
key: ${{ runner.os }}-mint-${{ hashFiles('**/Mintfile') }}
@@ -689,7 +689,7 @@ steps:
```yaml
- name: Cache Bazel
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: |
~/.cache/bazel
@@ -703,7 +703,7 @@ steps:
```yaml
- name: Cache Bazel
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: |
/private/var/tmp/_bazel_runner/

View File

@@ -35,9 +35,9 @@ If you are using separate jobs to create and save your cache(s) to be reused by
```yaml
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
-- uses: actions/cache/restore@v5
+- uses: actions/cache/restore@v4
id: cache
with:
path: path/to/dependencies
@@ -64,12 +64,12 @@ In case of multi-module projects, where the built artifact of one project needs
```yaml
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
- name: Build
run: /build-parent-module.sh
-- uses: actions/cache/save@v5
+- uses: actions/cache/save@v4
id: cache
with:
path: path/to/dependencies
@@ -80,9 +80,9 @@ steps:
```yaml
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
-- uses: actions/cache/restore@v5
+- uses: actions/cache/restore@v4
id: cache
with:
path: path/to/dependencies
@@ -107,9 +107,9 @@ To fail if there is no cache hit for the primary key, leave `restore-keys` empty
```yaml
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
-- uses: actions/cache/restore@v5
+- uses: actions/cache/restore@v4
id: cache
with:
path: path/to/dependencies

View File

@@ -23,7 +23,7 @@ If you are using separate jobs for generating common artifacts and sharing them
```yaml
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
- name: Install Dependencies
run: /install.sh
@@ -31,7 +31,7 @@ steps:
- name: Build artifacts
run: /build.sh
-- uses: actions/cache/save@v5
+- uses: actions/cache/save@v4
id: cache
with:
path: path/to/dependencies
@@ -47,7 +47,7 @@ Let's say we have a restore step that computes a key at runtime.
#### Restore a cache
```yaml
-uses: actions/cache/restore@v5
+uses: actions/cache/restore@v4
id: restore-cache
with:
key: cache-${{ hashFiles('**/lockfiles') }}
@@ -55,7 +55,7 @@ with:
#### Case 1 - Where a user would want to reuse the key as it is
```yaml
-uses: actions/cache/save@v5
+uses: actions/cache/save@v4
with:
key: ${{ steps.restore-cache.outputs.cache-primary-key }}
```
@@ -63,7 +63,7 @@ with:
#### Case 2 - Where the user would want to re-evaluate the key
```yaml
-uses: actions/cache/save@v5
+uses: actions/cache/save@v4
with:
key: npm-cache-${{hashfiles(package-lock.json)}}
```
@@ -91,11 +91,11 @@ jobs:
runs-on: ubuntu-latest
steps:
-- uses: actions/checkout@v6
+- uses: actions/checkout@v4
- name: Restore cached Prime Numbers
id: cache-prime-numbers-restore
-uses: actions/cache/restore@v5
+uses: actions/cache/restore@v4
with:
key: ${{ runner.os }}-prime-numbers
path: |
@@ -107,7 +107,7 @@ jobs:
- name: Always Save Prime Numbers
id: cache-prime-numbers-save
if: always() && steps.cache-prime-numbers-restore.outputs.cache-hit != 'true'
-uses: actions/cache/save@v5
+uses: actions/cache/save@v4
with:
key: ${{ steps.cache-prime-numbers-restore.outputs.cache-primary-key }}
path: |

View File

@@ -12,7 +12,7 @@ A cache today is immutable and cannot be updated. But some use cases require the
- name: update cache on every commit
-uses: actions/cache@v5
+uses: actions/cache@v4
with:
path: prime-numbers
key: primes-${{ runner.os }}-${{ github.run_id }} # Can use time based key as well
key: primes-${{ runner.os }}-${{ github.run_id }} # Can use time based key as well key: primes-${{ runner.os }}-${{ github.run_id }} # Can use time based key as well