Compare commits
No commits in common. "main" and "master" have entirely different histories.
@@ -1,6 +0,0 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/typescript-node
{
    "name": "@actions/upload-artifact",
    "image": "mcr.microsoft.com/devcontainers/typescript-node:0-16"
}

@@ -4,10 +4,13 @@
  "parserOptions": { "ecmaVersion": 9, "sourceType": "module" },
  "extends": [
    "eslint:recommended",
    "plugin:@typescript-eslint/eslint-recommended",
    "plugin:@typescript-eslint/recommended",
    "plugin:import/errors",
    "plugin:import/warnings",
    "plugin:import/typescript",
    "plugin:prettier/recommended"
    "plugin:prettier/recommended",
    "prettier/@typescript-eslint"
  ],
  "rules": {
    "@typescript-eslint/no-empty-function": "off"

@@ -1,2 +1 @@
* text=auto eol=lf
.licenses/** -diff linguist-generated=true

@@ -1 +0,0 @@
* @actions/artifacts-actions
@@ -1,63 +0,0 @@
name: "🐛 Bug report"
description: Let us know about a bug!
labels: ['bug']
title: '[bug]'
body:
  - type: textarea
    id: problem
    attributes:
      label: What happened?
      description: |
        Please provide a clear and concise description of what the bug is. If applicable, add screenshots to help explain your problem.
    validations:
      required: true

  - type: textarea
    id: expected
    attributes:
      label: What did you expect to happen?
    validations:
      required: true

  - type: textarea
    id: repro
    attributes:
      label: How can we reproduce it?
      description: |
        Please be minimal and precise as possible. If your repo/run is public, please include a URL so it is easier for us to investigate.
    validations:
      required: true

  - type: textarea
    id: additional
    attributes:
      label: Anything else we need to know?

  - type: input
    id: version
    attributes:
      label: What version of the action are you using?
      placeholder: vX.Y.Z
      description: |
        Please check the documentation first since different major versions can have different behaviors.
    validations:
      required: true

  - type: dropdown
    id: environment
    attributes:
      label: What are your runner environments?
      multiple: true
      options:
        - self-hosted
        - linux
        - window
        - macos
    validations:
      required: true

  - type: input
    id: ghes
    attributes:
      label: Are you on GitHub Enterprise Server? If so, what version?
      placeholder: vX.Y

@@ -0,0 +1,33 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**Version**
- [ ] V1
- [ ] V2

**Environment**
- [ ] self-hosted
- [ ] Linux
- [ ] Windows
- [ ] Mac

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Run/Repo Url**
If applicable, and if your repo/run is public, please include a URL so it is easier for us to investigate.

**How to reproduce**
If applicable, add information on how to reproduce the problem.

**Additional context**
Add any other context about the problem here.

@@ -1,5 +0,0 @@
blank_issues_enabled: false
contact_links:
  - name: 🙋 Ask a question
    url: https://github.community/c/code-to-cloud/52
    about: Please ask and answer questions on GitHub Support Community.

@@ -1,28 +0,0 @@
name: "📚 Documentation issues"
description: Make a suggestion to improve the documentation!
labels: ['documentation']
title: '[docs]'
body:
  - type: markdown
    attributes:
      value: |
        ❗ This is only for documentation updates for files in this repo, ie: `README.md`.

        If you want to suggest changes for the [GitHub Docs](https://docs.github.com/), please [open an issue there](https://github.com/github/docs/issues/new/choose).
  - type: textarea
    id: affected
    attributes:
      label: What files would you like to change?
      description: |
        Please provide permalinks to the specified files and line numbers.
    validations:
      required: true

  - type: textarea
    id: suggested
    attributes:
      label: What are your suggested changes?
      description: |
        Give as much detail as you can to help us understand the changes you want to see.
    validations:
      required: true

@@ -1,20 +0,0 @@
name: "🎁 Feature request"
description: Suggest a new feature/enhancement!
labels: ['enhancement']
title: '[feat req]'
body:
  - type: textarea
    id: feature
    attributes:
      label: What would you like to be added?
      description: |
        Please check existing issues to avoid making duplicates. Any duplicate issue will be closed immediately.
    validations:
      required: true

  - type: textarea
    id: reasoning
    attributes:
      label: Why is this needed?
    validations:
      required: true
@@ -1,53 +0,0 @@
# `dist/index.js` is a special file in Actions.
# When you reference an action with `uses:` in a workflow,
# `index.js` is the code that will run.
# For our project, we generate this file through a build process
# from other source files.
# We need to make sure the checked-in `index.js` actually matches what we expect it to be.
name: Check dist/

on:
  push:
    branches:
      - main
    paths-ignore:
      - '**.md'
  pull_request:
    paths-ignore:
      - '**.md'
  workflow_dispatch:

jobs:
  check-dist:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Setup Node 20
        uses: actions/setup-node@v4
        with:
          node-version: 20.x
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Rebuild the dist/ directory
        run: npm run release

      - name: Compare the expected and actual dist/ directories
        run: |
          if [ "$(git diff --ignore-space-at-eol dist/ | wc -l)" -gt "0" ]; then
            echo "Detected uncommitted changes after build. See status below:"
            git diff
            exit 1
          fi
        id: diff

      # If index.js was different than expected, upload the expected version as an artifact
      - uses: actions/upload-artifact@v4
        if: ${{ failure() && steps.diff.conclusion == 'failure' }}
        with:
          name: dist
          path: dist/

@@ -2,8 +2,6 @@ name: "Code scanning - action"

on:
  push:
    branches-ignore: "dependabot/**"
  pull_request:
    paths-ignore:
      - '**.md'
  schedule:

@@ -17,11 +15,11 @@ jobs:

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        uses: actions/checkout@v2

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        uses: github/codeql-action/init@v1
        # Override language selection by uncommenting this and choosing your languages
        # with:
        #   languages: go, javascript, csharp, python, cpp, java

@@ -29,7 +27,7 @@ jobs:
      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2
        uses: github/codeql-action/autobuild@v1

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

@@ -43,4 +41,4 @@ jobs:
      #   make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
        uses: github/codeql-action/analyze@v1

@@ -1,24 +0,0 @@
name: Licensed

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  test:
    runs-on: ubuntu-latest
    name: Check licenses
    steps:
      - uses: actions/checkout@v4
      - run: npm ci
      - name: Install licensed
        run: |
          cd $RUNNER_TEMP
          curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/2.12.2/licensed-2.12.2-linux-x64.tar.gz
          sudo tar -xzf licensed.tar.gz
          sudo mv licensed /usr/local/bin/licensed
      - run: licensed status

@@ -1,28 +0,0 @@
name: Release new action version
on:
  release:
    types: [released]
  workflow_dispatch:
    inputs:
      TAG_NAME:
        description: 'Tag name that the major tag will point to'
        required: true

env:
  TAG_NAME: ${{ github.event.inputs.TAG_NAME || github.event.release.tag_name }}
permissions:
  contents: write

jobs:
  update_tag:
    name: Update the major tag to include the ${{ github.event.inputs.TAG_NAME || github.event.release.tag_name }} changes
    environment:
      name: releaseNewActionVersion
    runs-on: ubuntu-latest
    steps:
      - name: Update the ${{ env.TAG_NAME }} tag
        id: update-major-tag
        uses: actions/publish-action@v0.3.0
        with:
          source-tag: ${{ env.TAG_NAME }}
          slack-webhook: ${{ secrets.SLACK_WEBHOOK }}
@@ -1,9 +1,8 @@
name: Test

on:
  push:
    branches:
      - main
      - master
    paths-ignore:
      - '**.md'
  pull_request:

@@ -11,6 +10,7 @@ on:
      - '**.md'

jobs:

  build:
    name: Build

@@ -23,29 +23,28 @@ jobs:

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        uses: actions/checkout@v2

      - name: Setup Node 20
        uses: actions/setup-node@v4
      - name: Set Node.js 12.x
        uses: actions/setup-node@v1
        with:
          node-version: 20.x
          cache: 'npm'
          node-version: 12.x

      - name: Install dependencies
        run: npm ci
      - name: npm install
        run: npm install

      - name: Compile
        run: npm run build

      - name: npm test
        run: npm test

      - name: Lint
        run: npm run lint

      - name: Format
        run: npm run format-check

      - name: Test
        run: npm run test

      # Test end-to-end by uploading two artifacts and then downloading them
      - name: Create artifact files
        run: |

@@ -54,36 +53,45 @@ jobs:
          mkdir -p path/to/dir-3
          echo "Lorem ipsum dolor sit amet" > path/to/dir-1/file1.txt
          echo "Hello world from file #2" > path/to/dir-2/file2.txt
          echo "This is a going to be a test for a large enough file that should get compressed with GZip. The @actions/artifact package uses GZip to upload files. This text should have a compression ratio greater than 100% so it should get uploaded using GZip" > path/to/dir-3/gzip.txt

      # Upload a single file artifact
      - name: 'Upload artifact #1'
        uses: ./
        with:
          name: 'Artifact-A-${{ matrix.runs-on }}'
          name: 'Artifact-A'
          path: path/to/dir-1/file1.txt

      # Upload using a wildcard pattern
      # Upload using a wildcard pattern, name should default to 'artifact' if not provided
      - name: 'Upload artifact #2'
        uses: ./
        with:
          name: 'Artifact-Wildcard-${{ matrix.runs-on }}'
          path: path/**/dir*/

      # Upload a multi-path artifact
      # Upload a directory that contains a file that will be uploaded with GZip
      - name: 'Upload artifact #3'
        uses: ./
        with:
          name: 'Multi-Path-Artifact-${{ matrix.runs-on }}'
          name: 'GZip-Artifact'
          path: path/to/dir-3/

      # Upload a directory that contains a file that will be uploaded with GZip
      - name: 'Upload artifact #4'
        uses: ./
        with:
          name: 'Multi-Path-Artifact'
          path: |
            path/to/dir-1/*
            path/to/dir-[23]/*
            !path/to/dir-3/*.txt

      # Verify artifacts. Switch to download-artifact@v2 once it's out of preview

      # Download Artifact #1 and verify the correctness of the content
      - name: 'Download artifact #1'
        uses: actions/download-artifact@v4
        uses: actions/download-artifact@v1
        with:
          name: 'Artifact-A-${{ matrix.runs-on }}'
          name: 'Artifact-A'
          path: some/new/path

      - name: 'Verify Artifact #1'

@@ -101,9 +109,9 @@ jobs:

      # Download Artifact #2 and verify the correctness of the content
      - name: 'Download artifact #2'
        uses: actions/download-artifact@v4
        uses: actions/download-artifact@v1
        with:
          name: 'Artifact-Wildcard-${{ matrix.runs-on }}'
          name: 'artifact'
          path: some/other/path

      - name: 'Verify Artifact #2'

@@ -120,11 +128,31 @@ jobs:
          }
        shell: pwsh

      # Download Artifact #4 and verify the correctness of the content
      - name: 'Download artifact #4'
        uses: actions/download-artifact@v4
      # Download Artifact #3 and verify the correctness of the content
      - name: 'Download artifact #3'
        uses: actions/download-artifact@v1
        with:
          name: 'Multi-Path-Artifact-${{ matrix.runs-on }}'
          name: 'GZip-Artifact'
          path: gzip/artifact/path

      # Because a directory was used as input during the upload the parent directories, path/to/dir-3/, should not be included in the uploaded artifact
      - name: 'Verify Artifact #3'
        run: |
          $gzipFile = "gzip/artifact/path/gzip.txt"
          if(!(Test-Path -path $gzipFile))
          {
            Write-Error "Expected file do not exist"
          }
          if(!((Get-Content $gzipFile) -ceq "This is a going to be a test for a large enough file that should get compressed with GZip. The @actions/artifact package uses GZip to upload files. This text should have a compression ratio greater than 100% so it should get uploaded using GZip"))
          {
            Write-Error "File contents of downloaded artifact is incorrect"
          }
        shell: pwsh

      - name: 'Download artifact #4'
        uses: actions/download-artifact@v1
        with:
          name: 'Multi-Path-Artifact'
          path: multi/artifact

      - name: 'Verify Artifact #4'

@@ -140,112 +168,3 @@ jobs:
            Write-Error "File contents of downloaded artifacts are incorrect"
          }
        shell: pwsh

      - name: 'Alter file 1 content'
        run: |
          echo "This file has changed" > path/to/dir-1/file1.txt

      # Replace the contents of Artifact #1
      - name: 'Overwrite artifact #1'
        uses: ./
        with:
          name: 'Artifact-A-${{ matrix.runs-on }}'
          path: path/to/dir-1/file1.txt
          overwrite: true

      # Download replaced Artifact #1 and verify the correctness of the content
      - name: 'Download artifact #1 again'
        uses: actions/download-artifact@v4
        with:
          name: 'Artifact-A-${{ matrix.runs-on }}'
          path: overwrite/some/new/path

      - name: 'Verify Artifact #1 again'
        run: |
          $file = "overwrite/some/new/path/file1.txt"
          if(!(Test-Path -path $file))
          {
            Write-Error "Expected file does not exist"
          }
          if(!((Get-Content $file) -ceq "This file has changed"))
          {
            Write-Error "File contents of downloaded artifact are incorrect"
          }
        shell: pwsh
  merge:
    name: Merge
    needs: build
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      # Merge all artifacts from previous jobs
      - name: Merge all artifacts in run
        uses: ./merge/
        with:
          # our matrix produces artifacts with the same file, this prevents "stomping" on each other, also makes it
          # easier to identify each of the merged artifacts
          separate-directories: true
      - name: 'Download merged artifacts'
        uses: actions/download-artifact@v4
        with:
          name: merged-artifacts
          path: all-merged-artifacts
      - name: 'Check merged artifact has directories for each artifact'
        run: |
          $artifacts = @(
            "Artifact-A-ubuntu-latest",
            "Artifact-A-macos-latest",
            "Artifact-A-windows-latest",
            "Artifact-Wildcard-ubuntu-latest",
            "Artifact-Wildcard-macos-latest",
            "Artifact-Wildcard-windows-latest",
            "Multi-Path-Artifact-ubuntu-latest",
            "Multi-Path-Artifact-macos-latest",
            "Multi-Path-Artifact-windows-latest"
          )

          foreach ($artifact in $artifacts) {
            $path = "all-merged-artifacts/$artifact"
            if (!(Test-Path $path)) {
              Write-Error "$path does not exist."
            }
          }
        shell: pwsh

      # Merge Artifact-A-* from previous jobs
      - name: Merge all Artifact-A
        uses: ./merge/
        with:
          name: Merged-Artifact-As
          pattern: 'Artifact-A-*'
          separate-directories: true

      # Download merged artifacts and verify the correctness of the content
      - name: 'Download merged artifacts'
        uses: actions/download-artifact@v4
        with:
          name: Merged-Artifact-As
          path: merged-artifact-a

      - name: 'Verify merged artifacts'
        run: |
          $files = @(
            "merged-artifact-a/Artifact-A-ubuntu-latest/file1.txt",
            "merged-artifact-a/Artifact-A-macos-latest/file1.txt",
            "merged-artifact-a/Artifact-A-windows-latest/file1.txt"
          )

          foreach ($file in $files) {
            if (!(Test-Path $file)) {
              Write-Error "$file does not exist."
            }

            if (!((Get-Content $file) -ceq "This file has changed")) {
              Write-Error "$file has incorrect content."
            }
          }
        shell: pwsh
@@ -1,15 +0,0 @@
sources:
  npm: true

allowed:
  - apache-2.0
  - bsd-2-clause
  - bsd-3-clause
  - isc
  - mit
  - cc0-1.0
  - unlicense

reviewed:
  npm:
    - fs.realpath

@@ -1,20 +0,0 @@
---
name: "@actions/artifact"
version: 2.1.8
type: npm
summary:
homepage:
license: mit
licenses:
- sources: LICENSE.md
  text: |-
    The MIT License (MIT)

    Copyright 2019 GitHub

    Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
notices: []

@@ -1,20 +0,0 @@
---
name: "@actions/core"
version: 1.10.1
type: npm
summary:
homepage:
license: mit
licenses:
- sources: LICENSE.md
  text: |-
    The MIT License (MIT)

    Copyright 2019 GitHub

    Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
notices: []

@@ -1,20 +0,0 @@
---
name: "@actions/github"
version: 6.0.0
type: npm
summary:
homepage:
license: mit
licenses:
- sources: LICENSE.md
  text: |-
    The MIT License (MIT)

    Copyright 2019 GitHub

    Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
notices: []

@@ -1,20 +0,0 @@
---
name: "@actions/glob"
version: 0.3.0
type: npm
summary:
homepage:
license: mit
licenses:
- sources: LICENSE.md
  text: |-
    The MIT License (MIT)

    Copyright 2019 GitHub

    Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
notices: []

@@ -1,20 +0,0 @@
---
name: "@actions/io"
version: 1.1.2
type: npm
summary:
homepage:
license: mit
licenses:
- sources: LICENSE.md
  text: |-
    The MIT License (MIT)

    Copyright 2019 GitHub

    Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
notices: []

@@ -1,26 +0,0 @@
---
name: minimatch
version: 9.0.3
type: npm
summary:
homepage:
license: isc
licenses:
- sources: LICENSE
  text: |
    The ISC License

    Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors

    Permission to use, copy, modify, and/or distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
    IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
notices: []
@@ -41,10 +41,6 @@ Here are a few things you can do that will increase the likelihood of your pull
- Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
- Write a [good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).

## Licensed

This repository uses a tool called [Licensed](https://github.com/github/licensed) to verify third party dependencies. You may need to locally install licensed and run `licensed cache` to update the dependency cache if you install or update a production dependency. If licensed cache is unable to determine the dependency, you may need to modify the cache file yourself to put the correct license. You should still verify the dependency, licensed is a tool to help, but is not a substitute for human review of dependencies.
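A minimal sketch of what that looks like in CI terms, mirroring the Licensed workflow shown earlier (the step name is arbitrary; `licensed cache` and `licensed status` are the two subcommands referenced above and in that workflow):

```yaml
# Illustrative step sketch, assuming the licensed binary is already on PATH
- name: Refresh and check license cache
  run: |
    licensed cache   # regenerate .licenses/** after adding or updating a production dependency
    licensed status  # verify every cached dependency resolves to an allowed license
```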
## Resources

- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
README.md
@@ -1,130 +1,39 @@
# `@actions/upload-artifact`
# Upload-Artifact v2

> [!WARNING]
> actions/upload-artifact@v3 is scheduled for deprecation on **November 30, 2024**. [Learn more.](https://github.blog/changelog/2024-04-16-deprecation-notice-v3-of-the-artifact-actions/)
> Similarly, v1/v2 are scheduled for deprecation on **June 30, 2024**.
> Please update your workflow to use v4 of the artifact actions.
> This deprecation will not impact any existing versions of GitHub Enterprise Server being used by customers.

Upload [Actions Artifacts](https://docs.github.com/en/actions/using-workflows/storing-workflow-data-as-artifacts) from your Workflow Runs. Internally powered by [@actions/artifact](https://github.com/actions/toolkit/tree/main/packages/artifact) package.
This uploads artifacts from your workflow allowing you to share data between jobs and store data once a workflow is complete.

See also [download-artifact](https://github.com/actions/download-artifact).

- [`@actions/upload-artifact`](#actionsupload-artifact)
  - [v4 - What's new](#v4---whats-new)
    - [Improvements](#improvements)
    - [Breaking Changes](#breaking-changes)
  - [Usage](#usage)
    - [Inputs](#inputs)
    - [Outputs](#outputs)
  - [Examples](#examples)
    - [Upload an Individual File](#upload-an-individual-file)
    - [Upload an Entire Directory](#upload-an-entire-directory)
    - [Upload using a Wildcard Pattern](#upload-using-a-wildcard-pattern)
    - [Upload using Multiple Paths and Exclusions](#upload-using-multiple-paths-and-exclusions)
    - [Altering compressions level (speed v. size)](#altering-compressions-level-speed-v-size)
    - [Customization if no files are found](#customization-if-no-files-are-found)
    - [(Not) Uploading to the same artifact](#not-uploading-to-the-same-artifact)
    - [Environment Variables and Tilde Expansion](#environment-variables-and-tilde-expansion)
    - [Retention Period](#retention-period)
    - [Using Outputs](#using-outputs)
      - [Example output between steps](#example-output-between-steps)
      - [Example output between jobs](#example-output-between-jobs)
    - [Overwriting an Artifact](#overwriting-an-artifact)
  - [Limitations](#limitations)
    - [Number of Artifacts](#number-of-artifacts)
    - [Zip archives](#zip-archives)
    - [Permission Loss](#permission-loss)
  - [Where does the upload go?](#where-does-the-upload-go)
# What's new

- Easier upload
  - Specify a wildcard pattern
  - Specify an individual file
  - Specify a directory (previously you were limited to only this option)
- Multi path upload
  - Use a combination of individual files, wildcards or directories
  - Support for excluding certain files
- Upload an artifact without providing a name
- Fix for artifact uploads sometimes not working with containers
- Proxy support out of the box
- Port entire action to typescript from a runner plugin so it is easier to collaborate and accept contributions

## v4 - What's new
Refer [here](https://github.com/actions/upload-artifact/tree/releases/v1) for the previous version

> [!IMPORTANT]
> upload-artifact@v4+ is not currently supported on GHES yet. If you are on GHES, you must use [v3](https://github.com/actions/upload-artifact/releases/tag/v3).
# Usage

The release of upload-artifact@v4 and download-artifact@v4 are major changes to the backend architecture of Artifacts. They have numerous performance and behavioral improvements.

For more information, see the [`@actions/artifact`](https://github.com/actions/toolkit/tree/main/packages/artifact) documentation.

There is also a new sub-action, `actions/upload-artifact/merge`. For more info, check out that action's [README](./merge/README.md).
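A minimal sketch of using the merge sub-action, assuming the `@v4` tag and the same inputs exercised by the test workflow earlier in this compare (`name`, `pattern`, `separate-directories`):

```yaml
# Illustrative job sketch: combine per-matrix-leg artifacts into one artifact
merge:
  runs-on: ubuntu-latest
  needs: build
  steps:
    - uses: actions/upload-artifact/merge@v4
      with:
        name: merged-artifacts          # name of the combined artifact
        pattern: 'Artifact-A-*'         # which existing artifacts to merge
        separate-directories: true      # keep each source artifact in its own folder
```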
### Improvements

1. Uploads are significantly faster, upwards of 90% improvement in worst case scenarios.
2. Once uploaded, an Artifact ID is returned and Artifacts are immediately available in the UI and [REST API](https://docs.github.com/en/rest/actions/artifacts). Previously, you would have to wait for the run to be completed before an ID was available or any APIs could be utilized.
3. The contents of an Artifact are uploaded together into an _immutable_ archive. They cannot be altered by subsequent jobs unless the Artifacts are deleted and recreated (where they will have a new ID). Both of these factors help reduce the possibility of accidentally corrupting Artifact files.
4. The compression level of an Artifact can be manually tweaked for speed or size reduction.

### Breaking Changes

1. On self hosted runners, additional [firewall rules](https://github.com/actions/toolkit/tree/main/packages/artifact#breaking-changes) may be required.
2. Uploading to the same named Artifact multiple times.

   Due to how Artifacts are created in this new version, it is no longer possible to upload to the same named Artifact multiple times. You must either split the uploads into multiple Artifacts with different names, or only upload once. Otherwise you _will_ encounter an error.

3. Limit of Artifacts for an individual job. Each job in a workflow run now has a limit of 500 artifacts.

For assistance with breaking changes, see [MIGRATION.md](docs/MIGRATION.md).

## Usage

### Inputs

```yaml
- uses: actions/upload-artifact@v4
  with:
    # Name of the artifact to upload.
    # Optional. Default is 'artifact'
    name:

    # A file, directory or wildcard pattern that describes what to upload
    # Required.
    path:

    # The desired behavior if no files are found using the provided path.
    # Available Options:
    #   warn: Output a warning but do not fail the action
    #   error: Fail the action with an error message
    #   ignore: Do not output any warnings or errors, the action does not fail
    # Optional. Default is 'warn'
    if-no-files-found:

    # Duration after which artifact will expire in days. 0 means using default retention.
    # Minimum 1 day.
    # Maximum 90 days unless changed from the repository settings page.
    # Optional. Defaults to repository settings.
    retention-days:

    # The level of compression for Zlib to be applied to the artifact archive.
    # The value can range from 0 to 9.
    # For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
    # Optional. Default is '6'
    compression-level:

    # If true, an artifact with a matching name will be deleted before a new one is uploaded.
    # If false, the action will fail if an artifact for the given name already exists.
    # Does not fail if the artifact does not exist.
    # Optional. Default is 'false'
    overwrite:
```

### Outputs

| Name | Description | Example |
| - | - | - |
| `artifact-id` | GitHub ID of an Artifact, can be used by the REST API | `1234` |
| `artifact-url` | URL to download an Artifact. Can be used in many scenarios such as linking to artifacts in issues or pull requests. Users must be logged-in in order for this URL to work. This URL is valid as long as the artifact has not expired or the artifact, run or repository have not been deleted | `https://github.com/example-org/example-repo/actions/runs/1/artifacts/1234` |
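As a small illustrative sketch (not an excerpt from either README version), the `artifact-url` output can be consumed the same way the `artifact-id` output is used in the Using Outputs section further down:

```yaml
- uses: actions/upload-artifact@v4
  id: artifact-upload-step
  with:
    name: my-artifact
    path: path/to/artifact/content/

# Print the download URL returned by the upload step
- name: Output artifact URL
  run: echo 'Artifact URL is ${{ steps.artifact-upload-step.outputs.artifact-url }}'
```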
## Examples
See [action.yml](action.yml)

### Upload an Individual File

```yaml
steps:
- uses: actions/checkout@v2

- run: mkdir -p path/to/artifact

- run: echo hello > path/to/artifact/world.txt
- uses: actions/upload-artifact@v4

- uses: actions/upload-artifact@v2
  with:
    name: my-artifact
    path: path/to/artifact/world.txt

@@ -133,25 +42,23 @@ steps:
### Upload an Entire Directory

```yaml
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v2
  with:
    name: my-artifact
    path: path/to/artifact/ # or path/to/artifact
```

### Upload using a Wildcard Pattern

```yaml
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v2
  with:
    name: my-artifact
    path: path/**/[abc]rtifac?/*
```

### Upload using Multiple Paths and Exclusions

```yaml
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v2
  with:
    name: my-artifact
    path: |

@@ -160,140 +67,112 @@ steps:
      !path/**/*.tmp
```

For supported wildcards along with behavior and documentation, see [@actions/glob](https://github.com/actions/toolkit/tree/main/packages/glob) which is used internally to search for files.
For supported wildcards along with behavior and documentation, see [@actions/glob](https://github.com/actions/toolkit/tree/master/packages/glob) which is used internally to search for files.

If a wildcard pattern is used, the path hierarchy will be preserved after the first wildcard pattern:
If a wildcard pattern is used, the path hierarchy will be preserved after the first wildcard pattern.

```
path/to/*/directory/foo?.txt =>
    ∟ path/to/some/directory/foo1.txt
    ∟ path/to/some/directory/foo2.txt
    ∟ path/to/other/directory/foo1.txt
path/to/*/directory/foo?.txt =>
    ∟ path/to/some/directory/foo1.txt
    ∟ path/to/some/directory/foo2.txt
    ∟ path/to/other/directory/foo1.txt

would be flattened and uploaded as =>
    ∟ some/directory/foo1.txt
    ∟ some/directory/foo2.txt
    ∟ other/directory/foo1.txt
would be flattened and uploaded as =>
    ∟ some/directory/foo1.txt
    ∟ some/directory/foo2.txt
    ∟ other/directory/foo1.txt
```

If multiple paths are provided as input, the least common ancestor of all the search paths will be used as the root directory of the artifact. Exclude paths do not affect the directory structure.
If multiple paths are provided as input, the least common ancestor of all the search paths will be used as the root directory of the artifact. Exclude paths do not effect the directory structure.

Relative and absolute file paths are both allowed. Relative paths are rooted against the current working directory. Paths that begin with a wildcard character should be quoted to avoid being interpreted as YAML aliases.
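A minimal sketch of that quoting rule (the artifact name and pattern here are made up for illustration):

```yaml
- uses: actions/upload-artifact@v4
  with:
    name: my-artifact
    # Quoted because the pattern starts with a wildcard character,
    # which YAML would otherwise try to read as an alias
    path: '**/build-output/*.log'
```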
### Altering compressions level (speed v. size)
The [@actions/artifact](https://github.com/actions/toolkit/tree/master/packages/artifact) package is used internally to handle most of the logic around uploading an artifact. There is extra documentation around upload limitations and behavior in the toolkit repo that is worth checking out.

If you are uploading large or easily compressable data to your artifact, you may benefit from tweaking the compression level. By default, the compression level is `6`, the same as GNU Gzip.
### Conditional Artifact Upload

The value can range from 0 to 9:
  - 0: No compression
  - 1: Best speed
  - 6: Default compression (same as GNU Gzip)
  - 9: Best compression

Higher levels will result in better compression, but will take longer to complete.
For large files that are not easily compressed, a value of `0` is recommended for significantly faster uploads.

For instance, if you are uploading random binary data, you can save a lot of time by opting out of compression completely, since it won't benefit:
To upload artifacts only when the previous step of a job failed, use [`if: failure()`](https://help.github.com/en/articles/contexts-and-expression-syntax-for-github-actions#job-status-check-functions):

```yaml
- name: Make a 1GB random binary file
  run: |
    dd if=/dev/urandom of=my-1gb-file bs=1M count=1000
- uses: actions/upload-artifact@v4
  with:
    name: my-artifact
    path: my-1gb-file
    compression-level: 0 # no compression
```

But, if you are uploading data that is easily compressed (like plaintext, code, etc) you can save space and cost by having a higher compression level. But this will be heavier on the CPU therefore slower to upload:

```yaml
- name: Make a file with a lot of repeated text
  run: |
    for i in {1..100000}; do echo -n 'foobar' >> foobar.txt; done
- uses: actions/upload-artifact@v4
  with:
    name: my-artifact
    path: foobar.txt
    compression-level: 9 # maximum compression
```

### Customization if no files are found

If a path (or paths), result in no files being found for the artifact, the action will succeed but print out a warning. In certain scenarios it may be desirable to fail the action or suppress the warning. The `if-no-files-found` option allows you to customize the behavior of the action if no files are found:

```yaml
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v2
  if: failure()
  with:
    name: my-artifact
    path: path/to/artifact/
    if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
```

### (Not) Uploading to the same artifact
### Uploading without an artifact name

Unlike earlier versions of `upload-artifact`, uploading to the same artifact via multiple jobs is _not_ supported with `v4`.
You can upload an artifact without specifying a name
```yaml
- uses: actions/upload-artifact@v2
  with:
    path: path/to/artifact/world.txt
```

If not provided, `artifact` will be used as the default name which will manifest itself in the UI after upload.

### Uploading to the same artifact

Each artifact behaves as a file share. Uploading to the same artifact multiple times in the same workflow can overwrite and append already uploaded files

```yaml
- run: echo hi > world.txt
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v2
  with:
    # implicitly named as 'artifact'
    path: world.txt

- run: echo howdy > extra-file.txt
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v2
  with:
    # also implicitly named as 'artifact', will fail here!
    path: extra-file.txt

- run: echo hello > world.txt
- uses: actions/upload-artifact@v2
  with:
    path: world.txt
```
With the following example, the available artifact (named `artifact` which is the default if no name is provided) would contain both `world.txt` (`hello`) and `extra-file.txt` (`howdy`).

Artifact names must be unique since each created artifact is idempotent so multiple jobs cannot modify the same artifact.

In matrix scenarios, be careful to not accidentally upload to the same artifact, or else you will encounter conflict errors. It would be best to name the artifact _with_ a prefix or suffix from the matrix:
> **_Warning:_** Be careful when uploading to the same artifact via multiple jobs as artifacts may become corrupted

```yaml
jobs:
  upload:
    name: Generate Build Artifacts

    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest]
        version: [a, b, c]

    runs-on: ${{ matrix.os }}

        node-version: [8.x, 10.x, 12.x, 13.x]
    steps:
      - name: Build
        run: ./some-script --version=${{ matrix.version }} > my-binary
      - name: Upload
        uses: actions/upload-artifact@v4
        with:
          name: binary-${{ matrix.os }}-${{ matrix.version }}
          path: my-binary
      - name: 'Create a file'
        run: echo ${{ matrix.node-version }} > my_file.txt
      - name: 'Accidently upload to the same artifact via multiple jobs'
        uses: 'actions/upload-artifact@v2'
        with:
          name: my-artifact
          path: ${{ github.workspace }}
```

This will result in artifacts like: `binary-ubuntu-latest-a`, `binary-windows-latest-b`, and so on.
In the above example, four jobs will upload four different files to the same artifact but there will only be one file available when `my-artifact` is downloaded. Each job overwrites what was previously uploaded. To ensure that jobs don't overwrite existing artifacts, use a different name per job.

Previously the behavior _allowed_ for the artifact names to be the same which resulted in unexpected mutations and accidental corruption. Artifacts created by upload-artifact@v4 are immutable.
```yaml
  uses: 'actions/upload-artifact@v2'
  with:
    name: my-artifact ${{ matrix.node-version }}
    path: ${{ github.workspace }}
```

### Environment Variables and Tilde Expansion

You can use `~` in the path input as a substitute for `$HOME`. Basic tilde expansion is supported:
You can use `~` in the path input as a substitute for `$HOME`. Basic tilde expansion is supported.

```yaml
- run: |
    mkdir -p ~/new/artifact
    echo hello > ~/new/artifact/world.txt
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v2
  with:
    name: my-artifacts
    path: ~/new/**/*
    name: 'Artifacts-V2'
    path: '~/new/**/*'
```

Environment variables along with context expressions can also be used for input. For documentation see [context and expression syntax](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions):
Environment variables along with context expressions can also be used for input. For documentation see [context and expression syntax](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions).

```yaml
env:

@@ -302,149 +181,24 @@ Environment variables along with context expressions can also be used for input.
- run: |
    mkdir -p ${{ github.workspace }}/artifact
    echo hello > ${{ github.workspace }}/artifact/world.txt
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v2
  with:
    name: ${{ env.name }}-name
    path: ${{ github.workspace }}/artifact/**/*
```

For environment variables created in other steps, make sure to use the `env` expression syntax

```yaml
steps:
- run: |
    mkdir testing
    echo "This is a file to upload" > testing/file.txt
    echo "artifactPath=testing/file.txt" >> $GITHUB_ENV
- uses: actions/upload-artifact@v4
  with:
    name: artifact
    path: ${{ env.artifactPath }} # this will resolve to testing/file.txt at runtime
```

### Retention Period

Artifacts are retained for 90 days by default. You can specify a shorter retention period using the `retention-days` input:

```yaml
- name: Create a file
  run: echo "I won't live long" > my_file.txt

- name: Upload Artifact
  uses: actions/upload-artifact@v4
  with:
    name: my-artifact
    path: my_file.txt
    retention-days: 5
```

The retention period must be between 1 and 90 inclusive. For more information see [artifact and log retention policies](https://docs.github.com/en/free-pro-team@latest/actions/reference/usage-limits-billing-and-administration#artifact-and-log-retention-policy).

### Using Outputs

If an artifact upload is successful then an `artifact-id` output is available. This ID is a unique identifier that can be used with [Artifact REST APIs](https://docs.github.com/en/rest/actions/artifacts).

#### Example output between steps

```yml
- uses: actions/upload-artifact@v4
  id: artifact-upload-step
  with:
    name: my-artifact
    path: path/to/artifact/content/

- name: Output artifact ID
  run: echo 'Artifact ID is ${{ steps.artifact-upload-step.outputs.artifact-id }}'
```

#### Example output between jobs

```yml
jobs:
  job1:
    runs-on: ubuntu-latest
    outputs:
      output1: ${{ steps.artifact-upload-step.outputs.artifact-id }}
    steps:
      - uses: actions/upload-artifact@v4
        id: artifact-upload-step
        with:
          name: my-artifact
          path: path/to/artifact/content/
  job2:
    runs-on: ubuntu-latest
    needs: job1
    steps:
      - env:
          OUTPUT1: ${{needs.job1.outputs.output1}}
        run: echo "Artifact ID from previous job is $OUTPUT1"
```

### Overwriting an Artifact

Although it's not possible to mutate an Artifact, can completely overwrite one. But do note that this will give the Artifact a new ID, the previous one will no longer exist:

```yaml
jobs:
  upload:
    runs-on: ubuntu-latest
    steps:
      - name: Create a file
        run: echo "hello world" > my-file.txt
      - name: Upload Artifact
        uses: actions/upload-artifact@v4
        with:
          name: my-artifact # NOTE: same artifact name
          path: my-file.txt
  upload-again:
    needs: upload
    runs-on: ubuntu-latest
    steps:
      - name: Create a different file
        run: echo "goodbye world" > my-file.txt
      - name: Upload Artifact
        uses: actions/upload-artifact@v4
        with:
          name: my-artifact # NOTE: same artifact name
          path: my-file.txt
          overwrite: true
```

## Limitations

### Number of Artifacts

Within an individual job, there is a limit of 500 artifacts that can be created for that job.

You may also be limited by Artifacts if you have exceeded your shared storage quota. Storage is calculated every 6-12 hours. See [the documentation](https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending) for more info.

### Zip archives

When an Artifact is uploaded, all the files are assembled into an immutable Zip archive. There is currently no way to download artifacts in a format other than a Zip or to download individual artifact contents.

### Permission Loss

File permissions are not maintained during artifact upload. All directories will have `755` and all files will have `644`. For example, if you make a file executable using `chmod` and then upload that file, post-download the file is no longer guaranteed to be set as an executable.

If you must preserve permissions, you can `tar` all of your files together before artifact upload. Post download, the `tar` file will maintain file permissions and case sensitivity.

```yaml
- name: 'Tar files'
  run: tar -cvf my_files.tar /path/to/my/directory

- name: 'Upload Artifact'
  uses: actions/upload-artifact@v4
  with:
    name: my-artifact
    path: my_files.tar
```

## Where does the upload go?

At the bottom of the workflow summary page, there is a dedicated section for artifacts. Here's a screenshot of something you might see:

<img src="https://user-images.githubusercontent.com/16109154/103645952-223c6880-4f59-11eb-8268-8dca6937b5f9.png" width="700" height="300">
In the top right corner of a workflow run, once the run is over, if you used this action, there will be a `Artifacts` dropdown which you can download items from. Here's a screenshot of what it looks like<br/>
<img src="https://user-images.githubusercontent.com/16109154/72556687-20235a80-386d-11ea-9e2a-b534faa77083.png" width="375" height="140">

There is a trashcan icon that can be used to delete the artifact. This icon will only appear for users who have write permissions to the repository.

The size of the artifact is denoted in bytes. The displayed artifact size denotes the size of the zip that `upload-artifact` creates during upload.
## Additional Documentation

See [persisting workflow data using artifacts](https://help.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts) for additional examples and tips.

See extra documentation for the [@actions/artifact](https://github.com/actions/toolkit/blob/master/packages/artifact/docs/additional-information.md) package that is used internally regarding certain behaviors and limitations.

# License

The scripts and documentation in this project are released under the [MIT License](LICENSE)
@@ -1,175 +0,0 @@
import * as core from '@actions/core'
import artifact from '@actions/artifact'
import {run} from '../src/merge/merge-artifacts'
import {Inputs} from '../src/merge/constants'
import * as search from '../src/shared/search'

const fixtures = {
  artifactName: 'my-merged-artifact',
  tmpDirectory: '/tmp/merge-artifact',
  filesToUpload: [
    '/some/artifact/path/file-a.txt',
    '/some/artifact/path/file-b.txt',
    '/some/artifact/path/file-c.txt'
  ],
  artifacts: [
    {
      name: 'my-artifact-a',
      id: 1,
      size: 100,
      createdAt: new Date('2024-01-01T00:00:00Z')
    },
    {
      name: 'my-artifact-b',
      id: 2,
      size: 100,
      createdAt: new Date('2024-01-01T00:00:00Z')
    },
    {
      name: 'my-artifact-c',
      id: 3,
      size: 100,
      createdAt: new Date('2024-01-01T00:00:00Z')
    }
  ]
}

jest.mock('@actions/github', () => ({
  context: {
    repo: {
      owner: 'actions',
      repo: 'toolkit'
    },
    runId: 123,
    serverUrl: 'https://github.com'
  }
}))

jest.mock('@actions/core')

jest.mock('fs/promises', () => ({
  mkdtemp: jest.fn().mockResolvedValue('/tmp/merge-artifact'),
  rm: jest.fn().mockResolvedValue(undefined)
}))

/* eslint-disable no-unused-vars */
const mockInputs = (overrides?: Partial<{[K in Inputs]?: any}>) => {
  const inputs = {
    [Inputs.Name]: 'my-merged-artifact',
    [Inputs.Pattern]: '*',
    [Inputs.SeparateDirectories]: false,
    [Inputs.RetentionDays]: 0,
    [Inputs.CompressionLevel]: 6,
    [Inputs.DeleteMerged]: false,
    ...overrides
  }

  ;(core.getInput as jest.Mock).mockImplementation((name: string) => {
    return inputs[name]
  })
  ;(core.getBooleanInput as jest.Mock).mockImplementation((name: string) => {
    return inputs[name]
  })

  return inputs
}

describe('merge', () => {
  beforeEach(async () => {
    mockInputs()

    jest
      .spyOn(artifact, 'listArtifacts')
      .mockResolvedValue({artifacts: fixtures.artifacts})

    jest.spyOn(artifact, 'downloadArtifact').mockResolvedValue({
      downloadPath: fixtures.tmpDirectory
    })

    jest.spyOn(search, 'findFilesToUpload').mockResolvedValue({
      filesToUpload: fixtures.filesToUpload,
      rootDirectory: fixtures.tmpDirectory
    })

    jest.spyOn(artifact, 'uploadArtifact').mockResolvedValue({
      size: 123,
      id: 1337
    })

    jest
      .spyOn(artifact, 'deleteArtifact')
      .mockImplementation(async artifactName => {
        const artifact = fixtures.artifacts.find(a => a.name === artifactName)
        if (!artifact) throw new Error(`Artifact ${artifactName} not found`)
        return {id: artifact.id}
      })
  })

  it('merges artifacts', async () => {
    await run()

    for (const a of fixtures.artifacts) {
      expect(artifact.downloadArtifact).toHaveBeenCalledWith(a.id, {
        path: fixtures.tmpDirectory
      })
    }

    expect(artifact.uploadArtifact).toHaveBeenCalledWith(
      fixtures.artifactName,
      fixtures.filesToUpload,
      fixtures.tmpDirectory,
      {compressionLevel: 6}
    )
  })

  it('fails if no artifacts found', async () => {
    mockInputs({[Inputs.Pattern]: 'this-does-not-match'})

    expect(run()).rejects.toThrow()

    expect(artifact.uploadArtifact).not.toBeCalled()
    expect(artifact.downloadArtifact).not.toBeCalled()
  })

  it('supports custom compression level', async () => {
    mockInputs({
      [Inputs.CompressionLevel]: 2
    })

    await run()

    expect(artifact.uploadArtifact).toHaveBeenCalledWith(
      fixtures.artifactName,
      fixtures.filesToUpload,
      fixtures.tmpDirectory,
      {compressionLevel: 2}
    )
  })

  it('supports custom retention days', async () => {
    mockInputs({
      [Inputs.RetentionDays]: 7
    })

    await run()

    expect(artifact.uploadArtifact).toHaveBeenCalledWith(
      fixtures.artifactName,
      fixtures.filesToUpload,
      fixtures.tmpDirectory,
      {retentionDays: 7, compressionLevel: 6}
    )
  })

  it('supports deleting artifacts after merge', async () => {
    mockInputs({
      [Inputs.DeleteMerged]: true
    })

    await run()

    for (const a of fixtures.artifacts) {
      expect(artifact.deleteArtifact).toHaveBeenCalledWith(a.name)
    }
  })
})
@ -2,7 +2,7 @@ import * as core from '@actions/core'
import * as path from 'path'
import * as io from '@actions/io'
import {promises as fs} from 'fs'
import {findFilesToUpload} from '../src/shared/search'
import {findFilesToUpload} from '../src/search'

const root = path.join(__dirname, '_temp', 'search')
const searchItem1Path = path.join(
@ -1,231 +0,0 @@
|
|||
import * as core from '@actions/core'
|
||||
import * as github from '@actions/github'
|
||||
import artifact, {ArtifactNotFoundError} from '@actions/artifact'
|
||||
import {run} from '../src/upload/upload-artifact'
|
||||
import {Inputs} from '../src/upload/constants'
|
||||
import * as search from '../src/shared/search'
|
||||
|
||||
const fixtures = {
|
||||
artifactName: 'artifact-name',
|
||||
rootDirectory: '/some/artifact/path',
|
||||
filesToUpload: [
|
||||
'/some/artifact/path/file1.txt',
|
||||
'/some/artifact/path/file2.txt'
|
||||
]
|
||||
}
|
||||
|
||||
jest.mock('@actions/github', () => ({
|
||||
context: {
|
||||
repo: {
|
||||
owner: 'actions',
|
||||
repo: 'toolkit'
|
||||
},
|
||||
runId: 123,
|
||||
serverUrl: 'https://github.com'
|
||||
}
|
||||
}))
|
||||
|
||||
jest.mock('@actions/core')
|
||||
|
||||
/* eslint-disable no-unused-vars */
|
||||
const mockInputs = (overrides?: Partial<{[K in Inputs]?: any}>) => {
|
||||
const inputs = {
|
||||
[Inputs.Name]: 'artifact-name',
|
||||
[Inputs.Path]: '/some/artifact/path',
|
||||
[Inputs.IfNoFilesFound]: 'warn',
|
||||
[Inputs.RetentionDays]: 0,
|
||||
[Inputs.CompressionLevel]: 6,
|
||||
[Inputs.Overwrite]: false,
|
||||
...overrides
|
||||
}
|
||||
|
||||
;(core.getInput as jest.Mock).mockImplementation((name: string) => {
|
||||
return inputs[name]
|
||||
})
|
||||
;(core.getBooleanInput as jest.Mock).mockImplementation((name: string) => {
|
||||
return inputs[name]
|
||||
})
|
||||
|
||||
return inputs
|
||||
}
|
||||
|
||||
describe('upload', () => {
|
||||
beforeEach(async () => {
|
||||
mockInputs()
|
||||
|
||||
jest.spyOn(search, 'findFilesToUpload').mockResolvedValue({
|
||||
filesToUpload: fixtures.filesToUpload,
|
||||
rootDirectory: fixtures.rootDirectory
|
||||
})
|
||||
|
||||
jest.spyOn(artifact, 'uploadArtifact').mockResolvedValue({
|
||||
size: 123,
|
||||
id: 1337
|
||||
})
|
||||
})
|
||||
|
||||
it('uploads a single file', async () => {
|
||||
jest.spyOn(search, 'findFilesToUpload').mockResolvedValue({
|
||||
filesToUpload: [fixtures.filesToUpload[0]],
|
||||
rootDirectory: fixtures.rootDirectory
|
||||
})
|
||||
|
||||
await run()
|
||||
|
||||
expect(artifact.uploadArtifact).toHaveBeenCalledWith(
|
||||
fixtures.artifactName,
|
||||
[fixtures.filesToUpload[0]],
|
||||
fixtures.rootDirectory,
|
||||
{compressionLevel: 6}
|
||||
)
|
||||
})
|
||||
|
||||
it('uploads multiple files', async () => {
|
||||
await run()
|
||||
|
||||
expect(artifact.uploadArtifact).toHaveBeenCalledWith(
|
||||
fixtures.artifactName,
|
||||
fixtures.filesToUpload,
|
||||
fixtures.rootDirectory,
|
||||
{compressionLevel: 6}
|
||||
)
|
||||
})
|
||||
|
||||
it('sets outputs', async () => {
|
||||
await run()
|
||||
|
||||
expect(core.setOutput).toHaveBeenCalledWith('artifact-id', 1337)
|
||||
expect(core.setOutput).toHaveBeenCalledWith(
|
||||
'artifact-url',
|
||||
`${github.context.serverUrl}/${github.context.repo.owner}/${
|
||||
github.context.repo.repo
|
||||
}/actions/runs/${github.context.runId}/artifacts/${1337}`
|
||||
)
|
||||
})
|
||||
|
||||
it('supports custom compression level', async () => {
|
||||
mockInputs({
|
||||
[Inputs.CompressionLevel]: 2
|
||||
})
|
||||
|
||||
await run()
|
||||
|
||||
expect(artifact.uploadArtifact).toHaveBeenCalledWith(
|
||||
fixtures.artifactName,
|
||||
fixtures.filesToUpload,
|
||||
fixtures.rootDirectory,
|
||||
{compressionLevel: 2}
|
||||
)
|
||||
})
|
||||
|
||||
it('supports custom retention days', async () => {
|
||||
mockInputs({
|
||||
[Inputs.RetentionDays]: 7
|
||||
})
|
||||
|
||||
await run()
|
||||
|
||||
expect(artifact.uploadArtifact).toHaveBeenCalledWith(
|
||||
fixtures.artifactName,
|
||||
fixtures.filesToUpload,
|
||||
fixtures.rootDirectory,
|
||||
{retentionDays: 7, compressionLevel: 6}
|
||||
)
|
||||
})
|
||||
|
||||
it('supports warn if-no-files-found', async () => {
|
||||
mockInputs({
|
||||
[Inputs.IfNoFilesFound]: 'warn'
|
||||
})
|
||||
|
||||
jest.spyOn(search, 'findFilesToUpload').mockResolvedValue({
|
||||
filesToUpload: [],
|
||||
rootDirectory: fixtures.rootDirectory
|
||||
})
|
||||
|
||||
await run()
|
||||
|
||||
expect(core.warning).toHaveBeenCalledWith(
|
||||
`No files were found with the provided path: ${fixtures.rootDirectory}. No artifacts will be uploaded.`
|
||||
)
|
||||
})
|
||||
|
||||
it('supports error if-no-files-found', async () => {
|
||||
mockInputs({
|
||||
[Inputs.IfNoFilesFound]: 'error'
|
||||
})
|
||||
|
||||
jest.spyOn(search, 'findFilesToUpload').mockResolvedValue({
|
||||
filesToUpload: [],
|
||||
rootDirectory: fixtures.rootDirectory
|
||||
})
|
||||
|
||||
await run()
|
||||
|
||||
expect(core.setFailed).toHaveBeenCalledWith(
|
||||
`No files were found with the provided path: ${fixtures.rootDirectory}. No artifacts will be uploaded.`
|
||||
)
|
||||
})
|
||||
|
||||
it('supports ignore if-no-files-found', async () => {
|
||||
mockInputs({
|
||||
[Inputs.IfNoFilesFound]: 'ignore'
|
||||
})
|
||||
|
||||
jest.spyOn(search, 'findFilesToUpload').mockResolvedValue({
|
||||
filesToUpload: [],
|
||||
rootDirectory: fixtures.rootDirectory
|
||||
})
|
||||
|
||||
await run()
|
||||
|
||||
expect(core.info).toHaveBeenCalledWith(
|
||||
`No files were found with the provided path: ${fixtures.rootDirectory}. No artifacts will be uploaded.`
|
||||
)
|
||||
})
|
||||
|
||||
it('supports overwrite', async () => {
|
||||
mockInputs({
|
||||
[Inputs.Overwrite]: true
|
||||
})
|
||||
|
||||
jest.spyOn(artifact, 'deleteArtifact').mockResolvedValue({
|
||||
id: 1337
|
||||
})
|
||||
|
||||
await run()
|
||||
|
||||
expect(artifact.uploadArtifact).toHaveBeenCalledWith(
|
||||
fixtures.artifactName,
|
||||
fixtures.filesToUpload,
|
||||
fixtures.rootDirectory,
|
||||
{compressionLevel: 6}
|
||||
)
|
||||
|
||||
expect(artifact.deleteArtifact).toHaveBeenCalledWith(fixtures.artifactName)
|
||||
})
|
||||
|
||||
it('supports overwrite and continues if not found', async () => {
|
||||
mockInputs({
|
||||
[Inputs.Overwrite]: true
|
||||
})
|
||||
|
||||
jest
|
||||
.spyOn(artifact, 'deleteArtifact')
|
||||
.mockRejectedValue(new ArtifactNotFoundError('not found'))
|
||||
|
||||
await run()
|
||||
|
||||
expect(artifact.uploadArtifact).toHaveBeenCalledWith(
|
||||
fixtures.artifactName,
|
||||
fixtures.filesToUpload,
|
||||
fixtures.rootDirectory,
|
||||
{compressionLevel: 6}
|
||||
)
|
||||
|
||||
expect(artifact.deleteArtifact).toHaveBeenCalledWith(fixtures.artifactName)
|
||||
expect(core.debug).toHaveBeenCalledWith(
|
||||
`Skipping deletion of '${fixtures.artifactName}', it does not exist`
|
||||
)
|
||||
})
|
||||
})
|
action.yml
|
@ -4,58 +4,10 @@ author: 'GitHub'
|
|||
inputs:
|
||||
name:
|
||||
description: 'Artifact name'
|
||||
default: 'artifact'
|
||||
required: false
|
||||
path:
|
||||
description: 'A file, directory or wildcard pattern that describes what to upload'
|
||||
required: true
|
||||
if-no-files-found:
|
||||
description: >
|
||||
The desired behavior if no files are found using the provided path.
|
||||
|
||||
Available Options:
|
||||
warn: Output a warning but do not fail the action
|
||||
error: Fail the action with an error message
|
||||
ignore: Do not output any warnings or errors, the action does not fail
|
||||
default: 'warn'
|
||||
retention-days:
|
||||
description: >
|
||||
Duration after which artifact will expire in days. 0 means using default retention.
|
||||
|
||||
Minimum 1 day.
|
||||
Maximum 90 days unless changed from the repository settings page.
|
||||
compression-level:
|
||||
description: >
|
||||
The level of compression for Zlib to be applied to the artifact archive.
|
||||
The value can range from 0 to 9:
|
||||
- 0: No compression
|
||||
- 1: Best speed
|
||||
- 6: Default compression (same as GNU Gzip)
|
||||
- 9: Best compression
|
||||
Higher levels will result in better compression, but will take longer to complete.
|
||||
For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
|
||||
default: '6'
|
||||
overwrite:
|
||||
description: >
|
||||
If true, an artifact with a matching name will be deleted before a new one is uploaded.
|
||||
If false, the action will fail if an artifact for the given name already exists.
|
||||
Does not fail if the artifact does not exist.
|
||||
default: 'false'
|
||||
|
||||
outputs:
|
||||
artifact-id:
|
||||
description: >
|
||||
A unique identifier for the artifact that was just uploaded. Empty if the artifact upload failed.
|
||||
|
||||
This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts
|
||||
artifact-url:
|
||||
description: >
|
||||
A download URL for the artifact that was just uploaded. Empty if the artifact upload failed.
|
||||
|
||||
This download URL only works for requests authenticated with GitHub. Anonymous downloads will be prompted to first login.
If an anonymous download URL is needed then a short, time-restricted URL can be generated using the download artifact API: https://docs.github.com/en/rest/actions/artifacts#download-an-artifact

This URL will be valid for as long as the artifact, the workflow run and the repository exist. Once an artifact has expired this URL will no longer work.
Common use cases for such a download URL include adding download links to artifacts in descriptions or comments on pull requests or issues.
|
||||
runs:
|
||||
using: 'node20'
|
||||
main: 'dist/upload/index.js'
|
||||
using: 'node12'
|
||||
main: 'dist/index.js'
|
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
|
@ -1,209 +0,0 @@
|
|||
# Migration
|
||||
|
||||
- [Migration](#migration)
|
||||
- [Multiple uploads to the same named Artifact](#multiple-uploads-to-the-same-named-artifact)
|
||||
- [Overwriting an Artifact](#overwriting-an-artifact)
|
||||
- [Merging multiple artifacts](#merging-multiple-artifacts)
|
||||
|
||||
Several behavioral differences exist between Artifact actions `v3` and below vs `v4`. This document outlines common scenarios in `v3`, and how they would be handled in `v4`.
|
||||
|
||||
## Multiple uploads to the same named Artifact
|
||||
|
||||
In `v3`, Artifacts are _mutable_ so it's possible to write workflow scenarios where multiple jobs upload to the same Artifact like so:
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
upload:
|
||||
strategy:
|
||||
matrix:
|
||||
runs-on: [ubuntu-latest, macos-latest, windows-latest]
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
steps:
|
||||
- name: Create a File
|
||||
run: echo "hello from ${{ matrix.runs-on }}" > file-${{ matrix.runs-on }}.txt
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: my-artifact # NOTE: same artifact name
|
||||
path: file-${{ matrix.runs-on }}.txt
|
||||
download:
|
||||
needs: upload
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download All Artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: my-artifact
|
||||
path: my-artifact
|
||||
- run: ls -R my-artifact
|
||||
```
|
||||
|
||||
This results in a directory like so:
|
||||
|
||||
```
|
||||
my-artifact/
|
||||
file-macos-latest.txt
|
||||
file-ubuntu-latest.txt
|
||||
file-windows-latest.txt
|
||||
```
|
||||
|
||||
In `v4`, Artifacts are immutable (unless deleted), so each upload must use a different Artifact name, and the downloads must be filtered by name to achieve the same effect:
|
||||
|
||||
```diff
|
||||
jobs:
|
||||
upload:
|
||||
strategy:
|
||||
matrix:
|
||||
runs-on: [ubuntu-latest, macos-latest, windows-latest]
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
steps:
|
||||
- name: Create a File
|
||||
run: echo "hello from ${{ matrix.runs-on }}" > file-${{ matrix.runs-on }}.txt
|
||||
- name: Upload Artifact
|
||||
- uses: actions/upload-artifact@v3
|
||||
+ uses: actions/upload-artifact@v4
|
||||
with:
|
||||
- name: my-artifact
|
||||
+ name: my-artifact-${{ matrix.runs-on }}
|
||||
path: file-${{ matrix.runs-on }}.txt
|
||||
download:
|
||||
needs: upload
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download All Artifacts
|
||||
- uses: actions/download-artifact@v3
|
||||
+ uses: actions/download-artifact@v4
|
||||
with:
|
||||
- name: my-artifact
|
||||
path: my-artifact
|
||||
+ pattern: my-artifact-*
|
||||
+ merge-multiple: true
|
||||
- run: ls -R my-artifact
|
||||
```
|
||||
|
||||
In `v4`, the new `pattern:` input filters the downloaded Artifacts to those matching the specified name, and the new `merge-multiple:` input supports downloading multiple Artifacts into the same directory. If files within the Artifacts have the same name, the last one written wins.
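A minimal sketch of just the `v4` download step, assuming the `my-artifact-*` names produced by the matrix above:

```yaml
- name: Download All Artifacts
  uses: actions/download-artifact@v4
  with:
    # Only fetch artifacts whose names match this glob
    pattern: my-artifact-*
    # Extract all matched artifacts into the same directory
    merge-multiple: true
    path: my-artifact
```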
|
||||
|
||||
## Overwriting an Artifact
|
||||
|
||||
In `v3`, the contents of an Artifact were mutable so something like the following was possible:
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
upload:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Create a file
|
||||
run: echo "hello world" > my-file.txt
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: my-artifact # NOTE: same artifact name
|
||||
path: my-file.txt
|
||||
upload-again:
|
||||
needs: upload
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Create a different file
|
||||
run: echo "goodbye world" > my-file.txt
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: my-artifact # NOTE: same artifact name
|
||||
path: my-file.txt
|
||||
```
|
||||
|
||||
The resulting `my-file.txt` in `my-artifact` will have "goodbye world" as the content.
|
||||
|
||||
In `v4`, Artifacts are immutable unless deleted. To achieve this same behavior, you can use `overwrite: true` to delete the Artifact before a new one is created:
|
||||
|
||||
```diff
|
||||
jobs:
|
||||
upload:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Create a file
|
||||
run: echo "hello world" > my-file.txt
|
||||
- name: Upload Artifact
|
||||
- uses: actions/upload-artifact@v3
|
||||
+ uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: my-artifact # NOTE: same artifact name
|
||||
path: my-file.txt
|
||||
upload-again:
|
||||
needs: upload
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Create a different file
|
||||
run: echo "goodbye world" > my-file.txt
|
||||
- name: Upload Artifact
|
||||
- uses: actions/upload-artifact@v3
|
||||
+ uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: my-artifact # NOTE: same artifact name
|
||||
path: my-file.txt
|
||||
+ overwrite: true
|
||||
```
|
||||
|
||||
Note that this will create an _entirely_ new Artifact, with a different ID from the previous one.
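Because a new Artifact is created, its ID changes as well. A minimal sketch of observing this via the `artifact-id` output described earlier in `action.yml` (the step id `upload` is only illustrative):

```yaml
- name: Upload Artifact
  id: upload
  uses: actions/upload-artifact@v4
  with:
    name: my-artifact # NOTE: same artifact name
    path: my-file.txt
    overwrite: true
# The ID printed here belongs to the newly created Artifact
- run: echo "artifact-id=${{ steps.upload.outputs.artifact-id }}"
```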
|
||||
|
||||
## Merging multiple artifacts
|
||||
|
||||
In `v3`, multiple uploads from multiple jobs could be done to the same Artifact. This would result in a single archive, which could be useful for sending to upstream systems outside of Actions via API or UI downloads.
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
upload:
|
||||
strategy:
|
||||
matrix:
|
||||
runs-on: [ubuntu-latest, macos-latest, windows-latest]
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
steps:
|
||||
- name: Create a File
|
||||
run: echo "hello from ${{ matrix.runs-on }}" > file-${{ matrix.runs-on }}.txt
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: all-my-files # NOTE: same artifact name
|
||||
path: file-${{ matrix.runs-on }}.txt
|
||||
```
|
||||
|
||||
The single `all-my-files` artifact would contain the following:
|
||||
|
||||
```
|
||||
.
|
||||
∟ file-ubuntu-latest.txt
|
||||
∟ file-macos-latest.txt
|
||||
∟ file-windows-latest.txt
|
||||
```
|
||||
|
||||
To achieve the same in `v4` you can change it like so:
|
||||
|
||||
```diff
|
||||
jobs:
|
||||
upload:
|
||||
strategy:
|
||||
matrix:
|
||||
runs-on: [ubuntu-latest, macos-latest, windows-latest]
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
steps:
|
||||
- name: Create a File
|
||||
run: echo "hello from ${{ matrix.runs-on }}" > file-${{ matrix.runs-on }}.txt
|
||||
- name: Upload Artifact
|
||||
- uses: actions/upload-artifact@v3
|
||||
+ uses: actions/upload-artifact@v4
|
||||
with:
|
||||
- name: all-my-files
|
||||
+ name: my-artifact-${{ matrix.runs-on }}
|
||||
path: file-${{ matrix.runs-on }}.txt
|
||||
+ merge:
|
||||
+ runs-on: ubuntu-latest
|
||||
+ needs: upload
|
||||
+ steps:
|
||||
+ - name: Merge Artifacts
|
||||
+ uses: actions/upload-artifact/merge@v4
|
||||
+ with:
|
||||
+ name: all-my-files
|
||||
+ pattern: my-artifact-*
|
||||
```
|
||||
|
||||
Note that this will download all artifacts to a temporary directory and reupload them as a single artifact. For more information on inputs and other use cases for `actions/upload-artifact/merge@v4`, see [the action documentation](../merge/README.md).
|
merge/README.md
|
@ -1,200 +0,0 @@
|
|||
# `@actions/upload-artifact/merge`
|
||||
|
||||
Merge multiple [Actions Artifacts](https://docs.github.com/en/actions/using-workflows/storing-workflow-data-as-artifacts) in Workflow Runs. Internally powered by [@actions/artifact](https://github.com/actions/toolkit/tree/main/packages/artifact) package.
|
||||
|
||||
- [`@actions/upload-artifact/merge`](#actionsupload-artifactmerge)
|
||||
- [Usage](#usage)
|
||||
- [Inputs](#inputs)
|
||||
- [Outputs](#outputs)
|
||||
- [Examples](#examples)
|
||||
- [Combining all artifacts in a workflow run](#combining-all-artifacts-in-a-workflow-run)
|
||||
- [Prefix directories in merged artifact](#prefix-directories-in-merged-artifact)
|
||||
- [Deleting artifacts after merge](#deleting-artifacts-after-merge)
|
||||
- [Retention and Compression Level](#retention-and-compression-level)
|
||||
|
||||
## Usage
|
||||
|
||||
> [!IMPORTANT]
|
||||
> upload-artifact/merge@v4+ is not currently supported on GHES.
|
||||
|
||||
Note: this action can only merge artifacts created with actions/upload-artifact@v4+
|
||||
|
||||
This sub-action is a helper to merge multiple artifacts after they are created. To do so, it will download multiple artifacts to a temporary directory and reupload them as a single artifact.
|
||||
|
||||
For most cases, this may not be the most efficient solution. See [the migration docs](../docs/MIGRATION.md#multiple-uploads-to-the-same-named-artifact) on how to download multiple artifacts to the same directory on a runner. This action should only be necessary for cases where multiple artifacts will need to be downloaded outside the runner environment, like downloads via the UI or REST API.
|
||||
|
||||
### Inputs
|
||||
|
||||
```yaml
|
||||
- uses: actions/upload-artifact/merge@v4
|
||||
with:
|
||||
# The name of the artifact that the artifacts will be merged into
|
||||
# Optional. Default is 'merged-artifacts'
|
||||
name:
|
||||
|
||||
# A glob pattern matching the artifacts that should be merged.
|
||||
# Optional. Default is '*'
|
||||
pattern:
|
||||
|
||||
# If true, the artifacts will be merged into separate directories.
|
||||
# If false, the artifacts will be merged into the root of the destination.
|
||||
# Optional. Default is 'false'
|
||||
separate-directories:
|
||||
|
||||
# If true, the artifacts that were merged will be deleted.
|
||||
# If false, the artifacts will still exist.
|
||||
# Optional. Default is 'false'
|
||||
delete-merged:
|
||||
|
||||
# Duration after which artifact will expire in days. 0 means using default retention.
|
||||
# Minimum 1 day.
|
||||
# Maximum 90 days unless changed from the repository settings page.
|
||||
# Optional. Defaults to repository settings.
|
||||
retention-days:
|
||||
|
||||
# The level of compression for Zlib to be applied to the artifact archive.
|
||||
# The value can range from 0 to 9.
|
||||
# For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
|
||||
# Optional. Default is '6'
|
||||
compression-level:
|
||||
```
|
||||
|
||||
### Outputs
|
||||
|
||||
| Name | Description | Example |
|
||||
| - | - | - |
|
||||
| `artifact-id` | GitHub ID of an Artifact, can be used by the REST API | `1234` |
|
||||
| `artifact-url` | URL to download an Artifact. Can be used in many scenarios such as linking to artifacts in issues or pull requests. Users must be logged-in in order for this URL to work. This URL is valid as long as the artifact has not expired or the artifact, run or repository have not been deleted | `https://github.com/example-org/example-repo/actions/runs/1/artifacts/1234` |
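A minimal sketch of consuming these outputs in a later step (the step id `merge` is only illustrative):

```yaml
- name: Merge Artifacts
  id: merge
  uses: actions/upload-artifact/merge@v4
- name: Use the merge outputs
  run: |
    echo "Merged artifact ID:  ${{ steps.merge.outputs.artifact-id }}"
    echo "Merged artifact URL: ${{ steps.merge.outputs.artifact-url }}"
```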
|
||||
|
||||
## Examples
|
||||
|
||||
For each of these examples, assume we have a prior job matrix that generates three artifacts: `my-artifact-a`, `my-artifact-b` and `my-artifact-c`.
|
||||
|
||||
e.g.
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
upload:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
foo: [a, b, c]
|
||||
|
||||
steps:
|
||||
- name: Run a one-line script
|
||||
run: echo "hello from job ${{ matrix.foo }}" > file-${{ matrix.foo }}.txt
|
||||
- name: Upload
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: my-artifact-${{ matrix.foo }}
|
||||
path: file-${{ matrix.foo }}.txt
|
||||
```
|
||||
|
||||
Each of the following examples uses `needs: upload` as a prerequisite before any merge operations.
|
||||
|
||||
### Combining all artifacts in a workflow run
|
||||
|
||||
By default (with no inputs), calling this action will take all the artifacts in the workflow run and combine them into a single artifact called `merged-artifacts`:
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
# ... <upload job> ...
|
||||
merge:
|
||||
runs-on: ubuntu-latest
|
||||
needs: upload
|
||||
steps:
|
||||
- name: Merge Artifacts
|
||||
uses: actions/upload-artifact/merge@v4
|
||||
```
|
||||
|
||||
This will result in an artifact called `merged-artifacts` with the following content:
|
||||
|
||||
```
|
||||
.
|
||||
∟ file-a.txt
|
||||
∟ file-b.txt
|
||||
∟ file-c.txt
|
||||
```
|
||||
|
||||
To change the name of the artifact and filter on what artifacts are added, you can use the `name` and `pattern` inputs:
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
# ... <upload job> ...
|
||||
merge:
|
||||
runs-on: ubuntu-latest
|
||||
needs: upload
|
||||
steps:
|
||||
- name: Merge Artifacts
|
||||
uses: actions/upload-artifact/merge@v4
|
||||
with:
|
||||
name: my-amazing-merged-artifact
|
||||
pattern: my-artifact-*
|
||||
```
|
||||
|
||||
### Prefix directories in merged artifact
|
||||
|
||||
To prevent files with the same name in different artifacts from overwriting each other, you can use the `separate-directories` input to place the extracted files in directories named after the original artifacts:
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
# ... <upload job> ...
|
||||
merge:
|
||||
runs-on: ubuntu-latest
|
||||
needs: upload
|
||||
steps:
|
||||
- name: Merge Artifacts
|
||||
uses: actions/upload-artifact/merge@v4
|
||||
with:
|
||||
separate-directories: true
|
||||
```
|
||||
|
||||
This will result in the following artifact structure:
|
||||
|
||||
```
|
||||
.
|
||||
∟ my-artifact-a
|
||||
∟ file-a.txt
|
||||
∟ my-artifact-b
|
||||
∟ file-b.txt
|
||||
∟ my-artifact-c
|
||||
∟ file-c.txt
|
||||
```
|
||||
|
||||
### Deleting artifacts after merge
|
||||
|
||||
After merge, the old artifacts may no longer be required. To automatically delete them after they are merged into a new artifact, you can use `delete-merged` like so:
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
# ... <upload job> ...
|
||||
merge:
|
||||
runs-on: ubuntu-latest
|
||||
needs: upload
|
||||
steps:
|
||||
- name: Merge Artifacts
|
||||
uses: actions/upload-artifact/merge@v4
|
||||
with:
|
||||
delete-merged: true
|
||||
```
|
||||
|
||||
After this runs, the matching artifacts (`my-artifact-a`, `my-artifact-b` and `my-artifact-c`) will be merged into the new artifact and then deleted.
|
||||
|
||||
### Retention and Compression Level
|
||||
|
||||
Similar to actions/upload-artifact, both [`retention-days`](../README.md#retention-period) and [`compression-level`](../README.md#altering-compressions-level-speed-v-size) are supported:
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
# ... <upload job> ...
|
||||
merge:
|
||||
runs-on: ubuntu-latest
|
||||
needs: upload
|
||||
steps:
|
||||
- name: Merge Artifacts
|
||||
uses: actions/upload-artifact/merge@v4
|
||||
with:
|
||||
retention-days: 1
|
||||
compression-level: 9
|
||||
```
|
|
@ -1,57 +0,0 @@
|
|||
name: 'Merge Build Artifacts'
|
||||
description: 'Merge one or more build Artifacts'
|
||||
author: 'GitHub'
|
||||
inputs:
|
||||
name:
|
||||
description: 'The name of the artifact that the artifacts will be merged into.'
|
||||
required: true
|
||||
default: 'merged-artifacts'
|
||||
pattern:
|
||||
description: 'A glob pattern matching the artifact names that should be merged.'
|
||||
default: '*'
|
||||
separate-directories:
|
||||
description: 'When multiple artifacts are matched, this changes the behavior of how they are merged in the archive.
|
||||
If true, the matched artifacts will be extracted into individual named directories within the specified path.
|
||||
If false, the matched artifacts will be combined in the same directory.'
|
||||
default: 'false'
|
||||
retention-days:
|
||||
description: >
|
||||
Duration after which artifact will expire in days. 0 means using default retention.
|
||||
|
||||
Minimum 1 day.
|
||||
Maximum 90 days unless changed from the repository settings page.
|
||||
compression-level:
|
||||
description: >
|
||||
The level of compression for Zlib to be applied to the artifact archive.
|
||||
The value can range from 0 to 9:
|
||||
- 0: No compression
|
||||
- 1: Best speed
|
||||
- 6: Default compression (same as GNU Gzip)
|
||||
- 9: Best compression
|
||||
Higher levels will result in better compression, but will take longer to complete.
|
||||
For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
|
||||
default: '6'
|
||||
delete-merged:
|
||||
description: >
|
||||
If true, the artifacts that were merged will be deleted.
|
||||
If false, the artifacts will still exist.
|
||||
default: 'false'
|
||||
|
||||
outputs:
|
||||
artifact-id:
|
||||
description: >
|
||||
A unique identifier for the artifact that was just uploaded. Empty if the artifact upload failed.
|
||||
|
||||
This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts
|
||||
artifact-url:
|
||||
description: >
|
||||
A download URL for the artifact that was just uploaded. Empty if the artifact upload failed.
|
||||
|
||||
This download URL only works for requests authenticated with GitHub. Anonymous downloads will be prompted to first login.
If an anonymous download URL is needed then a short, time-restricted URL can be generated using the download artifact API: https://docs.github.com/en/rest/actions/artifacts#download-an-artifact

This URL will be valid for as long as the artifact, the workflow run and the repository exist. Once an artifact has expired this URL will no longer work.
Common use cases for such a download URL include adding download links to artifacts in descriptions or comments on pull requests or issues.
|
||||
runs:
|
||||
using: 'node20'
|
||||
main: '../dist/merge/index.js'
|
File diff suppressed because it is too large
package.json
|
@ -1,11 +1,11 @@
|
|||
{
|
||||
"name": "upload-artifact",
|
||||
"version": "4.3.6",
|
||||
"description": "Upload an Actions Artifact in a workflow run",
|
||||
"main": "dist/upload/index.js",
|
||||
"version": "2.0.1",
|
||||
"description": "Upload a build artifact that can be used by subsequent workflow steps",
|
||||
"main": "dist/index.js",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"release": "ncc build src/upload/index.ts -o dist/upload && ncc build src/merge/index.ts -o dist/merge",
|
||||
"release": "ncc build src/upload-artifact.ts && git add -f dist/",
|
||||
"check-all": "concurrently \"npm:format-check\" \"npm:lint\" \"npm:test\" \"npm:build\"",
|
||||
"format": "prettier --write **/*.ts",
|
||||
"format-check": "prettier --check **/*.ts",
|
||||
|
@ -28,28 +28,24 @@
|
|||
"url": "https://github.com/actions/upload-artifact/issues"
|
||||
},
|
||||
"homepage": "https://github.com/actions/upload-artifact#readme",
|
||||
"dependencies": {
|
||||
"@actions/artifact": "2.1.8",
|
||||
"@actions/core": "^1.10.1",
|
||||
"@actions/github": "^6.0.0",
|
||||
"@actions/glob": "^0.3.0",
|
||||
"@actions/io": "^1.1.2",
|
||||
"minimatch": "^9.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/jest": "^29.2.5",
|
||||
"@types/node": "^18.11.18",
|
||||
"@typescript-eslint/parser": "^5.48.0",
|
||||
"@vercel/ncc": "^0.36.0",
|
||||
"concurrently": "^7.6.0",
|
||||
"eslint": "^8.31.0",
|
||||
"eslint-plugin-github": "^4.6.0",
|
||||
"eslint-plugin-jest": "^27.2.0",
|
||||
"glob": "^8.0.3",
|
||||
"jest": "^29.3.1",
|
||||
"jest-circus": "^29.3.1",
|
||||
"prettier": "^2.8.1",
|
||||
"ts-jest": "^29.0.3",
|
||||
"typescript": "^4.9.4"
|
||||
"@actions/artifact": "^0.3.2",
|
||||
"@actions/core": "^1.2.3",
|
||||
"@actions/glob": "^0.1.0",
|
||||
"@actions/io": "^1.0.2",
|
||||
"@types/jest": "^25.2.1",
|
||||
"@types/node": "^13.11.1",
|
||||
"@typescript-eslint/parser": "^2.27.0",
|
||||
"@zeit/ncc": "^0.22.1",
|
||||
"concurrently": "^5.1.0",
|
||||
"eslint": "^7.4.0",
|
||||
"eslint-plugin-github": "^3.4.1",
|
||||
"eslint-plugin-jest": "^23.8.2",
|
||||
"glob": "^7.1.6",
|
||||
"jest": "^26.1.0",
|
||||
"jest-circus": "^26.1.0",
|
||||
"prettier": "^2.0.4",
|
||||
"ts-jest": "^25.3.1",
|
||||
"typescript": "^3.8.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
export enum Inputs {
|
||||
Name = 'name',
|
||||
Path = 'path'
|
||||
}
|
||||
|
||||
export function getDefaultArtifactName(): string {
|
||||
return 'artifact'
|
||||
}
|
|
@ -1,9 +0,0 @@
|
|||
/* eslint-disable no-unused-vars */
|
||||
export enum Inputs {
|
||||
Name = 'name',
|
||||
Pattern = 'pattern',
|
||||
SeparateDirectories = 'separate-directories',
|
||||
RetentionDays = 'retention-days',
|
||||
CompressionLevel = 'compression-level',
|
||||
DeleteMerged = 'delete-merged'
|
||||
}
|
|
@ -1,6 +0,0 @@
|
|||
import * as core from '@actions/core'
|
||||
import {run} from './merge-artifacts'
|
||||
|
||||
run().catch(error => {
|
||||
core.setFailed((error as Error).message)
|
||||
})
|
|
@ -1,44 +0,0 @@
|
|||
import * as core from '@actions/core'
|
||||
import {Inputs} from './constants'
|
||||
import {MergeInputs} from './merge-inputs'
|
||||
|
||||
/**
|
||||
* Helper to get all the inputs for the action
|
||||
*/
|
||||
export function getInputs(): MergeInputs {
|
||||
const name = core.getInput(Inputs.Name, {required: true})
|
||||
const pattern = core.getInput(Inputs.Pattern, {required: true})
|
||||
const separateDirectories = core.getBooleanInput(Inputs.SeparateDirectories)
|
||||
const deleteMerged = core.getBooleanInput(Inputs.DeleteMerged)
|
||||
|
||||
const inputs = {
|
||||
name,
|
||||
pattern,
|
||||
separateDirectories,
|
||||
deleteMerged,
|
||||
retentionDays: 0,
|
||||
compressionLevel: 6
|
||||
} as MergeInputs
|
||||
|
||||
const retentionDaysStr = core.getInput(Inputs.RetentionDays)
|
||||
if (retentionDaysStr) {
|
||||
inputs.retentionDays = parseInt(retentionDaysStr)
|
||||
if (isNaN(inputs.retentionDays)) {
|
||||
core.setFailed('Invalid retention-days')
|
||||
}
|
||||
}
|
||||
|
||||
const compressionLevelStr = core.getInput(Inputs.CompressionLevel)
|
||||
if (compressionLevelStr) {
|
||||
inputs.compressionLevel = parseInt(compressionLevelStr)
|
||||
if (isNaN(inputs.compressionLevel)) {
|
||||
core.setFailed('Invalid compression-level')
|
||||
}
|
||||
|
||||
if (inputs.compressionLevel < 0 || inputs.compressionLevel > 9) {
|
||||
core.setFailed('Invalid compression-level. Valid values are 0-9')
|
||||
}
|
||||
}
|
||||
|
||||
return inputs
|
||||
}
|
|
@ -1,93 +0,0 @@
|
|||
import * as path from 'path'
|
||||
import {mkdtemp, rm} from 'fs/promises'
|
||||
import * as core from '@actions/core'
|
||||
import {Minimatch} from 'minimatch'
|
||||
import artifactClient, {UploadArtifactOptions} from '@actions/artifact'
|
||||
import {getInputs} from './input-helper'
|
||||
import {uploadArtifact} from '../shared/upload-artifact'
|
||||
import {findFilesToUpload} from '../shared/search'
|
||||
|
||||
const PARALLEL_DOWNLOADS = 5
|
||||
|
||||
export const chunk = <T>(arr: T[], n: number): T[][] =>
|
||||
arr.reduce((acc, cur, i) => {
|
||||
const index = Math.floor(i / n)
|
||||
acc[index] = [...(acc[index] || []), cur]
|
||||
return acc
|
||||
}, [] as T[][])
|
||||
|
||||
export async function run(): Promise<void> {
|
||||
const inputs = getInputs()
|
||||
const tmpDir = await mkdtemp('merge-artifact')
|
||||
|
||||
const listArtifactResponse = await artifactClient.listArtifacts({
|
||||
latest: true
|
||||
})
|
||||
const matcher = new Minimatch(inputs.pattern)
|
||||
const artifacts = listArtifactResponse.artifacts.filter(artifact =>
|
||||
matcher.match(artifact.name)
|
||||
)
|
||||
core.debug(
|
||||
`Filtered from ${listArtifactResponse.artifacts.length} to ${artifacts.length} artifacts`
|
||||
)
|
||||
|
||||
if (artifacts.length === 0) {
|
||||
throw new Error(`No artifacts found matching pattern '${inputs.pattern}'`)
|
||||
}
|
||||
|
||||
core.info(`Preparing to download the following artifacts:`)
|
||||
artifacts.forEach(artifact => {
|
||||
core.info(`- ${artifact.name} (ID: ${artifact.id}, Size: ${artifact.size})`)
|
||||
})
|
||||
|
||||
const downloadPromises = artifacts.map(artifact =>
|
||||
artifactClient.downloadArtifact(artifact.id, {
|
||||
path: inputs.separateDirectories
|
||||
? path.join(tmpDir, artifact.name)
|
||||
: tmpDir
|
||||
})
|
||||
)
|
||||
|
||||
const chunkedPromises = chunk(downloadPromises, PARALLEL_DOWNLOADS)
|
||||
for (const chunk of chunkedPromises) {
|
||||
await Promise.all(chunk)
|
||||
}
|
||||
|
||||
const options: UploadArtifactOptions = {}
|
||||
if (inputs.retentionDays) {
|
||||
options.retentionDays = inputs.retentionDays
|
||||
}
|
||||
|
||||
if (typeof inputs.compressionLevel !== 'undefined') {
|
||||
options.compressionLevel = inputs.compressionLevel
|
||||
}
|
||||
|
||||
const searchResult = await findFilesToUpload(tmpDir)
|
||||
|
||||
await uploadArtifact(
|
||||
inputs.name,
|
||||
searchResult.filesToUpload,
|
||||
searchResult.rootDirectory,
|
||||
options
|
||||
)
|
||||
|
||||
core.info(
|
||||
`The ${artifacts.length} artifact(s) have been successfully merged!`
|
||||
)
|
||||
|
||||
if (inputs.deleteMerged) {
|
||||
const deletePromises = artifacts.map(artifact =>
|
||||
artifactClient.deleteArtifact(artifact.name)
|
||||
)
|
||||
await Promise.all(deletePromises)
|
||||
core.info(`The ${artifacts.length} artifact(s) have been deleted`)
|
||||
}
|
||||
|
||||
try {
|
||||
await rm(tmpDir, {recursive: true})
|
||||
} catch (error) {
|
||||
core.warning(
|
||||
`Unable to remove temporary directory: ${(error as Error).message}`
|
||||
)
|
||||
}
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
export interface MergeInputs {
|
||||
/**
|
||||
* The name of the artifact that the artifacts will be merged into
|
||||
*/
|
||||
name: string
|
||||
|
||||
/**
|
||||
* A glob pattern matching the artifacts that should be merged.
|
||||
*/
|
||||
pattern: string
|
||||
|
||||
/**
|
||||
* Duration after which artifact will expire in days
|
||||
*/
|
||||
retentionDays: number
|
||||
|
||||
/**
|
||||
* The level of compression for Zlib to be applied to the artifact archive.
|
||||
*/
|
||||
compressionLevel?: number
|
||||
|
||||
/**
|
||||
* If true, the artifacts that were merged will be deleted.
|
||||
* If false, the artifacts will still exist.
|
||||
*/
|
||||
deleteMerged: boolean
|
||||
|
||||
/**
|
||||
* If true, the artifacts will be merged into separate directories.
|
||||
* If false, the artifacts will be merged into the root of the destination.
|
||||
*/
|
||||
separateDirectories: boolean
|
||||
}
|
|
@ -1,10 +1,8 @@
|
|||
import * as glob from '@actions/glob'
|
||||
import * as path from 'path'
|
||||
import {debug, info} from '@actions/core'
|
||||
import {stat} from 'fs'
|
||||
import {lstatSync} from 'fs'
|
||||
import {dirname} from 'path'
|
||||
import {promisify} from 'util'
|
||||
const stats = promisify(stat)
|
||||
|
||||
export interface SearchResult {
|
||||
filesToUpload: string[]
|
||||
|
@ -66,7 +64,7 @@ function getMultiPathLCA(searchPaths: string[]): string {
|
|||
return true
|
||||
}
|
||||
|
||||
// loop over all the search paths until there is a non-common ancestor or we go out of bounds
|
||||
// Loop over all the search paths until there is a non-common ancestor or we go out of bounds
|
||||
while (splitIndex < smallestPathLength) {
|
||||
if (!isPathTheSame()) {
|
||||
break
|
||||
|
@ -89,31 +87,14 @@ export async function findFilesToUpload(
|
|||
)
|
||||
const rawSearchResults: string[] = await globber.glob()
|
||||
|
||||
/*
|
||||
Files are saved with case insensitivity. Uploading both a.txt and A.txt will cause files to be overwritten
|
||||
Detect any files that could be overwritten for user awareness
|
||||
*/
|
||||
const set = new Set<string>()
|
||||
|
||||
/*
|
||||
Directories will be rejected if attempted to be uploaded. This includes just empty
|
||||
directories so filter any directories out from the raw search results
|
||||
*/
|
||||
for (const searchResult of rawSearchResults) {
|
||||
const fileStats = await stats(searchResult)
|
||||
// isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
|
||||
if (!fileStats.isDirectory()) {
|
||||
if (!lstatSync(searchResult).isDirectory()) {
|
||||
debug(`File:${searchResult} was found using the provided searchPath`)
|
||||
searchResults.push(searchResult)
|
||||
|
||||
// detect any files that would be overwritten because of case insensitivity
|
||||
if (set.has(searchResult.toLowerCase())) {
|
||||
info(
|
||||
`Uploads are case insensitive: ${searchResult} was detected that it will be overwritten by another file with the same path`
|
||||
)
|
||||
} else {
|
||||
set.add(searchResult.toLowerCase())
|
||||
}
|
||||
} else {
|
||||
debug(
|
||||
`Removing ${searchResult} from rawSearchResults because it is a directory`
|
|
@ -1,28 +0,0 @@
|
|||
import * as core from '@actions/core'
|
||||
import * as github from '@actions/github'
|
||||
import artifact, {UploadArtifactOptions} from '@actions/artifact'
|
||||
|
||||
export async function uploadArtifact(
|
||||
artifactName: string,
|
||||
filesToUpload: string[],
|
||||
rootDirectory: string,
|
||||
options: UploadArtifactOptions
|
||||
) {
|
||||
const uploadResponse = await artifact.uploadArtifact(
|
||||
artifactName,
|
||||
filesToUpload,
|
||||
rootDirectory,
|
||||
options
|
||||
)
|
||||
|
||||
core.info(
|
||||
`Artifact ${artifactName} has been successfully uploaded! Final size is ${uploadResponse.size} bytes. Artifact ID is ${uploadResponse.id}`
|
||||
)
|
||||
core.setOutput('artifact-id', uploadResponse.id)
|
||||
|
||||
const repository = github.context.repo
|
||||
const artifactURL = `${github.context.serverUrl}/${repository.owner}/${repository.repo}/actions/runs/${github.context.runId}/artifacts/${uploadResponse.id}`
|
||||
|
||||
core.info(`Artifact download URL: ${artifactURL}`)
|
||||
core.setOutput('artifact-url', artifactURL)
|
||||
}
|
|
@ -0,0 +1,48 @@
|
|||
import * as core from '@actions/core'
|
||||
import {create, UploadOptions} from '@actions/artifact'
|
||||
import {Inputs, getDefaultArtifactName} from './constants'
|
||||
import {findFilesToUpload} from './search'
|
||||
|
||||
async function run(): Promise<void> {
|
||||
try {
|
||||
const name = core.getInput(Inputs.Name, {required: false})
|
||||
const path = core.getInput(Inputs.Path, {required: true})
|
||||
|
||||
const searchResult = await findFilesToUpload(path)
|
||||
if (searchResult.filesToUpload.length === 0) {
|
||||
core.warning(
|
||||
`No files were found for the provided path: ${path}. No artifacts will be uploaded.`
|
||||
)
|
||||
} else {
|
||||
core.info(
|
||||
`With the provided path, there will be ${searchResult.filesToUpload.length} files uploaded`
|
||||
)
|
||||
core.debug(`Root artifact directory is ${searchResult.rootDirectory}`)
|
||||
|
||||
const artifactClient = create()
|
||||
const options: UploadOptions = {
|
||||
continueOnError: false
|
||||
}
|
||||
const uploadResponse = await artifactClient.uploadArtifact(
|
||||
name || getDefaultArtifactName(),
|
||||
searchResult.filesToUpload,
|
||||
searchResult.rootDirectory,
|
||||
options
|
||||
)
|
||||
|
||||
if (uploadResponse.failedItems.length > 0) {
|
||||
core.setFailed(
|
||||
`An error was encountered when uploading ${uploadResponse.artifactName}. There were ${uploadResponse.failedItems.length} items that failed to upload.`
|
||||
)
|
||||
} else {
|
||||
core.info(
|
||||
`Artifact ${uploadResponse.artifactName} has been successfully uploaded!`
|
||||
)
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
core.setFailed(err.message)
|
||||
}
|
||||
}
|
||||
|
||||
run()
|
|
@ -1,26 +0,0 @@
|
|||
/* eslint-disable no-unused-vars */
|
||||
export enum Inputs {
|
||||
Name = 'name',
|
||||
Path = 'path',
|
||||
IfNoFilesFound = 'if-no-files-found',
|
||||
RetentionDays = 'retention-days',
|
||||
CompressionLevel = 'compression-level',
|
||||
Overwrite = 'overwrite'
|
||||
}
|
||||
|
||||
export enum NoFileOptions {
|
||||
/**
|
||||
* Default. Output a warning but do not fail the action
|
||||
*/
|
||||
warn = 'warn',
|
||||
|
||||
/**
|
||||
* Fail the action with an error message
|
||||
*/
|
||||
error = 'error',
|
||||
|
||||
/**
|
||||
* Do not output any warnings or errors, the action does not fail
|
||||
*/
|
||||
ignore = 'ignore'
|
||||
}
|
|
@ -1,6 +0,0 @@
|
|||
import * as core from '@actions/core'
|
||||
import {run} from './upload-artifact'
|
||||
|
||||
run().catch(error => {
|
||||
core.setFailed((error as Error).message)
|
||||
})
|
|
@ -1,54 +0,0 @@
|
|||
import * as core from '@actions/core'
|
||||
import {Inputs, NoFileOptions} from './constants'
|
||||
import {UploadInputs} from './upload-inputs'
|
||||
|
||||
/**
|
||||
* Helper to get all the inputs for the action
|
||||
*/
|
||||
export function getInputs(): UploadInputs {
|
||||
const name = core.getInput(Inputs.Name)
|
||||
const path = core.getInput(Inputs.Path, {required: true})
|
||||
const overwrite = core.getBooleanInput(Inputs.Overwrite)
|
||||
|
||||
const ifNoFilesFound = core.getInput(Inputs.IfNoFilesFound)
|
||||
const noFileBehavior: NoFileOptions = NoFileOptions[ifNoFilesFound]
|
||||
|
||||
if (!noFileBehavior) {
|
||||
core.setFailed(
|
||||
`Unrecognized ${
|
||||
Inputs.IfNoFilesFound
|
||||
} input. Provided: ${ifNoFilesFound}. Available options: ${Object.keys(
|
||||
NoFileOptions
|
||||
)}`
|
||||
)
|
||||
}
|
||||
|
||||
const inputs = {
|
||||
artifactName: name,
|
||||
searchPath: path,
|
||||
ifNoFilesFound: noFileBehavior,
|
||||
overwrite: overwrite
|
||||
} as UploadInputs
|
||||
|
||||
const retentionDaysStr = core.getInput(Inputs.RetentionDays)
|
||||
if (retentionDaysStr) {
|
||||
inputs.retentionDays = parseInt(retentionDaysStr)
|
||||
if (isNaN(inputs.retentionDays)) {
|
||||
core.setFailed('Invalid retention-days')
|
||||
}
|
||||
}
|
||||
|
||||
const compressionLevelStr = core.getInput(Inputs.CompressionLevel)
|
||||
if (compressionLevelStr) {
|
||||
inputs.compressionLevel = parseInt(compressionLevelStr)
|
||||
if (isNaN(inputs.compressionLevel)) {
|
||||
core.setFailed('Invalid compression-level')
|
||||
}
|
||||
|
||||
if (inputs.compressionLevel < 0 || inputs.compressionLevel > 9) {
|
||||
core.setFailed('Invalid compression-level. Valid values are 0-9')
|
||||
}
|
||||
}
|
||||
|
||||
return inputs
|
||||
}
|
|
@ -1,77 +0,0 @@
|
|||
import * as core from '@actions/core'
|
||||
import artifact, {
|
||||
UploadArtifactOptions,
|
||||
ArtifactNotFoundError
|
||||
} from '@actions/artifact'
|
||||
import {findFilesToUpload} from '../shared/search'
|
||||
import {getInputs} from './input-helper'
|
||||
import {NoFileOptions} from './constants'
|
||||
import {uploadArtifact} from '../shared/upload-artifact'
|
||||
|
||||
async function deleteArtifactIfExists(artifactName: string): Promise<void> {
|
||||
try {
|
||||
await artifact.deleteArtifact(artifactName)
|
||||
} catch (error) {
|
||||
if (error instanceof ArtifactNotFoundError) {
|
||||
core.debug(`Skipping deletion of '${artifactName}', it does not exist`)
|
||||
return
|
||||
}
|
||||
|
||||
// Best effort, we don't want to fail the action if this fails
|
||||
core.debug(`Unable to delete artifact: ${(error as Error).message}`)
|
||||
}
|
||||
}
|
||||
|
||||
export async function run(): Promise<void> {
|
||||
const inputs = getInputs()
|
||||
const searchResult = await findFilesToUpload(inputs.searchPath)
|
||||
if (searchResult.filesToUpload.length === 0) {
|
||||
// No files were found, different use cases warrant different types of behavior if nothing is found
|
||||
switch (inputs.ifNoFilesFound) {
|
||||
case NoFileOptions.warn: {
|
||||
core.warning(
|
||||
`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`
|
||||
)
|
||||
break
|
||||
}
|
||||
case NoFileOptions.error: {
|
||||
core.setFailed(
|
||||
`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`
|
||||
)
|
||||
break
|
||||
}
|
||||
case NoFileOptions.ignore: {
|
||||
core.info(
|
||||
`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`
|
||||
)
|
||||
break
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const s = searchResult.filesToUpload.length === 1 ? '' : 's'
|
||||
core.info(
|
||||
`With the provided path, there will be ${searchResult.filesToUpload.length} file${s} uploaded`
|
||||
)
|
||||
core.debug(`Root artifact directory is ${searchResult.rootDirectory}`)
|
||||
|
||||
if (inputs.overwrite) {
|
||||
await deleteArtifactIfExists(inputs.artifactName)
|
||||
}
|
||||
|
||||
const options: UploadArtifactOptions = {}
|
||||
if (inputs.retentionDays) {
|
||||
options.retentionDays = inputs.retentionDays
|
||||
}
|
||||
|
||||
if (typeof inputs.compressionLevel !== 'undefined') {
|
||||
options.compressionLevel = inputs.compressionLevel
|
||||
}
|
||||
|
||||
await uploadArtifact(
|
||||
inputs.artifactName,
|
||||
searchResult.filesToUpload,
|
||||
searchResult.rootDirectory,
|
||||
options
|
||||
)
|
||||
}
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
import {NoFileOptions} from './constants'
|
||||
|
||||
export interface UploadInputs {
|
||||
/**
|
||||
* The name of the artifact that will be uploaded
|
||||
*/
|
||||
artifactName: string
|
||||
|
||||
/**
|
||||
* The search path used to describe what to upload as part of the artifact
|
||||
*/
|
||||
searchPath: string
|
||||
|
||||
/**
|
||||
* The desired behavior if no files are found with the provided search path
|
||||
*/
|
||||
ifNoFilesFound: NoFileOptions
|
||||
|
||||
/**
|
||||
* Duration after which artifact will expire in days
|
||||
*/
|
||||
retentionDays: number
|
||||
|
||||
/**
|
||||
* The level of compression for Zlib to be applied to the artifact archive.
|
||||
*/
|
||||
compressionLevel?: number
|
||||
|
||||
/**
|
||||
* Whether or not to replace an existing artifact with the same name
|
||||
*/
|
||||
overwrite: boolean
|
||||
}
|