15 Commits

Author SHA1 Message Date
Akkuman
b8d9144f30 fix: must still call the fileHandle.close() method since nodejs v17 2026-03-24 05:55:35 +00:00
Akkuman
e476391eee fix: autoclose stream 2026-03-24 05:37:37 +00:00
Akkuman
fd3cba014c fix: use readStream instead of read
related: https://github.com/akkuman/gitea-release-action/issues/10
2026-03-24 05:26:41 +00:00
Akkuman
9ee2a5d4a3 dev: Add devcontainer config 2026-03-23 08:10:02 +00:00
Akkuman
4875285c09 fix: author info 2025-11-07 16:09:48 +08:00
Akkuman
aae35ac409 Merge remote-tracking branch 'origin/main' into pr-branch 2025-11-07 16:04:59 +08:00
akkuman
c95a2785f0 Merge pull request #8 from grypho/main
Feature: Preserve UI fields when creating/uploading a release
2025-11-07 16:56:49 +09:00
Dominik Wetzel
424dc33baa fix: Keep existing values by default when updating release 2025-11-07 08:22:00 +01:00
Carsten Schumann
9ca8dcac95 Bugfix: body_path was ignored because of default body. 2025-10-20 11:59:36 +02:00
Carsten Schumann
05b1004877 Update README 2025-10-20 11:25:28 +02:00
Carsten Schumann
008a54b0cd Feature: When using the Gitea UI to create a release the fields "name", "body" and "prerelease" are no longer overwritten with empty values. The existing data is used by default. 2025-08-20 10:41:54 +02:00
akkuman
fe8e032280 Merge pull request #7 from n08i40k/main
Declare missing body_path input in action.yml
2025-07-28 09:19:09 +08:00
Nikita
3dbdc45d61 docs: declare body_path input 2025-07-26 14:35:12 +04:00
Akkuman
f66c1c98f1 fix: deletion of old releases
Duplicate deletions occur when users generate their own .md5 and .sha256 files and do not use action's built-in md5sum and sha256sum functions.
issue: https://github.com/akkuman/gitea-release-action/issues/5
2025-06-25 10:53:06 +08:00
Akkuman
65a502e85c fix: delete old release attachments 2025-06-23 15:10:15 +08:00
5 changed files with 250 additions and 48 deletions

View File

@@ -0,0 +1,22 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/javascript-node
{
"name": "gitea-release-action",
// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
"image": "mcr.microsoft.com/devcontainers/javascript-node:4-22-bookworm"
// Features to add to the dev container. More info: https://containers.dev/features.
// "features": {},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "yarn install",
// Configure tool-specific properties.
// "customizations": {},
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "root"
}

View File

@@ -2,6 +2,8 @@
An action to support publishing release to Gitea. An action to support publishing release to Gitea.
Preserves the fields body, prerelease and name when pushing the release if no value is given.
## Inputs ## Inputs
The following are optional as `step.with` keys The following are optional as `step.with` keys

View File

@@ -10,10 +10,14 @@ inputs:
body: body:
description: "Note-worthy description of changes in release" description: "Note-worthy description of changes in release"
required: false required: false
default: ${{ github.event.release.body != '' && github.event.release.body || null }}
body_path:
description: "Path to load description of changes in this release"
required: false
name: name:
description: "Gives the release a custom name. Defaults to tag name" description: "Gives the release a custom name. Defaults to tag name"
required: false required: false
default: ${{ github.ref_name }} default: ${{ github.event.release.name != '' && github.event.release.name || github.ref_name }}
tag_name: tag_name:
description: "Gives a tag name. Defaults to github.GITHUB_REF" description: "Gives a tag name. Defaults to github.GITHUB_REF"
required: false required: false
@@ -21,9 +25,11 @@ inputs:
draft: draft:
description: "Creates a draft release. Defaults to false" description: "Creates a draft release. Defaults to false"
required: false required: false
default: ${{ github.event.release.draft || false }}
prerelease: prerelease:
description: "Identify the release as a prerelease. Defaults to false" description: "Identify the release as a prerelease. Defaults to false"
required: false required: false
default: ${{ github.event.release.prerelease || false }}
files: files:
description: "Newline-delimited list of path globs for asset files to upload" description: "Newline-delimited list of path globs for asset files to upload"
required: false required: false

137
dist/index.js vendored
View File

@@ -40594,6 +40594,8 @@ var __webpack_exports__ = {};
// This entry need to be wrapped in an IIFE because it need to be isolated against other modules in the chunk. // This entry need to be wrapped in an IIFE because it need to be isolated against other modules in the chunk.
(() => { (() => {
;// CONCATENATED MODULE: external "node:fs/promises"
const promises_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:fs/promises");
// EXTERNAL MODULE: external "fs" // EXTERNAL MODULE: external "fs"
var external_fs_ = __nccwpck_require__(7147); var external_fs_ = __nccwpck_require__(7147);
var external_fs_namespaceObject = /*#__PURE__*/__nccwpck_require__.t(external_fs_, 2); var external_fs_namespaceObject = /*#__PURE__*/__nccwpck_require__.t(external_fs_, 2);
@@ -43826,7 +43828,7 @@ var external_path_ = __nccwpck_require__(1017);
// EXTERNAL MODULE: external "url" // EXTERNAL MODULE: external "url"
var external_url_ = __nccwpck_require__(7310); var external_url_ = __nccwpck_require__(7310);
;// CONCATENATED MODULE: external "fs/promises" ;// CONCATENATED MODULE: external "fs/promises"
const promises_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("fs/promises"); const external_fs_promises_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("fs/promises");
// EXTERNAL MODULE: external "events" // EXTERNAL MODULE: external "events"
var external_events_ = __nccwpck_require__(2361); var external_events_ = __nccwpck_require__(2361);
// EXTERNAL MODULE: external "stream" // EXTERNAL MODULE: external "stream"
@@ -44870,10 +44872,10 @@ const defaultFS = {
readlinkSync: external_fs_.readlinkSync, readlinkSync: external_fs_.readlinkSync,
realpathSync, realpathSync,
promises: { promises: {
lstat: promises_namespaceObject.lstat, lstat: external_fs_promises_namespaceObject.lstat,
readdir: promises_namespaceObject.readdir, readdir: external_fs_promises_namespaceObject.readdir,
readlink: promises_namespaceObject.readlink, readlink: external_fs_promises_namespaceObject.readlink,
realpath: promises_namespaceObject.realpath, realpath: external_fs_promises_namespaceObject.realpath,
}, },
}; };
// if they just gave us require('fs') then use our default // if they just gave us require('fs') then use our default
@@ -48144,6 +48146,7 @@ var crypto_js = __nccwpck_require__(4134);
function getIsTrue(v) { function getIsTrue(v) {
const trueValue = ['true', 'True', 'TRUE'] const trueValue = ['true', 'True', 'TRUE']
return trueValue.includes(v) return trueValue.includes(v)
@@ -48262,6 +48265,68 @@ function paths(patterns) {
}, []); }, []);
}; };
// Wrap the file at fpath in a File object whose stream() lazily reads from an
// open FileHandle instead of buffering the whole file in memory (bundled
// dist copy of createStreamableFile in main.js).
// NOTE(review): handle.readableWebStream() can only be consumed once per
// handle — callers that need to re-read appear to create a fresh File; confirm.
async function createStreamableFile(fpath) {
const name = external_path_.basename(fpath);
const handle = await promises_namespaceObject.open(fpath);
const { size } = await handle.stat();
const file = new external_buffer_.File([], name);
file.stream = () => handle.readableWebStream();
// Caller must invoke close(): since Node.js v17 the handle is no longer
// closed automatically when the web stream is exhausted.
file.close = async () => await handle?.close();
// Set correct size otherwise, fetch will encounter UND_ERR_REQ_CONTENT_LENGTH_MISMATCH
Object.defineProperty(file, 'size', { get: () => size });
return file;
}
// Stream the given file once and compute all requested digests in a single
// pass (bundled dist copy of calculateMultipleHashes in main.js).
// Returns an object mapping lowercase algorithm name -> hex digest string.
// Throws for algorithm names other than md5/sha1/sha256/sha512.
async function calculateMultipleHashes(file, algorithms = ['md5', 'sha256']) {
const stream = file.stream();
const reader = stream.getReader();
// One incremental crypto-js hasher per requested algorithm; unsupported
// names are rejected before any data is read.
const hashers = algorithms.map(alg => {
switch(alg.toLowerCase()) {
case 'md5':
return { name: 'md5', instance: crypto_js.algo.MD5.create() };
case 'sha1':
return { name: 'sha1', instance: crypto_js.algo.SHA1.create() };
case 'sha256':
return { name: 'sha256', instance: crypto_js.algo.SHA256.create() };
case 'sha512':
return { name: 'sha512', instance: crypto_js.algo.SHA512.create() };
default:
throw new Error(`not support hash: ${alg}`);
}
});
try {
// Feed every chunk to every hasher so the file is read only once.
while (true) {
const { done, value } = await reader.read();
if (done) {
break;
}
const wordArray = crypto_js.lib.WordArray.create(value);
hashers.forEach(hasher => {
hasher.instance.update(wordArray);
});
}
const result = {};
hashers.forEach(hasher => {
result[hasher.name] = hasher.instance.finalize().toString(crypto_js.enc.Hex);
});
return result;
} finally {
// Release the reader's lock on the stream even on failure; closing the
// underlying file handle is the caller's responsibility (file.close()).
reader.releaseLock();
}
}
/** /**
* *
* @param {gitea.GiteaApi} client * @param {gitea.GiteaApi} client
@@ -48278,32 +48343,55 @@ async function uploadFiles(client, owner, repo, release_id, all_files, params) {
repo: repo, repo: repo,
id: release_id, id: release_id,
}) })
// deleted old release attachment
const will_deleted = new Set();
for (const filepath of all_files) { for (const filepath of all_files) {
for (const attachment of attachments) { will_deleted.add(external_path_.basename(filepath));
let will_deleted = [external_path_.basename(filepath), `${external_path_.basename(filepath)}.md5`, `${external_path_.basename(filepath)}.sha256`] if (params.md5sum) {
if (will_deleted.includes(attachment.name)) { will_deleted.add(`${external_path_.basename(filepath)}.md5`);
await client.repository.repoDeleteReleaseAttachment({
owner: owner,
repo: repo,
id: release_id,
attachmentId: attachment.id,
})
console.log(`Successfully deleted old release attachment ${attachment.name}`)
}
} }
const content = external_fs_.readFileSync(filepath); if (params.sha256sum) {
let blob = new external_buffer_.Blob([content]); will_deleted.add(`${external_path_.basename(filepath)}.sha256`);
}
}
for (const attachment of attachments) {
if (will_deleted.has(attachment.name)) {
await client.repository.repoDeleteReleaseAttachment({
owner: owner,
repo: repo,
id: release_id,
attachmentId: attachment.id,
})
console.log(`Successfully deleted old release attachment ${attachment.name}`)
}
}
// upload new release attachment
for (const filepath of all_files) {
let curfile = await createStreamableFile(filepath)
await client.repository.repoCreateReleaseAttachment({ await client.repository.repoCreateReleaseAttachment({
owner: owner, owner: owner,
repo: repo, repo: repo,
id: release_id, id: release_id,
attachment: blob, attachment: curfile,
name: external_path_.basename(filepath), name: external_path_.basename(filepath),
}) })
await curfile.close();
let algorithms = [];
if (params.md5sum) { if (params.md5sum) {
let wordArray = crypto_js.lib.WordArray.create(content); algorithms = algorithms.concat('md5');
let hash = crypto_js.MD5(wordArray).toString(); }
blob = new external_buffer_.Blob([hash], { type : 'plain/text' }); if (params.sha256sum) {
algorithms = algorithms.concat('sha256');
}
let hashes = {};
if (algorithms.length !== 0) {
curfile = await createStreamableFile(filepath)
hashes = await calculateMultipleHashes(curfile, algorithms)
await curfile.close();
}
if (params.md5sum) {
let hash = hashes.md5;
let blob = new external_buffer_.Blob([hash], { type : 'plain/text' });
await client.repository.repoCreateReleaseAttachment({ await client.repository.repoCreateReleaseAttachment({
owner: owner, owner: owner,
repo: repo, repo: repo,
@@ -48313,9 +48401,8 @@ async function uploadFiles(client, owner, repo, release_id, all_files, params) {
}) })
} }
if (params.sha256sum) { if (params.sha256sum) {
let wordArray = crypto_js.lib.WordArray.create(content); let hash = hashes.sha256;
let hash = crypto_js.SHA256(wordArray).toString(); let blob = new external_buffer_.Blob([hash], { type : 'plain/text' });
blob = new external_buffer_.Blob([hash], { type : 'plain/text' });
await client.repository.repoCreateReleaseAttachment({ await client.repository.repoCreateReleaseAttachment({
owner: owner, owner: owner,
repo: repo, repo: repo,

127
main.js
View File

@@ -1,5 +1,6 @@
import asyncfs from "node:fs/promises";
import fs from "fs"; import fs from "fs";
import { Blob } from "buffer"; import { Blob, File } from "buffer";
import * as glob from "glob"; import * as glob from "glob";
import core from "@actions/core"; import core from "@actions/core";
@@ -126,6 +127,68 @@ function paths(patterns) {
}, []); }, []);
}; };
// Wrap the file at fpath in a Web-API File whose stream() lazily reads from
// an open FileHandle instead of buffering the whole file in memory
// (see https://github.com/akkuman/gitea-release-action/issues/10).
//
// @param {string} fpath - path of the file to wrap
// @returns {Promise<File>} a File with a lazy stream() and an async close();
//   callers MUST call close() when done — since Node.js v17 the handle is not
//   closed automatically when the web stream ends.
// @throws whatever asyncfs.open / handle.stat rejects with
async function createStreamableFile(fpath) {
  const name = path.basename(fpath);
  const handle = await asyncfs.open(fpath);
  let size;
  try {
    ({ size } = await handle.stat());
  } catch (err) {
    // Don't leak the open handle if stat fails.
    await handle.close();
    throw err;
  }
  const file = new File([], name);
  // NOTE: readableWebStream() can only be consumed once per handle; callers
  // needing a second pass create a fresh File via this function.
  file.stream = () => handle.readableWebStream();
  file.close = async () => await handle.close();
  // Report the real on-disk size; otherwise fetch sends a mismatched body
  // length and undici raises UND_ERR_REQ_CONTENT_LENGTH_MISMATCH.
  Object.defineProperty(file, 'size', { get: () => size });
  return file;
}
// Stream `file` once and compute several digests in a single pass.
//
// @param {File} file - object exposing stream() (see createStreamableFile)
// @param {string[]} [algorithms=['md5', 'sha256']] - digest names
//   (case-insensitive); supported: md5, sha1, sha256, sha512
// @returns {Promise<Object<string, string>>} lowercase algorithm name -> hex digest
// @throws {Error} when an unsupported algorithm name is requested
async function calculateMultipleHashes(file, algorithms = ['md5', 'sha256']) {
  const chunkReader = file.stream().getReader();
  // One incremental CryptoJS hasher per requested algorithm; unsupported
  // names are rejected up front, before any data is read.
  const factories = {
    md5: () => CryptoJS.algo.MD5.create(),
    sha1: () => CryptoJS.algo.SHA1.create(),
    sha256: () => CryptoJS.algo.SHA256.create(),
    sha512: () => CryptoJS.algo.SHA512.create(),
  };
  const digests = algorithms.map((alg) => {
    const key = alg.toLowerCase();
    if (!Object.hasOwn(factories, key)) {
      throw new Error(`not support hash: ${alg}`);
    }
    return { name: key, instance: factories[key]() };
  });
  try {
    // Feed every chunk to every hasher so the file is only read once.
    for (let step = await chunkReader.read(); !step.done; step = await chunkReader.read()) {
      const words = CryptoJS.lib.WordArray.create(step.value);
      for (const digest of digests) {
        digest.instance.update(words);
      }
    }
    const result = {};
    for (const digest of digests) {
      result[digest.name] = digest.instance.finalize().toString(CryptoJS.enc.Hex);
    }
    return result;
  } finally {
    // Always release the reader's lock; closing the underlying file handle
    // is the caller's responsibility (file.close()).
    chunkReader.releaseLock();
  }
}
/** /**
* *
* @param {gitea.GiteaApi} client * @param {gitea.GiteaApi} client
@@ -142,32 +205,55 @@ async function uploadFiles(client, owner, repo, release_id, all_files, params) {
repo: repo, repo: repo,
id: release_id, id: release_id,
}) })
// deleted old release attachment
const will_deleted = new Set();
for (const filepath of all_files) { for (const filepath of all_files) {
for (const attachment of attachments) { will_deleted.add(path.basename(filepath));
let will_deleted = [path.basename(filepath), `${path.basename(filepath)}.md5`, `${path.basename(filepath)}.sha256`] if (params.md5sum) {
if (will_deleted.includes(attachment.name)) { will_deleted.add(`${path.basename(filepath)}.md5`);
await client.repository.repoDeleteReleaseAttachment({
owner: owner,
repo: repo,
id: release_id,
attachmentId: attachment.id,
})
console.log(`Successfully deleted old release attachment ${attachment.name}`)
}
} }
const content = fs.readFileSync(filepath); if (params.sha256sum) {
let blob = new Blob([content]); will_deleted.add(`${path.basename(filepath)}.sha256`);
}
}
for (const attachment of attachments) {
if (will_deleted.has(attachment.name)) {
await client.repository.repoDeleteReleaseAttachment({
owner: owner,
repo: repo,
id: release_id,
attachmentId: attachment.id,
})
console.log(`Successfully deleted old release attachment ${attachment.name}`)
}
}
// upload new release attachment
for (const filepath of all_files) {
let curfile = await createStreamableFile(filepath)
await client.repository.repoCreateReleaseAttachment({ await client.repository.repoCreateReleaseAttachment({
owner: owner, owner: owner,
repo: repo, repo: repo,
id: release_id, id: release_id,
attachment: blob, attachment: curfile,
name: path.basename(filepath), name: path.basename(filepath),
}) })
await curfile.close();
let algorithms = [];
if (params.md5sum) { if (params.md5sum) {
let wordArray = CryptoJS.lib.WordArray.create(content); algorithms = algorithms.concat('md5');
let hash = CryptoJS.MD5(wordArray).toString(); }
blob = new Blob([hash], { type : 'plain/text' }); if (params.sha256sum) {
algorithms = algorithms.concat('sha256');
}
let hashes = {};
if (algorithms.length !== 0) {
curfile = await createStreamableFile(filepath)
hashes = await calculateMultipleHashes(curfile, algorithms)
await curfile.close();
}
if (params.md5sum) {
let hash = hashes.md5;
let blob = new Blob([hash], { type : 'plain/text' });
await client.repository.repoCreateReleaseAttachment({ await client.repository.repoCreateReleaseAttachment({
owner: owner, owner: owner,
repo: repo, repo: repo,
@@ -177,9 +263,8 @@ async function uploadFiles(client, owner, repo, release_id, all_files, params) {
}) })
} }
if (params.sha256sum) { if (params.sha256sum) {
let wordArray = CryptoJS.lib.WordArray.create(content); let hash = hashes.sha256;
let hash = CryptoJS.SHA256(wordArray).toString(); let blob = new Blob([hash], { type : 'plain/text' });
blob = new Blob([hash], { type : 'plain/text' });
await client.repository.repoCreateReleaseAttachment({ await client.repository.repoCreateReleaseAttachment({
owner: owner, owner: owner,
repo: repo, repo: repo,