Deploy version 1.0.2

Gitea Actions
2025-04-30 15:13:46 +00:00
parent 7a29fe8330
commit aabe44717a

dist/index.js (vendored, 72 changed lines)

@@ -26190,13 +26190,36 @@ var import_fs = require("fs");
 // src/util.ts
 function formatSize(bytes) {
-  if (bytes >= 1e9) {
-    return `${(bytes / 1e9).toFixed(2)} GB`;
-  } else if (bytes >= 1e6) {
-    return `${(bytes / 1e6).toFixed(2)} MB`;
-  } else {
-    return `${(bytes / 1e3).toFixed(2)} KB`;
-  }
+  const units = ["B", "KiB", "MiB", "GiB", "TiB"];
+  let i2 = 0;
+  while (bytes >= 1024 && i2 < units.length - 1) {
+    bytes /= 1024;
+    i2++;
+  }
+  return `${bytes.toFixed(2)} ${units[i2]}`;
 }
+function formatSpeed(bytes, seconds) {
+  if (seconds <= 0) return "\u221E";
+  const speedBytesPerSec = bytes / seconds;
+  const units = ["B/s", "KiB/s", "MiB/s", "GiB/s"];
+  let i2 = 0;
+  let speed = speedBytesPerSec;
+  while (speed >= 1024 && i2 < units.length - 1) {
+    speed /= 1024;
+    i2++;
+  }
+  return `${speed.toFixed(2)} ${units[i2]}`;
+}
+function formatDuration(ms) {
+  const totalSeconds = Math.floor(ms / 1e3);
+  const seconds = totalSeconds % 60;
+  const minutes = Math.floor(totalSeconds % 3600 / 60);
+  const hours = Math.floor(totalSeconds / 3600);
+  const parts = [];
+  if (hours > 0) parts.push(`${hours}h`);
+  if (minutes > 0 || hours > 0) parts.push(`${minutes}m`);
+  parts.push(`${seconds}s`);
+  return parts.join(" ");
+}
 // src/cache.ts
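
For reference, a minimal usage sketch of the three helpers introduced in this hunk (illustrative values, not output from the commit):

    console.log(formatSize(1536));          // "1.50 KiB"
    console.log(formatSpeed(52428800, 2));  // "25.00 MiB/s" (bytes, elapsed seconds)
    console.log(formatDuration(3723000));   // "1h 2m 3s" (milliseconds)
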
@@ -26217,8 +26240,8 @@ async function uploadCache(options, owner, repo, key, filePath) {
     body: stream
   });
   const endTime = Date.now();
-  const duration = (endTime - startTime) / 1e3;
-  const speedMBs = fileStats.size / 1024 / 1024 / duration;
+  const duration = endTime - startTime;
+  const size = fileStats.size;
   if (!res.ok) {
     throw new Error(
       `\u274C Failed to upload cache: ${res.status} ${res.statusText}`
@@ -26226,9 +26249,8 @@ async function uploadCache(options, owner, repo, key, filePath) {
   }
   console.log(`\u2705 Upload completed`);
   console.log(`\u{1F4E6} Size: ${formatSize(fileStats.size)}`);
-  console.log(`\u23F1 Duration: ${duration.toFixed(2)} s`);
-  console.log(`\u26A1 Speed: ${speedMBs.toFixed(2)} MB/s
-`);
+  console.log(`\u23F1 Duration: ${formatDuration(duration)}`);
+  console.log(`\u26A1 Speed: ${formatSpeed(size, duration)}`);
 }
 async function downloadCache(options, owner, repo, key, destinationPath) {
   const url = `${options.serverUrl}/cache/${owner}/${repo}/${key}`;
@@ -26253,14 +26275,13 @@ async function downloadCache(options, owner, repo, key, destinationPath) {
   const destStream = (0, import_fs.createWriteStream)(destinationPath);
   await (0, import_promises.pipeline)(res.body, destStream);
   const endTime = Date.now();
-  const duration = (endTime - startTime) / 1e3;
+  const duration = endTime - startTime;
   const size = fs2.statSync(destinationPath).size;
-  const speedMBs = size / 1024 / 1024 / duration;
   console.log(`\u2705 Download completed`);
   console.log(`\u{1F4E6} Size: ${formatSize(size)}`);
-  console.log(`\u23F1 Duration: ${duration.toFixed(2)} s`);
-  console.log(`\u26A1 Speed: ${speedMBs.toFixed(2)} MB/s
-`);
+  console.log(`\u23F1 Duration: ${formatDuration(duration)}`);
+  console.log(`\u26A1 Speed: ${formatSpeed(size, duration)}`);
   return true;
 }
@@ -26288,14 +26309,12 @@ async function createCompressedTar(key, paths, cmprss_lvl = 3) {
shell: "/bin/bash"
});
const endTime = Date.now();
const duration = (endTime - startTime) / 1e3;
const duration = endTime - startTime;
const size = fs3.statSync(archivePath).size;
const speedMBs = size / 1024 / 1024 / duration;
console.log(`\u2705 Archive created: ${archivePath}`);
console.log(`\u23F1 Duration: ${duration.toFixed(2)} s`);
console.log(`\u23F1 Duration: ${formatDuration(duration)}`);
console.log(`\u{1F4E6} Size: ${formatSize(size)}`);
console.log(`\u26A1 Speed: ${speedMBs.toFixed(2)} MB/s`);
console.log();
console.log(`\u26A1 Speed: ${formatSpeed(size, duration)}`);
return archivePath;
}
async function extractCompressedTar(archivePath, extractTo = "/") {
@@ -26314,15 +26333,14 @@ async function extractCompressedTar(archivePath, extractTo = "/") {
   const size = fs3.statSync(archivePath).size;
   const speedMBs = size / 1024 / 1024 / duration;
   console.log(`\u2705 Extraction completed`);
-  console.log(`\u23F1 Duration: ${duration.toFixed(2)} s`);
+  console.log(`\u23F1 Duration: ${formatDuration(duration)}`);
   console.log(`\u{1F4E6} Archive size: ${formatSize(size)}`);
-  console.log(`\u26A1 Speed: ${speedMBs.toFixed(2)} MB/s`);
-  console.log();
+  console.log(`\u26A1 Speed: ${formatSpeed(size, duration)}`);
 }
 // src/save.ts
 async function saveCache(key, paths, cmprss_lvl = 3) {
-  core.info(`Starting cache save for key: ${key}`);
+  core.info(`\u{1F4BE} Starting cache save for key: ${key}`);
   const startTime = Date.now();
   const archivePath = await createCompressedTar(key, paths, cmprss_lvl);
   const archiveSize = fs4.statSync(archivePath).size;
@@ -26345,7 +26363,7 @@ async function saveCache(key, paths, cmprss_lvl = 3) {
 var core2 = __toESM(require_core());
 var fs5 = __toESM(require("fs"));
 async function restoreCache(key) {
-  core2.info(`Starting cache restore for key: ${key}`);
+  core2.info(`\u{1F4E6} Starting cache restore for key: ${key}`);
   const startTime = Date.now();
   const zstPath = `/tmp/${key}.tar.zst`;
   const { owner, repo } = getRepoInfo();
@@ -26374,7 +26392,7 @@ async function restoreCache(key) {
 async function run() {
   try {
     if (core3.getState("isPost") === "true") {
-      core3.info("Post-job: Saving cache...");
+      core3.info("\u{1F9F9} Post-job: Saving cache...");
       const key = core3.getState("key");
       const paths = JSON.parse(core3.getState("paths") || "[]");
       const cmprss_lvl = parseInt(
@@ -26389,7 +26407,7 @@ async function run() {
       }
       await saveCache(key, paths, cmprss_lvl);
     } else {
-      core3.info("Pre-job: Restoring cache...");
+      core3.info("\u{1F680} Pre-job: Restoring cache...");
       const key = core3.getInput("key", { required: true });
       const paths = core3.getMultilineInput("paths", { required: true }).filter(
         (p) => p.trim() !== ""