Deploy version 1.3.0

dist/index.js (vendored)
@@ -30393,7 +30393,7 @@ function getRepoInfo() {
 }
 
 // src/cache.ts
-var fs2 = __toESM(require("fs"));
+var fs3 = __toESM(require("fs"));
 
 // node_modules/node-fetch/src/index.js
 var import_node_http2 = __toESM(require("node:http"), 1);
@@ -31740,12 +31740,129 @@ function sanitizeKey(key) {
   return key;
 }
 
-// src/cache.ts
-async function uploadCache(options, owner, repo, key, filePath) {
-  const url = `${options.serverUrl}/cache/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}/${encodeURIComponent(key)}`;
-  const fileStats = await fs2.promises.stat(filePath);
+// src/compress.ts
+var import_child_process = require("child_process");
+var import_util = require("util");
+var fs2 = __toESM(require("fs"));
+var path2 = __toESM(require("path"));
+var os2 = __toESM(require("os"));
+var import_fast_glob = __toESM(require_out4());
+var execAsync = (0, import_util.promisify)(import_child_process.exec);
+function hasGlob(p) {
+  return p.includes("**") || /[*?[\]{}()!]/.test(p);
+}
+async function resolvePaths(patterns) {
+  const out = /* @__PURE__ */ new Set();
+  for (const raw of patterns) {
+    const expanded = expandPath(raw);
+    if (hasGlob(expanded)) {
+      const matches = await (0, import_fast_glob.default)(expanded, {
+        absolute: true,
+        onlyFiles: false,
+        dot: false,
+        followSymbolicLinks: false,
+        unique: true
+      });
+      for (const m2 of matches) out.add(m2);
+    } else {
+      out.add(expanded);
+    }
+  }
+  const result = Array.from(out);
+  if (result.length === 0) {
+    throw new Error(
+      `No files matched for provided paths/patterns: ${patterns.join(", ")}`
+    );
+  }
+  return result;
+}
+async function createCompressedTar(key, paths, cmprss_lvl = 0) {
+  if (!paths || paths.length === 0) {
+    throw new Error("No paths specified for archiving.");
+  }
+  const tempDir = await fs2.promises.mkdtemp(path2.join(os2.tmpdir(), "cache-"));
+  const archivePath = path2.join(tempDir, `${sanitizeKey(key)}.tar.zst`);
+  const resolvedPaths = await resolvePaths(paths);
+  const quotedPaths = resolvedPaths.map((p) => `"${p}"`).join(" ");
+  const isCompressed = cmprss_lvl > 0;
+  const cmd = isCompressed ? `tar -cf - ${quotedPaths} | zstd -${cmprss_lvl} -o "${archivePath}"` : `tar -cf "${archivePath}" ${quotedPaths}`;
+  console.log(
+    isCompressed ? `
+\u{1F504} Compressing with zstd (Level: ${cmprss_lvl})...` : `
+\u{1F4E6} Creating uncompressed tar archive...`
+  );
+  const startTime = Date.now();
+  await execAsync(cmd, {
+    cwd: "/",
+    // Required for absolute paths
+    shell: "/bin/bash"
+  });
+  const endTime = Date.now();
+  const duration = endTime - startTime;
+  const size = fs2.statSync(archivePath).size;
+  console.log(`\u2705 Archive created: ${archivePath}`);
+  console.log(`\u23F1 Duration: ${formatDuration(duration)}`);
+  console.log(`\u{1F4E6} Size: ${formatSize(size)}`);
+  console.log(`\u26A1 Speed: ${formatSpeed(size, duration)}`);
+  return archivePath;
+}
+async function extractCompressedTar(archivePath, extractTo = "/") {
+  if (!fs2.existsSync(archivePath)) {
+    throw new Error(`Archive not found: ${archivePath}`);
+  }
+  const type = await detectArchiveType(archivePath);
+  if (type === "unknown") {
+    console.error(
+      `\u26A0\uFE0F Unknown archive type for ${archivePath}. Cannot extract.`
+    );
+    return;
+  }
+  const cmd = type === "zstd" ? `zstd -dc "${archivePath}" | tar -xf - -C "${extractTo}"` : `tar -xf "${archivePath}" -C "${extractTo}"`;
   console.log(`
+\u{1F4C2} Extracting archive: ${archivePath}`);
+  const startTime = Date.now();
+  await execAsync(cmd, {
+    shell: "/bin/bash"
+  });
+  const endTime = Date.now();
+  const duration = endTime - startTime;
+  const size = fs2.statSync(archivePath).size;
+  const speedMBs = size / 1024 / 1024 / duration;
+  console.log(`\u2705 Extraction completed`);
+  console.log(`\u23F1 Duration: ${formatDuration(duration)}`);
+  console.log(`\u{1F4E6} Archive size: ${formatSize(size)}`);
+  console.log(`\u26A1 Speed: ${formatSpeed(size, duration)}`);
+}
+async function detectArchiveType(path3) {
+  const { stdout } = await execAsync(`file --brief --mime-type "${path3}"`);
+  const type = stdout.trim();
+  if (type === "application/zstd") {
+    return "zstd";
+  }
+  if (type === "application/x-tar" || type === "application/x-ustar") {
+    return "tar";
+  }
+  const { stdout: fileOutput } = await execAsync(`file "${path3}"`);
+  if (/Zstandard compressed data/.test(fileOutput)) return "zstd";
+  if (/tar archive/.test(fileOutput)) return "tar";
+  return "unknown";
+}
+
+// src/cache.ts
+async function uploadCache(options, owner, repo, key, filePath, paths) {
+  const url = `${options.serverUrl}/cache/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}/${encodeURIComponent(key)}`;
+  const fileStats = await fs3.promises.stat(filePath);
+  if (paths && paths.length > 0) {
+    const resolvedPaths = await resolvePaths(paths);
+    console.log(`
 \u2B06\uFE0F Uploading cache: ${filePath}`);
+    if (resolvedPaths && resolvedPaths.length > 0) {
+      console.log(`\u{1F4C2} Paths:`);
+      for (const p of resolvedPaths) {
+        console.log(` \u2022 ${p}`);
+      }
+    }
+  }
   const startTime = Date.now();
   const stream = (0, import_fs.createReadStream)(filePath);
   const res = await fetch(url, {
@@ -31795,7 +31912,7 @@ async function downloadCache(options, owner, repo, key, destinationPath) {
   await (0, import_promises.pipeline)(res.body, destStream);
   const endTime = Date.now();
   const duration = endTime - startTime;
-  const size = fs2.statSync(destinationPath).size;
+  const size = fs3.statSync(destinationPath).size;
   console.log(`\u2705 Download completed`);
   console.log(`\u{1F4E6} Size: ${formatSize(size)}`);
   console.log(`\u23F1 Duration: ${formatDuration(duration)}`);
@@ -31803,114 +31920,6 @@ async function downloadCache(options, owner, repo, key, destinationPath) {
   return true;
 }
 
-// src/compress.ts
-var import_child_process = require("child_process");
-var import_util2 = require("util");
-var fs3 = __toESM(require("fs"));
-var path2 = __toESM(require("path"));
-var os2 = __toESM(require("os"));
-var import_fast_glob = __toESM(require_out4());
-var execAsync = (0, import_util2.promisify)(import_child_process.exec);
-function hasGlob(p) {
-  return p.includes("**") || /[*?[\]{}()!]/.test(p);
-}
-async function resolvePaths(patterns) {
-  const out = /* @__PURE__ */ new Set();
-  for (const raw of patterns) {
-    const expanded = expandPath(raw);
-    if (hasGlob(expanded)) {
-      const matches = await (0, import_fast_glob.default)(expanded, {
-        absolute: true,
-        onlyFiles: false,
-        dot: false,
-        followSymbolicLinks: false,
-        unique: true
-      });
-      for (const m2 of matches) out.add(m2);
-    } else {
-      out.add(expanded);
-    }
-  }
-  const result = Array.from(out);
-  if (result.length === 0) {
-    throw new Error(
-      `No files matched for provided paths/patterns: ${patterns.join(", ")}`
-    );
-  }
-  return result;
-}
-async function createCompressedTar(key, paths, cmprss_lvl = 0) {
-  if (!paths || paths.length === 0) {
-    throw new Error("No paths specified for archiving.");
-  }
-  const tempDir = await fs3.promises.mkdtemp(path2.join(os2.tmpdir(), "cache-"));
-  const archivePath = path2.join(tempDir, `${sanitizeKey(key)}.tar.zst`);
-  const resolvedPaths = await resolvePaths(paths);
-  const quotedPaths = resolvedPaths.map((p) => `"${p}"`).join(" ");
-  const isCompressed = cmprss_lvl > 0;
-  const cmd = isCompressed ? `tar -cf - ${quotedPaths} | zstd -${cmprss_lvl} -o "${archivePath}"` : `tar -cf "${archivePath}" ${quotedPaths}`;
-  console.log(
-    isCompressed ? `
-\u{1F504} Compressing with zstd (Level: ${cmprss_lvl})...` : `
-\u{1F4E6} Creating uncompressed tar archive...`
-  );
-  const startTime = Date.now();
-  await execAsync(cmd, {
-    cwd: "/",
-    // Required for absolute paths
-    shell: "/bin/bash"
-  });
-  const endTime = Date.now();
-  const duration = endTime - startTime;
-  const size = fs3.statSync(archivePath).size;
-  console.log(`\u2705 Archive created: ${archivePath}`);
-  console.log(`\u23F1 Duration: ${formatDuration(duration)}`);
-  console.log(`\u{1F4E6} Size: ${formatSize(size)}`);
-  console.log(`\u26A1 Speed: ${formatSpeed(size, duration)}`);
-  return archivePath;
-}
-async function extractCompressedTar(archivePath, extractTo = "/") {
-  if (!fs3.existsSync(archivePath)) {
-    throw new Error(`Archive not found: ${archivePath}`);
-  }
-  const type = await detectArchiveType(archivePath);
-  if (type === "unknown") {
-    console.error(
-      `\u26A0\uFE0F Unknown archive type for ${archivePath}. Cannot extract.`
-    );
-    return;
-  }
-  const cmd = type === "zstd" ? `zstd -dc "${archivePath}" | tar -xf - -C "${extractTo}"` : `tar -xf "${archivePath}" -C "${extractTo}"`;
-  console.log(`
-\u{1F4C2} Extracting archive: ${archivePath}`);
-  const startTime = Date.now();
-  await execAsync(cmd, {
-    shell: "/bin/bash"
-  });
-  const endTime = Date.now();
-  const duration = endTime - startTime;
-  const size = fs3.statSync(archivePath).size;
-  const speedMBs = size / 1024 / 1024 / duration;
-  console.log(`\u2705 Extraction completed`);
-  console.log(`\u23F1 Duration: ${formatDuration(duration)}`);
-  console.log(`\u{1F4E6} Archive size: ${formatSize(size)}`);
-  console.log(`\u26A1 Speed: ${formatSpeed(size, duration)}`);
-}
-async function detectArchiveType(path3) {
-  const { stdout } = await execAsync(`file --brief --mime-type "${path3}"`);
-  const type = stdout.trim();
-  if (type === "application/zstd") {
-    return "zstd";
-  }
-  if (type === "application/x-tar" || type === "application/x-ustar") {
-    return "tar";
-  }
-  const { stdout: fileOutput } = await execAsync(`file "${path3}"`);
-  if (/Zstandard compressed data/.test(fileOutput)) return "zstd";
-  if (/tar archive/.test(fileOutput)) return "tar";
-  return "unknown";
-}
-
 // src/save.ts
 async function saveCache(key, paths, cmprss_lvl = 3) {
   core2.info(`\u{1F4BE} Starting cache save for key: ${key}`);
@@ -31923,7 +31932,8 @@ async function saveCache(key, paths, cmprss_lvl = 3) {
     owner,
     repo,
     key,
-    archivePath
+    archivePath,
+    paths
   );
   const endTime = Date.now();
   const duration = endTime - startTime;