'use strict';

// ---------------------------------------------------------------------------
// NOTE(review): this is GENERATED build output (rollup-style CJS bundle of a
// TypeScript source — see the sourceMappingURL at the bottom and the
// `_interopDefaultLegacy` / `var _a, _b` downleveling patterns). Fix bugs in
// the upstream .ts source, not here; hand edits will be overwritten by the
// next build. Comments below document behavior for readers of this artifact.
// ---------------------------------------------------------------------------

Object.defineProperty(exports, '__esModule', { value: true });

var zodToJsonSchema = require('zod-to-json-schema');
var child_process = require('child_process');
var stream = require('stream');
var backendCommon = require('@backstage/backend-common');
var errors = require('@backstage/errors');
var fs = require('fs-extra');
var path = require('path');
var fs$1 = require('fs'); // plain node `fs` (used for promises API); `fs` above is fs-extra
var globby = require('globby');
var limiterFactory = require('p-limit');

// Wraps a CJS export so it can be consumed as if it had an ESM default export.
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

var zodToJsonSchema__default = /*#__PURE__*/_interopDefaultLegacy(zodToJsonSchema);
var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
var globby__default = /*#__PURE__*/_interopDefaultLegacy(globby);
var limiterFactory__default = /*#__PURE__*/_interopDefaultLegacy(limiterFactory);

/**
 * Normalizes a template action definition: if `schema.input` / `schema.output`
 * look like zod schemas (duck-typed via the presence of `safeParseAsync`),
 * they are converted to JSON Schema; otherwise they are passed through as-is.
 * Returns a shallow copy of `action` with the converted schemas.
 */
const createTemplateAction = (action) => {
  var _a, _b, _c, _d;
  // `(_a = action.schema) == null ? void 0 : _a.input` is downleveled
  // `action.schema?.input`; same pattern throughout this file.
  const inputSchema = ((_a = action.schema) == null ? void 0 : _a.input) && "safeParseAsync" in action.schema.input ? zodToJsonSchema__default["default"](action.schema.input) : (_b = action.schema) == null ? void 0 : _b.input;
  const outputSchema = ((_c = action.schema) == null ? void 0 : _c.output) && "safeParseAsync" in action.schema.output ? zodToJsonSchema__default["default"](action.schema.output) : (_d = action.schema) == null ? void 0 : _d.output;
  return { ...action, schema: { ...action.schema, input: inputSchema, output: outputSchema } };
};

/**
 * Spawns `command` with `args`, piping stdout/stderr into `logStream`
 * (defaults to a throwaway PassThrough). Resolves on exit code 0, rejects
 * with a spawn error or a non-zero-exit Error.
 *
 * NOTE(review): the local `process` shadows the global, and the data-callback
 * parameter `stream` shadows the `stream` module import (both harmless here,
 * as neither global is referenced inside this scope).
 * NOTE(review): assumes stdio pipes exist — would throw if callers passed
 * `stdio: 'ignore'` in spawnOptions; confirm callers never do.
 */
async function executeShellCommand(options) {
  const { command, args, options: spawnOptions, logStream = new stream.PassThrough() } = options;
  await new Promise((resolve, reject) => {
    const process = child_process.spawn(command, args, spawnOptions);
    process.stdout.on("data", (stream) => {
      logStream.write(stream);
    });
    process.stderr.on("data", (stream) => {
      logStream.write(stream);
    });
    process.on("error", (error) => {
      return reject(error);
    });
    process.on("close", (code) => {
      if (code !== 0) {
        return reject(
          new Error(`Command ${command} failed, exit code: ${code}`)
        );
      }
      return resolve();
    });
  });
}

/**
 * Fetches a directory tree into `outputPath`.
 * - Relative `fetchUrl` + `file://` base: copies from the local filesystem,
 *   constrained under the base dir via resolveSafeChildPath.
 * - Otherwise: resolves a read URL (see getReadUrl) and extracts the tree
 *   through the UrlReader's `readTree`.
 */
async function fetchContents(options) {
  const { reader, integrations, baseUrl, fetchUrl = ".", outputPath } = options;
  const fetchUrlIsAbsolute = isFetchUrlAbsolute(fetchUrl);
  if (!fetchUrlIsAbsolute && (baseUrl == null ? void 0 : baseUrl.startsWith("file://"))) {
    const basePath = baseUrl.slice("file://".length);
    const srcDir = backendCommon.resolveSafeChildPath(path__default["default"].dirname(basePath), fetchUrl);
    await fs__default["default"].copy(srcDir, outputPath);
  } else {
    const readUrl = getReadUrl(fetchUrl, baseUrl, integrations);
    const res = await reader.readTree(readUrl);
    await fs__default["default"].ensureDir(outputPath);
    await res.dir({ targetDir: outputPath });
  }
}

/**
 * Fetches a single file to `outputPath`; same local-vs-remote split as
 * fetchContents, but uses copyFile / readUrl instead of copy / readTree.
 *
 * NOTE(review): the remote branch writes `buffer.toString()` (UTF-8 decode),
 * so binary files fetched remotely would be corrupted — presumably only text
 * files are expected here; confirm against callers.
 */
async function fetchFile(options) {
  const { reader, integrations, baseUrl, fetchUrl = ".", outputPath } = options;
  const fetchUrlIsAbsolute = isFetchUrlAbsolute(fetchUrl);
  if (!fetchUrlIsAbsolute && (baseUrl == null ? void 0 : baseUrl.startsWith("file://"))) {
    const basePath = baseUrl.slice("file://".length);
    const src = backendCommon.resolveSafeChildPath(path__default["default"].dirname(basePath), fetchUrl);
    await fs__default["default"].copyFile(src, outputPath);
  } else {
    const readUrl = getReadUrl(fetchUrl, baseUrl, integrations);
    const res = await reader.readUrl(readUrl);
    await fs__default["default"].ensureDir(path__default["default"].dirname(outputPath));
    const buffer = await res.buffer();
    await fs__default["default"].outputFile(outputPath, buffer.toString());
  }
}

// True iff `fetchUrl` parses as an absolute URL (the WHATWG URL constructor
// throws on relative inputs).
function isFetchUrlAbsolute(fetchUrl) {
  let fetchUrlIsAbsolute = false;
  try {
    new URL(fetchUrl);
    fetchUrlIsAbsolute = true;
  } catch {
  }
  return fetchUrlIsAbsolute;
}

/**
 * Resolves the URL to read from: absolute `fetchUrl` wins; otherwise a
 * relative `fetchUrl` is resolved against `baseUrl` via the matching SCM
 * integration. Throws InputError when no integration matches or when the
 * URL is relative with no base to resolve against.
 */
function getReadUrl(fetchUrl, baseUrl, integrations) {
  if (isFetchUrlAbsolute(fetchUrl)) {
    return fetchUrl;
  } else if (baseUrl) {
    const integration = integrations.byUrl(baseUrl);
    if (!integration) {
      throw new errors.InputError(`No integration found for location ${baseUrl}`);
    }
    return integration.resolveUrl({ url: fetchUrl, base: baseUrl });
  }
  throw new errors.InputError(
    `Failed to fetch, template location could not be determined and the fetch URL is relative, ${fetchUrl}`
  );
}

/**
 * Initializes a fresh git repo at `dir`, commits everything, and pushes to
 * `remoteUrl` as "origin". Author/committer fall back to
 * "Scaffolder" / "scaffolder@backstage.io" when gitAuthorInfo omits them.
 * Returns `{ commitHash }`.
 */
async function initRepoAndPush(input) {
  var _a, _b;
  const { dir, remoteUrl, auth, logger, defaultBranch = "master", commitMessage = "Initial commit", gitAuthorInfo } = input;
  const git = backendCommon.Git.fromAuth({ ...auth, logger });
  await git.init({ dir, defaultBranch });
  await git.add({ dir, filepath: "." });
  const authorInfo = {
    // downleveled `gitAuthorInfo?.name ?? "Scaffolder"`
    name: (_a = gitAuthorInfo == null ? void 0 : gitAuthorInfo.name) != null ? _a : "Scaffolder",
    email: (_b = gitAuthorInfo == null ? void 0 : gitAuthorInfo.email) != null ? _b : "scaffolder@backstage.io"
  };
  const commitHash = await git.commit({ dir, message: commitMessage, author: authorInfo, committer: authorInfo });
  await git.addRemote({ dir, url: remoteUrl, remote: "origin" });
  await git.push({ dir, remote: "origin" });
  return { commitHash };
}

/**
 * Commits the working tree of an EXISTING repo at `dir` on `branch`
 * (fetch + checkout + add + commit) and pushes to origin, targeting
 * `remoteRef` when given, else `refs/heads/<branch>`. Same author fallback
 * as initRepoAndPush. Returns `{ commitHash }`.
 */
async function commitAndPushRepo(input) {
  var _a, _b;
  const { dir, auth, logger, commitMessage, gitAuthorInfo, branch = "master", remoteRef } = input;
  const git = backendCommon.Git.fromAuth({ ...auth, logger });
  await git.fetch({ dir });
  await git.checkout({ dir, ref: branch });
  await git.add({ dir, filepath: "." });
  const authorInfo = {
    name: (_a = gitAuthorInfo == null ? void 0 : gitAuthorInfo.name) != null ? _a : "Scaffolder",
    email: (_b = gitAuthorInfo == null ? void 0 : gitAuthorInfo.email) != null ? _b : "scaffolder@backstage.io"
  };
  const commitHash = await git.commit({ dir, message: commitMessage, author: authorInfo, committer: authorInfo });
  await git.push({ dir, remote: "origin", remoteRef: remoteRef != null ? remoteRef : `refs/heads/${branch}` });
  return { commitHash };
}

/**
 * Joins an optional `sourcePath` under `workspacePath`, defending against
 * path traversal twice: leading `../` sequences are stripped, then the
 * result is verified to still be a child of the workspace.
 */
const getRepoSourceDirectory = (workspacePath, sourcePath) => {
  if (sourcePath) {
    const safeSuffix = path.normalize(sourcePath).replace(
      /^(\.\.(\/|\\|$))+/,
      ""
    );
    const path$1 = path.join(workspacePath, safeSuffix);
    if (!backendCommon.isChildPath(workspacePath, path$1)) {
      throw new Error("Invalid source path");
    }
    return path$1;
  }
  return workspacePath;
};

/**
 * Parses a scaffolder `repoUrl` of the form
 * `host?owner=..&repo=..&organization=..&workspace=..&project=..`
 * (prefixed with https:// to make it URL-parseable) and validates the
 * query params required by the matched integration type.
 * Throws InputError on malformed URLs, unknown hosts, or missing params.
 * Returns { host, owner, repo, organization, workspace, project }
 * (missing optional params come back as undefined; `repo` may be null if
 * a branch's checks didn't require it).
 */
const parseRepoUrl = (repoUrl, integrations) => {
  var _a, _b, _c, _d, _e;
  let parsed;
  try {
    parsed = new URL(`https://${repoUrl}`);
  } catch (error) {
    throw new errors.InputError(
      `Invalid repo URL passed to publisher, got ${repoUrl}, ${error}`
    );
  }
  const host = parsed.host;
  // searchParams.get returns null when absent; normalize to undefined.
  const owner = (_a = parsed.searchParams.get("owner")) != null ? _a : void 0;
  const organization = (_b = parsed.searchParams.get("organization")) != null ? _b : void 0;
  const workspace = (_c = parsed.searchParams.get("workspace")) != null ? _c : void 0;
  const project = (_d = parsed.searchParams.get("project")) != null ? _d : void 0;
  const type = (_e = integrations.byHost(host)) == null ? void 0 : _e.type;
  if (!type) {
    throw new errors.InputError(
      `No matching integration configuration for host ${host}, please check your integrations config`
    );
  }
  const repo = parsed.searchParams.get("repo");
  switch (type) {
    case "bitbucket": {
      // Hosted bitbucket.org additionally requires a workspace.
      if (host === "www.bitbucket.org") {
        checkRequiredParams(parsed, "workspace");
      }
      checkRequiredParams(parsed, "project", "repo");
      break;
    }
    case "gitlab": {
      // GitLab accepts either a project ID, or owner+repo.
      if (!project) {
        checkRequiredParams(parsed, "owner", "repo");
      }
      break;
    }
    case "gerrit": {
      checkRequiredParams(parsed, "repo");
      break;
    }
    default: {
      checkRequiredParams(parsed, "repo", "owner");
      break;
    }
  }
  return { host, owner, repo, organization, workspace, project };
};

// Throws InputError naming the first listed query param missing from repoUrl.
function checkRequiredParams(repoUrl, ...params) {
  for (let i = 0; i < params.length; i++) {
    if (!repoUrl.searchParams.get(params[i])) {
      throw new errors.InputError(
        `Invalid repo URL passed to publisher: ${repoUrl.toString()}, missing ${params[i]}`
      );
    }
  }
}

const DEFAULT_GLOB_PATTERNS = ["./**", "!.git"];

// True when any execute bit is set: 73 === 0o111 (user|group|other execute).
const isExecutable = (fileMode) => {
  if (!fileMode) {
    return false;
  }
  const executeBitMask = 73;
  const res = fileMode & executeBitMask;
  return res > 0;
};

// Promise-aware Array#filter: evaluates the async predicate over all items
// in parallel, then filters by the resolved booleans.
async function asyncFilter(array, callback) {
  const filterMap = await Promise.all(array.map(callback));
  return array.filter((_value, index) => filterMap[index]);
}

/**
 * Globs `sourcePath` (default patterns: everything except .git) and returns
 * entries of shape { path, content: Buffer, executable, symlink }.
 * Keeps regular files, live symlinks (stat throws ENOENT → broken, kept as
 * symlink entries whose content is the link target via readlink), and drops
 * directories. File reads are throttled to 10 concurrent via p-limit.
 */
async function serializeDirectoryContents(sourcePath, options) {
  var _a;
  const paths = await globby__default["default"]((_a = options == null ? void 0 : options.globPatterns) != null ? _a : DEFAULT_GLOB_PATTERNS, {
    cwd: sourcePath,
    dot: true,
    gitignore: options == null ? void 0 : options.gitignore,
    followSymbolicLinks: false,
    // In order to pick up 'broken' symlinks, we oxymoronically request files AND folders yet we filter out folders
    // This is because broken symlinks aren't classed as files so we need to glob everything
    onlyFiles: false,
    objectMode: true,
    stats: true
  });
  const limiter = limiterFactory__default["default"](10);
  const valid = await asyncFilter(paths, async ({ dirent, path }) => {
    if (dirent.isDirectory()) return false;
    if (!dirent.isSymbolicLink()) return true;
    const safePath = backendCommon.resolveSafeChildPath(sourcePath, path);
    // A symlink whose target stats cleanly is dropped here (its target file
    // is picked up separately); only broken links (ENOENT) are kept.
    try {
      await fs$1.promises.stat(safePath);
      return false;
    } catch (e) {
      return errors.isError(e) && e.code === "ENOENT";
    }
  });
  return Promise.all(
    valid.map(async ({ dirent, path, stats }) => ({
      path,
      content: await limiter(async () => {
        const absFilePath = backendCommon.resolveSafeChildPath(sourcePath, path);
        if (dirent.isSymbolicLink()) {
          // 'buffer' encoding → link target returned as a Buffer.
          return fs$1.promises.readlink(absFilePath, "buffer");
        }
        return fs$1.promises.readFile(absFilePath);
      }),
      executable: isExecutable(stats == null ? void 0 : stats.mode),
      symlink: dirent.isSymbolicLink()
    }))
  );
}

/**
 * Writes serialized entries back under `targetPath`, creating parent dirs
 * and confining every path to the target via resolveSafeChildPath.
 *
 * NOTE(review): the `executable` and `symlink` flags produced by
 * serializeDirectoryContents are ignored here — every entry is written as a
 * plain file with default mode; confirm whether that is intended.
 */
async function deserializeDirectoryContents(targetPath, files) {
  for (const file of files) {
    const filePath = backendCommon.resolveSafeChildPath(targetPath, file.path);
    await fs__default["default"].ensureDir(path.dirname(filePath));
    await fs__default["default"].writeFile(filePath, file.content);
  }
}

exports.commitAndPushRepo = commitAndPushRepo;
exports.createTemplateAction = createTemplateAction;
exports.deserializeDirectoryContents = deserializeDirectoryContents;
exports.executeShellCommand = executeShellCommand;
exports.fetchContents = fetchContents;
exports.fetchFile = fetchFile;
exports.getRepoSourceDirectory = getRepoSourceDirectory;
exports.initRepoAndPush = initRepoAndPush;
exports.parseRepoUrl = parseRepoUrl;
exports.serializeDirectoryContents = serializeDirectoryContents;
//# sourceMappingURL=index.cjs.js.map