'use strict'; var catalogModel = require('@backstage/catalog-model'); var config = require('@backstage/config'); var errors = require('@backstage/errors'); var integration = require('@backstage/integration'); var pluginScaffolderCommon = require('@backstage/plugin-scaffolder-common'); var alpha = require('@backstage/plugin-scaffolder-common/alpha'); var express = require('express'); var Router = require('express-promise-router'); var jsonschema = require('jsonschema'); var zod = require('zod'); var pluginScaffolderNode = require('@backstage/plugin-scaffolder-node'); var yaml = require('yaml'); var fs = require('fs-extra'); var backendCommon = require('@backstage/backend-common'); var path = require('path'); var luxon = require('luxon'); var globby = require('globby'); var isbinaryfile = require('isbinaryfile'); var isolatedVm = require('isolated-vm'); var get = require('lodash/get'); var github = require('@backstage/plugin-scaffolder-backend-module-github'); var azure = require('@backstage/plugin-scaffolder-backend-module-azure'); var bitbucket = require('@backstage/plugin-scaffolder-backend-module-bitbucket'); var gerrit = require('@backstage/plugin-scaffolder-backend-module-gerrit'); var gitlab = require('@backstage/plugin-scaffolder-backend-module-gitlab'); var uuid = require('uuid'); var ObservableImpl = require('zen-observable'); var PQueue = require('p-queue'); var winston = require('winston'); var nunjucks = require('nunjucks'); var stream = require('stream'); var lodash = require('lodash'); var pluginPermissionNode = require('@backstage/plugin-permission-node'); var promClient = require('prom-client'); var pluginPermissionCommon = require('@backstage/plugin-permission-common'); var url = require('url'); var os = require('os'); function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } function _interopNamespace(e) { if (e && e.__esModule) return e; var n = Object.create(null); if (e) { Object.keys(e).forEach(function (k) { if (k !== 'default') { var d = Object.getOwnPropertyDescriptor(e, k); Object.defineProperty(n, k, d.get ? d : { enumerable: true, get: function () { return e[k]; } }); } }); } n["default"] = e; return Object.freeze(n); } var express__default = /*#__PURE__*/_interopDefaultLegacy(express); var Router__default = /*#__PURE__*/_interopDefaultLegacy(Router); var yaml__default = /*#__PURE__*/_interopDefaultLegacy(yaml); var yaml__namespace = /*#__PURE__*/_interopNamespace(yaml); var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs); var path__default = /*#__PURE__*/_interopDefaultLegacy(path); var globby__default = /*#__PURE__*/_interopDefaultLegacy(globby); var get__default = /*#__PURE__*/_interopDefaultLegacy(get); var ObservableImpl__default = /*#__PURE__*/_interopDefaultLegacy(ObservableImpl); var PQueue__default = /*#__PURE__*/_interopDefaultLegacy(PQueue); var winston__namespace = /*#__PURE__*/_interopNamespace(winston); var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks); var os__default = /*#__PURE__*/_interopDefaultLegacy(os); const examples$9 = [ { description: "Register with the catalog", example: yaml__default["default"].stringify({ steps: [ { action: "catalog:register", id: "register-with-catalog", name: "Register with the catalog", input: { catalogInfoUrl: "http://github.com/backstage/backstage/blob/master/catalog-info.yaml" } } ] }) } ]; const id$4 = "catalog:register"; function createCatalogRegisterAction(options) { const { catalogClient, integrations } = options; return pluginScaffolderNode.createTemplateAction({ id: id$4, description: "Registers entities from a catalog descriptor file in the workspace into the software catalog.", examples: examples$9, schema: { input: { oneOf: [ { type: "object", required: ["catalogInfoUrl"], properties: { catalogInfoUrl: { title: "Catalog Info URL", description: "An absolute URL pointing to the catalog info file location", type: "string" }, optional: { title: "Optional", description: "Permit the registered location to optionally exist. Default: false", type: "boolean" } } }, { type: "object", required: ["repoContentsUrl"], properties: { repoContentsUrl: { title: "Repository Contents URL", description: "An absolute URL pointing to the root of a repository directory tree", type: "string" }, catalogInfoPath: { title: "Fetch URL", description: "A relative path from the repo root pointing to the catalog info file, defaults to /catalog-info.yaml", type: "string" }, optional: { title: "Optional", description: "Permit the registered location to optionally exist. Default: false", type: "boolean" } } } ] }, output: { type: "object", required: ["catalogInfoUrl"], properties: { entityRef: { type: "string" }, catalogInfoUrl: { type: "string" } } } }, async handler(ctx) { var _a, _b; const { input } = ctx; let catalogInfoUrl; if ("catalogInfoUrl" in input) { catalogInfoUrl = input.catalogInfoUrl; } else { const { repoContentsUrl, catalogInfoPath = "/catalog-info.yaml" } = input; const integration = integrations.byUrl(repoContentsUrl); if (!integration) { throw new errors.InputError( `No integration found for host ${repoContentsUrl}` ); } catalogInfoUrl = integration.resolveUrl({ base: repoContentsUrl, url: catalogInfoPath }); } ctx.logger.info(`Registering ${catalogInfoUrl} in the catalog`); try { await catalogClient.addLocation( { type: "url", target: catalogInfoUrl }, ((_a = ctx.secrets) == null ? void 0 : _a.backstageToken) ? { token: ctx.secrets.backstageToken } : {} ); } catch (e) { if (!input.optional) { throw e; } } try { const result = await catalogClient.addLocation( { dryRun: true, type: "url", target: catalogInfoUrl }, ((_b = ctx.secrets) == null ? void 0 : _b.backstageToken) ? { token: ctx.secrets.backstageToken } : {} ); if (result.entities.length) { const { entities } = result; let entity; entity = entities.find( (e) => !e.metadata.name.startsWith("generated-") && e.kind === "Component" ); if (!entity) { entity = entities.find( (e) => !e.metadata.name.startsWith("generated-") ); } if (!entity) { entity = entities[0]; } ctx.output("entityRef", catalogModel.stringifyEntityRef(entity)); } } catch (e) { if (!input.optional) { throw e; } } ctx.output("catalogInfoUrl", catalogInfoUrl); } }); } const examples$8 = [ { description: "Write a catalog yaml file", example: yaml__namespace.stringify({ steps: [ { action: "catalog:write", id: "create-catalog-info-file", name: "Create catalog file", input: { entity: { apiVersion: "backstage.io/v1alpha1", kind: "Component", metadata: { name: "test", annotations: {} }, spec: { type: "service", lifecycle: "production", owner: "default/owner" } } } } ] }) } ]; const id$3 = "catalog:write"; function createCatalogWriteAction() { return pluginScaffolderNode.createTemplateAction({ id: id$3, description: "Writes the catalog-info.yaml for your template", schema: { input: zod.z.object({ filePath: zod.z.string().optional().describe("Defaults to catalog-info.yaml"), // TODO: this should reference an zod entity validator if it existed. entity: zod.z.record(zod.z.any()).describe( "You can provide the same values used in the Entity schema." ) }) }, examples: examples$8, supportsDryRun: true, async handler(ctx) { ctx.logStream.write(`Writing catalog-info.yaml`); const { filePath, entity } = ctx.input; const path = filePath != null ? filePath : "catalog-info.yaml"; await fs__default["default"].writeFile( backendCommon.resolveSafeChildPath(ctx.workspacePath, path), yaml__namespace.stringify(entity) ); } }); } const examples$7 = [ { description: "Fetch entity by reference", example: yaml__default["default"].stringify({ steps: [ { action: "catalog:fetch", id: "fetch", name: "Fetch catalog entity", input: { entityRef: "component:default/name" } } ] }) }, { description: "Fetch multiple entities by reference", example: yaml__default["default"].stringify({ steps: [ { action: "catalog:fetch", id: "fetchMultiple", name: "Fetch catalog entities", input: { entityRefs: ["component:default/name"] } } ] }) } ]; const id$2 = "catalog:fetch"; function createFetchCatalogEntityAction(options) { const { catalogClient } = options; return pluginScaffolderNode.createTemplateAction({ id: id$2, description: "Returns entity or entities from the catalog by entity reference(s)", examples: examples$7, supportsDryRun: true, schema: { input: zod.z.object({ entityRef: zod.z.string({ description: "Entity reference of the entity to get" }).optional(), entityRefs: zod.z.array(zod.z.string(), { description: "Entity references of the entities to get" }).optional(), optional: zod.z.boolean({ description: "Allow the entity or entities to optionally exist. Default: false" }).optional(), defaultKind: zod.z.string({ description: "The default kind" }).optional(), defaultNamespace: zod.z.string({ description: "The default namespace" }).optional() }), output: zod.z.object({ entity: zod.z.any({ description: "Object containing same values used in the Entity schema. Only when used with `entityRef` parameter." }).optional(), entities: zod.z.array( zod.z.any({ description: "Array containing objects with same values used in the Entity schema. Only when used with `entityRefs` parameter." }) ).optional() }) }, async handler(ctx) { var _a, _b; const { entityRef, entityRefs, optional, defaultKind, defaultNamespace } = ctx.input; if (!entityRef && !entityRefs) { if (optional) { return; } throw new Error("Missing entity reference or references"); } if (entityRef) { const entity = await catalogClient.getEntityByRef( catalogModel.stringifyEntityRef( catalogModel.parseEntityRef(entityRef, { defaultKind, defaultNamespace }) ), { token: (_a = ctx.secrets) == null ? void 0 : _a.backstageToken } ); if (!entity && !optional) { throw new Error(`Entity ${entityRef} not found`); } ctx.output("entity", entity != null ? entity : null); } if (entityRefs) { const entities = await catalogClient.getEntitiesByRefs( { entityRefs: entityRefs.map( (ref) => catalogModel.stringifyEntityRef( catalogModel.parseEntityRef(ref, { defaultKind, defaultNamespace }) ) ) }, { token: (_b = ctx.secrets) == null ? void 0 : _b.backstageToken } ); const finalEntities = entities.items.map((e, i) => { if (!e && !optional) { throw new Error(`Entity ${entityRefs[i]} not found`); } return e != null ? e : null; }); ctx.output("entities", finalEntities); } } }); } const examples$6 = [ { description: "Write a debug message", example: yaml__default["default"].stringify({ steps: [ { action: "debug:log", id: "write-debug-line", name: 'Write "Hello Backstage!" log line', input: { message: "Hello Backstage!" } } ] }) }, { description: "List the workspace directory", example: yaml__default["default"].stringify({ steps: [ { action: "debug:log", id: "write-workspace-directory", name: "List the workspace directory", input: { listWorkspace: true } } ] }) } ]; const id$1 = "debug:log"; function createDebugLogAction() { return pluginScaffolderNode.createTemplateAction({ id: id$1, description: "Writes a message into the log or lists all files in the workspace.", examples: examples$6, schema: { input: { type: "object", properties: { message: { title: "Message to output.", type: "string" }, listWorkspace: { title: "List all files in the workspace, if true.", type: "boolean" }, extra: { title: "Extra info" } } } }, supportsDryRun: true, async handler(ctx) { var _a, _b; ctx.logger.info(JSON.stringify(ctx.input, null, 2)); if ((_a = ctx.input) == null ? void 0 : _a.message) { ctx.logStream.write(ctx.input.message); } if ((_b = ctx.input) == null ? void 0 : _b.listWorkspace) { const files = await recursiveReadDir(ctx.workspacePath); ctx.logStream.write( `Workspace: ${files.map((f) => ` - ${path.relative(ctx.workspacePath, f)}`).join("\n")}` ); } } }); } async function recursiveReadDir(dir) { const subdirs = await fs.readdir(dir); const files = await Promise.all( subdirs.map(async (subdir) => { const res = path.join(dir, subdir); return (await fs.stat(res)).isDirectory() ? recursiveReadDir(res) : [res]; }) ); return files.reduce((a, f) => a.concat(f), []); } const examples$5 = [ { description: "Waiting for 50 milliseconds", example: yaml__default["default"].stringify({ steps: [ { action: "debug:wait", id: "wait-milliseconds", name: "Waiting for 50 milliseconds", input: { milliseconds: 50 } } ] }) }, { description: "Waiting for 5 seconds", example: yaml__default["default"].stringify({ steps: [ { action: "debug:wait", id: "wait-5sec", name: "Waiting for 5 seconds", input: { seconds: 5 } } ] }) }, { description: "Waiting for 1 minutes", example: yaml__default["default"].stringify({ steps: [ { action: "debug:wait", id: "wait-1min", name: "Waiting for 1 minutes", input: { minutes: 1 } } ] }) } ]; const id = "debug:wait"; const MAX_WAIT_TIME_IN_ISO = "T00:00:30"; function createWaitAction(options) { const toDuration = (maxWaitTime) => { if (maxWaitTime) { if (maxWaitTime instanceof luxon.Duration) { return maxWaitTime; } return luxon.Duration.fromObject(maxWaitTime); } return luxon.Duration.fromISOTime(MAX_WAIT_TIME_IN_ISO); }; return pluginScaffolderNode.createTemplateAction({ id, description: "Waits for a certain period of time.", examples: examples$5, schema: { input: { type: "object", properties: { minutes: { title: "Waiting period in minutes.", type: "number" }, seconds: { title: "Waiting period in seconds.", type: "number" }, milliseconds: { title: "Waiting period in milliseconds.", type: "number" } } } }, async handler(ctx) { const delayTime = luxon.Duration.fromObject(ctx.input); const maxWait = toDuration(options == null ? void 0 : options.maxWaitTime); if (delayTime.minus(maxWait).toMillis() > 0) { throw new Error( `Waiting duration is longer than the maximum threshold of ${maxWait.toHuman()}` ); } await new Promise((resolve) => { var _a; const controller = new AbortController(); const timeoutHandle = setTimeout(abort, delayTime.toMillis()); (_a = ctx.signal) == null ? void 0 : _a.addEventListener("abort", abort); function abort() { var _a2; (_a2 = ctx.signal) == null ? void 0 : _a2.removeEventListener("abort", abort); clearTimeout(timeoutHandle); controller.abort(); resolve("finished"); } }); } }); } const examples$4 = [ { description: "Downloads content and places it in the workspace.", example: yaml__default["default"].stringify({ steps: [ { action: "fetch:plain", id: "fetch-plain", name: "Fetch plain", input: { url: "https://github.com/backstage/community/tree/main/backstage-community-sessions/assets" } } ] }) }, { description: "Optionally, if you would prefer the data to be downloaded to a subdirectory in the workspace you may specify the \u2018targetPath\u2019 input option.", example: yaml__default["default"].stringify({ steps: [ { action: "fetch:plain", id: "fetch-plain", name: "Fetch plain", input: { url: "https://github.com/backstage/community/tree/main/backstage-community-sessions/assets", targetPath: "fetched-data" } } ] }) } ]; const ACTION_ID = "fetch:plain"; function createFetchPlainAction(options) { const { reader, integrations } = options; return pluginScaffolderNode.createTemplateAction({ id: ACTION_ID, examples: examples$4, description: "Downloads content and places it in the workspace, or optionally in a subdirectory specified by the `targetPath` input option.", schema: { input: { type: "object", required: ["url"], properties: { url: { title: "Fetch URL", description: "Relative path or absolute URL pointing to the directory tree to fetch", type: "string" }, targetPath: { title: "Target Path", description: "Target path within the working directory to download the contents to.", type: "string" } } } }, supportsDryRun: true, async handler(ctx) { var _a, _b; ctx.logger.info("Fetching plain content from remote URL"); const targetPath = (_a = ctx.input.targetPath) != null ? _a : "./"; const outputPath = backendCommon.resolveSafeChildPath(ctx.workspacePath, targetPath); await pluginScaffolderNode.fetchContents({ reader, integrations, baseUrl: (_b = ctx.templateInfo) == null ? void 0 : _b.baseUrl, fetchUrl: ctx.input.url, outputPath }); } }); } const examples$3 = [ { description: "Downloads a file and places it in the workspace.", example: yaml__default["default"].stringify({ steps: [ { action: "fetch:plain:file", id: "fetch-plain-file", name: "Fetch plain file", input: { url: "https://github.com/backstage/community/tree/main/backstage-community-sessions/assets/Backstage%20Community%20Sessions.png", targetPath: "target-path" } } ] }) } ]; function createFetchPlainFileAction(options) { const { reader, integrations } = options; return pluginScaffolderNode.createTemplateAction({ id: "fetch:plain:file", description: "Downloads single file and places it in the workspace.", examples: examples$3, schema: { input: { type: "object", required: ["url", "targetPath"], properties: { url: { title: "Fetch URL", description: "Relative path or absolute URL pointing to the single file to fetch.", type: "string" }, targetPath: { title: "Target Path", description: "Target path within the working directory to download the file as.", type: "string" } } } }, supportsDryRun: true, async handler(ctx) { var _a; ctx.logger.info("Fetching plain content from remote URL"); const outputPath = backendCommon.resolveSafeChildPath( ctx.workspacePath, ctx.input.targetPath ); await pluginScaffolderNode.fetchFile({ reader, integrations, baseUrl: (_a = ctx.templateInfo) == null ? void 0 : _a.baseUrl, fetchUrl: ctx.input.url, outputPath }); } }); } const mkScript = (nunjucksSource) => ` const { render, renderCompat } = (() => { const module = {}; const process = { env: {} }; const require = (pkg) => { if (pkg === 'events') { return function (){}; }}; ${nunjucksSource} const env = module.exports.configure({ autoescape: false, tags: { variableStart: '\${{', variableEnd: '}}', }, }); const compatEnv = module.exports.configure({ autoescape: false, tags: { variableStart: '{{', variableEnd: '}}', }, }); compatEnv.addFilter('jsonify', compatEnv.getFilter('dump')); for (const name of JSON.parse(availableTemplateFilters)) { env.addFilter(name, (...args) => JSON.parse(callFilter(name, args))); } for (const [name, value] of Object.entries(JSON.parse(availableTemplateGlobals))) { env.addGlobal(name, value); } for (const name of JSON.parse(availableTemplateCallbacks)) { env.addGlobal(name, (...args) => JSON.parse(callGlobal(name, args))); } let uninstallCompat = undefined; function render(str, values) { try { if (uninstallCompat) { uninstallCompat(); uninstallCompat = undefined; } return env.renderString(str, JSON.parse(values)); } catch (error) { // Make sure errors don't leak anything throw new Error(String(error.message)); } } function renderCompat(str, values) { try { if (!uninstallCompat) { uninstallCompat = module.exports.installJinjaCompat(); } return compatEnv.renderString(str, JSON.parse(values)); } catch (error) { // Make sure errors don't leak anything throw new Error(String(error.message)); } } return { render, renderCompat }; })(); `; class SecureTemplater { static async loadRenderer(options = {}) { const { cookiecutterCompat, templateFilters = {}, templateGlobals = {} } = options; const isolate = new isolatedVm.Isolate({ memoryLimit: 128 }); const context = await isolate.createContext(); const contextGlobal = context.global; const nunjucksSource = await fs__default["default"].readFile( backendCommon.resolvePackagePath( "@backstage/plugin-scaffolder-backend", "assets/nunjucks.js.txt" ), "utf-8" ); const nunjucksScript = await isolate.compileScript( mkScript(nunjucksSource) ); const availableFilters = Object.keys(templateFilters); await contextGlobal.set( "availableTemplateFilters", JSON.stringify(availableFilters) ); const globalCallbacks = []; const globalValues = {}; for (const [name, value] of Object.entries(templateGlobals)) { if (typeof value === "function") { globalCallbacks.push(name); } else { globalValues[name] = value; } } await contextGlobal.set( "availableTemplateGlobals", JSON.stringify(globalValues) ); await contextGlobal.set( "availableTemplateCallbacks", JSON.stringify(globalCallbacks) ); await contextGlobal.set( "callFilter", (filterName, args) => { if (!Object.hasOwn(templateFilters, filterName)) { return ""; } return JSON.stringify(templateFilters[filterName](...args)); } ); await contextGlobal.set( "callGlobal", (globalName, args) => { if (!Object.hasOwn(templateGlobals, globalName)) { return ""; } const global = templateGlobals[globalName]; if (typeof global !== "function") { return ""; } return JSON.stringify(global(...args)); } ); await nunjucksScript.run(context); const render = (template, values) => { if (!context) { throw new Error("SecureTemplater has not been initialized"); } contextGlobal.setSync("templateStr", String(template)); contextGlobal.setSync("templateValues", JSON.stringify(values)); if (cookiecutterCompat) { return context.evalSync(`renderCompat(templateStr, templateValues)`); } return context.evalSync(`render(templateStr, templateValues)`); }; return render; } } const createDefaultFilters = ({ integrations }) => { return { parseRepoUrl: (url) => pluginScaffolderNode.parseRepoUrl(url, integrations), parseEntityRef: (ref, context) => catalogModel.parseEntityRef(ref, context), pick: (obj, key) => get__default["default"](obj, key), projectSlug: (repoUrl) => { const { owner, repo } = pluginScaffolderNode.parseRepoUrl(repoUrl, integrations); return `${owner}/${repo}`; } }; }; const examples$2 = [ { description: "Downloads a skelaton directory that lives alongside the template file and fill it out with values.", example: yaml__default["default"].stringify({ steps: [ { action: "fetch:template", id: "fetch-template", name: "Fetch template", input: { url: "./skeleton", targetPath: "./target", values: { name: "test-project", count: 1234, itemList: ["first", "second", "third"], showDummyFile: false } } } ] }) } ]; function createFetchTemplateAction(options) { const { reader, integrations, additionalTemplateFilters, additionalTemplateGlobals } = options; const defaultTemplateFilters = createDefaultFilters({ integrations }); return pluginScaffolderNode.createTemplateAction({ id: "fetch:template", description: "Downloads a skeleton, templates variables into file and directory names and content, and places the result in the workspace, or optionally in a subdirectory specified by the `targetPath` input option.", examples: examples$2, schema: { input: { type: "object", required: ["url"], properties: { url: { title: "Fetch URL", description: "Relative path or absolute URL pointing to the directory tree to fetch", type: "string" }, targetPath: { title: "Target Path", description: "Target path within the working directory to download the contents to. Defaults to the working directory root.", type: "string" }, values: { title: "Template Values", description: "Values to pass on to the templating engine", type: "object" }, copyWithoutRender: { title: "[Deprecated] Copy Without Render", description: "An array of glob patterns. Any files or directories which match are copied without being processed as templates.", type: "array", items: { type: "string" } }, copyWithoutTemplating: { title: "Copy Without Templating", description: "An array of glob patterns. Contents of matched files or directories are copied without being processed, but paths are subject to rendering.", type: "array", items: { type: "string" } }, cookiecutterCompat: { title: "Cookiecutter compatibility mode", description: "Enable features to maximise compatibility with templates built for fetch:cookiecutter", type: "boolean" }, templateFileExtension: { title: "Template File Extension", description: "If set, only files with the given extension will be templated. If set to `true`, the default extension `.njk` is used.", type: ["string", "boolean"] }, replace: { title: "Replace files", description: "If set, replace files in targetPath instead of skipping existing ones.", type: "boolean" } } } }, supportsDryRun: true, async handler(ctx) { var _a, _b; ctx.logger.info("Fetching template content from remote URL"); const workDir = await ctx.createTemporaryDirectory(); const templateDir = backendCommon.resolveSafeChildPath(workDir, "template"); const targetPath = (_a = ctx.input.targetPath) != null ? _a : "./"; const outputDir = backendCommon.resolveSafeChildPath(ctx.workspacePath, targetPath); if (ctx.input.copyWithoutRender && ctx.input.copyWithoutTemplating) { throw new errors.InputError( "Fetch action input copyWithoutRender and copyWithoutTemplating can not be used at the same time" ); } let copyOnlyPatterns; let renderFilename; if (ctx.input.copyWithoutRender) { ctx.logger.warn( "[Deprecated] copyWithoutRender is deprecated Please use copyWithoutTemplating instead." ); copyOnlyPatterns = ctx.input.copyWithoutRender; renderFilename = false; } else { copyOnlyPatterns = ctx.input.copyWithoutTemplating; renderFilename = true; } if (copyOnlyPatterns && !Array.isArray(copyOnlyPatterns)) { throw new errors.InputError( "Fetch action input copyWithoutRender/copyWithoutTemplating must be an Array" ); } if (ctx.input.templateFileExtension && (copyOnlyPatterns || ctx.input.cookiecutterCompat)) { throw new errors.InputError( "Fetch action input extension incompatible with copyWithoutRender/copyWithoutTemplating and cookiecutterCompat" ); } let extension = false; if (ctx.input.templateFileExtension) { extension = ctx.input.templateFileExtension === true ? ".njk" : ctx.input.templateFileExtension; if (!extension.startsWith(".")) { extension = `.${extension}`; } } await pluginScaffolderNode.fetchContents({ reader, integrations, baseUrl: (_b = ctx.templateInfo) == null ? void 0 : _b.baseUrl, fetchUrl: ctx.input.url, outputPath: templateDir }); ctx.logger.info("Listing files and directories in template"); const allEntriesInTemplate = await globby__default["default"](`**/*`, { cwd: templateDir, dot: true, onlyFiles: false, markDirectories: true, followSymbolicLinks: false }); const nonTemplatedEntries = new Set( await globby__default["default"](copyOnlyPatterns || [], { cwd: templateDir, dot: true, onlyFiles: false, markDirectories: true, followSymbolicLinks: false }) ); const { cookiecutterCompat, values } = ctx.input; const context = { [cookiecutterCompat ? "cookiecutter" : "values"]: values }; ctx.logger.info( `Processing ${allEntriesInTemplate.length} template files/directories with input values`, ctx.input.values ); const renderTemplate = await SecureTemplater.loadRenderer({ cookiecutterCompat: ctx.input.cookiecutterCompat, templateFilters: { ...defaultTemplateFilters, ...additionalTemplateFilters }, templateGlobals: additionalTemplateGlobals }); for (const location of allEntriesInTemplate) { let renderContents; let localOutputPath = location; if (extension) { renderContents = path.extname(localOutputPath) === extension; if (renderContents) { localOutputPath = localOutputPath.slice(0, -extension.length); } localOutputPath = renderTemplate(localOutputPath, context); } else { renderContents = !nonTemplatedEntries.has(location); if (renderFilename) { localOutputPath = renderTemplate(localOutputPath, context); } else { localOutputPath = renderContents ? renderTemplate(localOutputPath, context) : localOutputPath; } } if (containsSkippedContent(localOutputPath)) { continue; } const outputPath = backendCommon.resolveSafeChildPath(outputDir, localOutputPath); if (fs__default["default"].existsSync(outputPath) && !ctx.input.replace) { continue; } if (!renderContents && !extension) { ctx.logger.info( `Copying file/directory ${location} without processing.` ); } if (location.endsWith("/")) { ctx.logger.info( `Writing directory ${location} to template output path.` ); await fs__default["default"].ensureDir(outputPath); } else { const inputFilePath = backendCommon.resolveSafeChildPath(templateDir, location); const stats = await fs__default["default"].promises.lstat(inputFilePath); if (stats.isSymbolicLink() || await isbinaryfile.isBinaryFile(inputFilePath)) { ctx.logger.info( `Copying file binary or symbolic link at ${location}, to template output path.` ); await fs__default["default"].copy(inputFilePath, outputPath); } else { const statsObj = await fs__default["default"].stat(inputFilePath); ctx.logger.info( `Writing file ${location} to template output path with mode ${statsObj.mode}.` ); const inputFileContents = await fs__default["default"].readFile(inputFilePath, "utf-8"); await fs__default["default"].outputFile( outputPath, renderContents ? renderTemplate(inputFileContents, context) : inputFileContents, { mode: statsObj.mode } ); } } } ctx.logger.info(`Template result written to ${outputDir}`); } }); } function containsSkippedContent(localOutputPath) { return localOutputPath === "" || localOutputPath.startsWith("/") || localOutputPath.includes("//"); } const examples$1 = [ { description: "Delete specified files", example: yaml__namespace.stringify({ steps: [ { action: "fs:delete", id: "deleteFiles", name: "Delete files", input: { files: ["file1.txt", "file2.txt"] } } ] }) } ]; const createFilesystemDeleteAction = () => { return pluginScaffolderNode.createTemplateAction({ id: "fs:delete", description: "Deletes files and directories from the workspace", examples: examples$1, schema: { input: { required: ["files"], type: "object", properties: { files: { title: "Files", description: "A list of files and directories that will be deleted", type: "array", items: { type: "string" } } } } }, supportsDryRun: true, async handler(ctx) { var _a; if (!Array.isArray((_a = ctx.input) == null ? void 0 : _a.files)) { throw new errors.InputError("files must be an Array"); } for (const file of ctx.input.files) { const filepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file); try { await fs__default["default"].remove(filepath); ctx.logger.info(`File ${filepath} deleted successfully`); } catch (err) { ctx.logger.error(`Failed to delete file ${filepath}:`, err); throw err; } } } }); }; const examples = [ { description: "Rename specified files ", example: yaml__namespace.stringify({ steps: [ { action: "fs:rename", id: "renameFiles", name: "Rename files", input: { files: [ { from: "file1.txt", to: "file1Renamed.txt" }, { from: "file2.txt", to: "file2Renamed.txt" }, { from: "file3.txt", to: "file3Renamed.txt", overwrite: true } ] } } ] }) } ]; const createFilesystemRenameAction = () => { return pluginScaffolderNode.createTemplateAction({ id: "fs:rename", description: "Renames files and directories within the workspace", examples, schema: { input: { required: ["files"], type: "object", properties: { files: { title: "Files", description: "A list of file and directory names that will be renamed", type: "array", items: { type: "object", required: ["from", "to"], properties: { from: { type: "string", title: "The source location of the file to be renamed" }, to: { type: "string", title: "The destination of the new file" }, overwrite: { type: "boolean", title: "Overwrite existing file or directory, default is false" } } } } } } }, supportsDryRun: true, async handler(ctx) { var _a, _b; if (!Array.isArray((_a = ctx.input) == null ? void 0 : _a.files)) { throw new errors.InputError("files must be an Array"); } for (const file of ctx.input.files) { if (!file.from || !file.to) { throw new errors.InputError("each file must have a from and to property"); } const sourceFilepath = backendCommon.resolveSafeChildPath( ctx.workspacePath, file.from ); const destFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.to); try { await fs__default["default"].move(sourceFilepath, destFilepath, { overwrite: (_b = file.overwrite) != null ? _b : false }); ctx.logger.info( `File ${sourceFilepath} renamed to ${destFilepath} successfully` ); } catch (err) { ctx.logger.error( `Failed to rename file ${sourceFilepath} to ${destFilepath}:`, err ); throw err; } } } }); }; const createBuiltinActions = (options) => { const { reader, integrations, catalogClient, config, additionalTemplateFilters, additionalTemplateGlobals } = options; const githubCredentialsProvider = integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations); const actions = [ createFetchPlainAction({ reader, integrations }), createFetchPlainFileAction({ reader, integrations }), createFetchTemplateAction({ integrations, reader, additionalTemplateFilters, additionalTemplateGlobals }), gerrit.createPublishGerritAction({ integrations, config }), gerrit.createPublishGerritReviewAction({ integrations, config }), github.createPublishGithubAction({ integrations, config, githubCredentialsProvider }), github.createPublishGithubPullRequestAction({ integrations, githubCredentialsProvider }), gitlab.createPublishGitlabAction({ integrations, config }), gitlab.createPublishGitlabMergeRequestAction({ integrations }), bitbucket.createPublishBitbucketAction({ integrations, config }), bitbucket.createPublishBitbucketCloudAction({ integrations, config }), bitbucket.createPublishBitbucketServerAction({ integrations, config }), bitbucket.createPublishBitbucketServerPullRequestAction({ integrations, config }), azure.createPublishAzureAction({ integrations, config }), createDebugLogAction(), createWaitAction(), createCatalogRegisterAction({ catalogClient, integrations }), createFetchCatalogEntityAction({ catalogClient }), createCatalogWriteAction(), createFilesystemDeleteAction(), createFilesystemRenameAction(), github.createGithubActionsDispatchAction({ integrations, githubCredentialsProvider }), github.createGithubWebhookAction({ integrations, githubCredentialsProvider }), github.createGithubIssuesLabelAction({ integrations, githubCredentialsProvider }), github.createGithubRepoCreateAction({ integrations, githubCredentialsProvider }), github.createGithubRepoPushAction({ integrations, config, githubCredentialsProvider }), github.createGithubEnvironmentAction({ integrations }), github.createGithubDeployKeyAction({ integrations }), github.createGithubAutolinksAction({ integrations, githubCredentialsProvider }) ]; return actions; }; var __defProp$4 = Object.defineProperty; var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$4 = (obj, key, value) => { __defNormalProp$4(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; class TemplateActionRegistry { constructor() { __publicField$4(this, "actions", /* @__PURE__ */ new Map()); } register(action) { if (this.actions.has(action.id)) { throw new errors.ConflictError( `Template action with ID '${action.id}' has already been registered` ); } this.actions.set(action.id, action); } get(actionId) { const action = this.actions.get(actionId); if (!action) { throw new errors.NotFoundError( `Template action with ID '${actionId}' is not registered.` ); } return action; } list() { return [...this.actions.values()]; } } var __defProp$3 = Object.defineProperty; var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$3 = (obj, key, value) => { __defNormalProp$3(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const migrationsDir = backendCommon.resolvePackagePath( "@backstage/plugin-scaffolder-backend", "migrations" ); function isPluginDatabaseManager(opt) { return opt.getClient !== void 0; } const parseSqlDateToIsoString = (input) => { if (typeof input === "string") { const parsed = luxon.DateTime.fromSQL(input, { zone: "UTC" }); if (!parsed.isValid) { throw new Error( `Failed to parse database timestamp '${input}', ${parsed.invalidReason}: ${parsed.invalidExplanation}` ); } return parsed.toISO(); } return input; }; class DatabaseTaskStore { constructor(client) { __publicField$3(this, "db"); this.db = client; } static async create(options) { const { database } = options; const client = await this.getClient(database); await this.runMigrations(database, client); return new DatabaseTaskStore(client); } static async getClient(database) { if (isPluginDatabaseManager(database)) { return database.getClient(); } return database; } static async runMigrations(database, client) { var _a; if (!isPluginDatabaseManager(database)) { await client.migrate.latest({ directory: migrationsDir }); return; } if (!((_a = database.migrations) == null ? void 0 : _a.skip)) { await client.migrate.latest({ directory: migrationsDir }); } } async list(options) { const queryBuilder = this.db("tasks"); if (options.createdBy) { queryBuilder.where({ created_by: options.createdBy }); } const results = await queryBuilder.orderBy("created_at", "desc").select(); const tasks = results.map((result) => { var _a; return { id: result.id, spec: JSON.parse(result.spec), status: result.status, createdBy: (_a = result.created_by) != null ? _a : void 0, lastHeartbeatAt: parseSqlDateToIsoString(result.last_heartbeat_at), createdAt: parseSqlDateToIsoString(result.created_at) }; }); return { tasks }; } async getTask(taskId) { var _a; const [result] = await this.db("tasks").where({ id: taskId }).select(); if (!result) { throw new errors.NotFoundError(`No task with id '${taskId}' found`); } try { const spec = JSON.parse(result.spec); const secrets = result.secrets ? JSON.parse(result.secrets) : void 0; return { id: result.id, spec, status: result.status, lastHeartbeatAt: parseSqlDateToIsoString(result.last_heartbeat_at), createdAt: parseSqlDateToIsoString(result.created_at), createdBy: (_a = result.created_by) != null ? _a : void 0, secrets }; } catch (error) { throw new Error(`Failed to parse spec of task '${taskId}', ${error}`); } } async createTask(options) { var _a; const taskId = uuid.v4(); await this.db("tasks").insert({ id: taskId, spec: JSON.stringify(options.spec), secrets: options.secrets ? JSON.stringify(options.secrets) : void 0, created_by: (_a = options.createdBy) != null ? _a : null, status: "open" }); return { taskId }; } async claimTask() { return this.db.transaction(async (tx) => { var _a; const [task] = await tx("tasks").where({ status: "open" }).limit(1).select(); if (!task) { return void 0; } const updateCount = await tx("tasks").where({ id: task.id, status: "open" }).update({ status: "processing", last_heartbeat_at: this.db.fn.now(), // remove the secrets when moving to processing state. secrets: null }); if (updateCount < 1) { return void 0; } try { const spec = JSON.parse(task.spec); const secrets = task.secrets ? JSON.parse(task.secrets) : void 0; return { id: task.id, spec, status: "processing", lastHeartbeatAt: task.last_heartbeat_at, createdAt: task.created_at, createdBy: (_a = task.created_by) != null ? _a : void 0, secrets }; } catch (error) { throw new Error(`Failed to parse spec of task '${task.id}', ${error}`); } }); } async heartbeatTask(taskId) { const updateCount = await this.db("tasks").where({ id: taskId, status: "processing" }).update({ last_heartbeat_at: this.db.fn.now() }); if (updateCount === 0) { throw new errors.ConflictError(`No running task with taskId ${taskId} found`); } } async listStaleTasks(options) { const { timeoutS } = options; let heartbeatInterval = this.db.raw(`? - interval '${timeoutS} seconds'`, [ this.db.fn.now() ]); if (this.db.client.config.client.includes("mysql")) { heartbeatInterval = this.db.raw( `date_sub(now(), interval ${timeoutS} second)` ); } else if (this.db.client.config.client.includes("sqlite3")) { heartbeatInterval = this.db.raw(`datetime('now', ?)`, [ `-${timeoutS} seconds` ]); } const rawRows = await this.db("tasks").where("status", "processing").andWhere("last_heartbeat_at", "<=", heartbeatInterval); const tasks = rawRows.map((row) => ({ taskId: row.id })); return { tasks }; } async completeTask(options) { const { taskId, status, eventBody } = options; let oldStatus; if (["failed", "completed", "cancelled"].includes(status)) { oldStatus = "processing"; } else { throw new Error( `Invalid status update of run '${taskId}' to status '${status}'` ); } await this.db.transaction(async (tx) => { const [task] = await tx("tasks").where({ id: taskId }).limit(1).select(); const updateTask = async (criteria) => { const updateCount = await tx("tasks").where(criteria).update({ status }); if (updateCount !== 1) { throw new errors.ConflictError( `Failed to update status to '${status}' for taskId ${taskId}` ); } await tx("task_events").insert({ task_id: taskId, event_type: "completion", body: JSON.stringify(eventBody) }); }; if (status === "cancelled") { await updateTask({ id: taskId }); return; } if (task.status === "cancelled") { return; } if (!task) { throw new Error(`No task with taskId ${taskId} found`); } if (task.status !== oldStatus) { throw new errors.ConflictError( `Refusing to update status of run '${taskId}' to status '${status}' as it is currently '${task.status}', expected '${oldStatus}'` ); } await updateTask({ id: taskId, status: oldStatus }); }); } async emitLogEvent(options) { const { taskId, body } = options; const serializedBody = JSON.stringify(body); await this.db("task_events").insert({ task_id: taskId, event_type: "log", body: serializedBody }); } async listEvents(options) { const { taskId, after } = options; const rawEvents = await this.db("task_events").where({ task_id: taskId }).andWhere((builder) => { if (typeof after === "number") { builder.where("id", ">", after).orWhere("event_type", "completion"); } }).orderBy("id").select(); const events = rawEvents.map((event) => { try { const body = JSON.parse(event.body); return { id: Number(event.id), taskId, body, type: event.event_type, createdAt: parseSqlDateToIsoString(event.created_at) }; } catch (error) { throw new Error( `Failed to parse event body from event taskId=${taskId} id=${event.id}, ${error}` ); } }); return { events }; } async shutdownTask(options) { const { taskId } = options; const message = `This task was marked as stale as it exceeded its timeout`; const statusStepEvents = (await this.listEvents({ taskId })).events.filter( ({ body }) => body == null ? void 0 : body.stepId ); const completedSteps = statusStepEvents.filter( ({ body: { status } }) => status === "failed" || status === "completed" ).map((step) => step.body.stepId); const hungProcessingSteps = statusStepEvents.filter(({ body: { status } }) => status === "processing").map((event) => event.body.stepId).filter((step) => !completedSteps.includes(step)); for (const step of hungProcessingSteps) { await this.emitLogEvent({ taskId, body: { message, stepId: step, status: "failed" } }); } await this.completeTask({ taskId, status: "failed", eventBody: { message } }); } async cancelTask(options) { const { taskId, body } = options; const serializedBody = JSON.stringify(body); await this.db("task_events").insert({ task_id: taskId, event_type: "cancelled", body: serializedBody }); } } var __defProp$2 = Object.defineProperty; var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$2 = (obj, key, value) => { __defNormalProp$2(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; class TaskManager { // Runs heartbeat internally constructor(task, storage, signal, logger) { this.task = task; this.storage = storage; this.signal = signal; this.logger = logger; __publicField$2(this, "isDone", false); __publicField$2(this, "heartbeatTimeoutId"); } static create(task, storage, abortSignal, logger) { const agent = new TaskManager(task, storage, abortSignal, logger); agent.startTimeout(); return agent; } get spec() { return this.task.spec; } get cancelSignal() { return this.signal; } get secrets() { return this.task.secrets; } get createdBy() { return this.task.createdBy; } async getWorkspaceName() { return this.task.taskId; } get done() { return this.isDone; } async emitLog(message, logMetadata) { await this.storage.emitLogEvent({ taskId: this.task.taskId, body: { message, ...logMetadata } }); } async complete(result, metadata) { await this.storage.completeTask({ taskId: this.task.taskId, status: result === "failed" ? "failed" : "completed", eventBody: { message: `Run completed with status: ${result}`, ...metadata } }); this.isDone = true; if (this.heartbeatTimeoutId) { clearTimeout(this.heartbeatTimeoutId); } } startTimeout() { this.heartbeatTimeoutId = setTimeout(async () => { try { await this.storage.heartbeatTask(this.task.taskId); this.startTimeout(); } catch (error) { this.isDone = true; this.logger.error( `Heartbeat for task ${this.task.taskId} failed`, error ); } }, 1e3); } } function defer() { let resolve = () => { }; const promise = new Promise((_resolve) => { resolve = _resolve; }); return { promise, resolve }; } class StorageTaskBroker { constructor(storage, logger) { this.storage = storage; this.logger = logger; __publicField$2(this, "deferredDispatch", defer()); } async list(options) { if (!this.storage.list) { throw new Error( "TaskStore does not implement the list method. Please implement the list method to be able to list tasks" ); } return await this.storage.list({ createdBy: options == null ? void 0 : options.createdBy }); } async registerCancellable(taskId, abortController) { let shouldUnsubscribe = false; const subscription = this.event$({ taskId, after: void 0 }).subscribe({ error: (_) => { subscription.unsubscribe(); }, next: ({ events }) => { for (const event of events) { if (event.type === "cancelled") { abortController.abort(); shouldUnsubscribe = true; } if (event.type === "completion") { shouldUnsubscribe = true; } } if (shouldUnsubscribe) { subscription.unsubscribe(); } } }); } /** * {@inheritdoc TaskBroker.claim} */ async claim() { for (; ; ) { const pendingTask = await this.storage.claimTask(); if (pendingTask) { const abortController = new AbortController(); await this.registerCancellable(pendingTask.id, abortController); return TaskManager.create( { taskId: pendingTask.id, spec: pendingTask.spec, secrets: pendingTask.secrets, createdBy: pendingTask.createdBy }, this.storage, abortController.signal, this.logger ); } await this.waitForDispatch(); } } /** * {@inheritdoc TaskBroker.dispatch} */ async dispatch(options) { const taskRow = await this.storage.createTask(options); this.signalDispatch(); return { taskId: taskRow.taskId }; } /** * {@inheritdoc TaskBroker.get} */ async get(taskId) { return this.storage.getTask(taskId); } /** * {@inheritdoc TaskBroker.event$} */ event$(options) { return new ObservableImpl__default["default"]((observer) => { const { taskId } = options; let after = options.after; let cancelled = false; (async () => { while (!cancelled) { const result = await this.storage.listEvents({ taskId, after }); const { events } = result; if (events.length) { after = events[events.length - 1].id; observer.next(result); } await new Promise((resolve) => setTimeout(resolve, 1e3)); } })(); return () => { cancelled = true; }; }); } /** * {@inheritdoc TaskBroker.vacuumTasks} */ async vacuumTasks(options) { const { tasks } = await this.storage.listStaleTasks(options); await Promise.all( tasks.map(async (task) => { try { await this.storage.completeTask({ taskId: task.taskId, status: "failed", eventBody: { message: "The task was cancelled because the task worker lost connection to the task broker" } }); } catch (error) { this.logger.warn(`Failed to cancel task '${task.taskId}', ${error}`); } }) ); } waitForDispatch() { return this.deferredDispatch.promise; } signalDispatch() { this.deferredDispatch.resolve(); this.deferredDispatch = defer(); } async cancel(taskId) { var _a, _b; const { events } = await this.storage.listEvents({ taskId }); const currentStepId = events.length > 0 ? events.filter(({ body }) => body == null ? void 0 : body.stepId).reduce((prev, curr) => prev.id > curr.id ? prev : curr).body.stepId : 0; await ((_b = (_a = this.storage).cancelTask) == null ? void 0 : _b.call(_a, { taskId, body: { message: `Step ${currentStepId} has been cancelled.`, stepId: currentStepId, status: "cancelled" } })); } } function isTruthy(value) { return lodash.isArray(value) ? value.length > 0 : !!value; } function generateExampleOutput(schema) { var _a, _b; const { examples } = schema; if (examples && Array.isArray(examples)) { return examples[0]; } if (schema.type === "object") { return Object.fromEntries( Object.entries((_a = schema.properties) != null ? _a : {}).map(([key, value]) => [ key, generateExampleOutput(value) ]) ); } else if (schema.type === "array") { const [firstSchema] = (_b = [schema.items]) == null ? void 0 : _b.flat(); if (firstSchema) { return [generateExampleOutput(firstSchema)]; } return []; } else if (schema.type === "string") { return ""; } else if (schema.type === "number") { return 0; } else if (schema.type === "boolean") { return false; } return ""; } function createCounterMetric(config) { let metric = promClient.register.getSingleMetric(config.name); if (!metric) { metric = new promClient.Counter(config); promClient.register.registerMetric(metric); } return metric; } function createHistogramMetric(config) { let metric = promClient.register.getSingleMetric(config.name); if (!metric) { metric = new promClient.Histogram(config); promClient.register.registerMetric(metric); } return metric; } const createTemplatePermissionRule = pluginPermissionNode.makeCreatePermissionRule(); const hasTag = createTemplatePermissionRule({ name: "HAS_TAG", resourceType: alpha.RESOURCE_TYPE_SCAFFOLDER_TEMPLATE, description: `Match parameters or steps with the given tag`, paramsSchema: zod.z.object({ tag: zod.z.string().describe("Name of the tag to match on") }), apply: (resource, { tag }) => { var _a, _b, _c; return (_c = (_b = (_a = resource["backstage:permissions"]) == null ? void 0 : _a.tags) == null ? void 0 : _b.includes(tag)) != null ? _c : false; }, toQuery: () => ({}) }); const createActionPermissionRule = pluginPermissionNode.makeCreatePermissionRule(); const hasActionId = createActionPermissionRule({ name: "HAS_ACTION_ID", resourceType: alpha.RESOURCE_TYPE_SCAFFOLDER_ACTION, description: `Match actions with the given actionId`, paramsSchema: zod.z.object({ actionId: zod.z.string().describe("Name of the actionId to match on") }), apply: (resource, { actionId }) => { return resource.action === actionId; }, toQuery: () => ({}) }); buildHasProperty({ name: "HAS_PROPERTY", valueSchema: zod.z.union([zod.z.string(), zod.z.number(), zod.z.boolean(), zod.z.null()]), validateProperty: false }); const hasBooleanProperty = buildHasProperty({ name: "HAS_BOOLEAN_PROPERTY", valueSchema: zod.z.boolean() }); const hasNumberProperty = buildHasProperty({ name: "HAS_NUMBER_PROPERTY", valueSchema: zod.z.number() }); const hasStringProperty = buildHasProperty({ name: "HAS_STRING_PROPERTY", valueSchema: zod.z.string() }); function buildHasProperty({ name, valueSchema, validateProperty = true }) { return createActionPermissionRule({ name, description: `Allow actions with the specified property`, resourceType: alpha.RESOURCE_TYPE_SCAFFOLDER_ACTION, paramsSchema: zod.z.object({ key: zod.z.string().describe(`Property within the action parameters to match on`), value: valueSchema.describe(`Value of the given property to match on`) }), apply: (resource, { key, value }) => { const foundValue = lodash.get(resource.input, key); if (validateProperty && !valueSchema.safeParse(foundValue).success) { return false; } if (value !== void 0) { if (valueSchema.safeParse(value).success) { return value === foundValue; } return false; } return foundValue !== void 0; }, toQuery: () => ({}) }); } const scaffolderTemplateRules = { hasTag }; const scaffolderActionRules = { hasActionId, hasBooleanProperty, hasNumberProperty, hasStringProperty }; var __defProp$1 = Object.defineProperty; var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$1 = (obj, key, value) => { __defNormalProp$1(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const isValidTaskSpec = (taskSpec) => { return taskSpec.apiVersion === "scaffolder.backstage.io/v1beta3"; }; const createStepLogger = ({ task, step }) => { const metadata = { stepId: step.id }; const taskLogger = winston__namespace.createLogger({ level: process.env.LOG_LEVEL || "info", format: winston__namespace.format.combine( winston__namespace.format.colorize(), winston__namespace.format.simple() ), defaultMeta: {} }); const streamLogger = new stream.PassThrough(); streamLogger.on("data", async (data) => { const message = data.toString().trim(); if ((message == null ? void 0 : message.length) > 1) { await task.emitLog(message, metadata); } }); taskLogger.add(new winston__namespace.transports.Stream({ stream: streamLogger })); return { taskLogger, streamLogger }; }; const isActionAuthorized = pluginPermissionNode.createConditionAuthorizer( Object.values(scaffolderActionRules) ); class NunjucksWorkflowRunner { constructor(options) { this.options = options; __publicField$1(this, "defaultTemplateFilters"); __publicField$1(this, "tracker", scaffoldingTracker()); this.defaultTemplateFilters = createDefaultFilters({ integrations: this.options.integrations }); } isSingleTemplateString(input) { var _a, _b; const { parser, nodes } = nunjucks__default["default"]; const parsed = parser.parse( input, {}, { autoescape: false, tags: { variableStart: "${{", variableEnd: "}}" } } ); return parsed.children.length === 1 && !(((_b = (_a = parsed.children[0]) == null ? void 0 : _a.children) == null ? void 0 : _b[0]) instanceof nodes.TemplateData); } render(input, context, renderTemplate) { return JSON.parse(JSON.stringify(input), (_key, value) => { try { if (typeof value === "string") { try { if (this.isSingleTemplateString(value)) { const wrappedDumped = value.replace( /\${{(.+)}}/g, "${{ ( $1 ) | dump }}" ); const templated2 = renderTemplate(wrappedDumped, context); if (templated2 === "") { return void 0; } return JSON.parse(templated2); } } catch (ex) { this.options.logger.error( `Failed to parse template string: ${value} with error ${ex.message}` ); } const templated = renderTemplate(value, context); if (templated === "") { return void 0; } return templated; } } catch { return value; } return value; }); } async executeStep(task, step, context, renderTemplate, taskTrack, workspacePath, decision) { var _a, _b, _c, _d, _e; const stepTrack = await this.tracker.stepStart(task, step); if (task.cancelSignal.aborted) { throw new Error(`Step ${step.name} has been cancelled.`); } try { if (step.if) { const ifResult = await this.render(step.if, context, renderTemplate); if (!isTruthy(ifResult)) { await stepTrack.skipFalsy(); return; } } const action = this.options.actionRegistry.get(step.action); const { taskLogger, streamLogger } = createStepLogger({ task, step }); if (task.isDryRun) { const redactedSecrets = Object.fromEntries( Object.entries((_a = task.secrets) != null ? _a : {}).map((secret) => [ secret[0], "[REDACTED]" ]) ); const debugInput = (_b = step.input && this.render( step.input, { ...context, secrets: redactedSecrets }, renderTemplate )) != null ? _b : {}; taskLogger.info( `Running ${action.id} in dry-run mode with inputs (secrets redacted): ${JSON.stringify( debugInput, void 0, 2 )}` ); if (!action.supportsDryRun) { await taskTrack.skipDryRun(step, action); const outputSchema = (_c = action.schema) == null ? void 0 : _c.output; if (outputSchema) { context.steps[step.id] = { output: generateExampleOutput(outputSchema) }; } else { context.steps[step.id] = { output: {} }; } return; } } const iterations = (step.each ? Object.entries(this.render(step.each, context, renderTemplate)).map( ([key, value]) => ({ each: { key, value } }) ) : [{}]).map((i) => { var _a2; return { ...i, // Secrets are only passed when templating the input to actions for security reasons input: step.input ? this.render( step.input, { ...context, secrets: (_a2 = task.secrets) != null ? _a2 : {}, ...i }, renderTemplate ) : {} }; }); for (const iteration of iterations) { const actionId = `${action.id}${iteration.each ? `[${iteration.each.key}]` : ""}`; if ((_d = action.schema) == null ? void 0 : _d.input) { const validateResult = jsonschema.validate( iteration.input, action.schema.input ); if (!validateResult.valid) { const errors$1 = validateResult.errors.join(", "); throw new errors.InputError( `Invalid input passed to action ${actionId}, ${errors$1}` ); } } if (!isActionAuthorized(decision, { action: action.id, input: iteration.input })) { throw new errors.NotAllowedError( `Unauthorized action: ${actionId}. The action is not allowed. Input: ${JSON.stringify( iteration.input, null, 2 )}` ); } } const tmpDirs = new Array(); const stepOutput = {}; for (const iteration of iterations) { if (iteration.each) { taskLogger.info( `Running step each: ${JSON.stringify( iteration.each, (k, v) => k ? v.toString() : v, 0 )}` ); } await action.handler({ input: iteration.input, secrets: (_e = task.secrets) != null ? _e : {}, logger: taskLogger, logStream: streamLogger, workspacePath, createTemporaryDirectory: async () => { const tmpDir = await fs__default["default"].mkdtemp( `${workspacePath}_step-${step.id}-` ); tmpDirs.push(tmpDir); return tmpDir; }, output(name, value) { if (step.each) { stepOutput[name] = stepOutput[name] || []; stepOutput[name].push(value); } else { stepOutput[name] = value; } }, templateInfo: task.spec.templateInfo, user: task.spec.user, isDryRun: task.isDryRun, signal: task.cancelSignal }); } for (const tmpDir of tmpDirs) { await fs__default["default"].remove(tmpDir); } context.steps[step.id] = { output: stepOutput }; if (task.cancelSignal.aborted) { throw new Error(`Step ${step.name} has been cancelled.`); } await stepTrack.markSuccessful(); } catch (err) { await taskTrack.markFailed(step, err); await stepTrack.markFailed(); throw err; } } async execute(task) { var _a; if (!isValidTaskSpec(task.spec)) { throw new errors.InputError( "Wrong template version executed with the workflow engine" ); } const workspacePath = path__default["default"].join( this.options.workingDirectory, await task.getWorkspaceName() ); const { additionalTemplateFilters, additionalTemplateGlobals } = this.options; const renderTemplate = await SecureTemplater.loadRenderer({ templateFilters: { ...this.defaultTemplateFilters, ...additionalTemplateFilters }, templateGlobals: additionalTemplateGlobals }); try { const taskTrack = await this.tracker.taskStart(task); await fs__default["default"].ensureDir(workspacePath); const context = { parameters: task.spec.parameters, steps: {}, user: task.spec.user }; const [decision] = this.options.permissions && task.spec.steps.length ? await this.options.permissions.authorizeConditional( [{ permission: alpha.actionExecutePermission }], { token: (_a = task.secrets) == null ? void 0 : _a.backstageToken } ) : [{ result: pluginPermissionCommon.AuthorizeResult.ALLOW }]; for (const step of task.spec.steps) { await this.executeStep( task, step, context, renderTemplate, taskTrack, workspacePath, decision ); } const output = this.render(task.spec.output, context, renderTemplate); await taskTrack.markSuccessful(); return { output }; } finally { if (workspacePath) { await fs__default["default"].remove(workspacePath); } } } } function scaffoldingTracker() { const taskCount = createCounterMetric({ name: "scaffolder_task_count", help: "Count of task runs", labelNames: ["template", "user", "result"] }); const taskDuration = createHistogramMetric({ name: "scaffolder_task_duration", help: "Duration of a task run", labelNames: ["template", "result"] }); const stepCount = createCounterMetric({ name: "scaffolder_step_count", help: "Count of step runs", labelNames: ["template", "step", "result"] }); const stepDuration = createHistogramMetric({ name: "scaffolder_step_duration", help: "Duration of a step runs", labelNames: ["template", "step", "result"] }); async function taskStart(task) { var _a, _b; await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`); const template = ((_a = task.spec.templateInfo) == null ? void 0 : _a.entityRef) || ""; const user = ((_b = task.spec.user) == null ? void 0 : _b.ref) || ""; const taskTimer = taskDuration.startTimer({ template }); async function skipDryRun(step, action) { task.emitLog(`Skipping because ${action.id} does not support dry-run`, { stepId: step.id, status: "skipped" }); } async function markSuccessful() { taskCount.inc({ template, user, result: "ok" }); taskTimer({ result: "ok" }); } async function markFailed(step, err) { await task.emitLog(String(err.stack), { stepId: step.id, status: "failed" }); taskCount.inc({ template, user, result: "failed" }); taskTimer({ result: "failed" }); } async function markCancelled(step) { await task.emitLog(`Step ${step.id} has been cancelled.`, { stepId: step.id, status: "cancelled" }); taskCount.inc({ template, user, result: "cancelled" }); taskTimer({ result: "cancelled" }); } return { skipDryRun, markCancelled, markSuccessful, markFailed }; } async function stepStart(task, step) { var _a; await task.emitLog(`Beginning step ${step.name}`, { stepId: step.id, status: "processing" }); const template = ((_a = task.spec.templateInfo) == null ? void 0 : _a.entityRef) || ""; const stepTimer = stepDuration.startTimer({ template, step: step.name }); async function markSuccessful() { await task.emitLog(`Finished step ${step.name}`, { stepId: step.id, status: "completed" }); stepCount.inc({ template, step: step.name, result: "ok" }); stepTimer({ result: "ok" }); } async function markCancelled() { stepCount.inc({ template, step: step.name, result: "cancelled" }); stepTimer({ result: "cancelled" }); } async function markFailed() { stepCount.inc({ template, step: step.name, result: "failed" }); stepTimer({ result: "failed" }); } async function skipFalsy() { await task.emitLog( `Skipping step ${step.id} because its if condition was false`, { stepId: step.id, status: "skipped" } ); stepTimer({ result: "skipped" }); } return { markCancelled, markFailed, markSuccessful, skipFalsy }; } return { taskStart, stepStart }; } var __defProp = Object.defineProperty; var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField = (obj, key, value) => { __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; class TaskWorker { constructor(options) { this.options = options; __publicField(this, "taskQueue"); this.taskQueue = new PQueue__default["default"]({ concurrency: options.concurrentTasksLimit }); } static async create(options) { const { taskBroker, logger, actionRegistry, integrations, workingDirectory, additionalTemplateFilters, concurrentTasksLimit = 10, // from 1 to Infinity additionalTemplateGlobals, permissions } = options; const workflowRunner = new NunjucksWorkflowRunner({ actionRegistry, integrations, logger, workingDirectory, additionalTemplateFilters, additionalTemplateGlobals, permissions }); return new TaskWorker({ taskBroker, runners: { workflowRunner }, concurrentTasksLimit, permissions }); } start() { (async () => { for (; ; ) { await this.onReadyToClaimTask(); const task = await this.options.taskBroker.claim(); this.taskQueue.add(() => this.runOneTask(task)); } })(); } onReadyToClaimTask() { if (this.taskQueue.pending < this.options.concurrentTasksLimit) { return Promise.resolve(); } return new Promise((resolve) => { this.taskQueue.once("next", () => { resolve(); }); }); } async runOneTask(task) { try { if (task.spec.apiVersion !== "scaffolder.backstage.io/v1beta3") { throw new Error( `Unsupported Template apiVersion ${task.spec.apiVersion}` ); } const { output } = await this.options.runners.workflowRunner.execute( task ); await task.complete("completed", { output }); } catch (error) { errors.assertError(error); await task.complete("failed", { error: { name: error.name, message: error.message } }); } } } class DecoratedActionsRegistry extends TemplateActionRegistry { constructor(innerRegistry, extraActions) { super(); this.innerRegistry = innerRegistry; for (const action of extraActions) { this.register(action); } } get(actionId) { try { return super.get(actionId); } catch { return this.innerRegistry.get(actionId); } } } function createDryRunner(options) { return async function dryRun(input) { let contentPromise; const workflowRunner = new NunjucksWorkflowRunner({ ...options, actionRegistry: new DecoratedActionsRegistry(options.actionRegistry, [ pluginScaffolderNode.createTemplateAction({ id: "dry-run:extract", supportsDryRun: true, async handler(ctx) { contentPromise = pluginScaffolderNode.serializeDirectoryContents(ctx.workspacePath); await contentPromise.catch(() => { }); } }) ]) }); const dryRunId = uuid.v4(); const log = new Array(); const contentsPath = backendCommon.resolveSafeChildPath( options.workingDirectory, `dry-run-content-${dryRunId}` ); try { await pluginScaffolderNode.deserializeDirectoryContents(contentsPath, input.directoryContents); const abortSignal = new AbortController().signal; const result = await workflowRunner.execute({ spec: { ...input.spec, steps: [ ...input.spec.steps, { id: dryRunId, name: "dry-run:extract", action: "dry-run:extract" } ], templateInfo: { entityRef: "template:default/dry-run", baseUrl: url.pathToFileURL( backendCommon.resolveSafeChildPath(contentsPath, "template.yaml") ).toString() } }, secrets: input.secrets, // No need to update this at the end of the run, so just hard-code it done: false, isDryRun: true, getWorkspaceName: async () => `dry-run-${dryRunId}`, cancelSignal: abortSignal, async emitLog(message, logMetadata) { if ((logMetadata == null ? void 0 : logMetadata.stepId) === dryRunId) { return; } log.push({ body: { ...logMetadata, message } }); }, complete: async () => { throw new Error("Not implemented"); } }); if (!contentPromise) { throw new Error("Content extraction step was skipped"); } const directoryContents = await contentPromise; return { log, directoryContents, output: result.output }; } finally { await fs__default["default"].remove(contentsPath); } }; } async function getWorkingDirectory(config, logger) { if (!config.has("backend.workingDirectory")) { return os__default["default"].tmpdir(); } const workingDirectory = config.getString("backend.workingDirectory"); try { await fs__default["default"].access(workingDirectory, fs__default["default"].constants.F_OK | fs__default["default"].constants.W_OK); logger.info(`using working directory: ${workingDirectory}`); } catch (err) { errors.assertError(err); logger.error( `working directory ${workingDirectory} ${err.code === "ENOENT" ? "does not exist" : "is not writable"}` ); throw err; } return workingDirectory; } function getEntityBaseUrl(entity) { var _a, _b; let location = (_a = entity.metadata.annotations) == null ? void 0 : _a[catalogModel.ANNOTATION_SOURCE_LOCATION]; if (!location) { location = (_b = entity.metadata.annotations) == null ? void 0 : _b[catalogModel.ANNOTATION_LOCATION]; } if (!location) { return void 0; } const { type, target } = catalogModel.parseLocationRef(location); if (type === "url") { return target; } else if (type === "file") { return `file://${target}`; } return void 0; } async function findTemplate(options) { const { entityRef, token, catalogApi } = options; if (entityRef.kind.toLocaleLowerCase("en-US") !== "template") { throw new errors.InputError(`Invalid kind, only 'Template' kind is supported`); } const template = await catalogApi.getEntityByRef(entityRef, { token }); if (!template) { throw new errors.NotFoundError( `Template ${catalogModel.stringifyEntityRef(entityRef)} not found` ); } return template; } function isTemplatePermissionRuleInput(permissionRule) { return permissionRule.resourceType === alpha.RESOURCE_TYPE_SCAFFOLDER_TEMPLATE; } function isActionPermissionRuleInput(permissionRule) { return permissionRule.resourceType === alpha.RESOURCE_TYPE_SCAFFOLDER_ACTION; } function isSupportedTemplate(entity) { return entity.apiVersion === "scaffolder.backstage.io/v1beta3"; } function buildDefaultIdentityClient(options) { return { getIdentity: async ({ request }) => { var _a; const header = request.headers.authorization; const { logger } = options; if (!header) { return void 0; } try { const token = (_a = header.match(/^Bearer\s(\S+\.\S+\.\S+)$/i)) == null ? void 0 : _a[1]; if (!token) { throw new TypeError("Expected Bearer with JWT"); } const [_header, rawPayload, _signature] = token.split("."); const payload = JSON.parse( Buffer.from(rawPayload, "base64").toString() ); if (typeof payload !== "object" || payload === null || Array.isArray(payload)) { throw new TypeError("Malformed JWT payload"); } const sub = payload.sub; if (typeof sub !== "string") { throw new TypeError("Expected string sub claim"); } if (sub === "backstage-server") { return void 0; } catalogModel.parseEntityRef(sub); return { identity: { userEntityRef: sub, ownershipEntityRefs: [], type: "user" }, token }; } catch (e) { logger.error(`Invalid authorization header: ${errors.stringifyError(e)}`); return void 0; } } }; } const readDuration = (config$1, key, defaultValue) => { if (config$1.has(key)) { return config.readDurationFromConfig(config$1, { key }); } return defaultValue; }; async function createRouter(options) { var _a; const router = Router__default["default"](); router.use(express__default["default"].json({ limit: "10MB" })); const { logger: parentLogger, config, reader, database, catalogClient, actions, taskWorkers, scheduler, additionalTemplateFilters, additionalTemplateGlobals, permissions, permissionRules } = options; const concurrentTasksLimit = (_a = options.concurrentTasksLimit) != null ? _a : options.config.getOptionalNumber("scaffolder.concurrentTasksLimit"); const logger = parentLogger.child({ plugin: "scaffolder" }); const identity = options.identity || buildDefaultIdentityClient(options); const workingDirectory = await getWorkingDirectory(config, logger); const integrations = integration.ScmIntegrations.fromConfig(config); let taskBroker; if (!options.taskBroker) { const databaseTaskStore = await DatabaseTaskStore.create({ database }); taskBroker = new StorageTaskBroker(databaseTaskStore, logger); if (scheduler && databaseTaskStore.listStaleTasks) { await scheduler.scheduleTask({ id: "close_stale_tasks", frequency: readDuration( config, "scaffolder.taskTimeoutJanitorFrequency", { minutes: 5 } ), timeout: { minutes: 15 }, fn: async () => { const { tasks } = await databaseTaskStore.listStaleTasks({ timeoutS: luxon.Duration.fromObject( readDuration(config, "scaffolder.taskTimeout", { hours: 24 }) ).as("seconds") }); for (const task of tasks) { await databaseTaskStore.shutdownTask(task); logger.info(`Successfully closed stale task ${task.taskId}`); } } }); } } else { taskBroker = options.taskBroker; } const actionRegistry = new TemplateActionRegistry(); const workers = []; if (concurrentTasksLimit !== 0) { for (let i = 0; i < (taskWorkers || 1); i++) { const worker = await TaskWorker.create({ taskBroker, actionRegistry, integrations, logger, workingDirectory, additionalTemplateFilters, additionalTemplateGlobals, concurrentTasksLimit, permissions }); workers.push(worker); } } const actionsToRegister = Array.isArray(actions) ? actions : createBuiltinActions({ integrations, catalogClient, reader, config, additionalTemplateFilters, additionalTemplateGlobals }); actionsToRegister.forEach((action) => actionRegistry.register(action)); workers.forEach((worker) => worker.start()); const dryRunner = createDryRunner({ actionRegistry, integrations, logger, workingDirectory, additionalTemplateFilters, additionalTemplateGlobals, permissions }); const templateRules = Object.values( scaffolderTemplateRules ); const actionRules = Object.values( scaffolderActionRules ); if (permissionRules) { templateRules.push( ...permissionRules.filter(isTemplatePermissionRuleInput) ); actionRules.push(...permissionRules.filter(isActionPermissionRuleInput)); } const isAuthorized = pluginPermissionNode.createConditionAuthorizer(Object.values(templateRules)); const permissionIntegrationRouter = pluginPermissionNode.createPermissionIntegrationRouter({ resources: [ { resourceType: alpha.RESOURCE_TYPE_SCAFFOLDER_TEMPLATE, permissions: alpha.scaffolderTemplatePermissions, rules: templateRules }, { resourceType: alpha.RESOURCE_TYPE_SCAFFOLDER_ACTION, permissions: alpha.scaffolderActionPermissions, rules: actionRules } ] }); router.use(permissionIntegrationRouter); router.get( "/v2/templates/:namespace/:kind/:name/parameter-schema", async (req, res) => { var _a2, _b; const userIdentity = await identity.getIdentity({ request: req }); const token = userIdentity == null ? void 0 : userIdentity.token; const template = await authorizeTemplate(req.params, token); const parameters = [(_a2 = template.spec.parameters) != null ? _a2 : []].flat(); const presentation = template.spec.presentation; res.json({ title: (_b = template.metadata.title) != null ? _b : template.metadata.name, ...presentation ? { presentation } : {}, description: template.metadata.description, "ui:options": template.metadata["ui:options"], steps: parameters.map((schema) => { var _a3; return { title: (_a3 = schema.title) != null ? _a3 : "Please enter the following information", description: schema.description, schema }; }) }); } ).get("/v2/actions", async (_req, res) => { const actionsList = actionRegistry.list().map((action) => { return { id: action.id, description: action.description, examples: action.examples, schema: action.schema }; }); res.json(actionsList); }).post("/v2/tasks", async (req, res) => { var _a2, _b; const templateRef = req.body.templateRef; const { kind, namespace, name } = catalogModel.parseEntityRef(templateRef, { defaultKind: "template" }); const callerIdentity = await identity.getIdentity({ request: req }); const token = callerIdentity == null ? void 0 : callerIdentity.token; const userEntityRef = callerIdentity == null ? void 0 : callerIdentity.identity.userEntityRef; const userEntity = userEntityRef ? await catalogClient.getEntityByRef(userEntityRef, { token }) : void 0; let auditLog = `Scaffolding task for ${templateRef}`; if (userEntityRef) { auditLog += ` created by ${userEntityRef}`; } logger.info(auditLog); const values = req.body.values; const template = await authorizeTemplate( { kind, namespace, name }, token ); for (const parameters of [(_a2 = template.spec.parameters) != null ? _a2 : []].flat()) { const result2 = jsonschema.validate(values, parameters); if (!result2.valid) { res.status(400).json({ errors: result2.errors }); return; } } const baseUrl = getEntityBaseUrl(template); const taskSpec = { apiVersion: template.apiVersion, steps: template.spec.steps.map((step, index) => { var _a3, _b2; return { ...step, id: (_a3 = step.id) != null ? _a3 : `step-${index + 1}`, name: (_b2 = step.name) != null ? _b2 : step.action }; }), output: (_b = template.spec.output) != null ? _b : {}, parameters: values, user: { entity: userEntity, ref: userEntityRef }, templateInfo: { entityRef: catalogModel.stringifyEntityRef({ kind, name, namespace }), baseUrl, entity: { metadata: template.metadata } } }; const result = await taskBroker.dispatch({ spec: taskSpec, createdBy: userEntityRef, secrets: { ...req.body.secrets, backstageToken: token } }); res.status(201).json({ id: result.taskId }); }).get("/v2/tasks", async (req, res) => { const [userEntityRef] = [req.query.createdBy].flat(); if (typeof userEntityRef !== "string" && typeof userEntityRef !== "undefined") { throw new errors.InputError("createdBy query parameter must be a string"); } if (!taskBroker.list) { throw new Error( "TaskBroker does not support listing tasks, please implement the list method on the TaskBroker." ); } const tasks = await taskBroker.list({ createdBy: userEntityRef }); res.status(200).json(tasks); }).get("/v2/tasks/:taskId", async (req, res) => { const { taskId } = req.params; const task = await taskBroker.get(taskId); if (!task) { throw new errors.NotFoundError(`Task with id ${taskId} does not exist`); } delete task.secrets; res.status(200).json(task); }).post("/v2/tasks/:taskId/cancel", async (req, res) => { var _a2; const { taskId } = req.params; await ((_a2 = taskBroker.cancel) == null ? void 0 : _a2.call(taskBroker, taskId)); res.status(200).json({ status: "cancelled" }); }).get("/v2/tasks/:taskId/eventstream", async (req, res) => { const { taskId } = req.params; const after = req.query.after !== void 0 ? Number(req.query.after) : void 0; logger.debug(`Event stream observing taskId '${taskId}' opened`); res.writeHead(200, { Connection: "keep-alive", "Cache-Control": "no-cache", "Content-Type": "text/event-stream" }); const subscription = taskBroker.event$({ taskId, after }).subscribe({ error: (error) => { logger.error( `Received error from event stream when observing taskId '${taskId}', ${error}` ); res.end(); }, next: ({ events }) => { var _a2; let shouldUnsubscribe = false; for (const event of events) { res.write( `event: ${event.type} data: ${JSON.stringify(event)} ` ); if (event.type === "completion") { shouldUnsubscribe = true; } } (_a2 = res.flush) == null ? void 0 : _a2.call(res); if (shouldUnsubscribe) { subscription.unsubscribe(); res.end(); } } }); req.on("close", () => { subscription.unsubscribe(); logger.debug(`Event stream observing taskId '${taskId}' closed`); }); }).get("/v2/tasks/:taskId/events", async (req, res) => { const { taskId } = req.params; const after = Number(req.query.after) || void 0; const timeout = setTimeout(() => { res.json([]); }, 3e4); const subscription = taskBroker.event$({ taskId, after }).subscribe({ error: (error) => { logger.error( `Received error from event stream when observing taskId '${taskId}', ${error}` ); }, next: ({ events }) => { clearTimeout(timeout); subscription.unsubscribe(); res.json(events); } }); req.on("close", () => { subscription.unsubscribe(); clearTimeout(timeout); }); }).post("/v2/dry-run", async (req, res) => { var _a2, _b, _c, _d; const bodySchema = zod.z.object({ template: zod.z.unknown(), values: zod.z.record(zod.z.unknown()), secrets: zod.z.record(zod.z.string()).optional(), directoryContents: zod.z.array( zod.z.object({ path: zod.z.string(), base64Content: zod.z.string() }) ) }); const body = await bodySchema.parseAsync(req.body).catch((e) => { throw new errors.InputError(`Malformed request: ${e}`); }); const template = body.template; if (!await pluginScaffolderCommon.templateEntityV1beta3Validator.check(template)) { throw new errors.InputError("Input template is not a template"); } const token = (_a2 = await identity.getIdentity({ request: req })) == null ? void 0 : _a2.token; for (const parameters of [(_b = template.spec.parameters) != null ? _b : []].flat()) { const result2 = jsonschema.validate(body.values, parameters); if (!result2.valid) { res.status(400).json({ errors: result2.errors }); return; } } const steps = template.spec.steps.map((step, index) => { var _a3, _b2; return { ...step, id: (_a3 = step.id) != null ? _a3 : `step-${index + 1}`, name: (_b2 = step.name) != null ? _b2 : step.action }; }); const result = await dryRunner({ spec: { apiVersion: template.apiVersion, steps, output: (_c = template.spec.output) != null ? _c : {}, parameters: body.values }, directoryContents: ((_d = body.directoryContents) != null ? _d : []).map((file) => ({ path: file.path, content: Buffer.from(file.base64Content, "base64") })), secrets: { ...body.secrets, ...token && { backstageToken: token } } }); res.status(200).json({ ...result, steps, directoryContents: result.directoryContents.map((file) => ({ path: file.path, executable: file.executable, base64Content: file.content.toString("base64") })) }); }); const app = express__default["default"](); app.set("logger", logger); app.use("/", router); async function authorizeTemplate(entityRef, token) { const template = await findTemplate({ catalogApi: catalogClient, entityRef, token }); if (!isSupportedTemplate(template)) { throw new errors.InputError( `Unsupported apiVersion field in schema entity, ${template.apiVersion}` ); } if (!permissions) { return template; } const [parameterDecision, stepDecision] = await permissions.authorizeConditional( [ { permission: alpha.templateParameterReadPermission }, { permission: alpha.templateStepReadPermission } ], { token } ); if (Array.isArray(template.spec.parameters)) { template.spec.parameters = template.spec.parameters.filter( (step) => isAuthorized(parameterDecision, step) ); } else if (template.spec.parameters && !isAuthorized(parameterDecision, template.spec.parameters)) { template.spec.parameters = void 0; } template.spec.steps = template.spec.steps.filter( (step) => isAuthorized(stepDecision, step) ); return template; } return app; } exports.DatabaseTaskStore = DatabaseTaskStore; exports.TaskManager = TaskManager; exports.TaskWorker = TaskWorker; exports.TemplateActionRegistry = TemplateActionRegistry; exports.createBuiltinActions = createBuiltinActions; exports.createCatalogRegisterAction = createCatalogRegisterAction; exports.createCatalogWriteAction = createCatalogWriteAction; exports.createDebugLogAction = createDebugLogAction; exports.createFetchCatalogEntityAction = createFetchCatalogEntityAction; exports.createFetchPlainAction = createFetchPlainAction; exports.createFetchPlainFileAction = createFetchPlainFileAction; exports.createFetchTemplateAction = createFetchTemplateAction; exports.createFilesystemDeleteAction = createFilesystemDeleteAction; exports.createFilesystemRenameAction = createFilesystemRenameAction; exports.createRouter = createRouter; exports.createWaitAction = createWaitAction; exports.scaffolderActionRules = scaffolderActionRules; exports.scaffolderTemplateRules = scaffolderTemplateRules; //# sourceMappingURL=router-ca1e7c8a.cjs.js.map