'use strict'; Object.defineProperty(exports, '__esModule', { value: true }); var backendPluginApi = require('@backstage/backend-plugin-api'); var Keyv = require('keyv'); var KeyvMemcache = require('@keyv/memcache'); var KeyvRedis = require('@keyv/redis'); var winston = require('winston'); var backendAppApi = require('@backstage/backend-app-api'); var lodash = require('lodash'); var Transport = require('winston-transport'); var crypto = require('crypto'); var config = require('@backstage/config'); var errors = require('@backstage/errors'); var knexFactory = require('knex'); var limiterFactory = require('p-limit'); var yn = require('yn'); var fs = require('fs-extra'); var platformPath = require('path'); var backendDevUtils = require('@backstage/backend-dev-utils'); var paths = require('./cjs/paths-cfcd05fc.cjs.js'); var integration = require('@backstage/integration'); var fetch = require('node-fetch'); var minimatch = require('minimatch'); var stream = require('stream'); var getRawBody = require('raw-body'); var parseGitUrl = require('git-url-parse'); var base64Stream = require('base64-stream'); var concatStream = require('concat-stream'); var os = require('os'); var tar = require('tar'); var util = require('util'); var git = require('isomorphic-git'); var http = require('isomorphic-git/http/node'); var integrationAwsNode = require('@backstage/integration-aws-node'); var credentialProviders = require('@aws-sdk/credential-providers'); var clientS3 = require('@aws-sdk/client-s3'); var abortController = require('@aws-sdk/abort-controller'); var archiver = require('archiver'); var yauzl = require('yauzl'); var storage = require('@google-cloud/storage'); var compression = require('compression'); var cors = require('cors'); var express = require('express'); var helmet = require('helmet'); var Router = require('express-promise-router'); var jose = require('jose'); var luxon = require('luxon'); var clientNode = require('@kubernetes/client-node'); var uuid = require('uuid'); var cliCommon = require('@backstage/cli-common'); function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } function _interopNamespace(e) { if (e && e.__esModule) return e; var n = Object.create(null); if (e) { Object.keys(e).forEach(function (k) { if (k !== 'default') { var d = Object.getOwnPropertyDescriptor(e, k); Object.defineProperty(n, k, d.get ? 
d : { enumerable: true, get: function () { return e[k]; } }); } }); } n["default"] = e; return Object.freeze(n); } var Keyv__default = /*#__PURE__*/_interopDefaultLegacy(Keyv); var KeyvMemcache__default = /*#__PURE__*/_interopDefaultLegacy(KeyvMemcache); var KeyvRedis__default = /*#__PURE__*/_interopDefaultLegacy(KeyvRedis); var winston__namespace = /*#__PURE__*/_interopNamespace(winston); var Transport__default = /*#__PURE__*/_interopDefaultLegacy(Transport); var knexFactory__default = /*#__PURE__*/_interopDefaultLegacy(knexFactory); var limiterFactory__default = /*#__PURE__*/_interopDefaultLegacy(limiterFactory); var yn__default = /*#__PURE__*/_interopDefaultLegacy(yn); var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs); var platformPath__default = /*#__PURE__*/_interopDefaultLegacy(platformPath); var fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch); var getRawBody__default = /*#__PURE__*/_interopDefaultLegacy(getRawBody); var parseGitUrl__default = /*#__PURE__*/_interopDefaultLegacy(parseGitUrl); var concatStream__default = /*#__PURE__*/_interopDefaultLegacy(concatStream); var os__default = /*#__PURE__*/_interopDefaultLegacy(os); var tar__default = /*#__PURE__*/_interopDefaultLegacy(tar); var git__default = /*#__PURE__*/_interopDefaultLegacy(git); var http__default = /*#__PURE__*/_interopDefaultLegacy(http); var archiver__default = /*#__PURE__*/_interopDefaultLegacy(archiver); var yauzl__default = /*#__PURE__*/_interopDefaultLegacy(yauzl); var compression__default = /*#__PURE__*/_interopDefaultLegacy(compression); var cors__default = /*#__PURE__*/_interopDefaultLegacy(cors); var express__default = /*#__PURE__*/_interopDefaultLegacy(express); var helmet__default = /*#__PURE__*/_interopDefaultLegacy(helmet); var Router__default = /*#__PURE__*/_interopDefaultLegacy(Router); const getRedacter = /* @__PURE__ */ (() => { let redacter = void 0; return () => { if (!redacter) { redacter = backendAppApi.WinstonLogger.redacter(); } return redacter; }; })(); const setRootLoggerRedactionList = (redactions) => { getRedacter().add(redactions); }; function redactWinstonLogLine(info) { return getRedacter().format.transform( info ); } const colorizer = winston.format.colorize(); const coloredFormat = winston.format.combine( winston.format.timestamp(), winston.format.colorize({ colors: { timestamp: "dim", prefix: "blue", field: "cyan", debug: "grey" } }), winston.format.printf((info) => { const { timestamp, level, message, plugin, service, ...fields } = info; const prefix = plugin || service; const timestampColor = colorizer.colorize("timestamp", timestamp); const prefixColor = colorizer.colorize("prefix", prefix); const extraFields = Object.entries(fields).map( ([key, value]) => `${colorizer.colorize("field", `${key}`)}=${value}` ).join(" "); return `${timestampColor} ${prefixColor} ${level} ${message} ${extraFields}`; }) ); function createRootLogger(options = {}, env = process.env) { const logger = winston__namespace.createLogger( lodash.merge( { level: env.LOG_LEVEL || "info", format: winston__namespace.format.combine( getRedacter().format, env.NODE_ENV === "production" ? 
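/* Usage sketch (illustrative; option values are made up): `createRootLogger`
   deep-merges the options argument over the defaults built here, so callers can
   override the level or swap transports:

     const logger = createRootLogger({ level: 'debug' });
     setRootLoggerRedactionList(['my-secret']); // masked in subsequent log lines

   Output is JSON when NODE_ENV=production, colorized otherwise, and the console
   transport below stays silent under Jest unless LOG_LEVEL is set explicitly. */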
winston__namespace.format.json() : backendAppApi.WinstonLogger.colorFormat() ), transports: [ new winston__namespace.transports.Console({ silent: env.JEST_WORKER_ID !== void 0 && !env.LOG_LEVEL }) ] }, options ) ).child({ service: "backstage" }); setRootLogger(logger); return logger; } function getVoidLogger() { return winston__namespace.createLogger({ transports: [new winston__namespace.transports.Console({ silent: true })] }); } let rootLogger; function getRootLogger() { if (!rootLogger) { rootLogger = createRootLogger(); } return rootLogger; } function setRootLogger(newLogger) { rootLogger = newLogger; } class BackstageLoggerTransport extends Transport__default["default"] { constructor(backstageLogger, opts) { super(opts); this.backstageLogger = backstageLogger; } log(info, callback) { if (typeof info !== "object" || info === null) { callback(); return; } const { level, message, ...meta } = info; switch (level) { case "error": this.backstageLogger.error(String(message), meta); break; case "warn": this.backstageLogger.warn(String(message), meta); break; case "info": this.backstageLogger.info(String(message), meta); break; case "debug": this.backstageLogger.debug(String(message), meta); break; default: this.backstageLogger.info(String(message), meta); } callback(); } } function loggerToWinstonLogger(logger, opts) { return winston.createLogger({ transports: [new BackstageLoggerTransport(logger, opts)] }); } var __accessCheck = (obj, member, msg) => { if (!member.has(obj)) throw TypeError("Cannot " + msg); }; var __privateGet = (obj, member, getter) => { __accessCheck(obj, member, "read from private field"); return getter ? getter.call(obj) : member.get(obj); }; var __privateAdd = (obj, member, value) => { if (member.has(obj)) throw TypeError("Cannot add the same private member more than once"); member instanceof WeakSet ? member.add(obj) : member.set(obj, value); }; var __privateSet = (obj, member, value, setter) => { __accessCheck(obj, member, "write to private field"); setter ? setter.call(obj, value) : member.set(obj, value); return value; }; var _client, _clientFactory, _options; const _DefaultCacheClient = class _DefaultCacheClient { constructor(client, clientFactory, options) { __privateAdd(this, _client, void 0); __privateAdd(this, _clientFactory, void 0); __privateAdd(this, _options, void 0); __privateSet(this, _client, client); __privateSet(this, _clientFactory, clientFactory); __privateSet(this, _options, options); } async get(key) { const k = this.getNormalizedKey(key); const value = await __privateGet(this, _client).get(k); return value; } async set(key, value, opts = {}) { const k = this.getNormalizedKey(key); await __privateGet(this, _client).set(k, value, opts.ttl); } async delete(key) { const k = this.getNormalizedKey(key); await __privateGet(this, _client).delete(k); } withOptions(options) { const newOptions = { ...__privateGet(this, _options), ...options }; return new _DefaultCacheClient( __privateGet(this, _clientFactory).call(this, newOptions), __privateGet(this, _clientFactory), newOptions ); } /** * Ensures keys are well-formed for any/all cache stores. 
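* As implemented below, the candidate key is base64-encoded (e.g. 'my-key'
* becomes 'bXkta2V5'); once the encoded form reaches 200 characters it is
* replaced by an md5 digest instead, keeping keys short enough for
* length-limited stores such as memcached.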
*/ getNormalizedKey(candidateKey) { const wellFormedKey = Buffer.from(candidateKey).toString("base64"); if (wellFormedKey.length < 200) { return wellFormedKey; } return crypto.createHash("md5").update(candidateKey).digest("base64"); } }; _client = new WeakMap(); _clientFactory = new WeakMap(); _options = new WeakMap(); let DefaultCacheClient = _DefaultCacheClient; var __defProp$k = Object.defineProperty; var __defNormalProp$k = (obj, key, value) => key in obj ? __defProp$k(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$k = (obj, key, value) => { __defNormalProp$k(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; class CacheManager { constructor(store, connectionString, useRedisSets, logger, errorHandler, defaultTtl) { /** * Keys represent supported `backend.cache.store` values, mapped to factories * that return Keyv instances appropriate to the store. */ __publicField$k(this, "storeFactories", { redis: this.getRedisClient, memcache: this.getMemcacheClient, memory: this.getMemoryClient }); /** * Shared memory store for the in-memory cache client. Sharing the same Map * instance ensures get/set/delete operations hit the same store, regardless * of where/when a client is instantiated. */ __publicField$k(this, "memoryStore", /* @__PURE__ */ new Map()); __publicField$k(this, "logger"); __publicField$k(this, "store"); __publicField$k(this, "connection"); __publicField$k(this, "useRedisSets"); __publicField$k(this, "errorHandler"); __publicField$k(this, "defaultTtl"); if (!this.storeFactories.hasOwnProperty(store)) { throw new Error(`Unknown cache store: ${store}`); } this.logger = logger; this.store = store; this.connection = connectionString; this.useRedisSets = useRedisSets; this.errorHandler = errorHandler; this.defaultTtl = defaultTtl; } /** * Creates a new {@link CacheManager} instance by reading from the `backend` * config section, specifically the `.cache` key. * * @param config - The loaded application configuration. */ static fromConfig(config, options = {}) { var _a; const store = config.getOptionalString("backend.cache.store") || "memory"; const defaultTtl = config.getOptionalNumber("backend.cache.defaultTtl"); const connectionString = config.getOptionalString("backend.cache.connection") || ""; const useRedisSets = (_a = config.getOptionalBoolean("backend.cache.useRedisSets")) != null ? _a : true; const logger = (options.logger || getRootLogger()).child({ type: "cacheManager" }); return new CacheManager( store, connectionString, useRedisSets, logger, options.onError, defaultTtl ); } /** * Generates a PluginCacheManager for consumption by plugins. * * @param pluginId - The plugin that the cache manager should be created for. * Plugin names should be unique. */ forPlugin(pluginId) { return { getClient: (defaultOptions = {}) => { const clientFactory = (options) => { var _a; const concreteClient = this.getClientWithTtl( pluginId, (_a = options.defaultTtl) != null ? 
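/* Usage sketch (plugin id and TTL are hypothetical): `forPlugin` hands plugins
   a client factory bound to this manager's store configuration.

     const manager = CacheManager.fromConfig(config);
     const cache = manager.forPlugin('catalog').getClient({ defaultTtl: 60000 });
     await cache.set('key', 'value');    // expires after the TTL
     const hit = await cache.get('key'); // 'value', or undefined once expired

   The TTL is forwarded to Keyv as-is, which interprets it in milliseconds. */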
_a : this.defaultTtl ); concreteClient.on("error", (err) => { this.logger.error("Failed to create cache client", err); if (typeof this.errorHandler === "function") { this.errorHandler(err); } }); return concreteClient; }; return new DefaultCacheClient( clientFactory(defaultOptions), clientFactory, defaultOptions ); } }; } getClientWithTtl(pluginId, ttl) { return this.storeFactories[this.store].call(this, pluginId, ttl); } getRedisClient(pluginId, defaultTtl) { return new Keyv__default["default"]({ namespace: pluginId, ttl: defaultTtl, store: new KeyvRedis__default["default"](this.connection), useRedisSets: this.useRedisSets }); } getMemcacheClient(pluginId, defaultTtl) { return new Keyv__default["default"]({ namespace: pluginId, ttl: defaultTtl, store: new KeyvMemcache__default["default"](this.connection) }); } getMemoryClient(pluginId, defaultTtl) { return new Keyv__default["default"]({ namespace: pluginId, ttl: defaultTtl, store: this.memoryStore }); } } function cacheToPluginCacheManager(cache) { return { getClient: (opts) => cache.withOptions(opts) }; } function makeLegacyPlugin(envMapping, envTransforms) { return (name, createRouterImport) => { const compatPlugin = backendPluginApi.createBackendPlugin({ pluginId: name, register(env) { env.registerInit({ deps: { ...envMapping, _router: backendPluginApi.coreServices.httpRouter }, async init({ _router, ...envDeps }) { const { default: createRouter } = await createRouterImport; const pluginEnv = Object.fromEntries( Object.entries(envDeps).map(([key, dep]) => { const transform = envTransforms[key]; if (transform) { return [key, transform(dep)]; } return [key, dep]; }) ); const router = await createRouter( pluginEnv ); _router.use(router); } }); } }); return compatPlugin(); }; } const legacyPlugin = makeLegacyPlugin( { cache: backendPluginApi.coreServices.cache, config: backendPluginApi.coreServices.rootConfig, database: backendPluginApi.coreServices.database, discovery: backendPluginApi.coreServices.discovery, logger: backendPluginApi.coreServices.logger, permissions: backendPluginApi.coreServices.permissions, scheduler: backendPluginApi.coreServices.scheduler, tokenManager: backendPluginApi.coreServices.tokenManager, reader: backendPluginApi.coreServices.urlReader, identity: backendPluginApi.coreServices.identity }, { logger: (log) => loggerToWinstonLogger(log), cache: (cache) => cacheToPluginCacheManager(cache) } ); async function loadBackendConfig(options) { var _a; const secretEnumerator = await backendAppApi.createConfigSecretEnumerator({ logger: options.logger }); const { config } = await backendAppApi.loadBackendConfig(options); setRootLoggerRedactionList(secretEnumerator(config)); (_a = config.subscribe) == null ? 
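/* The guarded subscription below re-computes the secret list whenever the
   configuration reloads, so secrets added at runtime are also redacted from
   the root logger. Typical entry-point wiring (argv shown for illustration):

     const config = await loadBackendConfig({ logger, argv: process.argv });
 */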
void 0 : _a.call( config, () => setRootLoggerRedactionList(secretEnumerator(config)) ); return config; } function mergeDatabaseConfig(config, ...overrides) { return lodash.merge({}, config, ...overrides); } function defaultNameOverride(name) { return { connection: { database: name } }; } function createMysqlDatabaseClient(dbConfig, overrides) { const knexConfig = buildMysqlDatabaseConfig(dbConfig, overrides); const database = knexFactory__default["default"](knexConfig); return database; } function buildMysqlDatabaseConfig(dbConfig, overrides) { return mergeDatabaseConfig( dbConfig.get(), { connection: getMysqlConnectionConfig(dbConfig, !!overrides), useNullAsDefault: true }, overrides ); } function getMysqlConnectionConfig(dbConfig, parseConnectionString) { const connection = dbConfig.get("connection"); const isConnectionString = typeof connection === "string" || connection instanceof String; const autoParse = typeof parseConnectionString !== "boolean"; const shouldParseConnectionString = autoParse ? isConnectionString : parseConnectionString && isConnectionString; return shouldParseConnectionString ? parseMysqlConnectionString(connection) : connection; } function parseMysqlConnectionString(connectionString) { try { const { protocol, username, password, port, hostname, pathname, searchParams } = new URL(connectionString); if (protocol !== "mysql:") { throw new Error(`Unknown protocol ${protocol}`); } else if (!username || !password) { throw new Error(`Missing username/password`); } else if (!pathname.match(/^\/[^/]+$/)) { throw new Error(`Expected single path segment`); } const result = { user: username, password, host: hostname, port: Number(port || 3306), database: decodeURIComponent(pathname.substring(1)) }; const ssl = searchParams.get("ssl"); if (ssl) { result.ssl = ssl; } const debug = searchParams.get("debug"); if (debug) { result.debug = yn__default["default"](debug); } return result; } catch (e) { throw new errors.InputError( `Error while parsing MySQL connection string, ${e}`, e ); } } async function ensureMysqlDatabaseExists(dbConfig, ...databases) { const admin = createMysqlDatabaseClient(dbConfig, { connection: { database: null }, pool: { min: 0, acquireTimeoutMillis: 1e4 } }); try { const ensureDatabase = async (database) => { await admin.raw(`CREATE DATABASE IF NOT EXISTS ??`, [database]); }; await Promise.all( databases.map(async (database) => { let lastErr = void 0; for (let i = 0; i < 3; i++) { try { return await ensureDatabase(database); } catch (err) { lastErr = err; } await new Promise((resolve) => setTimeout(resolve, 100)); } throw lastErr; }) ); } finally { await admin.destroy(); } } const mysqlConnector = Object.freeze({ createClient: createMysqlDatabaseClient, ensureDatabaseExists: ensureMysqlDatabaseExists, createNameOverride: defaultNameOverride, parseConnectionString: parseMysqlConnectionString }); function defaultSchemaOverride(name) { return { searchPath: [name] }; } function createPgDatabaseClient(dbConfig, overrides) { const knexConfig = buildPgDatabaseConfig(dbConfig, overrides); const database = knexFactory__default["default"](knexConfig); const role = dbConfig.getOptionalString("role"); if (role) { database.client.pool.on( "createSuccess", async (_event, pgClient) => { await pgClient.query(`SET ROLE ${role}`); } ); } return database; } function buildPgDatabaseConfig(dbConfig, overrides) { return mergeDatabaseConfig( dbConfig.get(), { connection: getPgConnectionConfig(dbConfig, !!overrides), useNullAsDefault: true }, overrides ); } function 
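/* Connection strings accepted by `parseMysqlConnectionString` above must carry
   credentials and exactly one path segment, for example (made-up credentials):

     mysql://user:pass@localhost:3306/catalog?debug=true

   yielding { user: 'user', password: 'pass', host: 'localhost', port: 3306,
   database: 'catalog', debug: true }; anything else is rejected with an
   InputError. */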
getPgConnectionConfig(dbConfig, parseConnectionString) { const connection = dbConfig.get("connection"); const isConnectionString = typeof connection === "string" || connection instanceof String; const autoParse = typeof parseConnectionString !== "boolean"; const shouldParseConnectionString = autoParse ? isConnectionString : parseConnectionString && isConnectionString; return shouldParseConnectionString ? parsePgConnectionString(connection) : connection; } function parsePgConnectionString(connectionString) { const parse = requirePgConnectionString(); return parse(connectionString); } function requirePgConnectionString() { try { return require("pg-connection-string").parse; } catch (e) { throw new errors.ForwardedError("Postgres: Install 'pg-connection-string'", e); } } async function ensurePgDatabaseExists(dbConfig, ...databases) { const admin = createPgDatabaseClient(dbConfig, { connection: { database: "postgres" }, pool: { min: 0, acquireTimeoutMillis: 1e4 } }); try { const ensureDatabase = async (database) => { const result = await admin.from("pg_database").where("datname", database).count(); if (parseInt(result[0].count, 10) > 0) { return; } await admin.raw(`CREATE DATABASE ??`, [database]); }; await Promise.all( databases.map(async (database) => { let lastErr = void 0; for (let i = 0; i < 3; i++) { try { return await ensureDatabase(database); } catch (err) { lastErr = err; } await new Promise((resolve) => setTimeout(resolve, 100)); } throw lastErr; }) ); } finally { await admin.destroy(); } } async function ensurePgSchemaExists(dbConfig, ...schemas) { const admin = createPgDatabaseClient(dbConfig); const role = dbConfig.getOptionalString("role"); try { const ensureSchema = async (database) => { if (role) { await admin.raw(`CREATE SCHEMA IF NOT EXISTS ?? AUTHORIZATION ??`, [ database, role ]); } else { await admin.raw(`CREATE SCHEMA IF NOT EXISTS ??`, [database]); } }; await Promise.all(schemas.map(ensureSchema)); } finally { await admin.destroy(); } } const pgConnector = Object.freeze({ createClient: createPgDatabaseClient, ensureDatabaseExists: ensurePgDatabaseExists, ensureSchemaExists: ensurePgSchemaExists, createNameOverride: defaultNameOverride, createSchemaOverride: defaultSchemaOverride, parseConnectionString: parsePgConnectionString }); function createSqliteDatabaseClient(dbConfig, overrides, deps) { var _a; const knexConfig = buildSqliteDatabaseConfig(dbConfig, overrides); const connConfig = knexConfig.connection; const filename = (_a = connConfig.filename) != null ? _a : ":memory:"; if (filename !== ":memory:") { const directory = platformPath__default["default"].dirname(filename); fs.ensureDirSync(directory); } let database; if (deps && filename === ":memory:") { const devStore = backendDevUtils.DevDataStore.get(); if (devStore) { const dataKey = `sqlite3-db-${deps.pluginMetadata.getId()}`; const connectionLoader = async () => { const { data: seedData } = await devStore.load(dataKey); return { ...knexConfig.connection, filename: seedData != null ? 
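/* Dev-mode persistence (the surrounding branch): when a DevDataStore is
   active, the connection loader here seeds an in-memory sqlite database from
   the last saved snapshot, and the shutdown hook below serializes it back, so
   data survives backend restarts during local development. */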
seedData : ":memory:" }; }; database = knexFactory__default["default"]({ ...knexConfig, connection: Object.assign(connectionLoader, { // This is a workaround for the knex SQLite driver always warning when using a config loader filename: ":memory:" }) }); deps.lifecycle.addShutdownHook(async () => { const connection = await database.client.acquireConnection(); const data = connection.serialize(); await devStore.save(dataKey, data); }); } else { database = knexFactory__default["default"](knexConfig); } } else { database = knexFactory__default["default"](knexConfig); } database.client.pool.on("createSuccess", (_eventId, resource) => { resource.run("PRAGMA foreign_keys = ON", () => { }); }); return database; } function buildSqliteDatabaseConfig(dbConfig, overrides) { const baseConfig = dbConfig.get(); if (typeof baseConfig.connection === "string") { baseConfig.connection = { filename: baseConfig.connection }; } if (overrides && typeof overrides.connection === "string") { overrides.connection = { filename: overrides.connection }; } const config = mergeDatabaseConfig( { connection: {} }, baseConfig, { useNullAsDefault: true }, overrides ); return config; } function createSqliteNameOverride(name) { return { connection: parseSqliteConnectionString(name) }; } function parseSqliteConnectionString(name) { return { filename: name }; } const sqlite3Connector = Object.freeze({ createClient: createSqliteDatabaseClient, createNameOverride: createSqliteNameOverride, parseConnectionString: parseSqliteConnectionString }); const ddlLimiter = limiterFactory__default["default"](1); const ConnectorMapping = { pg: pgConnector, "better-sqlite3": sqlite3Connector, sqlite3: sqlite3Connector, mysql: mysqlConnector, mysql2: mysqlConnector }; function createDatabaseClient(dbConfig, overrides, deps) { var _a, _b; const client = dbConfig.getString("client"); return (_b = (_a = ConnectorMapping[client]) == null ? void 0 : _a.createClient(dbConfig, overrides, deps)) != null ? _b : knexFactory__default["default"](mergeDatabaseConfig(dbConfig.get(), overrides)); } async function ensureDatabaseExists(dbConfig, ...databases) { const client = dbConfig.getString("client"); return await ddlLimiter( () => { var _a, _b; return (_b = (_a = ConnectorMapping[client]) == null ? void 0 : _a.ensureDatabaseExists) == null ? void 0 : _b.call(_a, dbConfig, ...databases); } ); } async function ensureSchemaExists(dbConfig, ...schemas) { const client = dbConfig.getString("client"); return await ddlLimiter( () => { var _a, _b; return (_b = (_a = ConnectorMapping[client]) == null ? void 0 : _a.ensureSchemaExists) == null ? void 0 : _b.call(_a, dbConfig, ...schemas); } ); } function createNameOverride(client, name) { try { return ConnectorMapping[client].createNameOverride(name); } catch (e) { throw new errors.InputError( `Unable to create database name override for '${client}' connector`, e ); } } function createSchemaOverride(client, name) { var _a, _b; try { return (_b = (_a = ConnectorMapping[client]) == null ? void 0 : _a.createSchemaOverride) == null ? void 0 : _b.call(_a, name); } catch (e) { throw new errors.InputError( `Unable to create database schema override for '${client}' connector`, e ); } } function parseConnectionString(connectionString, client) { if (typeof client === "undefined" || client === null) { throw new errors.InputError( "Database connection string client type auto-detection is not yet supported." 
); } try { return ConnectorMapping[client].parseConnectionString(connectionString); } catch (e) { throw new errors.InputError( `Unable to parse connection string for '${client}' connector` ); } } function normalizeConnection(connection, client) { if (typeof connection === "undefined" || connection === null) { return {}; } return typeof connection === "string" || connection instanceof String ? parseConnectionString(connection, client) : connection; } function pluginPath(pluginId) { return `plugin.${pluginId}`; } class DatabaseManager { constructor(config, prefix = "backstage_plugin_", options, databaseCache = /* @__PURE__ */ new Map()) { this.config = config; this.prefix = prefix; this.options = options; this.databaseCache = databaseCache; } /** * Creates a {@link DatabaseManager} from `backend.database` config. * * @param config - The loaded application configuration. * @param options - An optional configuration object. */ static fromConfig(config, options) { const databaseConfig = config.getConfig("backend.database"); return new DatabaseManager( databaseConfig, databaseConfig.getOptionalString("prefix"), options ); } /** * Generates a PluginDatabaseManager for consumption by plugins. * * @param pluginId - The plugin that the database manager should be created for. Plugin names * should be unique as they are used to look up database config overrides under * `backend.database.plugin`. */ forPlugin(pluginId, deps) { var _a; const getClient = () => this.getDatabase(pluginId, deps); const migrations = { skip: false, ...(_a = this.options) == null ? void 0 : _a.migrations }; return { getClient, migrations }; } /** * Provides the canonical database name for a given plugin. * * This method provides the effective database name which is determined using global * and plugin specific database config. If no explicit database name is configured * and `pluginDivisionMode` is not `schema`, this method will provide a generated name * which is the pluginId prefixed with 'backstage_plugin_'. If `pluginDivisionMode` is * `schema`, it will fall back to using the default database for the knex instance. * * @param pluginId - The plugin to look up the database name for * @returns String representing the plugin's database name */ getDatabaseName(pluginId) { var _a; const connection = this.getConnectionConfig(pluginId); if (this.getClientType(pluginId).client.includes("sqlite3")) { const sqliteFilename = connection.filename; if (sqliteFilename === ":memory:") { return sqliteFilename; } const sqliteDirectory = (_a = connection.directory) != null ? _a : "."; return platformPath__default["default"].join(sqliteDirectory, sqliteFilename != null ? sqliteFilename : `${pluginId}.sqlite`); } const databaseName = connection == null ? void 0 : connection.database; if (this.getPluginDivisionModeConfig() === "schema") { return databaseName; } return databaseName != null ? databaseName : `${this.prefix}${pluginId}`; } /** * Provides the client type which should be used for a given plugin. * * The client type is determined by plugin specific config if present. * Otherwise the base client is used as the fallback. * * @param pluginId - Plugin to get the client type for * @returns Object with client type returned as `client` and boolean * representing whether or not the client was overridden as * `overridden` */ getClientType(pluginId) { const pluginClient = this.config.getOptionalString( `${pluginPath(pluginId)}.client` ); const baseClient = this.config.getString("client"); const client = pluginClient != null ?
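/* Usage sketch (plugin id is hypothetical): each plugin gets a lazily created,
   cached Knex instance.

     const manager = DatabaseManager.fromConfig(config, { logger });
     const knex = await manager.forPlugin('catalog').getClient();
     await knex.raw('select 1');

   With the default `pluginDivisionMode: database`, that client targets a
   database named backstage_plugin_catalog unless explicitly overridden. */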
pluginClient : baseClient; return { client, overridden: client !== baseClient }; } getRoleConfig(pluginId) { var _a; return (_a = this.config.getOptionalString(`${pluginPath(pluginId)}.role`)) != null ? _a : this.config.getOptionalString("role"); } /** * Provides the knexConfig which should be used for a given plugin. * * @param pluginId - Plugin to get the knexConfig for * @returns The merged knexConfig value or undefined if it isn't specified */ getAdditionalKnexConfig(pluginId) { var _a, _b; const pluginConfig = (_a = this.config.getOptionalConfig(`${pluginPath(pluginId)}.knexConfig`)) == null ? void 0 : _a.get(); const baseConfig = (_b = this.config.getOptionalConfig("knexConfig")) == null ? void 0 : _b.get(); return lodash.merge(baseConfig, pluginConfig); } getEnsureExistsConfig(pluginId) { var _a, _b; const baseConfig = (_a = this.config.getOptionalBoolean("ensureExists")) != null ? _a : true; return (_b = this.config.getOptionalBoolean(`${pluginPath(pluginId)}.ensureExists`)) != null ? _b : baseConfig; } getPluginDivisionModeConfig() { var _a; return (_a = this.config.getOptionalString("pluginDivisionMode")) != null ? _a : "database"; } /** * Provides a Knex connection plugin config by combining base and plugin * config. * * This method provides a baseConfig for a plugin database connector. If the * client type has not been overridden, the global connection config will be * included with plugin specific config as the base. Values from the plugin * connection take precedence over the base. Base database name is omitted for * all supported databases excluding SQLite unless `pluginDivisionMode` is set * to `schema`. */ getConnectionConfig(pluginId) { const { client, overridden } = this.getClientType(pluginId); let baseConnection = normalizeConnection( this.config.get("connection"), this.config.getString("client") ); if (client.includes("sqlite3") && "filename" in baseConnection && baseConnection.filename !== ":memory:") { throw new Error( "`connection.filename` is not supported for the base sqlite connection. Prefer `connection.directory` or provide a filename for the plugin connection instead." ); } if (this.getPluginDivisionModeConfig() !== "schema") { baseConnection = lodash.omit(baseConnection, "database"); } const connection = normalizeConnection( this.config.getOptional(`${pluginPath(pluginId)}.connection`), client ); if (client === "pg") { baseConnection.application_name || (baseConnection.application_name = `backstage_plugin_${pluginId}`); } return { // include base connection if client type has not been overridden ...overridden ? {} : baseConnection, ...connection }; } /** * Provides a Knex database config for a given plugin. * * This method provides a Knex configuration object along with the plugin's * client type. * * @param pluginId - The plugin that the database config should correspond with */ getConfigForPlugin(pluginId) { const { client } = this.getClientType(pluginId); const role = this.getRoleConfig(pluginId); return { ...this.getAdditionalKnexConfig(pluginId), client, connection: this.getConnectionConfig(pluginId), ...role && { role } }; } /** * Provides a partial `Knex.Config` database schema override for a given * plugin. * * @param pluginId - Target plugin to get database schema override * @returns Partial `Knex.Config` with database schema override */ getSchemaOverrides(pluginId) { return createSchemaOverride(this.getClientType(pluginId).client, pluginId); } /** * Provides a partial `Knex.Config` database name override for a given plugin.
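* For a `pg` client and a hypothetical plugin id `catalog`, the override is
* `{ connection: { database: 'backstage_plugin_catalog' } }` under the default
* prefix; sqlite clients receive a `connection.filename` override instead.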
* * @param pluginId - Target plugin to get database name override * @returns Partial `Knex.Config` with database name override */ getDatabaseOverrides(pluginId) { const databaseName = this.getDatabaseName(pluginId); return databaseName ? createNameOverride(this.getClientType(pluginId).client, databaseName) : {}; } /** * Provides a scoped Knex client for a plugin as per application config. * * @param pluginId - Plugin to get a Knex client for * @returns Promise which resolves to a scoped Knex database client for a * plugin */ async getDatabase(pluginId, deps) { if (this.databaseCache.has(pluginId)) { return this.databaseCache.get(pluginId); } const clientPromise = Promise.resolve().then(async () => { const pluginConfig = new config.ConfigReader( this.getConfigForPlugin(pluginId) ); const databaseName = this.getDatabaseName(pluginId); if (databaseName && this.getEnsureExistsConfig(pluginId)) { try { await ensureDatabaseExists(pluginConfig, databaseName); } catch (error) { throw new Error( `Failed to connect to the database to make sure that '${databaseName}' exists, ${error}` ); } } let schemaOverrides; if (this.getPluginDivisionModeConfig() === "schema") { schemaOverrides = this.getSchemaOverrides(pluginId); if (this.getEnsureExistsConfig(pluginId)) { try { await ensureSchemaExists(pluginConfig, pluginId); } catch (error) { throw new Error( `Failed to connect to the database to make sure that schema for plugin '${pluginId}' exists, ${error}` ); } } } const databaseClientOverrides = mergeDatabaseConfig( {}, this.getDatabaseOverrides(pluginId), schemaOverrides ); const client = createDatabaseClient( pluginConfig, databaseClientOverrides, deps ); if (process.env.NODE_ENV !== "test") { this.startKeepaliveLoop(pluginId, client); } return client; }); this.databaseCache.set(pluginId, clientPromise); return clientPromise; } startKeepaliveLoop(pluginId, client) { let lastKeepaliveFailed = false; setInterval(() => { client == null ? void 0 : client.raw("select 1").then( () => { lastKeepaliveFailed = false; }, (error) => { var _a, _b; if (!lastKeepaliveFailed) { lastKeepaliveFailed = true; (_b = (_a = this.options) == null ? void 0 : _a.logger) == null ? void 0 : _b.warn( `Database keepalive failed for plugin ${pluginId}, ${errors.stringifyError( error )}` ); } } ); }, 60 * 1e3); } } function isDatabaseConflictError(e) { const message = e == null ? void 0 : e.message; return typeof message === "string" && (/SQLITE_CONSTRAINT(?:_UNIQUE)?: UNIQUE/.test(message) || /UNIQUE constraint failed:/.test(message) || /unique constraint/.test(message) || /Duplicate entry/.test(message)); } const HostDiscovery = backendAppApi.HostDiscovery; const SingleHostDiscovery = backendAppApi.HostDiscovery; function findAllAncestors(_module) { const ancestors = new Array(); const parentIds = /* @__PURE__ */ new Set(); function add(id, m) { if (parentIds.has(id)) { return; } parentIds.add(id); ancestors.push(m); for (const parentId of m.parents) { const parent = require.cache[parentId]; if (parent) { add(parentId, parent); } } } add(_module.id, _module); return ancestors; } function useHotCleanup(_module, cancelEffect) { var _a; if (_module.hot) { const ancestors = findAllAncestors(_module); let cancelled = false; const handler = () => { if (!cancelled) { cancelled = true; cancelEffect(); } }; for (const m of ancestors) { (_a = m.hot) == null ? 
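/* Usage sketch for the HMR helpers (handler bodies are illustrative):

     const server = await startServer();
     useHotCleanup(module, () => server.close()); // runs once on hot-replacement

   `useHotMemoize` below is the counterpart for values, carrying them across
   reloads instead of recreating them:

     const db = useHotMemoize(module, () => createDatabase());
 */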
void 0 : _a.addDisposeHandler(handler); } } } const CURRENT_HOT_MEMOIZE_INDEX_KEY = "backstage.io/hmr-memoize-key"; function useHotMemoize(_module, valueFactory) { var _a, _b, _c; if (!_module.hot) { return valueFactory(); } if (!((_a = _module.hot.data) == null ? void 0 : _a[CURRENT_HOT_MEMOIZE_INDEX_KEY])) { for (const ancestor of findAllAncestors(_module)) { (_b = ancestor.hot) == null ? void 0 : _b.addDisposeHandler((data) => { data[CURRENT_HOT_MEMOIZE_INDEX_KEY] = 1; }); } _module.hot.data = { ..._module.hot.data, [CURRENT_HOT_MEMOIZE_INDEX_KEY]: 1 }; } const index = _module.hot.data[CURRENT_HOT_MEMOIZE_INDEX_KEY]++; const value = (_c = _module.hot.data[index]) != null ? _c : valueFactory(); _module.hot.addDisposeHandler((data) => { data[index] = value; }); return value; } function errorHandler(options = {}) { var _a; return backendAppApi.MiddlewareFactory.create({ config: new config.ConfigReader({}), logger: (_a = options.logger) != null ? _a : getRootLogger() }).error({ logAllErrors: options.logClientErrors, showStackTraces: options.showStackTraces }); } function notFoundHandler() { return backendAppApi.MiddlewareFactory.create({ config: new config.ConfigReader({}), logger: getRootLogger() }).notFound(); } function requestLoggingHandler(logger) { return backendAppApi.MiddlewareFactory.create({ config: new config.ConfigReader({}), logger: logger != null ? logger : getRootLogger() }).logging(); } async function statusCheckHandler(options = {}) { const statusCheck = options.statusCheck ? options.statusCheck : () => Promise.resolve({ status: "ok" }); return async (_request, response, next) => { try { const status = await statusCheck(); response.status(200).json(status); } catch (err) { next(err); } }; } class ReadUrlResponseFactory { /** * Resolves a ReadUrlResponse from a Readable stream. */ static async fromReadable(stream, options) { let buffer; const conflictError = new errors.ConflictError( "Cannot use buffer() and stream() from the same ReadUrlResponse" ); let hasCalledStream = false; let hasCalledBuffer = false; return { buffer: () => { hasCalledBuffer = true; if (hasCalledStream) throw conflictError; if (buffer) return buffer; buffer = getRawBody__default["default"](stream); return buffer; }, stream: () => { hasCalledStream = true; if (hasCalledBuffer) throw conflictError; return stream; }, etag: options == null ? void 0 : options.etag, lastModifiedAt: options == null ? void 0 : options.lastModifiedAt }; } /** * Resolves a ReadUrlResponse from an old-style NodeJS.ReadableStream. */ static async fromNodeJSReadable(oldStyleStream, options) { const readable = stream.Readable.from(oldStyleStream); return ReadUrlResponseFactory.fromReadable(readable, options); } } var __defProp$j = Object.defineProperty; var __defNormalProp$j = (obj, key, value) => key in obj ? __defProp$j(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$j = (obj, key, value) => { __defNormalProp$j(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const _AzureUrlReader = class _AzureUrlReader { constructor(integration, deps) { this.integration = integration; this.deps = deps; } async read(url) { const response = await this.readUrl(url); return response.buffer(); } async readUrl(url, options) { const { signal } = options != null ? 
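/* ReadUrlResponseFactory above enforces single consumption of the body. Sketch:

     const res = await ReadUrlResponseFactory.fromReadable(readable);
     const buf = await res.buffer();
     res.stream(); // throws ConflictError; buffer() and stream() are exclusive
 */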
options : {}; const builtUrl = integration.getAzureFileFetchUrl(url); let response; try { const credentials = await this.deps.credentialsProvider.getCredentials({ url: builtUrl }); response = await fetch__default["default"](builtUrl, { headers: credentials == null ? void 0 : credentials.headers, // TODO(freben): The signal cast is there because pre-3.x versions of // node-fetch have a very slightly deviating AbortSignal type signature. // The difference does not affect us in practice however. The cast can // be removed after we support ESM for CLI dependencies and migrate to // version 3 of node-fetch. // https://github.com/backstage/backstage/issues/8242 ...signal && { signal } }); } catch (e) { throw new Error(`Unable to read ${url}, ${e}`); } if (response.ok && response.status !== 203) { return ReadUrlResponseFactory.fromNodeJSReadable(response.body); } const message = `${url} could not be read as ${builtUrl}, ${response.status} ${response.statusText}`; if (response.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } async readTree(url, options) { const { etag, filter, signal } = options != null ? options : {}; const credentials = await this.deps.credentialsProvider.getCredentials({ url }); const commitsAzureResponse = await fetch__default["default"](integration.getAzureCommitsUrl(url), { headers: credentials == null ? void 0 : credentials.headers }); if (!commitsAzureResponse.ok) { const message = `Failed to read tree from ${url}, ${commitsAzureResponse.status} ${commitsAzureResponse.statusText}`; if (commitsAzureResponse.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } const commitSha = (await commitsAzureResponse.json()).value[0].commitId; if (etag && etag === commitSha) { throw new errors.NotModifiedError(); } const archiveAzureResponse = await fetch__default["default"](integration.getAzureDownloadUrl(url), { headers: { ...credentials == null ? void 0 : credentials.headers, Accept: "application/zip" }, // TODO(freben): The signal cast is there because pre-3.x versions of // node-fetch have a very slightly deviating AbortSignal type signature. // The difference does not affect us in practice however. The cast can be // removed after we support ESM for CLI dependencies and migrate to // version 3 of node-fetch. // https://github.com/backstage/backstage/issues/8242 ...signal && { signal } }); if (!archiveAzureResponse.ok) { const message = `Failed to read tree from ${url}, ${archiveAzureResponse.status} ${archiveAzureResponse.statusText}`; if (archiveAzureResponse.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } let subpath; const path = new URL(url).searchParams.get("path"); if (path) { subpath = path.split("/").filter(Boolean).slice(-1)[0]; } return await this.deps.treeResponseFactory.fromZipArchive({ stream: stream.Readable.from(archiveAzureResponse.body), etag: commitSha, filter, subpath }); } async search(url, options) { const treeUrl = new URL(url); const path = treeUrl.searchParams.get("path"); const matcher = path && new minimatch.Minimatch(path.replace(/^\/+/, "")); treeUrl.searchParams.delete("path"); const tree = await this.readTree(treeUrl.toString(), { etag: options == null ? void 0 : options.etag, signal: options == null ? void 0 : options.signal, filter: (p) => matcher ? 
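/* Search sketch (hypothetical URL): the `path` query parameter is used as a
   minimatch glob against file paths in the downloaded tree, e.g.

     reader.search('https://dev.azure.com/org/project/_git/repo?path=/*.yaml')

   matches every YAML file at the repository root. */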
matcher.match(p) : true }); const files = await tree.files(); return { etag: tree.etag, files: files.map((file) => ({ url: this.integration.resolveUrl({ url: `/${file.path}`, base: url }), content: file.content, lastModifiedAt: file.lastModifiedAt })) }; } toString() { const { host, credentials } = this.integration.config; return `azure{host=${host},authed=${Boolean( credentials !== void 0 && credentials.length > 0 )}}`; } }; __publicField$j(_AzureUrlReader, "factory", ({ config, treeResponseFactory }) => { const integrations = integration.ScmIntegrations.fromConfig(config); const credentialProvider = integration.DefaultAzureDevOpsCredentialsProvider.fromIntegrations(integrations); return integrations.azure.list().map((integration) => { const reader = new _AzureUrlReader(integration, { treeResponseFactory, credentialsProvider: credentialProvider }); const predicate = (url) => url.host === integration.config.host; return { reader, predicate }; }); }); let AzureUrlReader = _AzureUrlReader; function parseLastModified(value) { if (!value) { return void 0; } return new Date(value); } var __defProp$i = Object.defineProperty; var __defNormalProp$i = (obj, key, value) => key in obj ? __defProp$i(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$i = (obj, key, value) => { __defNormalProp$i(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const _BitbucketCloudUrlReader = class _BitbucketCloudUrlReader { constructor(integration, deps) { this.integration = integration; this.deps = deps; const { host, username, appPassword } = integration.config; if (username && !appPassword) { throw new Error( `Bitbucket Cloud integration for '${host}' has configured a username but is missing a required appPassword.` ); } } async read(url) { const response = await this.readUrl(url); return response.buffer(); } async readUrl(url, options) { var _a; const { etag, lastModifiedAfter, signal } = options != null ? options : {}; const bitbucketUrl = integration.getBitbucketCloudFileFetchUrl( url, this.integration.config ); const requestOptions = integration.getBitbucketCloudRequestOptions( this.integration.config ); let response; try { response = await fetch__default["default"](bitbucketUrl.toString(), { headers: { ...requestOptions.headers, ...etag && { "If-None-Match": etag }, ...lastModifiedAfter && { "If-Modified-Since": lastModifiedAfter.toUTCString() } }, // TODO(freben): The signal cast is there because pre-3.x versions of // node-fetch have a very slightly deviating AbortSignal type signature. // The difference does not affect us in practice however. The cast can be // removed after we support ESM for CLI dependencies and migrate to // version 3 of node-fetch. // https://github.com/backstage/backstage/issues/8242 ...signal && { signal } }); } catch (e) { throw new Error(`Unable to read ${url}, ${e}`); } if (response.status === 304) { throw new errors.NotModifiedError(); } if (response.ok) { return ReadUrlResponseFactory.fromNodeJSReadable(response.body, { etag: (_a = response.headers.get("ETag")) != null ? 
_a : void 0, lastModifiedAt: parseLastModified( response.headers.get("Last-Modified") ) }); } const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`; if (response.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } async readTree(url, options) { const { filepath } = parseGitUrl__default["default"](url); const lastCommitShortHash = await this.getLastCommitShortHash(url); if ((options == null ? void 0 : options.etag) && options.etag === lastCommitShortHash) { throw new errors.NotModifiedError(); } const downloadUrl = await integration.getBitbucketCloudDownloadUrl( url, this.integration.config ); const archiveResponse = await fetch__default["default"]( downloadUrl, integration.getBitbucketCloudRequestOptions(this.integration.config) ); if (!archiveResponse.ok) { const message = `Failed to read tree from ${url}, ${archiveResponse.status} ${archiveResponse.statusText}`; if (archiveResponse.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } return await this.deps.treeResponseFactory.fromTarArchive({ stream: stream.Readable.from(archiveResponse.body), subpath: filepath, etag: lastCommitShortHash, filter: options == null ? void 0 : options.filter }); } async search(url, options) { const { filepath } = parseGitUrl__default["default"](url); const matcher = new minimatch.Minimatch(filepath); const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/"); const tree = await this.readTree(treeUrl, { etag: options == null ? void 0 : options.etag, filter: (path) => matcher.match(path) }); const files = await tree.files(); return { etag: tree.etag, files: files.map((file) => ({ url: this.integration.resolveUrl({ url: `/${file.path}`, base: url }), content: file.content, lastModifiedAt: file.lastModifiedAt })) }; } toString() { const { host, username, appPassword } = this.integration.config; const authed = Boolean(username && appPassword); return `bitbucketCloud{host=${host},authed=${authed}}`; } async getLastCommitShortHash(url) { const { name: repoName, owner: project, ref } = parseGitUrl__default["default"](url); let branch = ref; if (!branch) { branch = await integration.getBitbucketCloudDefaultBranch( url, this.integration.config ); } const commitsApiUrl = `${this.integration.config.apiBaseUrl}/repositories/${project}/${repoName}/commits/${branch}`; const commitsResponse = await fetch__default["default"]( commitsApiUrl, integration.getBitbucketCloudRequestOptions(this.integration.config) ); if (!commitsResponse.ok) { const message = `Failed to retrieve commits from ${commitsApiUrl}, ${commitsResponse.status} ${commitsResponse.statusText}`; if (commitsResponse.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } const commits = await commitsResponse.json(); if (commits && commits.values && commits.values.length > 0 && commits.values[0].hash) { return commits.values[0].hash.substring(0, 12); } throw new Error(`Failed to read response from ${commitsApiUrl}`); } }; __publicField$i(_BitbucketCloudUrlReader, "factory", ({ config, treeResponseFactory }) => { const integrations = integration.ScmIntegrations.fromConfig(config); return integrations.bitbucketCloud.list().map((integration) => { const reader = new _BitbucketCloudUrlReader(integration, { treeResponseFactory }); const predicate = (url) => url.host === integration.config.host; return { reader, predicate }; }); }); let BitbucketCloudUrlReader = _BitbucketCloudUrlReader; var __defProp$h = 
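/* The URL readers in this file follow a shared etag convention, sketched here
   with hypothetical values: pass the etag of a previously seen response, and
   catch NotModifiedError to keep using a cached copy.

     try {
       const res = await reader.readUrl(url, { etag: 'abc123' });
       // fresh content; remember res.etag for the next call
     } catch (e) {
       if (e.name === 'NotModifiedError') {
         // serve the previously cached copy
       }
     }
 */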
Object.defineProperty; var __defNormalProp$h = (obj, key, value) => key in obj ? __defProp$h(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$h = (obj, key, value) => { __defNormalProp$h(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const _BitbucketUrlReader = class _BitbucketUrlReader { constructor(integration, logger, deps) { this.integration = integration; this.deps = deps; const { host, token, username, appPassword } = integration.config; const replacement = host === "bitbucket.org" ? "bitbucketCloud" : "bitbucketServer"; logger.warn( `[Deprecated] Please migrate from "integrations.bitbucket" to "integrations.${replacement}".` ); if (!token && username && !appPassword) { throw new Error( `Bitbucket integration for '${host}' has configured a username but is missing a required appPassword.` ); } } async read(url) { const response = await this.readUrl(url); return response.buffer(); } async readUrl(url, options) { var _a; const { etag, lastModifiedAfter, signal } = options != null ? options : {}; const bitbucketUrl = integration.getBitbucketFileFetchUrl(url, this.integration.config); const requestOptions = integration.getBitbucketRequestOptions(this.integration.config); let response; try { response = await fetch__default["default"](bitbucketUrl.toString(), { headers: { ...requestOptions.headers, ...etag && { "If-None-Match": etag }, ...lastModifiedAfter && { "If-Modified-Since": lastModifiedAfter.toUTCString() } }, // TODO(freben): The signal cast is there because pre-3.x versions of // node-fetch have a very slightly deviating AbortSignal type signature. // The difference does not affect us in practice however. The cast can be // removed after we support ESM for CLI dependencies and migrate to // version 3 of node-fetch. // https://github.com/backstage/backstage/issues/8242 ...signal && { signal } }); } catch (e) { throw new Error(`Unable to read ${url}, ${e}`); } if (response.status === 304) { throw new errors.NotModifiedError(); } if (response.ok) { return ReadUrlResponseFactory.fromNodeJSReadable(response.body, { etag: (_a = response.headers.get("ETag")) != null ? _a : void 0, lastModifiedAt: parseLastModified( response.headers.get("Last-Modified") ) }); } const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`; if (response.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } async readTree(url, options) { const { filepath } = parseGitUrl__default["default"](url); const lastCommitShortHash = await this.getLastCommitShortHash(url); if ((options == null ? void 0 : options.etag) && options.etag === lastCommitShortHash) { throw new errors.NotModifiedError(); } const downloadUrl = await integration.getBitbucketDownloadUrl( url, this.integration.config ); const archiveBitbucketResponse = await fetch__default["default"]( downloadUrl, integration.getBitbucketRequestOptions(this.integration.config) ); if (!archiveBitbucketResponse.ok) { const message = `Failed to read tree from ${url}, ${archiveBitbucketResponse.status} ${archiveBitbucketResponse.statusText}`; if (archiveBitbucketResponse.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } return await this.deps.treeResponseFactory.fromTarArchive({ stream: stream.Readable.from(archiveBitbucketResponse.body), subpath: filepath, etag: lastCommitShortHash, filter: options == null ? 
void 0 : options.filter }); } async search(url, options) { const { filepath } = parseGitUrl__default["default"](url); const matcher = new minimatch.Minimatch(filepath); const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/"); const tree = await this.readTree(treeUrl, { etag: options == null ? void 0 : options.etag, filter: (path) => matcher.match(path) }); const files = await tree.files(); return { etag: tree.etag, files: files.map((file) => ({ url: this.integration.resolveUrl({ url: `/${file.path}`, base: url }), content: file.content, lastModifiedAt: file.lastModifiedAt })) }; } toString() { const { host, token, username, appPassword } = this.integration.config; let authed = Boolean(token); if (!authed) { authed = Boolean(username && appPassword); } return `bitbucket{host=${host},authed=${authed}}`; } async getLastCommitShortHash(url) { const { resource, name: repoName, owner: project, ref } = parseGitUrl__default["default"](url); let branch = ref; if (!branch) { branch = await integration.getBitbucketDefaultBranch(url, this.integration.config); } const isHosted = resource === "bitbucket.org"; const commitsApiUrl = isHosted ? `${this.integration.config.apiBaseUrl}/repositories/${project}/${repoName}/commits/${branch}` : `${this.integration.config.apiBaseUrl}/projects/${project}/repos/${repoName}/commits`; const commitsResponse = await fetch__default["default"]( commitsApiUrl, integration.getBitbucketRequestOptions(this.integration.config) ); if (!commitsResponse.ok) { const message = `Failed to retrieve commits from ${commitsApiUrl}, ${commitsResponse.status} ${commitsResponse.statusText}`; if (commitsResponse.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } const commits = await commitsResponse.json(); if (isHosted) { if (commits && commits.values && commits.values.length > 0 && commits.values[0].hash) { return commits.values[0].hash.substring(0, 12); } } else { if (commits && commits.values && commits.values.length > 0 && commits.values[0].id) { return commits.values[0].id.substring(0, 12); } } throw new Error(`Failed to read response from ${commitsApiUrl}`); } }; __publicField$h(_BitbucketUrlReader, "factory", ({ config, logger, treeResponseFactory }) => { const integrations = integration.ScmIntegrations.fromConfig(config); return integrations.bitbucket.list().filter( (item) => !integrations.bitbucketCloud.byHost(item.config.host) && !integrations.bitbucketServer.byHost(item.config.host) ).map((integration) => { const reader = new _BitbucketUrlReader(integration, logger, { treeResponseFactory }); const predicate = (url) => url.host === integration.config.host; return { reader, predicate }; }); }); let BitbucketUrlReader = _BitbucketUrlReader; var __defProp$g = Object.defineProperty; var __defNormalProp$g = (obj, key, value) => key in obj ? __defProp$g(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$g = (obj, key, value) => { __defNormalProp$g(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const _BitbucketServerUrlReader = class _BitbucketServerUrlReader { constructor(integration, deps) { this.integration = integration; this.deps = deps; } async read(url) { const response = await this.readUrl(url); return response.buffer(); } async readUrl(url, options) { var _a; const { etag, lastModifiedAfter, signal } = options != null ? 
options : {}; const bitbucketUrl = integration.getBitbucketServerFileFetchUrl( url, this.integration.config ); const requestOptions = integration.getBitbucketServerRequestOptions( this.integration.config ); let response; try { response = await fetch__default["default"](bitbucketUrl.toString(), { headers: { ...requestOptions.headers, ...etag && { "If-None-Match": etag }, ...lastModifiedAfter && { "If-Modified-Since": lastModifiedAfter.toUTCString() } }, // TODO(freben): The signal cast is there because pre-3.x versions of // node-fetch have a very slightly deviating AbortSignal type signature. // The difference does not affect us in practice however. The cast can be // removed after we support ESM for CLI dependencies and migrate to // version 3 of node-fetch. // https://github.com/backstage/backstage/issues/8242 ...signal && { signal } }); } catch (e) { throw new Error(`Unable to read ${url}, ${e}`); } if (response.status === 304) { throw new errors.NotModifiedError(); } if (response.ok) { return ReadUrlResponseFactory.fromNodeJSReadable(response.body, { etag: (_a = response.headers.get("ETag")) != null ? _a : void 0, lastModifiedAt: parseLastModified( response.headers.get("Last-Modified") ) }); } const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`; if (response.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } async readTree(url, options) { const { filepath } = parseGitUrl__default["default"](url); const lastCommitShortHash = await this.getLastCommitShortHash(url); if ((options == null ? void 0 : options.etag) && options.etag === lastCommitShortHash) { throw new errors.NotModifiedError(); } const downloadUrl = await integration.getBitbucketServerDownloadUrl( url, this.integration.config ); const archiveResponse = await fetch__default["default"]( downloadUrl, integration.getBitbucketServerRequestOptions(this.integration.config) ); if (!archiveResponse.ok) { const message = `Failed to read tree from ${url}, ${archiveResponse.status} ${archiveResponse.statusText}`; if (archiveResponse.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } return await this.deps.treeResponseFactory.fromTarArchive({ stream: stream.Readable.from(archiveResponse.body), subpath: filepath, etag: lastCommitShortHash, filter: options == null ? void 0 : options.filter }); } async search(url, options) { const { filepath } = parseGitUrl__default["default"](url); const matcher = new minimatch.Minimatch(filepath); const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/"); const tree = await this.readTree(treeUrl, { etag: options == null ? void 0 : options.etag, filter: (path) => matcher.match(path) }); const files = await tree.files(); return { etag: tree.etag, files: files.map((file) => ({ url: this.integration.resolveUrl({ url: `/${file.path}`, base: url }), content: file.content, lastModifiedAt: file.lastModifiedAt })) }; } toString() { const { host, token } = this.integration.config; const authed = Boolean(token); return `bitbucketServer{host=${host},authed=${authed}}`; } async getLastCommitShortHash(url) { const { name: repoName, owner: project, ref: branch } = parseGitUrl__default["default"](url); const branchParameter = branch ? 
`?filterText=${encodeURIComponent(branch)}` : "/default"; const branchListUrl = `${this.integration.config.apiBaseUrl}/projects/${project}/repos/${repoName}/branches${branchParameter}`; const branchListResponse = await fetch__default["default"]( branchListUrl, integration.getBitbucketServerRequestOptions(this.integration.config) ); if (!branchListResponse.ok) { const message = `Failed to retrieve branch list from ${branchListUrl}, ${branchListResponse.status} ${branchListResponse.statusText}`; if (branchListResponse.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } const branchMatches = await branchListResponse.json(); if (branchMatches && branchMatches.size > 0) { const exactBranchMatch = branchMatches.values.filter( (branchDetails) => branchDetails.displayId === branch )[0]; return exactBranchMatch.latestCommit.substring(0, 12); } if (!branch && branchMatches) { return branchMatches.latestCommit.substring(0, 12); } throw new Error( `Failed to find Last Commit using ${branch ? `branch "${branch}"` : "default branch"} in response from ${branchListUrl}` ); } }; __publicField$g(_BitbucketServerUrlReader, "factory", ({ config, treeResponseFactory }) => { const integrations = integration.ScmIntegrations.fromConfig(config); return integrations.bitbucketServer.list().map((integration) => { const reader = new _BitbucketServerUrlReader(integration, { treeResponseFactory }); const predicate = (url) => url.host === integration.config.host; return { reader, predicate }; }); }); let BitbucketServerUrlReader = _BitbucketServerUrlReader; var __defProp$f = Object.defineProperty; var __defNormalProp$f = (obj, key, value) => key in obj ? __defProp$f(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$f = (obj, key, value) => { __defNormalProp$f(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; function isAuthCallbackOptions(options) { return "onAuth" in options; } const _Git = class _Git { constructor(config) { this.config = config; __publicField$f(this, "headers"); __publicField$f(this, "onAuth"); __publicField$f(this, "onProgressHandler", () => { let currentPhase = ""; return (event) => { var _a, _b; if (currentPhase !== event.phase) { currentPhase = event.phase; (_a = this.config.logger) == null ? void 0 : _a.info(event.phase); } const total = event.total ? `${Math.round(event.loaded / event.total * 100)}%` : event.loaded; (_b = this.config.logger) == null ? void 0 : _b.debug(`status={${event.phase},total={${total}}}`); }; }); this.onAuth = config.onAuth; this.headers = { "user-agent": "git/@isomorphic-git", ...config.token ? { Authorization: `Bearer ${config.token}` } : {} }; } async add(options) { var _a; const { dir, filepath } = options; (_a = this.config.logger) == null ? void 0 : _a.info(`Adding file {dir=${dir},filepath=${filepath}}`); return git__default["default"].add({ fs: fs__default["default"], dir, filepath }); } async addRemote(options) { var _a; const { dir, url, remote, force } = options; (_a = this.config.logger) == null ? void 0 : _a.info( `Creating new remote {dir=${dir},remote=${remote},url=${url}}` ); return git__default["default"].addRemote({ fs: fs__default["default"], dir, remote, url, force }); } async deleteRemote(options) { var _a; const { dir, remote } = options; (_a = this.config.logger) == null ? 
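// A sketch of the two branch-lookup shapes handled above, following the
// Bitbucket Server REST API as used by this reader: with a ref, the request is
//   GET {apiBaseUrl}/projects/{project}/repos/{repo}/branches?filterText={ref}
// and the entry whose displayId equals the ref supplies latestCommit; without
// a ref, .../branches/default returns a single object whose latestCommit is
// used directly. Either way, the first 12 characters of the commit hash become
// the readTree etag.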
void 0 : _a.info(`Deleting remote {dir=${dir},remote=${remote}}`); return git__default["default"].deleteRemote({ fs: fs__default["default"], dir, remote }); } async checkout(options) { var _a; const { dir, ref } = options; (_a = this.config.logger) == null ? void 0 : _a.info(`Checking out branch {dir=${dir},ref=${ref}}`); return git__default["default"].checkout({ fs: fs__default["default"], dir, ref }); } async branch(options) { var _a; const { dir, ref } = options; (_a = this.config.logger) == null ? void 0 : _a.info(`Creating branch {dir=${dir},ref=${ref}}`); return git__default["default"].branch({ fs: fs__default["default"], dir, ref }); } async commit(options) { var _a; const { dir, message, author, committer } = options; (_a = this.config.logger) == null ? void 0 : _a.info( `Committing file to repo {dir=${dir},message=${message}}` ); return git__default["default"].commit({ fs: fs__default["default"], dir, message, author, committer }); } /** https://isomorphic-git.org/docs/en/clone */ async clone(options) { var _a, _b; const { url, dir, ref, depth, noCheckout } = options; (_a = this.config.logger) == null ? void 0 : _a.info(`Cloning repo {dir=${dir},url=${url}}`); try { return await git__default["default"].clone({ fs: fs__default["default"], http: http__default["default"], url, dir, ref, singleBranch: true, depth: depth != null ? depth : 1, noCheckout, onProgress: this.onProgressHandler(), headers: this.headers, onAuth: this.onAuth }); } catch (ex) { (_b = this.config.logger) == null ? void 0 : _b.error(`Failed to clone repo {dir=${dir},url=${url}}`); if (ex.data) { throw new Error(`${ex.message} {data=${JSON.stringify(ex.data)}}`); } throw ex; } } /** https://isomorphic-git.org/docs/en/currentBranch */ async currentBranch(options) { const { dir, fullName = false } = options; return git__default["default"].currentBranch({ fs: fs__default["default"], dir, fullname: fullName }); } /** https://isomorphic-git.org/docs/en/fetch */ async fetch(options) { var _a, _b; const { dir, remote = "origin", tags = false } = options; (_a = this.config.logger) == null ? void 0 : _a.info( `Fetching remote=${remote} for repository {dir=${dir}}` ); try { await git__default["default"].fetch({ fs: fs__default["default"], http: http__default["default"], dir, remote, tags, onProgress: this.onProgressHandler(), headers: this.headers, onAuth: this.onAuth }); } catch (ex) { (_b = this.config.logger) == null ? void 0 : _b.error( `Failed to fetch repo {dir=${dir},remote=${remote}}` ); if (ex.data) { throw new Error(`${ex.message} {data=${JSON.stringify(ex.data)}}`); } throw ex; } } async init(options) { var _a; const { dir, defaultBranch = "master" } = options; (_a = this.config.logger) == null ? void 0 : _a.info(`Init git repository {dir=${dir}}`); return git__default["default"].init({ fs: fs__default["default"], dir, defaultBranch }); } /** https://isomorphic-git.org/docs/en/merge */ async merge(options) { var _a; const { dir, theirs, ours, author, committer } = options; (_a = this.config.logger) == null ? void 0 : _a.info( `Merging branch '${theirs}' into '${ours}' for repository {dir=${dir}}` ); return git__default["default"].merge({ fs: fs__default["default"], dir, ours, theirs, author, committer }); } async push(options) { var _a, _b; const { dir, remote, remoteRef, force } = options; (_a = this.config.logger) == null ?
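// Usage sketch for this Git helper (paths and token are placeholders);
// Git.fromAuth, defined just below, wires the credentials into every call:
//   const git = Git.fromAuth({ token: 'my-token', logger });
//   await git.clone({ url: 'https://github.com/acme/widgets', dir: '/tmp/repo' });
//   await git.checkout({ dir: '/tmp/repo', ref: 'my-branch' });
// Note that clone() defaults to a single-branch clone of depth 1 unless an
// explicit depth is passed.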
void 0 : _a.info( `Pushing directory to remote {dir=${dir},remote=${remote}}` ); try { return await git__default["default"].push({ fs: fs__default["default"], dir, http: http__default["default"], onProgress: this.onProgressHandler(), remoteRef, force, headers: this.headers, remote, onAuth: this.onAuth }); } catch (ex) { (_b = this.config.logger) == null ? void 0 : _b.error( `Failed to push to repo {dir=${dir}, remote=${remote}}` ); if (ex.data) { throw new Error(`${ex.message} {data=${JSON.stringify(ex.data)}}`); } throw ex; } } /** https://isomorphic-git.org/docs/en/readCommit */ async readCommit(options) { const { dir, sha } = options; return git__default["default"].readCommit({ fs: fs__default["default"], dir, oid: sha }); } /** https://isomorphic-git.org/docs/en/resolveRef */ async resolveRef(options) { const { dir, ref } = options; return git__default["default"].resolveRef({ fs: fs__default["default"], dir, ref }); } /** https://isomorphic-git.org/docs/en/log */ async log(options) { const { dir, ref } = options; return git__default["default"].log({ fs: fs__default["default"], dir, ref: ref != null ? ref : "HEAD" }); } }; __publicField$f(_Git, "fromAuth", (options) => { if (isAuthCallbackOptions(options)) { const { onAuth, logger: logger2 } = options; return new _Git({ onAuth, logger: logger2 }); } const { username, password, token, logger } = options; return new _Git({ onAuth: () => ({ username, password }), token, logger }); }); let Git = _Git; var __defProp$e = Object.defineProperty; var __defNormalProp$e = (obj, key, value) => key in obj ? __defProp$e(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$e = (obj, key, value) => { __defNormalProp$e(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const pipeline$3 = util.promisify(stream.pipeline); const createTemporaryDirectory = async (workDir) => await fs__default["default"].mkdtemp(platformPath.join(workDir, "/gerrit-clone-")); const _GerritUrlReader = class _GerritUrlReader { constructor(integration, deps, workDir) { this.integration = integration; this.deps = deps; this.workDir = workDir; } async read(url) { const response = await this.readUrl(url); return response.buffer(); } async readUrl(url, options) { const apiUrl = integration.getGerritFileContentsApiUrl(this.integration.config, url); let response; try { response = await fetch__default["default"](apiUrl, { method: "GET", ...integration.getGerritRequestOptions(this.integration.config), /* TODO(freben): The signal cast is there because pre-3.x versions of node-fetch have a very slightly deviating AbortSignal type signature. The difference does not affect us in practice however. The cast can be removed after we support ESM for CLI dependencies and migrate to version 3 of node-fetch. https://github.com/backstage/backstage/issues/8242 */ signal: options == null ?
void 0 : options.signal }); } catch (e) { throw new Error(`Unable to read gerrit file ${url}, ${e}`); } if (response.ok) { let responseBody; return { buffer: async () => { if (responseBody === void 0) { responseBody = await response.text(); } return Buffer.from(responseBody, "base64"); }, stream: () => { const readable = stream.Readable.from(response.body); return readable.pipe(new base64Stream.Base64Decode()); } }; } if (response.status === 404) { throw new errors.NotFoundError(`File ${url} not found.`); } throw new Error( `${url} could not be read as ${apiUrl}, ${response.status} ${response.statusText}` ); } async readTree(url, options) { const apiUrl = integration.getGerritBranchApiUrl(this.integration.config, url); let response; try { response = await fetch__default["default"](apiUrl, { method: "GET", ...integration.getGerritRequestOptions(this.integration.config) }); } catch (e) { throw new Error(`Unable to read branch state ${url}, ${e}`); } if (response.status === 404) { throw new errors.NotFoundError(`Not found: ${url}`); } if (!response.ok) { throw new Error( `${url} could not be read as ${apiUrl}, ${response.status} ${response.statusText}` ); } const branchInfo = await integration.parseGerritJsonResponse(response); if ((options == null ? void 0 : options.etag) === branchInfo.revision) { throw new errors.NotModifiedError(); } if (this.integration.config.gitilesBaseUrl !== this.integration.config.baseUrl) { return this.readTreeFromGitiles(url, branchInfo.revision, options); } return this.readTreeFromGitClone(url, branchInfo.revision, options); } async search() { throw new Error("GerritReader does not implement search"); } toString() { const { host, password } = this.integration.config; return `gerrit{host=${host},authed=${Boolean(password)}}`; } async readTreeFromGitClone(url, revision, options) { const { filePath } = integration.parseGerritGitilesUrl(this.integration.config, url); const git = Git.fromAuth({ username: this.integration.config.username, password: this.integration.config.password }); const tempDir = await createTemporaryDirectory(this.workDir); const cloneUrl = integration.getGerritCloneRepoUrl(this.integration.config, url); try { await git.clone({ url: cloneUrl, dir: platformPath.join(tempDir, "repo"), ref: revision, depth: 1 }); const data = await new Promise(async (resolve) => { await pipeline$3( tar__default["default"].create({ cwd: tempDir }, [""]), concatStream__default["default"](resolve) ); }); const tarArchive = stream.Readable.from(data); return await this.deps.treeResponseFactory.fromTarArchive({ stream: tarArchive, subpath: filePath === "/" ? void 0 : filePath, etag: revision, filter: options == null ? void 0 : options.filter }); } catch (error) { throw new Error(`Could not clone ${cloneUrl}: ${error}`); } finally { await fs__default["default"].rm(tempDir, { recursive: true, force: true }); } } async readTreeFromGitiles(url, revision, options) { const { branch, filePath, project } = integration.parseGerritGitilesUrl( this.integration.config, url ); const archiveUrl = integration.buildGerritGitilesArchiveUrl( this.integration.config, project, branch, filePath ); const archiveResponse = await fetch__default["default"](archiveUrl, { ...integration.getGerritRequestOptions(this.integration.config), /* TODO(freben): The signal cast is there because pre-3.x versions of node-fetch have a very slightly deviating AbortSignal type signature. The difference does not affect us in practice however. The cast can be removed after we support ESM for CLI dependencies and migrate to version 3 of node-fetch. https://github.com/backstage/backstage/issues/8242 */
signal: options == null ? void 0 : options.signal }); if (archiveResponse.status === 404) { throw new errors.NotFoundError(`Not found: ${archiveUrl}`); } if (!archiveResponse.ok) { throw new Error( `${url} could not be read as ${archiveUrl}, ${archiveResponse.status} ${archiveResponse.statusText}` ); } return await this.deps.treeResponseFactory.fromTarArchive({ stream: archiveResponse.body, etag: revision, filter: options == null ? void 0 : options.filter, stripFirstDirectory: false }); } }; __publicField$e(_GerritUrlReader, "factory", ({ config, treeResponseFactory }) => { var _a; const integrations = integration.ScmIntegrations.fromConfig(config); if (!integrations.gerrit) { return []; } const workDir = (_a = config.getOptionalString("backend.workingDirectory")) != null ? _a : os__default["default"].tmpdir(); return integrations.gerrit.list().map((integration) => { const reader = new _GerritUrlReader( integration, { treeResponseFactory }, workDir ); const predicate = (url) => { const gitilesUrl = new URL(integration.config.gitilesBaseUrl); return url.host === gitilesUrl.host; }; return { reader, predicate }; }); }); let GerritUrlReader = _GerritUrlReader; var __defProp$d = Object.defineProperty; var __defNormalProp$d = (obj, key, value) => key in obj ? __defProp$d(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$d = (obj, key, value) => { __defNormalProp$d(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const _GithubUrlReader = class _GithubUrlReader { constructor(integration, deps) { this.integration = integration; this.deps = deps; if (!integration.config.apiBaseUrl && !integration.config.rawBaseUrl) { throw new Error( `GitHub integration '${integration.title}' must configure an explicit apiBaseUrl or rawBaseUrl` ); } } async read(url) { const response = await this.readUrl(url); return response.buffer(); } async readUrl(url, options) { var _a; const credentials = await this.deps.credentialsProvider.getCredentials({ url }); const ghUrl = integration.getGithubFileFetchUrl( url, this.integration.config, credentials ); let response; try { response = await fetch__default["default"](ghUrl, { headers: { ...credentials == null ? void 0 : credentials.headers, ...(options == null ? void 0 : options.etag) && { "If-None-Match": options.etag }, ...(options == null ? void 0 : options.lastModifiedAfter) && { "If-Modified-Since": options.lastModifiedAfter.toUTCString() }, Accept: "application/vnd.github.v3.raw" }, /* TODO(freben): The signal cast is there because pre-3.x versions of node-fetch have a very slightly deviating AbortSignal type signature. The difference does not affect us in practice however. The cast can be removed after we support ESM for CLI dependencies and migrate to version 3 of node-fetch. https://github.com/backstage/backstage/issues/8242 */ signal: options == null ? void 0 : options.signal }); } catch (e) { throw new Error(`Unable to read ${url}, ${e}`); } if (response.status === 304) { throw new errors.NotModifiedError(); } if (response.ok) { return ReadUrlResponseFactory.fromNodeJSReadable(response.body, { etag: (_a = response.headers.get("ETag")) != null ?
_a : void 0, lastModifiedAt: parseLastModified( response.headers.get("Last-Modified") ) }); } let message = `${url} could not be read as ${ghUrl}, ${response.status} ${response.statusText}`; if (response.status === 404) { throw new errors.NotFoundError(message); } if (response.status === 403 && response.headers.get("X-RateLimit-Remaining") === "0") { message += " (rate limit exceeded)"; } throw new Error(message); } async readTree(url, options) { const repoDetails = await this.getRepoDetails(url); const commitSha = repoDetails.commitSha; if ((options == null ? void 0 : options.etag) && options.etag === commitSha) { throw new errors.NotModifiedError(); } const { filepath } = parseGitUrl__default["default"](url); const { headers } = await this.deps.credentialsProvider.getCredentials({ url }); return this.doReadTree( repoDetails.repo.archive_url, commitSha, filepath, /* TODO(freben): The signal cast is there because pre-3.x versions of node-fetch have a very slightly deviating AbortSignal type signature. The difference does not affect us in practice however. The cast can be removed after we support ESM for CLI dependencies and migrate to version 3 of node-fetch. https://github.com/backstage/backstage/issues/8242 */ { headers, signal: options == null ? void 0 : options.signal }, options ); } async search(url, options) { const repoDetails = await this.getRepoDetails(url); const commitSha = repoDetails.commitSha; if ((options == null ? void 0 : options.etag) && options.etag === commitSha) { throw new errors.NotModifiedError(); } const { filepath } = parseGitUrl__default["default"](url); const { headers } = await this.deps.credentialsProvider.getCredentials({ url }); const files = await this.doSearch( url, repoDetails.repo.trees_url, repoDetails.repo.archive_url, commitSha, filepath, { headers, signal: options == null ? void 0 : options.signal } ); return { files, etag: commitSha }; } toString() { const { host, token } = this.integration.config; return `github{host=${host},authed=${Boolean(token)}}`; } async doReadTree(archiveUrl, sha, subpath, init, options) { const archive = await this.fetchResponse( archiveUrl.replace("{archive_format}", "tarball").replace("{/ref}", `/${sha}`), init ); return await this.deps.treeResponseFactory.fromTarArchive({ /* TODO(Rugvip): Underlying implementation of fetch will be node-fetch, we probably want to stick to using that in exclusively backend code. */ stream: stream.Readable.from(archive.body), subpath, etag: sha, filter: options == null ?
void 0 : options.filter }); } async doSearch(url, treesUrl, archiveUrl, sha, query, init) { function pathToUrl(path) { const updated = new URL(url); const base = updated.pathname.split("/").slice(1, 5).join("/"); updated.pathname = `${base}/${path}`; return updated.toString(); } const matcher = new minimatch.Minimatch(query.replace(/^\/+/, "")); const recursiveTree = await this.fetchJson( treesUrl.replace("{/sha}", `/${sha}?recursive=true`), init ); if (!recursiveTree.truncated) { const matching = recursiveTree.tree.filter( (item) => item.type === "blob" && item.path && item.url && matcher.match(item.path) ); return matching.map((item) => ({ url: pathToUrl(item.path), content: async () => { const blob = await this.fetchJson(item.url, init); return Buffer.from(blob.content, "base64"); } })); } const tree = await this.doReadTree(archiveUrl, sha, "", init, { filter: (path) => matcher.match(path) }); const files = await tree.files(); return files.map((file) => ({ url: pathToUrl(file.path), content: file.content, lastModifiedAt: file.lastModifiedAt })); } async getRepoDetails(url) { const parsed = parseGitUrl__default["default"](url); const { ref, full_name } = parsed; const credentials = await this.deps.credentialsProvider.getCredentials({ url }); const { headers } = credentials; const commitStatus = await this.fetchJson( `${this.integration.config.apiBaseUrl}/repos/${full_name}/commits/${ref || await this.getDefaultBranch(full_name, credentials)}/status?per_page=0`, { headers } ); return { commitSha: commitStatus.sha, repo: commitStatus.repository }; } async getDefaultBranch(repoFullName, credentials) { const repo = await this.fetchJson( `${this.integration.config.apiBaseUrl}/repos/${repoFullName}`, { headers: credentials.headers } ); return repo.default_branch; } async fetchResponse(url, init) { const urlAsString = url.toString(); const response = await fetch__default["default"](urlAsString, init); if (!response.ok) { const message = `Request failed for ${urlAsString}, ${response.status} ${response.statusText}`; if (response.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } return response; } async fetchJson(url, init) { const response = await this.fetchResponse(url, init); return await response.json(); } }; __publicField$d(_GithubUrlReader, "factory", ({ config, treeResponseFactory }) => { const integrations = integration.ScmIntegrations.fromConfig(config); const credentialsProvider = integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations); return integrations.github.list().map((integration) => { const reader = new _GithubUrlReader(integration, { treeResponseFactory, credentialsProvider }); const predicate = (url) => url.host === integration.config.host; return { reader, predicate }; }); }); let GithubUrlReader = _GithubUrlReader; var __defProp$c = Object.defineProperty; var __defNormalProp$c = (obj, key, value) => key in obj ? __defProp$c(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$c = (obj, key, value) => { __defNormalProp$c(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const _GitlabUrlReader = class _GitlabUrlReader { constructor(integration, deps) { this.integration = integration; this.deps = deps; } async read(url) { const response = await this.readUrl(url); return response.buffer(); } async readUrl(url, options) { var _a; const { etag, lastModifiedAfter, signal } = options != null ? 
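// Worked example for the pathToUrl() helper inside doSearch() above (repo
// names are hypothetical): for url 'https://github.com/acme/widgets/tree/main/docs'
// the base is the first four path segments ('acme/widgets/tree/main'), so a
// tree item with path 'docs/index.md' maps to
// 'https://github.com/acme/widgets/tree/main/docs/index.md'. When the
// recursive trees response is truncated, the reader falls back to downloading
// the full tarball and filtering it with the same minimatch matcher.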
options : {}; const builtUrl = await this.getGitlabFetchUrl(url); let response; try { response = await fetch__default["default"](builtUrl, { headers: { ...integration.getGitLabRequestOptions(this.integration.config).headers, ...etag && { "If-None-Match": etag }, ...lastModifiedAfter && { "If-Modified-Since": lastModifiedAfter.toUTCString() } }, /* TODO(freben): The signal cast is there because pre-3.x versions of node-fetch have a very slightly deviating AbortSignal type signature. The difference does not affect us in practice however. The cast can be removed after we support ESM for CLI dependencies and migrate to version 3 of node-fetch. https://github.com/backstage/backstage/issues/8242 */ ...signal && { signal } }); } catch (e) { throw new Error(`Unable to read ${url}, ${e}`); } if (response.status === 304) { throw new errors.NotModifiedError(); } if (response.ok) { return ReadUrlResponseFactory.fromNodeJSReadable(response.body, { etag: (_a = response.headers.get("ETag")) != null ? _a : void 0, lastModifiedAt: parseLastModified( response.headers.get("Last-Modified") ) }); } const message = `${url} could not be read as ${builtUrl}, ${response.status} ${response.statusText}`; if (response.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } async readTree(url, options) { var _a, _b; const { etag, signal } = options != null ? options : {}; const { ref, full_name, filepath } = parseGitUrl__default["default"](url); let repoFullName = full_name; const relativePath = integration.getGitLabIntegrationRelativePath( this.integration.config ); if (relativePath) { const rectifiedRelativePath = `${lodash.trimStart(relativePath, "/")}/`; repoFullName = full_name.replace(rectifiedRelativePath, ""); } const projectGitlabResponse = await fetch__default["default"]( new URL( `${this.integration.config.apiBaseUrl}/projects/${encodeURIComponent( repoFullName )}` ).toString(), integration.getGitLabRequestOptions(this.integration.config) ); if (!projectGitlabResponse.ok) { const msg = `Failed to read tree from ${url}, ${projectGitlabResponse.status} ${projectGitlabResponse.statusText}`; if (projectGitlabResponse.status === 404) { throw new errors.NotFoundError(msg); } throw new Error(msg); } const projectGitlabResponseJson = await projectGitlabResponse.json(); const branch = ref || projectGitlabResponseJson.default_branch; const commitsReqParams = new URLSearchParams(); commitsReqParams.set("ref_name", branch); if (!!filepath) { commitsReqParams.set("path", filepath); } const commitsGitlabResponse = await fetch__default["default"]( new URL( `${this.integration.config.apiBaseUrl}/projects/${encodeURIComponent( repoFullName )}/repository/commits?${commitsReqParams.toString()}` ).toString(), { ...integration.getGitLabRequestOptions(this.integration.config), /* TODO(freben): The signal cast is there because pre-3.x versions of node-fetch have a very slightly deviating AbortSignal type signature. The difference does not affect us in practice however. The cast can be removed after we support ESM for CLI dependencies and migrate to version 3 of node-fetch. https://github.com/backstage/backstage/issues/8242 */ ...signal && { signal } } );
if (!commitsGitlabResponse.ok) { const message = `Failed to read tree (branch) from ${url}, ${commitsGitlabResponse.status} ${commitsGitlabResponse.statusText}`; if (commitsGitlabResponse.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } const commitSha = (_b = (_a = (await commitsGitlabResponse.json())[0]) == null ? void 0 : _a.id) != null ? _b : ""; if (etag && etag === commitSha) { throw new errors.NotModifiedError(); } const archiveReqParams = new URLSearchParams(); archiveReqParams.set("sha", branch); if (!!filepath) { archiveReqParams.set("path", filepath); } const archiveGitLabResponse = await fetch__default["default"]( `${this.integration.config.apiBaseUrl}/projects/${encodeURIComponent( repoFullName )}/repository/archive?${archiveReqParams.toString()}`, { ...integration.getGitLabRequestOptions(this.integration.config), /* TODO(freben): The signal cast is there because pre-3.x versions of node-fetch have a very slightly deviating AbortSignal type signature. The difference does not affect us in practice however. The cast can be removed after we support ESM for CLI dependencies and migrate to version 3 of node-fetch. https://github.com/backstage/backstage/issues/8242 */ ...signal && { signal } } ); if (!archiveGitLabResponse.ok) { const message = `Failed to read tree (archive) from ${url}, ${archiveGitLabResponse.status} ${archiveGitLabResponse.statusText}`; if (archiveGitLabResponse.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } return await this.deps.treeResponseFactory.fromTarArchive({ stream: stream.Readable.from(archiveGitLabResponse.body), subpath: filepath, etag: commitSha, filter: options == null ? void 0 : options.filter }); } async search(url, options) { const { filepath } = parseGitUrl__default["default"](url); const staticPart = this.getStaticPart(filepath); const matcher = new minimatch.Minimatch(filepath); const treeUrl = lodash.trimEnd(url.replace(filepath, staticPart), `/`); const pathPrefix = staticPart ? `${staticPart}/` : ""; const tree = await this.readTree(treeUrl, { etag: options == null ? void 0 : options.etag, signal: options == null ? void 0 : options.signal, filter: (path) => matcher.match(`${pathPrefix}${path}`) }); const files = await tree.files(); return { etag: tree.etag, files: files.map((file) => ({ url: this.integration.resolveUrl({ url: `/${pathPrefix}${file.path}`, base: url }), content: file.content, lastModifiedAt: file.lastModifiedAt })) }; } /** * This function splits the input globPattern string into segments using the path separator /. It then iterates over * the segments from the end of the array towards the beginning, checking if the concatenated string up to that * segment matches the original globPattern using the minimatch function. If a match is found, it continues iterating. * If no match is found, it returns the concatenated string up to the current segment, which is the static part of the * glob pattern. * * E.g. `catalog/foo/*.yaml` will return `catalog/foo`. 
* * @param globPattern the glob pattern * @private */ getStaticPart(globPattern) { const segments = globPattern.split("/"); let i = segments.length; while (i > 0 && new minimatch.Minimatch(segments.slice(0, i).join("/")).match(globPattern)) { i--; } return segments.slice(0, i).join("/"); } toString() { const { host, token } = this.integration.config; return `gitlab{host=${host},authed=${Boolean(token)}}`; } async getGitlabFetchUrl(target) { const targetUrl = new URL(target); if (targetUrl.pathname.includes("/-/jobs/artifacts/")) { return this.getGitlabArtifactFetchUrl(targetUrl).then( (value) => value.toString() ); } return integration.getGitLabFileFetchUrl(target, this.integration.config); } /* convert urls of the form: https://example.com/<namespace>/<project>/-/jobs/artifacts/<ref>/raw/<path>?job=<job_name> to urls of the form: https://example.com/api/v4/projects/:id/jobs/artifacts/:ref_name/raw/*artifact_path?job=<job_name> */ async getGitlabArtifactFetchUrl(target) { if (!target.pathname.includes("/-/jobs/artifacts/")) { throw new Error("Unable to process url as a GitLab artifact"); } try { const [namespaceAndProject, ref] = target.pathname.split("/-/jobs/artifacts/"); const projectPath = new URL(target); projectPath.pathname = namespaceAndProject; const projectId = await this.resolveProjectToId(projectPath); const relativePath = integration.getGitLabIntegrationRelativePath( this.integration.config ); const newUrl = new URL(target); newUrl.pathname = `${relativePath}/api/v4/projects/${projectId}/jobs/artifacts/${ref}`; return newUrl; } catch (e) { throw new Error( `Unable to translate GitLab artifact URL: ${target}, ${e}` ); } } async resolveProjectToId(pathToProject) { let project = pathToProject.pathname; const relativePath = integration.getGitLabIntegrationRelativePath( this.integration.config ); if (relativePath) { project = project.replace(relativePath, ""); } project = project.replace(/^\//, ""); const result = await fetch__default["default"]( `${pathToProject.origin}${relativePath}/api/v4/projects/${encodeURIComponent(project)}`, integration.getGitLabRequestOptions(this.integration.config) ); const data = await result.json(); if (!result.ok) { throw new Error(`Gitlab error: ${data.error}, ${data.error_description}`); } return Number(data.id); } }; __publicField$c(_GitlabUrlReader, "factory", ({ config, treeResponseFactory }) => { const integrations = integration.ScmIntegrations.fromConfig(config); return integrations.gitlab.list().map((integration) => { const reader = new _GitlabUrlReader(integration, { treeResponseFactory }); const predicate = (url) => url.host === integration.config.host; return { reader, predicate }; }); }); let GitlabUrlReader = _GitlabUrlReader; var __defProp$b = Object.defineProperty; var __defNormalProp$b = (obj, key, value) => key in obj ? __defProp$b(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$b = (obj, key, value) => { __defNormalProp$b(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const _GiteaUrlReader = class _GiteaUrlReader { constructor(integration, deps) { this.integration = integration; this.deps = deps; } async read(url) { const response = await this.readUrl(url); return response.buffer(); } async readUrl(url, options) { var _a; let response; const blobUrl = integration.getGiteaFileContentsUrl(this.integration.config, url); try { response = await fetch__default["default"](blobUrl, { method: "GET", ...integration.getGiteaRequestOptions(this.integration.config), signal: options == null ?
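// Worked examples for getStaticPart() above: 'catalog/foo/*.yaml' returns
// 'catalog/foo', while '**/*.yaml' returns '' (no static prefix), in which
// case search() reads the tree from the repository root and the minimatch
// filter does all of the narrowing.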
void 0 : options.signal }); } catch (e) { throw new Error(`Unable to read ${blobUrl}, ${e}`); } if (response.ok) { const { encoding, content } = await response.json(); if (encoding === "base64") { return ReadUrlResponseFactory.fromReadable( stream.Readable.from(Buffer.from(content, "base64")), { etag: (_a = response.headers.get("ETag")) != null ? _a : void 0, lastModifiedAt: parseLastModified( response.headers.get("Last-Modified") ) } ); } throw new Error(`Unknown encoding: ${encoding}`); } const message = `${url} could not be read as ${blobUrl}, ${response.status} ${response.statusText}`; if (response.status === 404) { throw new errors.NotFoundError(message); } if (response.status === 304) { throw new errors.NotModifiedError(); } if (response.status === 403) { throw new errors.AuthenticationError(); } throw new Error(message); } async readTree(url, options) { const lastCommitHash = await this.getLastCommitHash(url); if ((options == null ? void 0 : options.etag) && options.etag === lastCommitHash) { throw new errors.NotModifiedError(); } const archiveUri = integration.getGiteaArchiveUrl(this.integration.config, url); let response; try { response = await fetch__default["default"](archiveUri, { method: "GET", ...integration.getGiteaRequestOptions(this.integration.config), signal: options == null ? void 0 : options.signal }); } catch (e) { throw new Error(`Unable to read ${archiveUri}, ${e}`); } const parsedUri = integration.parseGiteaUrl(this.integration.config, url); return this.deps.treeResponseFactory.fromTarArchive({ stream: stream.Readable.from(response.body), subpath: parsedUri.path, etag: lastCommitHash, filter: options == null ? void 0 : options.filter }); } search() { throw new Error("GiteaUrlReader search not implemented."); } toString() { const { host } = this.integration.config; return `gitea{host=${host},authed=${Boolean( this.integration.config.password )}}`; } async getLastCommitHash(url) { const commitUri = integration.getGiteaLatestCommitUrl(this.integration.config, url); const response = await fetch__default["default"]( commitUri, integration.getGiteaRequestOptions(this.integration.config) ); if (!response.ok) { const message = `Failed to retrieve latest commit information from ${commitUri}, ${response.status} ${response.statusText}`; if (response.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } return (await response.json()).sha; } }; __publicField$b(_GiteaUrlReader, "factory", ({ config, treeResponseFactory }) => { return integration.ScmIntegrations.fromConfig(config).gitea.list().map((integration) => { const reader = new _GiteaUrlReader(integration, { treeResponseFactory }); const predicate = (url) => { return url.host === integration.config.host; }; return { reader, predicate }; }); }); let GiteaUrlReader = _GiteaUrlReader; var __defProp$a = Object.defineProperty; var __defNormalProp$a = (obj, key, value) => key in obj ? __defProp$a(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$a = (obj, key, value) => { __defNormalProp$a(obj, typeof key !== "symbol" ? 
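// Sketch of the response shape consumed by readUrl() above (sample values):
// the Gitea contents API returns JSON such as
//   { "encoding": "base64", "content": "YXBpVmVyc2lvbjogMQ==" }
// and the content is decoded with Buffer.from(content, 'base64'); any other
// encoding value is rejected with an Unknown encoding error.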
key + "" : key, value); return value; }; const DEFAULT_REGION = "us-east-1"; function parseUrl(url, config) { const parsedUrl = new URL(url); const pathname = parsedUrl.pathname.substring(1); const host = parsedUrl.host; if (config.host === "amazonaws.com" || config.host === "amazonaws.com.cn") { const match = host.match( /^(?:([a-z0-9.-]+)\.)?s3(?:[.-]([a-z0-9-]+))?\.amazonaws\.com(\.cn)?$/ ); if (!match) { throw new Error(`Invalid AWS S3 URL ${url}`); } const [, hostBucket, hostRegion] = match; if (config.s3ForcePathStyle || !hostBucket) { const slashIndex = pathname.indexOf("/"); if (slashIndex < 0) { throw new Error( `Invalid path-style AWS S3 URL ${url}, does not contain bucket in the path` ); } return { path: pathname.substring(slashIndex + 1), bucket: pathname.substring(0, slashIndex), region: hostRegion != null ? hostRegion : DEFAULT_REGION }; } return { path: pathname, bucket: hostBucket, region: hostRegion != null ? hostRegion : DEFAULT_REGION }; } const usePathStyle = config.s3ForcePathStyle || host.length === config.host.length; if (usePathStyle) { const slashIndex = pathname.indexOf("/"); if (slashIndex < 0) { throw new Error( `Invalid path-style AWS S3 URL ${url}, does not contain bucket in the path` ); } return { path: pathname.substring(slashIndex + 1), bucket: pathname.substring(0, slashIndex), region: DEFAULT_REGION }; } return { path: pathname, bucket: host.substring(0, host.length - config.host.length - 1), region: DEFAULT_REGION }; } const _AwsS3UrlReader = class _AwsS3UrlReader { constructor(credsManager, integration, deps) { this.credsManager = credsManager; this.integration = integration; this.deps = deps; } /** * If accessKeyId and secretAccessKey are missing, the standard credentials provider chain will be used: * https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html */ static buildStaticCredentials(accessKeyId, secretAccessKey) { return async () => { return { accessKeyId, secretAccessKey }; }; } static async buildCredentials(credsManager, region, integration) { if (!integration) { return (await credsManager.getCredentialProvider()).sdkCredentialProvider; } const accessKeyId = integration.config.accessKeyId; const secretAccessKey = integration.config.secretAccessKey; let explicitCredentials; if (accessKeyId && secretAccessKey) { explicitCredentials = _AwsS3UrlReader.buildStaticCredentials( accessKeyId, secretAccessKey ); } else { explicitCredentials = (await credsManager.getCredentialProvider()).sdkCredentialProvider; } const roleArn = integration.config.roleArn; if (roleArn) { return credentialProviders.fromTemporaryCredentials({ masterCredentials: explicitCredentials, params: { RoleSessionName: "backstage-aws-s3-url-reader", RoleArn: roleArn, ExternalId: integration.config.externalId }, clientConfig: { region } }); } return explicitCredentials; } async buildS3Client(credsManager, region, integration) { const credentials = await _AwsS3UrlReader.buildCredentials( credsManager, region, integration ); const s3 = new clientS3.S3Client({ region, credentials, endpoint: integration.config.endpoint, forcePathStyle: integration.config.s3ForcePathStyle }); return s3; } async retrieveS3ObjectData(stream$1) { return new Promise((resolve, reject) => { try { const chunks = []; stream$1.on("data", (chunk) => chunks.push(chunk)); stream$1.on( "error", (e) => reject(new errors.ForwardedError("Unable to read stream", e)) ); stream$1.on("end", () => resolve(stream.Readable.from(Buffer.concat(chunks)))); } catch (e) { throw 
new errors.ForwardedError("Unable to parse the response data", e); } }); } async read(url) { const response = await this.readUrl(url); return response.buffer(); } async readUrl(url, options) { var _a; const { etag, lastModifiedAfter } = options != null ? options : {}; try { const { path, bucket, region } = parseUrl(url, this.integration.config); const s3Client = await this.buildS3Client( this.credsManager, region, this.integration ); const abortController$1 = new abortController.AbortController(); const params = { Bucket: bucket, Key: path, ...etag && { IfNoneMatch: etag }, ...lastModifiedAfter && { IfModifiedSince: lastModifiedAfter } }; (_a = options == null ? void 0 : options.signal) == null ? void 0 : _a.addEventListener("abort", () => abortController$1.abort()); const getObjectCommand = new clientS3.GetObjectCommand(params); const response = await s3Client.send(getObjectCommand, { abortSignal: abortController$1.signal }); const s3ObjectData = await this.retrieveS3ObjectData( response.Body ); return ReadUrlResponseFactory.fromReadable(s3ObjectData, { etag: response.ETag, lastModifiedAt: response.LastModified }); } catch (e) { if (e.$metadata && e.$metadata.httpStatusCode === 304) { throw new errors.NotModifiedError(); } throw new errors.ForwardedError("Could not retrieve file from S3", e); } } async readTree(url, options) { var _a, _b; try { const { path, bucket, region } = parseUrl(url, this.integration.config); const s3Client = await this.buildS3Client( this.credsManager, region, this.integration ); const abortController$1 = new abortController.AbortController(); const allObjects = []; const responses = []; let continuationToken; let output; do { const listObjectsV2Command = new clientS3.ListObjectsV2Command({ Bucket: bucket, ContinuationToken: continuationToken, Prefix: path }); (_a = options == null ? void 0 : options.signal) == null ? void 0 : _a.addEventListener( "abort", () => abortController$1.abort() ); output = await s3Client.send(listObjectsV2Command, { abortSignal: abortController$1.signal }); if (output.Contents) { output.Contents.forEach((contents) => { allObjects.push(contents.Key); }); } continuationToken = output.NextContinuationToken; } while (continuationToken); for (let i = 0; i < allObjects.length; i++) { const getObjectCommand = new clientS3.GetObjectCommand({ Bucket: bucket, Key: String(allObjects[i]) }); const response = await s3Client.send(getObjectCommand); const s3ObjectData = await this.retrieveS3ObjectData( response.Body ); responses.push({ data: s3ObjectData, path: String(allObjects[i]), lastModifiedAt: (_b = response == null ? void 0 : response.LastModified) != null ? 
_b : void 0 }); } return await this.deps.treeResponseFactory.fromReadableArray(responses); } catch (e) { throw new errors.ForwardedError("Could not retrieve file tree from S3", e); } } async search() { throw new Error("AwsS3Reader does not implement search"); } toString() { const secretAccessKey = this.integration.config.secretAccessKey; return `awsS3{host=${this.integration.config.host},authed=${Boolean( secretAccessKey )}}`; } }; __publicField$a(_AwsS3UrlReader, "factory", ({ config, treeResponseFactory }) => { const integrations = integration.ScmIntegrations.fromConfig(config); const credsManager = integrationAwsNode.DefaultAwsCredentialsManager.fromConfig(config); return integrations.awsS3.list().map((integration) => { const reader = new _AwsS3UrlReader(credsManager, integration, { treeResponseFactory }); const predicate = (url) => url.host.endsWith(integration.config.host); return { reader, predicate }; }); }); let AwsS3UrlReader = _AwsS3UrlReader; var __defProp$9 = Object.defineProperty; var __defNormalProp$9 = (obj, key, value) => key in obj ? __defProp$9(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$9 = (obj, key, value) => { __defNormalProp$9(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const isInRange = (num, [start, end]) => { return num >= start && num <= end; }; const parsePortRange = (port) => { const isRange = port.includes("-"); if (isRange) { const range = port.split("-").map((v) => parseInt(v, 10)).filter(Boolean); if (range.length !== 2) throw new Error(`Port range is not valid: ${port}`); const [start, end] = range; if (start <= 0 || end <= 0 || start > end) throw new Error(`Port range is not valid: [${start}, ${end}]`); return range; } const parsedPort = parseInt(port, 10); return [parsedPort, parsedPort]; }; const parsePortPredicate = (port) => { if (port) { const range = parsePortRange(port); return (url) => { if (url.port) return isInRange(parseInt(url.port, 10), range); if (url.protocol === "http:") return isInRange(80, range); if (url.protocol === "https:") return isInRange(443, range); return false; }; } return (url) => !url.port; }; const _FetchUrlReader = class _FetchUrlReader { async read(url) { const response = await this.readUrl(url); return response.buffer(); } async readUrl(url, options) { var _a; let response; try { response = await fetch__default["default"](url, { headers: { ...(options == null ? void 0 : options.etag) && { "If-None-Match": options.etag }, ...(options == null ? void 0 : options.lastModifiedAfter) && { "If-Modified-Since": options.lastModifiedAfter.toUTCString() } }, /* TODO(freben): The signal cast is there because pre-3.x versions of node-fetch have a very slightly deviating AbortSignal type signature. The difference does not affect us in practice however. The cast can be removed after we support ESM for CLI dependencies and migrate to version 3 of node-fetch. https://github.com/backstage/backstage/issues/8242 */ signal: options == null ? void 0 : options.signal }); } catch (e) { throw new Error(`Unable to read ${url}, ${e}`); } if (response.status === 304) { throw new errors.NotModifiedError(); } if (response.ok) { return ReadUrlResponseFactory.fromNodeJSReadable(response.body, { etag: (_a = response.headers.get("ETag")) != null ?
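// Worked examples for the port helpers above: parsePortRange('8080-8090')
// yields [8080, 8090] and parsePortRange('443') yields [443, 443]. The
// resulting predicate also applies protocol defaults, so for an allow entry
// of 'example.com:443' an https URL with no explicit port still matches,
// while an entry without a port only matches URLs that omit the port.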
_a : void 0, lastModifiedAt: parseLastModified( response.headers.get("Last-Modified") ) }); } const message = `could not read ${url}, ${response.status} ${response.statusText}`; if (response.status === 404) { throw new errors.NotFoundError(message); } throw new Error(message); } async readTree() { throw new Error("FetchUrlReader does not implement readTree"); } async search() { throw new Error("FetchUrlReader does not implement search"); } toString() { return "fetch{}"; } }; /** * The factory creates a single reader that will be used for reading any URL that's listed * in configuration at `backend.reading.allow`. The allow list contains a list of objects describing * targets to allow, containing the following fields: * * `host`: * Either full hostnames to match, or subdomain wildcard matchers with a leading '*'. * For example 'example.com' and '*.example.com' are valid values, 'prod.*.example.com' is not. * * `paths`: * An optional list of paths which are allowed. If the list is omitted all paths are allowed. */ __publicField$9(_FetchUrlReader, "factory", ({ config }) => { var _a, _b; const predicates = (_b = (_a = config.getOptionalConfigArray("backend.reading.allow")) == null ? void 0 : _a.map((allowConfig) => { const paths = allowConfig.getOptionalStringArray("paths"); const checkPath = paths ? (url) => { const targetPath = platformPath__default["default"].posix.normalize(url.pathname); return paths.some( (allowedPath) => targetPath.startsWith(allowedPath) ); } : (_url) => true; const host = allowConfig.getString("host"); const [hostname, port] = host.split(":"); const checkPort = parsePortPredicate(port); if (hostname.startsWith("*.")) { const suffix = hostname.slice(1); return (url) => url.hostname.endsWith(suffix) && checkPath(url) && checkPort(url); } return (url) => url.hostname === hostname && checkPath(url) && checkPort(url); })) != null ? _b : []; const reader = new _FetchUrlReader(); const predicate = (url) => predicates.some((p) => p(url)); return [{ reader, predicate }]; }); let FetchUrlReader = _FetchUrlReader; var __defProp$8 = Object.defineProperty; var __defNormalProp$8 = (obj, key, value) => key in obj ? __defProp$8(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$8 = (obj, key, value) => { __defNormalProp$8(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; function notAllowedMessage(url) { return `Reading from '${url}' is not allowed. 
You may need to configure an integration for the target host, or add it to the configured list of allowed hosts at 'backend.reading.allow'`; } class UrlReaderPredicateMux { constructor() { __publicField$8(this, "readers", []); } register(tuple) { this.readers.push(tuple); } async readUrl(url, options) { const parsed = new URL(url); for (const { predicate, reader } of this.readers) { if (predicate(parsed)) { return reader.readUrl(url, options); } } throw new errors.NotAllowedError(notAllowedMessage(url)); } async readTree(url, options) { const parsed = new URL(url); for (const { predicate, reader } of this.readers) { if (predicate(parsed)) { return await reader.readTree(url, options); } } throw new errors.NotAllowedError(notAllowedMessage(url)); } async search(url, options) { const parsed = new URL(url); for (const { predicate, reader } of this.readers) { if (predicate(parsed)) { return await reader.search(url, options); } } throw new errors.NotAllowedError(notAllowedMessage(url)); } toString() { return `predicateMux{readers=${this.readers.map((t) => t.reader).join(",")}}`; } } const pipeline$2 = util.promisify(stream.pipeline); const directoryNameRegex = /^[^\/]+\//; function stripFirstDirectoryFromPath(path) { return path.replace(directoryNameRegex, ""); } const streamToBuffer = (stream) => { return new Promise(async (resolve, reject) => { try { await pipeline$2(stream, concatStream__default["default"](resolve)); } catch (ex) { reject(ex); } }); }; var __defProp$7 = Object.defineProperty; var __defNormalProp$7 = (obj, key, value) => key in obj ? __defProp$7(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$7 = (obj, key, value) => { __defNormalProp$7(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const TarParseStream = tar.Parse; const pipeline$1 = util.promisify(stream.pipeline); class TarArchiveResponse { constructor(stream, subPath, workDir, etag, filter, stripFirstDirectory = true) { this.stream = stream; this.subPath = subPath; this.workDir = workDir; this.etag = etag; this.filter = filter; this.stripFirstDirectory = stripFirstDirectory; __publicField$7(this, "read", false); if (subPath) { if (!subPath.endsWith("/")) { this.subPath += "/"; } if (subPath.startsWith("/")) { throw new TypeError( `TarArchiveResponse subPath must not start with a /, got '${subPath}'` ); } } this.etag = etag; } /* Make sure the input stream is only read once */ onlyOnce() { if (this.read) { throw new Error("Response has already been read"); } this.read = true; } async files() { this.onlyOnce(); const files = Array(); const parser = new TarParseStream(); parser.on("entry", (entry) => { if (entry.type === "Directory") { entry.resume(); return; } const relativePath = this.stripFirstDirectory ?
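// A minimal app-config sketch for the FetchUrlReader allow list described in
// the factory doc comment above (hostnames are placeholders):
//   backend:
//     reading:
//       allow:
//         - host: example.com
//         - host: '*.examplecdn.com'
//           paths: ['/assets']
// The first entry allows any path on example.com; the second allows only
// paths under /assets on subdomains of examplecdn.com.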
stripFirstDirectoryFromPath(entry.path) : entry.path; if (this.subPath) { if (!relativePath.startsWith(this.subPath)) { entry.resume(); return; } } const path = relativePath.slice(this.subPath.length); if (this.filter) { if (!this.filter(path, { size: entry.remain })) { entry.resume(); return; } } const content = new Promise(async (resolve) => { await pipeline$1(entry, concatStream__default["default"](resolve)); }); files.push({ path, content: () => content }); entry.resume(); }); await pipeline$1(this.stream, parser); return files; } async archive() { if (!this.subPath) { this.onlyOnce(); return this.stream; } const tmpDir = await this.dir(); try { const data = await new Promise(async (resolve) => { await pipeline$1( tar__default["default"].create({ cwd: tmpDir }, [""]), concatStream__default["default"](resolve) ); }); return stream.Readable.from(data); } finally { await fs__default["default"].remove(tmpDir); } } async dir(options) { var _a; this.onlyOnce(); const dir = (_a = options == null ? void 0 : options.targetDir) != null ? _a : await fs__default["default"].mkdtemp(platformPath__default["default"].join(this.workDir, "backstage-")); let strip = this.subPath ? this.subPath.split("/").length : 1; if (!this.stripFirstDirectory) { strip--; } let filterError = void 0; await pipeline$1( this.stream, tar__default["default"].extract({ strip, cwd: dir, filter: (path, stat) => { if (filterError) { return false; } const relativePath = this.stripFirstDirectory ? stripFirstDirectoryFromPath(path) : path; if (this.subPath && !relativePath.startsWith(this.subPath)) { return false; } if (this.filter) { const innerPath = path.split("/").slice(strip).join("/"); try { return this.filter(innerPath, { size: stat.size }); } catch (error) { filterError = error; return false; } } return true; } }) ); if (filterError) { if (!(options == null ? void 0 : options.targetDir)) { await fs__default["default"].remove(dir).catch(() => { }); } throw filterError; } return dir; } } var __defProp$6 = Object.defineProperty; var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$6 = (obj, key, value) => { __defNormalProp$6(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; class ZipArchiveResponse { constructor(stream, subPath, workDir, etag, filter) { this.stream = stream; this.subPath = subPath; this.workDir = workDir; this.etag = etag; this.filter = filter; __publicField$6(this, "read", false); if (subPath) { if (!subPath.endsWith("/")) { this.subPath += "/"; } if (subPath.startsWith("/")) { throw new TypeError( `ZipArchiveResponse subPath must not start with a /, got '${subPath}'` ); } } this.etag = etag; } /* Make sure the input stream is only read once */ onlyOnce() { if (this.read) { throw new Error("Response has already been read"); } this.read = true; } /* File path relative to the root extracted directory or a sub directory if subpath is set. */
getInnerPath(path) { return path.slice(this.subPath.length); } shouldBeIncluded(entry) { if (this.subPath) { if (!entry.fileName.startsWith(this.subPath)) { return false; } } if (this.filter) { return this.filter(this.getInnerPath(entry.fileName), { size: entry.uncompressedSize }); } return true; } async streamToTemporaryFile(stream) { const tmpDir = await fs__default["default"].mkdtemp( platformPath__default["default"].join(this.workDir, "backstage-tmp") ); const tmpFile = platformPath__default["default"].join(tmpDir, "tmp.zip"); const writeStream = fs__default["default"].createWriteStream(tmpFile); return new Promise((resolve, reject) => { writeStream.on("error", reject); writeStream.on("finish", () => { writeStream.end(); resolve({ fileName: tmpFile, cleanup: () => fs__default["default"].rm(tmpDir, { recursive: true }) }); }); stream.pipe(writeStream); }); } forEveryZipEntry(zip, callback) { return new Promise((resolve, reject) => { yauzl__default["default"].open(zip, { lazyEntries: true }, (err, zipfile) => { if (err || !zipfile) { reject(err || new Error(`Failed to open zip file ${zip}`)); return; } zipfile.on("entry", async (entry) => { if (!entry.fileName.endsWith("/") && this.shouldBeIncluded(entry)) { zipfile.openReadStream(entry, async (openErr, readStream) => { if (openErr || !readStream) { reject( openErr || new Error(`Failed to open zip entry ${entry.fileName}`) ); return; } await callback(entry, readStream); zipfile.readEntry(); }); } else { zipfile.readEntry(); } }); zipfile.once("end", () => resolve()); zipfile.on("error", (e) => reject(e)); zipfile.readEntry(); }); }); } async files() { this.onlyOnce(); const files = Array(); const temporary = await this.streamToTemporaryFile(this.stream); await this.forEveryZipEntry(temporary.fileName, async (entry, content) => { files.push({ path: this.getInnerPath(entry.fileName), content: async () => await streamToBuffer(content), lastModifiedAt: entry.lastModFileTime ? new Date(entry.lastModFileTime) : void 0 }); }); await temporary.cleanup(); return files; } async archive() { this.onlyOnce(); if (!this.subPath) { return this.stream; } const archive = archiver__default["default"]("zip"); const temporary = await this.streamToTemporaryFile(this.stream); await this.forEveryZipEntry(temporary.fileName, async (entry, content) => { archive.append(await streamToBuffer(content), { name: this.getInnerPath(entry.fileName) }); }); archive.finalize(); await temporary.cleanup(); return archive; } async dir(options) { var _a; this.onlyOnce(); const dir = (_a = options == null ? void 0 : options.targetDir) != null ? _a : await fs__default["default"].mkdtemp(platformPath__default["default"].join(this.workDir, "backstage-")); const temporary = await this.streamToTemporaryFile(this.stream); await this.forEveryZipEntry(temporary.fileName, async (entry, content) => { const entryPath = this.getInnerPath(entry.fileName); const dirname = platformPath__default["default"].dirname(entryPath); if (dirname) { await fs__default["default"].mkdirp(paths.resolveSafeChildPath(dir, dirname)); } return new Promise(async (resolve, reject) => { const file = fs__default["default"].createWriteStream(paths.resolveSafeChildPath(dir, entryPath)); file.on("finish", resolve); content.on("error", reject); content.pipe(file); }); }); await temporary.cleanup(); return dir; } } var __defProp$5 = Object.defineProperty; var __defNormalProp$5 = (obj, key, value) => key in obj ? 
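// Worked example for the ZipArchiveResponse path handling above: with subPath
// 'docs/' (the constructor appends the trailing slash when missing), an entry
// named 'docs/index.md' passes shouldBeIncluded() and is exposed as
// 'index.md' by getInnerPath(), while entries outside 'docs/' are skipped.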
__defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$5 = (obj, key, value) => { __defNormalProp$5(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const pipeline = util.promisify(stream.pipeline); class ReadableArrayResponse { constructor(stream, workDir, etag) { this.stream = stream; this.workDir = workDir; this.etag = etag; __publicField$5(this, "read", false); this.etag = etag; } /* Make sure the input stream is only read once */ onlyOnce() { if (this.read) { throw new Error("Response has already been read"); } this.read = true; } async files() { var _a; this.onlyOnce(); const files = Array(); for (let i = 0; i < this.stream.length; i++) { if (!this.stream[i].path.endsWith("/")) { files.push({ path: this.stream[i].path, content: () => getRawBody__default["default"](this.stream[i].data), lastModifiedAt: (_a = this.stream[i]) == null ? void 0 : _a.lastModifiedAt }); } } return files; } async archive() { const tmpDir = await this.dir(); try { const data = await new Promise(async (resolve) => { await pipeline( tar__default["default"].create({ cwd: tmpDir }, [""]), concatStream__default["default"](resolve) ); }); return stream.Readable.from(data); } finally { await fs__default["default"].remove(tmpDir); } } async dir(options) { var _a; this.onlyOnce(); const dir = (_a = options == null ? void 0 : options.targetDir) != null ? _a : await fs__default["default"].mkdtemp(platformPath__default["default"].join(this.workDir, "backstage-")); for (let i = 0; i < this.stream.length; i++) { if (!this.stream[i].path.endsWith("/")) { await pipeline( this.stream[i].data, fs__default["default"].createWriteStream( platformPath__default["default"].join(dir, platformPath.basename(this.stream[i].path)) ) ); } } return dir; } } class DefaultReadTreeResponseFactory { constructor(workDir) { this.workDir = workDir; } static create(options) { var _a; return new DefaultReadTreeResponseFactory( (_a = options.config.getOptionalString("backend.workingDirectory")) != null ? _a : os__default["default"].tmpdir() ); } async fromTarArchive(options) { var _a, _b; return new TarArchiveResponse( options.stream, (_a = options.subpath) != null ? _a : "", this.workDir, options.etag, options.filter, (_b = options.stripFirstDirectory) != null ? _b : true ); } async fromZipArchive(options) { var _a; return new ZipArchiveResponse( options.stream, (_a = options.subpath) != null ? _a : "", this.workDir, options.etag, options.filter ); } async fromReadableArray(options) { return new ReadableArrayResponse(options, this.workDir, ""); } } var __defProp$4 = Object.defineProperty; var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$4 = (obj, key, value) => { __defNormalProp$4(obj, typeof key !== "symbol" ?
key + "" : key, value); return value; }; const GOOGLE_GCS_HOST = "storage.cloud.google.com"; const parseURL = (url) => { const { host, pathname } = new URL(url); if (host !== GOOGLE_GCS_HOST) { throw new Error(`not a valid GCS URL: ${url}`); } const [, bucket, ...key] = pathname.split("/"); return { host, bucket, key: key.join("/") }; }; const _GoogleGcsUrlReader = class _GoogleGcsUrlReader { constructor(integration, storage) { this.integration = integration; this.storage = storage; } readStreamFromUrl(url) { const { bucket, key } = parseURL(url); return this.storage.bucket(bucket).file(key).createReadStream(); } async read(url) { try { return await getRawBody__default["default"](this.readStreamFromUrl(url)); } catch (error) { throw new Error(`unable to read gcs file from ${url}, ${error}`); } } async readUrl(url, _options) { const stream = this.readStreamFromUrl(url); return ReadUrlResponseFactory.fromReadable(stream); } async readTree() { throw new Error("GcsUrlReader does not implement readTree"); } async search(url) { const { bucket, key: pattern } = parseURL(url); if (!pattern.endsWith("*") || pattern.indexOf("*") !== pattern.length - 1) { throw new Error("GcsUrlReader only supports prefix-based searches"); } const [files] = await this.storage.bucket(bucket).getFiles({ autoPaginate: true, prefix: pattern.split("*").join("") }); return { files: files.map((file) => { const fullUrl = ["https:/", GOOGLE_GCS_HOST, bucket, file.name].join( "/" ); return { url: fullUrl, content: async () => { const readResponse = await this.readUrl(fullUrl); return readResponse.buffer(); } }; }), // TODO etag is not implemented yet. etag: "NOT/IMPLEMENTED" }; } toString() { const key = this.integration.privateKey; return `googleGcs{host=${GOOGLE_GCS_HOST},authed=${Boolean(key)}}`; } }; __publicField$4(_GoogleGcsUrlReader, "factory", ({ config, logger }) => { if (!config.has("integrations.googleGcs")) { return []; } const gcsConfig = integration.readGoogleGcsIntegrationConfig( config.getConfig("integrations.googleGcs") ); let storage$1; if (!gcsConfig.clientEmail || !gcsConfig.privateKey) { logger.info( "googleGcs credentials not found in config. Using default credentials provider." ); storage$1 = new storage.Storage(); } else { storage$1 = new storage.Storage({ credentials: { client_email: gcsConfig.clientEmail || void 0, private_key: gcsConfig.privateKey || void 0 } }); } const reader = new _GoogleGcsUrlReader(gcsConfig, storage$1); const predicate = (url) => url.host === GOOGLE_GCS_HOST; return [{ reader, predicate }]; }); let GoogleGcsUrlReader = _GoogleGcsUrlReader; class UrlReaders { /** * Creates a custom {@link @backstage/backend-plugin-api#UrlReaderService} wrapper for your own set of factories. */ static create(options) { const { logger, config, factories } = options; const mux = new UrlReaderPredicateMux(); const treeResponseFactory = DefaultReadTreeResponseFactory.create({ config }); for (const factory of factories != null ? factories : []) { const tuples = factory({ config, logger, treeResponseFactory }); for (const tuple of tuples) { mux.register(tuple); } } return mux; } /** * Creates a {@link @backstage/backend-plugin-api#UrlReaderService} wrapper that includes all the default factories * from this package. * * Any additional factories passed will be loaded before the default ones. 
*/ static default(options) { const { logger, config, factories = [] } = options; return UrlReaders.create({ logger, config, factories: factories.concat([ AzureUrlReader.factory, BitbucketCloudUrlReader.factory, BitbucketServerUrlReader.factory, BitbucketUrlReader.factory, GerritUrlReader.factory, GithubUrlReader.factory, GiteaUrlReader.factory, GitlabUrlReader.factory, GoogleGcsUrlReader.factory, AwsS3UrlReader.factory, FetchUrlReader.factory ]) }); } } var __defProp$3 = Object.defineProperty; var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$3 = (obj, key, value) => { __defNormalProp$3(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; class ServiceBuilderImpl { constructor(moduleRef) { __publicField$3(this, "logger"); __publicField$3(this, "serverOptions"); __publicField$3(this, "helmetOptions"); __publicField$3(this, "corsOptions"); __publicField$3(this, "routers"); __publicField$3(this, "requestLoggingHandler"); __publicField$3(this, "errorHandler"); __publicField$3(this, "useDefaultErrorHandler");
// Reference to the module where builder is created - needed for hot module
// reloading
__publicField$3(this, "module"); this.routers = []; this.module = moduleRef; this.useDefaultErrorHandler = true; this.serverOptions = backendAppApi.readHttpServerOptions(); this.corsOptions = backendAppApi.readCorsOptions(); this.helmetOptions = backendAppApi.readHelmetOptions(); } loadConfig(config) { const backendConfig = config.getOptionalConfig("backend"); this.serverOptions = backendAppApi.readHttpServerOptions(backendConfig); this.corsOptions = backendAppApi.readCorsOptions(backendConfig); this.helmetOptions = backendAppApi.readHelmetOptions(backendConfig); return this; } setPort(port) { this.serverOptions.listen.port = port; return this; } setHost(host) { this.serverOptions.listen.host = host; return this; } setLogger(logger) { this.logger = logger; return this; } setHttpsSettings(settings) { if ("hostname" in settings.certificate) { this.serverOptions.https = { certificate: { ...settings.certificate, type: "generated" } }; } else { this.serverOptions.https = { certificate: { ...settings.certificate, type: "pem" } }; } return this; } enableCors(options) { this.corsOptions = options; return this; } setCsp(options) { const csp = this.helmetOptions.contentSecurityPolicy; this.helmetOptions.contentSecurityPolicy = { ...typeof csp === "object" ? csp : {}, directives: applyCspDirectives(options) }; return this; } addRouter(root, router) { this.routers.push([root, router]); return this; } setRequestLoggingHandler(requestLoggingHandler) { this.requestLoggingHandler = requestLoggingHandler; return this; } setErrorHandler(errorHandler) { this.errorHandler = errorHandler; return this; } disableDefaultErrorHandler() { this.useDefaultErrorHandler = false; return this; } async start() { var _a, _b; const app = express__default["default"](); const logger = (_a = this.logger) != null ? _a : getRootLogger(); app.use(helmet__default["default"](this.helmetOptions)); app.use(cors__default["default"](this.corsOptions)); app.use(compression__default["default"]()); app.use( ((_b = this.requestLoggingHandler) != null ?
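/*
 * A hedged sketch of the fluent builder flow implemented above; `router` and
 * the `cfg` instance are illustrative:
 *
 *   const service = createServiceBuilder(module)
 *     .loadConfig(cfg)                  // backend.listen, backend.cors, backend.csp, https
 *     .addRouter('/api/todos', router)  // mount an express router under a root path
 *     .setPort(7007);
 *   const server = await service.start();  // helmet + cors + compression + routers
 */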
_b : requestLoggingHandler)(logger) ); for (const [root, route] of this.routers) { app.use(root, route); } app.use(notFoundHandler()); if (this.errorHandler) { app.use(this.errorHandler); } if (this.useDefaultErrorHandler) { app.use(errorHandler()); } const server = await backendAppApi.createHttpServer(app, this.serverOptions, { logger }); useHotCleanup( this.module, () => server.stop().catch((error) => { console.error(error); }) ); await server.start(); return server; } } function applyCspDirectives(directives) { const result = helmet__default["default"].contentSecurityPolicy.getDefaultDirectives(); result["script-src"] = ["'self'", "'unsafe-eval'"]; delete result["form-action"]; if (directives) { for (const [key, value] of Object.entries(directives)) { if (value === false) { delete result[key]; } else { result[key] = value; } } } return result; } function createServiceBuilder(_module) { return new ServiceBuilderImpl(_module); } async function createStatusCheckRouter(options) { const router = Router__default["default"](); const { path = "/healthcheck", statusCheck } = options; router.use(path, await statusCheckHandler({ statusCheck })); router.use(errorHandler()); return router; } var __defProp$2 = Object.defineProperty; var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$2 = (obj, key, value) => { __defNormalProp$2(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; const TOKEN_ALG = "HS256"; const TOKEN_SUB = "backstage-server"; const TOKEN_EXPIRY_AFTER = luxon.Duration.fromObject({ hours: 1 }); const TOKEN_REISSUE_AFTER = luxon.Duration.fromObject({ minutes: 10 }); class NoopTokenManager { constructor() { __publicField$2(this, "isInsecureServerTokenManager", true); } async getToken() { return { token: "" }; } async authenticate() { } } class ServerTokenManager { constructor(secrets, options) { __publicField$2(this, "options"); __publicField$2(this, "verificationKeys"); __publicField$2(this, "signingKey"); __publicField$2(this, "privateKeyPromise"); __publicField$2(this, "currentTokenPromise"); if (!secrets.length && process.env.NODE_ENV !== "development") { throw new Error( "No secrets provided when constructing ServerTokenManager" ); } this.options = options; this.verificationKeys = secrets.map((s) => jose.base64url.decode(s)); this.signingKey = this.verificationKeys[0]; } /** * Creates a token manager that issues static fake tokens and never fails * authentication. This can be useful for testing. */ static noop() { return new NoopTokenManager(); } static fromConfig(config, options) { const keys = config.getOptionalConfigArray("backend.auth.keys"); if (keys == null ? void 0 : keys.length) { return new ServerTokenManager( keys.map((key) => key.getString("secret")), options ); } if (process.env.NODE_ENV !== "development") { throw new Error( "You must configure at least one key in backend.auth.keys for production." ); } options.logger.warn( "Generated a secret for service-to-service authentication: DEVELOPMENT USE ONLY." 
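/*
 * A hedged sketch of the production configuration ServerTokenManager.fromConfig
 * reads; the secret value is illustrative:
 *
 *   backend:
 *     auth:
 *       keys:
 *         - secret: <base64url-encoded shared secret>
 *
 *   const tokenManager = ServerTokenManager.fromConfig(cfg, { logger });
 *   const { token } = await tokenManager.getToken(); // HS256 JWT, sub "backstage-server", ~1h expiry
 *   await tokenManager.authenticate(token);          // throws AuthenticationError when invalid
 */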
); return new ServerTokenManager([], options); }
// Called when no keys have been generated yet in the dev environment
async generateKeys() { if (process.env.NODE_ENV !== "development") { throw new Error( "Key generation is not supported outside of the dev environment" ); } if (this.privateKeyPromise) { return this.privateKeyPromise; } const promise = (async () => { var _a; const secret = await jose.generateSecret(TOKEN_ALG); const jwk = await jose.exportJWK(secret); this.verificationKeys.push(jose.base64url.decode((_a = jwk.k) != null ? _a : "")); this.signingKey = this.verificationKeys[0]; return; })(); try { this.privateKeyPromise = promise; await promise; } catch (error) { this.options.logger.error(`Failed to generate new key, ${error}`); delete this.privateKeyPromise; } return promise; } async getToken() { if (!this.verificationKeys.length) { await this.generateKeys(); } if (this.currentTokenPromise) { return this.currentTokenPromise; } const result = Promise.resolve().then(async () => { const jwt = await new jose.SignJWT({}).setProtectedHeader({ alg: TOKEN_ALG }).setSubject(TOKEN_SUB).setExpirationTime( luxon.DateTime.now().plus(TOKEN_EXPIRY_AFTER).toUnixInteger() ).sign(this.signingKey); return { token: jwt }; }); this.currentTokenPromise = result; result.then(() => { setTimeout(() => { this.currentTokenPromise = void 0; }, TOKEN_REISSUE_AFTER.toMillis()); }).catch(() => { this.currentTokenPromise = void 0; }); return result; } async authenticate(token) { let verifyError = void 0; for (const key of this.verificationKeys) { try { const { protectedHeader: { alg }, payload: { sub, exp } } = await jose.jwtVerify(token, key); if (alg !== TOKEN_ALG) { throw new errors.AuthenticationError(`Illegal alg "${alg}"`); } if (sub !== TOKEN_SUB) { throw new errors.AuthenticationError(`Illegal sub "${sub}"`); } if (typeof exp !== "number") { throw new errors.AuthenticationError( "Server-to-server token had no exp claim" ); } return; } catch (e) { verifyError = e; } } throw new errors.AuthenticationError("Invalid server token", verifyError); } } var __defProp$1 = Object.defineProperty; var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField$1 = (obj, key, value) => { __defNormalProp$1(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; class DockerContainerRunner { constructor(options) { __publicField$1(this, "dockerClient"); this.dockerClient = options.dockerClient; } async runContainer(options) { const { imageName, command, args, logStream = new stream.PassThrough(), mountDirs = {}, workingDir, envVars = {}, pullImage = true, defaultUser = false } = options; try { await this.dockerClient.ping(); } catch (e) { throw new errors.ForwardedError( "This operation requires Docker. Docker does not appear to be available. Docker.ping() failed with", e ); } if (pullImage) { await new Promise((resolve, reject) => { this.dockerClient.pull(imageName, {}, (err, stream) => { if (err) return reject(err); stream.pipe(logStream, { end: false }); stream.on("end", () => resolve()); stream.on("error", (error2) => reject(error2)); return void 0; }); }); } const userOptions = {}; if (!defaultUser && process.getuid && process.getgid) { userOptions.User = `${process.getuid()}:${process.getgid()}`; } const Volumes = {}; for (const containerDir of Object.values(mountDirs)) { Volumes[containerDir] = {}; } const Binds = []; for (const [hostDir, containerDir] of Object.entries(mountDirs)) { const realHostDir = await fs__default["default"].realpath(hostDir); Binds.push(`${realHostDir}:${containerDir}`); } const Env = []; for (const [key, value] of Object.entries(envVars)) { Env.push(`${key}=${value}`); } const [{ Error: error, StatusCode: statusCode }] = await this.dockerClient.run(imageName, args, logStream, { Volumes, HostConfig: { AutoRemove: true, Binds }, ...workingDir ? { WorkingDir: workingDir } : {}, Entrypoint: command, Env, ...userOptions }); if (error) { throw new Error( `Docker failed to run with the following error message: ${error}` ); } if (statusCode !== 0) { throw new Error( `Docker container returned a non-zero exit code (${statusCode})` ); } } } var __defProp = Object.defineProperty; var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; var __publicField = (obj, key, value) => { __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value); return value; }; class KubernetesContainerRunner { constructor(options) { __publicField(this, "kubeConfig"); __publicField(this, "batchV1Api"); __publicField(this, "log"); __publicField(this, "name"); __publicField(this, "namespace"); __publicField(this, "mountBase"); __publicField(this, "podTemplate"); __publicField(this, "timeoutMs"); __publicField(this, "containerName", "executor"); const { kubeConfig, name, namespace, mountBase, podTemplate, timeoutMs } = options; this.kubeConfig = kubeConfig; this.batchV1Api = kubeConfig.makeApiClient(clientNode.BatchV1Api); this.log = new clientNode.Log(kubeConfig); this.name = name; this.namespace = this.getNamespace(kubeConfig, namespace); if (mountBase) { this.mountBase = this.validateMountBase(mountBase, podTemplate); } this.podTemplate = podTemplate; this.timeoutMs = timeoutMs || 120 * 1e3; } getNamespace(kubeConfig, namespace) { var _a; let _namespace = namespace; if (!_namespace) { _namespace = (_a = kubeConfig.getContextObject( kubeConfig.currentContext )) == null ? void 0 : _a.namespace; } if (!_namespace) { throw new Error("Cannot read current namespace from Kubernetes cluster"); } return _namespace; } validateMountBase(mountBase, podTemplate) { var _a, _b; if (!((_b = (_a = podTemplate == null ? void 0 : podTemplate.spec) == null ? void 0 : _a.volumes) == null ? void 0 : _b.filter((v) => v.name === mountBase.volumeName).length)) { throw new Error( `A Pod template containing the volume ${mountBase.volumeName} is required` ); } if (!mountBase.basePath.endsWith("/")) { mountBase.basePath += "/"; } return mountBase; } async runContainer(options) { var _a, _b; const { imageName, command, args, logStream = new stream.PassThrough(), mountDirs = {}, workingDir, envVars = {} } = options; const commandArr = typeof command === "string" ?
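/*
 * A hedged sketch of running a one-shot container with DockerContainerRunner;
 * `Docker` is the dockerode client the constructor expects (not bundled here),
 * and the image, paths and env are illustrative:
 *
 *   const runner = new DockerContainerRunner({ dockerClient: new Docker() });
 *   await runner.runContainer({
 *     imageName: 'spotify/techdocs:v1.2.3',
 *     args: ['build', '-d', '/output'],
 *     mountDirs: { '/tmp/in': '/input', '/tmp/out': '/output' },
 *     envVars: { LOG_LEVEL: 'debug' },
 *   }); // rejects if Docker is unreachable, the pull fails, or the exit code is non-zero
 */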
[command] : command; const volumeMounts = []; for (const [hostDir, containerDir] of Object.entries(mountDirs)) { if (!this.mountBase) { throw new Error( "A volumeName and a basePath must be configured to bind mount directories" ); } if (!hostDir.startsWith(this.mountBase.basePath)) { throw new Error( `Mounted '${hostDir}' dir should be subdirectories of '${this.mountBase.basePath}'` ); } volumeMounts.push({ name: this.mountBase.volumeName, mountPath: containerDir, subPath: hostDir.slice(this.mountBase.basePath.length) }); } const env = []; for (const [key, value] of Object.entries(envVars)) { env.push({ name: key, value }); } const taskId = uuid.v4(); const mergedPodTemplate = { metadata: { ...{ labels: { task: taskId } }, ...(_a = this.podTemplate) == null ? void 0 : _a.metadata }, spec: { ...{ containers: [ { name: this.containerName, image: imageName, command: commandArr, args, env, workingDir, volumeMounts } ], restartPolicy: "Never" }, ...(_b = this.podTemplate) == null ? void 0 : _b.spec } }; const jobSpec = { metadata: { generateName: `${this.name}-` }, spec: { backoffLimit: 0, ttlSecondsAfterFinished: 60, template: mergedPodTemplate } }; await this.runJob(jobSpec, taskId, logStream); } handleError(err, errorCallback) { if (err.code !== "ECONNRESET" && err.message !== "aborted") { errorCallback( handleKubernetesError( "Kubernetes watch request failed with the following error message:", err ) ); } } watchPod(taskId, callback, errorCallback) { const watch = new clientNode.Watch(this.kubeConfig); const labelSelector = `task=${taskId}`; return watch.watch( `/api/v1/namespaces/${this.namespace}/pods`, { labelSelector }, (_, pod) => { callback(pod); }, (err) => { if (err) { this.handleError(err, errorCallback); } } ); } tailLogs(taskId, logStream) { let log; let req; const watchPromise = new Promise((_, reject) => { req = this.watchPod( taskId, (pod) => { var _a, _b, _c, _d; if (log === void 0 && (((_a = pod.status) == null ? void 0 : _a.phase) === "Running" || ((_b = pod.status) == null ? void 0 : _b.phase) === "Succeeded" || ((_c = pod.status) == null ? void 0 : _c.phase) === "Failed")) { log = this.log.log( this.namespace, (_d = pod.metadata) == null ? void 0 : _d.name, this.containerName, logStream, { follow: true } ); } }, reject ); }); const logPromise = new Promise((resolve, _) => { if (!logStream.writableFinished) { logStream.on("finish", () => { resolve(); }); } else { resolve(); } }); const close = async () => { if (req) { (await req).abort(); } if (log) { (await log).abort(); } }; return { promise: Promise.race([watchPromise, logPromise]), close }; } waitPod(taskId) { let req; const promise = new Promise(async (resolve, reject) => { req = this.watchPod( taskId, (pod) => { var _a, _b; if (((_a = pod.status) == null ? void 0 : _a.phase) === "Succeeded") { resolve(); } if (((_b = pod.status) == null ? 
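/*
 * A hedged sketch of the Kubernetes flavour; the volume name, namespace and
 * paths are illustrative. mountBase is required to bind-mount host dirs, and
 * every mounted host dir must live under mountBase.basePath so it can be
 * remapped to a subPath of the shared volume:
 *
 *   const kubeConfig = new clientNode.KubeConfig();
 *   kubeConfig.loadFromDefault();
 *   const runner = new KubernetesContainerRunner({
 *     kubeConfig,
 *     name: 'techdocs-build',
 *     namespace: 'backstage',
 *     mountBase: { volumeName: 'shared', basePath: '/workspace/' },
 *     podTemplate: { spec: { volumes: [{ name: 'shared', emptyDir: {} }] } },
 *     timeoutMs: 300000,
 *   });
 *   await runner.runContainer({ imageName: 'spotify/techdocs', args: ['build'] });
 */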
void 0 : _b.phase) === "Failed") { reject(new Error("Container execution failed")); } }, reject ); }); const close = async () => { if (req) { (await req).abort(); } }; return { promise, close }; } async createJob(jobSpec) { return this.batchV1Api.createNamespacedJob(this.namespace, jobSpec).catch((err) => { throw handleKubernetesError( "Kubernetes Job creation failed with the following error message:", err ); }); } async runJob(jobSpec, taskId, logStream) { let timeout; const timeoutPromise = new Promise((_, reject) => { timeout = setTimeout( reject, this.timeoutMs, new Error(`Failed to complete in ${this.timeoutMs} ms`) ); }); const { promise: waitPromise, close: waitClose } = this.waitPod(taskId); const { promise: tailPromise, close: tailClose } = this.tailLogs( taskId, logStream ); const taskPromise = Promise.all([ waitPromise, tailPromise, this.createJob(jobSpec) ]).finally(() => { clearTimeout(timeout); }); return Promise.race([timeoutPromise, taskPromise]).finally(() => { return waitClose(); }).finally(() => { return tailClose(); }); } } function handleKubernetesError(message, err) { if (err instanceof clientNode.HttpError) { return new Error(`${message} ${err.body.message}`); } return new Error(`${message} ${err}`); } exports.packagePathMocks = paths.packagePathMocks; exports.resolvePackagePath = paths.resolvePackagePath; exports.resolveSafeChildPath = paths.resolveSafeChildPath; Object.defineProperty(exports, 'isChildPath', { enumerable: true, get: function () { return cliCommon.isChildPath; } }); exports.AwsS3UrlReader = AwsS3UrlReader; exports.AzureUrlReader = AzureUrlReader; exports.BitbucketCloudUrlReader = BitbucketCloudUrlReader; exports.BitbucketServerUrlReader = BitbucketServerUrlReader; exports.BitbucketUrlReader = BitbucketUrlReader; exports.CacheManager = CacheManager; exports.DatabaseManager = DatabaseManager; exports.DockerContainerRunner = DockerContainerRunner; exports.FetchUrlReader = FetchUrlReader; exports.GerritUrlReader = GerritUrlReader; exports.Git = Git; exports.GiteaUrlReader = GiteaUrlReader; exports.GithubUrlReader = GithubUrlReader; exports.GitlabUrlReader = GitlabUrlReader; exports.HostDiscovery = HostDiscovery; exports.KubernetesContainerRunner = KubernetesContainerRunner; exports.ReadUrlResponseFactory = ReadUrlResponseFactory; exports.ServerTokenManager = ServerTokenManager; exports.SingleHostDiscovery = SingleHostDiscovery; exports.UrlReaders = UrlReaders; exports.cacheToPluginCacheManager = cacheToPluginCacheManager; exports.coloredFormat = coloredFormat; exports.createDatabaseClient = createDatabaseClient; exports.createRootLogger = createRootLogger; exports.createServiceBuilder = createServiceBuilder; exports.createStatusCheckRouter = createStatusCheckRouter; exports.ensureDatabaseExists = ensureDatabaseExists; exports.errorHandler = errorHandler; exports.getRootLogger = getRootLogger; exports.getVoidLogger = getVoidLogger; exports.isDatabaseConflictError = isDatabaseConflictError; exports.legacyPlugin = legacyPlugin; exports.loadBackendConfig = loadBackendConfig; exports.loggerToWinstonLogger = loggerToWinstonLogger; exports.makeLegacyPlugin = makeLegacyPlugin; exports.notFoundHandler = notFoundHandler; exports.redactWinstonLogLine = redactWinstonLogLine; exports.requestLoggingHandler = requestLoggingHandler; exports.setRootLogger = setRootLogger; exports.statusCheckHandler = statusCheckHandler; exports.useHotCleanup = useHotCleanup; exports.useHotMemoize = useHotMemoize; //# sourceMappingURL=index.cjs.js.map