Upgrade to Oclif v3 (#4807)

This commit is contained in:
Kamil Kisiela 2024-05-23 11:33:53 +02:00 committed by GitHub
parent 3d339e6562
commit 44b80b27ab
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
15 changed files with 1109 additions and 2503 deletions

View file

@ -6,7 +6,7 @@
"access": "restricted",
"baseBranch": "main",
"updateInternalDependencies": "patch",
"ignore": ["@hive/*", "integration-tests"],
"ignore": ["@hive/*", "integration-tests", "eslint-plugin-hive"],
"snapshot": {
"useCalculatedVersion": true,
"prereleaseTemplate": "{tag}-{datetime}-{commit}"

View file

@ -0,0 +1,5 @@
---
"@graphql-hive/cli": minor
---
Upgrade to Oclif v3

View file

@ -116,16 +116,16 @@
"patchedDependencies": {
"@theguild/buddy@0.1.0": "patches/@theguild__buddy@0.1.0.patch",
"mjml-core@4.14.0": "patches/mjml-core@4.14.0.patch",
"oclif@3.17.2": "patches/oclif@3.17.2.patch",
"@apollo/federation@0.38.1": "patches/@apollo__federation@0.38.1.patch",
"@theguild/editor@1.2.5": "patches/@theguild__editor@1.2.5.patch",
"eslint@8.57.0": "patches/eslint@8.57.0.patch",
"@oclif/core@2.8.5": "patches/@oclif__core@2.8.5.patch",
"@graphql-eslint/eslint-plugin@3.20.1": "patches/@graphql-eslint__eslint-plugin@3.20.1.patch",
"graphiql@3.0.0-alpha.0": "patches/graphiql@3.0.0-alpha.0.patch",
"@graphiql/react@0.18.0-alpha.0": "patches/@graphiql__react@0.18.0-alpha.0.patch",
"got@14.2.1": "patches/got@14.2.1.patch",
"slonik@30.4.4": "patches/slonik@30.4.4.patch"
"slonik@30.4.4": "patches/slonik@30.4.4.patch",
"@oclif/core@3.26.6": "patches/@oclif__core@3.26.6.patch",
"oclif@4.11.0": "patches/oclif@4.11.0.patch"
}
}
}

View file

@ -40,7 +40,7 @@ curl -sSL https://graphql-hive.com/install.sh | sh
- [`hive config:reset`](#hive-configreset)
- [`hive config:set KEY VALUE`](#hive-configset-key-value)
- [`hive dev`](#hive-dev)
- [`hive help [COMMANDS]`](#hive-help-commands)
- [`hive help [COMMAND]`](#hive-help-command)
- [`hive introspect LOCATION`](#hive-introspect-location)
- [`hive operations:check FILE`](#hive-operationscheck-file)
- [`hive schema:check FILE`](#hive-schemacheck-file)
@ -71,7 +71,7 @@ DESCRIPTION
```
_See code:
[dist/commands/artifact/fetch.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/artifact/fetch.js)_
[dist/commands/artifact/fetch.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/artifact/fetch.js)_
## `hive config:delete KEY`
@ -89,7 +89,7 @@ DESCRIPTION
```
_See code:
[dist/commands/config/delete.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/config/delete.js)_
[dist/commands/config/delete.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/config/delete.js)_
## `hive config:get KEY`
@ -107,7 +107,7 @@ DESCRIPTION
```
_See code:
[dist/commands/config/get.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/config/get.js)_
[dist/commands/config/get.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/config/get.js)_
## `hive config:reset`
@ -122,7 +122,7 @@ DESCRIPTION
```
_See code:
[dist/commands/config/reset.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/config/reset.js)_
[dist/commands/config/reset.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/config/reset.js)_
## `hive config:set KEY VALUE`
@ -141,44 +141,49 @@ DESCRIPTION
```
_See code:
[dist/commands/config/set.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/config/set.js)_
[dist/commands/config/set.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/config/set.js)_
## `hive dev`
develop and compose Supergraph with service substitution (only available for Federation projects)
Develop and compose Supergraph with service substitution
```
USAGE
$ hive dev (--url <value> --service <value>) [--registry.endpoint <value>] [--registry.accessToken
<value>] [--schema <value> ] [--watch] [--watchInterval <value>] [--write <value>]
$ hive dev (--url <value> --service <value>) [--registry.endpoint <value>] [--registry <value>]
[--registry.accessToken <value>] [--token <value>] [--schema <value> ] [--watch] [--watchInterval <value>] [--write
<value>]
FLAGS
--registry=<value> registry address
--registry.accessToken=<value> registry access token
--registry.endpoint=<value> registry endpoint
--schema=<filepath>... Service sdl. If not provided, will be introspected from the service
--service=<string>... (required) Service name
--token=<value> api token
--url=<address>... (required) Service url
--watch Watch mode
--watchInterval=<value> [default: 1000] Watch interval in milliseconds
--write=<value> [default: supergraph.graphql] Where to save the supergraph schema file
DESCRIPTION
develop and compose Supergraph with service substitution (only available for Federation projects)
Develop and compose Supergraph with service substitution
Only available for Federation projects.
Work in Progress: Please note that this command is still under development and may undergo changes in future releases
```
_See code:
[dist/commands/dev.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/dev.js)_
[dist/commands/dev.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/dev.js)_
## `hive help [COMMANDS]`
## `hive help [COMMAND]`
Display help for hive.
```
USAGE
$ hive help [COMMANDS] [-n]
$ hive help [COMMAND...] [-n]
ARGUMENTS
COMMANDS Command to show help for.
COMMAND... Command to show help for.
FLAGS
-n, --nested-commands Include all nested commands in the output.
@ -188,7 +193,7 @@ DESCRIPTION
```
_See code:
[@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v5.2.20/src/commands/help.ts)_
[@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v6.0.22/src/commands/help.ts)_
## `hive introspect LOCATION`
@ -210,7 +215,7 @@ DESCRIPTION
```
_See code:
[dist/commands/introspect.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/introspect.js)_
[dist/commands/introspect.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/introspect.js)_
## `hive operations:check FILE`
@ -260,7 +265,7 @@ DESCRIPTION
```
_See code:
[dist/commands/operations/check.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/operations/check.js)_
[dist/commands/operations/check.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/operations/check.js)_
## `hive schema:check FILE`
@ -294,7 +299,7 @@ DESCRIPTION
```
_See code:
[dist/commands/schema/check.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/schema/check.js)_
[dist/commands/schema/check.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/schema/check.js)_
## `hive schema:delete SERVICE`
@ -321,7 +326,7 @@ DESCRIPTION
```
_See code:
[dist/commands/schema/delete.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/schema/delete.js)_
[dist/commands/schema/delete.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/schema/delete.js)_
## `hive schema:fetch ACTIONID`
@ -349,7 +354,7 @@ DESCRIPTION
```
_See code:
[dist/commands/schema/fetch.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/schema/fetch.js)_
[dist/commands/schema/fetch.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/schema/fetch.js)_
## `hive schema:publish FILE`
@ -387,7 +392,7 @@ DESCRIPTION
```
_See code:
[dist/commands/schema/publish.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/schema/publish.js)_
[dist/commands/schema/publish.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/schema/publish.js)_
## `hive update [CHANNEL]`
@ -395,13 +400,13 @@ update the hive CLI
```
USAGE
$ hive update [CHANNEL] [-a] [-v <value> | -i] [--force]
$ hive update [CHANNEL] [-a] [--force] [-i | -v <value>]
FLAGS
-a, --available Install a specific version.
-a, --available See available versions.
-i, --interactive Interactively select version to install. This is ignored if a channel is provided.
-v, --version=<value> Install a specific version.
--force Force a re-download of the requested version.
--force Force a re-download of the requested version.
DESCRIPTION
update the hive CLI
@ -425,7 +430,7 @@ EXAMPLES
```
_See code:
[@oclif/plugin-update](https://github.com/oclif/plugin-update/blob/v3.2.4/src/commands/update.ts)_
[@oclif/plugin-update](https://github.com/oclif/plugin-update/blob/v4.2.13/src/commands/update.ts)_
## `hive whoami`
@ -447,7 +452,7 @@ DESCRIPTION
```
_See code:
[dist/commands/whoami.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.34.1/dist/commands/whoami.js)_
[dist/commands/whoami.js](https://github.com/kamilkisiela/graphql-hive/blob/v0.37.0/dist/commands/whoami.js)_
<!-- commandsstop -->

View file

@ -1,17 +1,17 @@
#!/usr/bin/env node
#!/usr/bin/env tsx
const oclif = require('@oclif/core');
const path = require('path');
const project = path.join(__dirname, '..', 'tsconfig.json');
// In dev mode -> use ts-node and dev plugins
process.env.NODE_ENV = 'development';
require('ts-node').register({ project });
// In dev mode, always show stack traces
oclif.settings.debug = true;
// Start the CLI
oclif.run().then(oclif.flush).catch(oclif.Errors.handle);
oclif
.execute({ development: true, dir: __dirname })
.catch(e => {
throw e;
})
.then(() => {});

View file

@ -1,6 +1,10 @@
#!/usr/bin/env node
require('@oclif/core')
.run()
.then(require('@oclif/core/flush'))
.catch(require('@oclif/core/handle'));
const oclif = require('@oclif/core');
oclif
.execute({ dir: __dirname })
.catch(e => {
throw e;
})
.then(() => {});

View file

@ -54,9 +54,9 @@
"@graphql-tools/url-loader": "~8.0.0",
"@graphql-tools/utils": "10.2.0",
"@graphql-typed-document-node/core": "3.2.0",
"@oclif/core": "^2.8.5",
"@oclif/plugin-help": "5.2.20",
"@oclif/plugin-update": "3.2.4",
"@oclif/core": "^3.26.6",
"@oclif/plugin-help": "6.0.22",
"@oclif/plugin-update": "4.2.13",
"@whatwg-node/fetch": "0.9.17",
"colors": "1.4.0",
"env-ci": "7.3.0",
@ -70,9 +70,9 @@
"@types/env-ci": "3.1.4",
"@types/mkdirp": "2.0.0",
"graphql": "16.8.1",
"oclif": "3.17.2",
"oclif": "4.11.0",
"rimraf": "4.4.1",
"ts-node": "10.9.2"
"tsx": "4.10.5"
},
"publishConfig": {
"registry": "https://registry.npmjs.org",

View file

@ -1,241 +0,0 @@
diff --git a/lib/module-loader.js b/lib/module-loader.js
index e28f1254473d53c5f0b21c390f19ced16dc1d27b..4907936a8252e02f59145bd3297512d93302d4d8 100644
--- a/lib/module-loader.js
+++ b/lib/module-loader.js
@@ -127,7 +127,21 @@ class ModuleLoader {
return config.type !== undefined;
};
try {
- filePath = require.resolve(modulePath);
+ try {
+ // We wrap it with try/catch and fallback to custom path if it fails to make it compatible with Hive.
+ // Due to some weird behavior in tsup/esbuild, it fails to resolve the path to the module.
+ filePath = require.resolve(modulePath);
+ } catch (error) {
+ const customPath = process.env.OCLIF_CLI_CUSTOM_PATH;
+ if (typeof customPath !== "string") {
+ throw error;
+ }
+ modulePath = modulePath.replace('/src/', '/dist/').replace('\\src\\', '\\dist\\');
+ filePath = require.resolve(
+ path.resolve(customPath, modulePath) + ".js"
+ );
+ }
+
isESM = ModuleLoader.isPathModule(filePath);
}
catch {
diff --git a/lib/module-loader.modified.js b/lib/module-loader.modified.js
new file mode 100644
index 0000000000000000000000000000000000000000..bd3bf6cda2c0537590ee13860bbe36fbfb3c24d3
--- /dev/null
+++ b/lib/module-loader.modified.js
@@ -0,0 +1,208 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const path = require("path");
+const url = require("url");
+const fs = require("fs-extra");
+const errors_1 = require("./errors");
+const Config = require("./config");
+const getPackageType = require("get-package-type");
+/**
+ * Defines file extension resolution when source files do not have an extension.
+ */
+// eslint-disable-next-line camelcase
+const s_EXTENSIONS = [".ts", ".js", ".mjs", ".cjs"];
+/**
+ * Provides a mechanism to use dynamic import / import() with tsconfig -> module: commonJS as otherwise import() gets
+ * transpiled to require().
+ */
+const _importDynamic = new Function("modulePath", "return import(modulePath)"); // eslint-disable-line no-new-func
+/**
+ * Provides a static class with several utility methods to work with Oclif config / plugin to load ESM or CJS Node
+ * modules and source files.
+ *
+ * @author Michael Leahy <support@typhonjs.io> (https://github.com/typhonrt)
+ */
+// eslint-disable-next-line unicorn/no-static-only-class
+class ModuleLoader {
+ /**
+ * Loads and returns a module.
+ *
+ * Uses `getPackageType` to determine if `type` is set to 'module. If so loads '.js' files as ESM otherwise uses
+ * a bare require to load as CJS. Also loads '.mjs' files as ESM.
+ *
+ * Uses dynamic import to load ESM source or require for CommonJS.
+ *
+ * A unique error, ModuleLoadError, combines both CJS and ESM loader module not found errors into a single error that
+ * provides a consistent stack trace and info.
+ *
+ * @param {IConfig|IPlugin} config - Oclif config or plugin config.
+ * @param {string} modulePath - NPM module name or file path to load.
+ *
+ * @returns {Promise<*>} The entire ESM module from dynamic import or CJS module by require.
+ */
+ static async load(config, modulePath) {
+ let filePath;
+ let isESM;
+ try {
+ ({ isESM, filePath } = ModuleLoader.resolvePath(config, modulePath));
+ // It is important to await on _importDynamic to catch the error code.
+ return isESM
+ ? await _importDynamic(url.pathToFileURL(filePath))
+ : require(filePath);
+ } catch (error) {
+ if (
+ error.code === "MODULE_NOT_FOUND" ||
+ error.code === "ERR_MODULE_NOT_FOUND"
+ ) {
+ throw new errors_1.ModuleLoadError(
+ `${isESM ? "import()" : "require"} failed to load ${
+ filePath || modulePath
+ }`
+ );
+ }
+ throw error;
+ }
+ }
+ /**
+ * Loads a module and returns an object with the module and data about the module.
+ *
+ * Uses `getPackageType` to determine if `type` is set to `module`. If so loads '.js' files as ESM otherwise uses
+ * a bare require to load as CJS. Also loads '.mjs' files as ESM.
+ *
+ * Uses dynamic import to load ESM source or require for CommonJS.
+ *
+ * A unique error, ModuleLoadError, combines both CJS and ESM loader module not found errors into a single error that
+ * provides a consistent stack trace and info.
+ *
+ * @param {IConfig|IPlugin} config - Oclif config or plugin config.
+ * @param {string} modulePath - NPM module name or file path to load.
+ *
+ * @returns {Promise<{isESM: boolean, module: *, filePath: string}>} An object with the loaded module & data including
+ * file path and whether the module is ESM.
+ */
+ static async loadWithData(config, modulePath) {
+ let filePath;
+ let isESM;
+ try {
+ ({ isESM, filePath } = ModuleLoader.resolvePath(config, modulePath));
+ const module = isESM
+ ? await _importDynamic(url.pathToFileURL(filePath))
+ : require(filePath);
+ return { isESM, module, filePath };
+ } catch (error) {
+ if (
+ error.code === "MODULE_NOT_FOUND" ||
+ error.code === "ERR_MODULE_NOT_FOUND"
+ ) {
+ throw new errors_1.ModuleLoadError(
+ `${isESM ? "import()" : "require"} failed to load ${
+ filePath || modulePath
+ }: ${error.message}`
+ );
+ }
+ throw error;
+ }
+ }
+ /**
+ * For `.js` files uses `getPackageType` to determine if `type` is set to `module` in associated `package.json`. If
+ * the `modulePath` provided ends in `.mjs` it is assumed to be ESM.
+ *
+ * @param {string} filePath - File path to test.
+ *
+ * @returns {boolean} The modulePath is an ES Module.
+ * @see https://www.npmjs.com/package/get-package-type
+ */
+ static isPathModule(filePath) {
+ const extension = path.extname(filePath).toLowerCase();
+ switch (extension) {
+ case ".js":
+ return getPackageType.sync(filePath) === "module";
+ case ".ts":
+ return getPackageType.sync(filePath) === "module";
+ case ".mjs":
+ return true;
+ default:
+ return false;
+ }
+ }
+ /**
+ * Resolves a modulePath first by `require.resolve` to allow Node to resolve an actual module. If this fails then
+ * the `modulePath` is resolved from the root of the provided config. `Config.tsPath` is used for initial resolution.
+ * If this file path does not exist then several extensions are tried from `s_EXTENSIONS` in order: '.js', '.mjs',
+ * '.cjs'. After a file path has been selected `isPathModule` is used to determine if the file is an ES Module.
+ *
+ * @param {IConfig|IPlugin} config - Oclif config or plugin config.
+ * @param {string} modulePath - File path to load.
+ *
+ * @returns {{isESM: boolean, filePath: string}} An object including file path and whether the module is ESM.
+ */
+ static resolvePath(config, modulePath) {
+ let isESM;
+ let filePath;
+ const isPlugin = (config) => {
+ return config.type !== undefined;
+ };
+ try {
+ try {
+ // We wrap it with try/catch and fallback to custom path if it fails to make it compatible with Hive.
+ // Due to some weird behavior in tsup/esbuild, it fails to resolve the path to the module.
+ filePath = require.resolve(modulePath);
+ } catch (error) {
+ const customPath = process.env.OCLIF_CLI_CUSTOM_PATH;
+ if (typeof customPath !== "string") {
+ throw error;
+ }
+ filePath = require.resolve(
+ path.resolve(customPath, modulePath) + ".js"
+ );
+ }
+ isESM = ModuleLoader.isPathModule(filePath);
+ } catch {
+ filePath = isPlugin(config)
+ ? Config.tsPath(config.root, modulePath, config.type)
+ : Config.tsPath(config.root, modulePath);
+ let fileExists = false;
+ let isDirectory = false;
+ if (fs.existsSync(filePath)) {
+ fileExists = true;
+ try {
+ if (fs.lstatSync(filePath)?.isDirectory?.()) {
+ fileExists = false;
+ isDirectory = true;
+ }
+ } catch {}
+ }
+ if (!fileExists) {
+ // Try all supported extensions.
+ let foundPath = ModuleLoader.findFile(filePath);
+ if (!foundPath && isDirectory) {
+ // Since filePath is a directory, try looking for index file.
+ foundPath = ModuleLoader.findFile(path.join(filePath, "index"));
+ }
+ if (foundPath) {
+ filePath = foundPath;
+ }
+ }
+ isESM = ModuleLoader.isPathModule(filePath);
+ }
+ return { isESM, filePath };
+ }
+ /**
+ * Try adding the different extensions from `s_EXTENSIONS` to find the file.
+ *
+ * @param {string} filePath - File path to load.
+ *
+ * @returns {string | null} Modified file path including extension or null if file is not found.
+ */
+ static findFile(filePath) {
+ // eslint-disable-next-line camelcase
+ for (const extension of s_EXTENSIONS) {
+ const testPath = `${filePath}${extension}`;
+ if (fs.existsSync(testPath)) {
+ return testPath;
+ }
+ }
+ return null;
+ }
+}
+exports.default = ModuleLoader;

View file

@ -0,0 +1,25 @@
diff --git a/lib/module-loader.js b/lib/module-loader.js
index 6162032e60f1e44ecfa19525ee07eecee1186208..58cfe558d025fc29e2e837b4435a3847b6e30585 100644
--- a/lib/module-loader.js
+++ b/lib/module-loader.js
@@ -156,6 +156,19 @@ async function resolvePath(config, modulePath) {
    let filePath;
    try {
-        filePath = require.resolve(modulePath);
+ try {
+        // We wrap this in try/catch and fall back to a custom path when resolution fails, to keep it compatible with Hive.
+        // Due to some odd behavior in tsup/esbuild, require.resolve can fail to locate the module.
+ filePath = require.resolve(modulePath);
+ } catch (error) {
+ const customPath = process.env.OCLIF_CLI_CUSTOM_PATH;
+ if (typeof customPath !== "string") {
+ throw error;
+ }
+ modulePath = modulePath.replace('/src/', '/dist/').replace('\\src\\', '\\dist\\');
+ filePath = require.resolve(
+ path.resolve(customPath, modulePath) + ".js"
+ );
+ }
isESM = isPathModule(filePath);
}
catch {

View file

@ -1,365 +0,0 @@
diff --git a/lib/commands/promote.js b/lib/commands/promote.js
index eefb804e9fd520db694a52dd6b4f881b276096b9..6ecf874c5cc3744ff983fa17c7a1b9c0b3652a61 100644
--- a/lib/commands/promote.js
+++ b/lib/commands/promote.js
@@ -27,48 +27,51 @@ class Promote extends core_1.Command {
MetadataDirective: 'REPLACE',
CacheControl: indexDefaults.maxAge,
};
- const cloudBucketCommitKey = (shortKey) => path.join(s3Config.bucket, (0, upload_util_1.commitAWSDir)(flags.version, flags.sha, s3Config), shortKey);
+ const cloudBucketCommitKey = (shortKey) => path.join(s3Config.bucket, (0, upload_util_1.commitAWSDir)(flags.version, s3Config), shortKey);
const cloudChannelKey = (shortKey) => path.join((0, upload_util_1.channelAWSDir)(flags.channel, s3Config), shortKey);
// copy tarballs manifests
if (buildConfig.targets.length > 0)
this.log(`Promoting buildmanifests & unversioned tarballs to ${flags.channel}`);
const promoteManifest = async (target) => {
+ // replace the git sha from the file name
const manifest = (0, upload_util_1.templateShortKey)('manifest', {
arch: target.arch,
bin: config.bin,
platform: target.platform,
sha: flags.sha,
version: flags.version,
- });
+ }).replace(`-${flags.sha}`, '');
// strip version & sha so update/scripts can point to a static channel manifest
- const unversionedManifest = manifest.replace(`-v${flags.version}-${flags.sha}`, '');
+ const unversionedManifest = manifest.replace(`-v${flags.version}`, '');
await aws_1.default.s3.copyObject(Object.assign(Object.assign({}, awsDefaults), { CopySource: cloudBucketCommitKey(manifest), Key: cloudChannelKey(unversionedManifest) }));
};
const promoteGzTarballs = async (target) => {
+ // replace the git sha from the file name
const versionedTarGzName = (0, upload_util_1.templateShortKey)('versioned', '.tar.gz', {
arch: target.arch,
bin: config.bin,
platform: target.platform,
sha: flags.sha,
version: flags.version,
- });
+ }).replace(`-${flags.sha}`, '');
const versionedTarGzKey = cloudBucketCommitKey(versionedTarGzName);
// strip version & sha so update/scripts can point to a static channel tarball
- const unversionedTarGzName = versionedTarGzName.replace(`-v${flags.version}-${flags.sha}`, '');
+ const unversionedTarGzName = versionedTarGzName.replace(`-v${flags.version}`, '');
const unversionedTarGzKey = cloudChannelKey(unversionedTarGzName);
await Promise.all([aws_1.default.s3.copyObject(Object.assign(Object.assign({}, awsDefaults), { CopySource: versionedTarGzKey, Key: unversionedTarGzKey }))].concat(flags.indexes ? [(0, version_indexes_1.appendToIndex)(Object.assign(Object.assign({}, indexDefaults), { originalUrl: versionedTarGzKey, filename: unversionedTarGzName }))] : []));
};
const promoteXzTarballs = async (target) => {
+ // replace the git sha from the file name
const versionedTarXzName = (0, upload_util_1.templateShortKey)('versioned', '.tar.xz', {
arch: target.arch,
bin: config.bin,
platform: target.platform,
sha: flags.sha,
version: flags.version,
- });
+ }).replace(`-${flags.sha}`, '');
const versionedTarXzKey = cloudBucketCommitKey(versionedTarXzName);
// strip version & sha so update/scripts can point to a static channel tarball
- const unversionedTarXzName = versionedTarXzName.replace(`-v${flags.version}-${flags.sha}`, '');
+ const unversionedTarXzName = versionedTarXzName.replace(`-v${flags.version}`, '');
const unversionedTarXzKey = cloudChannelKey(unversionedTarXzName);
await Promise.all([aws_1.default.s3.copyObject(Object.assign(Object.assign({}, awsDefaults), { CopySource: versionedTarXzKey, Key: unversionedTarXzKey }))].concat(flags.indexes ? [(0, version_indexes_1.appendToIndex)(Object.assign(Object.assign({}, indexDefaults), { originalUrl: versionedTarXzKey, filename: unversionedTarXzName }))] : []));
};
@@ -76,10 +79,11 @@ class Promote extends core_1.Command {
this.log(`Promoting macos pkgs to ${flags.channel}`);
const arches = _.uniq(buildConfig.targets.filter(t => t.platform === 'darwin').map(t => t.arch));
await Promise.all(arches.map(async (arch) => {
- const darwinPkg = (0, upload_util_1.templateShortKey)('macos', { bin: config.bin, version: flags.version, sha: flags.sha, arch });
+ // replace the git sha from the file name
+ const darwinPkg = (0, upload_util_1.templateShortKey)('macos', { bin: config.bin, version: flags.version, sha: flags.sha, arch }).replace(`-${flags.sha}`, '');
const darwinCopySource = cloudBucketCommitKey(darwinPkg);
// strip version & sha so scripts can point to a static channel pkg
- const unversionedPkg = darwinPkg.replace(`-v${flags.version}-${flags.sha}`, '');
+ const unversionedPkg = darwinPkg.replace(`-v${flags.version}`, '');
await Promise.all([aws_1.default.s3.copyObject(Object.assign(Object.assign({}, awsDefaults), { CopySource: darwinCopySource, Key: cloudChannelKey(unversionedPkg) }))].concat(flags.indexes ? [(0, version_indexes_1.appendToIndex)(Object.assign(Object.assign({}, indexDefaults), { originalUrl: darwinCopySource, filename: unversionedPkg }))] : []));
}));
};
@@ -88,10 +92,11 @@ class Promote extends core_1.Command {
this.log(`Promoting windows exe to ${flags.channel}`);
const arches = buildConfig.targets.filter(t => t.platform === 'win32').map(t => t.arch);
await Promise.all(arches.map(async (arch) => {
- const winPkg = (0, upload_util_1.templateShortKey)('win32', { bin: config.bin, version: flags.version, sha: flags.sha, arch });
+ // replace the git sha from the file name
+ const winPkg = (0, upload_util_1.templateShortKey)('win32', { bin: config.bin, version: flags.version, sha: flags.sha, arch }).replace(`-${flags.sha}`, '');;
const winCopySource = cloudBucketCommitKey(winPkg);
// strip version & sha so scripts can point to a static channel exe
- const unversionedExe = winPkg.replace(`-v${flags.version}-${flags.sha}`, '');
+ const unversionedExe = winPkg.replace(`-v${flags.version}`, '');
await Promise.all([aws_1.default.s3.copyObject(Object.assign(Object.assign({}, awsDefaults), { CopySource: winCopySource, Key: cloudChannelKey(unversionedExe) }))].concat(flags.indexes ? [(0, version_indexes_1.appendToIndex)(Object.assign(Object.assign({}, indexDefaults), { originalUrl: winCopySource, filename: unversionedExe }))] : []));
core_1.ux.action.stop('successfully');
}));
diff --git a/lib/commands/upload/deb.js b/lib/commands/upload/deb.js
index a4657a5d1740faf03347b03b6304641dd278cc0f..492c978b0f629ffaf6425ef271074107436adba6 100644
--- a/lib/commands/upload/deb.js
+++ b/lib/commands/upload/deb.js
@@ -21,9 +21,10 @@ class UploadDeb extends core_1.Command {
this.error('Cannot find debian artifacts', {
suggestions: ['Run "oclif pack deb" before uploading'],
});
- const cloudKeyBase = (0, upload_util_1.commitAWSDir)(config.pjson.version, buildConfig.gitSha, s3Config);
+ const cloudKeyBase = (0, upload_util_1.commitAWSDir)(config.pjson.version, s3Config);
const upload = (file) => {
- const cloudKey = `${cloudKeyBase}/apt/${file}`;
+ // remove the git sha from the file name
+ const cloudKey = `${cloudKeyBase}/apt/${file}`.replace(`-${buildConfig.gitSha}`, '');
return aws_1.default.s3.uploadFile(dist(file), Object.assign(Object.assign({}, S3Options), { CacheControl: 'max-age=86400', Key: cloudKey }));
};
// apt expects ../apt/dists/versionName/[artifacts] but oclif wants versions/sha/apt/[artifacts]
@@ -31,7 +32,8 @@ class UploadDeb extends core_1.Command {
// this workaround puts the code in both places that the redirect was doing
// with this, the docs are correct. The copies are all done in parallel so it shouldn't be too costly.
const uploadWorkaround = (file) => {
- const cloudKey = `${cloudKeyBase}/apt/./${file}`;
+ // remove the git sha from the file name
+ const cloudKey = `${cloudKeyBase}/apt/./${file}`.replace(`-${buildConfig.gitSha}`, '');
return aws_1.default.s3.uploadFile(dist(file), Object.assign(Object.assign({}, S3Options), { CacheControl: 'max-age=86400', Key: cloudKey }));
};
const uploadDeb = async (arch) => {
diff --git a/lib/commands/upload/macos.js b/lib/commands/upload/macos.js
index 7c2971748829dcda52e7665ce1c7b3a03f4a3b51..b468920eebeebf3c0148e87a8f731e075a3fa259 100644
--- a/lib/commands/upload/macos.js
+++ b/lib/commands/upload/macos.js
@@ -17,10 +17,11 @@ class UploadMacos extends core_1.Command {
Bucket: s3Config.bucket,
ACL: s3Config.acl || 'public-read',
};
- const cloudKeyBase = (0, upload_util_1.commitAWSDir)(config.version, buildConfig.gitSha, s3Config);
+ const cloudKeyBase = (0, upload_util_1.commitAWSDir)(config.version, s3Config);
const upload = async (arch) => {
const templateKey = (0, upload_util_1.templateShortKey)('macos', { bin: config.bin, version: config.version, sha: buildConfig.gitSha, arch });
- const cloudKey = `${cloudKeyBase}/${templateKey}`;
+ // remove the git sha from the file name
+ const cloudKey = `${cloudKeyBase}/${templateKey}`.replace(`-${buildConfig.gitSha}`, '');
const localPkg = dist(`macos/${templateKey}`);
if (fs.existsSync(localPkg))
await aws_1.default.s3.uploadFile(localPkg, Object.assign(Object.assign({}, S3Options), { CacheControl: 'max-age=86400', Key: cloudKey }));
diff --git a/lib/commands/upload/tarballs.js b/lib/commands/upload/tarballs.js
index 6b4577c16408ed1039ce19ef4f86d6810984ce26..d470941ee517d1a406cff582df297a439201214b 100644
--- a/lib/commands/upload/tarballs.js
+++ b/lib/commands/upload/tarballs.js
@@ -38,17 +38,21 @@ class UploadTarballs extends core_1.Command {
};
const releaseTarballs = async (ext) => {
const localKey = (0, upload_util_1.templateShortKey)('versioned', ext, shortKeyInputs);
- const cloudKey = `${(0, upload_util_1.commitAWSDir)(config.version, buildConfig.gitSha, s3Config)}/${localKey}`;
+ // remove the git sha from the file name
+ const cloudKey = `${(0, upload_util_1.commitAWSDir)(config.version, s3Config)}/${localKey}`.replace(`-${buildConfig.gitSha}`, '');
+ (0, log_1.log)(`uploading targets ${localKey} to ${cloudKey}`)
await aws_1.default.s3.uploadFile(dist(localKey), Object.assign(Object.assign({}, S3Options), { CacheControl: 'max-age=604800', ContentType: 'application/gzip', Key: cloudKey }));
};
const manifest = (0, upload_util_1.templateShortKey)('manifest', shortKeyInputs);
- const cloudKey = `${(0, upload_util_1.commitAWSDir)(config.version, buildConfig.gitSha, s3Config)}/${manifest}`;
+ // remove the git sha from the file name
+ const cloudKey = `${(0, upload_util_1.commitAWSDir)(config.version, s3Config)}/${manifest}`.replace(`-${buildConfig.gitSha}`, '');
+ (0, log_1.log)(`uploading targets ${manifest} to ${cloudKey}`)
await Promise.all([releaseTarballs('.tar.gz'), aws_1.default.s3.uploadFile(dist(manifest), Object.assign(Object.assign({}, S3Options), { CacheControl: 'max-age=86400', ContentType: 'application/json', Key: cloudKey }))].concat(xz ? [releaseTarballs('.tar.xz')] : []));
};
if (buildConfig.targets.length > 0)
(0, log_1.log)('uploading targets');
await Promise.all(buildConfig.targets.map(t => uploadTarball(t)));
- (0, log_1.log)(`done uploading tarballs & manifests for v${config.version}-${buildConfig.gitSha}`);
+ (0, log_1.log)(`done uploading tarballs & manifests for v${config.version}`);
}
}
exports.default = UploadTarballs;
diff --git a/lib/commands/upload/win.js b/lib/commands/upload/win.js
index 64bee4efe80de5dfa51810585ff22f027ecf6eb9..57eb39fe3fb89940d8939b9179fb72d4d8243620 100644
--- a/lib/commands/upload/win.js
+++ b/lib/commands/upload/win.js
@@ -25,11 +25,12 @@ class UploadWin extends core_1.Command {
suggestions: ['Run "oclif pack win" before uploading'],
});
}
- const cloudKeyBase = (0, upload_util_1.commitAWSDir)(config.pjson.version, buildConfig.gitSha, s3Config);
+ const cloudKeyBase = (0, upload_util_1.commitAWSDir)(config.pjson.version, s3Config);
const uploadWin = async (arch) => {
const templateKey = (0, upload_util_1.templateShortKey)('win32', { bin: config.bin, version: config.version, sha: buildConfig.gitSha, arch });
const localExe = dist(`win32/${templateKey}`);
- const cloudKey = `${cloudKeyBase}/${templateKey}`;
+ // remove the git sha from the file name
+ const cloudKey = `${cloudKeyBase}/${templateKey}`.replace(`-${buildConfig.gitSha}`, '');
if (fs.existsSync(localExe))
await aws_1.default.s3.uploadFile(localExe, Object.assign(Object.assign({}, S3Options), { CacheControl: 'max-age=86400', Key: cloudKey }));
};
diff --git a/lib/tarballs/bin.js b/lib/tarballs/bin.js
index 8301c7be4c3101de9caf1c1b127cf1f940a1706c..e7d955d29ce3d002a260ef7f4a1355fcdc95e018 100644
--- a/lib/tarballs/bin.js
+++ b/lib/tarballs/bin.js
@@ -7,85 +7,65 @@ const node_child_process_1 = require("node:child_process");
const node_util_1 = require("node:util");
const exec = (0, node_util_1.promisify)(node_child_process_1.exec);
async function writeBinScripts({ config, baseWorkspace, nodeVersion }) {
- var _a, _b;
- const binPathEnvVar = config.scopedEnvVarKey('BINPATH');
- const redirectedEnvVar = config.scopedEnvVarKey('REDIRECTED');
- const clientHomeEnvVar = config.scopedEnvVarKey('OCLIF_CLIENT_HOME');
- const writeWin32 = async (bin) => {
- await fs.promises.writeFile(path.join(baseWorkspace, 'bin', `${bin}.cmd`), `@echo off
+ var _a, _b;
+ const writeWin32 = async (bin) => {
+ await fs.promises.writeFile(
+ path.join(baseWorkspace, "bin", `${bin}.cmd`),
+ `@echo off
setlocal enableextensions
-if not "%${redirectedEnvVar}%"=="1" if exist "%LOCALAPPDATA%\\${bin}\\client\\bin\\${bin}.cmd" (
- set ${redirectedEnvVar}=1
- "%LOCALAPPDATA%\\${bin}\\client\\bin\\${bin}.cmd" %*
- goto:EOF
-)
-
-if not defined ${binPathEnvVar} set ${binPathEnvVar}="%~dp0${bin}.cmd"
-if exist "%~dp0..\\bin\\node.exe" (
- "%~dp0..\\bin\\node.exe" "%~dp0..\\bin\\run" %*
-) else if exist "%LOCALAPPDATA%\\oclif\\node\\node-${nodeVersion}.exe" (
- "%LOCALAPPDATA%\\oclif\\node\\node-${nodeVersion}.exe" "%~dp0..\\bin\\run" %*
-) else (
- node "%~dp0..\\bin\\run" %*
-)
-`);
- };
- const writeUnix = async () => {
- const bin = path.join(baseWorkspace, 'bin', config.bin);
- await fs.promises.writeFile(bin, `#!/usr/bin/env bash
+"%~dp0..\\bin\\node.exe" "%~dp0..\\bin\\run" %*
+`
+ );
+ };
+ const writeUnix = async () => {
+ const bin = path.join(baseWorkspace, "bin", config.bin);
+ await fs.promises.writeFile(
+ bin,
+ `#!/usr/bin/env sh
set -e
-echoerr() { echo "$@" 1>&2; }
get_script_dir () {
- SOURCE="\${BASH_SOURCE[0]}"
+ SOURCE="\$0"
# While \$SOURCE is a symlink, resolve it
while [ -h "\$SOURCE" ]; do
DIR="\$( cd -P "\$( dirname "\$SOURCE" )" && pwd )"
SOURCE="\$( readlink "\$SOURCE" )"
# If \$SOURCE was a relative symlink (so no "/" as prefix, need to resolve it relative to the symlink base directory
- [[ \$SOURCE != /* ]] && SOURCE="\$DIR/\$SOURCE"
+ if echo "\$SOURCE" | grep -q '^/*\$'; then
+ SOURCE="\$DIR/\$SOURCE"
+ fi
done
DIR="\$( cd -P "\$( dirname "\$SOURCE" )" && pwd )"
echo "\$DIR"
}
DIR=\$(get_script_dir)
-CLI_HOME=\$(cd && pwd)
-XDG_DATA_HOME=\${XDG_DATA_HOME:="\$CLI_HOME/.local/share"}
-CLIENT_HOME=\${${clientHomeEnvVar}:=$XDG_DATA_HOME/${config.dirname}/client}
-BIN_PATH="\$CLIENT_HOME/bin/${config.bin}"
-if [ -z "\$${redirectedEnvVar}" ] && [ -x "\$BIN_PATH" ] && [[ ! "\$DIR/${config.bin}" -ef "\$BIN_PATH" ]]; then
- if [ "\$DEBUG" == "*" ]; then
- echoerr "\$BIN_PATH" "\$@"
- fi
- ${binPathEnvVar}="\$BIN_PATH" ${redirectedEnvVar}=1 "\$BIN_PATH" "\$@"
-else
- export ${binPathEnvVar}=\${${binPathEnvVar}:="\$DIR/${config.bin}"}
- if [ -x "$(command -v "\$XDG_DATA_HOME/oclif/node/node-custom")" ]; then
- NODE="\$XDG_DATA_HOME/oclif/node/node-custom"
- elif [ -x "$(command -v "\$DIR/node")" ]; then
- NODE="\$DIR/node"
- elif [ -x "$(command -v "\$XDG_DATA_HOME/oclif/node/node-${nodeVersion}")" ]; then
- NODE="\$XDG_DATA_HOME/oclif/node/node-${nodeVersion}"
- elif [ -x "$(command -v node)" ]; then
- NODE=node
- else
- echoerr 'Error: node is not installed.' >&2
- exit 1
- fi
- if [ "\$DEBUG" == "*" ]; then
- echoerr ${binPathEnvVar}="\$${binPathEnvVar}" "\$NODE" "\$DIR/run" "\$@"
- fi
- "\$NODE" "\$DIR/run" "\$@"
+NODE="\$DIR/node"
+
+if [ "\$DEBUG" = "1" ]; then
+ echo "script_dir: \$DIR"
fi
-`, { mode: 0o755 });
- };
- await Promise.all([
- writeWin32(config.bin),
- writeUnix(),
- ...(_b = (_a = config.binAliases) === null || _a === void 0 ? void 0 : _a.map(alias => process.platform === 'win32' ?
- writeWin32(alias) :
- exec(`ln -sf ${config.bin} ${alias}`, { cwd: path.join(baseWorkspace, 'bin') }))) !== null && _b !== void 0 ? _b : [],
- ]);
+
+"\$NODE" "\$DIR/run" "\$@"
+`,
+ { mode: 0o755 }
+ );
+ };
+ await Promise.all([
+ writeWin32(config.bin),
+ writeUnix(),
+ ...((_b =
+ (_a = config.binAliases) === null || _a === void 0
+ ? void 0
+ : _a.map((alias) =>
+ process.platform === "win32"
+ ? writeWin32(alias)
+ : exec(`ln -sf ${config.bin} ${alias}`, {
+ cwd: path.join(baseWorkspace, "bin"),
+ })
+ )) !== null && _b !== void 0
+ ? _b
+ : []),
+ ]);
}
exports.writeBinScripts = writeBinScripts;
diff --git a/lib/tarballs/build.js b/lib/tarballs/build.js
index 384ea4be6bb7179c0622436ce41142b59b226771..f7714506b50472b9cb4c327055267cceda67efad 100644
--- a/lib/tarballs/build.js
+++ b/lib/tarballs/build.js
@@ -73,9 +73,10 @@ async function build(c, options = {}) {
}
else {
const lockpath = fs.existsSync(path.join(c.root, 'package-lock.json')) ?
- path.join(c.root, 'package-lock.json') :
- path.join(c.root, 'npm-shrinkwrap.json');
- await fs.copy(lockpath, path.join(c.workspace(), path.basename(lockpath)));
+ path.join(c.root, 'package-lock.json') : null;
+ if (lockpath) {
+ await fs.copy(lockpath, path.join(c.workspace(), path.basename(lockpath)));
+ }
await exec('npm install --production', { cwd: c.workspace() });
}
};
@@ -131,8 +132,8 @@ async function build(c, options = {}) {
if (!c.updateConfig.s3.host)
return;
const rollout = (typeof c.updateConfig.autoupdate === 'object' && c.updateConfig.autoupdate.rollout);
- const gzCloudKey = `${(0, upload_util_1.commitAWSDir)(config.version, c.gitSha, c.updateConfig.s3)}/${gzLocalKey}`;
- const xzCloudKey = `${(0, upload_util_1.commitAWSDir)(config.version, c.gitSha, c.updateConfig.s3)}/${xzLocalKey}`;
+ const gzCloudKey = `${(0, upload_util_1.commitAWSDir)(config.version, c.updateConfig.s3)}/${gzLocalKey}`;
+ const xzCloudKey = `${(0, upload_util_1.commitAWSDir)(config.version, c.updateConfig.s3)}/${xzLocalKey}`;
const [sha256gz, sha256xz] = await Promise.all([(0, util_1.hash)('sha256', c.dist(gzLocalKey))].concat(xz ? [(0, util_1.hash)('sha256', c.dist(xzLocalKey))] : []));
const manifest = {
rollout: rollout === false ? undefined : rollout,
diff --git a/lib/upload-util.js b/lib/upload-util.js
index 6963e4df0cbcc41cd924e0fa7a1790edfd297b96..aa506b97ccae547ab4bfdfc75f4dcf4a452f07fc 100644
--- a/lib/upload-util.js
+++ b/lib/upload-util.js
@@ -2,11 +2,11 @@
Object.defineProperty(exports, "__esModule", { value: true });
exports.debVersion = exports.debArch = exports.templateShortKey = exports.channelAWSDir = exports.commitAWSDir = void 0;
const path = require("path");
-function commitAWSDir(version, sha, s3Config) {
+function commitAWSDir(version, s3Config) {
let s3SubDir = s3Config.folder || '';
if (s3SubDir !== '' && s3SubDir.slice(-1) !== '/')
s3SubDir = `${s3SubDir}/`;
- return path.join(s3SubDir, 'versions', version, sha);
+ return path.join(s3SubDir, 'versions', version);
}
exports.commitAWSDir = commitAWSDir;
function channelAWSDir(channel, s3Config) {

389
patches/oclif@4.11.0.patch Normal file
View file

@ -0,0 +1,389 @@
diff --git a/lib/commands/pack/macos.js b/lib/commands/pack/macos.js
index 2d571cac7e01f4e89bf516f44419439b6ab689a3..381b2287767a61e4fa09d8340e6eedefac078075 100644
--- a/lib/commands/pack/macos.js
+++ b/lib/commands/pack/macos.js
@@ -202,7 +202,6 @@ the CLI should already exist in a directory named after the CLI that is the root
const templateKey = (0, upload_util_1.templateShortKey)('macos', {
arch,
bin: config.bin,
- sha: buildConfig.gitSha,
version: config.version,
});
const dist = buildConfig.dist(`macos/${templateKey}`);
diff --git a/lib/commands/pack/win.js b/lib/commands/pack/win.js
index 9057ee611738b861a1eb05b8656e1a48a652b5dd..68451f869daa1bc1da9266dd7a2752fc100ae224 100644
--- a/lib/commands/pack/win.js
+++ b/lib/commands/pack/win.js
@@ -310,7 +310,6 @@ the CLI should already exist in a directory named after the CLI that is the root
const templateKey = (0, upload_util_1.templateShortKey)('win32', {
arch,
bin: config.bin,
- sha: buildConfig.gitSha,
version: config.version,
});
const o = buildConfig.dist(`win32/${templateKey}`);
diff --git a/lib/commands/promote.js b/lib/commands/promote.js
index 0ea8c249f1b2fa889671c66b401862f24cf6680b..abb473956e673c5c5c846f54106813670e31c5f3 100644
--- a/lib/commands/promote.js
+++ b/lib/commands/promote.js
@@ -66,7 +66,7 @@ class Promote extends core_1.Command {
CacheControl: indexDefaults.maxAge,
MetadataDirective: client_s3_1.MetadataDirective.REPLACE,
};
- const cloudBucketCommitKey = (shortKey) => path.join(s3Config.bucket, (0, upload_util_1.commitAWSDir)(flags.version, flags.sha, s3Config), shortKey);
+ const cloudBucketCommitKey = (shortKey) => path.join(s3Config.bucket, (0, upload_util_1.commitAWSDir)(flags.version, s3Config), shortKey);
const cloudChannelKey = (shortKey) => path.join((0, upload_util_1.channelAWSDir)(flags.channel, s3Config), shortKey);
// copy tarballs manifests
if (buildConfig.targets.length > 0)
@@ -76,11 +76,10 @@ class Promote extends core_1.Command {
arch: target.arch,
bin: config.bin,
platform: target.platform,
- sha: flags.sha,
version: flags.version,
});
// strip version & sha so update/scripts can point to a static channel manifest
- const unversionedManifest = manifest.replace(`-v${flags.version}-${flags.sha}`, '');
+ const unversionedManifest = manifest.replace(`-v${flags.version}`, '');
await aws_1.default.s3.copyObject({
...awsDefaults,
CopySource: cloudBucketCommitKey(manifest),
@@ -88,17 +87,17 @@ class Promote extends core_1.Command {
});
};
const promoteGzTarballs = async (target) => {
+ // remove the git sha from the file name
const versionedTarGzName = (0, upload_util_1.templateShortKey)('versioned', {
arch: target.arch,
bin: config.bin,
ext: '.tar.gz',
platform: target.platform,
- sha: flags.sha,
version: flags.version,
- });
+ })
const versionedTarGzKey = cloudBucketCommitKey(versionedTarGzName);
// strip version & sha so update/scripts can point to a static channel tarball
- const unversionedTarGzName = versionedTarGzName.replace(`-v${flags.version}-${flags.sha}`, '');
+ const unversionedTarGzName = versionedTarGzName.replace(`-v${flags.version}`, '');
const unversionedTarGzKey = cloudChannelKey(unversionedTarGzName);
await Promise.all([
aws_1.default.s3.copyObject({
@@ -112,17 +111,17 @@ class Promote extends core_1.Command {
]);
};
const promoteXzTarballs = async (target) => {
+ // remove the git sha from the file name
const versionedTarXzName = (0, upload_util_1.templateShortKey)('versioned', {
arch: target.arch,
bin: config.bin,
ext: '.tar.xz',
platform: target.platform,
- sha: flags.sha,
version: flags.version,
- });
+ })
const versionedTarXzKey = cloudBucketCommitKey(versionedTarXzName);
// strip version & sha so update/scripts can point to a static channel tarball
- const unversionedTarXzName = versionedTarXzName.replace(`-v${flags.version}-${flags.sha}`, '');
+ const unversionedTarXzName = versionedTarXzName.replace(`-v${flags.version}`, '');
const unversionedTarXzKey = cloudChannelKey(unversionedTarXzName);
await Promise.all([
aws_1.default.s3.copyObject({
@@ -139,10 +138,11 @@ class Promote extends core_1.Command {
this.log(`Promoting macos pkgs to ${flags.channel}`);
const arches = (0, util_1.uniq)(buildConfig.targets.filter((t) => t.platform === 'darwin').map((t) => t.arch));
await Promise.all(arches.map(async (arch) => {
- const darwinPkg = (0, upload_util_1.templateShortKey)('macos', { arch, bin: config.bin, sha: flags.sha, version: flags.version });
+ // remove the git sha from the file name
+ const darwinPkg = (0, upload_util_1.templateShortKey)('macos', { arch, bin: config.bin, version: flags.version });
const darwinCopySource = cloudBucketCommitKey(darwinPkg);
// strip version & sha so scripts can point to a static channel pkg
- const unversionedPkg = darwinPkg.replace(`-v${flags.version}-${flags.sha}`, '');
+ const unversionedPkg = darwinPkg.replace(`-v${flags.version}`, '');
await Promise.all([
aws_1.default.s3.copyObject({
...awsDefaults,
@@ -160,10 +160,11 @@ class Promote extends core_1.Command {
this.log(`Promoting windows exe to ${flags.channel}`);
const arches = buildConfig.targets.filter((t) => t.platform === 'win32').map((t) => t.arch);
await Promise.all(arches.map(async (arch) => {
- const winPkg = (0, upload_util_1.templateShortKey)('win32', { arch, bin: config.bin, sha: flags.sha, version: flags.version });
+ // remove the git sha from the file name
+ const winPkg = (0, upload_util_1.templateShortKey)('win32', { arch, bin: config.bin, version: flags.version });
const winCopySource = cloudBucketCommitKey(winPkg);
// strip version & sha so scripts can point to a static channel exe
- const unversionedExe = winPkg.replace(`-v${flags.version}-${flags.sha}`, '');
+ const unversionedExe = winPkg.replace(`-v${flags.version}`, '');
await Promise.all([
aws_1.default.s3.copyObject({
...awsDefaults,
diff --git a/lib/commands/upload/deb.js b/lib/commands/upload/deb.js
index b9aaf097275cc8caa13603d7bd09e41e9bea7dee..d375d11272bf8ff3e2a32fcbd5bb86ce548c40b4 100644
--- a/lib/commands/upload/deb.js
+++ b/lib/commands/upload/deb.js
@@ -51,7 +51,7 @@ class UploadDeb extends core_1.Command {
this.error('Cannot find debian artifacts', {
suggestions: ['Run "oclif pack deb" before uploading'],
});
- const cloudKeyBase = (0, upload_util_1.commitAWSDir)(config.pjson.version, buildConfig.gitSha, s3Config);
+ const cloudKeyBase = (0, upload_util_1.commitAWSDir)(config.pjson.version, s3Config);
const upload = (file) => {
const cloudKey = `${cloudKeyBase}/apt/${file}`;
return aws_1.default.s3.uploadFile(dist(file), { ...S3Options, CacheControl: 'max-age=86400', Key: cloudKey });
@@ -88,7 +88,7 @@ class UploadDeb extends core_1.Command {
...(fs.existsSync(dist('InRelease')) ? [upload('InRelease'), uploadWorkaround('InRelease')] : []),
...(fs.existsSync(dist('Release.gpg')) ? [upload('Release.gpg'), uploadWorkaround('Release.gpg')] : []),
]);
- (0, log_1.log)(`done uploading deb artifacts for v${config.version}-${buildConfig.gitSha}`);
+ (0, log_1.log)(`done uploading deb artifacts for v${config.version}`);
}
}
exports.default = UploadDeb;
diff --git a/lib/commands/upload/macos.js b/lib/commands/upload/macos.js
index fb271df255f5df66401a7a172e7f49d23d3742e3..ff042ee310dd691625b7a95a7462ba0363576de4 100644
--- a/lib/commands/upload/macos.js
+++ b/lib/commands/upload/macos.js
@@ -50,12 +50,11 @@ class UploadMacos extends core_1.Command {
ACL: s3Config.acl || 'public-read',
Bucket: s3Config.bucket,
};
- const cloudKeyBase = (0, upload_util_1.commitAWSDir)(config.version, buildConfig.gitSha, s3Config);
+ const cloudKeyBase = (0, upload_util_1.commitAWSDir)(config.version, s3Config);
const upload = async (arch) => {
const templateKey = (0, upload_util_1.templateShortKey)('macos', {
arch,
bin: config.bin,
- sha: buildConfig.gitSha,
version: config.version,
});
const cloudKey = `${cloudKeyBase}/${templateKey}`;
@@ -69,7 +68,7 @@ class UploadMacos extends core_1.Command {
};
const arches = (0, util_1.uniq)(buildConfig.targets.filter((t) => t.platform === 'darwin').map((t) => t.arch));
await Promise.all(arches.map((a) => upload(a)));
- (0, log_1.log)(`done uploading macos pkgs for v${config.version}-${buildConfig.gitSha}`);
+ (0, log_1.log)(`done uploading macos pkgs for v${config.version}`);
}
}
exports.default = UploadMacos;
diff --git a/lib/commands/upload/tarballs.js b/lib/commands/upload/tarballs.js
index dd732e93883a61dd491f797e2576eea432500b51..4e9f087de4e52c5777a80b42f5b45bfcde9a9bc7 100644
--- a/lib/commands/upload/tarballs.js
+++ b/lib/commands/upload/tarballs.js
@@ -50,7 +50,6 @@ class UploadTarballs extends core_1.Command {
const tarball = dist((0, upload_util_1.templateShortKey)('versioned', {
bin: config.bin,
ext: '.tar.gz',
- sha: buildConfig.gitSha,
version: config.version,
...target,
}));
@@ -75,7 +74,7 @@ class UploadTarballs extends core_1.Command {
};
const releaseTarballs = async (ext) => {
const localKey = (0, upload_util_1.templateShortKey)('versioned', { ...shortKeyInputs, ext });
- const cloudKey = `${(0, upload_util_1.commitAWSDir)(config.version, buildConfig.gitSha, s3Config)}/${localKey}`;
+ const cloudKey = `${(0, upload_util_1.commitAWSDir)(config.version, s3Config)}/${localKey}`;
await aws_1.default.s3.uploadFile(dist(localKey), {
...S3Options,
CacheControl: 'max-age=604800',
@@ -84,7 +83,7 @@ class UploadTarballs extends core_1.Command {
});
};
const manifest = (0, upload_util_1.templateShortKey)('manifest', shortKeyInputs);
- const cloudKey = `${(0, upload_util_1.commitAWSDir)(config.version, buildConfig.gitSha, s3Config)}/${manifest}`;
+ const cloudKey = `${(0, upload_util_1.commitAWSDir)(config.version, s3Config)}/${manifest}`;
await Promise.all([
releaseTarballs('.tar.gz'),
aws_1.default.s3.uploadFile(dist(manifest), {
@@ -99,7 +98,7 @@ class UploadTarballs extends core_1.Command {
if (buildConfig.targets.length > 0)
(0, log_1.log)('uploading targets');
await Promise.all(buildConfig.targets.map((t) => uploadTarball(t)));
- (0, log_1.log)(`done uploading tarballs & manifests for v${config.version}-${buildConfig.gitSha}`);
+ (0, log_1.log)(`done uploading tarballs & manifests for v${config.version}`);
}
}
exports.default = UploadTarballs;
diff --git a/lib/commands/upload/win.js b/lib/commands/upload/win.js
index 2769337ddf0effe5f397300972b989db20616b4a..ca45979abf87bc16dbe085681fcd3407c59a3be8 100644
--- a/lib/commands/upload/win.js
+++ b/lib/commands/upload/win.js
@@ -60,12 +60,11 @@ class UploadWin extends core_1.Command {
suggestions: ['Run "oclif pack win" before uploading'],
});
}
- const cloudKeyBase = (0, upload_util_1.commitAWSDir)(config.pjson.version, buildConfig.gitSha, s3Config);
+ const cloudKeyBase = (0, upload_util_1.commitAWSDir)(config.pjson.version, s3Config);
const uploadWin = async (arch) => {
const templateKey = (0, upload_util_1.templateShortKey)('win32', {
arch,
bin: config.bin,
- sha: buildConfig.gitSha,
version: config.version,
});
const localExe = dist(`win32/${templateKey}`);
@@ -74,7 +73,7 @@ class UploadWin extends core_1.Command {
await aws_1.default.s3.uploadFile(localExe, { ...S3Options, CacheControl: 'max-age=86400', Key: cloudKey });
};
await Promise.all([uploadWin('x64'), uploadWin('x86')]);
- (0, log_1.log)(`done uploading windows executables for v${config.version}-${buildConfig.gitSha}`);
+ (0, log_1.log)(`done uploading windows executables for v${config.version}`);
}
}
exports.default = UploadWin;
diff --git a/lib/tarballs/bin.js b/lib/tarballs/bin.js
index 5740bb13522fdad4c65554534bfa286b6dd94f11..619b510d75605a28de2063df316fcbd0890935c6 100644
--- a/lib/tarballs/bin.js
+++ b/lib/tarballs/bin.js
@@ -30,77 +30,38 @@ const path = __importStar(require("node:path"));
const node_util_1 = require("node:util");
const exec = (0, node_util_1.promisify)(node_child_process_1.exec);
async function writeBinScripts({ baseWorkspace, config, nodeOptions, nodeVersion, }) {
- const binPathEnvVar = config.scopedEnvVarKey('BINPATH');
- const redirectedEnvVar = config.scopedEnvVarKey('REDIRECTED');
- const clientHomeEnvVar = config.scopedEnvVarKey('OCLIF_CLIENT_HOME');
const writeWin32 = async (bin) => {
await fs.promises.writeFile(path.join(baseWorkspace, 'bin', `${bin}.cmd`), `@echo off
setlocal enableextensions
-if not "%${redirectedEnvVar}%"=="1" if exist "%LOCALAPPDATA%\\${bin}\\client\\bin\\${bin}.cmd" (
- set ${redirectedEnvVar}=1
- "%LOCALAPPDATA%\\${bin}\\client\\bin\\${bin}.cmd" %*
- goto:EOF
-)
-
-if not defined ${binPathEnvVar} set ${binPathEnvVar}="%~dp0${bin}.cmd"
-
-if exist "%~dp0..\\bin\\node.exe" (
- "%~dp0..\\bin\\node.exe" ${`${nodeOptions.join(' ')} `}"%~dp0..\\bin\\run" %*
-) else if exist "%LOCALAPPDATA%\\oclif\\node\\node-${nodeVersion}.exe" (
- "%LOCALAPPDATA%\\oclif\\node\\node-${nodeVersion}.exe" ${`${nodeOptions.join(' ')} `}"%~dp0..\\bin\\run" %*
-) else (
- node ${`${nodeOptions.join(' ')} `}"%~dp0..\\bin\\run" %*
-)
+"%~dp0..\\bin\\node.exe" ${`${nodeOptions.join(' ')} `}"%~dp0..\\bin\\run" %*
`);
};
const writeUnix = async () => {
const bin = path.join(baseWorkspace, 'bin', config.bin);
- await fs.promises.writeFile(bin, `#!/usr/bin/env bash
+ await fs.promises.writeFile(bin, `#!/usr/bin/env sh
set -e
-echoerr() { echo "$@" 1>&2; }
get_script_dir () {
- SOURCE="\${BASH_SOURCE[0]}"
+ SOURCE="\$0"
# While \$SOURCE is a symlink, resolve it
while [ -h "\$SOURCE" ]; do
DIR="\$( cd -P "\$( dirname "\$SOURCE" )" && pwd )"
SOURCE="\$( readlink "\$SOURCE" )"
# If \$SOURCE was a relative symlink (so no "/" as prefix, need to resolve it relative to the symlink base directory
- [[ \$SOURCE != /* ]] && SOURCE="\$DIR/\$SOURCE"
+ if echo "\$SOURCE" | grep -q '^/*\$'; then
+ SOURCE="\$DIR/\$SOURCE"
+ fi
done
DIR="\$( cd -P "\$( dirname "\$SOURCE" )" && pwd )"
echo "\$DIR"
}
DIR=\$(get_script_dir)
-CLI_HOME=\$(cd && pwd)
-XDG_DATA_HOME=\${XDG_DATA_HOME:="\$CLI_HOME/.local/share"}
-CLIENT_HOME=\${${clientHomeEnvVar}:=$XDG_DATA_HOME/${config.dirname}/client}
-BIN_PATH="\$CLIENT_HOME/bin/${config.bin}"
-if [ -z "\$${redirectedEnvVar}" ] && [ -x "\$BIN_PATH" ] && [[ ! "\$DIR/${config.bin}" -ef "\$BIN_PATH" ]]; then
- if [ "\$DEBUG" == "*" ]; then
- echoerr "\$BIN_PATH" "\$@"
- fi
- ${binPathEnvVar}="\$BIN_PATH" ${redirectedEnvVar}=1 "\$BIN_PATH" "\$@"
-else
- export ${binPathEnvVar}=\${${binPathEnvVar}:="\$DIR/${config.bin}"}
- if [ -x "$(command -v "\$XDG_DATA_HOME/oclif/node/node-custom")" ]; then
- NODE="\$XDG_DATA_HOME/oclif/node/node-custom"
- elif [ -x "$(command -v "\$DIR/node")" ]; then
- NODE="\$DIR/node"
- elif [ -x "$(command -v "\$XDG_DATA_HOME/oclif/node/node-${nodeVersion}")" ]; then
- NODE="\$XDG_DATA_HOME/oclif/node/node-${nodeVersion}"
- elif [ -x "$(command -v node)" ]; then
- NODE=node
- else
- echoerr 'Error: node is not installed.' >&2
- exit 1
- fi
- if [ "\$DEBUG" == "*" ]; then
- echoerr ${binPathEnvVar}="\$${binPathEnvVar}" "\$NODE" ${`${nodeOptions.join(' ')} `}"\$DIR/run" "\$@"
- fi
- "\$NODE" ${`${nodeOptions.join(' ')} `}"\$DIR/run" "\$@"
+NODE="\$DIR/node"
+if [ "\$DEBUG" = "1" ]; then
+ echo "script_dir: \$DIR"
fi
+"\$NODE" ${`${nodeOptions.join(' ')} `}"\$DIR/run" "\$@"
`, { mode: 0o755 });
};
await Promise.all([
diff --git a/lib/tarballs/build.js b/lib/tarballs/build.js
index 43c98a2727341d2728d3432caf7c1ff72bf7b73d..b0d919f04f766308006f6e7ba91c8ce5c06321a9 100644
--- a/lib/tarballs/build.js
+++ b/lib/tarballs/build.js
@@ -174,8 +174,10 @@ const addDependencies = async (c) => {
else {
const lockpath = (0, node_fs_1.existsSync)(path.join(c.root, 'package-lock.json'))
? path.join(c.root, 'package-lock.json')
- : path.join(c.root, 'npm-shrinkwrap.json');
- await (0, fs_extra_1.copy)(lockpath, path.join(c.workspace(), path.basename(lockpath)));
+ : null;
+ if (lockpath) {
+ await (0, fs_extra_1.copy)(lockpath, path.join(c.workspace(), path.basename(lockpath)));
+ }
await exec('npm install --production', { cwd: c.workspace() });
}
};
@@ -230,8 +232,8 @@ const buildTarget = async (target, c, options) => {
if (!c.updateConfig.s3.host)
return;
const rollout = typeof c.updateConfig.autoupdate === 'object' && c.updateConfig.autoupdate.rollout;
- const gzCloudKey = `${(0, upload_util_1.commitAWSDir)(version, sha, c.updateConfig.s3)}/${gzLocalKey}`;
- const xzCloudKey = `${(0, upload_util_1.commitAWSDir)(version, sha, c.updateConfig.s3)}/${xzLocalKey}`;
+ const gzCloudKey = `${(0, upload_util_1.commitAWSDir)(version, c.updateConfig.s3)}/${gzLocalKey}`;
+ const xzCloudKey = `${(0, upload_util_1.commitAWSDir)(version, c.updateConfig.s3)}/${xzLocalKey}`;
const [sha256gz, sha256xz] = await Promise.all([
(0, util_1.hash)('sha256', c.dist(gzLocalKey)),
...(c.xz ? [(0, util_1.hash)('sha256', c.dist(xzLocalKey))] : []),
diff --git a/lib/upload-util.js b/lib/upload-util.js
index 19298c77a203e70f451d31c70c92442a8a3d5137..905230e5f605346ae6a65e617230ef84fce73c0b 100644
--- a/lib/upload-util.js
+++ b/lib/upload-util.js
@@ -3,11 +3,11 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.debVersion = exports.debArch = exports.templateShortKey = exports.channelAWSDir = exports.commitAWSDir = void 0;
const ejs_1 = require("ejs");
const node_path_1 = require("node:path");
-function commitAWSDir(version, sha, s3Config) {
+function commitAWSDir(version, s3Config) {
let s3SubDir = s3Config.folder || '';
if (s3SubDir !== '' && s3SubDir.slice(-1) !== '/')
s3SubDir = `${s3SubDir}/`;
- return (0, node_path_1.join)(s3SubDir, 'versions', version, sha);
+ return (0, node_path_1.join)(s3SubDir, 'versions', version);
}
exports.commitAWSDir = commitAWSDir;
function channelAWSDir(channel, s3Config) {
@@ -26,11 +26,11 @@ function templateShortKey(type, options) {
const templates = {
baseDir: '<%- bin %>',
deb: '<%- bin %>_<%- versionShaRevision %>_<%- arch %>.deb',
- macos: '<%- bin %>-v<%- version %>-<%- sha %>-<%- arch %>.pkg',
- manifest: '<%- bin %>-v<%- version %>-<%- sha %>-<%- platform %>-<%- arch %>-buildmanifest',
+ macos: '<%- bin %>-v<%- version %>-<%- arch %>.pkg',
+ manifest: '<%- bin %>-v<%- version %>-<%- platform %>-<%- arch %>-buildmanifest',
unversioned: '<%- bin %>-<%- platform %>-<%- arch %><%- ext %>',
- versioned: '<%- bin %>-v<%- version %>-<%- sha %>-<%- platform %>-<%- arch %><%- ext %>',
- win32: '<%- bin %>-v<%- version %>-<%- sha %>-<%- arch %>.exe',
+ versioned: '<%- bin %>-v<%- version %>-<%- platform %>-<%- arch %><%- ext %>',
+ win32: '<%- bin %>-v<%- version %>-<%- arch %>.exe',
};
return (0, ejs_1.render)(templates[type], { ...options });
}

File diff suppressed because it is too large Load diff

View file

@ -7,3 +7,4 @@ packages:
- integration-tests
- deployment
- scripts
- rules

View file

@ -9,7 +9,7 @@
/// @ts-check
const path = require('path');
const fs = require('fs');
const minimatch = require('minimatch');
const { minimatch } = require('minimatch');
const readPkgUp = require('eslint-module-utils/readPkgUp').default;
const moduleVisitor = require('eslint-module-utils/moduleVisitor').default;

View file

@ -3,5 +3,9 @@
"version": "0.0.0",
"type": "commonjs",
"private": true,
"main": "index.cjs"
"main": "index.cjs",
"devDependencies": {
"@types/minimatch": "5.1.2",
"minimatch": "9.0.4"
}
}