mirror of
https://github.com/Narcooo/inkos
synced 2026-04-21 14:37:16 +00:00
fix: align node22 runtime and publish verification
This commit is contained in:
parent
29109f35b2
commit
d24597cd89
6 changed files with 232 additions and 11 deletions
1
.node-version
Normal file
1
.node-version
Normal file
|
|
@ -0,0 +1 @@
|
|||
22
|
||||
1
.nvmrc
Normal file
1
.nvmrc
Normal file
|
|
@ -0,0 +1 @@
|
|||
22
|
||||
|
|
@ -108,6 +108,105 @@ describe.sequential("publish packaging", () => {
|
|||
);
|
||||
});
|
||||
|
||||
it("allows source workspace protocol manifests when they normalize cleanly for publish", async () => {
|
||||
const tempRoot = await mkdtemp(join(tmpdir(), "inkos-publish-verify-pass-"));
|
||||
const tempPackagesDir = join(tempRoot, "packages");
|
||||
const tempCoreDir = join(tempPackagesDir, "core");
|
||||
const tempCliDir = join(tempPackagesDir, "cli");
|
||||
|
||||
try {
|
||||
await mkdir(tempCoreDir, { recursive: true });
|
||||
await mkdir(tempCliDir, { recursive: true });
|
||||
|
||||
await writeFile(
|
||||
join(tempRoot, "package.json"),
|
||||
`${JSON.stringify({ name: "inkos", version: "0.5.1" }, null, 2)}\n`,
|
||||
);
|
||||
await writeFile(
|
||||
join(tempCoreDir, "package.json"),
|
||||
`${JSON.stringify({ name: "@actalk/inkos-core", version: "0.5.1" }, null, 2)}\n`,
|
||||
);
|
||||
await writeFile(
|
||||
join(tempCliDir, "package.json"),
|
||||
`${JSON.stringify(
|
||||
{
|
||||
name: "@actalk/inkos",
|
||||
version: "0.5.1",
|
||||
dependencies: {
|
||||
"@actalk/inkos-core": "workspace:*",
|
||||
commander: "^13.0.0",
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
)}\n`,
|
||||
);
|
||||
|
||||
expect(() =>
|
||||
execFileSync(
|
||||
"node",
|
||||
[resolve(workspaceRoot, "scripts/verify-no-workspace-protocol.mjs"), "packages/core", "packages/cli"],
|
||||
{
|
||||
cwd: tempRoot,
|
||||
env: process.env,
|
||||
encoding: "utf-8",
|
||||
stdio: "pipe",
|
||||
},
|
||||
)).not.toThrow();
|
||||
} finally {
|
||||
await rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("rejects workspace protocol manifests that normalize to the wrong internal version", async () => {
|
||||
const tempRoot = await mkdtemp(join(tmpdir(), "inkos-publish-verify-fail-"));
|
||||
const tempPackagesDir = join(tempRoot, "packages");
|
||||
const tempCoreDir = join(tempPackagesDir, "core");
|
||||
const tempCliDir = join(tempPackagesDir, "cli");
|
||||
|
||||
try {
|
||||
await mkdir(tempCoreDir, { recursive: true });
|
||||
await mkdir(tempCliDir, { recursive: true });
|
||||
|
||||
await writeFile(
|
||||
join(tempRoot, "package.json"),
|
||||
`${JSON.stringify({ name: "inkos", version: "0.5.1" }, null, 2)}\n`,
|
||||
);
|
||||
await writeFile(
|
||||
join(tempCoreDir, "package.json"),
|
||||
`${JSON.stringify({ name: "@actalk/inkos-core", version: "0.5.1" }, null, 2)}\n`,
|
||||
);
|
||||
await writeFile(
|
||||
join(tempCliDir, "package.json"),
|
||||
`${JSON.stringify(
|
||||
{
|
||||
name: "@actalk/inkos",
|
||||
version: "0.5.1",
|
||||
dependencies: {
|
||||
"@actalk/inkos-core": "workspace:0.5.0",
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
)}\n`,
|
||||
);
|
||||
|
||||
expect(() =>
|
||||
execFileSync(
|
||||
"node",
|
||||
[resolve(workspaceRoot, "scripts/verify-no-workspace-protocol.mjs"), "packages/cli"],
|
||||
{
|
||||
cwd: tempRoot,
|
||||
env: process.env,
|
||||
encoding: "utf-8",
|
||||
stdio: "pipe",
|
||||
},
|
||||
)).toThrow(/normalizes to 0\.5\.0, expected 0\.5\.1/);
|
||||
} finally {
|
||||
await rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("replaces workspace dependencies before npm pack", { timeout: 30_000 }, async () => {
|
||||
const packDir = await mkdtemp(join(tmpdir(), "inkos-cli-pack-"));
|
||||
|
||||
|
|
|
|||
|
|
@ -31,6 +31,7 @@ import { ChapterIntentSchema, type ContextPackage, type RuleStack } from "../mod
|
|||
import { buildLengthSpec, countChapterLength, formatLengthCount, isOutsideHardRange, isOutsideSoftRange, resolveLengthCountingMode, type LengthLanguage } from "../utils/length-metrics.js";
|
||||
import { analyzeLongSpanFatigue } from "../utils/long-span-fatigue.js";
|
||||
import { loadNarrativeMemorySeed, loadSnapshotCurrentStateFacts } from "../state/runtime-state-store.js";
|
||||
import { rewriteStructuredStateFromMarkdown } from "../state/state-bootstrap.js";
|
||||
import { readFile, readdir, writeFile, mkdir, rm } from "node:fs/promises";
|
||||
import { join } from "node:path";
|
||||
|
||||
|
|
@ -451,6 +452,7 @@ export class PipelineRunner {
|
|||
this.logStage(stageLanguage, { zh: "落盘草稿与真相文件", en: "persisting draft and truth files" });
|
||||
await writer.saveChapter(bookDir, draftOutput, gp.numericalSystem, resolvedLang);
|
||||
await writer.saveNewTruthFiles(bookDir, draftOutput, resolvedLang);
|
||||
await this.syncLegacyStructuredStateFromMarkdown(bookDir, chapterNumber, draftOutput);
|
||||
await this.syncNarrativeMemoryIndex(bookId);
|
||||
|
||||
// Update index
|
||||
|
|
@ -793,6 +795,7 @@ export class PipelineRunner {
|
|||
if (reviseOutput.updatedHooks !== "(伏笔池未更新)") {
|
||||
await writeFile(join(storyDir, "pending_hooks.md"), reviseOutput.updatedHooks, "utf-8");
|
||||
}
|
||||
await this.syncLegacyStructuredStateFromMarkdown(bookDir, targetChapter);
|
||||
|
||||
// Update index
|
||||
const updatedIndex = index.map((ch) =>
|
||||
|
|
@ -1152,6 +1155,7 @@ export class PipelineRunner {
|
|||
|
||||
await writer.saveChapter(bookDir, persistenceOutput, gp.numericalSystem, pipelineLang);
|
||||
await writer.saveNewTruthFiles(bookDir, persistenceOutput, pipelineLang);
|
||||
await this.syncLegacyStructuredStateFromMarkdown(bookDir, chapterNumber, persistenceOutput);
|
||||
this.logStage(stageLanguage, { zh: "同步记忆索引", en: "syncing memory indexes" });
|
||||
await this.syncNarrativeMemoryIndex(bookId);
|
||||
|
||||
|
|
@ -1549,6 +1553,7 @@ ${matrix}`,
|
|||
postWriteErrors: [],
|
||||
postWriteWarnings: [],
|
||||
}, resolvedLanguage);
|
||||
await this.syncLegacyStructuredStateFromMarkdown(bookDir, chapterNumber, output);
|
||||
await this.syncNarrativeMemoryIndex(input.bookId);
|
||||
|
||||
// Update chapter index
|
||||
|
|
@ -1835,6 +1840,24 @@ ${matrix}`,
|
|||
}
|
||||
}
|
||||
|
||||
private async syncLegacyStructuredStateFromMarkdown(
|
||||
bookDir: string,
|
||||
chapterNumber: number,
|
||||
output?: {
|
||||
readonly runtimeStateDelta?: WriteChapterOutput["runtimeStateDelta"];
|
||||
readonly runtimeStateSnapshot?: WriteChapterOutput["runtimeStateSnapshot"];
|
||||
},
|
||||
): Promise<void> {
|
||||
if (output?.runtimeStateDelta || output?.runtimeStateSnapshot) {
|
||||
return;
|
||||
}
|
||||
|
||||
await rewriteStructuredStateFromMarkdown({
|
||||
bookDir,
|
||||
fallbackChapter: chapterNumber,
|
||||
});
|
||||
}
|
||||
|
||||
private async syncNarrativeMemoryIndex(bookId: string): Promise<void> {
|
||||
const bookDir = this.state.bookDir(bookId);
|
||||
try {
|
||||
|
|
|
|||
|
|
@ -89,6 +89,70 @@ export async function bootstrapStructuredStateFromMarkdown(params: {
|
|||
};
|
||||
}
|
||||
|
||||
export async function rewriteStructuredStateFromMarkdown(params: {
|
||||
readonly bookDir: string;
|
||||
readonly fallbackChapter?: number;
|
||||
}): Promise<BootstrapStructuredStateResult> {
|
||||
const storyDir = join(params.bookDir, "story");
|
||||
const stateDir = join(storyDir, "state");
|
||||
const manifestPath = join(stateDir, "manifest.json");
|
||||
const currentStatePath = join(stateDir, "current_state.json");
|
||||
const hooksPath = join(stateDir, "hooks.json");
|
||||
const summariesPath = join(stateDir, "chapter_summaries.json");
|
||||
|
||||
await mkdir(stateDir, { recursive: true });
|
||||
|
||||
const warnings: string[] = [];
|
||||
const existingManifest = await loadJsonIfValid(manifestPath, StateManifestSchema, warnings, "manifest.json");
|
||||
const language = existingManifest?.language ?? await resolveRuntimeLanguage(params.bookDir);
|
||||
|
||||
const summariesMarkdown = await readFile(join(storyDir, "chapter_summaries.md"), "utf-8").catch(() => "");
|
||||
const summariesState = ChapterSummariesStateSchema.parse({
|
||||
rows: parseChapterSummariesMarkdown(summariesMarkdown),
|
||||
});
|
||||
|
||||
const hooksMarkdown = await readFile(join(storyDir, "pending_hooks.md"), "utf-8").catch(() => "");
|
||||
const hooksState = parsePendingHooksStateMarkdown(hooksMarkdown, warnings);
|
||||
|
||||
const inferredFallbackChapter = Math.max(
|
||||
params.fallbackChapter ?? 0,
|
||||
maxSummaryChapter(summariesState),
|
||||
maxHookChapter(hooksState.hooks),
|
||||
);
|
||||
const currentStateMarkdown = await readFile(join(storyDir, "current_state.md"), "utf-8").catch(() => "");
|
||||
const currentState = parseCurrentStateStateMarkdown(currentStateMarkdown, inferredFallbackChapter, warnings);
|
||||
|
||||
const manifest = StateManifestSchema.parse({
|
||||
schemaVersion: 2,
|
||||
language,
|
||||
lastAppliedChapter: Math.max(
|
||||
existingManifest?.lastAppliedChapter ?? 0,
|
||||
inferredFallbackChapter,
|
||||
currentState.chapter,
|
||||
maxSummaryChapter(summariesState),
|
||||
maxHookChapter(hooksState.hooks),
|
||||
),
|
||||
projectionVersion: existingManifest?.projectionVersion ?? 1,
|
||||
migrationWarnings: uniqueStrings([
|
||||
...(existingManifest?.migrationWarnings ?? []),
|
||||
...warnings,
|
||||
]),
|
||||
});
|
||||
|
||||
await Promise.all([
|
||||
writeFile(manifestPath, JSON.stringify(manifest, null, 2), "utf-8"),
|
||||
writeFile(currentStatePath, JSON.stringify(currentState, null, 2), "utf-8"),
|
||||
writeFile(hooksPath, JSON.stringify(hooksState, null, 2), "utf-8"),
|
||||
writeFile(summariesPath, JSON.stringify(summariesState, null, 2), "utf-8"),
|
||||
]);
|
||||
|
||||
return {
|
||||
createdFiles: [],
|
||||
warnings: manifest.migrationWarnings,
|
||||
manifest,
|
||||
};
|
||||
}
|
||||
|
||||
export function parseChapterSummariesMarkdown(markdown: string): StoredSummary[] {
|
||||
const rows = parseMarkdownTableRows(markdown)
|
||||
.filter((row) => /^\d+$/.test(row[0] ?? ""));
|
||||
|
|
|
|||
|
|
@ -1,15 +1,15 @@
|
|||
/**
|
||||
* Standalone verification for publish-time package manifests.
|
||||
* Verify that source manifests are publishable once prepack normalization runs.
|
||||
*
|
||||
* Usage:
|
||||
* node scripts/verify-no-workspace-protocol.mjs packages/cli packages/core
|
||||
* node ../../scripts/verify-no-workspace-protocol.mjs .
|
||||
*
|
||||
* Run this only after publish versions have been rewritten.
|
||||
* This script is safe to run on source manifests before prepack.
|
||||
*
|
||||
* Checks two invariants before publish:
|
||||
* 1. publishable dependency fields must not contain workspace: specifiers
|
||||
* 2. internal workspace dependencies must match the current workspace package version exactly
|
||||
* 1. workspace:* / workspace:^ / workspace:~ references can be normalized to real versions
|
||||
* 2. non-workspace internal dependencies already point at the current workspace version
|
||||
*/
|
||||
|
||||
import { access, readdir, readFile } from "node:fs/promises";
|
||||
|
|
@ -64,6 +64,14 @@ async function loadWorkspaceVersions(workspaceRoot) {
|
|||
return versions;
|
||||
}
|
||||
|
||||
/**
 * Translate a pnpm `workspace:` specifier into the concrete specifier it
 * becomes at publish time.
 *
 * @param {string} specifier - dependency specifier beginning with "workspace:"
 * @param {string} version - current version of the referenced workspace package
 * @returns {string} the normalized specifier ("workspace:*" → version,
 *   "workspace:^"/"workspace:~" → range on version, anything else → the
 *   literal range that followed "workspace:")
 */
function normalizeWorkspaceSpecifier(specifier, version) {
  const suffix = specifier.slice("workspace:".length);
  switch (suffix) {
    case "*":
    case "":
      return version;
    case "^":
      return `^${version}`;
    case "~":
      return `~${version}`;
    default:
      // e.g. "workspace:1.2.3" — the explicit range is kept as-is.
      return suffix;
  }
}
|
||||
|
||||
let failed = false;
|
||||
const workspaceRoot = await findWorkspaceRoot(process.cwd());
|
||||
const workspaceVersions = await loadWorkspaceVersions(workspaceRoot);
|
||||
|
|
@ -79,17 +87,42 @@ for (const dirArg of dirs) {
|
|||
const deps = pkg[field];
|
||||
if (!deps) continue;
|
||||
for (const [name, specifier] of Object.entries(deps)) {
|
||||
if (typeof specifier === "string" && specifier.startsWith("workspace:")) {
|
||||
process.stderr.write(`FAIL: ${dir} — ${field}.${name}: ${specifier}\n`);
|
||||
dirFailed = true;
|
||||
failed = true;
|
||||
const workspaceVersion = workspaceVersions.get(name);
|
||||
if (typeof specifier !== "string") {
|
||||
continue;
|
||||
}
|
||||
|
||||
const workspaceVersion = workspaceVersions.get(name);
|
||||
if (workspaceVersion && specifier !== workspaceVersion) {
|
||||
if (specifier.startsWith("workspace:")) {
|
||||
if (!workspaceVersion) {
|
||||
process.stderr.write(`FAIL: ${dir} — ${field}.${name}: ${specifier} (workspace package not found)\n`);
|
||||
dirFailed = true;
|
||||
failed = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
const normalized = normalizeWorkspaceSpecifier(specifier, workspaceVersion);
|
||||
if (
|
||||
normalized !== workspaceVersion
|
||||
&& normalized !== `^${workspaceVersion}`
|
||||
&& normalized !== `~${workspaceVersion}`
|
||||
) {
|
||||
process.stderr.write(
|
||||
`FAIL: ${dir} — ${field}.${name}: ${specifier} normalizes to ${normalized}, expected ${workspaceVersion}, ^${workspaceVersion}, or ~${workspaceVersion}\n`,
|
||||
);
|
||||
dirFailed = true;
|
||||
failed = true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (
|
||||
workspaceVersion
|
||||
&& specifier !== workspaceVersion
|
||||
&& specifier !== `^${workspaceVersion}`
|
||||
&& specifier !== `~${workspaceVersion}`
|
||||
) {
|
||||
process.stderr.write(
|
||||
`FAIL: ${dir} — ${field}.${name}: expected ${workspaceVersion}, got ${specifier}\n`,
|
||||
`FAIL: ${dir} — ${field}.${name}: expected ${workspaceVersion}, ^${workspaceVersion}, or ~${workspaceVersion}, got ${specifier}\n`,
|
||||
);
|
||||
dirFailed = true;
|
||||
failed = true;
|
||||
|
|
|
|||
Loading…
Reference in a new issue