diff --git a/.changeset/funny-parks-like.md b/.changeset/funny-parks-like.md new file mode 100644 index 0000000..72258b1 --- /dev/null +++ b/.changeset/funny-parks-like.md @@ -0,0 +1,5 @@ +--- +"@lazy-release/changesets": feat +--- + +Added snapshots diff --git a/README.md b/README.md index f4c0c68..f423de8 100644 --- a/README.md +++ b/README.md @@ -101,6 +101,65 @@ This will: - `--github-token ` - GitHub token for releases (defaults to `GITHUB_TOKEN` env var) - `--draft` - Create GitHub releases as drafts +### 6. Snapshot Releases (Testing) + +Create temporary snapshot releases for testing unpublished changes: + +```bash +changeset snapshot +``` + +This will: +- Generate a unique version: `0.0.0-` (e.g., `0.0.0-1705242645`) +- Update all affected packages and their dependents +- Update internal dependencies to use exact snapshot versions +- Publish to npm with `snapshot` tag (not `latest`) +- Restore package.json files to original state +- Skip git tags and GitHub releases + +**Snapshot releases are temporary and don't modify your version history.** + +#### Options + +- `--dry-run` - Preview what would be published without actually publishing + +#### Installing Snapshots + +Install snapshot releases in other projects: + +```bash +npm install my-package@snapshot +``` + +#### Use Cases + +- **Test changes before releasing**: Validate your changes in a real environment +- **Share work in progress**: Let others test your changes without a formal release +- **CI/CD testing**: Test integration with dependent projects in CI pipelines + +#### Example Workflow + +```bash +# 1. Make changes and create changesets +git checkout -b feature/new-api +# ... make code changes ... +changeset +# Select packages, type: feat, message: "Add new API method" + +# 2. Publish snapshot for testing +changeset snapshot +# Output: Published my-package@0.0.0-1705242645 with 'snapshot' tag + +# 3. Test in another project +cd ../my-app +npm install my-package@snapshot + +# 4. 
Once testing is complete, create proper release +cd ../my-package +changeset version +changeset publish +``` + ## šŸ“‹ Configuration Edit `.changeset/config.json` to customize behavior: diff --git a/src/index.ts b/src/index.ts index e3b8c9d..36586f0 100755 --- a/src/index.ts +++ b/src/index.ts @@ -10,6 +10,7 @@ import pc from "picocolors"; import { ChangesetConfig, readConfig } from "./config.js"; import { version } from "./version.js"; import { publish } from "./publish.js"; +import { snapshot } from "./snapshot.js"; import { parseChangesetFile } from "./version.js"; async function findPackages(config: ChangesetConfig): Promise> { @@ -329,6 +330,15 @@ program process.exit(0); }); +program + .command("snapshot") + .description("Publish snapshot versions for testing (0.0.0-TIMESTAMP with 'snapshot' tag)") + .option("--dry-run", "Preview what would be published without actually publishing", false) + .action(async (options) => { + await snapshot({ dryRun: options.dryRun }); + process.exit(0); + }); + program.parse(process.argv); async function init() { diff --git a/src/publish.ts b/src/publish.ts index e8d7fef..b55c8d7 100644 --- a/src/publish.ts +++ b/src/publish.ts @@ -178,7 +178,11 @@ async function tagExistsRemote(tag: string): Promise { } } -async function publishToNpm(pkg: PackageInfo, config: ChangesetConfig) { +export async function publishToNpm( + pkg: PackageInfo, + config: ChangesetConfig, + tag: string = "latest", +) { const detected = await detect(); if (!detected) { console.warn(pc.yellow("Could not detect package manager. Skipping npm publish.")); @@ -189,21 +193,22 @@ async function publishToNpm(pkg: PackageInfo, config: ChangesetConfig) { let publishCmd = ""; const access = pkg.access || config.access; const accessFlag = access === "public" || access === "restricted" ? 
`--access ${access}` : ""; + const tagFlag = `--tag ${tag}`; switch (agent) { case "npm": - publishCmd = `npm publish ${accessFlag}`.trim(); + publishCmd = `npm publish ${tagFlag} ${accessFlag}`.trim(); break; case "yarn": case "yarn@berry": - publishCmd = `yarn publish --non-interactive ${accessFlag}`.trim(); + publishCmd = `yarn publish --non-interactive ${tagFlag} ${accessFlag}`.trim(); break; case "pnpm": case "pnpm@6": - publishCmd = `pnpm publish --no-git-checks ${accessFlag}`.trim(); + publishCmd = `pnpm publish --no-git-checks ${tagFlag} ${accessFlag}`.trim(); break; case "bun": - publishCmd = `bun publish ${accessFlag}`.trim(); + publishCmd = `bun publish ${tagFlag} ${accessFlag}`.trim(); break; default: console.warn(pc.yellow(`Unsupported package manager: ${agent}. Skipping npm publish.`)); @@ -212,12 +217,8 @@ async function publishToNpm(pkg: PackageInfo, config: ChangesetConfig) { console.log(pc.dim("Publishing to npm...")); - try { - execSync(publishCmd, { cwd: pkg.dir, stdio: "inherit" }); - console.log(pc.green("āœ”"), "Published to npm"); - } catch (error) { - throw error; - } + execSync(publishCmd, { cwd: pkg.dir, stdio: "inherit" }); + console.log(pc.green("āœ”"), "Published to npm"); } async function createGitHubRelease( @@ -237,48 +238,44 @@ async function createGitHubRelease( console.log(pc.dim("Creating GitHub release...")); - try { - const { owner, repo } = getGitHubRepoInfo(); - const token = githubToken || process.env.GITHUB_TOKEN; - - if (!token) { - throw new Error( - "GITHUB_TOKEN environment variable is required. 
" + - 'Create a token at https://github.com/settings/tokens with "repo" scope.', - ); - } + const { owner, repo } = getGitHubRepoInfo(); + const token = githubToken || process.env.GITHUB_TOKEN; - const response = await fetch(`https://api.github.com/repos/${owner}/${repo}/releases`, { - method: "POST", - headers: { - Authorization: `Bearer ${token}`, - "Content-Type": "application/json", - }, - body: JSON.stringify({ - tag_name: tag, - name: tag, - body: releaseNotes, - draft: draft ?? false, - prerelease: false, - }), - }); + if (!token) { + throw new Error( + "GITHUB_TOKEN environment variable is required. " + + 'Create a token at https://github.com/settings/tokens with "repo" scope.', + ); + } - if (!response.ok) { - const error = await response.text(); + const response = await fetch(`https://api.github.com/repos/${owner}/${repo}/releases`, { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + tag_name: tag, + name: tag, + body: releaseNotes, + draft: draft ?? false, + prerelease: false, + }), + }); - // GitHub returns 422 when a release already exists for the tag - if (response.status === 422) { - console.log(pc.dim(`GitHub release for ${tag} already exists. Skipping.`)); - return; - } + if (!response.ok) { + const error = await response.text(); - throw new Error(`GitHub API error: ${response.status} ${error}`); + // GitHub returns 422 when a release already exists for the tag + if (response.status === 422) { + console.log(pc.dim(`GitHub release for ${tag} already exists. 
Skipping.`)); + return; } - console.log(pc.green("āœ”"), "Created GitHub release"); - } catch (error) { - throw error; + throw new Error(`GitHub API error: ${response.status} ${error}`); } + + console.log(pc.green("āœ”"), "Created GitHub release"); } function getGitHubRepoInfo(): { owner: string; repo: string } { diff --git a/src/snapshot.test.ts b/src/snapshot.test.ts new file mode 100644 index 0000000..60cd82f --- /dev/null +++ b/src/snapshot.test.ts @@ -0,0 +1,765 @@ +import { describe, test, expect, beforeEach, afterEach, spyOn, mock } from "bun:test"; + +mock.module("./config.js", () => ({ + readConfig: () => ({ + access: "restricted", + baseBranch: "main", + updateInternalDependencies: "patch", + ignore: [], + lazyChangesets: { + types: [ + { + type: "feat", + displayName: "New Features", + emoji: "šŸš€", + releaseType: "minor", + promptBreakingChange: true, + }, + ], + }, + }), +})); + +import * as fs from "node:fs"; +import * as tinyglobby from "tinyglobby"; +import * as childProcess from "node:child_process"; +import * as packageManagerDetector from "package-manager-detector"; +import { + generateSnapshotVersion, + findAffectedPackages, + cascadeDependents, + backupPackageJsonFiles, + updatePackagesToSnapshot, + restorePackageJsonFiles, + snapshot, +} from "./snapshot.js"; +import type { DependencyGraph } from "./version.js"; + +describe("generateSnapshotVersion", () => { + test("should generate version with correct format", () => { + const version = generateSnapshotVersion(); + expect(version).toMatch(/^0\.0\.0-\d+$/); + }); + + test("should use unix timestamp", () => { + const beforeTimestamp = Math.floor(Date.now() / 1000); + const version = generateSnapshotVersion(); + const afterTimestamp = Math.floor(Date.now() / 1000); + + const versionTimestamp = parseInt(version.split("-")[1]); + expect(versionTimestamp).toBeGreaterThanOrEqual(beforeTimestamp); + expect(versionTimestamp).toBeLessThanOrEqual(afterTimestamp); + }); + + test("should generate 
different versions over time", async () => { + const version1 = generateSnapshotVersion(); + await new Promise((resolve) => setTimeout(resolve, 1100)); // Wait 1.1 seconds + const version2 = generateSnapshotVersion(); + + expect(version1).not.toBe(version2); + }); +}); + +describe("findAffectedPackages", () => { + test("should find packages from single changeset", () => { + const mockChangesetContent = `--- +"@test/package-a": feat +--- + +Added new feature`; + + spyOn(fs, "readFileSync").mockReturnValue(mockChangesetContent); + + const packages = findAffectedPackages([".changeset/test.md"]); + + expect(packages.size).toBe(1); + expect(packages.has("@test/package-a")).toBe(true); + }); + + test("should find multiple packages from single changeset", () => { + const mockChangesetContent = `--- +"@test/package-a": feat +"@test/package-b": fix +--- + +Updated multiple packages`; + + spyOn(fs, "readFileSync").mockReturnValue(mockChangesetContent); + + const packages = findAffectedPackages([".changeset/test.md"]); + + expect(packages.size).toBe(2); + expect(packages.has("@test/package-a")).toBe(true); + expect(packages.has("@test/package-b")).toBe(true); + }); + + test("should find packages across multiple changesets", () => { + spyOn(fs, "readFileSync").mockImplementation((path: any) => { + const pathStr = typeof path === "string" ? 
path : path.toString(); + if (pathStr.includes("changeset1")) { + return `--- +"@test/package-a": feat +--- + +Feature A`; + } + return `--- +"@test/package-b": fix +--- + +Fix B`; + }); + + const packages = findAffectedPackages([".changeset/changeset1.md", ".changeset/changeset2.md"]); + + expect(packages.size).toBe(2); + expect(packages.has("@test/package-a")).toBe(true); + expect(packages.has("@test/package-b")).toBe(true); + }); + + test("should handle empty changesets", () => { + spyOn(fs, "readFileSync").mockReturnValue("---\n---\n\n"); + + const packages = findAffectedPackages([".changeset/empty.md"]); + + expect(packages.size).toBe(0); + }); +}); + +describe("cascadeDependents", () => { + test("should return only affected packages when no dependents", () => { + const affected = new Set(["@test/package-a"]); + const graph: DependencyGraph = { + packages: new Map([ + [ + "@test/package-a", + { name: "@test/package-a", version: "1.0.0", path: "", packageJson: {} }, + ], + ]), + dependents: new Map(), + }; + + const result = cascadeDependents(affected, graph); + + expect(result.size).toBe(1); + expect(result.has("@test/package-a")).toBe(true); + }); + + test("should include direct dependents", () => { + const affected = new Set(["@test/package-a"]); + const graph: DependencyGraph = { + packages: new Map([ + [ + "@test/package-a", + { name: "@test/package-a", version: "1.0.0", path: "", packageJson: {} }, + ], + [ + "@test/package-b", + { name: "@test/package-b", version: "1.0.0", path: "", packageJson: {} }, + ], + ]), + dependents: new Map([["@test/package-a", new Set(["@test/package-b"])]]), + }; + + const result = cascadeDependents(affected, graph); + + expect(result.size).toBe(2); + expect(result.has("@test/package-a")).toBe(true); + expect(result.has("@test/package-b")).toBe(true); + }); + + test("should cascade through multiple levels", () => { + const affected = new Set(["@test/package-a"]); + const graph: DependencyGraph = { + packages: new Map([ + [ + 
"@test/package-a", + { name: "@test/package-a", version: "1.0.0", path: "", packageJson: {} }, + ], + [ + "@test/package-b", + { name: "@test/package-b", version: "1.0.0", path: "", packageJson: {} }, + ], + [ + "@test/package-c", + { name: "@test/package-c", version: "1.0.0", path: "", packageJson: {} }, + ], + ]), + dependents: new Map([ + ["@test/package-a", new Set(["@test/package-b"])], + ["@test/package-b", new Set(["@test/package-c"])], + ]), + }; + + const result = cascadeDependents(affected, graph); + + expect(result.size).toBe(3); + expect(result.has("@test/package-a")).toBe(true); + expect(result.has("@test/package-b")).toBe(true); + expect(result.has("@test/package-c")).toBe(true); + }); + + test("should handle diamond dependencies", () => { + const affected = new Set(["@test/package-a"]); + const graph: DependencyGraph = { + packages: new Map([ + [ + "@test/package-a", + { name: "@test/package-a", version: "1.0.0", path: "", packageJson: {} }, + ], + [ + "@test/package-b", + { name: "@test/package-b", version: "1.0.0", path: "", packageJson: {} }, + ], + [ + "@test/package-c", + { name: "@test/package-c", version: "1.0.0", path: "", packageJson: {} }, + ], + [ + "@test/package-d", + { name: "@test/package-d", version: "1.0.0", path: "", packageJson: {} }, + ], + ]), + dependents: new Map([ + ["@test/package-a", new Set(["@test/package-b", "@test/package-c"])], + ["@test/package-b", new Set(["@test/package-d"])], + ["@test/package-c", new Set(["@test/package-d"])], + ]), + }; + + const result = cascadeDependents(affected, graph); + + expect(result.size).toBe(4); + }); +}); + +describe("backupPackageJsonFiles", () => { + test("should backup package.json content", () => { + const packages = new Set(["@test/package-a"]); + const mockContent = JSON.stringify({ name: "@test/package-a", version: "1.0.0" }, null, 2); + const graph: DependencyGraph = { + packages: new Map([ + [ + "@test/package-a", + { + name: "@test/package-a", + version: "1.0.0", + path: 
"/test/package.json", + packageJson: {}, + }, + ], + ]), + dependents: new Map(), + }; + + spyOn(fs, "readFileSync").mockReturnValue(mockContent); + + const backups = backupPackageJsonFiles(packages, graph); + + expect(backups.size).toBe(1); + expect(backups.get("@test/package-a")?.path).toBe("/test/package.json"); + expect(backups.get("@test/package-a")?.content).toBe(mockContent); + }); + + test("should backup multiple packages", () => { + const packages = new Set(["@test/package-a", "@test/package-b"]); + const graph: DependencyGraph = { + packages: new Map([ + [ + "@test/package-a", + { + name: "@test/package-a", + version: "1.0.0", + path: "/test/a/package.json", + packageJson: {}, + }, + ], + [ + "@test/package-b", + { + name: "@test/package-b", + version: "2.0.0", + path: "/test/b/package.json", + packageJson: {}, + }, + ], + ]), + dependents: new Map(), + }; + + spyOn(fs, "readFileSync").mockImplementation((path: any) => { + const pathStr = typeof path === "string" ? path : path.toString(); + if (pathStr.includes("/a/")) { + return JSON.stringify({ name: "@test/package-a", version: "1.0.0" }); + } + return JSON.stringify({ name: "@test/package-b", version: "2.0.0" }); + }); + + const backups = backupPackageJsonFiles(packages, graph); + + expect(backups.size).toBe(2); + }); +}); + +describe("updatePackagesToSnapshot", () => { + let consoleLogSpy: any; + + beforeEach(() => { + consoleLogSpy = spyOn(console, "log").mockImplementation(() => {}); + }); + + afterEach(() => { + consoleLogSpy.mockRestore(); + }); + + test("should update package version to snapshot", () => { + const packages = new Set(["@test/package-a"]); + const snapshotVersion = "0.0.0-1234567890"; + const packageJson = { name: "@test/package-a", version: "1.0.0" }; + const graph: DependencyGraph = { + packages: new Map([ + [ + "@test/package-a", + { + name: "@test/package-a", + version: "1.0.0", + path: "/test/package.json", + packageJson, + }, + ], + ]), + dependents: new Map(), + }; + + 
spyOn(fs, "writeFileSync").mockImplementation(() => {}); + + updatePackagesToSnapshot(packages, snapshotVersion, graph); + + expect(packageJson.version).toBe(snapshotVersion); + }); + + test("should update internal dependencies to exact snapshot version", () => { + const packages = new Set(["@test/package-a", "@test/package-b"]); + const snapshotVersion = "0.0.0-1234567890"; + const packageJsonA = { name: "@test/package-a", version: "1.0.0" }; + const packageJsonB = { + name: "@test/package-b", + version: "2.0.0", + dependencies: { + "@test/package-a": "^1.0.0", + }, + }; + const graph: DependencyGraph = { + packages: new Map([ + [ + "@test/package-a", + { + name: "@test/package-a", + version: "1.0.0", + path: "/test/a/package.json", + packageJson: packageJsonA, + }, + ], + [ + "@test/package-b", + { + name: "@test/package-b", + version: "2.0.0", + path: "/test/b/package.json", + packageJson: packageJsonB, + }, + ], + ]), + dependents: new Map(), + }; + + spyOn(fs, "writeFileSync").mockImplementation(() => {}); + + const updates = updatePackagesToSnapshot(packages, snapshotVersion, graph); + + expect(packageJsonB.dependencies["@test/package-a"]).toBe(snapshotVersion); + expect(updates.get("@test/package-b")).toContain( + `@test/package-a: ^1.0.0 → ${snapshotVersion}`, + ); + }); + + test("should update devDependencies and peerDependencies", () => { + const packages = new Set(["@test/package-a", "@test/package-b"]); + const snapshotVersion = "0.0.0-1234567890"; + const packageJsonA = { name: "@test/package-a", version: "1.0.0" }; + const packageJsonB = { + name: "@test/package-b", + version: "2.0.0", + devDependencies: { + "@test/package-a": "~1.0.0", + }, + peerDependencies: { + "@test/package-a": ">=1.0.0", + }, + }; + const graph: DependencyGraph = { + packages: new Map([ + [ + "@test/package-a", + { + name: "@test/package-a", + version: "1.0.0", + path: "/test/a/package.json", + packageJson: packageJsonA, + }, + ], + [ + "@test/package-b", + { + name: 
"@test/package-b", + version: "2.0.0", + path: "/test/b/package.json", + packageJson: packageJsonB, + }, + ], + ]), + dependents: new Map(), + }; + + spyOn(fs, "writeFileSync").mockImplementation(() => {}); + + updatePackagesToSnapshot(packages, snapshotVersion, graph); + + expect(packageJsonB.devDependencies["@test/package-a"]).toBe(snapshotVersion); + expect(packageJsonB.peerDependencies["@test/package-a"]).toBe(snapshotVersion); + }); + + test("should not update external dependencies", () => { + const packages = new Set(["@test/package-a"]); + const snapshotVersion = "0.0.0-1234567890"; + const packageJson = { + name: "@test/package-a", + version: "1.0.0", + dependencies: { + react: "^18.0.0", + }, + }; + const graph: DependencyGraph = { + packages: new Map([ + [ + "@test/package-a", + { + name: "@test/package-a", + version: "1.0.0", + path: "/test/package.json", + packageJson, + }, + ], + ]), + dependents: new Map(), + }; + + spyOn(fs, "writeFileSync").mockImplementation(() => {}); + + updatePackagesToSnapshot(packages, snapshotVersion, graph); + + expect(packageJson.dependencies.react).toBe("^18.0.0"); + }); + + test("should write updated package.json to disk", () => { + const packages = new Set(["@test/package-a"]); + const snapshotVersion = "0.0.0-1234567890"; + const packageJson = { name: "@test/package-a", version: "1.0.0" }; + const graph: DependencyGraph = { + packages: new Map([ + [ + "@test/package-a", + { + name: "@test/package-a", + version: "1.0.0", + path: "/test/package.json", + packageJson, + }, + ], + ]), + dependents: new Map(), + }; + + const writeFileSpy = spyOn(fs, "writeFileSync").mockImplementation(() => {}); + + updatePackagesToSnapshot(packages, snapshotVersion, graph); + + expect(writeFileSpy).toHaveBeenCalledWith( + "/test/package.json", + expect.stringContaining(snapshotVersion), + "utf-8", + ); + }); +}); + +describe("restorePackageJsonFiles", () => { + let consoleErrorSpy: any; + + beforeEach(() => { + consoleErrorSpy = 
spyOn(console, "error").mockImplementation(() => {}); + }); + + afterEach(() => { + consoleErrorSpy.mockRestore(); + }); + + test("should restore package.json files from backup", () => { + const backups = new Map([ + [ + "@test/package-a", + { + path: "/test/package.json", + content: JSON.stringify({ name: "@test/package-a", version: "1.0.0" }), + }, + ], + ]); + + const writeFileSpy = spyOn(fs, "writeFileSync").mockImplementation(() => {}); + + restorePackageJsonFiles(backups); + + expect(writeFileSpy).toHaveBeenCalledWith( + "/test/package.json", + JSON.stringify({ name: "@test/package-a", version: "1.0.0" }), + "utf-8", + ); + }); + + test("should restore multiple files", () => { + const backups = new Map([ + [ + "@test/package-a", + { path: "/test/a/package.json", content: JSON.stringify({ version: "1.0.0" }) }, + ], + [ + "@test/package-b", + { path: "/test/b/package.json", content: JSON.stringify({ version: "2.0.0" }) }, + ], + ]); + + const writeFileSpy = spyOn(fs, "writeFileSync").mockImplementation(() => {}); + + restorePackageJsonFiles(backups); + + // Check that both files were restored + const calls = writeFileSpy.mock.calls; + const pathA = calls.find((call) => call[0] === "/test/a/package.json"); + const pathB = calls.find((call) => call[0] === "/test/b/package.json"); + expect(pathA).toBeDefined(); + expect(pathB).toBeDefined(); + }); + + test("should handle restore errors gracefully", () => { + const backups = new Map([["@test/package-a", { path: "/test/package.json", content: "{}" }]]); + + spyOn(fs, "writeFileSync").mockImplementation(() => { + throw new Error("Permission denied"); + }); + + expect(() => restorePackageJsonFiles(backups)).not.toThrow(); + expect(consoleErrorSpy).toHaveBeenCalled(); + }); +}); + +describe("snapshot command", () => { + let consoleLogSpy: any; + let consoleErrorSpy: any; + let processExitSpy: any; + + beforeEach(() => { + consoleLogSpy = spyOn(console, "log").mockImplementation(() => {}); + consoleErrorSpy = 
spyOn(console, "error").mockImplementation(() => {}); + processExitSpy = spyOn(process, "exit").mockImplementation((() => {}) as any); + }); + + afterEach(() => { + consoleLogSpy.mockRestore(); + consoleErrorSpy.mockRestore(); + processExitSpy.mockRestore(); + mock.clearAllMocks(); + }); + + test("should error when .changeset directory not found", async () => { + spyOn(fs, "existsSync").mockReturnValue(false); + + await snapshot({ dryRun: false }); + + expect(consoleErrorSpy).toHaveBeenCalledWith( + expect.stringContaining("No .changeset directory"), + ); + expect(processExitSpy).toHaveBeenCalledWith(1); + }); + + test("should error when no changeset files found", async () => { + spyOn(fs, "existsSync").mockReturnValue(true); + spyOn(tinyglobby, "globSync").mockImplementation((options: any) => { + if (options.patterns.includes(".changeset/*.md")) { + return []; + } + return []; + }); + + await snapshot({ dryRun: false }); + + expect(consoleErrorSpy).toHaveBeenCalledWith(expect.stringContaining("No changeset files")); + expect(processExitSpy).toHaveBeenCalledWith(1); + }); + + test("should show packages in dry run mode", async () => { + spyOn(fs, "existsSync").mockReturnValue(true); + spyOn(tinyglobby, "globSync").mockImplementation((options: any) => { + if (options.patterns.includes(".changeset/*.md")) { + return [".changeset/test.md"]; + } + return ["package.json"]; + }); + spyOn(fs, "readFileSync").mockImplementation((path: any) => { + const pathStr = typeof path === "string" ? 
path : path.toString(); + if (pathStr.includes(".changeset")) { + return `--- +"@test/package-a": feat +--- + +Test feature`; + } + return JSON.stringify({ name: "@test/package-a", version: "1.0.0" }); + }); + + await snapshot({ dryRun: true }); + + expect(consoleLogSpy).toHaveBeenCalledWith(expect.stringContaining("Dry run")); + const calls = consoleLogSpy.mock.calls.flat(); + const hasPackageName = calls.some( + (arg: any) => typeof arg === "string" && arg.includes("@test/package-a"), + ); + expect(hasPackageName).toBe(true); + }); + + test("should publish packages with snapshot tag", async () => { + spyOn(fs, "existsSync").mockReturnValue(true); + spyOn(tinyglobby, "globSync").mockImplementation((options: any) => { + if (options.patterns.includes(".changeset/*.md")) { + return [".changeset/test.md"]; + } + return ["package.json"]; + }); + spyOn(fs, "readFileSync").mockImplementation((path: any) => { + const pathStr = typeof path === "string" ? path : path.toString(); + if (pathStr.includes(".changeset")) { + return `--- +"@test/package-a": feat +--- + +Test feature`; + } + return JSON.stringify({ name: "@test/package-a", version: "1.0.0" }); + }); + spyOn(fs, "writeFileSync").mockImplementation(() => {}); + spyOn(packageManagerDetector, "detect").mockResolvedValue({ name: "npm", agent: "npm" }); + spyOn(childProcess, "execSync").mockImplementation(() => ""); + + await snapshot({ dryRun: false }); + + const calls = (childProcess.execSync as any).mock.calls; + const publishCall = calls.find((call: any) => call[0].includes("npm publish")); + expect(publishCall).toBeDefined(); + expect(publishCall[0]).toContain("--tag snapshot"); + }); + + test("should restore files after successful publish", async () => { + spyOn(fs, "existsSync").mockReturnValue(true); + spyOn(tinyglobby, "globSync").mockImplementation((options: any) => { + if (options.patterns.includes(".changeset/*.md")) { + return [".changeset/test.md"]; + } + return ["package.json"]; + }); + const 
mockOriginalContent = JSON.stringify({ name: "@test/package-a", version: "1.0.0" }); + let readCallCount = 0; + spyOn(fs, "readFileSync").mockImplementation((path: any) => { + readCallCount++; + const pathStr = typeof path === "string" ? path : path.toString(); + if (pathStr.includes(".changeset")) { + return `--- +"@test/package-a": feat +--- + +Test feature`; + } + // First read for backup, subsequent reads for parsing + return mockOriginalContent; + }); + const writeFileSpy = spyOn(fs, "writeFileSync").mockImplementation(() => {}); + spyOn(packageManagerDetector, "detect").mockResolvedValue({ name: "npm", agent: "npm" }); + spyOn(childProcess, "execSync").mockImplementation(() => ""); + + await snapshot({ dryRun: false }); + + // Check that restore was called + const restoreCalls = writeFileSpy.mock.calls.filter((call) => call[1] === mockOriginalContent); + expect(restoreCalls.length).toBeGreaterThan(0); + }); + + test("should restore files after publish failure", async () => { + spyOn(fs, "existsSync").mockReturnValue(true); + spyOn(tinyglobby, "globSync").mockImplementation((options: any) => { + if (options.patterns.includes(".changeset/*.md")) { + return [".changeset/test.md"]; + } + return ["package.json"]; + }); + const mockOriginalContent = JSON.stringify({ name: "@test/package-a", version: "1.0.0" }); + spyOn(fs, "readFileSync").mockImplementation((path: any) => { + const pathStr = typeof path === "string" ? 
path : path.toString(); + if (pathStr.includes(".changeset")) { + return `--- +"@test/package-a": feat +--- + +Test feature`; + } + return mockOriginalContent; + }); + const writeFileSpy = spyOn(fs, "writeFileSync").mockImplementation(() => {}); + spyOn(packageManagerDetector, "detect").mockResolvedValue({ name: "npm", agent: "npm" }); + spyOn(childProcess, "execSync").mockImplementation(() => { + throw new Error("npm publish failed"); + }); + + try { + await snapshot({ dryRun: false }); + } catch (error) { + // Expected to throw + } + + // Check that restore was called even after error + const restoreCalls = writeFileSpy.mock.calls.filter((call) => call[1] === mockOriginalContent); + expect(restoreCalls.length).toBeGreaterThan(0); + }); + + test("should skip private packages", async () => { + spyOn(fs, "existsSync").mockReturnValue(true); + spyOn(tinyglobby, "globSync").mockImplementation((options: any) => { + if (options.patterns.includes(".changeset/*.md")) { + return [".changeset/test.md"]; + } + return ["package.json"]; + }); + spyOn(fs, "readFileSync").mockImplementation((path: any) => { + const pathStr = typeof path === "string" ? 
path : path.toString(); + if (pathStr.includes(".changeset")) { + return `--- +"@test/package-a": feat +--- + +Test feature`; + } + return JSON.stringify({ name: "@test/package-a", version: "1.0.0", private: true }); + }); + spyOn(fs, "writeFileSync").mockImplementation(() => {}); + + await snapshot({ dryRun: false }); + + expect(consoleLogSpy).toHaveBeenCalledWith(expect.stringContaining("skipped (private)")); + }); +}); diff --git a/src/snapshot.ts b/src/snapshot.ts new file mode 100644 index 0000000..9ee1768 --- /dev/null +++ b/src/snapshot.ts @@ -0,0 +1,317 @@ +import { readFileSync, writeFileSync, existsSync } from "node:fs"; +import { globSync } from "tinyglobby"; +import path from "node:path"; +import pc from "picocolors"; +import { readConfig } from "./config.js"; +import { parseChangesetFile, buildDependencyGraph } from "./version.js"; +import type { DependencyGraph } from "./version.js"; +import { publishToNpm } from "./publish.js"; +import type { PackageInfo as PublishPackageInfo } from "./publish.js"; + +export interface PackageBackup { + path: string; + content: string; +} + +export function generateSnapshotVersion(): string { + const timestamp = Math.floor(Date.now() / 1000); + return `0.0.0-${timestamp}`; +} + +export function findAffectedPackages(changesetFiles: string[]): Set { + const affectedPackages = new Set(); + + for (const changesetFile of changesetFiles) { + const releases = parseChangesetFile(changesetFile); + for (const release of releases) { + affectedPackages.add(release.packageName); + } + } + + return affectedPackages; +} + +export function cascadeDependents( + affectedPackages: Set, + dependencyGraph: DependencyGraph, +): Set { + const allPackages = new Set(affectedPackages); + const queue = Array.from(affectedPackages); + const processed = new Set(); + + while (queue.length > 0) { + const current = queue.shift()!; + if (processed.has(current)) continue; + processed.add(current); + + const dependents = 
dependencyGraph.dependents.get(current);
    if (!dependents) continue;

    for (const dependent of dependents) {
      if (!allPackages.has(dependent)) {
        allPackages.add(dependent);
        queue.push(dependent);
      }
    }
  }

  return allPackages;
}

// NOTE(review): the generic type arguments on Map/Set in this file were lost
// during extraction (they render as bare `Map`/`Set`); they are reconstructed
// below as Map<string, { path: string; content: string }> etc. — confirm
// against the original declarations.

/**
 * Read and remember the raw package.json text for every package in `packages`
 * so the files can be restored byte-for-byte after snapshot publishing.
 *
 * @param packages - names of packages whose manifests will be modified
 * @param dependencyGraph - graph mapping package names to their PackageInfo
 * @returns map of package name -> { path, content } backup entries
 */
export function backupPackageJsonFiles(
  packages: Set<string>,
  dependencyGraph: DependencyGraph,
): Map<string, { path: string; content: string }> {
  const backups = new Map<string, { path: string; content: string }>();

  for (const pkgName of packages) {
    const pkgInfo = dependencyGraph.packages.get(pkgName);
    if (!pkgInfo) continue;

    // Keep the original file text (not a re-serialization) so restoring is exact.
    const content = readFileSync(pkgInfo.path, "utf-8");
    backups.set(pkgName, { path: pkgInfo.path, content });
  }

  return backups;
}

/**
 * Rewrite each package's version to `snapshotVersion` and pin internal
 * dependencies (deps on other packages in `packages`) to the same exact
 * snapshot version, then write the updated package.json files to disk.
 *
 * @param packages - all packages participating in the snapshot
 * @param snapshotVersion - the generated snapshot version string
 * @param dependencyGraph - graph mapping package names to their PackageInfo
 * @returns map of package name -> human-readable dependency-change descriptions
 *          (only packages that had internal deps rewritten appear as keys)
 */
export function updatePackagesToSnapshot(
  packages: Set<string>,
  snapshotVersion: string,
  dependencyGraph: DependencyGraph,
): Map<string, string[]> {
  const dependencyUpdates = new Map<string, string[]>();

  for (const pkgName of packages) {
    const pkgInfo = dependencyGraph.packages.get(pkgName);
    if (!pkgInfo) continue;

    const packageJson = pkgInfo.packageJson;
    const updates: string[] = [];

    // Update the package's own version.
    packageJson.version = snapshotVersion;

    // Pin internal dependencies to the exact snapshot version so the published
    // snapshot packages resolve each other rather than older registry versions.
    const depTypes = ["dependencies", "devDependencies", "peerDependencies"] as const;

    for (const depType of depTypes) {
      if (!packageJson[depType]) continue;

      for (const depName of Object.keys(packageJson[depType])) {
        // Only touch dependencies that are themselves being snapshot.
        if (packages.has(depName)) {
          const oldVersion = packageJson[depType][depName];
          packageJson[depType][depName] = snapshotVersion;
          updates.push(`${depName}: ${oldVersion} → ${snapshotVersion}`);
        }
      }
    }

    if (updates.length > 0) {
      dependencyUpdates.set(pkgName, updates);
    }

    // Persist the modified manifest (2-space indent + trailing newline, matching
    // the formatting produced elsewhere in this tool).
    writeFileSync(pkgInfo.path, JSON.stringify(packageJson, null, 2) + "\n", "utf-8");
  }

  return dependencyUpdates;
}

/**
 * Write every backed-up package.json back to its original path.
 * Failures are logged per file but do not abort the remaining restores,
 * so one bad write cannot leave every other manifest mutated.
 */
export function restorePackageJsonFiles(
  backups: Map<string, { path: string; content: string }>,
): void {
  for (const backup of backups.values()) {
    try {
      writeFileSync(backup.path, backup.content, "utf-8");
    } catch (error) {
      console.error(
        pc.red(`āœ— Failed to restore ${backup.path}`),
        error instanceof Error ? error.message : String(error),
      );
    }
  }
}

/**
 * Publish temporary snapshot releases for testing.
 *
 * Flow: read changesets -> find affected packages -> cascade to dependents ->
 * back up manifests -> rewrite versions/internal deps to a generated snapshot
 * version -> publish each public package to npm under the "snapshot" dist-tag
 * -> restore the original package.json files (also on error). No git tags,
 * no version-history changes.
 *
 * @param dryRun - when true, only print what would be published; modify nothing
 */
export async function snapshot({ dryRun = false }: { dryRun?: boolean } = {}): Promise<void> {
  const changesetDir = path.join(process.cwd(), ".changeset");

  if (!existsSync(changesetDir)) {
    console.error(pc.red("No .changeset directory found."));
    console.log(
      pc.yellow("Please run"),
      pc.cyan("changeset init"),
      pc.yellow("to initialize changesets."),
    );
    process.exit(1);
  }

  const config = readConfig();

  // Find changesets
  const changesetFiles = globSync({
    patterns: [".changeset/*.md"],
    ignore: [".changeset/README.md"],
  });

  if (changesetFiles.length === 0) {
    console.error(pc.red("No changeset files found."));
    console.log(pc.yellow("Create a changeset first with:"), pc.cyan("changeset"));
    process.exit(1);
  }

  // Packages explicitly named in changeset frontmatter.
  const affectedPackages = findAffectedPackages(changesetFiles);

  if (affectedPackages.size === 0) {
    console.error(pc.red("No packages found in changesets."));
    process.exit(1);
  }

  // Build the workspace dependency graph from every package.json on disk.
  const packageJsonPaths = globSync({
    patterns: ["**/package.json", "!**/node_modules/**", "!**/dist/**"],
  });

  const dependencyGraph = buildDependencyGraph(packageJsonPaths);

  // Fail fast if a changeset references a package that doesn't exist.
  for (const pkgName of affectedPackages) {
    if (!dependencyGraph.packages.has(pkgName)) {
      console.error(pc.red(`Package "${pkgName}" referenced in changeset not found.`));
      process.exit(1);
    }
  }

  // Snapshot dependents too, so internal dep pins stay consistent.
  const allPackagesToUpdate = cascadeDependents(affectedPackages, dependencyGraph);

  const snapshotVersion = generateSnapshotVersion();

  console.log(pc.bold(`\nšŸ“ø Snapshot version: ${pc.cyan(snapshotVersion)}\n`));

  if (dryRun) {
    console.log(pc.yellow("Dry run - no files will be modified or published.\n"));
  }

  // Preview: directly-affected packages vs. transitively-included dependents.
  console.log(pc.bold(`Packages to publish (${allPackagesToUpdate.size}):\n`));

  for (const pkgName of allPackagesToUpdate) {
    const pkgInfo = dependencyGraph.packages.get(pkgName);
    if (!pkgInfo) continue;

    const isAffected = affectedPackages.has(pkgName);
    const icon = isAffected ? pc.green("ā—") : pc.dim("↳");
    const reason = isAffected ? "" : pc.dim(" [dependent]");

    console.log(
      icon,
      pc.cyan(pkgName),
      pc.dim(`(${pkgInfo.version} → ${snapshotVersion})`),
      reason,
    );
  }

  if (dryRun) {
    console.log(pc.yellow("\nDry run complete - no changes were made."));
    return;
  }

  console.log(pc.dim("\n" + "─".repeat(60) + "\n"));

  // Back up manifests BEFORE any mutation so we can always roll back.
  const backups = backupPackageJsonFiles(allPackagesToUpdate, dependencyGraph);

  try {
    const dependencyUpdates = updatePackagesToSnapshot(
      allPackagesToUpdate,
      snapshotVersion,
      dependencyGraph,
    );

    if (dependencyUpdates.size > 0) {
      console.log(pc.bold("Updated internal dependencies:\n"));
      for (const [pkgName, updates] of dependencyUpdates) {
        console.log(pc.cyan(`  ${pkgName}:`));
        for (const update of updates) {
          console.log(pc.dim(`    ${update}`));
        }
      }
      console.log(pc.dim("\n" + "─".repeat(60) + "\n"));
    }

    // Publish under the "snapshot" dist-tag so `latest` is never touched.
    console.log(pc.bold("Publishing to npm with --tag snapshot...\n"));

    const results = { success: 0, failed: 0 };

    for (const pkgName of allPackagesToUpdate) {
      const pkgInfo = dependencyGraph.packages.get(pkgName);
      if (!pkgInfo) continue;

      const packageJson = pkgInfo.packageJson;
      const isPrivate = packageJson.private === true;

      const publishInfo: PublishPackageInfo = {
        name: pkgName,
        version: snapshotVersion,
        dir: path.dirname(pkgInfo.path),
        isPrivate,
        access: packageJson.publishConfig?.access,
      };

      if (isPrivate) {
        console.log(pc.dim(`  ā—‹ ${pkgName} - skipped (private)`));
        continue;
      }

      // One failed publish doesn't abort the rest; tally and report at the end.
      try {
        await publishToNpm(publishInfo, config, "snapshot");
        console.log(pc.green(`  āœ“ ${pkgName}@${snapshotVersion}`));
        results.success++;
      } catch (error) {
        console.error(pc.red(`  āœ— ${pkgName} - failed`));
        if (error instanceof Error) {
          console.error(pc.red(`    ${error.message}`));
        }
        results.failed++;
      }
    }

    console.log(pc.dim("\n" + "─".repeat(60) + "\n"));

    // Undo the version rewrites — snapshots never touch version history.
    console.log(pc.bold("Restoring package.json files...\n"));
    restorePackageJsonFiles(backups);
    console.log(pc.green(`  āœ“ Restored ${backups.size} package.json file(s)\n`));

    // Summary
    if (results.failed === 0) {
      console.log(
        pc.green(`āœ” Snapshot published successfully! ${results.success} package(s) published.\n`),
      );
    } else {
      console.log(
        pc.yellow(
          `⚠ Snapshot completed with errors. ${results.success} successful, ${results.failed} failed.\n`,
        ),
      );
    }

    console.log(pc.bold("Install snapshots with:"));
    console.log(pc.cyan(`  npm install <package-name>@snapshot\n`));
  } catch (error) {
    // Always restore the original manifests on error before propagating.
    console.log(pc.yellow("\n⚠ Error occurred, restoring package.json files...\n"));
    restorePackageJsonFiles(backups);
    console.log(pc.green(`  āœ“ Restored ${backups.size} package.json file(s)\n`));

    throw error;
  }
}