diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index b332f39..2ba1d6b 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -3,7 +3,13 @@ { "name": "Ubuntu", // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile - "image": "mcr.microsoft.com/devcontainers/base:jammy" + "image": "mcr.microsoft.com/devcontainers/base:jammy", + "features": { + "ghcr.io/devcontainers/features/node:1": {}, + "ghcr.io/devcontainers/features/docker-in-docker:2": {}, + "ghcr.io/devcontainers/features/azure-cli:1": {}, + "ghcr.io/azure/azure-dev/azd:0": {} + }, // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, @@ -18,5 +24,5 @@ // "customizations": {}, // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. - // "remoteUser": "root" + "remoteUser": "root" } diff --git a/.github/workflows/ossf.yml b/.github/workflows/ossf.yml new file mode 100644 index 0000000..1824090 --- /dev/null +++ b/.github/workflows/ossf.yml @@ -0,0 +1,229 @@ +# OSSF Scorecard Analysis workflow +# - Trigger: workflow_dispatch with input "repo" (owner/repo or full URL) and input "id" (tracking identifier; a GUID is generated when empty). +# - Runs the OpenSSF Scorecard Docker image (gcr.io/openssf/scorecard:stable) on an Ubuntu runner. +# - Produces a JSON result at ossf-scorecard-output/scorecard_<id>.json and uploads it as an artifact. +# - Job outputs: artifact_name, aggregate_score, analysis_b64 (base64 JSON if small enough; analysis_truncated=true otherwise). +# - How to run: from the Actions tab, start the workflow and provide repo = owner/repo (or full GitHub URL). +# - For higher rate limits or private repos, set repository secret SCORECARD_TOKEN (falls back to GITHUB_TOKEN).
+ +name: OSSF Scorecard Analysis + +on: + workflow_dispatch: + inputs: + repo: + description: 'Repository to analyze (owner/repo or full URL)' + required: true + default: '' + id: + description: 'Unique identifier for the workflow run (for tracking purposes)' + required: true + default: '' + +# read access is required for retrieving repository metadata if using the default GITHUB_TOKEN +permissions: + contents: read + +jobs: + ossf-scorecard: + name: Run OSSF Scorecard + runs-on: ubuntu-latest + outputs: + artifact_name: ${{ steps.set-id.outputs.artifact_name }} + aggregate_score: ${{ steps.process-results.outputs.aggregate_score }} + analysis_b64: ${{ steps.validate-json.outputs.analysis_b64 }} + analysis_truncated: ${{ steps.validate-json.outputs.analysis_truncated }} + artifact_run_id: ${{ github.run_id }} + + steps: + + - name: Set workflow run ID + id: set-id + run: | + # If the 'id' input is empty or an empty string, set it to a unique GUID + if [ -z "${{ github.event.inputs.id }}" ] || [ "${{ github.event.inputs.id }}" = "null" ]; then + # Generate a GUID (UUID v4) + if command -v uuidgen >/dev/null 2>&1; then + ID=$(uuidgen) + else + ID=$(cat /proc/sys/kernel/random/uuid 2>/dev/null || (head -c16 /dev/urandom | xxd -p | sed 's/\(..\)/\1-/g; s/-$//')) + fi + else + ID="${{ github.event.inputs.id }}" + fi + + # Sanitize repo input to a filesystem-safe token for artifact names + REPO_INPUT_RAW="${{ github.event.inputs.repo }}" + REPO_PATH="$REPO_INPUT_RAW" + # strip protocol/host for full URLs (e.g. 
https://github.com/owner/repo -> owner/repo) + if echo "$REPO_PATH" | grep -E '^https?://' >/dev/null 2>&1; then + REPO_PATH=$(echo "$REPO_PATH" | sed -E 's#^https?://[^/]+/##') + fi + # strip git@host:owner/repo forms + if echo "$REPO_PATH" | grep -E '^git@' >/dev/null 2>&1; then + REPO_PATH=$(echo "$REPO_PATH" | sed -E 's#^git@[^:]+:##') + fi + # trim trailing .git if present + REPO_PATH=$(echo "$REPO_PATH" | sed -E 's#\.git$##') + # fallback if empty + if [ -z "$REPO_PATH" ] || [ "$REPO_PATH" = "null" ]; then + REPO_PATH="unknown_repo" + fi + # make filename-safe: replace / and other non-alphanum with _ + REPO_SAFE=$(printf '%s' "$REPO_PATH" | sed -E 's#[/\\ ]#_#g' | sed 's/[^0-9A-Za-z._-]/_/g') + + ARTIFACT_NAME="ossf-scorecard_${REPO_SAFE}_${ID}" + + echo "id=$ID" >> "$GITHUB_OUTPUT" + echo "artifact_name=$ARTIFACT_NAME" >> "$GITHUB_OUTPUT" + echo "Workflow run ID: $ID" + echo "Artifact name: $ARTIFACT_NAME" + + # Add the ID to the step summary + echo "Workflow run INPUT_GUID_ID: $ID" >> $GITHUB_STEP_SUMMARY + + - name: Restore Scorecard image cache + id: cache-scorecard-image + uses: actions/cache@v4 + with: + path: scorecard_image.tar + key: scorecard-image-${{ runner.os }}-gcr-ossf-scorecard-stable + + - name: Load cached Scorecard image + if: steps.cache-scorecard-image.outputs.cache-hit == 'true' + run: | + if [ -f scorecard_image.tar ]; then + echo "Loading cached Scorecard image..." + docker load -i scorecard_image.tar || true + else + echo "Cache indicated hit but tarball missing; proceeding to pull image." + fi + + - name: Pull and save Scorecard image (cache miss) + if: steps.cache-scorecard-image.outputs.cache-hit != 'true' + run: | + echo "Pulling Scorecard image from registry..." + docker pull gcr.io/openssf/scorecard:stable + echo "Saving Scorecard image to tar for caching..." 
+ docker save gcr.io/openssf/scorecard:stable -o scorecard_image.tar + + - name: Normalize Repository Input + id: normalize-repo + run: | + REPO_INPUT="${{ github.event.inputs.repo }}" + if [ -z "$REPO_INPUT" ] || [ "$REPO_INPUT" = "null" ]; then + echo "error: Repository input is required." >&2 + exit 1 + fi + + # Normalize the repo argument + if echo "$REPO_INPUT" | grep -E '^https?://' >/dev/null 2>&1; then + REPO_ARG="$REPO_INPUT" + elif echo "$REPO_INPUT" | grep -E '^[^/]+/[^/]+$' >/dev/null 2>&1; then + REPO_ARG="https://github.com/$REPO_INPUT" + else + echo "error: Repo input must be 'owner/repo' or a full URL." >&2 + exit 1 + fi + + echo "repo_arg=$REPO_ARG" >> "$GITHUB_OUTPUT" + + - name: Prepare Output Directory + id: prepare-output + run: | + OUT_DIR="${{ github.workspace }}/ossf-scorecard-output" + mkdir -p "$OUT_DIR" + RESULT_FILE="$OUT_DIR/scorecard_${{ steps.set-id.outputs.id }}.json" + STDERR_FILE="$OUT_DIR/scorecard_${{ steps.set-id.outputs.id }}.stderr.log" + RESULT_LOG="$OUT_DIR/scorecard_${{ steps.set-id.outputs.id }}.log" + + echo "result_file=$RESULT_FILE" >> "$GITHUB_OUTPUT" + echo "stderr_file=$STDERR_FILE" >> "$GITHUB_OUTPUT" + echo "result_log=$RESULT_LOG" >> "$GITHUB_OUTPUT" + + - name: Run Scorecard (Docker) + id: run-docker + run: | + TOKEN="${{ secrets.SCORECARD_TOKEN }}" + if [ -z "$TOKEN" ] || [ "$TOKEN" = "null" ]; then + TOKEN="${{ secrets.GITHUB_TOKEN }}" + fi + + DOCKER_ENV_ARGS=() + if [ -n "$TOKEN" ]; then + DOCKER_ENV_ARGS+=( -e GITHUB_AUTH_TOKEN="$TOKEN" ) + fi + + echo "Running OSSF Scorecard on ${{ steps.normalize-repo.outputs.repo_arg }}..." 
| tee "${{ steps.prepare-output.outputs.result_log }}" + echo "Command: docker run --rm [AUTH_TOKEN_HIDDEN] gcr.io/openssf/scorecard:stable --format=json --repo=\"${{ steps.normalize-repo.outputs.repo_arg }}\"" >> "${{ steps.prepare-output.outputs.result_log }}" + echo "$(date -u +"%Y-%m-%dT%H:%M:%SZ") Started analysis" >> "${{ steps.prepare-output.outputs.result_log }}" + + docker run --rm "${DOCKER_ENV_ARGS[@]}" gcr.io/openssf/scorecard:stable --format=json --repo="${{ steps.normalize-repo.outputs.repo_arg }}" > "${{ steps.prepare-output.outputs.result_file }}" 2> "${{ steps.prepare-output.outputs.stderr_file }}" + DOCKER_EXIT=$? + echo "docker_exit_code=$DOCKER_EXIT" >> "$GITHUB_OUTPUT" + + echo "$(date -u +"%Y-%m-%dT%H:%M:%SZ") Completed analysis with exit code: $DOCKER_EXIT" >> "${{ steps.prepare-output.outputs.result_log }}" + + - name: Validate JSON Output + id: validate-json + run: | + RESULT_FILE="${{ steps.prepare-output.outputs.result_file }}" + if jq -e . "$RESULT_FILE" >/dev/null 2>&1; then + echo "is_json=true" >> "$GITHUB_OUTPUT" + # Add base64 encoding of the JSON for smaller files + if [ "$(wc -c < "$RESULT_FILE")" -lt 10000 ]; then + echo "analysis_b64=$(base64 -w0 "$RESULT_FILE")" >> "$GITHUB_OUTPUT" + echo "analysis_truncated=false" >> "$GITHUB_OUTPUT" + else + echo "analysis_truncated=true" >> "$GITHUB_OUTPUT" + echo "Analysis file too large for base64 encoding in outputs ($(wc -c < "$RESULT_FILE") bytes)" >> "$GITHUB_STEP_SUMMARY" + fi + else + echo "is_json=false" >> "$GITHUB_OUTPUT" + fi + + - name: Process Results + id: process-results + if: steps.validate-json.outputs.is_json == 'true' + run: | + RESULT_FILE="${{ steps.prepare-output.outputs.result_file }}" + AGG_SCORE=$(jq -r '.summary.score // .score // ""' "$RESULT_FILE" || true) + + if [ -n "$AGG_SCORE" ]; then + SCORE_SAFE=$(printf '%s' "$AGG_SCORE" | sed 's/[^0-9A-Za-z._-]/_/g' | sed 's/\./_/g') + ARTIFACT_NAME="${{ steps.set-id.outputs.artifact_name }}_score_${SCORE_SAFE}" + else + 
ARTIFACT_NAME="${{ steps.set-id.outputs.artifact_name }}" + fi + + echo "artifact_name=$ARTIFACT_NAME" >> "$GITHUB_OUTPUT" + echo "aggregate_score=$AGG_SCORE" >> "$GITHUB_OUTPUT" + + - name: Upload Artifacts + id: upload-artifacts + uses: actions/upload-artifact@v4 + with: + name: ${{ steps.process-results.outputs.artifact_name }} + path: | + ${{ steps.prepare-output.outputs.result_file }} + ${{ steps.prepare-output.outputs.result_log }} + ${{ steps.prepare-output.outputs.stderr_file }} + + - name: Show short summary + run: | + echo "Workflow input run: ${{ steps.set-id.outputs.id }}" + echo "Workflow input repo: ${{ github.event.inputs.repo }}" + echo "Workflow run ID: ${{ github.run_id }}" + echo "Artifact: ${{ steps.process-results.outputs.artifact_name }}" + echo "Scorecard FileName: ${{ steps.prepare-output.outputs.result_file }}" + echo "Aggregate score: ${{ steps.process-results.outputs.aggregate_score }}" + echo "Docker exit code: ${{ steps.run-docker.outputs.docker_exit_code }}" + + # Add detailed info to step summary + echo "## OSSF Scorecard Analysis Results" >> $GITHUB_STEP_SUMMARY + echo "* **Repository:** ${{ github.event.inputs.repo }}" >> $GITHUB_STEP_SUMMARY + echo "* **Run ID:** ${{ steps.set-id.outputs.id }}" >> $GITHUB_STEP_SUMMARY + echo "* **Workflow Run ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY + echo "* **Artifact Name:** ${{ steps.process-results.outputs.artifact_name }}" >> $GITHUB_STEP_SUMMARY + echo "* **Aggregate Score:** ${{ steps.process-results.outputs.aggregate_score }}" >> $GITHUB_STEP_SUMMARY + echo "* **Docker Exit Code:** ${{ steps.run-docker.outputs.docker_exit_code }}" >> $GITHUB_STEP_SUMMARY diff --git a/.gitignore b/.gitignore index e43b0f9..8c394d2 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ .DS_Store +.env* \ No newline at end of file diff --git a/packages/github-move-folders-between-repos/README.md b/packages/github-move-folders-between-repos/README.md new file mode 100644 index 0000000..bec4256 --- 
/dev/null +++ b/packages/github-move-folders-between-repos/README.md @@ -0,0 +1,164 @@ +# GitHub Move Folders Between Repos + +A Node.js tool to move specific files and folders from one GitHub repository to another, with optional git history preservation. + +## Features + +- 🎯 **Selective Migration** - Move only specific files/folders, not entire repos +- 📜 **History Preservation** - Optionally preserve git history using `git-filter-repo` +- 📦 **Snapshot Mode** - Or create clean snapshots without history +- 📂 **Subdirectory Support** - Relocate content to a subdirectory in the target repo +- 🔒 **Safe Operation** - Creates a new branch in target repo (no direct commits) + +## Requirements + +- **git** >= 2.30 +- **Node.js** >= 16 +- **GitHub PAT** with appropriate scope (see below) +- **git-filter-repo** (optional, only needed if `keepHistory: true`) + - Install: `pip install git-filter-repo` or `brew install git-filter-repo` + +## GitHub Personal Access Token (PAT) + +### Required Permissions + +**For Classic Tokens:** +- **`repo`** (Full control of private repositories) - for both private repos +- **`public_repo`** (Access public repositories) - if both repos are public + +**For Fine-Grained Tokens (Beta):** +- **Contents**: Read and Write access (for both source and target repos) +- **Metadata**: Read access (automatically included) + +### Creating a PAT + +1. Go to GitHub → Settings → Developer settings → Personal access tokens +2. Choose "Tokens (classic)" or "Fine-grained tokens" +3. Click "Generate new token" +4. Select the appropriate scopes listed above +5. Generate and copy the token +6. Export it as an environment variable: + ```bash + export GH_PAT=ghp_your_token_here + ``` + +## Installation + +```bash +npm install +``` + +Or install globally: +```bash +npm install -g . +``` + +## Usage + +### Using a Config File (Recommended) + +1. 
Create a `config.json` file: + ```json + { + "srcRepo": "org/source-repo", + "targetRepo": "org/target-repo", + "srcRef": "main", + "keepHistory": true, + "destSubdir": "", + "paths": ["docs", "src/lib", "README.md", ".github/workflows"] + } + ``` + +2. Run the tool: + ```bash + export GH_PAT=ghp_your_token_here + node gh-move-folders-between-repos.js --config ./config.json + ``` + +### Using Command Line Arguments + +```bash +node gh-move-folders-between-repos.js \ + --src-repo org/source-repo \ + --target-repo org/target-repo \ + --src-ref main \ + --keep-history true \ + --dest-subdir imported \ + --paths '["docs","src/lib","README.md",".github/workflows"]' +``` + +## Configuration Options + +| Option | Type | Default | Description | +|--------|------|---------|-------------| +| `srcRepo` | string | *required* | Source repository (format: `owner/repo`) | +| `targetRepo` | string | *required* | Target repository (format: `owner/repo`) | +| `srcRef` | string | `"main"` | Branch/tag/commit to copy from | +| `keepHistory` | boolean | `true` | Preserve git history (requires git-filter-repo) | +| `destSubdir` | string | `""` | Optional subdirectory in target repo | +| `paths` | array | *required* | List of files/folders to move | + +## How It Works + +1. **Clones** the source repository to a temporary directory +2. **Extracts** only the specified paths: + - If `keepHistory: true` - Uses `git-filter-repo` to rewrite history + - If `keepHistory: false` - Creates a clean snapshot using `git archive` +3. **Relocates** content to subdirectory (if `destSubdir` is specified) +4. **Creates** a new branch with timestamp (e.g., `import/source-repo-20260105-143025`) +5. **Pushes** to the target repository +6. 
**Cleanup** - Automatically removes temporary files + +## Example Workflows + +### Move documentation to a docs repo (with history) +```json +{ + "srcRepo": "myorg/monorepo", + "targetRepo": "myorg/documentation", + "srcRef": "main", + "keepHistory": true, + "destSubdir": "", + "paths": ["docs", "README.md"] +} +``` + +### Extract libraries to separate repo (snapshot only) +```json +{ + "srcRepo": "myorg/legacy-app", + "targetRepo": "myorg/shared-libs", + "srcRef": "main", + "keepHistory": false, + "destSubdir": "legacy", + "paths": ["src/utils", "src/helpers"] +} +``` + +## After Running + +1. The tool creates a new branch in the target repository +2. Open the provided GitHub URL to create a Pull Request +3. Review the changes before merging +4. The source repository remains unchanged + +## Troubleshooting + +**Error: "GH_PAT environment variable is not set"** +- Make sure you've exported the token: `export GH_PAT=your_token` + +**Error: "Path 'xyz' not found in HEAD"** +- Check spelling and case sensitivity of the path +- Ensure the path exists in the specified branch (`srcRef`) + +**Error: "Cannot access target repo"** +- Verify your PAT has the correct permissions +- Check that the target repo exists and you have write access + +**Warning: "git-filter-repo not found"** +- The tool automatically falls back to snapshot mode +- Install git-filter-repo if you need history preservation + +## License + +MIT diff --git a/packages/github-move-folders-between-repos/config.json b/packages/github-move-folders-between-repos/config.json new file mode 100644 index 0000000..75fb3b1 --- /dev/null +++ b/packages/github-move-folders-between-repos/config.json @@ -0,0 +1,9 @@ + +{ + "srcRepo": "diberry/gh", + "targetRepo": "diberry/b", + "srcRef": "main", + "keepHistory": true, + "destSubdir": "", + "paths": ["scripts"] +} diff --git a/packages/github-move-folders-between-repos/gh-move-folders-between-repos.js 
b/packages/github-move-folders-between-repos/gh-move-folders-between-repos.js new file mode 100644 index 0000000..78f3d3a --- /dev/null +++ b/packages/github-move-folders-between-repos/gh-move-folders-between-repos.js @@ -0,0 +1,317 @@ +#!/usr/bin/env node +/** + * Move a subset of files/directories from a source GitHub repo into a target repo. + * Supports preserving history for just those paths (via git-filter-repo) or doing a snapshot copy. + * + * Requirements: + * - git >= 2.30 + * - Node.js >= 16 + * - A GitHub PAT with 'repo' scope (GH_PAT env var) + * - For KEEP_HISTORY=true: git-filter-repo must be installed and available on PATH + * + * Usage examples: + * node gh-move-folders-between-repos.js \ + * --src-repo org/source-repo \ + * --target-repo org/target-repo \ + * --src-ref main \ + * --keep-history true \ + * --dest-subdir imported \ + * --paths '["docs","src/lib","README.md",".github/workflows"]' + * + * Or use a config file: + * node gh-move-folders-between-repos.js --config ./config.json + * + * Sample config JSON: + * { + * "srcRepo": "org/source-repo", + * "targetRepo": "org/target-repo", + * "srcRef": "main", + * "keepHistory": true, + * "destSubdir": "", + * "paths": ["docs","src/lib","README.md",".github/workflows"] + * } + */ + +const { execSync, spawnSync } = require("child_process"); +const fs = require("fs"); +const os = require("os"); +const path = require("path"); + +// ---------------------- +// Utility helpers +// ---------------------- +function sh(cmd, opts = {}) { + const options = { + stdio: opts.stdio || "pipe", + env: { ...process.env, ...(opts.env || {}) }, + cwd: opts.cwd || process.cwd(), + shell: true, + }; + try { + const out = execSync(cmd, options); + return out?.toString() ?? 
""; + } catch (err) { + if (opts.allowFail) return ""; + const msg = err?.stderr?.toString() || err?.message || String(err); + throw new Error(`Command failed: ${cmd}\n${msg}`); + } +} + +function hasCommand(cmd) { + const res = spawnSync(cmd, ["--version"], { stdio: "pipe", shell: true }); + return res.status === 0; +} + +function ensureNoLeadingSlash(paths) { + for (const p of paths) { + if (p.startsWith("/")) { + throw new Error( + `Invalid path '${p}': use repo-root relative paths without leading '/'.` + ); + } + } +} + +function nowBranchSuffix() { + const pad = (n) => String(n).padStart(2, "0"); + const d = new Date(); + const ts = + d.getFullYear().toString() + + pad(d.getMonth() + 1) + + pad(d.getDate()) + + "-" + + pad(d.getHours()) + + pad(d.getMinutes()) + + pad(d.getSeconds()); + return ts; +} + +// ---------------------- +// Parse args / config +// ---------------------- +function parseArgs() { + const args = process.argv.slice(2); + const config = {}; + + for (let i = 0; i < args.length; i++) { + const a = args[i]; + if (a === "--config") { + const file = args[++i]; + const json = JSON.parse(fs.readFileSync(file, "utf8")); + Object.assign(config, json); + } else if (a === "--src-repo") { + config.srcRepo = args[++i]; + } else if (a === "--target-repo") { + config.targetRepo = args[++i]; + } else if (a === "--src-ref") { + config.srcRef = args[++i]; + } else if (a === "--keep-history") { + const v = args[++i]; + config.keepHistory = v === "true" || v === true; + } else if (a === "--dest-subdir") { + config.destSubdir = args[++i]; + } else if (a === "--paths") { + // Accept JSON array or comma-separated string + const v = args[++i]; + try { + const arr = JSON.parse(v); + if (!Array.isArray(arr)) throw new Error(); + config.paths = arr; + } catch { + config.paths = v.split(",").map((s) => s.trim()).filter(Boolean); + } + } + } + + // Defaults + if (!("srcRef" in config)) config.srcRef = "main"; + if (!("keepHistory" in config)) config.keepHistory = true; 
+ if (!("destSubdir" in config)) config.destSubdir = ""; + + return config; +} + +function validateConfig(cfg) { + if (!process.env.GH_PAT || !process.env.GH_PAT.trim()) { + throw new Error( + "GH_PAT environment variable is not set. Create a GitHub Personal Access Token with 'repo' scope and export GH_PAT." + ); + } + if (!cfg.srcRepo || !cfg.targetRepo) { + throw new Error( + "srcRepo and targetRepo are required (e.g., 'org/source-repo' and 'org/target-repo')." + ); + } + if (!cfg.paths || !Array.isArray(cfg.paths) || cfg.paths.length === 0) { + throw new Error( + "paths (array) is required. Example: [\"docs\",\"src/lib\",\"README.md\",\".github/workflows\"]" + ); + } + ensureNoLeadingSlash(cfg.paths); +} + +// ---------------------- +// Main +// ---------------------- +(async function main() { + const cfg = parseArgs(); + validateConfig(cfg); + + const GH_PAT = process.env.GH_PAT.trim(); + + // Check basic tools + if (!hasCommand("git")) { + throw new Error("git is not installed or not on PATH."); + } + + const workdir = fs.mkdtempSync(path.join(os.tmpdir(), "move-subset-")); + process.on("exit", () => { + try { + fs.rmSync(workdir, { recursive: true, force: true }); + } catch {} + }); + + console.log(`Working directory: ${workdir}`); + + // Clone source + console.log(`Cloning source repo: ${cfg.srcRepo} @ ${cfg.srcRef}`); + sh( + `git clone --no-tags --depth 1 --branch "${cfg.srcRef}" "https://${GH_PAT}@github.com/${cfg.srcRepo}.git" src`, + { cwd: workdir, stdio: "inherit" } + ); + + const srcDir = path.join(workdir, "src"); + + // Fetch full history for filter-repo correctness + sh(`git fetch --unshallow || true`, { cwd: srcDir, stdio: "inherit" }); + sh(`git fetch --tags || true`, { cwd: srcDir, stdio: "inherit" }); + + if (cfg.keepHistory) { + if (!hasCommand("git-filter-repo")) { + console.warn( + "WARNING: git-filter-repo not found on PATH. Falling back to snapshot copy (no history)." 
+ ); + cfg.keepHistory = false; + } + } + + if (cfg.keepHistory) { + console.log( + "KEEP_HISTORY=true => Preserving history for selected paths via git-filter-repo" + ); + + const args = cfg.paths.map((p) => `--path "${p}"`).join(" "); + // Rewrite history in-place to only include provided paths + sh(`git filter-repo --force --quiet ${args}`, { + cwd: srcDir, + stdio: "inherit", + }); + } else { + console.log( + "KEEP_HISTORY=false => Copying snapshot of selected paths at HEAD (no history)" + ); + + const extractedDir = path.join(workdir, "extracted"); + fs.mkdirSync(extractedDir, { recursive: true }); + + // Validate existence and export each path + for (const p of cfg.paths) { + console.log(`Archiving path: ${p}`); + + // Check whether path exists in git tree (file or directory) + // Try ls-tree directly on the path - will output something if it exists + const checkResult = sh( + `git ls-tree HEAD "${p}"`, + { cwd: srcDir, allowFail: true } + ).trim(); + + if (!checkResult) { + // List available top-level paths to help debug + console.log("\nAvailable top-level paths in repository:"); + const availablePaths = sh(`git ls-tree --name-only HEAD`, { cwd: srcDir }); + console.log(availablePaths); + throw new Error( + `Path '${p}' not found in HEAD of ${cfg.srcRef}. 
Check path spelling and case (see available paths above).` + ); + } + + sh(`git archive --format=tar HEAD "${p}" | tar -x -C "${extractedDir}"`, { + cwd: srcDir, + stdio: "inherit", + }); + } + + // Re-init repo with only extracted content + sh(`rm -rf "${srcDir}"`, { cwd: workdir }); + sh(`mv "${extractedDir}" "${srcDir}"`, { cwd: workdir }); + sh(`git init`, { cwd: srcDir, stdio: "inherit" }); + sh(`git add .`, { cwd: srcDir, stdio: "inherit" }); + try { + sh(`git commit -m "Import selected paths (snapshot without history)"`, { + cwd: srcDir, + stdio: "inherit", + allowFail: false, + }); + } catch (e) { + // No changes case (shouldn't happen if paths existed) + console.warn("No changes to commit:", e.message); + } + } + + // Optional relocation + if (cfg.destSubdir && cfg.destSubdir.trim() !== "") { + console.log(`Relocating imported content into subdir: ${cfg.destSubdir}`); + const dest = cfg.destSubdir.trim(); + + // Create subdir and move all tracked files except .git dir + sh(`mkdir -p "${dest}"`, { cwd: srcDir, stdio: "inherit" }); + + // Move top-level items except the destination itself and .git + const moveScript = `bash -c ' + shopt -s dotglob nullglob + for f in *; do + [ "$f" = ".git" ] && continue + [ "$f" = "${dest}" ] && continue + git mv -k "$f" "${dest}/" || true + done + '`; + sh(moveScript, { cwd: srcDir, stdio: "inherit" }); + + // Commit relocation (if any changes) + sh(`git commit -m "Relocate imported content into '${dest}'" || true`, { + cwd: srcDir, + stdio: "inherit", + allowFail: true, + }); + } + + // Create branch and push to target + const branch = `import/${path.basename(cfg.srcRepo)}-${nowBranchSuffix()}`; + sh(`git checkout -b "${branch}"`, { cwd: srcDir, stdio: "inherit" }); + + console.log(`Preparing to push to target repo: ${cfg.targetRepo}`); + sh( + `git remote add target "https://${GH_PAT}@github.com/${cfg.targetRepo}.git"`, + { cwd: srcDir, stdio: "inherit" } + ); + + // Verify access before push + try { + sh(`git ls-remote 
target`, { cwd: srcDir, stdio: "inherit" }); + } catch (e) { + throw new Error( + `Cannot access target repo '${cfg.targetRepo}'. Check GH_PAT permissions or repo visibility.\n${e.message}` + ); + } + + sh(`git push target "${branch}"`, { cwd: srcDir, stdio: "inherit" }); + + console.log("✅ Done."); + console.log(`Pushed branch: ${branch}`); + console.log( + `Open a PR in https://github.com/${cfg.targetRepo} against your base branch (e.g., main).` + ); +})().catch((err) => { + console.error(`❌ Error: ${err.message}`); + process.exit(1); +}); diff --git a/packages/github-move-folders-between-repos/package.json b/packages/github-move-folders-between-repos/package.json new file mode 100644 index 0000000..9e1f687 --- /dev/null +++ b/packages/github-move-folders-between-repos/package.json @@ -0,0 +1,12 @@ +{ + "name": "github-move-folders-between-repos", + "version": "1.0.0", + "description": "", + "license": "ISC", + "author": "", + "type": "commonjs", + "main": "index.js", + "scripts": { + "start": "node --env-file .env gh-move-folders-between-repos.js --config ./config.json" + } +}