Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
98 changes: 52 additions & 46 deletions .github/workflows/sync-issues.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ name: Sync Issues to JSON

on:
issues:
Copy link

Copilot AI Oct 27, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The addition of 'labeled' and 'reopened' trigger types is undocumented in the PR description. Consider documenting why these triggers were added to help future maintainers understand the complete scope of issue events that trigger synchronization.

Suggested change
issues:
issues:
# The workflow is triggered on multiple issue events to ensure the JSON data stays in sync.
# 'labeled' is included to capture changes in issue labels, which may affect downstream processing.
# 'reopened' is included to handle issues that are reopened, ensuring their state is updated in the JSON.
# Other events ('opened', 'edited', 'closed') are standard for issue lifecycle changes.

Copilot uses AI. Check for mistakes.
types: [opened, edited, closed]
types: [opened, labeled, edited, reopened, closed]

permissions:
contents: write
Expand All @@ -15,58 +15,64 @@ concurrency:
jobs:
sync-issues:
runs-on: ubuntu-latest

steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}

- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'

- name: Create issues directory if not exists
run: mkdir -p public/data

- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '18'
- name: Install action dependencies (sync-issue)
working-directory: actions/sync-issue
run: npm ci

- name: Create issues directory if not exists
run: mkdir -p public/data
- name: Sync issue to JSON (local action)
uses: ./actions/sync-issue
with:
json-path: public/data/issues.json

- name: Sync issue to JSON
env:
ISSUE_JSON: ${{ toJSON(github.event.issue) }}
ISSUE_ACTION: ${{ github.event.action }}
OUTPUT_PATH: public/data/issues.json
run: node scripts/sync-issue.js
- name: Check for changes in data file
id: verify
run: |
if git diff --quiet -- public/data/issues.json; then
echo "changed=false" >> $GITHUB_OUTPUT
else
echo "changed=true" >> $GITHUB_OUTPUT
fi

- name: Commit and push changes
run: |
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
if [[ -n $(git status --porcelain) ]]; then
- name: Commit and push changes to main
if: steps.verify.outputs.changed == 'true'
run: |
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git add public/data/issues.json
git commit -m "🤖 Sync issue #${{ github.event.issue.number }} (${{ github.event.action }})"
git pull --rebase origin main
git push
echo "✅ Cambios commiteados y pusheados"
else
echo "ℹ️ No hay cambios para commitear"
fi
git push origin HEAD:main

- name: Save issues.json to temp before switching branch
run: cp public/data/issues.json /tmp/issues.json
- name: Save issues.json to temp before switching branch
if: steps.verify.outputs.changed == 'true'
run: cp public/data/issues.json /tmp/issues.json

- name: Push issues.json to gh-pages
run: |
git fetch origin gh-pages
git checkout gh-pages
mkdir -p data
cp /tmp/issues.json data/issues.json
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
if [[ -n $(git status --porcelain data/issues.json) ]]; then
git add data/issues.json
git commit -m "🤖 Sync issue #${{ github.event.issue.number }} (${{ github.event.action }}) [gh-pages]"
git push origin gh-pages
echo "✅ issues.json actualizado en gh-pages"
else
echo "ℹ️ No hay cambios para commitear en gh-pages"
fi
- name: Push issues.json to gh-pages
if: steps.verify.outputs.changed == 'true'
run: |
git fetch origin gh-pages || true
git checkout gh-pages || git switch -c gh-pages
Comment on lines +68 to +69
Copy link

Copilot AI Oct 27, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The || true on line 68 silently ignores fetch errors, which could mask legitimate issues like network failures or repository access problems. Consider checking if gh-pages exists first with git ls-remote --heads origin gh-pages and handling the branch creation case explicitly rather than suppressing all errors.

Suggested change
git fetch origin gh-pages || true
git checkout gh-pages || git switch -c gh-pages
if git ls-remote --exit-code --heads origin gh-pages; then
git fetch origin gh-pages
git checkout gh-pages
else
git switch --create gh-pages
fi

Copilot uses AI. Check for mistakes.
mkdir -p data
cp /tmp/issues.json data/issues.json
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
if [[ -n $(git status --porcelain data/issues.json) ]]; then
git add data/issues.json
git commit -m "🤖 Sync issue #${{ github.event.issue.number }} (${{ github.event.action }}) [gh-pages]"
git push origin gh-pages
fi
10 changes: 10 additions & 0 deletions actions/sync-issue/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Metadata for the local JavaScript action invoked as `uses: ./actions/sync-issue`.
# It runs index.js under Node 20 to upsert one issue into the events JSON file.
name: 'Sync Single Issue to JSON'
description: 'Parse a GitHub issue and update public/data/issues.json with an event entry'
inputs:
  json-path:
    # Target file; the workflow default matches the repository layout.
    description: 'Path to the JSON file to update'
    required: false
    default: 'public/data/issues.json'
runs:
  using: 'node20'
  main: 'index.js'
16 changes: 16 additions & 0 deletions actions/sync-issue/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
const core = require('@actions/core');
const github = require('@actions/github');
const { syncIssue } = require('./lib/core');

// Entry point for the local action: read the `json-path` input, pull the
// issue from the workflow event payload, and delegate to the shared sync
// logic. Any error marks the action run as failed instead of throwing.
async function run() {
  try {
    const jsonPath = core.getInput('json-path') || 'public/data/issues.json';
    const { issue, action } = github.context.payload || {};
    if (!issue) throw new Error('No issue payload available');
    await syncIssue(issue, action || 'unknown', jsonPath);
  } catch (error) {
    core.setFailed(error.message);
  }
}

run();
153 changes: 153 additions & 0 deletions actions/sync-issue/lib/core.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,153 @@
const fs = require('fs');
const path = require('path');
const https = require('https');
const cheerio = require('cheerio');

// Fetch OG image from a Meetup event page
// Fetch the Open Graph image URL from a Meetup event page.
//
// Resolves to the `og:image` content string, or null when: the link is
// missing or not a meetup.com URL, the request fails, the server answers
// with a non-2xx status, or the page has no og:image tag. The promise
// never rejects — callers treat the image as best-effort optional data.
function getEventImage(eventLink) {
  return new Promise((resolve) => {
    if (!eventLink || !eventLink.includes('meetup.com')) {
      resolve(null);
      return;
    }
    https
      .get(eventLink, (res) => {
        // A redirect or error page carries no usable og:image; previously the
        // (often empty) body was parsed anyway. Drain the stream and bail out.
        if (res.statusCode < 200 || res.statusCode >= 300) {
          console.warn(`Unexpected status ${res.statusCode} for ${eventLink}`);
          res.resume();
          resolve(null);
          return;
        }
        let data = '';
        res.on('data', (chunk) => {
          data += chunk;
        });
        res.on('end', () => {
          try {
            const $ = cheerio.load(data);
            const ogImage = $('meta[property="og:image"]').attr('content');
            resolve(ogImage || null);
          } catch (error) {
            console.warn(`Error parsing HTML for ${eventLink}: ${error.message}`);
            resolve(null);
          }
        });
      })
      .on('error', (error) => {
        console.warn(`Error fetching ${eventLink}: ${error.message}`);
        resolve(null);
      });
  });
}

// From issue payload: title -> id; body: name;link;date
// Extract event fields from an issue payload. The issue title holds the
// event id; the body is a semicolon-separated "name;link;date" triple.
// The event image is scraped from the linked Meetup page via
// getEventImage (null when unavailable).
async function parseIssueContent(issue) {
  const [eventName = '', eventLink = '', eventDate = ''] = (issue.body || '')
    .split(';')
    .map((piece) => piece.trim());
  const eventId = (issue.title || '').trim();
  const eventImage = await getEventImage(eventLink);
  return { eventId, eventName, eventLink, eventDate, eventImage };
}

// Decide whether an issue should be synced: it must be authored by one of
// the allowed accounts AND carry the "Event" label (case-insensitive).
// Logs a warning (in Spanish, matching the rest of the module) and returns
// false when either filter fails.
function shouldProcessIssue(issueData) {
  const allowedAuthors = ['ghspain-user', 'alexcerezo'];
  const authorOk = issueData.user && allowedAuthors.includes(issueData.user.login);
  const labelOk =
    issueData.labels && issueData.labels.some((l) => l.name && l.name.toLowerCase() === 'event');
  const accepted = Boolean(authorOk && labelOk);
  if (!accepted) {
    console.log(
      `⚠️ Issue #${issueData.number} no cumple filtros: autor=${issueData.user?.login || 'unknown'}, label Event=${labelOk}`
    );
  }
  return accepted;
}

// Read the issues JSON file from disk and return its parsed contents.
// Falls back to an empty array when the file is missing or unparseable,
// logging a warning in the latter case so corruption is visible.
function loadExistingIssues(jsonPath) {
  if (!fs.existsSync(jsonPath)) return [];
  try {
    return JSON.parse(fs.readFileSync(jsonPath, 'utf8'));
  } catch (e) {
    console.warn(`⚠️ Error al leer JSON existente en ${jsonPath}: ${e.message}`);
    return [];
  }
}

// Write the issues array to jsonPath as pretty-printed (2-space) JSON,
// creating any missing parent directories first.
function saveIssues(jsonPath, issues) {
  fs.mkdirSync(path.dirname(jsonPath), { recursive: true });
  fs.writeFileSync(jsonPath, JSON.stringify(issues, null, 2), 'utf8');
}

// Sync a single issue payload into the JSON file at jsonPath.
//
// Issues failing the author/label filters are skipped (the current file
// contents are returned unchanged). Otherwise the issue is parsed into an
// event record and upserted by event_id, the list is re-sorted, and the
// file is rewritten. Returns the resulting array of events.
async function syncIssue(issueData, action, jsonPath = 'public/data/issues.json') {
  console.log(`🔄 Procesando issue #${issueData.number} (acción: ${action})`);
  if (!shouldProcessIssue(issueData)) {
    console.log(`⏭️ Issue #${issueData.number} omitido (no cumple los filtros)`);
    return loadExistingIssues(jsonPath);
  }

  const events = loadExistingIssues(jsonPath);
  const parsed = await parseIssueContent(issueData);
  const entry = {
    event_id: parsed.eventId,
    event_name: parsed.eventName,
    event_link: parsed.eventLink,
    event_date: parsed.eventDate,
    event_image: parsed.eventImage,
  };

  const position = events.findIndex((e) => e.event_id === parsed.eventId);
  if (position === -1) {
    events.push(entry);
    console.log(`➕ Evento añadido: ${parsed.eventId} - ${parsed.eventName}`);
  } else {
    events[position] = entry;
    console.log(`✅ Evento actualizado: ${parsed.eventId} - ${parsed.eventName}`);
  }

  // Ids that look numeric sort numerically; anything else falls back to a
  // string comparison so mixed id formats still get a deterministic order.
  events.sort((a, b) => {
    const left = isNaN(a.event_id) ? a.event_id : parseInt(a.event_id, 10);
    const right = isNaN(b.event_id) ? b.event_id : parseInt(b.event_id, 10);
    return typeof left === 'number' && typeof right === 'number'
      ? left - right
      : String(left).localeCompare(String(right));
  });

  saveIssues(jsonPath, events);
  console.log(`📊 Total de eventos en el archivo: ${events.length}`);
  return events;
}

// Batch utility for images
// Batch utility: refresh event_image for every event in the JSON file by
// re-scraping each event's linked page. Events without a link are left
// untouched; fetch failures are logged and skipped. Requests are made one
// at a time (serial awaits). Returns the updated array of events.
async function updateEventImages(jsonPath = 'public/data/issues.json') {
  console.log(`🔄 Actualizando imágenes de eventos en ${jsonPath}...`);
  const events = loadExistingIssues(jsonPath);
  for (const event of events) {
    if (!event.event_link) continue;
    console.log(`📸 Obteniendo imagen para evento: ${event.event_name}`);
    try {
      const imageUrl = await getEventImage(event.event_link);
      if (imageUrl) {
        event.event_image = imageUrl;
        console.log(`✅ Imagen obtenida: ${imageUrl}`);
      } else {
        console.log(`⚠️ No se encontró imagen para ${event.event_name}`);
      }
    } catch (error) {
      console.error(`❌ Error obteniendo imagen para ${event.event_name}: ${error.message}`);
    }
  }
  saveIssues(jsonPath, events);
  console.log(`📁 Archivo actualizado: ${jsonPath}`);
  return events;
}

// Public API of the core module: scraping (getEventImage), parsing
// (parseIssueContent), filtering (shouldProcessIssue), persistence
// (loadExistingIssues/saveIssues), and the entry points syncIssue and
// updateEventImages used by the action and batch tooling.
module.exports = {
  getEventImage,
  parseIssueContent,
  shouldProcessIssue,
  loadExistingIssues,
  saveIssues,
  syncIssue,
  updateEventImages,
};
12 changes: 12 additions & 0 deletions actions/sync-issue/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"name": "sync-issue",
"version": "1.0.0",
"private": true,
"main": "index.js",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.11.1",
"@actions/github": "^6.0.0",
"cheerio": "^1.0.0"
}
}
Loading