2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -37,7 +37,7 @@ jobs:
run: |
python -m pip install --upgrade pip
pip install -e .
pip install pytest pytest-asyncio pytest-cov black flake8 mypy
pip install pytest pytest-asyncio pytest-cov 'black>=24.0.0,<26.0.0' flake8 mypy

- name: Lint with flake8
run: |
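The upper bound on black keeps the CI formatting check reproducible, since Black's stable style can change between major releases. A minimal sketch of a runtime guard that the pinned range holds (hypothetical, not part of this PR; stdlib only):

import importlib.metadata

# Assumed check mirroring the '>=24.0.0,<26.0.0' pin in ci.yml above.
major = int(importlib.metadata.version("black").split(".")[0])
assert 24 <= major < 26, f"black {major}.x is outside the pinned range"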
66 changes: 44 additions & 22 deletions sugar/memory/store.py
@@ -97,7 +97,8 @@ def _init_db(self):
cursor = conn.cursor()

# Main memory entries table
cursor.execute("""
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS memory_entries (
id TEXT PRIMARY KEY,
memory_type TEXT NOT NULL,
@@ -111,64 +112,81 @@ def _init_db(self):
access_count INTEGER DEFAULT 0,
expires_at TIMESTAMP
)
""")
"""
)

# Indexes
cursor.execute("""
cursor.execute(
"""
CREATE INDEX IF NOT EXISTS idx_memory_type
ON memory_entries(memory_type)
""")
cursor.execute("""
"""
)
cursor.execute(
"""
CREATE INDEX IF NOT EXISTS idx_memory_importance
ON memory_entries(importance DESC)
""")
cursor.execute("""
"""
)
cursor.execute(
"""
CREATE INDEX IF NOT EXISTS idx_memory_created
ON memory_entries(created_at DESC)
""")
"""
)

# FTS5 for keyword search (always available)
cursor.execute("""
cursor.execute(
"""
CREATE VIRTUAL TABLE IF NOT EXISTS memory_fts USING fts5(
id,
content,
summary,
content='memory_entries',
content_rowid='rowid'
)
""")
"""
)

# Triggers to keep FTS in sync
cursor.execute("""
cursor.execute(
"""
CREATE TRIGGER IF NOT EXISTS memory_ai AFTER INSERT ON memory_entries BEGIN
INSERT INTO memory_fts(rowid, id, content, summary)
VALUES (new.rowid, new.id, new.content, new.summary);
END
""")
cursor.execute("""
"""
)
cursor.execute(
"""
CREATE TRIGGER IF NOT EXISTS memory_ad AFTER DELETE ON memory_entries BEGIN
INSERT INTO memory_fts(memory_fts, rowid, id, content, summary)
VALUES ('delete', old.rowid, old.id, old.content, old.summary);
END
""")
cursor.execute("""
"""
)
cursor.execute(
"""
CREATE TRIGGER IF NOT EXISTS memory_au AFTER UPDATE ON memory_entries BEGIN
INSERT INTO memory_fts(memory_fts, rowid, id, content, summary)
VALUES ('delete', old.rowid, old.id, old.content, old.summary);
INSERT INTO memory_fts(rowid, id, content, summary)
VALUES (new.rowid, new.id, new.content, new.summary);
END
""")
"""
)

# Vector storage table (if sqlite-vec available)
if self._has_vec:
try:
cursor.execute(f"""
cursor.execute(
f"""
CREATE VIRTUAL TABLE IF NOT EXISTS memory_vectors USING vec0(
id TEXT PRIMARY KEY,
embedding float[{EMBEDDING_DIM}]
)
""")
"""
)
except Exception as e:
logger.warning(f"Failed to create vector table: {e}")
self._has_vec = False
@@ -584,20 +602,24 @@ def prune_expired(self) -> int:
cursor = conn.cursor()

# Get IDs to delete (for vector cleanup)
cursor.execute("""
cursor.execute(
"""
SELECT id FROM memory_entries
WHERE expires_at IS NOT NULL AND expires_at < datetime('now')
""")
"""
)
expired_ids = [row["id"] for row in cursor.fetchall()]

if not expired_ids:
return 0

# Delete from main table
cursor.execute("""
cursor.execute(
"""
DELETE FROM memory_entries
WHERE expires_at IS NOT NULL AND expires_at < datetime('now')
""")
"""
)
deleted = cursor.rowcount

# Clean up vectors
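The triggers above keep the external-content FTS5 index in sync with memory_entries: external-content tables hold no row data of their own, so deletions are expressed as special 'delete' inserts. A minimal sketch of a keyword query against this schema, assuming direct sqlite3 access (the store's own search method is not shown in this diff):

import sqlite3

def keyword_search(db_path: str, query: str, limit: int = 10):
    # bm25() is FTS5's built-in ranking function; lower scores rank better.
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    try:
        return conn.execute(
            """
            SELECT e.id, e.summary, bm25(memory_fts) AS score
            FROM memory_fts
            JOIN memory_entries e ON e.rowid = memory_fts.rowid
            WHERE memory_fts MATCH ?
            ORDER BY score
            LIMIT ?
            """,
            (query, limit),
        ).fetchall()
    finally:
        conn.close()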
16 changes: 13 additions & 3 deletions sugar/orchestration/task_orchestrator.py
@@ -721,7 +721,9 @@ def _build_stage_prompt(
"""

if stage == OrchestrationStage.RESEARCH:
return base_prompt + """
return (
base_prompt
+ """
## Your Role
You are conducting research for this task. Your goals:
1. Search for relevant best practices and documentation
@@ -736,6 +738,7 @@ def _build_stage_prompt(
- Technical requirements
- Recommendations for implementation
"""
)

elif stage == OrchestrationStage.PLANNING:
research_context = ""
@@ -744,7 +747,10 @@ def _build_stage_prompt(
f"\n## Research Findings\n{context['research_output']}\n"
)

return base_prompt + research_context + """
return (
base_prompt
+ research_context
+ """
## Your Role
You are creating an implementation plan for this task. Your goals:
1. Break down the task into manageable subtasks
@@ -769,12 +775,15 @@ def _build_stage_prompt(
## Dependencies
Explain the order of execution and why.
"""
)

elif stage == OrchestrationStage.REVIEW:
impl_results = context.get("subtask_results", [])
files_modified = context.get("files_modified", [])

return base_prompt + f"""
return (
base_prompt
+ f"""
## Implementation Complete
The following subtasks have been completed:
{json.dumps(impl_results, indent=2)}
@@ -797,6 +806,7 @@ def _build_stage_prompt(
- Recommendations for improvement
- Overall assessment (pass/fail)
"""
)

else:
return base_prompt
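The changes in this file are pure Black formatting: returns of the form base_prompt + """...""" become explicitly parenthesized concatenations, with no change to the produced prompts. A reduced sketch of the same pattern, with hypothetical names:

def build_stage_prompt(stage: str, base_prompt: str) -> str:
    # Parenthesized concatenation keeps long string literals within the
    # line limit without backslash continuations, matching the diff above.
    if stage == "research":
        return (
            base_prompt
            + """
## Your Role
Summarize relevant findings before planning.
"""
        )
    return base_prompt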
12 changes: 8 additions & 4 deletions sugar/storage/issue_response_manager.py
@@ -26,7 +26,8 @@ async def initialize(self) -> None:
return

async with aiosqlite.connect(self.db_path) as db:
await db.execute("""
await db.execute(
"""
CREATE TABLE IF NOT EXISTS issue_responses (
id TEXT PRIMARY KEY,
repo TEXT NOT NULL,
@@ -40,12 +41,15 @@ async def initialize(self) -> None:
was_auto_posted BOOLEAN DEFAULT 0,
UNIQUE(repo, issue_number, response_type)
)
""")
"""
)

await db.execute("""
await db.execute(
"""
CREATE INDEX IF NOT EXISTS idx_issue_responses_repo_number
ON issue_responses (repo, issue_number)
""")
"""
)

await db.commit()

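The reformatting leaves the aiosqlite idiom intact: all DDL runs inside a single async with aiosqlite.connect(...) block and is committed once at the end. A self-contained sketch of that idiom (table name hypothetical):

import asyncio
import aiosqlite

async def init_schema(db_path: str) -> None:
    async with aiosqlite.connect(db_path) as db:
        await db.execute(
            """
            CREATE TABLE IF NOT EXISTS demo_responses (
                id TEXT PRIMARY KEY,
                body TEXT NOT NULL
            )
            """
        )
        await db.commit()  # one commit after all statements, as above

asyncio.run(init_schema("demo.db"))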
6 changes: 4 additions & 2 deletions sugar/storage/task_type_manager.py
@@ -35,7 +35,8 @@ async def initialize(self):

if not table_exists:
# Create task_types table
await db.execute("""
await db.execute(
"""
CREATE TABLE task_types (
id TEXT PRIMARY KEY,
name TEXT NOT NULL,
@@ -56,7 +57,8 @@ async def initialize(self):
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
""")
"""
)

# Populate with default types
default_types = self._get_default_task_types()
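initialize() only creates task_types when table_exists is false; the check itself falls outside this hunk. With aiosqlite it is typically a sqlite_master lookup, sketched here under that assumption:

import aiosqlite

async def table_exists(db: aiosqlite.Connection, name: str) -> bool:
    # Assumed helper; the PR does not show the actual implementation.
    cursor = await db.execute(
        "SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = ?",
        (name,),
    )
    return await cursor.fetchone() is not None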
56 changes: 38 additions & 18 deletions sugar/storage/work_queue.py
@@ -28,7 +28,8 @@ async def initialize(self):
return

async with aiosqlite.connect(self.db_path) as db:
await db.execute("""
await db.execute(
"""
CREATE TABLE IF NOT EXISTS work_items (
id TEXT PRIMARY KEY,
type TEXT NOT NULL,
@@ -51,17 +52,22 @@ async def initialize(self):
total_elapsed_time REAL DEFAULT 0.0,
commit_sha TEXT
)
""")
"""
)

await db.execute("""
await db.execute(
"""
CREATE INDEX IF NOT EXISTS idx_work_items_priority_status
ON work_items (priority ASC, status, created_at)
""")
"""
)

await db.execute("""
await db.execute(
"""
CREATE INDEX IF NOT EXISTS idx_work_items_status
ON work_items (status)
""")
"""
)

# Migrate existing databases to add timing columns and task types table
await self._migrate_timing_columns(db)
@@ -120,7 +126,8 @@ async def _migrate_task_types_table(self, db):

if not table_exists:
# Create task_types table
await db.execute("""
await db.execute(
"""
CREATE TABLE task_types (
id TEXT PRIMARY KEY,
name TEXT NOT NULL,
Expand All @@ -133,7 +140,8 @@ async def _migrate_task_types_table(self, db):
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
""")
"""
)

# Insert default task types
default_types = [
@@ -261,10 +269,12 @@ async def _migrate_orchestration_columns(self, db):
logger.info("Added assigned_agent column to existing database")

# Create index for parent_task_id queries
await db.execute("""
await db.execute(
"""
CREATE INDEX IF NOT EXISTS idx_work_items_parent_task_id
ON work_items (parent_task_id)
""")
"""
)

except Exception as e:
logger.warning(f"Orchestration migration warning (non-critical): {e}")
@@ -446,12 +456,14 @@ async def get_next_work(self) -> Optional[Dict[str, Any]]:
db.row_factory = aiosqlite.Row

# Get highest priority pending work item (exclude hold status)
cursor = await db.execute("""
cursor = await db.execute(
"""
SELECT * FROM work_items
WHERE status = 'pending'
ORDER BY priority ASC, created_at ASC
LIMIT 1
""")
"""
)

row = await cursor.fetchone()

@@ -694,11 +706,13 @@ async def get_stats(self) -> Dict[str, int]:
stats = {}

# Count by status
cursor = await db.execute("""
cursor = await db.execute(
"""
SELECT status, COUNT(*) as count
FROM work_items
GROUP BY status
""")
"""
)

rows = await cursor.fetchall()
for row in rows:
Expand All @@ -712,22 +726,28 @@ async def get_stats(self) -> Dict[str, int]:
stats["total"] = sum(stats.values())

# Recent activity (last 24 hours)
cursor = await db.execute("""
cursor = await db.execute(
"""
SELECT COUNT(*) FROM work_items
WHERE created_at > datetime('now', '-1 day')
""")
"""
)
stats["recent_24h"] = (await cursor.fetchone())[0]

return stats

async def cleanup_old_items(self, days_old: int = 30):
"""Clean up old completed/failed items"""
async with aiosqlite.connect(self.db_path) as db:
cursor = await db.execute("""
cursor = await db.execute(
"""
DELETE FROM work_items
WHERE status IN ('completed', 'failed')
AND created_at < datetime('now', '-{} days')
""".format(days_old))
""".format(
days_old
)
)

deleted_count = cursor.rowcount
await db.commit()
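One note on the last hunk: cleanup_old_items still splices days_old into the SQL via str.format, and Black only reflows the call. Because days_old is an int the injection risk is minimal, but the query can be fully parameterized by assembling the datetime modifier in SQL. A hedged alternative, not part of this PR:

import aiosqlite

async def cleanup_old_items(db_path: str, days_old: int = 30) -> int:
    async with aiosqlite.connect(db_path) as db:
        # '-' || ? || ' days' builds the modifier string inside SQLite,
        # so days_old is bound as a parameter instead of formatted in.
        cursor = await db.execute(
            """
            DELETE FROM work_items
            WHERE status IN ('completed', 'failed')
              AND created_at < datetime('now', '-' || ? || ' days')
            """,
            (str(days_old),),
        )
        await db.commit()
        return cursor.rowcount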