13 changes: 13 additions & 0 deletions tools/migrations/26-02-10-a--add_monthly_activity_stats_cache.sql
@@ -0,0 +1,13 @@
+-- Cache table for monthly activity statistics by type
+-- Historical months are cached permanently, current month refreshed periodically
+
+CREATE TABLE IF NOT EXISTS monthly_activity_stats_cache (
+    id INT AUTO_INCREMENT PRIMARY KEY,
+    `year_month` VARCHAR(7) NOT NULL UNIQUE COMMENT 'Format: YYYY-MM',
+    `exercise_minutes` INT NOT NULL DEFAULT 0,
+    `reading_minutes` INT NOT NULL DEFAULT 0,
+    `browsing_minutes` INT NOT NULL DEFAULT 0,
+    `audio_minutes` INT NOT NULL DEFAULT 0,
+    `computed_at` DATETIME NOT NULL,
+    INDEX idx_year_month (`year_month`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;
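
Note: because `year_month` is UNIQUE, the periodic refresh of the current month can be a single upsert, while historical rows are written once and never touched again. A minimal sketch of such a refresh helper, assuming a SQLAlchemy session and minute totals computed elsewhere (the function name and the shape of `minutes_by_type` are assumptions, not part of this migration):

from datetime import datetime
from sqlalchemy import text

def refresh_current_month_stats(db_session, year_month, minutes_by_type):
    # Hypothetical refresh job: upsert the current month's row.
    # The UNIQUE key on `year_month` makes this idempotent per month.
    db_session.execute(
        text(
            "INSERT INTO monthly_activity_stats_cache "
            "(`year_month`, exercise_minutes, reading_minutes, "
            " browsing_minutes, audio_minutes, computed_at) "
            "VALUES (:ym, :ex, :rd, :br, :au, :now) "
            "ON DUPLICATE KEY UPDATE "
            "exercise_minutes = :ex, reading_minutes = :rd, "
            "browsing_minutes = :br, audio_minutes = :au, computed_at = :now"
        ),
        {
            "ym": year_month,  # e.g. "2026-02"
            "ex": minutes_by_type.get("exercise", 0),
            "rd": minutes_by_type.get("reading", 0),
            "br": minutes_by_type.get("browsing", 0),
            "au": minutes_by_type.get("audio", 0),
            "now": datetime.utcnow(),
        },
    )
    db_session.commit()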
39 changes: 35 additions & 4 deletions zeeguu/api/endpoints/article.py
@@ -627,19 +627,50 @@ def clear_article_cache(article_id):
     )
 
     bookmark_count = len(bookmarks)
+    bookmark_ids_to_delete = {b.id for b in bookmarks}
 
-    # First pass: clear all preferred_bookmark_id references
+    # Track UserWords that need cleanup
+    user_words_to_check = {}
     for bookmark in bookmarks:
         user_word = bookmark.user_word
-        if user_word and user_word.preferred_bookmark_id == bookmark.id:
-            user_word.preferred_bookmark_id = None
+        if user_word:
+            user_words_to_check[user_word.id] = user_word
+            # Clear preferred_bookmark if it's one we're deleting
+            if user_word.preferred_bookmark_id == bookmark.id:
+                user_word.preferred_bookmark_id = None
 
-    db_session.flush()  # Commit the nullifications before deleting
-
-    # Second pass: delete bookmarks
+    # Delete bookmarks
     for bookmark in bookmarks:
         db_session.delete(bookmark)
+    db_session.flush()
 
+    # Handle UserWords that are now orphaned or need new preferred_bookmark
+    user_words_marked_unfit = 0
+    for user_word in user_words_to_check.values():
+        # Check for remaining bookmarks not in our delete set
+        remaining_bookmarks = (
+            Bookmark.query
+            .filter(Bookmark.user_word_id == user_word.id)
+            .filter(Bookmark.id.notin_(bookmark_ids_to_delete))
+            .all()
+        )
+
+        if remaining_bookmarks:
+            # Set a new preferred bookmark if needed
+            if user_word.preferred_bookmark is None:
+                user_word.preferred_bookmark = remaining_bookmarks[0]
+        else:
+            # No bookmarks left - keep UserWord for history but mark unfit for study
+            user_word.set_unfit_for_study(db_session)
+            user_words_marked_unfit += 1
+
     db_session.commit()
 
+    if user_words_marked_unfit > 0:
+        log(f"[DEV] Marked {user_words_marked_unfit} UserWords as unfit for study (no bookmarks)")
+
     log(f"[DEV] Cleared cache and {bookmark_count} bookmarks for article {article_id}")
 
     return json_result({
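
Note on the cleanup above: by the time the remaining-bookmark query runs, the deletes have already been flushed, so the `notin_(bookmark_ids_to_delete)` filter is defensive rather than strictly required. The post-condition it preserves can be written as a small check; this is a sketch only, and the `fit_for_study` attribute is an assumption (the diff shows only `set_unfit_for_study`):

def check_user_word_after_cleanup(user_word):
    # Invariant of the cleanup in clear_article_cache: every touched UserWord
    # either still has bookmarks (with a preferred one set), or is kept for
    # history but excluded from study.
    remaining = Bookmark.query.filter_by(user_word_id=user_word.id).all()
    if remaining:
        assert user_word.preferred_bookmark is not None
    else:
        assert not user_word.fit_for_study  # attribute name assumed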
3 changes: 2 additions & 1 deletion zeeguu/api/endpoints/bookmarks_and_words.py
@@ -241,7 +241,8 @@ def delete_bookmark(bookmark_id):
         # in the future we can generate an example for this user word with the help of the robots!
         user_word.set_unfit_for_study(db_session)
     else:
-        # No other bookmarks exist - ALWAYS keep the user_word for historical data
+        # No other bookmarks exist - keep UserWord for historical data
+        # but mark as unfit for study (won't appear in exercises)
         user_word.set_unfit_for_study(db_session)
 
     # Delete any ExampleSentenceContext records that reference this bookmark
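
Note: this comment documents, for the single-bookmark delete path, the same invariant that article.py now enforces in bulk. A condensed sketch of the branch it sits in (the surrounding code is not shown in this diff, so the query below is an assumed shape, not the actual implementation):

# Assumed shape of delete_bookmark's cleanup; only the comments and the
# set_unfit_for_study calls are visible in this diff.
other_bookmarks = (
    Bookmark.query
    .filter(Bookmark.user_word_id == user_word.id)
    .filter(Bookmark.id != bookmark.id)
    .all()
)
if not other_bookmarks:
    # no other bookmarks exist - keep UserWord for historical data,
    # but mark as unfit for study (won't appear in exercises)
    user_word.set_unfit_for_study(db_session)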
22 changes: 1 addition & 21 deletions zeeguu/api/endpoints/exercises.py
@@ -309,7 +309,6 @@ def _user_words_as_json_result(user_words):
             log(f"Failed to get tokenized context for user_word {uw.id}: {e}")
 
     dicts = []
-    words_to_delete = []
 
     for user_word in user_words:
         try:
@@ -320,28 +319,9 @@
                 schedule=schedule,
                 pre_tokenized_context=tokenized_context
             ))
-        except ValueError as e:
-            # This means validate_data_integrity() couldn't repair the issue
-            # (i.e., UserWord has no bookmarks at all)
-            log(f"UserWord {user_word.id} failed validation and cannot be repaired: {str(e)}")
-            words_to_delete.append(user_word)
         except Exception as e:
-            # Log any other unexpected errors and skip
+            # Log unexpected errors and skip (orphaned UserWords are handled gracefully)
             log(f"Unexpected error processing UserWord {user_word.id}: {str(e)}")
             continue
 
-    # Delete UserWords that couldn't be repaired
-    if words_to_delete:
-        for word in words_to_delete:
-            try:
-                db.session.delete(word)
-                log(f"Deleted UserWord {word.id} due to unrepairable data integrity issues")
-            except:
-                log(f"Failed to delete UserWord {word.id}")
-        try:
-            db.session.commit()
-        except:
-            db.session.rollback()
-            log("Failed to commit UserWord deletions")
-
     return json_result(dicts)
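
Note: with orphaned UserWords marked unfit at deletion time (see article.py and bookmarks_and_words.py above), this read path no longer deletes rows as a side effect; it just logs and skips. The selection feeding `_user_words_as_json_result` would then already exclude unfit words; a minimal sketch of such a filter, assuming a `fit_for_study` flag that is not shown in this diff:

def user_words_for_exercises(user_id, limit=20):
    # Sketch: unfit (including orphaned) UserWords never reach
    # _user_words_as_json_result, so no destructive cleanup is needed there.
    return (
        UserWord.query
        .filter(UserWord.user_id == user_id)
        .filter(UserWord.fit_for_study == True)  # assumed column name
        .limit(limit)
        .all()
    )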