Adding files.

This commit is contained in:
2026-02-03 10:13:33 -05:00
parent fc44a7834a
commit 9dec4a472f
34 changed files with 5984 additions and 0 deletions

218
modules/web_tasks.py Normal file
View File

@@ -0,0 +1,218 @@
import os
import json
import time
import sqlite3
import shutil
from datetime import datetime
from huey import SqliteHuey
from .web_db import db, Run, User, Project
from . import utils
import main
import config
# Configure Huey (task queue) backed by a SQLite file under config.DATA_DIR,
# so queued jobs survive process restarts without an external broker.
huey = SqliteHuey('bookapp_queue', filename=os.path.join(config.DATA_DIR, 'queue.db'))
def db_log_callback(db_path, run_id, phase, msg):
    """Write one log entry directly to SQLite, bypassing the Flask context.

    Huey tasks run in worker threads/processes with no Flask application
    context, so ORM access is unsafe here; raw sqlite3 avoids that.

    Retries up to 5 times on ``sqlite3.OperationalError`` (typically
    "database is locked") with a short back-off. Any other failure aborts
    silently by design: logging must never crash the task.

    Args:
        db_path: Path to the application's SQLite database file.
        run_id: Foreign key of the run this entry belongs to.
        phase: Short phase/category label (e.g. "SYSTEM", "ERROR").
        msg: Message payload; coerced to ``str`` before insert.
    """
    for _ in range(5):
        try:
            with sqlite3.connect(db_path, timeout=5) as conn:
                # NOTE: datetime.utcnow() is deprecated in Python 3.12; kept
                # for now so stored timestamps keep their naive-UTC format.
                conn.execute("INSERT INTO log_entry (run_id, timestamp, phase, message) VALUES (?, ?, ?, ?)",
                             (run_id, datetime.utcnow(), phase, str(msg)))
            break
        except sqlite3.OperationalError:
            time.sleep(0.1)  # likely a transient lock; back off and retry
        except Exception:
            # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
            # still propagate. Best-effort logging: give up quietly.
            break
def _copy_forward_books(runs_dir, run_id):
    """Copy completed Book_* folders from the latest previous run into run_{run_id}.

    A book counts as "completed" when its folder contains manuscript.json.
    Series optimization: books copied forward are skipped by the generator.
    """
    # All previous run folders (exclude the current one).
    all_runs = [d for d in os.listdir(runs_dir) if d.startswith("run_") and d != f"run_{run_id}"]
    # Sort numerically by run id; non-numeric suffixes sort first (as 0).
    all_runs.sort(key=lambda x: int(x.split('_')[1]) if x.split('_')[1].isdigit() else 0)
    if not all_runs:
        return
    latest_run_dir = os.path.join(runs_dir, all_runs[-1])
    current_run_dir = os.path.join(runs_dir, f"run_{run_id}")
    os.makedirs(current_run_dir, exist_ok=True)
    utils.log("SYSTEM", f"Checking previous run ({all_runs[-1]}) for completed books...")
    for item in os.listdir(latest_run_dir):
        # Copy only folders that look like books and have a manuscript.
        if item.startswith("Book_") and os.path.isdir(os.path.join(latest_run_dir, item)):
            if os.path.exists(os.path.join(latest_run_dir, item, "manuscript.json")):
                src = os.path.join(latest_run_dir, item)
                dst = os.path.join(current_run_dir, item)
                try:
                    shutil.copytree(src, dst)
                    utils.log("SYSTEM", f" -> Copied {item} (Skipping generation).")
                except Exception as e:
                    # Best-effort: a failed copy just means the book is regenerated.
                    utils.log("SYSTEM", f" -> Failed to copy {item}: {e}")


@huey.task()
def generate_book_task(run_id, project_path, bible_path, allow_copy=True):
    """Background task that runs a full book-generation job.

    Args:
        run_id: Primary key of the `run` row tracking this job.
        project_path: Absolute path of the project directory.
        bible_path: Path to the bible JSON driving generation.
        allow_copy: When True, completed Book_* folders from the most
            recent previous run are copied forward and not regenerated.

    Returns:
        dict with run_id, final status ('completed'/'failed'), summed
        estimated cost, and the final log-file path.
    """
    # 1. Setup logging. Log to the project root initially; the run folder
    # only exists after copy-forward or main creates it.
    log_filename = f"system_log_{run_id}.txt"
    initial_log = os.path.join(project_path, log_filename)
    utils.set_log_file(initial_log)

    # Mirror log lines into the DB via raw sqlite3 (no Flask context in
    # a Huey worker — see db_log_callback).
    db_path = os.path.join(config.DATA_DIR, "bookapp.db")
    utils.set_log_callback(lambda p, m: db_log_callback(db_path, run_id, p, m))

    # Mark the run as running; best-effort — the job proceeds either way.
    try:
        with sqlite3.connect(db_path, timeout=10) as conn:
            conn.execute("UPDATE run SET status = 'running' WHERE id = ?", (run_id,))
    except sqlite3.Error:
        # Was a bare `except:`; narrowed so KeyboardInterrupt etc. propagate.
        pass
    utils.log("SYSTEM", f"Starting Job #{run_id}")

    try:
        # 1.5 Copy-forward (series optimization).
        runs_dir = os.path.join(project_path, "runs", "bible")
        if allow_copy and os.path.exists(runs_dir):
            _copy_forward_books(runs_dir, run_id)
        # 2. Run generation via the existing entry point.
        main.run_generation(bible_path, specific_run_id=run_id)
        utils.log("SYSTEM", "Job Complete.")
        status = "completed"
    except Exception as e:
        # Top-level task boundary: record the failure, never crash the worker.
        utils.log("ERROR", f"Job Failed: {e}")
        status = "failed"

    # 3. Move the web log into the run folder and sum estimated cost from
    # each Book_*/usage_log.json written by the generator.
    run_dir = os.path.join(project_path, "runs", "bible", f"run_{run_id}")
    total_cost = 0.0
    final_log_path = initial_log
    if os.path.exists(run_dir):
        final_log_path = os.path.join(run_dir, "web_console.log")
        if os.path.exists(initial_log):
            try:
                os.rename(initial_log, final_log_path)
            except OSError:
                # rename fails across filesystems; fall back to copy + delete.
                shutil.copy2(initial_log, final_log_path)
                os.remove(initial_log)
        for item in os.listdir(run_dir):
            item_path = os.path.join(run_dir, item)
            if os.path.isdir(item_path) and item.startswith("Book_"):
                usage_path = os.path.join(item_path, "usage_log.json")
                if os.path.exists(usage_path):
                    data = utils.load_json(usage_path)
                    total_cost += data.get('totals', {}).get('est_cost_usd', 0.0)

    # 4. Persist final status; log the failure rather than crash the worker.
    try:
        with sqlite3.connect(db_path, timeout=10) as conn:
            conn.execute("UPDATE run SET status = ?, cost = ?, end_time = ?, log_file = ? WHERE id = ?",
                         (status, total_cost, datetime.utcnow(), final_log_path, run_id))
    except sqlite3.Error as e:
        print(f"Failed to update run status in DB: {e}")
    return {"run_id": run_id, "status": status, "cost": total_cost, "final_log": final_log_path}
@huey.task()
def regenerate_artifacts_task(run_id, project_path, feedback=None):
    """Background task that regenerates cover + export artifacts for a run.

    Re-syncs book metadata from the project's bible.json into the final
    blueprint, then re-runs cover generation and file compilation without
    touching the manuscript itself.

    Args:
        run_id: Primary key of the `run` row being regenerated.
        project_path: Absolute path of the project directory.
        feedback: Optional user feedback forwarded to the cover generator.
    """
    db_path = os.path.join(config.DATA_DIR, "bookapp.db")

    # Truncate the log file so the console shows only this regeneration.
    log_filename = f"system_log_{run_id}.txt"
    initial_log = os.path.join(project_path, log_filename)
    with open(initial_log, 'w', encoding='utf-8') as f:
        f.write("")
    utils.set_log_file(initial_log)
    utils.set_log_callback(lambda p, m: db_log_callback(db_path, run_id, p, m))

    def _set_status(status):
        # Best-effort DB status update; never crash the worker over it.
        try:
            with sqlite3.connect(db_path) as conn:
                conn.execute("UPDATE run SET status = ? WHERE id = ?", (status, run_id))
        except sqlite3.Error:
            pass

    _set_status('running')
    utils.log("SYSTEM", "Starting Artifact Regeneration...")

    # 1. Resolve paths; artifacts live in the first Book_* subfolder when present.
    run_dir = os.path.join(project_path, "runs", "bible", f"run_{run_id}")
    book_dir = run_dir
    if os.path.exists(run_dir):
        subdirs = sorted([d for d in os.listdir(run_dir)
                          if os.path.isdir(os.path.join(run_dir, d)) and d.startswith("Book_")])
        if subdirs:
            book_dir = os.path.join(run_dir, subdirs[0])
    bible_path = os.path.join(project_path, "bible.json")
    if not os.path.exists(run_dir) or not os.path.exists(bible_path):
        utils.log("ERROR", "Run directory or Bible not found.")
        _set_status('failed')  # fix: previously left the run stuck in 'running'
        return

    # 2. Load inputs.
    bible = utils.load_json(bible_path)
    final_bp_path = os.path.join(book_dir, "final_blueprint.json")
    ms_path = os.path.join(book_dir, "manuscript.json")
    if not os.path.exists(final_bp_path) or not os.path.exists(ms_path):
        utils.log("ERROR", f"Blueprint or Manuscript not found in {book_dir}")
        _set_status('failed')  # fix: previously left the run stuck in 'running'
        return
    bp = utils.load_json(final_bp_path)
    ms = utils.load_json(ms_path)

    # 3. Sync core metadata from the bible into the blueprint.
    meta = bible.get('project_metadata', {})
    if 'book_metadata' in bp:
        for k in ['author', 'genre', 'target_audience', 'style']:
            if k in meta:
                bp['book_metadata'][k] = meta[k]
        if bp.get('series_metadata', {}).get('is_series'):
            # Series: the project title is the series title; each book's own
            # title comes from the matching entry in bible['books'].
            bp['series_metadata']['series_title'] = meta.get('title', bp['series_metadata'].get('series_title'))
            b_num = bp['series_metadata'].get('book_number')
            for b in bible.get('books', []):
                if b.get('book_number') == b_num:
                    bp['book_metadata']['title'] = b.get('title', bp['book_metadata'].get('title'))
                    break
        else:
            bp['book_metadata']['title'] = meta.get('title', bp['book_metadata'].get('title'))
    with open(final_bp_path, 'w', encoding='utf-8') as f:
        json.dump(bp, f, indent=2)

    # 4. Regenerate cover and export files.
    try:
        main.ai.init_models()
        tracking = None
        events_path = os.path.join(book_dir, "tracking_events.json")
        if os.path.exists(events_path):
            tracking = {"events": utils.load_json(events_path),
                        "characters": utils.load_json(os.path.join(book_dir, "tracking_characters.json"))}
        main.marketing.generate_cover(bp, book_dir, tracking, feedback=feedback)
        main.export.compile_files(bp, ms, book_dir)
        utils.log("SYSTEM", "Regeneration Complete.")
        final_status = 'completed'
    except Exception as e:
        # Top-level task boundary: record the failure, never crash the worker.
        utils.log("ERROR", f"Regeneration Failed: {e}")
        final_status = 'failed'
    _set_status(final_status)