core/utils.py: - estimate_tokens: improved heuristic 4 chars/token → 3.5 chars/token (more accurate) - truncate_to_tokens: added keep_head=True mode for head+tail truncation (better context retention for story summaries that need both opening and recent content) - load_json: explicit exception handling (json.JSONDecodeError, OSError) with log instead of silent returns; added utf-8 encoding with error replacement - log_image_attempt: replaced bare except with (json.JSONDecodeError, OSError); added utf-8 encoding to output write - log_usage: replaced bare except with AttributeError for token count extraction story/bible_tracker.py: - merge_selected_changes: wrapped all int() key casts (char idx, book num, beat idx) in try/except with meaningful log warning instead of crashing on malformed keys - harvest_metadata: replaced bare except:pass with except Exception as e + log message cli/engine.py: - Persona validation: added warning when all 3 attempts fail and substandard persona is accepted — flags elevated voice-drift risk for the run - Lore index updates: throttled from every chapter to every 3 chapters; lore is stable after the first few chapters (~10% token saving per book) - Mid-gen consistency check: now samples first 2 + last 8 chapters instead of passing full manuscript — caps token cost regardless of book length story/writer.py: - Two-pass polish: added local filter-word density check (no API call); skips the Pro polish if density < 1 per 83 words — saves ~8K tokens on already-clean drafts - Polish prompt: added prev_context_block for continuity — polished chapter now maintains seamless flow from the previous chapter's ending marketing/fonts.py: - Separated requests.exceptions.Timeout with specific log message vs generic failure - Added explicit log message when Roboto fallback also fails (returns None) marketing/blurb.py: - Added word count trim: blurbs > 220 words trimmed to last sentence within 220 words - Changed bare except to except Exception as e with 
log message - Added utf-8 encoding to file writes; logs final word count Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
498 lines
24 KiB
Python
import json
|
|
import os
|
|
import time
|
|
import sys
|
|
import shutil
|
|
from rich.prompt import Confirm
|
|
from core import config, utils
|
|
from ai import models as ai_models
|
|
from ai import setup as ai_setup
|
|
from story import planner, writer as story_writer, editor as story_editor
|
|
from story import style_persona, bible_tracker, state as story_state
|
|
from story.writer import build_persona_info
|
|
from marketing import assets as marketing_assets
|
|
from export import exporter
|
|
|
|
|
|
def process_book(bp, folder, context="", resume=False, interactive=False):
    """Generate one complete book: blueprint, structure, chapter plan, prose, post-processing.

    Args:
        bp: Book blueprint dict (book_metadata, length_settings, characters, plot_beats, ...).
        folder: Output directory for this book's JSON artifacts and exports.
        context: Optional summary of the previous book in the series, fed to the planner.
        resume: If True, reuse artifacts already on disk instead of regenerating them.
        interactive: If True, prompt the user to accept/retry chapters and failures.

    Side effects:
        Writes blueprint/events/chapters/manuscript/tracking JSON files into `folder`.
        Creates a `.in_progress` lock file for the duration of the run; the lock is
        always removed in the `finally` block, even on failure.

    Raises:
        Re-raises any phase failure after logging it (blueprint, structure, chapter
        planning, writing, post-processing).
    """
    # Create lock file to indicate active processing
    lock_path = os.path.join(folder, ".in_progress")
    with open(lock_path, "w") as f: f.write("running")

    total_start = time.time()

    try:
        # 1. Check completion — a final blueprint on disk marks a fully finished book.
        if resume and os.path.exists(os.path.join(folder, "final_blueprint.json")):
            utils.log("SYSTEM", f"Book in {folder} already finished. Skipping.")
            if os.path.exists(lock_path): os.remove(lock_path)
            return

        # 2. Load or Create Blueprint
        bp_path = os.path.join(folder, "blueprint_initial.json")
        t_step = time.time()
        utils.update_progress(5)
        utils.log("SYSTEM", "--- Phase: Blueprint ---")
        try:
            if resume and os.path.exists(bp_path):
                utils.log("RESUME", "Loading existing blueprint...")
                saved_bp = utils.load_json(bp_path)
                if saved_bp:
                    # The caller's fresh metadata wins over what was saved to disk,
                    # so title/author/style edits in the bible propagate on resume.
                    if 'book_metadata' in bp and 'book_metadata' in saved_bp:
                        for k in ['title', 'author', 'genre', 'target_audience', 'style', 'author_bio', 'author_details']:
                            if k in bp['book_metadata']:
                                saved_bp['book_metadata'][k] = bp['book_metadata'][k]
                    if 'series_metadata' in bp:
                        saved_bp['series_metadata'] = bp['series_metadata']
                    bp = saved_bp
                    with open(bp_path, "w") as f: json.dump(bp, f, indent=2)
            else:
                bp = planner.enrich(bp, folder, context)
                with open(bp_path, "w") as f: json.dump(bp, f, indent=2)

            # Ensure Persona Exists (Auto-create + Exp 6: Validate before accepting)
            if 'author_details' not in bp['book_metadata'] or not bp['book_metadata']['author_details']:
                max_persona_attempts = 3
                for persona_attempt in range(1, max_persona_attempts + 1):
                    candidate_persona = style_persona.create_initial_persona(bp, folder)
                    is_valid, p_score = style_persona.validate_persona(bp, candidate_persona, folder)
                    # Accept on pass, or on the last attempt (with a warning) so
                    # generation never dead-ends on persona quality alone.
                    if is_valid or persona_attempt == max_persona_attempts:
                        if not is_valid:
                            utils.log("SYSTEM", f" ⚠️ Persona accepted after {max_persona_attempts} attempts despite low score ({p_score}/10). Voice drift risk elevated.")
                        bp['book_metadata']['author_details'] = candidate_persona
                        break
                    utils.log("SYSTEM", f" -> Persona attempt {persona_attempt}/{max_persona_attempts} scored {p_score}/10. Regenerating...")
                with open(bp_path, "w") as f: json.dump(bp, f, indent=2)
        except Exception as _e:
            utils.log("ERROR", f"Blueprint phase failed: {type(_e).__name__}: {_e}")
            raise

        utils.log("TIMING", f"Blueprint Phase: {time.time() - t_step:.1f}s")

        # 3. Events (Plan & Expand)
        events_path = os.path.join(folder, "events.json")
        t_step = time.time()
        utils.update_progress(10)
        utils.log("SYSTEM", "--- Phase: Story Structure & Events ---")
        try:
            if resume and os.path.exists(events_path):
                utils.log("RESUME", "Loading existing events...")
                events = utils.load_json(events_path)
            else:
                events = planner.plan_structure(bp, folder)
                depth = bp['length_settings']['depth']
                target_chaps = bp['length_settings']['chapters']
                for d in range(1, depth+1):
                    utils.log("SYSTEM", f" Expanding story structure depth {d}/{depth}...")
                    events = planner.expand(events, d, target_chaps, bp, folder)
                    time.sleep(1)  # brief pause between expansion calls (rate limiting)
            with open(events_path, "w") as f: json.dump(events, f, indent=2)
        except Exception as _e:
            utils.log("ERROR", f"Events/Structure phase failed: {type(_e).__name__}: {_e}")
            raise
        utils.log("TIMING", f"Structure & Expansion: {time.time() - t_step:.1f}s")

        # 4. Chapter Plan
        chapters_path = os.path.join(folder, "chapters.json")
        t_step = time.time()
        utils.update_progress(15)
        utils.log("SYSTEM", "--- Phase: Chapter Planning ---")
        try:
            if resume and os.path.exists(chapters_path):
                utils.log("RESUME", "Loading existing chapter plan...")
                chapters = utils.load_json(chapters_path)
            else:
                chapters = planner.create_chapter_plan(events, bp, folder)
            with open(chapters_path, "w") as f: json.dump(chapters, f, indent=2)
        except Exception as _e:
            utils.log("ERROR", f"Chapter planning phase failed: {type(_e).__name__}: {_e}")
            raise
        utils.log("TIMING", f"Chapter Planning: {time.time() - t_step:.1f}s")

        # 4b. Outline Validation Gate (Alt 2-B: pre-generation quality check).
        # Non-blocking: a validation failure is logged, never fatal.
        if chapters and not resume:
            try:
                planner.validate_outline(events, chapters, bp, folder)
            except Exception as _e:
                utils.log("ARCHITECT", f"Outline validation skipped: {_e}")

        # 5. Writing Loop
        ms_path = os.path.join(folder, "manuscript.json")
        loaded_ms = utils.load_json(ms_path) if (resume and os.path.exists(ms_path)) else []
        ms = loaded_ms if loaded_ms is not None else []

        # Load Tracking
        events_track_path = os.path.join(folder, "tracking_events.json")
        chars_track_path = os.path.join(folder, "tracking_characters.json")
        warn_track_path = os.path.join(folder, "tracking_warnings.json")
        lore_track_path = os.path.join(folder, "tracking_lore.json")

        tracking = {"events": [], "characters": {}, "content_warnings": [], "lore": {}}
        if resume:
            # load_json returns None on a corrupt/unreadable file — fall back to the
            # empty default in that case (previously only the lore load was guarded).
            if os.path.exists(events_track_path):
                tracking['events'] = utils.load_json(events_track_path) or []
            if os.path.exists(chars_track_path):
                tracking['characters'] = utils.load_json(chars_track_path) or {}
            if os.path.exists(warn_track_path):
                tracking['content_warnings'] = utils.load_json(warn_track_path) or []
            if os.path.exists(lore_track_path):
                tracking['lore'] = utils.load_json(lore_track_path) or {}

        # Load structured story state
        current_story_state = story_state.load_story_state(folder)

        summary = "The story begins."
        if ms:
            # On resume, rebuild the "Story So Far" context from a sample of the
            # existing chapters (first 1 + last 4) to bound the token cost.
            utils.log("RESUME", f"Rebuilding story context from {len(ms)} existing chapters...")
            try:
                selected = ms[:1] + ms[-4:] if len(ms) > 5 else ms
                combined_text = "\n".join([f"Chapter {c['num']}: {c['content'][:3000]}" for c in selected])
                resp_sum = ai_models.model_writer.generate_content(f"""
ROLE: Series Historian
TASK: Create a cumulative 'Story So Far' summary.
INPUT_TEXT:
{combined_text}
INSTRUCTIONS: Use dense, factual bullet points. Focus on character meetings, relationships, and known information.
OUTPUT: Summary text.
""")
                utils.log_usage(folder, ai_models.model_writer.name, resp_sum.usage_metadata)
                summary = resp_sum.text
            except Exception:
                # Best-effort: a usable (if vague) summary beats aborting the resume.
                # (Was a bare `except:` — now lets KeyboardInterrupt/SystemExit through.)
                summary = "The story continues."

        utils.log("SYSTEM", f"--- Phase: Writing ({len(chapters)} chapters planned) ---")
        t_step = time.time()
        session_chapters = 0
        session_time = 0

        # Pre-load persona once for the entire writing phase (Alt 3-D: persona cache)
        # Rebuilt after each refine_persona() call to pick up bio updates.
        cached_persona = build_persona_info(bp)

        i = len(ms)
        while i < len(chapters):
            ch_start = time.time()
            ch = chapters[i]

            # Check for stop signal from Web UI
            run_dir = os.path.dirname(folder)
            if os.path.exists(os.path.join(run_dir, ".stop")):
                utils.log("SYSTEM", "Stop signal detected. Aborting generation.")
                break

            # Robust Resume: Check if this specific chapter number is already in the manuscript
            if any(str(c.get('num')) == str(ch['chapter_number']) for c in ms):
                i += 1
                continue

            # Progress Banner
            utils.update_progress(15 + int((i / len(chapters)) * 75))
            utils.log_banner("WRITER", f"Chapter {ch['chapter_number']}/{len(chapters)}: {ch['title']}")

            prev_content = ms[-1]['content'] if ms else None

            while True:
                try:
                    # Build context: use structured state if available, fall back to summary blob
                    structured_ctx = story_state.format_for_prompt(current_story_state, ch.get('beats', []))
                    if structured_ctx:
                        summary_ctx = structured_ctx
                    else:
                        summary_ctx = summary[-8000:] if len(summary) > 8000 else summary
                    next_hint = chapters[i+1]['title'] if i + 1 < len(chapters) else ""
                    # Normalized position in the book [0, 1]; 0.5 for single-chapter books.
                    chap_pos = i / max(len(chapters) - 1, 1) if len(chapters) > 1 else 0.5
                    txt = story_writer.write_chapter(ch, bp, folder, summary_ctx, tracking, prev_content, next_chapter_hint=next_hint, prebuilt_persona=cached_persona, chapter_position=chap_pos)
                except Exception as e:
                    utils.log("SYSTEM", f"Chapter generation failed: {e}")
                    if interactive:
                        if Confirm.ask("Generation failed (quality/error). Retry?", default=True):
                            continue
                    raise  # plain re-raise preserves the original traceback (was `raise e`)

                if interactive:
                    print(f"\n--- Chapter {ch['chapter_number']} Preview ---\n{txt[:800]}...\n-------------------------------")
                    if Confirm.ask(f"Accept Chapter {ch['chapter_number']}?", default=True):
                        break
                    else:
                        utils.log("SYSTEM", "Regenerating chapter...")
                else:
                    break

            # Refine Persona to match the actual output (every 5 chapters)
            if (i == 0 or i % 5 == 0) and txt:
                bp['book_metadata']['author_details'] = style_persona.refine_persona(bp, txt, folder)
                with open(bp_path, "w") as f: json.dump(bp, f, indent=2)
                cached_persona = build_persona_info(bp)  # Rebuild cache with updated bio

            # Look ahead for context
            next_info = ""
            if i + 1 < len(chapters):
                next_ch = chapters[i+1]
                next_info = f"\nUPCOMING CONTEXT (Prioritize details relevant to this): {next_ch.get('title')} - {json.dumps(next_ch.get('beats', []))}"

            try:
                update_prompt = f"""
ROLE: Series Historian
TASK: Update the 'Story So Far' summary to include the events of this new chapter.

INPUT_DATA:
- CURRENT_SUMMARY:
{summary}
- NEW_CHAPTER_TEXT:
{txt}
- UPCOMING_CONTEXT_HINT: {next_info}

INSTRUCTIONS:
1. STYLE: Dense, factual, chronological bullet points. Avoid narrative prose.
2. CUMULATIVE: Do NOT remove old events. Append and integrate new information.
3. TRACKING: Explicitly note who met whom, who knows what, and current locations.
4. RELEVANCE: Ensure details needed for the UPCOMING CONTEXT are preserved.

OUTPUT: Updated summary text.
"""
                resp_sum = ai_models.model_writer.generate_content(update_prompt)
                utils.log_usage(folder, ai_models.model_writer.name, resp_sum.usage_metadata)
                summary = resp_sum.text
            except Exception:
                # Fallback 1: summarize just the new chapter and append it.
                # (Both fallbacks were bare `except:` — narrowed to Exception.)
                try:
                    resp_fallback = ai_models.model_writer.generate_content(f"ROLE: Summarizer\nTASK: Summarize plot points.\nTEXT: {txt}\nOUTPUT: Bullet points.")
                    utils.log_usage(folder, ai_models.model_writer.name, resp_fallback.usage_metadata)
                    summary += f"\n\nChapter {ch['chapter_number']}: " + resp_fallback.text
                except Exception:
                    # Fallback 2: keep a placeholder so the chapter is at least recorded.
                    summary += f"\n\nChapter {ch['chapter_number']}: [Content processed]"

            ms.append({'num': ch['chapter_number'], 'title': ch['title'], 'pov_character': ch.get('pov_character'), 'content': txt})

            with open(ms_path, "w") as f: json.dump(ms, f, indent=2)
            utils.send_heartbeat()  # Signal that the task is still alive

            # Update Tracking
            tracking = bible_tracker.update_tracking(folder, ch['chapter_number'], txt, tracking)
            with open(events_track_path, "w") as f: json.dump(tracking['events'], f, indent=2)
            with open(chars_track_path, "w") as f: json.dump(tracking['characters'], f, indent=2)
            with open(warn_track_path, "w") as f: json.dump(tracking.get('content_warnings', []), f, indent=2)

            # Update Lore Index (Item 8: RAG-Lite) — every 3 chapters (lore is stable after ch 1-3)
            if i == 0 or i % 3 == 0:
                tracking['lore'] = bible_tracker.update_lore_index(folder, txt, tracking.get('lore', {}))
                with open(lore_track_path, "w") as f: json.dump(tracking['lore'], f, indent=2)

            # Update Structured Story State (Item 9: Thread Tracking)
            current_story_state = story_state.update_story_state(txt, ch['chapter_number'], current_story_state, folder)

            # Exp 5: Mid-gen Consistency Snapshot (every 10 chapters)
            # Sample: first 2 + last 8 chapters to keep token cost bounded regardless of book length
            if len(ms) > 0 and len(ms) % 10 == 0:
                utils.log("EDITOR", f"--- Mid-gen consistency check after chapter {ch['chapter_number']} ({len(ms)} written) ---")
                try:
                    ms_sample = (ms[:2] + ms[-8:]) if len(ms) > 10 else ms
                    consistency = story_editor.analyze_consistency(bp, ms_sample, folder)
                    issues = consistency.get('issues', [])
                    if issues:
                        for issue in issues:
                            utils.log("EDITOR", f" ⚠️ {issue}")
                    c_score = consistency.get('score', 'N/A')
                    c_summary = consistency.get('summary', '')
                    utils.log("EDITOR", f" Consistency score: {c_score}/10 — {c_summary}")
                except Exception as _ce:
                    utils.log("EDITOR", f" Mid-gen consistency check failed (non-blocking): {_ce}")

            # Dynamic Pacing Check (every other chapter)
            remaining = chapters[i+1:]
            if remaining and len(remaining) >= 2 and i % 2 == 1:
                pacing = story_editor.check_pacing(bp, summary, txt, ch, remaining, folder)
                if pacing and pacing.get('status') == 'add_bridge':
                    new_data = pacing.get('new_chapter', {})
                    if chapters:
                        avg_words = int(sum(c.get('estimated_words', 1500) for c in chapters) / len(chapters))
                    else:
                        avg_words = 1500
                    new_ch = {
                        "chapter_number": ch['chapter_number'] + 1,
                        "title": new_data.get('title', 'Bridge Chapter'),
                        "pov_character": new_data.get('pov_character', ch.get('pov_character')),
                        "pacing": "Slow",
                        "estimated_words": avg_words,
                        "beats": new_data.get('beats', [])
                    }
                    chapters.insert(i+1, new_ch)
                    # Renumber everything after the insertion point.
                    for k in range(i+1, len(chapters)): chapters[k]['chapter_number'] = k + 1
                    with open(chapters_path, "w") as f: json.dump(chapters, f, indent=2)
                    utils.log("ARCHITECT", f" -> Pacing Intervention: Added bridge chapter '{new_ch['title']}' to fix rushing.")

                elif pacing and pacing.get('status') == 'cut_next':
                    removed = chapters.pop(i+1)
                    for k in range(i+1, len(chapters)): chapters[k]['chapter_number'] = k + 1
                    with open(chapters_path, "w") as f: json.dump(chapters, f, indent=2)
                    utils.log("ARCHITECT", f" -> Pacing Intervention: Removed redundant chapter '{removed['title']}'.")
                elif pacing:
                    utils.log("ARCHITECT", f" -> Pacing OK. {pacing.get('reason', '')[:100]}")

            # Increment loop
            i += 1

            duration = time.time() - ch_start
            session_chapters += 1
            session_time += duration
            avg_time = session_time / session_chapters
            # `i` was just incremented, so remaining chapters = len(chapters) - i
            # (was `- (i + 1)`, which undercounted the ETA by one chapter).
            eta = avg_time * (len(chapters) - i)

            prog = 15 + int((i / len(chapters)) * 75)
            utils.update_progress(prog)

            word_count = len(txt.split()) if txt else 0
            utils.log("TIMING", f" -> Ch {ch['chapter_number']} done in {duration:.1f}s | {word_count:,} words | Avg: {avg_time:.1f}s | ETA: {int(eta//60)}m {int(eta%60)}s")

        utils.log("TIMING", f"Writing Phase: {time.time() - t_step:.1f}s")

        # Post-Processing
        t_step = time.time()
        utils.log("SYSTEM", "--- Phase: Post-Processing (Harvest, Cover, Export) ---")
        try:
            utils.update_progress(92)
            utils.log("SYSTEM", " Harvesting metadata from manuscript...")
            bp = bible_tracker.harvest_metadata(bp, folder, ms)
            with open(os.path.join(folder, "final_blueprint.json"), "w") as f: json.dump(bp, f, indent=2)

            utils.update_progress(95)
            utils.log("SYSTEM", " Generating cover and marketing assets...")
            marketing_assets.create_marketing_assets(bp, folder, tracking, interactive=interactive)

            utils.log("SYSTEM", " Updating author persona sample...")
            style_persona.update_persona_sample(bp, folder)

            utils.update_progress(98)
            utils.log("SYSTEM", " Compiling final export files...")
            exporter.compile_files(bp, ms, folder)
        except Exception as _e:
            utils.log("ERROR", f"Post-processing phase failed: {type(_e).__name__}: {_e}")
            raise

        utils.log("TIMING", f"Post-Processing: {time.time() - t_step:.1f}s")
        utils.log("SYSTEM", f"Book Finished. Total Time: {time.time() - total_start:.1f}s")

    finally:
        # Always release the lock, whether we finished, failed, or were stopped.
        if os.path.exists(lock_path): os.remove(lock_path)
|
|
|
|
def run_generation(target=None, specific_run_id=None, interactive=False):
    """Drive series generation: load the world bible, pick/resume a run folder,
    and process each book in order, feeding each book's outcome into the next.

    Args:
        target: Path to the world-bible JSON; defaults to config.DEFAULT_BLUEPRINT.
        specific_run_id: If given, use (and resume into) runs/run_<id> directly.
        interactive: Passed through to process_book and used for resume prompts.

    Side effects: creates run directories, writes per-book artifacts via
    process_book, and updates the world bible file in place with harvested
    characters/plot data after each finished book.
    """
    utils.log("SYSTEM", "=== run_generation: Initialising AI models ===")
    ai_setup.init_models()

    if not target: target = config.DEFAULT_BLUEPRINT
    data = utils.load_json(target)

    if not data:
        utils.log("ERROR", f"Could not load bible/target: {target}")
        return

    utils.log("SYSTEM", f"=== Starting Series Generation: {data.get('project_metadata', {}).get('title', 'Untitled')} ===")

    project_dir = os.path.dirname(os.path.abspath(target))
    runs_base = os.path.join(project_dir, "runs")

    run_dir = None
    resume_mode = False

    if specific_run_id:
        # Explicit run id: always resume into that folder (created if missing).
        run_dir = os.path.join(runs_base, f"run_{specific_run_id}")
        if not os.path.exists(run_dir): os.makedirs(run_dir)
        resume_mode = True
    else:
        # Otherwise look for an interrupted run (any .in_progress lock inside).
        latest_run = utils.get_latest_run_folder(runs_base)
        if latest_run:
            has_lock = False
            for _root, _dirs, files in os.walk(latest_run):
                if ".in_progress" in files:
                    has_lock = True
                    break

            if has_lock:
                if Confirm.ask(f"Found incomplete run '{os.path.basename(latest_run)}'. Resume generation?", default=True):
                    run_dir = latest_run
                    resume_mode = True
                elif Confirm.ask(f"Delete artifacts in '{os.path.basename(latest_run)}' and start over?", default=False):
                    shutil.rmtree(latest_run)
                    os.makedirs(latest_run)
                    run_dir = latest_run

    if not run_dir: run_dir = utils.get_run_folder(runs_base)
    utils.log("SYSTEM", f"Run Directory: {run_dir}")

    previous_context = ""

    for i, book in enumerate(data['books']):
        utils.log("SERIES", f"Processing Book {book.get('book_number')}: {book.get('title')}")

        if os.path.exists(os.path.join(run_dir, ".stop")):
            utils.log("SYSTEM", "Stop signal detected. Aborting series generation.")
            break

        # Assemble the per-book blueprint from series-level metadata + this book's entry.
        meta = data['project_metadata']
        bp = {
            "book_metadata": {
                "title": book.get('title'),
                "filename": book.get('filename'),
                "author": meta.get('author'),
                "genre": meta.get('genre'),
                "target_audience": meta.get('target_audience'),
                "style": meta.get('style', {}),
                "author_details": meta.get('author_details', {}),
                "author_bio": meta.get('author_bio', ''),
            },
            "length_settings": meta.get('length_settings', {}),
            "characters": data.get('characters', []),
            "manual_instruction": book.get('manual_instruction', ''),
            "plot_beats": book.get('plot_beats', []),
            "series_metadata": {
                "is_series": meta.get('is_series', False),
                "series_title": meta.get('title', ''),
                "book_number": book.get('book_number', i+1),
                "total_books": len(data['books'])
            }
        }

        safe_title = utils.sanitize_filename(book.get('title', f"Book_{i+1}"))
        book_folder = os.path.join(run_dir, f"Book_{book.get('book_number', i+1)}_{safe_title}")
        os.makedirs(book_folder, exist_ok=True)

        utils.log("SYSTEM", f"--- Starting process_book for '{book.get('title')}' in {book_folder} ---")
        try:
            process_book(bp, book_folder, context=previous_context, resume=resume_mode, interactive=interactive)
        except Exception as _e:
            utils.log("ERROR", f"process_book failed for Book {book.get('book_number')}: {type(_e).__name__}: {_e}")
            raise
        utils.log("SYSTEM", f"--- Finished process_book for '{book.get('title')}' ---")

        final_bp_path = os.path.join(book_folder, "final_blueprint.json")
        final_bp = utils.load_json(final_bp_path) if os.path.exists(final_bp_path) else None
        # Guard: load_json returns None on a corrupt file — skip harvesting then.
        if final_bp:
            new_chars = final_bp.get('characters', [])

            if os.path.exists(target):
                current_bible = utils.load_json(target)

                existing_names = {c['name'].lower() for c in current_bible.get('characters', [])}
                for char in new_chars:
                    if char['name'].lower() not in existing_names:
                        # setdefault: the bible may not have a 'characters' key yet
                        # (plain indexing used to raise KeyError in that case).
                        current_bible.setdefault('characters', []).append(char)

                for b in current_bible.get('books', []):
                    if b.get('book_number') == book.get('book_number'):
                        b['title'] = final_bp['book_metadata'].get('title', b.get('title'))
                        b['plot_beats'] = final_bp.get('plot_beats', b.get('plot_beats'))
                        b['manual_instruction'] = final_bp.get('manual_instruction', b.get('manual_instruction'))
                        break

                with open(target, 'w') as f: json.dump(current_bible, f, indent=2)
                utils.log("SERIES", "Updated World Bible with new characters and plot data.")

            # Seed the next book's planner with this book's ending + cast.
            last_beat = final_bp.get('plot_beats', [])[-1] if final_bp.get('plot_beats') else "End of book."
            previous_context = f"PREVIOUS BOOK SUMMARY: {last_beat}\nCHARACTERS: {json.dumps(final_bp.get('characters', []))}"

    return
|
|
|
if __name__ == "__main__":
    # CLI entry point: optional first argument is the path to the world bible.
    cli_args = sys.argv[1:]
    run_generation(cli_args[0] if cli_args else None, interactive=True)
|