import json
import os
import shutil
import sys
import time

import config
from rich.prompt import Confirm
from modules import ai, story, marketing, export, utils


def process_book(bp, folder, context="", resume=False, interactive=False):
    """Generate a single book end-to-end inside *folder*.

    Pipeline: blueprint -> event structure -> chapter plan -> chapter-by-chapter
    writing loop (with cumulative "Story So Far" summary and pacing checks)
    -> metadata harvest, marketing assets and export.

    Args:
        bp: Blueprint dict (book_metadata, length_settings, ...). May be
            replaced by a saved/enriched version during processing.
        folder: Output directory for this book's artifacts.
        context: Free-text context from previous books in the series.
        resume: If True, reuse any intermediate JSON artifacts found in folder.
        interactive: If True, ask the user to accept/regenerate each chapter.

    Side effects: writes many JSON artifacts into folder, maintains a
    ".in_progress" lock file, and calls out to AI models via the story/ai
    modules.
    """
    # Create lock file to indicate active processing; removed in `finally`
    # so crashes don't leave a stale lock (the resume logic in run_generation
    # keys off this file).
    lock_path = os.path.join(folder, ".in_progress")
    with open(lock_path, "w") as f:
        f.write("running")

    total_start = time.time()
    try:
        # 1. Check completion: final_blueprint.json is only written at the
        # very end, so its presence means this book is fully done.
        if resume and os.path.exists(os.path.join(folder, "final_blueprint.json")):
            utils.log("SYSTEM", f"Book in {folder} already finished. Skipping.")
            # Clean up zombie lock file if it exists
            if os.path.exists(lock_path):
                os.remove(lock_path)
            return

        # 2. Load or Create Blueprint
        bp_path = os.path.join(folder, "blueprint_initial.json")
        t_step = time.time()
        utils.update_progress(5)
        if resume and os.path.exists(bp_path):
            utils.log("RESUME", "Loading existing blueprint...")
            saved_bp = utils.load_json(bp_path)
            # Merge latest metadata from Bible (passed in bp) into saved
            # blueprint so user edits made between runs are not lost.
            if saved_bp:
                if 'book_metadata' in bp and 'book_metadata' in saved_bp:
                    for k in ['title', 'author', 'genre', 'target_audience',
                              'style', 'author_bio', 'author_details']:
                        if k in bp['book_metadata']:
                            saved_bp['book_metadata'][k] = bp['book_metadata'][k]
                if 'series_metadata' in bp:
                    saved_bp['series_metadata'] = bp['series_metadata']
                bp = saved_bp
                with open(bp_path, "w") as f:
                    json.dump(bp, f, indent=2)
        else:
            bp = story.enrich(bp, folder, context)
            with open(bp_path, "w") as f:
                json.dump(bp, f, indent=2)

        # Ensure Persona Exists (Auto-create if missing)
        if 'author_details' not in bp['book_metadata'] or not bp['book_metadata']['author_details']:
            bp['book_metadata']['author_details'] = story.create_initial_persona(bp, folder)
            with open(bp_path, "w") as f:
                json.dump(bp, f, indent=2)
        utils.log("TIMING", f"Blueprint Phase: {time.time() - t_step:.1f}s")

        # 3. Events (Plan & Expand)
        events_path = os.path.join(folder, "events.json")
        t_step = time.time()
        utils.update_progress(10)
        if resume and os.path.exists(events_path):
            utils.log("RESUME", "Loading existing events...")
            events = utils.load_json(events_path)
        else:
            events = story.plan_structure(bp, folder)
            depth = bp['length_settings']['depth']
            target_chaps = bp['length_settings']['chapters']
            for d in range(1, depth + 1):
                events = story.expand(events, d, target_chaps, bp, folder)
                time.sleep(1)  # small pause between expansion passes (rate limiting)
            with open(events_path, "w") as f:
                json.dump(events, f, indent=2)
        utils.log("TIMING", f"Structure & Expansion: {time.time() - t_step:.1f}s")

        # 4. Chapter Plan
        chapters_path = os.path.join(folder, "chapters.json")
        t_step = time.time()
        utils.update_progress(15)
        if resume and os.path.exists(chapters_path):
            utils.log("RESUME", "Loading existing chapter plan...")
            chapters = utils.load_json(chapters_path)
        else:
            chapters = story.create_chapter_plan(events, bp, folder)
            with open(chapters_path, "w") as f:
                json.dump(chapters, f, indent=2)
        utils.log("TIMING", f"Chapter Planning: {time.time() - t_step:.1f}s")

        # 5. Writing Loop
        ms_path = os.path.join(folder, "manuscript.json")
        ms = utils.load_json(ms_path) if (resume and os.path.exists(ms_path)) else []

        # Load Tracking (events / characters / content warnings persisted per chapter)
        events_track_path = os.path.join(folder, "tracking_events.json")
        chars_track_path = os.path.join(folder, "tracking_characters.json")
        warn_track_path = os.path.join(folder, "tracking_warnings.json")
        tracking = {"events": [], "characters": {}, "content_warnings": []}
        if resume:
            if os.path.exists(events_track_path):
                tracking['events'] = utils.load_json(events_track_path)
            if os.path.exists(chars_track_path):
                tracking['characters'] = utils.load_json(chars_track_path)
            if os.path.exists(warn_track_path):
                tracking['content_warnings'] = utils.load_json(warn_track_path)

        summary = "The story begins."
        if ms:
            # Generate summary from ALL written chapters to maintain continuity
            utils.log("RESUME", "Rebuilding 'Story So Far' from existing manuscript...")
            try:
                combined_text = "\n".join([f"Chapter {c['num']}: {c['content']}" for c in ms])
                resp_sum = ai.model_writer.generate_content(f"Create a detailed, cumulative 'Story So Far' summary from the following text. Use dense, factual bullet points. Focus on character meetings, relationships, and known information:\n{combined_text}")
                utils.log_usage(folder, "writer-flash", resp_sum.usage_metadata)
                summary = resp_sum.text
            except Exception:
                # Best-effort: fall back to a neutral summary rather than abort.
                # (Was a bare `except:`; narrowed so Ctrl-C still interrupts.)
                summary = "The story continues."

        t_step = time.time()
        session_chapters = 0
        session_time = 0
        i = len(ms)
        while i < len(chapters):
            ch_start = time.time()
            ch = chapters[i]

            # Check for stop signal from Web UI
            run_dir = os.path.dirname(folder)
            if os.path.exists(os.path.join(run_dir, ".stop")):
                utils.log("SYSTEM", "🛑 Stop signal detected. Aborting generation.")
                break

            # Robust Resume: Check if this specific chapter number is already in the manuscript
            # (Handles cases where plan changed or ms is out of sync with index)
            if any(c.get('num') == ch['chapter_number'] for c in ms):
                i += 1
                continue

            # Pass previous chapter content for continuity if available
            prev_content = ms[-1]['content'] if ms else None

            while True:
                try:
                    txt = story.write_chapter(ch, bp, folder, summary, tracking, prev_content)
                except Exception as e:
                    utils.log("SYSTEM", f"Chapter generation failed: {e}")
                    if interactive:
                        if Confirm.ask("Generation failed (quality/error). Retry?", default=True):
                            continue
                    # Bare `raise` preserves the original traceback
                    # (previously `raise e`, which resets it).
                    raise
                if interactive:
                    print(f"\n--- Chapter {ch['chapter_number']} Preview ---\n{txt[:800]}...\n-------------------------------")
                    if Confirm.ask(f"Accept Chapter {ch['chapter_number']}?", default=True):
                        break
                    else:
                        utils.log("SYSTEM", "Regenerating chapter...")
                else:
                    break

            # Refine Persona to match the actual output (Consistency Loop),
            # every third chapter to bound cost.
            if (i == 0 or i % 3 == 0) and txt:
                bp['book_metadata']['author_details'] = story.refine_persona(bp, txt, folder)
                with open(bp_path, "w") as f:
                    json.dump(bp, f, indent=2)

            # Look ahead for context to ensure relevant details are captured
            next_info = ""
            if i + 1 < len(chapters):
                next_ch = chapters[i + 1]
                next_info = f"\nUPCOMING CONTEXT (Prioritize details relevant to this): {next_ch.get('title')} - {json.dumps(next_ch.get('beats', []))}"

            try:
                update_prompt = f"""
Update the 'Story So Far' summary to include the events of this new chapter.
STYLE: Dense, factual, chronological bullet points. Avoid narrative prose.
GOAL: Maintain a perfect memory of the plot for continuity.
CRITICAL INSTRUCTIONS:
1. CUMULATIVE: Do NOT remove old events. Append and integrate new information.
2. TRACKING: Explicitly note who met whom, who knows what, and current locations.
3. RELEVANCE: Ensure details needed for the UPCOMING CONTEXT are preserved.
CURRENT STORY SO FAR:
{summary}
NEW CHAPTER CONTENT:
{txt}
{next_info}
"""
                resp_sum = ai.model_writer.generate_content(update_prompt)
                utils.log_usage(folder, "writer-flash", resp_sum.usage_metadata)
                summary = resp_sum.text
            except Exception:
                # First fallback: ask for a bare plot-point summary and append it.
                try:
                    resp_fallback = ai.model_writer.generate_content(f"Summarize plot points:\n{txt}")
                    utils.log_usage(folder, "writer-flash", resp_fallback.usage_metadata)
                    summary += f"\n\nChapter {ch['chapter_number']}: " + resp_fallback.text
                except Exception:
                    # Last resort: keep going with a placeholder marker.
                    summary += f"\n\nChapter {ch['chapter_number']}: [Content processed]"

            ms.append({'num': ch['chapter_number'], 'title': ch['title'],
                       'pov_character': ch.get('pov_character'), 'content': txt})
            with open(ms_path, "w") as f:
                json.dump(ms, f, indent=2)

            # Update Tracking (persisted after every chapter so resume is lossless)
            tracking = story.update_tracking(folder, ch['chapter_number'], txt, tracking)
            with open(events_track_path, "w") as f:
                json.dump(tracking['events'], f, indent=2)
            with open(chars_track_path, "w") as f:
                json.dump(tracking['characters'], f, indent=2)
            with open(warn_track_path, "w") as f:
                json.dump(tracking.get('content_warnings', []), f, indent=2)

            # --- DYNAMIC PACING CHECK ---
            remaining = chapters[i + 1:]
            if remaining:
                pacing = story.check_pacing(bp, summary, txt, ch, remaining, folder)
                if pacing and pacing.get('status') == 'add_bridge':
                    new_data = pacing.get('new_chapter', {})
                    new_ch = {
                        "chapter_number": ch['chapter_number'] + 1,
                        "title": new_data.get('title', 'Bridge Chapter'),
                        "pov_character": new_data.get('pov_character', ch.get('pov_character')),
                        "pacing": "Slow",
                        "estimated_words": 1500,
                        "beats": new_data.get('beats', [])
                    }
                    chapters.insert(i + 1, new_ch)
                    # Renumber subsequent chapters
                    for k in range(i + 1, len(chapters)):
                        chapters[k]['chapter_number'] = k + 1
                    with open(chapters_path, "w") as f:
                        json.dump(chapters, f, indent=2)
                    utils.log("ARCHITECT", f" -> ⚠️ Pacing Intervention: Added bridge chapter '{new_ch['title']}' to fix rushing.")
                elif pacing and pacing.get('status') == 'cut_next':
                    removed = chapters.pop(i + 1)
                    # Renumber subsequent chapters
                    for k in range(i + 1, len(chapters)):
                        chapters[k]['chapter_number'] = k + 1
                    with open(chapters_path, "w") as f:
                        json.dump(chapters, f, indent=2)
                    utils.log("ARCHITECT", f" -> ⚠️ Pacing Intervention: Removed redundant chapter '{removed['title']}'.")

            # Increment loop
            i += 1
            duration = time.time() - ch_start
            session_chapters += 1
            session_time += duration
            avg_time = session_time / session_chapters
            # `i` was already incremented, so remaining chapters = len - i
            # (previously len - (i + 1), which undercounted by one chapter).
            eta = avg_time * (len(chapters) - i)
            # Calculate Progress (15% to 90%)
            prog = 15 + int((i / len(chapters)) * 75)
            utils.update_progress(prog)
            utils.log("TIMING", f" -> Chapter {ch['chapter_number']} finished in {duration:.1f}s | Avg: {avg_time:.1f}s | ETA: {int(eta//60)}m {int(eta%60)}s")

        utils.log("TIMING", f"Writing Phase: {time.time() - t_step:.1f}s")

        # Harvest final metadata back into the blueprint and mark completion.
        t_step = time.time()
        utils.update_progress(92)
        bp = story.harvest_metadata(bp, folder, ms)
        with open(os.path.join(folder, "final_blueprint.json"), "w") as f:
            json.dump(bp, f, indent=2)

        # Create Assets
        utils.update_progress(95)
        marketing.create_marketing_assets(bp, folder, tracking, interactive=interactive)
        # Update Persona
        story.update_persona_sample(bp, folder)
        utils.update_progress(98)
        export.compile_files(bp, ms, folder)
        utils.log("TIMING", f"Post-Processing: {time.time() - t_step:.1f}s")
        utils.log("SYSTEM", f"Book Finished. Total Time: {time.time() - total_start:.1f}s")
    finally:
        # Remove lock file on success or failure
        if os.path.exists(lock_path):
            os.remove(lock_path)
# --- 6. ENTRY POINT ---
def run_generation(target=None, specific_run_id=None, interactive=False):
    """Drive series generation from a World Bible JSON file.

    Loads the Bible at *target* (falls back to ``config.DEFAULT_BLUEPRINT``),
    resolves/creates a run directory under ``<project>/runs``, then processes
    each book in order via :func:`process_book`, feeding a summary of each
    finished book into the next one as context.

    Args:
        target: Path to the Bible JSON. Defaults to the configured blueprint.
        specific_run_id: Web/worker mode — use ``runs/run_<id>`` directly and
            always resume; no prompts.
        interactive: CLI mode — prompt before resuming/deleting old runs and
            during chapter generation.

    Side effects: creates run/book folders, writes updated characters and
    book metadata back into the Bible file after each book.
    """
    ai.init_models()
    if not target:
        target = config.DEFAULT_BLUEPRINT
    data = utils.load_json(target)
    if not data:
        utils.log("SYSTEM", f"Could not load {target}")
        return

    # --- BIBLE FORMAT ---
    utils.log("SYSTEM", "Starting Series Generation...")

    # Determine Run Directory: projects/{Project}/runs/run_X
    project_dir = os.path.dirname(os.path.abspath(target))
    runs_base = os.path.join(project_dir, "runs")
    run_dir = None
    resume_mode = False

    if specific_run_id:
        # WEB/WORKER MODE: Non-interactive, specific ID
        run_dir = os.path.join(runs_base, f"run_{specific_run_id}")
        if not os.path.exists(run_dir):
            os.makedirs(run_dir)
        resume_mode = True  # Always try to resume if files exist in this specific run
    else:
        # CLI MODE: Interactive checks
        latest_run = utils.get_latest_run_folder(runs_base)
        if latest_run:
            # A ".in_progress" lock anywhere under the run means it was interrupted.
            has_lock = False
            for root, dirs, files in os.walk(latest_run):
                if ".in_progress" in files:
                    has_lock = True
                    break
            if has_lock:
                if Confirm.ask(f"Found incomplete run '{os.path.basename(latest_run)}'. Resume generation?", default=True):
                    run_dir = latest_run
                    resume_mode = True
                elif Confirm.ask(f"Delete artifacts in '{os.path.basename(latest_run)}' and start over?", default=False):
                    shutil.rmtree(latest_run)
                    os.makedirs(latest_run)
                    run_dir = latest_run
        if not run_dir:
            run_dir = utils.get_run_folder(runs_base)

    utils.log("SYSTEM", f"Run Directory: {run_dir}")
    previous_context = ""

    for i, book in enumerate(data['books']):
        utils.log("SERIES", f"Processing Book {book.get('book_number')}: {book.get('title')}")

        # Check for stop signal at book level
        if os.path.exists(os.path.join(run_dir, ".stop")):
            utils.log("SYSTEM", "🛑 Stop signal detected. Aborting series generation.")
            break

        # Adapter: Bible -> Blueprint
        meta = data['project_metadata']
        bp = {
            "book_metadata": {
                "title": book.get('title'),
                "filename": book.get('filename'),
                "author": meta.get('author'),
                "genre": meta.get('genre'),
                "target_audience": meta.get('target_audience'),
                "style": meta.get('style', {}),
                "author_details": meta.get('author_details', {}),
                "author_bio": meta.get('author_bio', ''),
            },
            "length_settings": meta.get('length_settings', {}),
            "characters": data.get('characters', []),
            "manual_instruction": book.get('manual_instruction', ''),
            "plot_beats": book.get('plot_beats', []),
            "series_metadata": {
                "is_series": meta.get('is_series', False),
                "series_title": meta.get('title', ''),
                "book_number": book.get('book_number', i + 1),
                "total_books": len(data['books'])
            }
        }

        # Create Book Subfolder
        safe_title = utils.sanitize_filename(book.get('title', f"Book_{i+1}"))
        book_folder = os.path.join(run_dir, f"Book_{book.get('book_number', i+1)}_{safe_title}")
        os.makedirs(book_folder, exist_ok=True)

        # Process
        process_book(bp, book_folder, context=previous_context,
                     resume=resume_mode, interactive=interactive)

        # Update Context for next book
        final_bp_path = os.path.join(book_folder, "final_blueprint.json")
        if os.path.exists(final_bp_path):
            final_bp = utils.load_json(final_bp_path)

            # --- Update World Bible with new characters ---
            # This ensures future books know about characters invented in this book
            new_chars = final_bp.get('characters', [])

            # RELOAD BIBLE to avoid race conditions (User might have edited it in UI)
            if os.path.exists(target):
                current_bible = utils.load_json(target)
                # 1. Merge New Characters. setdefault guards against a Bible
                # with no 'characters' key (reads above already tolerate that),
                # and we grow existing_names as we go so duplicate names within
                # new_chars are not appended twice.
                existing_names = {c['name'].lower() for c in current_bible.get('characters', [])}
                for char in new_chars:
                    name_key = char['name'].lower()
                    if name_key not in existing_names:
                        current_bible.setdefault('characters', []).append(char)
                        existing_names.add(name_key)
                # 2. Sync Generated Book Metadata (Title, Beats) back to Bible
                for b in current_bible.get('books', []):
                    if b.get('book_number') == book.get('book_number'):
                        b['title'] = final_bp['book_metadata'].get('title', b.get('title'))
                        b['plot_beats'] = final_bp.get('plot_beats', b.get('plot_beats'))
                        b['manual_instruction'] = final_bp.get('manual_instruction', b.get('manual_instruction'))
                        break
                with open(target, 'w') as f:
                    json.dump(current_bible, f, indent=2)
                utils.log("SERIES", "Updated World Bible with new characters and plot data.")

            last_beat = final_bp.get('plot_beats', [])[-1] if final_bp.get('plot_beats') else "End of book."
            previous_context = f"PREVIOUS BOOK SUMMARY: {last_beat}\nCHARACTERS: {json.dumps(final_bp.get('characters', []))}"


if __name__ == "__main__":
    target_arg = sys.argv[1] if len(sys.argv) > 1 else None
    run_generation(target_arg, interactive=True)