#!/usr/bin/env python3
"""
pipeline.py
───────────────────────────────────────────────────────────────
Translation + render pipeline

Flow per page:
 1) translate_manga_text() -> output.txt + bubbles.json (+ debug_clusters.png if DEBUG)
 2) render_translations()  -> page_translated.png
 3) Pack CBZ with originals + rendered pages + text outputs

Folder structure:

<CHAPTER_DIR>/
├── 000.png
├── 001.png
└── translated/
    ├── 000/
    │   ├── output.txt
    │   ├── bubbles.json
    │   ├── page_translated.png
    │   └── debug_clusters.png   (optional)
    ├── 001/
    │   └── ...
    └── ...

CBZ:
 - pages/<original pages>
 - rendered/<page_stem>_translated.png
 - translations/<page_stem>_output.txt
"""
# Standard library only; the translator/renderer modules are loaded dynamically.
import os
import sys
import zipfile
import importlib.util

from pathlib import Path
# ─────────────────────────────────────────────
# CONFIG
# ─────────────────────────────────────────────
# Input chapter directory (flat list of page images) and output archive path.
CHAPTER_DIR = "/Users/guillemhernandezsola/Downloads/Dandadan_059_2022_Digital_1r0n"
OUTPUT_CBZ = "/Users/guillemhernandezsola/Downloads/Dandadan_059_2022_Digital_1r0n_translated.cbz"

# Language pair forwarded to translate_manga_text().
SOURCE_LANG = "en"
TARGET_LANG = "ca"

# translator (NEW signature-compatible)
# All of these are forwarded verbatim as keyword arguments to
# translate_manga_text(); exact semantics live in manga-translator.py.
CONFIDENCE_THRESHOLD = 0.10   # forwarded as confidence_threshold
MIN_TEXT_LENGTH = 1           # forwarded as min_text_length
GAP_PX = "auto"  # was cluster/proximity in old version
FILTER_SFX = True             # forwarded as filter_sound_effects
QUALITY_THRESHOLD = 0.50      # forwarded as quality_threshold
READING_MODE = "ltr"          # forwarded as reading_mode
DEBUG = True                  # presumably also emits debug_clusters.png — see docstring

# renderer
RENDER_ENABLED = True                        # skip render step entirely when False
RENDER_OUTPUT_NAME = "page_translated.png"   # filename inside each page workdir

# optional custom font list for renderer
FONT_CANDIDATES = [
    "fonts/ComicNeue-Regular.ttf",
    "fonts/ComicRelief-Regular.ttf"
]
# ─────────────────────────────────────────────
# DYNAMIC MODULE LOADER
# ─────────────────────────────────────────────
def load_module(name, filepath):
    """Import the Python source file *filepath* and return it as module *name*.

    The module is registered in ``sys.modules`` **before** execution — the
    pattern recommended by the importlib docs — so code inside the file that
    looks itself up by name (dataclasses, pickle, ``sys.modules[__name__]``)
    resolves correctly.

    Raises:
        FileNotFoundError: if an import spec cannot be built for *filepath*.
    """
    spec = importlib.util.spec_from_file_location(name, filepath)
    if spec is None or spec.loader is None:
        raise FileNotFoundError(f"Cannot load spec for {filepath}")
    module = importlib.util.module_from_spec(spec)
    sys.modules[name] = module
    try:
        spec.loader.exec_module(module)
    except BaseException:
        # Don't leave a half-initialized module visible after a failed import.
        sys.modules.pop(name, None)
        raise
    return module
|
# ─────────────────────────────────────────────
# HELPERS
# ─────────────────────────────────────────────
def sorted_pages(chapter_dir):
    """Return the chapter's page image files, ordered by filename stem."""
    image_suffixes = (".jpg", ".jpeg", ".png", ".webp")
    pages = []
    for entry in Path(chapter_dir).iterdir():
        # Keep regular files with a recognised image extension only.
        if entry.is_file() and entry.suffix.lower() in image_suffixes:
            pages.append(entry)
    pages.sort(key=lambda entry: entry.stem)
    return pages
|
def make_page_workdir(chapter_dir, page_stem):
    """Create (if missing) and return ``<chapter_dir>/translated/<page_stem>``."""
    target = Path(chapter_dir).joinpath("translated", page_stem)
    # Idempotent: safe to call again for a page that was already processed.
    target.mkdir(parents=True, exist_ok=True)
    return target
|
def pack_cbz(chapter_dir, translated_dir, output_cbz):
    """Bundle originals, rendered pages, and text outputs into a CBZ archive.

    Archive layout (see module docstring):
        pages/<original page images>
        rendered/<page_stem>_translated.png
        translations/<page_stem>_output.txt

    Uses ZIP_STORED (no compression) since page images are already compressed.
    Prints a warning and writes nothing when no original pages exist.
    """
    # Reuse the shared discovery/ordering helper instead of duplicating the
    # extension list and sort key here, and bail out before any rglob work.
    pages = sorted_pages(chapter_dir)
    if not pages:
        print("⚠️ No original pages found — CBZ not created.")
        return

    # Per-page outputs live in translated/<page_stem>/, so the parent
    # directory name is the page stem used in the archive names.
    txts = sorted(
        translated_dir.rglob("output.txt"),
        key=lambda p: p.parent.name
    )
    rendered = sorted(
        translated_dir.rglob(RENDER_OUTPUT_NAME),
        key=lambda p: p.parent.name
    )

    with zipfile.ZipFile(output_cbz, "w", compression=zipfile.ZIP_STORED) as zf:
        # original pages
        for img in pages:
            arcname = f"pages/{img.name}"
            zf.write(img, arcname)
            print(f" 🖼 {arcname}")

        # rendered pages
        for rp in rendered:
            arcname = f"rendered/{rp.parent.name}_translated.png"
            zf.write(rp, arcname)
            print(f" 🎨 {arcname}")

        # text outputs
        for txt in txts:
            arcname = f"translations/{txt.parent.name}_output.txt"
            zf.write(txt, arcname)
            print(f" 📄 {arcname}")

    print(
        f"\n✅ CBZ saved → {output_cbz} "
        f"({len(pages)} original, {len(rendered)} rendered, {len(txts)} text)"
    )
|
# ─────────────────────────────────────────────
# PER-PAGE PIPELINE
# ─────────────────────────────────────────────
def process_page(page_path, workdir, translator_module, renderer_module):
    """
    Runs translator + renderer for one page.
    All generated files are written inside workdir.
    """
    divider = "─" * 70
    print(f"\n{divider}")
    print(f"PAGE: {page_path.name}")
    print(f"{divider}")

    previous_cwd = os.getcwd()
    try:
        # Both tools write relative filenames, so run them from the workdir.
        os.chdir(workdir)

        # Resolve after the chdir, matching where the tools are invoked from.
        resolved_page = str(page_path.resolve())

        # 1) translate
        translator_module.translate_manga_text(
            image_path=resolved_page,
            source_lang=SOURCE_LANG,
            target_lang=TARGET_LANG,
            confidence_threshold=CONFIDENCE_THRESHOLD,
            min_text_length=MIN_TEXT_LENGTH,
            gap_px=GAP_PX,
            filter_sound_effects=FILTER_SFX,
            quality_threshold=QUALITY_THRESHOLD,
            export_to_file="output.txt",
            export_bubbles_to="bubbles.json",
            reading_mode=READING_MODE,
            debug=DEBUG
        )
        print(" ✅ translator done")

        # 2) render
        if RENDER_ENABLED:
            renderer_module.render_translations(
                input_image=resolved_page,
                output_image=RENDER_OUTPUT_NAME,
                translations_file="output.txt",
                bubbles_file="bubbles.json",
                font_candidates=FONT_CANDIDATES
            )
            print(" ✅ renderer done")

        return True

    except Exception as e:
        # Best-effort per page: report and let the caller tally the failure.
        print(f" ❌ Failed: {e}")
        return False

    finally:
        os.chdir(previous_cwd)
|
# ─────────────────────────────────────────────
# MAIN
# ─────────────────────────────────────────────
def _load_or_exit(module_name, filename):
    """Load one pipeline module via load_module(), or print and exit(1)."""
    try:
        return load_module(module_name, filename)
    except Exception as e:
        print(f"❌ Could not load {filename}: {e}")
        sys.exit(1)


def main():
    """Run the full pipeline: load tools, process every page, pack the CBZ."""
    print("Loading modules...")

    # Factored the two duplicated try/except loader blocks into _load_or_exit.
    translator = _load_or_exit("manga_translator", "manga-translator.py")
    renderer = _load_or_exit("manga_renderer", "manga-renderer.py")

    pages = sorted_pages(CHAPTER_DIR)
    if not pages:
        print(f"❌ No images found in: {CHAPTER_DIR}")
        sys.exit(1)

    print(f"\n📖 Chapter : {CHAPTER_DIR}")
    print(f" Pages : {len(pages)}")
    print(f" Source : {SOURCE_LANG} → Target: {TARGET_LANG}")
    print(f" Render : {'ON' if RENDER_ENABLED else 'OFF'}\n")

    translated_dir = Path(CHAPTER_DIR) / "translated"
    succeeded = []
    failed = []

    for i, page_path in enumerate(pages, start=1):
        print(f"[{i}/{len(pages)}] {page_path.name}")
        workdir = make_page_workdir(CHAPTER_DIR, page_path.stem)
        ok = process_page(page_path, workdir, translator, renderer)
        if ok:
            succeeded.append(page_path.name)
        else:
            failed.append(page_path.name)

    print(f"\n{'═' * 70}")
    print("PIPELINE COMPLETE")
    print(f"✅ {len(succeeded)} page(s) succeeded")
    if failed:
        print(f"❌ {len(failed)} page(s) failed:")
        for f in failed:
            print(f" • {f}")
    print(f"{'═' * 70}\n")

    print("Packing CBZ...")
    pack_cbz(CHAPTER_DIR, translated_dir, OUTPUT_CBZ)


if __name__ == "__main__":
    main()
|