Compare commits

..

2 Commits

2 changed files with 18 additions and 6 deletions

View File

@@ -549,6 +549,14 @@ def update_batch_stats(store: dict[str, Any], batch_meta: dict[str, Any]) -> Non
store.setdefault("meta", {})["updated_at"] = now_iso()
def atomic_write_json(path: Path, payload: dict[str, Any]) -> None:
    """Atomically write *payload* as JSON to *path*.

    Serializes to a sibling ``<name>.tmp`` file first and then renames it
    over the target, so an interruption can never leave *path* truncated or
    half-written (``Path.replace`` is an atomic rename on both POSIX and
    Windows when source and destination are on the same filesystem).

    Args:
        path: Destination file; missing parent directories are created.
        payload: JSON-serializable mapping to persist.

    Raises:
        TypeError: If *payload* contains values ``json`` cannot encode.
        OSError: On filesystem errors (permissions, disk full, ...).
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    tmp_path = path.with_suffix(path.suffix + ".tmp")
    try:
        tmp_path.write_text(
            json.dumps(payload, ensure_ascii=False, indent=2), encoding="utf-8"
        )
        # Atomic swap: readers observe either the old file or the new one,
        # never a partial write.
        tmp_path.replace(path)
    except BaseException:
        # Fix: the original leaked a stale/partial .tmp file whenever the
        # dump or rename failed (bad payload, disk full, KeyboardInterrupt).
        tmp_path.unlink(missing_ok=True)
        raise
def compute_batch_summary(results: list[DocumentEvaluation]) -> dict[str, Any]:
wins = {"LangChain": 0, "LlamaIndex": 0, "Tie": 0}
scores_lc: list[float] = []
@@ -660,9 +668,7 @@ def run_evaluation(doc_from: int, doc_to: int, mode: str) -> None:
batch_results.append(doc_result)
# Save incremental progress after each file/step
OUTPUT_JSON.write_text(
json.dumps(store, ensure_ascii=False, indent=2), encoding="utf-8"
)
atomic_write_json(OUTPUT_JSON, store)
print(" -> step saved")
summary = compute_batch_summary(batch_results)
@@ -674,9 +680,7 @@ def run_evaluation(doc_from: int, doc_to: int, mode: str) -> None:
"mode": mode,
}
update_batch_stats(store, batch_meta)
OUTPUT_JSON.write_text(
json.dumps(store, ensure_ascii=False, indent=2), encoding="utf-8"
)
atomic_write_json(OUTPUT_JSON, store)
print("\nBatch complete.")
print(json.dumps(summary, ensure_ascii=False, indent=2))

8
requirements.txt Normal file
View File

@@ -0,0 +1,8 @@
certifi==2026.2.25
charset-normalizer==3.4.5
dotenv==0.9.9  # NOTE(review): "dotenv" is an abandoned PyPI wrapper; python-dotenv (pinned below) is the maintained package — confirm this line is intentional
idna==3.11
python-dotenv==1.2.2
requests==2.32.5
urllib3==2.6.3
yadisk==3.4.0