# logging.py
import csv
import os
import time
import uuid

from huggingface_hub import CommitScheduler

APP_VERSION = "0_3"
HF_DATASET_REPO = "LPX55/upscaler_logs"  # Change to your dataset repo
HF_TOKEN = os.environ.get("HUGGINGFACE_TOKEN")  # Make sure this is set in your environment

# Local folder that the CommitScheduler mirrors into the dataset repo.
LOG_DIR = "logs_" + APP_VERSION
IMAGE_DIR = os.path.join(LOG_DIR, "upscaler")
LOG_FILE = os.path.join(LOG_DIR, f"{int(time.time())}-logs.csv")
os.makedirs(IMAGE_DIR, exist_ok=True)

# Push the contents of LOG_DIR to the dataset repo every 5 minutes.
# allow_patterns are matched against paths relative to folder_path, so the image
# patterns must point at the "upscaler/" subfolder where the images are saved.
scheduler = CommitScheduler(
    repo_id=HF_DATASET_REPO,
    repo_type="dataset",
    folder_path=LOG_DIR,
    every=5,
    private=True,
    token=HF_TOKEN,
    allow_patterns=[
        "*.csv",
        "upscaler/*.png",
        "upscaler/*.webp",
        "upscaler/*.jpg",
        "upscaler/*.jpeg",
    ],
    path_in_repo="v" + APP_VERSION,
)


def log_params(
    prompt,
    scale,
    steps,
    controlnet_conditioning_scale,
    guidance_scale,
    seed,
    guidance_end,
    before_image,
    after_image,
    user=None,
):
    """Save the before/after images and append one row of parameters to the CSV log."""
    before_id = str(uuid.uuid4()) + "_before.png"
    after_id = str(uuid.uuid4()) + "_after.png"
    before_path = os.path.join(IMAGE_DIR, before_id)
    after_path = os.path.join(IMAGE_DIR, after_id)
    before_image.save(before_path)
    after_image.save(after_path)

    # Write the CSV header only when the file is created for the first time.
    is_new = not os.path.exists(LOG_FILE)
    # Hold the scheduler's lock so a scheduled commit doesn't run mid-write.
    with scheduler.lock:
        with open(LOG_FILE, "a", newline="") as f:
            writer = csv.writer(f)
            if is_new:
                writer.writerow([
                    "timestamp", "user", "prompt", "scale", "steps",
                    "controlnet_conditioning_scale", "guidance_scale",
                    "seed", "guidance_end", "before_image", "after_image",
                ])
            writer.writerow([
                time.strftime("%Y-%m-%dT%H:%M:%S"),
                user or "anonymous",
                prompt,
                scale,
                steps,
                controlnet_conditioning_scale,
                guidance_scale,
                seed,
                guidance_end,
                before_path,
                after_path,
            ])
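

# ---------------------------------------------------------------------------
# Hypothetical usage sketch (not part of the original module): a minimal local
# smoke test that logs one fake record using tiny Pillow placeholder images.
# It assumes HUGGINGFACE_TOKEN is set and that HF_DATASET_REPO is a dataset
# repo you can write to, since the CommitScheduler above is created at import
# time. The parameter values below are arbitrary examples.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from PIL import Image

    # Tiny placeholder images standing in for the real before/after outputs.
    before = Image.new("RGB", (64, 64), color="gray")
    after = Image.new("RGB", (128, 128), color="white")

    log_params(
        prompt="test prompt",
        scale=2,
        steps=20,
        controlnet_conditioning_scale=0.5,
        guidance_scale=5.0,
        seed=42,
        guidance_end=0.9,
        before_image=before,
        after_image=after,
        user="local-test",
    )
    print(f"Logged one row to {LOG_FILE}; images saved under {IMAGE_DIR}.")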