# Gradio app: merge uploaded JSON files, deduplicate entries by 'repo', and produce loras.json.
import gradio as gr
import json
import pandas as pd
from typing import List
import re


def clean_json_content(content: str) -> str:
    """Remove comments from a JSON string and normalize it into a JSON array."""
    # Drop full-line comments starting with '#'.
    content = re.sub(r'^\s*#.*$', '', content, flags=re.MULTILINE)

    # Collapse accidental double commas between objects, e.g. "}, ,{" -> "},{".
    content = re.sub(r'},\s*,\s*{', '},{', content)

    # Ensure the content is wrapped in a JSON array.
    content = content.strip()
    if not content.startswith('['):
        content = '[' + content
    if not content.endswith(']'):
        content = content + ']'

    # Remove a trailing comma before the closing bracket, e.g. "},]" -> "}]".
    content = re.sub(r'},\s*]', '}]', content)

    return content
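

# Illustrative sketch (not used by the app): given a string with a line comment
# and a trailing comma, clean_json_content normalizes it roughly like this:
#   clean_json_content('# my loras\n{"repo": "a/b"},')
#   -> '[{"repo": "a/b"}]'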


def process_json_files(files: List) -> tuple[str | None, str]:
    """Merge the uploaded JSON files, deduplicate by 'repo', and write loras.json."""
    try:
        all_data = []

        for file in files:
            try:
                # Gradio may pass a temp-file wrapper (with a .name attribute)
                # or a plain file path, depending on the version.
                file_path = file.name if hasattr(file, 'name') else str(file)
                with open(file_path, 'r', encoding='utf-8') as f:
                    content = f.read()

                cleaned_content = clean_json_content(content)

                try:
                    json_data = json.loads(cleaned_content)
                except json.JSONDecodeError as e:
                    return None, f"JSON parsing error: {str(e)}\nCleaned content:\n{cleaned_content}"

                # Wrap a single object in a list so it can be merged uniformly.
                if isinstance(json_data, dict):
                    json_data = [json_data]

                all_data.extend(json_data)
            except Exception as e:
                return None, f"Error while processing a file: {str(e)}"

        if not all_data:
            return None, "There is no data to process."

        # Deduplicate entries that share the same 'repo' value.
        df = pd.DataFrame(all_data)
        df_deduplicated = df.drop_duplicates(subset=['repo'])

        output_path = "loras.json"
        result_json = df_deduplicated.to_dict('records')

        with open(output_path, 'w', encoding='utf-8') as f:
            json.dump(result_json, f, ensure_ascii=False, indent=2)

        return output_path, f"Processed successfully: {len(all_data)} entries before deduplication, {len(df_deduplicated)} entries after."

    except Exception as e:
        return None, f"An error occurred during processing: {str(e)}"
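

# Standalone usage sketch (assumes a.json and b.json exist in the working
# directory; in the app, Gradio supplies the uploaded files instead):
#   output_path, message = process_json_files(["a.json", "b.json"])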


def create_example_file():
    """Write a small example JSON file used as a Gradio example input."""
    example_data = [
        {
            "image": "https://huggingface.co/strangerzonehf/Flux-Super-Realism-LoRA/resolve/main/images/1.png",
            "title": "Super Realism",
            "repo": "strangerzonehf/Flux-Super-Realism-LoRA",
            "weights": "super-realism.safetensors",
            "trigger_word": "Super Realism"
        },
        {
            "image": "https://huggingface.co/prithivMLmods/Flux-Dalle-Mix-LoRA/resolve/main/images/D3.png",
            "title": "Dalle Mix",
            "repo": "prithivMLmods/Flux-Dalle-Mix-LoRA",
            "weights": "dalle-mix.safetensors",
            "trigger_word": "dalle-mix"
        }
    ]

    example_path = "example_loras.json"

    with open(example_path, 'w', encoding='utf-8') as f:
        json.dump(example_data, f, ensure_ascii=False, indent=2)

    return example_path


iface = gr.Interface(
    fn=process_json_files,
    inputs=gr.File(file_count="multiple", label="Upload JSON files (multiple allowed)"),
    outputs=[
        gr.File(label="Download loras.json"),
        gr.Textbox(label="Processing result")
    ],
    title="JSON File Deduplication Tool",
    description="Generates a loras.json file deduplicated by the 'repo' value.\nJSON with comments or an incomplete JSON format can also be handled.",
    examples=[[create_example_file()]]
)


if __name__ == "__main__":
    iface.launch(share=True)