|
import argparse |
|
import os |
|
import queue |
|
import sqlite3 |
|
import threading |
|
|
|
import requests |
|
from rich.progress import BarColumn, Progress, TimeElapsedColumn, TimeRemainingColumn |
|
|
|
# Command-line configuration for the downloader script.
parser = argparse.ArgumentParser()

# Path to the SQLite database holding the `posts` table (id, file_url, file_ext).
parser.add_argument("--db_path", type=str, default="danbooru_metadata.db")

# Root directory images are written to, sharded into <post_id % 1000> subdirectories.
parser.add_argument("--out_dir", type=str, default="./image")

# Number of concurrent downloader worker threads.
parser.add_argument("--num_threads", type=int, default=16)


args = parser.parse_args()


# Module-level constants read by the producer/worker functions below.
DB_PATH = args.db_path

OUT_DIR = args.out_dir

NUM_THREADS = args.num_threads


# Bounded hand-off queue between the DB producer and the downloader threads.
# The maxsize of 64 applies back-pressure so the whole posts table is never
# buffered in memory at once.
task_queue = queue.Queue(64)
|
|
|
# File extensions accepted for download, in both lower- and upper-case exactly
# as they appear in the database's file_ext column.  A frozenset gives O(1)
# membership tests (the list form was scanned linearly per post) while keeping
# the accepted values identical.
IMAGE_EXTENSIONS = frozenset(
    variant
    for base in ("png", "jpg", "jpeg", "webp", "bmp")
    for variant in (base, base.upper())
)
|
|
|
|
|
def fetch_posts_to_queue():
    """Stream (id, file_url) pairs from the SQLite `posts` table into task_queue.

    Posts are skipped when the target file already exists on disk (resume
    support) or when file_url is missing / the extension is not an accepted
    image type.  Progress is reported with a rich progress bar.

    Fixes over the previous version: the rich Progress is now stopped when the
    loop ends, and the database connection is closed even if an exception
    (e.g. KeyboardInterrupt) interrupts the scan.
    """
    conn = sqlite3.connect(DB_PATH)
    try:
        cursor = conn.cursor()

        count = cursor.execute("SELECT COUNT(*) FROM posts").fetchone()[0]
        progress = Progress(
            "[progress.description]{task.description}",
            BarColumn(),
            "[progress.percentage]{task.percentage:>3.0f}%",
            TimeElapsedColumn(),
            TimeRemainingColumn(),
            "[green] [{task.completed} / {task.total}] [/]",
        )
        task = progress.add_task("Downloading", total=count)
        progress.start()
        try:
            print("Fetching posts")
            cursor.execute("SELECT id, file_url, file_ext FROM posts")
            # Iterating the cursor fetches rows lazily; combined with the
            # bounded queue this keeps memory flat for large tables.
            for post_id, file_url, file_ext in cursor:
                progress.update(task, advance=1)

                # Resume support: skip posts already downloaded into the
                # <post_id % 1000> shard directory.
                if os.path.exists(
                    os.path.join(OUT_DIR, f"{post_id % 1000:04d}", f"{post_id}.{file_ext}")
                ):
                    continue

                if file_url is not None and file_ext in IMAGE_EXTENSIONS:
                    # Blocks when the queue is full (back-pressure).
                    task_queue.put((post_id, file_url))
            print("All posts fetched")
        finally:
            # Stop the live progress display so the terminal is restored even
            # on error; the old code leaked a running Progress instance.
            progress.stop()
    finally:
        conn.close()
|
|
|
|
|
def download_image(post_id, url):
    """Download *url* and save it as OUT_DIR/<post_id % 1000>/<post_id>.<ext>.

    Network failures (connection errors, timeouts, HTTP error statuses) are
    logged and swallowed so that one bad URL cannot kill a worker thread.
    Filesystem errors are NOT caught here and propagate to the caller.
    """
    try:
        # A timeout is essential: without one a stalled server would hang the
        # worker thread forever.  (connect, read) timeouts in seconds; both
        # raise requests.Timeout, a subclass of RequestException.
        # The `with` block guarantees the streamed connection is released.
        with requests.get(url, stream=True, timeout=(10, 30)) as response:
            response.raise_for_status()

            # Shard files into 1000 buckets so no single directory grows huge.
            directory = os.path.join(OUT_DIR, f"{post_id % 1000:04d}")
            os.makedirs(directory, exist_ok=True)
            # Extension comes from the URL itself — NOTE(review): this may
            # differ from the DB's file_ext used by the resume check; confirm.
            ext = url.split(".")[-1]

            file_path = os.path.join(directory, f"{post_id}.{ext}")
            with open(file_path, "wb") as f:
                for chunk in response.iter_content(1024):
                    f.write(chunk)

    except requests.RequestException as e:
        print(f"Failed to download {url}: {e}")
|
|
|
|
|
def worker():
    """Worker thread: consume (post_id, url) tasks from task_queue forever.

    Note: task_queue.get() with no timeout BLOCKS instead of raising
    queue.Empty, so this loop never terminates on its own — the previous
    `except queue.Empty: break` was dead code.  The main thread is expected
    to run workers as daemons and use task_queue.join() for completion.

    task_done() is called in a finally block so a download that raises an
    unexpected exception (e.g. OSError while writing) can no longer leave
    task_queue.join() waiting forever.
    """
    while True:
        post_id, url = task_queue.get()
        try:
            download_image(post_id, url)
        except Exception as e:
            # Keep the worker alive on unexpected errors; network errors are
            # already handled inside download_image.
            print(f"Unexpected error for post {post_id}: {e}")
        finally:
            task_queue.task_done()
|
|
|
|
|
if __name__ == "__main__": |
|
|
|
|
|
|
|
threads = [] |
|
for _ in range(NUM_THREADS): |
|
thread = threading.Thread(target=worker) |
|
thread.start() |
|
threads.append(thread) |
|
|
|
try: |
|
fetch_posts_to_queue() |
|
|
|
|
|
task_queue.join() |
|
|
|
for thread in threads: |
|
thread.join() |
|
|
|
print("All downloads completed.") |
|
except KeyboardInterrupt: |
|
print("Download interrupted.") |
|
os._exit(1) |
|
|