# danbooru_metadata / download_images.py
# fix(download): check local file correctly (commit a1e7b26, Jannchie)
import argparse
import os
import queue
import sqlite3
import threading
import requests
from rich.progress import BarColumn, Progress, TimeElapsedColumn, TimeRemainingColumn
# Command-line configuration: metadata database path, image output directory,
# and number of concurrent download threads.
parser = argparse.ArgumentParser()
parser.add_argument("--db_path", type=str, default="danbooru_metadata.db")
parser.add_argument("--out_dir", type=str, default="./image")
parser.add_argument("--num_threads", type=int, default=16)
args = parser.parse_args()

DB_PATH = args.db_path
OUT_DIR = args.out_dir
NUM_THREADS = args.num_threads

# Bounded queue of (post_id, file_url) tasks. The bound (64) caps memory use
# and applies back-pressure on the producer when workers fall behind.
task_queue = queue.Queue(64)

# File extensions accepted as images. Both lower- and upper-case variants are
# listed because the database stores extensions verbatim. A frozenset gives
# O(1) membership tests (the original list scanned linearly) while keeping
# the `ext in IMAGE_EXTENSIONS` check behaviorally identical.
IMAGE_EXTENSIONS = frozenset(
    variant
    for base in ("png", "jpg", "jpeg", "webp", "bmp")
    for variant in (base, base.upper())
)
def fetch_posts_to_queue():
    """Read every post from the SQLite database and enqueue those needing download.

    For each ``posts`` row (id, file_url, file_ext), the post is skipped when
    the target file already exists under ``OUT_DIR`` or when it lacks a URL /
    image extension; otherwise ``(post_id, file_url)`` is put on
    ``task_queue``. The put blocks when the queue is full, which throttles
    this producer to the workers' pace.
    """
    conn = sqlite3.connect(DB_PATH)
    try:
        cursor = conn.cursor()
        total = cursor.execute("SELECT COUNT(*) FROM posts").fetchone()[0]
        progress = Progress(
            "[progress.description]{task.description}",
            BarColumn(),
            "[progress.percentage]{task.percentage:>3.0f}%",
            TimeElapsedColumn(),
            TimeRemainingColumn(),
            "[green] [{task.completed} / {task.total}] [/]",
        )
        task = progress.add_task("Downloading", total=total)
        progress.start()
        try:
            print("Fetching posts")
            cursor.execute("SELECT id, file_url, file_ext FROM posts")
            # Iterate the cursor directly instead of a manual fetchone()
            # loop, and advance the bar once per row actually read (the
            # original advanced before fetching, over-counting by one on
            # the terminating iteration).
            for post_id, file_url, file_ext in cursor:
                progress.update(task, advance=1)
                local_path = os.path.join(
                    OUT_DIR, f"{post_id % 1000:04d}", f"{post_id}.{file_ext}"
                )
                # Skip files already downloaded on a previous run.
                if os.path.exists(local_path):
                    continue
                # Only enqueue rows that have a URL and an image extension.
                if file_url is not None and file_ext in IMAGE_EXTENSIONS:
                    task_queue.put((post_id, file_url))
            print("All posts fetched")
        finally:
            # The original never stopped the progress display; stop it even
            # on error so the terminal is left in a sane state.
            progress.stop()
    finally:
        # The original leaked the connection on any exception before close().
        conn.close()
def download_image(post_id, url):
    """Download *url* and save it as ``OUT_DIR/<post_id % 1000>/<post_id>.<ext>``.

    Network failures are printed and swallowed so one bad URL does not kill
    the calling worker thread.
    """
    try:
        # stream=True avoids loading the whole file into memory; timeout=30
        # prevents hanging forever on a dead server (the original had no
        # timeout). The context manager releases the connection even if
        # writing to disk fails — the original never closed the streamed
        # response.
        with requests.get(url, stream=True, timeout=30) as response:
            response.raise_for_status()
            # Shard files into 1000 sub-directories keyed on post_id so no
            # single directory grows unbounded.
            directory = os.path.join(OUT_DIR, f"{post_id % 1000:04d}")
            os.makedirs(directory, exist_ok=True)
            # NOTE(review): the extension is taken from the URL here, while
            # fetch_posts_to_queue checks for an existing file using the
            # DB's file_ext column — if the two ever disagree, files get
            # re-downloaded on every run. TODO: confirm they always match.
            ext = url.split(".")[-1]
            file_path = os.path.join(directory, f"{post_id}.{ext}")
            with open(file_path, "wb") as f:
                for chunk in response.iter_content(1024):
                    f.write(chunk)
    except requests.RequestException as e:
        print(f"Failed to download {url}: {e}")
def worker():
    """Consume ``(post_id, url)`` tasks from ``task_queue`` until it stays empty.

    The original called ``task_queue.get()`` with no timeout, so
    ``queue.Empty`` could never be raised and the thread blocked forever once
    the producer finished — the main thread's ``thread.join()`` then hung. A
    timeout makes the exit path reachable: once the queue has been idle for
    that long, the worker assumes the producer is done and returns.
    """
    while True:
        try:
            post_id, url = task_queue.get(timeout=10)
        except queue.Empty:
            break  # queue idle: producer is presumed finished
        try:
            download_image(post_id, url)
        finally:
            # Always mark the task done, even if download_image raises
            # something unexpected, so task_queue.join() cannot deadlock.
            task_queue.task_done()
if __name__ == "__main__":
    # Populate the task queue
    # Create and start worker threads (consumers); the producer runs on the
    # main thread below so queue back-pressure throttles the DB scan.
    threads = []
    for _ in range(NUM_THREADS):
        thread = threading.Thread(target=worker)
        thread.start()
        threads.append(thread)
    # Wait for all threads to finish
    # NOTE(review): clean shutdown relies on worker() eventually hitting its
    # queue.Empty branch so the joins below can return — verify that workers
    # can actually exit once the queue is drained, otherwise thread.join()
    # blocks forever after the last download.
    try:
        fetch_posts_to_queue()
        # Ensure all tasks are done
        task_queue.join()
        for thread in threads:
            thread.join()
        print("All downloads completed.")
    except KeyboardInterrupt:
        print("Download interrupted.")
        # Hard exit: worker threads are non-daemon and may be blocked in
        # get(), so a normal interpreter shutdown could hang waiting on them.
        os._exit(1)