%%capture
!pip install transformers

from transformers import pipeline  # available for local inference; not used by the API calls below

from google.colab import files

# Step 1: Upload audio files from the local machine
uploaded = files.upload()
print(f"Uploaded files: {list(uploaded.keys())}")

from google.colab import userdata

# Read the Hugging Face token stored as a Colab secret named 'key'
my_key = userdata.get('key')

import requests

API_URL = "https://api-inference.huggingface.co/models/openai/whisper-large-v3-turbo"
headers = {"Authorization": f"Bearer {my_key}"}

def query(filename):
    """Send one audio file to the Whisper Inference API and return the parsed JSON."""
    print("Uploaded filename:", filename)
    with open(filename, "rb") as f:
        data = f.read()
    response = requests.post(API_URL, headers=headers, data=data)
    return response.json()

# Step 2: Send each uploaded file to the API
results = {}
for filename, file_data in uploaded.items():
    # Save the file locally so query() can read it back
    with open(filename, "wb") as f:
        f.write(file_data)
    print(f"Sending {filename} to API...")
    output = query(filename)
    # Store the result
    results[filename] = output

# Step 3: Print results
for file, result in results.items():
    print(f"\nResults for {file}:\n{result}")
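
# The raw JSON printed above typically carries the transcription in a "text" field,
# and the serverless Inference API may instead answer with an "error" payload while
# the hosted model is still loading. A minimal sketch of a retry-and-extract helper
# follows; query_with_retry and its parameters are illustrative names, not part of
# the Hugging Face API.
import time

def query_with_retry(filename, max_attempts=3, wait_seconds=20):
    """Retry query() while the hosted model warms up, then return the transcription text."""
    for attempt in range(max_attempts):
        output = query(filename)
        # A loading model reports an "error" (often with an "estimated_time") instead of "text"
        if isinstance(output, dict) and "error" in output:
            print(f"Attempt {attempt + 1}: {output['error']} -- retrying in {wait_seconds}s")
            time.sleep(wait_seconds)
            continue
        if isinstance(output, dict):
            return output.get("text", output)
        return output
    return None

# Example: transcribe the first uploaded file once the model is ready
# first_file = list(uploaded.keys())[0]
# print(query_with_retry(first_file))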