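"""Smoke test for the CSM-1B speech model.

Logs in to the Hugging Face Hub, loads CSM-1B on GPU or CPU, generates a short
test utterance, and writes it to test_output.wav.
"""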
import os

import torch
import torchaudio

# import spaces
from generator import Segment, load_csm_1b
from huggingface_hub import login

def login_huggingface():
    """Log in to the Hugging Face Hub using a token from the environment or user input."""
    hf_token = os.environ.get("HF_TOKEN")

    if not hf_token:
        print("HF_TOKEN not found in environment variables.")
        hf_token = input("Please enter your Hugging Face token: ")

    if hf_token:
        print("Logging in to Hugging Face Hub...")
        login(token=hf_token)
        print("Login successful!")
        return True
    else:
        print("No token provided. Some models may not be accessible.")
        return False

# @spaces.GPU
def generate_test_audio(text, speaker_id, device):
    """Generate test audio (runs on ZeroGPU only when the @spaces.GPU decorator is enabled)."""
    generator = load_csm_1b(device=device)
    print("Model loaded successfully!")

    print(f"Generating audio for text: '{text}'")
    audio = generator.generate(
        text=text,
        speaker=speaker_id,
        context=[],
        max_audio_length_ms=10000,
        temperature=0.9,
        topk=50,
    )

    return audio, generator.sample_rate

def test_model():
    print("Testing CSM-1B model...")

    # Log in to the Hugging Face Hub
    login_huggingface()

    # Check whether a GPU is available and pick the device
    device = "cuda" if torch.cuda.is_available() else "cpu"
    print(f"Using device: {device}")

    # Load the CSM-1B model and generate audio
    print("Loading CSM-1B model...")
    try:
        # Generate a short test utterance (ZeroGPU is only used when @spaces.GPU is enabled)
        text = "Hello, this is a test of the CSM-1B model."
        speaker_id = 0
        audio, sample_rate = generate_test_audio(text, speaker_id, device)

        # Save the audio to a file (move the tensor to CPU before saving)
        output_path = "test_output.wav"
        torchaudio.save(output_path, audio.unsqueeze(0).cpu(), sample_rate)
        print(f"Audio saved to file: {output_path}")
        print("Test completed!")
    except Exception as e:
        print(f"Error testing model: {e}")
        print("Please check your token and access permissions.")


if __name__ == "__main__":
    test_model()
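
# Example invocation (the filename below is illustrative; the script's actual name
# in this Space is not shown here). Setting HF_TOKEN skips the interactive prompt:
#   HF_TOKEN=hf_xxx python test_csm.py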