# YT-analyzer / app.py
# Author: Badro — commit efe0e7b (verified), "Update app.py"
import gradio as gr
import pytube
from transformers import pipeline
from textblob import TextBlob
# Initialize sentiment analysis pipeline at import time.
# No model is named, so transformers loads its default sentiment model;
# the first run may be slow while weights are downloaded.
sentiment_analyzer = pipeline("sentiment-analysis")
def analyze_youtube_content(youtube_url, transcript_text=""):
    """Analyze YouTube video metadata and/or transcript sentiment.

    Args:
        youtube_url: Optional video URL; used to fetch the video title.
        transcript_text: Optional transcript text to run sentiment analysis on.

    Returns:
        dict containing any of the keys "video_info", "sentiment" and
        "key_moments", depending on which inputs were supplied. An empty
        dict is returned when both inputs are empty.
    """
    results = {}

    # Fetch basic video metadata when a URL is supplied.
    if youtube_url:
        try:
            yt = pytube.YouTube(youtube_url)
            results["video_info"] = {
                "title": yt.title,
                "status": "success",
            }
        except Exception as e:
            # pytube raises many exception types for bad/blocked URLs;
            # report the failure in the result instead of crashing the UI.
            results["video_info"] = {
                "status": "error",
                "message": str(e),
            }

    # Analyze the transcript when one is supplied.
    if transcript_text:
        blob = TextBlob(transcript_text)
        polarity = blob.sentiment.polarity
        if polarity > 0:
            assessment = "positive"
        elif polarity < 0:
            assessment = "negative"
        else:
            assessment = "neutral"

        # Transformer models have a bounded input length; truncate the raw
        # text to 512 characters before running the HF pipeline.
        hf_result = sentiment_analyzer(transcript_text[:512])[0]

        results["sentiment"] = {
            "textblob": {
                "polarity": round(polarity, 2),
                "assessment": assessment,
            },
            "huggingface": {
                "label": hf_result["label"],
                "score": round(hf_result["score"], 4),
            },
        }

        # Collect strongly-polarized sentences as candidate "key moments".
        # NOTE: Sentence objects already expose .sentiment, so there is no
        # need to re-parse each sentence with a fresh TextBlob.
        key_moments = []
        for sentence in blob.sentences:
            sentence_polarity = sentence.sentiment.polarity
            if abs(sentence_polarity) > 0.5:
                key_moments.append({
                    "text": str(sentence),
                    "sentiment": sentence_polarity,
                })
        # BUG FIX: the original kept the FIRST five qualifying sentences in
        # document order despite the stated intent of "Top 5 moments" —
        # rank by sentiment strength before truncating.
        key_moments.sort(key=lambda m: abs(m["sentiment"]), reverse=True)
        results["key_moments"] = key_moments[:5]  # Top 5 moments

    return results
# Create Gradio interface: two free-text inputs (URL and pasted transcript),
# one JSON output showing the analysis dict returned by
# analyze_youtube_content. Both inputs are optional at runtime — the
# function handles either being empty.
demo = gr.Interface(
    fn=analyze_youtube_content,
    inputs=[
        gr.Textbox(label="YouTube URL"),
        gr.Textbox(label="Transcript Text", lines=10)
    ],
    outputs=gr.JSON(label="Analysis Results"),
    title="YouTube Viral Moment Analyzer",
    description="Analyze viral moments from YouTube videos using ML models"
)
# Launch the app with MCP server enabled.
# server_name="0.0.0.0" binds to all interfaces (required for container
# hosting); share=True additionally requests a public Gradio tunnel.
# NOTE(review): mcp_server=True requires a Gradio version with MCP support
# (gradio[mcp]) — confirm against the deployment's requirements.
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", share=True, mcp_server=True)