from google.cloud import bigquery
import functions_framework
from query_seller_sale import queries
from google.oauth2 import service_account
import json
import requests
import streamlit as st
import pyperclip
from ap import send_message_via_webhook, Webhook_urls
import pandas as pd
import io
import time
# HTML for button styles and progress bar
# NOTE(review): the template below is currently empty (it renders as a blank
# line) -- presumably the styling markup is still to be filled in; confirm.
html_subject = """
"""
st.markdown(html_subject, unsafe_allow_html=True)
def check_duplicates(credentials_file):
    """Run every configured BigQuery query and render the results in Streamlit.

    Parameters
    ----------
    credentials_file : str
        JSON text of a Google service-account key (the decoded upload).

    Returns
    -------
    dict
        Mapping of query name -> result ``pandas.DataFrame``; queries that
        raised an error are omitted (the error is shown in the UI instead).

    Notes
    -----
    Reads the module-level ``start_date_str`` / ``end_date_str`` globals set
    by the date pickers below, so it must only be called after both dates
    are chosen.
    """
    results = {}
    # Cloud-platform plus Drive scope so queries over Drive-backed
    # (e.g. Sheets-linked) tables also work.
    scopes = ['https://www.googleapis.com/auth/cloud-platform',
              'https://www.googleapis.com/auth/drive']
    # BUG FIX: `scopes` was previously built but never applied to the
    # credentials; pass it explicitly.
    credentials = service_account.Credentials.from_service_account_info(
        json.loads(credentials_file), scopes=scopes)
    client = bigquery.Client(credentials=credentials, project=credentials.project_id)

    # Format and execute all queries.
    for query_name, query_template in queries.items():
        formatted_query = query_template.format(start_date=start_date_str,
                                                end_date=end_date_str)
        try:
            query_job = client.query(formatted_query)
            df = query_job.result().to_dataframe()
            results[query_name] = df  # keep for the combined downloads below
            # Render a subheader followed by the DataFrame itself.
            subheader_html = f"""
Results for {query_name}
"""
            st.markdown(subheader_html, unsafe_allow_html=True)
            st.dataframe(df)
        except Exception as e:
            # Surface the failure for this query but keep running the rest.
            st.error(f"An error occurred while querying {query_name}: {e}")

    # Offer combined downloads once both headline result sets are present.
    if 'query_seller_net_data' in results and 'query_brand_accounting_entries' in results:
        df_net_data = results['query_seller_net_data']
        df_brand_entries = results['query_brand_accounting_entries']

        # Excel: one sheet per DataFrame.
        excel_buffer = io.BytesIO()
        with pd.ExcelWriter(excel_buffer, engine='openpyxl') as writer:
            df_net_data.to_excel(writer, sheet_name='ss_data', index=False)
            df_brand_entries.to_excel(writer, sheet_name='ss_entry', index=False)
        excel_buffer.seek(0)

        # CSV: both DataFrames in one file, separated by a blank line.
        # BUG FIX: the net-data frame was previously written twice in a row.
        csv_buffer = io.StringIO()
        df_net_data.to_csv(csv_buffer, index=False, header=True)
        csv_buffer.write("\n")  # blank line separating the two sections
        df_brand_entries.to_csv(csv_buffer, index=False, header=True)
        csv_buffer.seek(0)

        # Buttons to download the Excel and CSV files.
        col1, col2 = st.columns(2)
        with col1:
            button_styles = """
"""
            st.markdown(button_styles, unsafe_allow_html=True)
            st.download_button(
                label="Download Excel file",
                data=excel_buffer,
                file_name="query_results.xlsx",
                mime="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
                key="download_excel"
            )
        with col2:
            # BUG FIX: csv_buffer was assembled but never offered for
            # download (col2 was left empty) -- expose it here.
            st.download_button(
                label="Download CSV file",
                data=csv_buffer.getvalue(),
                file_name="query_results.csv",
                mime="text/csv",
                key="download_csv"
            )
    return results
# Streamlit UI for uploading credentials.
# NOTE(review): this re-renders the same (empty) `html_subject` already shown
# at the top of the file -- looks redundant; confirm before removing.
st.markdown(html_subject, unsafe_allow_html=True)
# Upload credentials file (service-account JSON key).
html_subject = """
Upload the JSON file
"""
st.markdown(html_subject, unsafe_allow_html=True)
credentials_file = st.file_uploader("", type="json")
st.write("")
# Two narrow columns so the date pickers sit side by side.
col1, col2 = st.columns([0.118, 0.125])
# First column for Start Date
with col1:
    html_subject_start = """
Start date
"""
    st.markdown(html_subject_start, unsafe_allow_html=True)
    date_input_key_start = "start_date_input"
    start_date = st.date_input("", value=None, key=date_input_key_start)
# Second column for End Date
with col2:
    html_subject_end = """
End date
"""
    st.markdown(html_subject_end, unsafe_allow_html=True)
    # Unique key so Streamlit tracks the two pickers' state independently.
    date_input_key_end = "end_date_input"
    end_date = st.date_input("", value=None, key=date_input_key_end)
# Main driver: runs once a key file is uploaded and both dates are picked.
if credentials_file is not None:
    if start_date and end_date:  # both pickers must be set before querying
        # The queries interpolate these module-level strings
        # (read inside check_duplicates).
        start_date_str = start_date.strftime("%Y-%m-%d")
        end_date_str = end_date.strftime("%Y-%m-%d")
        # The uploaded key arrives as bytes; decode to JSON text.
        credentials_data = credentials_file.read().decode("utf-8")
        # Run all queries and render per-query results.
        results = check_duplicates(credentials_data)
        if results:
            # Build a combined workbook of the two headline result sets.
            excel_buffer = io.BytesIO()
            with pd.ExcelWriter(excel_buffer, engine='openpyxl') as writer:
                if 'query_seller_net_data' in results:
                    results['query_seller_net_data'].to_excel(
                        writer, sheet_name='Net Data', index=False)
                if 'query_brand_accounting_entries' in results:
                    results['query_brand_accounting_entries'].to_excel(
                        writer, sheet_name='Brand Accounting', index=False)
            excel_buffer.seek(0)
            # BUG FIX: this workbook was previously built and then silently
            # discarded; expose it for download (key differs from the button
            # rendered inside check_duplicates to avoid a duplicate-key error).
            st.download_button(
                label="Download combined workbook",
                data=excel_buffer,
                file_name="combined_results.xlsx",
                mime="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
                key="download_excel_combined"
            )
        else:
            st.error("No results found.")
    else:
        st.warning("Please select both the start and end dates to proceed.")