import os
import json
import queue
import threading
import time

from flask import Flask, url_for, redirect
from flask import request as req
from flask_cors import CORS
from werkzeug.utils import secure_filename
from PIL import Image

import helpers.helper as helper
from helpers.provider import *
from utils.llms import gpt4, gpt4stream
from utils.functions import allocate

app = Flask(__name__)
CORS(app)

#docker run dezsh/inlets client --url=wss://inlets-testing-secret.onrender.com  --upstream=http://192.168.1.8:1331 --token=secret --insecure
app.config['UPLOAD_FOLDER'] = "static"

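# Illustrative client call for the OpenAI-compatible endpoint below (a sketch, not an
# authoritative spec): it assumes the server runs on the host/port configured at the
# bottom of this file, and the Bearer token value is a placeholder.
#
#   import requests
#   resp = requests.post(
#       "http://localhost:7860/v1/chat/completions",
#       headers={"Authorization": "Bearer <your-key>"},
#       json={
#           "model": "gpt-4-turbo",
#           "messages": [{"role": "user", "content": "Hello"}],
#           "stream": False,
#       },
#   )
#   print(resp.json())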
@app.route("/v1/chat/completions", methods=['POST'])
@app.route("/chat/completions", methods=['POST'])
@app.route("/", methods=['POST'])
def chat_completions2():
    all_request_data = {}
    all_request_data['json'] = req.get_json(silent=True) or {}
    all_request_data['headers'] = dict(req.headers)

    all_request_data['args'] = req.args.to_dict(flat=False)
    all_request_data['form'] = req.form.to_dict(flat=False)
    try:
        all_request_data['raw_data'] = req.data.decode('utf-8')
    except Exception:
        all_request_data['raw_data'] = f"Could not decode raw data (length: {len(req.data)})"


    # # --- Now you can access your original values from this dict ---
    # print("--- Consolidated Request Data ---")
    # print(json.dumps(all_request_data, indent=2))
    # print("--------------------------------")

    data = all_request_data['json']
    streaming = data.get('stream', False)
    model = data.get('model', 'gpt-4-turbo')
    messages = data.get('messages')
    api_keys = req.headers.get('Authorization', '').replace('Bearer ', '')
    functions = data.get('functions')
    tools = data.get('tools')
    response_format = data.get('response_format')
    if streaming:
        helper.stopped = True

    if tools is not None:
        allocate(messages, api_keys, model, tools)
    else:
        allocate(messages, api_keys, model, [])

    t = time.time()

    def stream_response(messages, model, api_keys="", functions=None, tools=None):
        helper.q = queue.Queue()  # queue the worker thread fills with response lines
        if helper.stopped:
            helper.stopped = False

        # run the upstream call in a background thread; it pushes lines onto helper.q
        threading.Thread(target=gpt4stream, args=(messages, model, api_keys)).start()

        started = False
        while True:  # poll the queue until the stream finishes
            try:
                # keep-alive chunks while waiting for the first real token
                if 20 > time.time() - t > 18 and not started:
                    yield 'data: %s\n\n' % json.dumps(helper.streamer("> Thinking"), separators=(',', ':'))
                    time.sleep(2)
                elif time.time() - t > 20 and not started:
                    yield 'data: %s\n\n' % json.dumps(helper.streamer("."), separators=(',', ':'))
                    time.sleep(1)
                if time.time() - t > 100 and not started:
                    yield 'data: %s\n\n' % json.dumps(helper.streamer("Still Thinking...Do not terminate"), separators=(',', ':'))
                    break

                line = helper.q.get(block=False)

                if "RESULT: " in line:
                    line = line.replace("RESULT: ", "")
                    if tools is not None:
                        yield f'data: {json.dumps(helper.stream_func(line, "tools"))}\n\n'
                    else:
                        yield f'data: {json.dumps(helper.end())}\n\n'
                    break

                if line == "END":
                    yield f'data: {json.dumps(helper.end())}\n\n'
                    break

                if not started:
                    started = True
                    yield 'data: %s\n\n' % json.dumps(helper.streamer("\n\n"), separators=(',', ':'))

                yield 'data: %s\n\n' % json.dumps(helper.streamer(line), separators=(',', ':'))

                helper.q.task_done()  # mark the queue item as processed

            except queue.Empty:
                time.sleep(0.05)  # avoid a tight busy-wait while the worker is still generating
            except Exception as e:
                print(e)

    





    if not streaming:
        if functions is not None:
            k = gpt4(messages, None, model)
            return helper.func_output(k, "functions")
        elif tools is not None:
            k = gpt4(messages, None, model)
            return helper.func_output(k, "tools")
        else:
            print("USING GPT_4 NO STREAM")
            print(model)
            k = gpt4(messages, response_format, model)
            return helper.output(k)
    else:
        return app.response_class(
            stream_response(messages, model, api_keys, functions, tools),
            mimetype='text/event-stream',
        )
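# Minimal sketch of consuming the streaming branch above from a client, assuming the
# same host/port as configured below. Each chunk arrives as a Server-Sent Events
# "data: ..." line; the exact JSON chunk shape comes from helper.streamer/helper.end,
# which are defined elsewhere in this repo.
#
#   import requests
#   with requests.post(
#       "http://localhost:7860/v1/chat/completions",
#       headers={"Authorization": "Bearer <your-key>"},
#       json={"model": "gpt-4-turbo",
#             "messages": [{"role": "user", "content": "Hi"}],
#             "stream": True},
#       stream=True,
#   ) as resp:
#       for raw in resp.iter_lines(decode_unicode=True):
#           if raw.startswith("data: "):
#               print(raw[len("data: "):])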
    





@app.route('/upload', methods=['GET', 'POST'])
def index():

    # On POST, store the uploaded file in the static folder
    if req.method == 'POST':

        if 'file' not in req.files:
            return redirect('/')

        file = req.files['file']

        if file:
            filename = secure_filename(file.filename)
            save_path = os.path.join(app.config['UPLOAD_FOLDER'], filename)
            file.save(save_path)

            # Downscale obvious photo/screenshot uploads so they stay reasonably small
            if any(tag in filename for tag in ("camera", "capture", "IMG", "Screenshot")):
                img = Image.open(save_path)
                img.thumbnail((512, 512), Image.Resampling.LANCZOS)
                img.save(save_path)

            return filename
 
 
    # Get Files in the directory and create list items to be displayed to the user
    file_list = ''
    for f in os.listdir(app.config['UPLOAD_FOLDER']):
        # Create link html
        link = url_for("static", filename=f) 
        file_list = file_list + '<li><a href="%s">%s</a></li>' % (link, f)
 
    # Format return HTML - allow file upload and list all available files
    return_html = '''
    <!doctype html>
    <title>Upload File</title>
    <h1>Upload File</h1>
    <form method=post enctype=multipart/form-data>
            <input type=file name=file><br>
            <input type=submit value=Upload>
    </form>
    <hr>
    <h1>Files</h1>
    <ol>%s</ol>
    ''' % file_list
 
    return return_html
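# Illustrative upload against the /upload route above (the filename here is made up;
# any multipart form part named "file" works, and on success the response body is the
# sanitized filename under which the file was stored):
#
#   import requests
#   with open("IMG_0001.jpg", "rb") as fh:
#       r = requests.post("http://localhost:7860/upload", files={"file": fh})
#   print(r.text)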


@app.route('/')
def yellow_name():
    return 'Hello world!'



@app.route("/v1/models")
@app.route("/models")
def models():
    print("Models")
    return helper.model



if __name__ == '__main__':
    config = {
        'host': '0.0.0.0',
        'port': 7860,
        'debug': False,
    }

    app.run(**config)