{ "cells": [
 { "cell_type": "code", "execution_count": null, "metadata": { "tags": [] }, "outputs": [], "source": [
   "import os\n",
   "import requests\n",
   "import json\n",
   "from urllib.request import urlretrieve\n",
   "import pandas as pd\n",
   "import time\n",
   "from allkeys import OPENAIKEY, GEMENIKEY" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "import anvil.server\n",
   "# SECURITY: prefer the ANVIL_UPLINK_KEY env var; the hardcoded key is kept only as a fallback.\n",
   "anvil.server.connect(os.environ.get('ANVIL_UPLINK_KEY', 'PLMOIU5VCGGUOJH2XORIBWV3-ZXZVFLWX7QFIIAF4'))" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "def fetch_result(task_id):\n",
   "    \"\"\"Poll the anvil server for task_id once per second until it leaves\n",
   "    'In Progress'; print and return the final result.\n",
   "    \"\"\"\n",
   "    while True:\n",
   "        result = anvil.server.call('poll', task_id)\n",
   "        # Any value other than 'In Progress' (including 'No such task') is terminal,\n",
   "        # so the redundant `or result=='No such task'` clause is dropped.\n",
   "        if result != 'In Progress':\n",
   "            break\n",
   "        time.sleep(1)\n",
   "        print(result)\n",
   "    print(result)\n",
   "    return result" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "text='write a python function to compute the nth digit of pi'\n",
   "model='gpt-3.5-turbo'" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "task_id=anvil.server.call('launch','call_gemini',text,GEMENIKEY)" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "task_id=anvil.server.call('launch','call_gpt',text,OPENAIKEY,model)" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "# Assign so the later print(result) cells work on a fresh Restart & Run All.\n",
   "result = fetch_result(task_id)\n",
   "result" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "print(result)" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "print(result[1],end='\\n')" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "import pathlib\n",
   "import textwrap\n",
   "from IPython.display import display\n",
   "from IPython.display import Markdown\n",
   "\n",
   "def to_markdown(text):\n",
   "    \"\"\"Render text as a Markdown blockquote, converting '\u2022' bullets to '*'.\"\"\"\n",
   "    text = text.replace('\u2022', ' *')\n",
   "    return Markdown(textwrap.indent(text, '> ', predicate=lambda _: True))" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "prompt='write code that defines a transformer network from scratch in pytorch'" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "response=anvil.server.call('call_gemini',prompt)" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "anvil.server.call('encode_anvil',prompt)" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "to_markdown(response)" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "anvil.server.call('encode_anvil','I am a robot')[0]" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "def encode(text, server='local'):\n",
   "    \"\"\"POST text to an /encode endpoint and return the raw requests.Response.\n",
   "\n",
   "    Callers extract the payload via json.loads(response.content)['embedding'].\n",
   "    Raises ValueError for an unknown server name (previously a NameError on url).\n",
   "    \"\"\"\n",
   "    headers = {'Content-Type': 'application/json'}\n",
   "    if server == 'local':\n",
   "        url = 'http://127.0.0.1:7860/encode'\n",
   "    elif server == 'hf':\n",
   "        url = 'https://huggingface.co/spaces/gmshroff/gmserver/encode'\n",
   "    else:\n",
   "        raise ValueError(f'unknown server: {server!r}')\n",
   "    body = {'text': text}\n",
   "    # Reuse the headers built above (previously rebuilt inline); the unreachable\n",
   "    # second return that followed `return response` has been removed.\n",
   "    response = requests.post(url=url, data=json.dumps(body), headers=headers)\n",
   "    return response" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "response=encode('I am a robot',server='local')" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "response.content" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "headers = {'Content-Type': 'application/json'}\n",
   "# url='http://127.0.0.1:5000/run'\n",
   "url='https://huggingface.co/spaces/gmshroff/gmserver/'\n",
   "# url='http://127.0.0.1:7860/run'\n",
   "# body={\"script\":\"python update_valdata.py\"}\n",
   "# body={\"script\":\"pwd\"}" ] },
 { "cell_type": "code",
"execution_count": null, "metadata": {}, "outputs": [], "source": [
   "response=requests.get(url=url)" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "response.content" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "# url='http://127.0.0.1:7860/encode'\n",
   "body={'text':'I am very good'}\n" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "response=requests.post(url=url,data=json.dumps(body),headers = {'Content-Type': 'application/json'})\n" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "url" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "print(response)" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "print(response.__dict__)" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "print(json.loads(response.content)['embedding'])" ] },
 { "cell_type": "code", "execution_count": null, "metadata": { "tags": [] }, "outputs": [], "source": [
   "urlretrieve(url='http://127.0.0.1:7860/file/data.csv',filename='./returned_file.csv')" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "# NOTE(review): hardcoded absolute /tmp path \u2014 confirm this file exists before a fresh run.\n",
   "df=pd.read_parquet('/tmp/validation_subset_int8.parquet')" ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
   "import torch\n",
   "import torch.nn as nn\n",
   "import torch.nn.functional as F\n",
   "\n",
   "class Transformer(nn.Module):\n",
   "    \"\"\"Thin wrapper around torch.nn.Transformer with the same constructor arguments.\"\"\"\n",
   "\n",
   "    def __init__(self, d_model, nhead, num_encoder_layers, num_decoder_layers, dim_feedforward, dropout=0.1):\n",
   "        super().__init__()  # modern zero-argument super() (Python 3)\n",
   "        self.transformer = nn.Transformer(d_model, nhead, num_encoder_layers, num_decoder_layers, dim_feedforward, dropout)\n",
   "\n",
   "    def forward(self, src, tgt):\n",
   "        \"\"\"Apply the wrapped transformer; src/tgt are (seq_len, batch, d_model) tensors.\"\"\"\n",
   "        return self.transformer(src, tgt)\n",
   "\n",
   "# Example usage:\n",
   "# Define the model parameters\n",
   "d_model = 512\n",
   "nhead = 8\n",
   "num_encoder_layers = 6\n",
   "num_decoder_layers = 6\n",
   "dim_feedforward = 2048\n",
   "dropout = 0.1\n",
   "\n",
   "# Initialize the model\n",
   "model = Transformer(d_model, nhead, num_encoder_layers, num_decoder_layers, dim_feedforward, dropout)\n",
   "\n",
   "# Generate some sample data\n",
   "src = torch.rand(10, 32, 512)\n",
   "tgt = torch.rand(20, 32, 512)\n",
   "\n",
   "# Pass the data through the model\n",
   "output = model(src, tgt)\n",
   "\n",
   "# Print the output shape\n",
   "print(output.shape)" ] } ],
 "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.13" } }, "nbformat": 4, "nbformat_minor": 4 }