lattmamb committed
Commit 4a518d8 · verified · 1 Parent(s): 1a9ff7f

Upload 181 files
.gitattributes CHANGED
@@ -34,3 +34,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
 diagram.png filter=lfs diff=lfs merge=lfs -text
+ python3.11 filter=lfs diff=lfs merge=lfs -text
+ python3.9 filter=lfs diff=lfs merge=lfs -text
Activate.ps1 ADDED
@@ -0,0 +1,241 @@
+ <#
+ .Synopsis
+ Activate a Python virtual environment for the current PowerShell session.
+
+ .Description
+ Pushes the python executable for a virtual environment to the front of the
+ $Env:PATH environment variable and sets the prompt to signify that you are
+ in a Python virtual environment. Makes use of the command line switches as
+ well as the `pyvenv.cfg` file values present in the virtual environment.
+
+ .Parameter VenvDir
+ Path to the directory that contains the virtual environment to activate. The
+ default value for this is the parent of the directory that the Activate.ps1
+ script is located within.
+
+ .Parameter Prompt
+ The prompt prefix to display when this virtual environment is activated. By
+ default, this prompt is the name of the virtual environment folder (VenvDir)
+ surrounded by parentheses and followed by a single space (i.e. '(.venv) ').
+
+ .Example
+ Activate.ps1
+ Activates the Python virtual environment that contains the Activate.ps1 script.
+
+ .Example
+ Activate.ps1 -Verbose
+ Activates the Python virtual environment that contains the Activate.ps1 script,
+ and shows extra information about the activation as it executes.
+
+ .Example
+ Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
+ Activates the Python virtual environment located in the specified location.
+
+ .Example
+ Activate.ps1 -Prompt "MyPython"
+ Activates the Python virtual environment that contains the Activate.ps1 script,
+ and prefixes the current prompt with the specified string (surrounded in
+ parentheses) while the virtual environment is active.
+
+ .Notes
+ On Windows, it may be required to enable this Activate.ps1 script by setting the
+ execution policy for the user. You can do this by issuing the following PowerShell
+ command:
+
+ PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
+
+ For more information on Execution Policies:
+ https://go.microsoft.com/fwlink/?LinkID=135170
+
+ #>
+ Param(
+     [Parameter(Mandatory = $false)]
+     [String]
+     $VenvDir,
+     [Parameter(Mandatory = $false)]
+     [String]
+     $Prompt
+ )
+
+ <# Function declarations --------------------------------------------------- #>
+
+ <#
+ .Synopsis
+ Remove all shell session elements added by the Activate script, including the
+ addition of the virtual environment's Python executable from the beginning of
+ the PATH variable.
+
+ .Parameter NonDestructive
+ If present, do not remove this function from the global namespace for the
+ session.
+
+ #>
+ function global:deactivate ([switch]$NonDestructive) {
+     # Revert to original values
+
+     # The prior prompt:
+     if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
+         Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
+         Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
+     }
+
+     # The prior PYTHONHOME:
+     if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
+         Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
+         Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
+     }
+
+     # The prior PATH:
+     if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
+         Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
+         Remove-Item -Path Env:_OLD_VIRTUAL_PATH
+     }
+
+     # Just remove the VIRTUAL_ENV altogether:
+     if (Test-Path -Path Env:VIRTUAL_ENV) {
+         Remove-Item -Path env:VIRTUAL_ENV
+     }
+
+     # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
+     if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
+         Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
+     }
+
+     # Leave deactivate function in the global namespace if requested:
+     if (-not $NonDestructive) {
+         Remove-Item -Path function:deactivate
+     }
+ }
+
+ <#
+ .Description
+ Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
+ given folder, and returns them in a map.
+
+ For each line in the pyvenv.cfg file, if that line can be parsed into exactly
+ two strings separated by `=` (with any amount of whitespace surrounding the =)
+ then it is considered a `key = value` line. The left hand string is the key,
+ the right hand is the value.
+
+ If the value starts with a `'` or a `"` then the first and last character is
+ stripped from the value before being captured.
+
+ .Parameter ConfigDir
+ Path to the directory that contains the `pyvenv.cfg` file.
+ #>
+ function Get-PyVenvConfig(
+     [String]
+     $ConfigDir
+ ) {
+     Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
+
+     # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
+     $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
+
+     # An empty map will be returned if no config file is found.
+     $pyvenvConfig = @{ }
+
+     if ($pyvenvConfigPath) {
+
+         Write-Verbose "File exists, parse `key = value` lines"
+         $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
+
+         $pyvenvConfigContent | ForEach-Object {
+             $keyval = $PSItem -split "\s*=\s*", 2
+             if ($keyval[0] -and $keyval[1]) {
+                 $val = $keyval[1]
+
+                 # Remove extraneous quotations around a string value.
+                 if ("'""".Contains($val.Substring(0, 1))) {
+                     $val = $val.Substring(1, $val.Length - 2)
+                 }
+
+                 $pyvenvConfig[$keyval[0]] = $val
+                 Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
+             }
+         }
+     }
+     return $pyvenvConfig
+ }
+
+
+ <# Begin Activate script --------------------------------------------------- #>
+
+ # Determine the containing directory of this script
+ $VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
+ $VenvExecDir = Get-Item -Path $VenvExecPath
+
+ Write-Verbose "Activation script is located in path: '$VenvExecPath'"
+ Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
+ Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
+
+ # Set values required in priority: CmdLine, ConfigFile, Default
+ # First, get the location of the virtual environment, it might not be
+ # VenvExecDir if specified on the command line.
+ if ($VenvDir) {
+     Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
+ }
+ else {
+     Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
+     $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
+     Write-Verbose "VenvDir=$VenvDir"
+ }
+
+ # Next, read the `pyvenv.cfg` file to determine any required value such
+ # as `prompt`.
+ $pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
+
+ # Next, set the prompt from the command line, or the config file, or
+ # just use the name of the virtual environment folder.
+ if ($Prompt) {
+     Write-Verbose "Prompt specified as argument, using '$Prompt'"
+ }
+ else {
+     Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
+     if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
+         Write-Verbose "  Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
+         $Prompt = $pyvenvCfg['prompt'];
+     }
+     else {
+         Write-Verbose "  Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
+         Write-Verbose "  Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
+         $Prompt = Split-Path -Path $venvDir -Leaf
+     }
+ }
+
+ Write-Verbose "Prompt = '$Prompt'"
+ Write-Verbose "VenvDir='$VenvDir'"
+
+ # Deactivate any currently active virtual environment, but leave the
+ # deactivate function in place.
+ deactivate -nondestructive
+
+ # Now set the environment variable VIRTUAL_ENV, used by many tools to determine
+ # that there is an activated venv.
+ $env:VIRTUAL_ENV = $VenvDir
+
+ if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
+
+     Write-Verbose "Setting prompt to '$Prompt'"
+
+     # Set the prompt to include the env name
+     # Make sure _OLD_VIRTUAL_PROMPT is global
+     function global:_OLD_VIRTUAL_PROMPT { "" }
+     Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
+     New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
+
+     function global:prompt {
+         Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
+         _OLD_VIRTUAL_PROMPT
+     }
+ }
+
+ # Clear PYTHONHOME
+ if (Test-Path -Path Env:PYTHONHOME) {
+     Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
+     Remove-Item -Path Env:PYTHONHOME
+ }
+
+ # Add the venv to the PATH
+ Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
+ $Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
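
The `Get-PyVenvConfig` function above treats any line of `pyvenv.cfg` that splits into exactly two `=`-separated parts as a `key = value` pair and strips one layer of surrounding quotes. For readers more at home in Python, a rough equivalent (an illustrative sketch only, not part of this commit) might look like:

```python
from pathlib import Path

def get_pyvenv_config(config_dir: str) -> dict:
    """Parse key = value lines from pyvenv.cfg, mirroring Get-PyVenvConfig."""
    config = {}
    path = Path(config_dir) / "pyvenv.cfg"
    if not path.exists():
        return config  # mirror the script: a missing file yields an empty map
    for line in path.read_text().splitlines():
        key, sep, val = line.partition("=")
        if sep and key.strip() and val.strip():
            val = val.strip()
            # Strip one layer of surrounding quotes, as the script does
            if val[0] in ("'", '"'):
                val = val[1:-1]
            config[key.strip()] = val
    return config
```
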
README.md CHANGED
@@ -1,9 +1,36 @@
- ---
- license: mit
- title: vision2.0
- sdk: streamlit
- emoji: 📊
- colorFrom: green
- colorTo: gray
- pinned: true
- ---
+ This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next-app`](https://nextjs.org/docs/app/api-reference/cli/create-next-app).
+
+ ## Getting Started
+
+ First, run the development server:
+
+ ```bash
+ npm run dev
+ # or
+ yarn dev
+ # or
+ pnpm dev
+ # or
+ bun dev
+ ```
+
+ Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
+
+ You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.
+
+ This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load [Geist](https://vercel.com/font), a new font family for Vercel.
+
+ ## Learn More
+
+ To learn more about Next.js, take a look at the following resources:
+
+ - [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
+ - [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.
+
+ You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome!
+
+ ## Deploy on Vercel
+
+ The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.
+
+ Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details.
activate ADDED
@@ -0,0 +1,66 @@
+ # This file must be used with "source bin/activate" *from bash*
+ # you cannot run it directly
+
+ deactivate () {
+     # reset old environment variables
+     if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
+         PATH="${_OLD_VIRTUAL_PATH:-}"
+         export PATH
+         unset _OLD_VIRTUAL_PATH
+     fi
+     if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
+         PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
+         export PYTHONHOME
+         unset _OLD_VIRTUAL_PYTHONHOME
+     fi
+
+     # This should detect bash and zsh, which have a hash command that must
+     # be called to get it to forget past commands. Without forgetting
+     # past commands the $PATH changes we made may not be respected
+     if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+         hash -r 2> /dev/null
+     fi
+
+     if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
+         PS1="${_OLD_VIRTUAL_PS1:-}"
+         export PS1
+         unset _OLD_VIRTUAL_PS1
+     fi
+
+     unset VIRTUAL_ENV
+     if [ ! "${1:-}" = "nondestructive" ] ; then
+         # Self destruct!
+         unset -f deactivate
+     fi
+ }
+
+ # unset irrelevant variables
+ deactivate nondestructive
+
+ VIRTUAL_ENV="/Users/lattm/Downloads/home/visionos_venv"
+ export VIRTUAL_ENV
+
+ _OLD_VIRTUAL_PATH="$PATH"
+ PATH="$VIRTUAL_ENV/bin:$PATH"
+ export PATH
+
+ # unset PYTHONHOME if set
+ # this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+ # could use `if (set -u; : $PYTHONHOME) ;` in bash
+ if [ -n "${PYTHONHOME:-}" ] ; then
+     _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
+     unset PYTHONHOME
+ fi
+
+ if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
+     _OLD_VIRTUAL_PS1="${PS1:-}"
+     PS1="(visionos_venv) ${PS1:-}"
+     export PS1
+ fi
+
+ # This should detect bash and zsh, which have a hash command that must
+ # be called to get it to forget past commands. Without forgetting
+ # past commands the $PATH changes we made may not be respected
+ if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+     hash -r 2> /dev/null
+ fi
activate.csh ADDED
@@ -0,0 +1,25 @@
+ # This file must be used with "source bin/activate.csh" *from csh*.
+ # You cannot run it directly.
+ # Created by Davide Di Blasi <[email protected]>.
+ # Ported to Python 3.3 venv by Andrew Svetlov <[email protected]>
+
+ alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
+
+ # Unset irrelevant variables.
+ deactivate nondestructive
+
+ setenv VIRTUAL_ENV "/Users/lattm/Downloads/home/visionos_venv"
+
+ set _OLD_VIRTUAL_PATH="$PATH"
+ setenv PATH "$VIRTUAL_ENV/bin:$PATH"
+
+
+ set _OLD_VIRTUAL_PROMPT="$prompt"
+
+ if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
+     set prompt = "(visionos_venv) $prompt"
+ endif
+
+ alias pydoc python -m pydoc
+
+ rehash
activate.fish ADDED
@@ -0,0 +1,64 @@
+ # This file must be used with "source <venv>/bin/activate.fish" *from fish*
+ # (https://fishshell.com/); you cannot run it directly.
+
+ function deactivate -d "Exit virtual environment and return to normal shell environment"
+     # reset old environment variables
+     if test -n "$_OLD_VIRTUAL_PATH"
+         set -gx PATH $_OLD_VIRTUAL_PATH
+         set -e _OLD_VIRTUAL_PATH
+     end
+     if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+         set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+         set -e _OLD_VIRTUAL_PYTHONHOME
+     end
+
+     if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+         functions -e fish_prompt
+         set -e _OLD_FISH_PROMPT_OVERRIDE
+         functions -c _old_fish_prompt fish_prompt
+         functions -e _old_fish_prompt
+     end
+
+     set -e VIRTUAL_ENV
+     if test "$argv[1]" != "nondestructive"
+         # Self-destruct!
+         functions -e deactivate
+     end
+ end
+
+ # Unset irrelevant variables.
+ deactivate nondestructive
+
+ set -gx VIRTUAL_ENV "/Users/lattm/Downloads/home/visionos_venv"
+
+ set -gx _OLD_VIRTUAL_PATH $PATH
+ set -gx PATH "$VIRTUAL_ENV/bin" $PATH
+
+ # Unset PYTHONHOME if set.
+ if set -q PYTHONHOME
+     set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+     set -e PYTHONHOME
+ end
+
+ if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+     # fish uses a function instead of an env var to generate the prompt.
+
+     # Save the current fish_prompt function as the function _old_fish_prompt.
+     functions -c fish_prompt _old_fish_prompt
+
+     # With the original prompt function renamed, we can override with our own.
+     function fish_prompt
+         # Save the return status of the last command.
+         set -l old_status $status
+
+         # Output the venv prompt; color taken from the blue of the Python logo.
+         printf "%s%s%s" (set_color 4B8BBE) "(visionos_venv) " (set_color normal)
+
+         # Restore the return status of the previous command.
+         echo "exit $old_status" | .
+         # Output the original/"old" prompt.
+         _old_fish_prompt
+     end
+
+     set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+ end
agent_logic.py ADDED
@@ -0,0 +1,250 @@
+ # /home/ubuntu/visionos_farm/agents/coding_agent/agent_logic.py
+
+ import asyncio
+ import json
+ import logging
+ import ollama
+ import redis.asyncio as redis
+ from typing import Dict, Any, List, Optional
+ import uuid
+ import datetime
+
+ # Assuming shared schemas are importable (adjust path if needed)
+ # In a real setup, this would likely be a proper package install
+ import sys
+ sys.path.append("/home/ubuntu/visionos_farm/shared")
+ from schemas import Task, TaskStatus, AgentHeartbeat, AgentStatus, AgentType, RedisTaskMessage, RedisResultMessage, RedisHeartbeatMessage, ToolCall, ToolResult
+
+ from config import settings
+
+ logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
+ logger = logging.getLogger(__name__)
+
+ # --- Ollama Client Setup ---
+ # TODO: Make default model configurable via settings
+ DEFAULT_OLLAMA_MODEL = "llama3"
+ ollama_client = ollama.AsyncClient()
+
+ # --- Redis Connection Setup ---
+ # TODO: Centralize Redis connection management (e.g., using a connection pool)
+ async def get_redis_connection():
+     # Consider adding error handling; from_url returns a client synchronously
+     return redis.from_url(f"redis://{settings.REDIS_HOST}:{settings.REDIS_PORT}", decode_responses=True)
+
+ # --- Agent State ---
+ agent_id = f"{settings.AGENT_TYPE.lower()}-{uuid.uuid4()}"  # Generate a unique ID for this instance
+ current_status = AgentStatus.IDLE
+ agent_capabilities = ["ollama_chat", "ui_design_principles"]  # Base capability for UI Agent
+
+ # --- Helper Functions ---
+
+ async def log_reasoning(task_id: str, step: str, details: Dict[str, Any]):
+     """Placeholder for logging reasoning steps ('think mode')."""
+     # TODO: Implement actual logging to Redis stream or DB for UI display
+     logger.info(f"[Task {task_id}][Think] {step}: {json.dumps(details)}")
+
+ async def execute_tool(tool_call: ToolCall) -> ToolResult:
+     """Placeholder for dynamically loading and executing tools."""
+     await log_reasoning(task_id="N/A", step="Tool Execution Start", details=tool_call.model_dump())
+     # TODO: Implement dynamic tool loading based on extensibility framework
+     # TODO: Add security sandboxing for tool execution
+     # TODO: Integrate local agent communication for local tools (VS Code, terminal, files)
+     logger.warning(f"Tool execution not yet implemented for: {tool_call.tool_name}")
+     return ToolResult(
+         tool_name=tool_call.tool_name,
+         status="FAILURE",
+         error="Tool execution not implemented."
+     )
+
+ async def perform_deep_search(query: str, config: Dict[str, Any]) -> List[Dict[str, Any]]:
+     """Placeholder for deep search functionality."""
+     await log_reasoning(task_id="N/A", step="Deep Search Start", details={"query": query, "config": config})
+     # TODO: Implement search across configured sources (web, knowledge base, files)
+     logger.warning(f"Deep search not yet implemented for query: {query}")
+     return []
+
+ async def manage_artifacts(action: str, paths: List[str], config: Dict[str, Any]) -> Dict[str, Any]:
+     """Placeholder for artifact management (upload/download/list)."""
+     await log_reasoning(task_id="N/A", step="Artifact Management", details={"action": action, "paths": paths, "config": config})
+     # TODO: Implement artifact handling (e.g., interaction with S3, local storage via local agent)
+     logger.warning(f"Artifact management not yet implemented for action: {action}")
+     return {"status": "not_implemented"}
+
+ # --- Core Agent Logic ---
+
+ async def process_task(task: Task):
+     """Processes a single task based on parameters and configuration."""
+     global current_status
+     current_status = AgentStatus.BUSY
+     logger.info(f"Agent {agent_id} starting task {task.task_id}")
+     redis_conn = await get_redis_connection()
+     results = {}
+     final_status = TaskStatus.COMPLETED
+     error_message = None
+
+     try:
+         await log_reasoning(task.task_id, "Task Received", task.model_dump(exclude={"parameters"}))
+
+         # --- Configuration Extraction (Example) ---
+         # These would ideally come from task.parameters or a config fetched based on task/user
+         task_config = task.parameters.get("config", {})
+         llm_model = task_config.get("llm_model", DEFAULT_OLLAMA_MODEL)
+         prompt = task.parameters.get("prompt", "Perform the requested action.")
+         tool_calls_config = task_config.get("tool_calls", [])
+         search_config = task_config.get("search", None)
+         artifact_config = task_config.get("artifacts", None)
+         reasoning_level = task_config.get("reasoning_level", "basic")  # Example: basic, detailed
+
+         await log_reasoning(task.task_id, "Configuration Parsed", {"llm_model": llm_model, "reasoning_level": reasoning_level})
+
+         # --- Artifact Handling (Input) ---
+         if artifact_config and artifact_config.get("inputs"):
+             await manage_artifacts("download", artifact_config["inputs"], task_config)
+             # TODO: Handle downloaded artifact paths
+
+         # --- Deep Search (If configured) ---
+         search_results = []
+         if search_config and search_config.get("query"):
+             search_results = await perform_deep_search(search_config["query"], task_config)
+             # TODO: Incorporate search_results into context/prompt
+
+         # --- Core LLM Interaction (Example) ---
+         await log_reasoning(task.task_id, "LLM Interaction Start", {"model": llm_model, "prompt_length": len(prompt)})
+         # TODO: Build more sophisticated context including search results, artifact info
+         llm_response = await ollama_client.chat(
+             model=llm_model,
+             messages=[{"role": "user", "content": prompt}]
+         )
+         llm_output = llm_response["message"]["content"]
+         results["llm_output"] = llm_output
+         await log_reasoning(task.task_id, "LLM Interaction Complete", {"response_length": len(llm_output)})
+
+         # --- Tool Execution (If configured) ---
+         tool_results = []
+         if tool_calls_config:
+             for tool_call_data in tool_calls_config:
+                 # TODO: Validate tool_call_data against ToolCall schema
+                 tool_call = ToolCall(**tool_call_data)
+                 tool_result = await execute_tool(tool_call)
+                 tool_results.append(tool_result.model_dump())
+             results["tool_results"] = tool_results
+             # TODO: Potentially feed tool results back to LLM for final response
+
+         # --- Artifact Handling (Output) ---
+         if artifact_config and artifact_config.get("outputs"):
+             # TODO: Determine which files need uploading based on results/LLM output
+             await manage_artifacts("upload", artifact_config["outputs"], task_config)
+
+         # --- Final Result Aggregation ---
+         # Combine results from LLM, tools, etc. into the final output structure
+         # This structure should be flexible and defined by the task type/config
+         results["final_summary"] = results.get("llm_output", "Task completed.")  # Simple example
+
+     except Exception as e:
+         logger.error(f"Agent {agent_id} failed task {task.task_id}: {e}", exc_info=True)
+         final_status = TaskStatus.FAILED
+         error_message = str(e)
+         results = None  # Clear partial results on failure
+
+     finally:
+         # --- Publish Result ---
+         task_result_payload = {
+             "task_id": task.task_id,
+             "agent_id": agent_id,
+             "status": final_status,
+             "output": results,
+             "error_message": error_message
+         }
+         # Ensure datetime fields are handled if added to schema
+         result_message = RedisResultMessage(result=task_result_payload)
+         try:
+             await redis_conn.publish(settings.REDIS_RESULTS_CHANNEL, result_message.model_dump_json())
+             logger.info(f"Agent {agent_id} {final_status.lower()} task {task.task_id}")
+         except Exception as pub_e:
+             logger.error(f"Failed to publish result for task {task.task_id}: {pub_e}")
+
+         current_status = AgentStatus.IDLE
+         await redis_conn.close()
+
+ async def listen_for_tasks():
+     """Listens for tasks assigned to this agent type on Redis."""
+     redis_conn = await get_redis_connection()
+     pubsub = redis_conn.pubsub()
+     task_channel = f"{settings.REDIS_TASK_CHANNEL_PREFIX}:{settings.AGENT_TYPE}"
+     await pubsub.subscribe(task_channel)
+     logger.info(f"Agent {agent_id} listening for tasks on {task_channel}")
+
+     while True:
+         try:
+             message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=1.0)
+             if message and message["type"] == "message":
+                 logger.info(f"Received task message on {task_channel}")
+                 try:
+                     task_data = json.loads(message["data"])
+                     task_message = RedisTaskMessage.model_validate(task_data)
+                     if current_status == AgentStatus.IDLE:
+                         asyncio.create_task(process_task(task_message.task))
+                     else:
+                         logger.warning(f"Agent {agent_id} is BUSY, skipping task {task_message.task.task_id}")
+                         # TODO: Implement task queuing or NACK mechanism
+                 except (json.JSONDecodeError, Exception) as e:
+                     logger.error(f"Failed to parse or validate task message: {e}")
+             await asyncio.sleep(0.1)
+         except redis.ConnectionError as e:
+             logger.error(f"Redis connection error while listening: {e}. Reconnecting...")
+             await asyncio.sleep(5)
+             try:
+                 await pubsub.unsubscribe(task_channel)
+                 await redis_conn.close()
+             except Exception: pass
+             redis_conn = await get_redis_connection()
+             pubsub = redis_conn.pubsub()
+             await pubsub.subscribe(task_channel)
+             logger.info(f"Re-subscribed to {task_channel} after connection error.")
+         except Exception as e:
+             logger.error(f"Unexpected error in task listener: {e}", exc_info=True)
+             await asyncio.sleep(5)
+
+ async def send_heartbeat():
+     """Periodically sends a heartbeat signal to Redis."""
+     redis_conn = await get_redis_connection()
+     while True:
+         try:
+             # TODO: Dynamically determine capabilities based on loaded tools/config
+             current_capabilities = agent_capabilities  # Use current state
+             heartbeat_msg = RedisHeartbeatMessage(
+                 heartbeat={
+                     "agent_id": agent_id,
+                     "agent_type": settings.AGENT_TYPE,
+                     "status": current_status,
+                     "capabilities": current_capabilities
+                 }
+             )
+             await redis_conn.publish(settings.REDIS_HEARTBEAT_CHANNEL, heartbeat_msg.model_dump_json())
+             await asyncio.sleep(settings.HEARTBEAT_INTERVAL)
+         except redis.ConnectionError as e:
+             logger.error(f"Redis connection error during heartbeat: {e}. Reconnecting...")
+             await asyncio.sleep(5)
+             try:
+                 await redis_conn.close()
+             except Exception: pass
+             redis_conn = await get_redis_connection()
+             logger.info("Reconnected to Redis for heartbeat.")
+         except Exception as e:
+             logger.error(f"Unexpected error in heartbeat loop: {e}", exc_info=True)
+             await asyncio.sleep(settings.HEARTBEAT_INTERVAL)
+
+ # --- Main Execution ---
+ async def run_agent():
+     logger.info(f"Starting agent {agent_id} of type {settings.AGENT_TYPE}")
+     heartbeat_task = asyncio.create_task(send_heartbeat())
+     listener_task = asyncio.create_task(listen_for_tasks())
+     await asyncio.gather(heartbeat_task, listener_task)
+
+ # Example of how to run (usually triggered from main.py)
+ # if __name__ == "__main__":
+ #     try:
+ #         asyncio.run(run_agent())
+ #     except KeyboardInterrupt:
+ #         logger.info("Agent shutdown requested.")
+
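
Since `listen_for_tasks` subscribes to the `{REDIS_TASK_CHANNEL_PREFIX}:{AGENT_TYPE}` channel and expects a JSON-encoded `RedisTaskMessage`, a running agent can be exercised locally by publishing a message by hand. The sketch below is illustrative only: it assumes the channel resolves to `tasks:CODING` and a minimal payload shape, whereas the authoritative field names live in this repo's `schemas` module.

```python
import asyncio
import json
import uuid

import redis.asyncio as redis

async def publish_test_task():
    # Assumed names: REDIS_TASK_CHANNEL_PREFIX=tasks, AGENT_TYPE=CODING
    conn = redis.from_url("redis://localhost:6379", decode_responses=True)
    message = {
        "task": {
            "task_id": str(uuid.uuid4()),
            "parameters": {
                "prompt": "Write a haiku about Redis.",
                "config": {"llm_model": "llama3"},
            },
        }
    }
    await conn.publish("tasks:CODING", json.dumps(message))
    await conn.close()

asyncio.run(publish_test_task())
```
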
alembic ADDED
@@ -0,0 +1,8 @@
+ #!/Users/lattm/Downloads/home/visionos_venv/bin/python3
+ # -*- coding: utf-8 -*-
+ import re
+ import sys
+ from alembic.config import main
+ if __name__ == '__main__':
+     sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+     sys.exit(main())
api.py CHANGED
@@ -1,311 +1,20 @@
- import os
- from typing import List, Optional
-
- from fastapi import FastAPI, UploadFile, File, HTTPException, APIRouter, Form, Depends, Request
- from fastapi.responses import Response, JSONResponse
- from pydantic import BaseModel
-
- from utils.logger import logger
- from utils.auth_utils import get_current_user_id, get_user_id_from_stream_auth, get_optional_user_id
- from sandbox.sandbox import get_or_start_sandbox
- from services.supabase import DBConnection
- from agent.api import get_or_create_project_sandbox
-
-
- # Initialize shared resources
- router = APIRouter(tags=["sandbox"])
- db = None
-
- def initialize(_db: DBConnection):
-     """Initialize the sandbox API with resources from the main API."""
-     global db
-     db = _db
-     logger.info("Initialized sandbox API with database connection")
-
- class FileInfo(BaseModel):
-     """Model for file information"""
-     name: str
-     path: str
-     is_dir: bool
-     size: int
-     mod_time: str
-     permissions: Optional[str] = None
-
- async def verify_sandbox_access(client, sandbox_id: str, user_id: Optional[str] = None):
-     """
-     Verify that a user has access to a specific sandbox based on account membership.
-
-     Args:
-         client: The Supabase client
-         sandbox_id: The sandbox ID to check access for
-         user_id: The user ID to check permissions for. Can be None for public resource access.
-
-     Returns:
-         dict: Project data containing sandbox information
-
-     Raises:
-         HTTPException: If the user doesn't have access to the sandbox or sandbox doesn't exist
-     """
-     # Find the project that owns this sandbox
-     project_result = await client.table('projects').select('*').filter('sandbox->>id', 'eq', sandbox_id).execute()
-
-     if not project_result.data or len(project_result.data) == 0:
-         raise HTTPException(status_code=404, detail="Sandbox not found")
-
-     project_data = project_result.data[0]
-
-     if project_data.get('is_public'):
-         return project_data
-
-     # For private projects, we must have a user_id
-     if not user_id:
-         raise HTTPException(status_code=401, detail="Authentication required for this resource")
-
-     account_id = project_data.get('account_id')
-
-     # Verify account membership
-     if account_id:
-         account_user_result = await client.schema('basejump').from_('account_user').select('account_role').eq('user_id', user_id).eq('account_id', account_id).execute()
-         if account_user_result.data and len(account_user_result.data) > 0:
-             return project_data
-
-     raise HTTPException(status_code=403, detail="Not authorized to access this sandbox")
-
- async def get_sandbox_by_id_safely(client, sandbox_id: str):
-     """
-     Safely retrieve a sandbox object by its ID, using the project that owns it.
-
-     Args:
-         client: The Supabase client
-         sandbox_id: The sandbox ID to retrieve
-
-     Returns:
-         Sandbox: The sandbox object
-
-     Raises:
-         HTTPException: If the sandbox doesn't exist or can't be retrieved
-     """
-     # Find the project that owns this sandbox
-     project_result = await client.table('projects').select('project_id').filter('sandbox->>id', 'eq', sandbox_id).execute()
-
-     if not project_result.data or len(project_result.data) == 0:
-         logger.error(f"No project found for sandbox ID: {sandbox_id}")
-         raise HTTPException(status_code=404, detail="Sandbox not found - no project owns this sandbox ID")
-
-     project_id = project_result.data[0]['project_id']
-     logger.debug(f"Found project {project_id} for sandbox {sandbox_id}")
-
-     try:
-         # Get the sandbox
-         sandbox, retrieved_sandbox_id, sandbox_pass = await get_or_create_project_sandbox(client, project_id)
-
-         # Verify we got the right sandbox
-         if retrieved_sandbox_id != sandbox_id:
-             logger.warning(f"Retrieved sandbox ID {retrieved_sandbox_id} doesn't match requested ID {sandbox_id} for project {project_id}")
-             # Fall back to the direct method if IDs don't match (shouldn't happen but just in case)
-             sandbox = await get_or_start_sandbox(sandbox_id)
-
-         return sandbox
-     except Exception as e:
-         logger.error(f"Error retrieving sandbox {sandbox_id}: {str(e)}")
-         raise HTTPException(status_code=500, detail=f"Failed to retrieve sandbox: {str(e)}")
-
- @router.post("/sandboxes/{sandbox_id}/files")
- async def create_file(
-     sandbox_id: str,
-     path: str = Form(...),
-     file: UploadFile = File(...),
-     request: Request = None,
-     user_id: Optional[str] = Depends(get_optional_user_id)
- ):
-     """Create a file in the sandbox using direct file upload"""
-     logger.info(f"Received file upload request for sandbox {sandbox_id}, path: {path}, user_id: {user_id}")
-     client = await db.client
-
-     # Verify the user has access to this sandbox
-     await verify_sandbox_access(client, sandbox_id, user_id)
-
-     try:
-         # Get sandbox using the safer method
-         sandbox = await get_sandbox_by_id_safely(client, sandbox_id)
-
-         # Read file content directly from the uploaded file
-         content = await file.read()
-
-         # Create file using raw binary content
-         sandbox.fs.upload_file(path, content)
-         logger.info(f"File created at {path} in sandbox {sandbox_id}")
-
-         return {"status": "success", "created": True, "path": path}
-     except Exception as e:
-         logger.error(f"Error creating file in sandbox {sandbox_id}: {str(e)}")
-         raise HTTPException(status_code=500, detail=str(e))
-
- # For backward compatibility, keep the JSON version too
- @router.post("/sandboxes/{sandbox_id}/files/json")
- async def create_file_json(
-     sandbox_id: str,
-     file_request: dict,
-     request: Request = None,
-     user_id: Optional[str] = Depends(get_optional_user_id)
- ):
-     """Create a file in the sandbox using JSON (legacy support)"""
-     logger.info(f"Received JSON file creation request for sandbox {sandbox_id}, user_id: {user_id}")
-     client = await db.client
-
-     # Verify the user has access to this sandbox
-     await verify_sandbox_access(client, sandbox_id, user_id)
-
-     try:
-         # Get sandbox using the safer method
-         sandbox = await get_sandbox_by_id_safely(client, sandbox_id)
-
-         # Get file path and content
-         path = file_request.get("path")
-         content = file_request.get("content", "")
-
-         if not path:
-             logger.error(f"Missing file path in request for sandbox {sandbox_id}")
-             raise HTTPException(status_code=400, detail="File path is required")
-
-         # Convert string content to bytes
-         if isinstance(content, str):
-             content = content.encode('utf-8')
-
-         # Create file
-         sandbox.fs.upload_file(path, content)
-         logger.info(f"File created at {path} in sandbox {sandbox_id}")
-
-         return {"status": "success", "created": True, "path": path}
-     except Exception as e:
-         logger.error(f"Error creating file in sandbox {sandbox_id}: {str(e)}")
-         raise HTTPException(status_code=500, detail=str(e))
-
- @router.get("/sandboxes/{sandbox_id}/files")
- async def list_files(
-     sandbox_id: str,
-     path: str,
-     request: Request = None,
-     user_id: Optional[str] = Depends(get_optional_user_id)
- ):
-     """List files and directories at the specified path"""
-     logger.info(f"Received list files request for sandbox {sandbox_id}, path: {path}, user_id: {user_id}")
-     client = await db.client
-
-     # Verify the user has access to this sandbox
-     await verify_sandbox_access(client, sandbox_id, user_id)
-
-     try:
-         # Get sandbox using the safer method
-         sandbox = await get_sandbox_by_id_safely(client, sandbox_id)
-
-         # List files
-         files = sandbox.fs.list_files(path)
-         result = []
-
-         for file in files:
-             # Convert file information to our model
-             # Ensure forward slashes are used for paths, regardless of OS
-             full_path = f"{path.rstrip('/')}/{file.name}" if path != '/' else f"/{file.name}"
-             file_info = FileInfo(
-                 name=file.name,
-                 path=full_path,  # Use the constructed path
-                 is_dir=file.is_dir,
-                 size=file.size,
-                 mod_time=str(file.mod_time),
-                 permissions=getattr(file, 'permissions', None)
-             )
-             result.append(file_info)
-
-         logger.info(f"Successfully listed {len(result)} files in sandbox {sandbox_id}")
-         return {"files": [file.dict() for file in result]}
-     except Exception as e:
-         logger.error(f"Error listing files in sandbox {sandbox_id}: {str(e)}")
-         raise HTTPException(status_code=500, detail=str(e))
-
- @router.get("/sandboxes/{sandbox_id}/files/content")
- async def read_file(
-     sandbox_id: str,
-     path: str,
-     request: Request = None,
-     user_id: Optional[str] = Depends(get_optional_user_id)
- ):
-     """Read a file from the sandbox"""
-     logger.info(f"Received file read request for sandbox {sandbox_id}, path: {path}, user_id: {user_id}")
-     client = await db.client
-
-     # Verify the user has access to this sandbox
-     await verify_sandbox_access(client, sandbox_id, user_id)
-
-     try:
-         # Get sandbox using the safer method
-         sandbox = await get_sandbox_by_id_safely(client, sandbox_id)
-
-         # Read file
-         content = sandbox.fs.download_file(path)
-
-         # Return a Response object with the content directly
-         filename = os.path.basename(path)
-         logger.info(f"Successfully read file {filename} from sandbox {sandbox_id}")
-         return Response(
-             content=content,
-             media_type="application/octet-stream",
-             headers={"Content-Disposition": f"attachment; filename={filename}"}
-         )
-     except Exception as e:
-         logger.error(f"Error reading file in sandbox {sandbox_id}: {str(e)}")
-         raise HTTPException(status_code=500, detail=str(e))
-
- @router.post("/project/{project_id}/sandbox/ensure-active")
- async def ensure_project_sandbox_active(
-     project_id: str,
-     request: Request = None,
-     user_id: Optional[str] = Depends(get_optional_user_id)
- ):
-     """
-     Ensure that a project's sandbox is active and running.
-     Checks the sandbox status and starts it if it's not running.
-     """
-     logger.info(f"Received ensure sandbox active request for project {project_id}, user_id: {user_id}")
-     client = await db.client
-
-     # Find the project and sandbox information
-     project_result = await client.table('projects').select('*').eq('project_id', project_id).execute()
-
-     if not project_result.data or len(project_result.data) == 0:
-         logger.error(f"Project not found: {project_id}")
-         raise HTTPException(status_code=404, detail="Project not found")
-
-     project_data = project_result.data[0]
-
-     # For public projects, no authentication is needed
-     if not project_data.get('is_public'):
-         # For private projects, we must have a user_id
-         if not user_id:
-             logger.error(f"Authentication required for private project {project_id}")
-             raise HTTPException(status_code=401, detail="Authentication required for this resource")
-
-         account_id = project_data.get('account_id')
-
-         # Verify account membership
-         if account_id:
-             account_user_result = await client.schema('basejump').from_('account_user').select('account_role').eq('user_id', user_id).eq('account_id', account_id).execute()
-             if not (account_user_result.data and len(account_user_result.data) > 0):
-                 logger.error(f"User {user_id} not authorized to access project {project_id}")
-                 raise HTTPException(status_code=403, detail="Not authorized to access this project")
-
-     try:
-         # Get or create the sandbox
-         logger.info(f"Ensuring sandbox is active for project {project_id}")
-         sandbox, sandbox_id, sandbox_pass = await get_or_create_project_sandbox(client, project_id)
-
-         logger.info(f"Successfully ensured sandbox {sandbox_id} is active for project {project_id}")
-
-         return {
-             "status": "success",
-             "sandbox_id": sandbox_id,
-             "message": "Sandbox is active"
-         }
-     except Exception as e:
-         logger.error(f"Error ensuring sandbox is active for project {project_id}: {str(e)}")
-         raise HTTPException(status_code=500, detail=str(e))
+ # /home/ubuntu/visionos_farm/daedalus/api/v1/api.py
+
+ from fastapi import APIRouter
+
+ # Import endpoint routers
+ from .endpoints import tasks, configurations, tool_sources, api_keys  # Added api_keys
+ # Import other routers like agents when created
+ # from .endpoints import agents
+
+ api_router = APIRouter()
+
+ # Include endpoint routers
+ api_router.include_router(tasks.router, prefix="/tasks", tags=["Tasks"])
+ api_router.include_router(configurations.router, prefix="/configurations", tags=["Configurations"])
+ api_router.include_router(tool_sources.router, prefix="/tool-sources", tags=["Tool Sources"])
+ api_router.include_router(api_keys.router, prefix="/api-keys", tags=["API Keys"])  # Added api_keys router
+ # api_router.include_router(agents.router, prefix="/agents", tags=["Agents"])
+
+ # Add more routers as needed
+
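
For the new `api_router` to serve requests, it still has to be mounted on the FastAPI application. A typical `main.py` would look something like the sketch below; the import path is assumed from this file's location comment and the `daedalus.*` imports elsewhere in the commit, and the `/api/v1` prefix is an assumption, not shown in the diff.

```python
from fastapi import FastAPI

from daedalus.api.v1.api import api_router  # assumed import path

app = FastAPI(title="Daedalus Orchestrator")
app.include_router(api_router, prefix="/api/v1")
```
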
api_keys.py CHANGED
@@ -7,8 +7,8 @@ from fastapi import APIRouter, Depends, HTTPException, status, Security
  from sqlalchemy.orm import Session
 
  from shared import schemas
- from database import models, session
- from crud import crud_api_key
+ from daedalus.database import models, session
+ from daedalus.crud import crud_api_key
  from . import dependencies  # Assuming dependencies.py for auth
 
  router = APIRouter()
architecture_overview.md ADDED
@@ -0,0 +1,89 @@
+ # VisionOS SaaS - Architecture Overview (Refined)
+
+ ## 1. Introduction
+
+ This document outlines the high-level architecture for the VisionOS SaaS platform, refined based on detailed requirements analysis. VisionOS aims to be a comprehensive, AI-driven system capable of software development, automation (browser/computer), and potentially replacing other software stacks, built primarily using Free and Open Source Software (FOSS). It features a no-code customization interface and dynamic extensibility.
+
+ The architecture follows a modular, layered, and iterative design philosophy, emphasizing separation of concerns, scalability, robustness, and user-driven customization.
+
+ ## 2. Core Principles
+
+ - **Modularity:** Components are independent services with well-defined responsibilities.
+ - **Scalability:** Supports horizontal scaling of agents and potentially other services.
+ - **Asynchronicity:** Primary communication via Redis Streams for decoupling and resilience.
+ - **Extensibility:** Dynamically load new agents, tools (via GitHub URLs), and capabilities with robust sandboxing.
+ - **Customizability:** Enable system behavior modification via a no-code UI, driving configuration changes.
+ - **Iterative Development:** Build and deploy in layers, starting with core functionality.
+ - **FOSS-First:** Prioritize open-source technologies (Python, FastAPI, PostgreSQL, Redis, Docker, Kubernetes, React/Next.js, Ollama).
+ - **Production-Grade:** Focus on reliability, error handling, security, logging, and documentation.
+
+ ## 3. High-Level Components (Refined)
+
+ The system comprises the following major components:
+
+ 1. **Vision UI (Frontend):** Web interface (React/Next.js) for user interaction, task submission, monitoring, results viewing, and **no-code system customization** (workflows, agent parameters, tool selection).
+ 2. **Daedalus (Orchestrator - Backend API):** Central control plane (FastAPI). Manages tasks, coordinates agents, handles UI interactions via REST API, manages system/user configurations (driven by the no-code UI), interacts with persistence/messaging, and exposes an **external integration API**.
+ 3. **Agents (Backend Services):** Specialized, independent Python services (containerized). Execute tasks based on configuration, listen via Redis Streams, utilize tools (including LLMs), and interact with the **Extensibility Framework**.
+ 4. **Local Agent (Client-Side Component):** (Future Implementation) A secure component installed on the user's machine (e.g., macOS) to execute local automation tasks (terminal, file system, application control like VS Code) under the direction of backend Agents via a secure channel.
+ 5. **Shared Library (Python):** Common Python package for data models (Pydantic), utilities, communication helpers, and base classes.
+ 6. **Persistence Layer (Database):** PostgreSQL storing tasks, configurations (for the no-code UI), agent registry, user data, knowledge base artifacts, and artifact metadata.
+ 7. **Messaging/Cache Layer (Broker):** Redis for asynchronous task queuing (Streams), result passing, potentially caching, and configuration updates.
+ 8. **Extensibility Framework:** Backend mechanism (managed by Daedalus/Agents) to securely fetch, sandbox (e.g., subprocesses, containers), and execute tools/capabilities defined by GitHub URLs.
+ 9. **AI/LLM Integration:** Framework for agents to interact with local LLMs (Ollama) and external APIs (securely configured via the backend).
+ 10. **Knowledge Base/Search:** Components for storing, indexing (e.g., vector embeddings in the DB or a dedicated vector store), and searching across documents, code, and web results (Deep Search).
+ 11. **Artifact Management:** System for storing and managing generated artifacts (code, docs, images), potentially using object storage (like MinIO or cloud provider services) linked from the database.
+ 12. **Deployment Infrastructure:** Containerization (Docker), Orchestration (Kubernetes recommended), Ingress/Proxy (Nginx/Traefik).
+
+ ## 4. Refined Communication & Interaction Flows
+
+ ### 4.1 Basic Task Flow (Unchanged)
+
+ (Remains as previously defined: User -> UI -> Daedalus -> Redis -> Agent -> LLM/Tools -> Redis -> Daedalus -> UI -> User)
+
+ ### 4.2 No-Code Customization Flow
+
+ ```mermaid
+ sequenceDiagram
+     participant User
+     participant VisionUI
+     participant DaedalusAPI
+     participant Database
+     participant Redis
+     participant Agent
+
+     User->>VisionUI: Modifies configuration (e.g., agent parameter) via No-Code Interface
+     VisionUI->>DaedalusAPI: PUT /configurations/{config_key} (New Value)
+     DaedalusAPI->>Database: Store/Update Configuration Record
+     DaedalusAPI->>Redis: (Optional) Publish config update event (Pub/Sub or Stream)
+     Note over DaedalusAPI, Agent: Agents might fetch config on task start or listen for updates
+     Agent->>DaedalusAPI: (On Task Start) GET /configurations/{config_key}
+     DaedalusAPI->>Database: Retrieve Configuration
+     Database-->>DaedalusAPI: Configuration Value
+     DaedalusAPI-->>Agent: Configuration Value
+     Agent->>Agent: Adjust behavior based on new configuration
+ ```
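
The `PUT /configurations/{config_key}` call in this flow maps onto a small FastAPI endpoint in Daedalus. A minimal sketch follows; it is illustrative only (the real handler lives in `configurations.py` and persists via SQLAlchemy/PostgreSQL, not the in-memory dict used here, and the value schema is assumed):

```python
from fastapi import APIRouter
from pydantic import BaseModel

router = APIRouter()

class ConfigValue(BaseModel):
    value: str  # assumed shape; real configs may be structured JSON

# In-memory stand-in for the PostgreSQL-backed configuration store
_config_store: dict[str, str] = {}

@router.put("/configurations/{config_key}")
async def update_configuration(config_key: str, body: ConfigValue):
    _config_store[config_key] = body.value
    # Optionally publish a config-update event to Redis here (see diagram)
    return {"key": config_key, "value": body.value}

@router.get("/configurations/{config_key}")
async def get_configuration(config_key: str):
    return {"key": config_key, "value": _config_store.get(config_key)}
```
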
+
+ ### 4.3 Local Automation Flow (Conceptual)
+
+ ```mermaid
+ sequenceDiagram
+     participant BackendAgent
+     participant DaedalusAPI
+     participant SecureChannel
+     participant LocalAgent (on User's Mac)
+     participant LocalApp (e.g., VS Code)
+
+     BackendAgent->>DaedalusAPI: Request local action (e.g., run terminal command)
+     DaedalusAPI->>SecureChannel: Forward action request to specific LocalAgent
+     SecureChannel->>LocalAgent: Deliver action request
+     LocalAgent->>LocalApp: Execute action (e.g., via AppleScript, CLI, API)
+     LocalApp-->>LocalAgent: Action Result/Status
+     LocalAgent->>SecureChannel: Send result back
+     SecureChannel->>DaedalusAPI: Forward result
+     DaedalusAPI->>BackendAgent: Deliver result
+ ```
+
+ ## 5. Next Steps
+
+ Refine the detailed architecture documents (Backend, Frontend, Data, Deployment) to reflect these changes, focusing in particular on the interfaces and data models required for the no-code customization, extensibility framework, local agent communication, and external API.
+
backend_architecture.md ADDED
@@ -0,0 +1,153 @@
+ # VisionOS SaaS - Backend Architecture (Refined)
+
+ ## 1. Introduction
+
+ This document details the architecture for the backend components of the VisionOS SaaS platform, including the Daedalus Orchestrator, the Agent Services, the Shared Library, the Extensibility Framework, and interactions with other backend systems. It builds upon the refined high-level overview in `architecture_overview.md`.
+
+ ## 2. Daedalus (Orchestrator)
+
+ ### 2.1 Role (Expanded)
+
+ Daedalus serves as the central API and control plane for VisionOS.
+
+ - **API Gateway:** Exposes RESTful APIs for frontend interactions (tasks, status, results, **configurations**) and an **external integration API**.
+ - **Task Management:** Receives, validates, and persists tasks (DB), assigns them to agents (Redis Streams), and tracks status.
+ - **Agent Coordination:** Manages the agent registry, potentially routing tasks based on capabilities.
+ - **Configuration Management:** Manages system-wide and agent-specific configurations, providing APIs for the no-code UI to modify them. Stores configurations persistently (DB).
+ - **State Management:** Maintains overall task state and configuration state.
+ - **Result Handling:** Consumes results from agents (Redis Streams) and updates the DB.
+ - **Extensibility Framework Management:** May coordinate the loading/updating of tools within the extensibility framework.
+ - **Local Agent Communication:** Relays requests from backend agents to the appropriate Local Agent via a secure channel.
+ - **External API Authentication:** Manages API keys for external system integration.
+ - **Notifications:** (Optional/Future) Pushes real-time updates (task status, config changes) to the frontend (WebSockets).
+
+ ### 2.2 Technology Stack
+
+ - **Framework:** FastAPI (Python)
+ - **Server:** Uvicorn
+ - **Data Validation:** Pydantic
+ - **Database Interaction:** SQLAlchemy (async) + AsyncPG
+ - **Messaging:** Redis-py (async)
+ - **Configuration:** Pydantic-Settings (see the settings sketch below)
+
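The settings object referenced throughout the codebase (e.g., `settings.REDIS_HOST` in `agent_logic.py`) would come from a Pydantic-Settings class along these lines. This is a sketch with assumed defaults, since `config.py` itself is not part of this excerpt; the field names are taken from their usage in `agent_logic.py`:

```python
from pydantic_settings import BaseSettings

class Settings(BaseSettings):
    REDIS_HOST: str = "localhost"
    REDIS_PORT: int = 6379
    REDIS_TASK_CHANNEL_PREFIX: str = "tasks"
    REDIS_RESULTS_CHANNEL: str = "results"
    REDIS_HEARTBEAT_CHANNEL: str = "heartbeats"
    HEARTBEAT_INTERVAL: int = 10  # seconds; assumed default
    AGENT_TYPE: str = "CODING"

settings = Settings()  # values can be overridden via environment variables
```
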
33
+ ### 2.3 Key Modules/Components (Expanded)
34
+
35
+ - **API Endpoints (`api/v1/endpoints/`):** Routers for `tasks.py`, `agents.py`, **`configurations.py`**, **`external_api.py`**.
36
+ - **Schemas (`schemas/`):** Pydantic models for API requests/responses, DTOs, **configuration structures**.
37
+ - **Database (`database/`):** SQLAlchemy models (`models.py`), session management (`session.py`), CRUD operations.
38
+ - **Core (`core/`):** Configuration (`config.py`), security (API key handling, encryption), core utilities.
39
+ - **Orchestrator Logic (`orchestrator/`):** Task processing, agent communication, result handling, **configuration propagation**, **local agent request relay**.
40
+ - **Extensibility (`extensibility/`):** Modules related to managing dynamically loaded tools (if managed centrally).
41
+ - **Main Application (`main.py`):** FastAPI setup, middleware, routers, startup/shutdown events.
42
+
43
+ ### 2.4 Communication (Expanded)
44
+
45
+ - **Frontend <-> Daedalus:** Synchronous REST API calls (HTTPS).
46
+ - **Daedalus <-> Database:** Asynchronous SQL queries.
47
+ - **Daedalus <-> Redis:** Asynchronous commands (Streams for tasks/results, potentially Pub/Sub for config updates).
48
+ - **Daedalus -> Agents:** Indirectly via Redis Streams (tasks, potentially config updates).
49
+ - **Daedalus <- Agents:** Indirectly via Redis Streams (results).
50
+ - **Daedalus <-> Local Agent:** Via a secure channel (e.g., WebSocket, gRPC with mTLS - specific protocol TBD).
51
+ - **External Systems -> Daedalus:** Via secured REST API endpoints.
52
+
53
+ ## 3. Agent Services
54
+
55
+ ### 3.1 Role (Expanded)
56
+
57
+ Agents are specialized, independent, **configurable** backend services executing tasks.
58
+
59
+ - **Task Execution:** Consume tasks from Redis Streams, driven by parameters/configuration received with the task or fetched from Daedalus.
60
+ - **Tool Utilization:** Interact with internal tools (Knowledge Base, Artifact Management), the **Extensibility Framework** (dynamically loaded tools), external APIs (LLMs), libraries, and system resources.
61
+ - **LLM Interaction:** Utilize local Ollama or external LLM APIs based on configuration.
62
+ - **Knowledge Base Interaction:** Query knowledge base for context, potentially contribute new artifacts.
63
+ - **Artifact Management:** Store/retrieve generated artifacts (code, docs) via dedicated service/storage.
64
+ - **Local Task Delegation:** Request execution of local tasks via Daedalus to the Local Agent.
65
+ - **State Tracking & Result Publication:** Maintain task state, publish results/errors/logs/reasoning steps ("think mode" data) to Redis `result_stream`.
66
+ - **Error Handling:** Robust error handling and reporting.
67
+
68
+ ### 3.2 Architecture (Expanded)
69
+
70
+ - **Structure:** Separate Python applications/services, inheriting from a base agent class in `visionos_shared`.
71
+ - **Technology:** Python 3.11+, `asyncio`, `redis-py` (async), `httpx`, `ollama`, domain-specific libraries. **Parameter-driven logic** using configurations.
72
+ - **Deployment:** Containerized (Docker), orchestrated (Kubernetes).
73
+ - **Communication:** Asynchronous via Redis Streams. May make synchronous API calls to Daedalus for configuration or specific data if needed.
74
+
75
+ ### 3.3 Example Agents (Expanded List)
76
+
77
+ - **Coding Agent:** Code generation, modification, debugging, repo interactions.
78
+ - **UI Agent:** Frontend code generation (React, Vue, etc.).
79
+ - **Ops Agent:** Deployment, infrastructure (IaC tools, Docker SDK, K8s client), monitoring.
80
+ - **Error Analysis Agent:** Log/error analysis, debugging assistance.
81
+ - **Context Assist Agent:** Knowledge base interaction, context retrieval/management.
82
+ - **Data Science Agent:** Data analysis, visualization, model training/interaction (using relevant libraries).
83
+ - **Vision Agent:** Image analysis, generation, manipulation (using relevant libraries/APIs).
84
+ - **NLP Agent:** Advanced text processing, summarization, translation (using relevant libraries/models).
85
+ - **Research Agent:** Web browsing, information gathering, summarization, citation.
86
+ - **Planning Agent:** Task decomposition, workflow generation, coordination between other agents for complex tasks.
87
+ - **Web Browsing Agent:** Dedicated agent for interacting with web pages (using Playwright/Selenium).
88
+ - **(Future) Local Control Agent:** Backend counterpart communicating with the client-side Local Agent.
89
+
90
+ ### 3.4 Agent Lifecycle & Communication (Refined)
91
+
92
+ 1. **Startup:** Connect to Redis, join consumer group.
93
+ 2. **Listening:** Block on `XREADGROUP`.
94
+ 3. **Task Received:** Get task message (includes Task ID, input data, potentially initial configuration).
95
+ 4. **(Optional) Fetch Config:** Agent may fetch additional/updated configuration from Daedalus API based on task context.
96
+ 5. **Processing:** Execute task using configured tools, LLMs, knowledge base, artifact storage, potentially requesting local actions via Daedalus.
97
+ 6. **Result Publishing:** Publish result, status, logs, errors, **reasoning steps** to `result_stream`.
98
+ 7. **Acknowledgement:** `XACK` task message.
99
+ 8. **Loop:** Return to listening.
100
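+
+ The loop above can be sketched with `redis-py`'s async Streams API. The stream names follow this document; the group and consumer names are hypothetical, and a real agent would add error handling, configuration fetching, and reasoning capture:
+
+ ```python
+ import asyncio
+
+ import redis.asyncio as redis
+ from redis.exceptions import ResponseError
+
+ TASK_STREAM = "visionos_task_stream"
+ RESULT_STREAM = "visionos_result_stream"
+ GROUP, CONSUMER = "coding_agents", "coding-agent-1"  # hypothetical names
+
+ async def run_agent() -> None:
+     r = redis.from_url("redis://localhost:6379/0", decode_responses=True)
+     try:
+         # Step 1: join (or create) the consumer group.
+         await r.xgroup_create(TASK_STREAM, GROUP, id="0", mkstream=True)
+     except ResponseError:
+         pass  # group already exists
+     while True:
+         # Step 2: block until a new task is assigned to this consumer.
+         batches = await r.xreadgroup(GROUP, CONSUMER, {TASK_STREAM: ">"},
+                                      count=1, block=5000)
+         for _stream, entries in batches or []:
+             for msg_id, fields in entries:
+                 # Steps 3-5: real processing (config, tools, LLMs) goes here.
+                 # Step 6: publish the outcome for Daedalus to consume.
+                 await r.xadd(RESULT_STREAM, {"task_id": fields.get("task_id", ""),
+                                              "status": "completed"})
+                 # Step 7: acknowledge so the message leaves the pending list.
+                 await r.xack(TASK_STREAM, GROUP, msg_id)
+
+ if __name__ == "__main__":
+     asyncio.run(run_agent())
+ ```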
+
101
+ ## 4. Shared Library (`visionos_shared`)
102
+
103
+ ### 4.1 Role (Expanded)
104
+
105
+ Provides common code, data structures, and utilities.
106
+
107
+ - **Data Models:** Pydantic models for tasks, results, agent registration, **configurations**, **artifacts**, **knowledge entries**, **tool definitions**.
108
+ - **Communication Utilities:** Redis Streams helpers.
109
+ - **Constants & Enums:** Task statuses, stream names, agent types, **configuration keys**.
110
+ - **Base Classes:** Base `Agent` class, potentially base `Tool` class for extensibility framework.
111
+ - **Logging Configuration:** Standardized setup.
112
+ - **Security Utilities:** Common functions for secure operations (if any).
113
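+
+ As an illustration, the shared Pydantic models might look like the sketch below; field names mirror the schemas described in this document but are not the final `visionos_shared` definitions:
+
+ ```python
+ from datetime import datetime
+ from enum import Enum
+ from typing import Any, Dict, List, Optional
+ from uuid import UUID, uuid4
+
+ from pydantic import BaseModel, Field
+
+ class TaskStatus(str, Enum):
+     PENDING = "pending"
+     RUNNING = "running"
+     COMPLETED = "completed"
+     FAILED = "failed"
+
+ class Task(BaseModel):
+     id: UUID = Field(default_factory=uuid4)
+     agent_type: str
+     input_data: Dict[str, Any]
+     status: TaskStatus = TaskStatus.PENDING
+     submitted_at: datetime = Field(default_factory=datetime.utcnow)
+
+ class TaskResult(BaseModel):
+     task_id: UUID
+     status: TaskStatus
+     result_data: Optional[Dict[str, Any]] = None
+     reasoning_steps: Optional[List[str]] = None  # "think mode" trace
+     error_message: Optional[str] = None
+ ```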
+
114
+ ### 4.2 Implementation
115
+
116
+ - Standard Python package, installed as a dependency.
117
+
118
+ ## 5. Extensibility Framework
119
+
120
+ ### 5.1 Role
121
+
122
+ Enables dynamic loading and execution of tools defined in external Git repositories.
123
+
124
+ - **Tool Discovery/Fetching:** Mechanism to fetch code/definitions from a specified GitHub URL.
125
+ - **Sandboxing:** **CRITICAL:** Execute fetched tool code in a secure, isolated environment (e.g., dedicated subprocess with restricted permissions, `seccomp` profiles, potentially Docker containers per execution) to mitigate risks.
126
+ - **Interface:** Define a standard interface (`Tool` base class) that dynamically loaded tools must implement.
127
+ - **Execution:** Agents utilize the framework to invoke specific tools with necessary inputs.
128
+ - **Management:** (Potentially via Daedalus/UI) Interface to manage registered tool sources.
129
+
130
+ ### 5.2 Implementation
131
+
132
+ - Likely involves Python modules within Agents or Daedalus using libraries like `gitpython` for fetching, `subprocess` or container runtimes for sandboxing, and `importlib` for dynamic loading (within the sandbox).
133
+
134
+ ## 6. Local Agent Communication Channel
135
+
136
+ ### 6.1 Role
137
+
138
+ Provides a secure, bidirectional communication path between the backend (Daedalus) and the Local Agent running on the user's machine.
139
+
140
+ ### 6.2 Implementation Considerations
141
+
142
+ - **Protocol:** WebSockets over TLS (WSS) or gRPC with mTLS are both suitable options - specific protocol TBD.
143
+ - **Security:** Requires robust authentication and authorization to ensure only the legitimate backend can command the Local Agent and only the correct Local Agent responds. Connection initiation likely from Local Agent to Backend.
144
+ - **Reliability:** Needs to handle disconnections and reconnections gracefully.
145
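+
+ One way this could look on the backend is a FastAPI WebSocket endpoint; the header name and static token below are placeholders for a real per-agent credential scheme:
+
+ ```python
+ from fastapi import FastAPI, WebSocket, WebSocketDisconnect, status
+
+ app = FastAPI()
+ EXPECTED_TOKEN = "replace-with-per-agent-credential"  # placeholder only
+
+ @app.websocket("/local-agent/ws")
+ async def local_agent_channel(websocket: WebSocket):
+     # The Local Agent initiates the connection and must authenticate first.
+     if websocket.headers.get("authorization") != f"Bearer {EXPECTED_TOKEN}":
+         await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
+         return
+     await websocket.accept()
+     try:
+         while True:
+             # Bidirectional exchange: relay commands/results as JSON messages.
+             message = await websocket.receive_json()
+             await websocket.send_json({"ack": message.get("id")})
+     except WebSocketDisconnect:
+         # Reconnection is driven by the Local Agent; server-side cleanup here.
+         pass
+ ```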
+
146
+ ## 7. Key Considerations (Expanded)
147
+
148
+ - **Error Handling:** Comprehensive error handling across all components, including tool execution failures in the extensibility framework and local agent communication issues.
149
+ - **Configuration Propagation:** Efficiently update agents when configurations change via the no-code UI (e.g., Redis Pub/Sub or fetching on task start).
150
+ - **Security:** Secure external API, secure local agent channel, robust sandboxing for extensibility, secure credential management (backend-only).
151
+ - **Scalability:** Horizontal scaling of agents. Consider potential bottlenecks in Daedalus, Database, Redis.
152
+ - **Monitoring & Logging:** Structured logging including task IDs, agent IDs, tool usage. Metrics for queue lengths, task durations, error rates, resource usage per agent/tool.
153
+
config.py CHANGED
@@ -1,164 +1,73 @@
1
- """
2
- Configuration management.
3
 
4
- This module provides a centralized way to access configuration settings and
5
- environment variables across the application. It supports different environment
6
- modes (development, staging, production) and provides validation for required
7
- values.
8
 
9
- Usage:
10
- from utils.config import config
11
-
12
- # Access configuration values
13
- api_key = config.OPENAI_API_KEY
14
- env_mode = config.ENV_MODE
15
- """
16
 
17
- import os
18
- from enum import Enum
19
- from typing import Dict, Any, Optional, get_type_hints, Union
20
- from dotenv import load_dotenv
21
22
 
23
- logger = logging.getLogger(__name__)
 
 
 
24
 
25
- class EnvMode(Enum):
26
- """Environment mode enumeration."""
27
- LOCAL = "local"
28
- STAGING = "staging"
29
- PRODUCTION = "production"
30
 
31
- class Configuration:
32
- """
33
- Centralized configuration for AgentPress backend.
34
-
35
- This class loads environment variables and provides type checking and validation.
36
- Default values can be specified for optional configuration items.
37
- """
38
-
39
- # Environment mode
40
- ENV_MODE: EnvMode = EnvMode.LOCAL
41
-
42
- # LLM API keys
43
- ANTHROPIC_API_KEY: str = None
44
- OPENAI_API_KEY: Optional[str] = None
45
- GROQ_API_KEY: Optional[str] = None
46
- OPENROUTER_API_KEY: Optional[str] = None
47
- OPENROUTER_API_BASE: Optional[str] = "https://openrouter.ai/api/v1"
48
- OR_SITE_URL: Optional[str] = None
49
- OR_APP_NAME: Optional[str] = "Suna.so"
50
-
51
- # AWS Bedrock credentials
52
- AWS_ACCESS_KEY_ID: Optional[str] = None
53
- AWS_SECRET_ACCESS_KEY: Optional[str] = None
54
- AWS_REGION_NAME: Optional[str] = None
55
-
56
- # Model configuration
57
- MODEL_TO_USE: Optional[str] = "anthropic/claude-3-7-sonnet-latest"
58
-
59
- # Supabase configuration
60
- SUPABASE_URL: str
61
- SUPABASE_ANON_KEY: str
62
- SUPABASE_SERVICE_ROLE_KEY: str
63
-
64
- # Redis configuration
65
- REDIS_HOST: str
66
- REDIS_PORT: int = 6379
67
- REDIS_PASSWORD: str
68
- REDIS_SSL: bool = True
69
-
70
- # Daytona sandbox configuration
71
- DAYTONA_API_KEY: str
72
- DAYTONA_SERVER_URL: str
73
- DAYTONA_TARGET: str
74
-
75
- # Search and other API keys
76
- TAVILY_API_KEY: str
77
- RAPID_API_KEY: str
78
- CLOUDFLARE_API_TOKEN: Optional[str] = None
79
- FIRECRAWL_API_KEY: str
80
-
81
- # Stripe configuration
82
- STRIPE_SECRET_KEY: Optional[str] = None
83
- STRIPE_DEFAULT_PLAN_ID: Optional[str] = None
84
- STRIPE_DEFAULT_TRIAL_DAYS: int = 14
85
-
86
-
87
- def __init__(self):
88
- """Initialize configuration by loading from environment variables."""
89
- # Load environment variables from .env file if it exists
90
- load_dotenv()
91
-
92
- # Set environment mode first
93
- env_mode_str = os.getenv("ENV_MODE", EnvMode.LOCAL.value)
94
- try:
95
- self.ENV_MODE = EnvMode(env_mode_str.lower())
96
- except ValueError:
97
- logger.warning(f"Invalid ENV_MODE: {env_mode_str}, defaulting to LOCAL")
98
- self.ENV_MODE = EnvMode.LOCAL
99
-
100
- logger.info(f"Environment mode: {self.ENV_MODE.value}")
101
-
102
- # Load configuration from environment variables
103
- self._load_from_env()
104
-
105
- # Perform validation
106
- self._validate()
107
-
108
- def _load_from_env(self):
109
- """Load configuration values from environment variables."""
110
- for key, expected_type in get_type_hints(self.__class__).items():
111
- env_val = os.getenv(key)
112
-
113
- if env_val is not None:
114
- # Convert environment variable to the expected type
115
- if expected_type == bool:
116
- # Handle boolean conversion
117
- setattr(self, key, env_val.lower() in ('true', 't', 'yes', 'y', '1'))
118
- elif expected_type == int:
119
- # Handle integer conversion
120
- try:
121
- setattr(self, key, int(env_val))
122
- except ValueError:
123
- logger.warning(f"Invalid value for {key}: {env_val}, using default")
124
- elif expected_type == EnvMode:
125
- # Already handled for ENV_MODE
126
- pass
127
- else:
128
- # String or other type
129
- setattr(self, key, env_val)
130
-
131
- def _validate(self):
132
- """Validate configuration based on type hints."""
133
- # Get all configuration fields and their type hints
134
- type_hints = get_type_hints(self.__class__)
135
-
136
- # Find missing required fields
137
- missing_fields = []
138
- for field, field_type in type_hints.items():
139
- # Check if the field is Optional
140
- is_optional = hasattr(field_type, "__origin__") and field_type.__origin__ is Union and type(None) in field_type.__args__
141
-
142
- # If not optional and value is None, add to missing fields
143
- if not is_optional and getattr(self, field) is None:
144
- missing_fields.append(field)
145
-
146
- if missing_fields:
147
- error_msg = f"Missing required configuration fields: {', '.join(missing_fields)}"
148
- logger.error(error_msg)
149
- raise ValueError(error_msg)
150
-
151
- def get(self, key: str, default: Any = None) -> Any:
152
- """Get a configuration value with an optional default."""
153
- return getattr(self, key, default)
154
-
155
- def as_dict(self) -> Dict[str, Any]:
156
- """Return configuration as a dictionary."""
157
- return {
158
- key: getattr(self, key)
159
- for key in get_type_hints(self.__class__).keys()
160
- if not key.startswith('_')
161
- }
162
 
163
- # Create a singleton instance
164
- config = Configuration()
 
1
+ # /home/ubuntu/visionos_farm/daedalus/core/config.py
 
2
 
3
+ import os
4
+ from typing import Optional, Any
5
+ from pydantic_settings import BaseSettings
6
+ from pydantic import PostgresDsn, RedisDsn
7
 
8
+ class Settings(BaseSettings):
9
+ PROJECT_NAME: str = "VisionOS Daedalus"
10
+ API_V1_STR: str = "/api/v1"
11
 
12
+ # Database Configuration
13
+ POSTGRES_SERVER: str = os.getenv("POSTGRES_SERVER", "localhost")
14
+ POSTGRES_PORT: str = os.getenv("POSTGRES_PORT", "5432")
15
+ POSTGRES_USER: str = os.getenv("POSTGRES_USER", "visionos_user")
16
+ POSTGRES_PASSWORD: str = os.getenv("POSTGRES_PASSWORD", "visionos_password")
17
+ POSTGRES_DB: str = os.getenv("POSTGRES_DB", "visionos_db")
18
+ # Asynchronous database connection string
19
+ DATABASE_URL: Optional[PostgresDsn] = None
20
+
21
+ # Redis Configuration
22
+ REDIS_HOST: str = os.getenv("REDIS_HOST", "localhost")
23
+ REDIS_PORT: int = int(os.getenv("REDIS_PORT", 6379))
24
+ REDIS_DB: int = int(os.getenv("REDIS_DB", 0))
25
+ REDIS_PASSWORD: Optional[str] = os.getenv("REDIS_PASSWORD", None)
26
+ REDIS_URL: Optional[RedisDsn] = None
27
+
28
+ # Redis Stream Names
29
+ REDIS_TASK_STREAM: str = "visionos_task_stream"
30
+ REDIS_RESULT_STREAM: str = "visionos_result_stream"
31
+ REDIS_CONFIG_UPDATE_CHANNEL: str = "visionos_config_updates"
32
+
33
+ # Agent Configuration
34
+ AGENT_HEARTBEAT_INTERVAL: int = 30 # seconds
35
+
36
+ # Security
37
+ SECRET_KEY: str = os.getenv("SECRET_KEY", "a_very_secret_key_that_should_be_changed") # CHANGE THIS!
38
+ # Define algorithm for JWT if using token auth
39
+ # ALGORITHM: str = "HS256"
40
+ # ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
41
+
42
+ # CORS Origins (Update in production)
43
+ BACKEND_CORS_ORIGINS: list[str] = ["http://localhost", "http://localhost:3000", "http://127.0.0.1:3000"]
44
 
45
+ class Config:
46
+ case_sensitive = True
47
+ env_file = ".env"
48
+ env_file_encoding = "utf-8"
49
 
50
+ def __init__(self, **values: Any):
51
+ super().__init__(**values)
52
+ # Construct DATABASE_URL from components if not set directly
53
+ if self.DATABASE_URL is None:
54
+ self.DATABASE_URL = PostgresDsn.build(
55
+ scheme="postgresql+asyncpg",
56
+ username=self.POSTGRES_USER,
57
+ password=self.POSTGRES_PASSWORD,
58
+ host=self.POSTGRES_SERVER,
59
+ port=int(self.POSTGRES_PORT),
60
+ path=self.POSTGRES_DB,
61
+ )
62
+ # Construct REDIS_URL from components if not set directly
63
+ if self.REDIS_URL is None:
64
+ self.REDIS_URL = RedisDsn.build(
65
+ scheme="redis",
66
+ host=self.REDIS_HOST,
67
+ port=self.REDIS_PORT,
68
+ password=self.REDIS_PASSWORD,
69
+ path=f"/{self.REDIS_DB}",
70
+ )
71
 
72
+ settings = Settings()
 
configurations.py CHANGED
@@ -7,9 +7,9 @@ from sqlalchemy.ext.asyncio import AsyncSession
7
  import redis.asyncio as redis
8
 
9
  from shared import schemas
10
- from crud import crud_config
11
- from database.session import get_db
12
- from core.config import settings
13
 
14
  router = APIRouter()
15
 
 
7
  import redis.asyncio as redis
8
 
9
  from shared import schemas
10
+ from daedalus.crud import crud_config
11
+ from daedalus.database.session import get_db
12
+ from daedalus.core.config import settings
13
 
14
  router = APIRouter()
15
 
data_architecture.md ADDED
@@ -0,0 +1,155 @@
1
+ # VisionOS SaaS - Data Architecture (Refined)
2
+
3
+ ## 1. Introduction
4
+
5
+ This document outlines the data architecture for the VisionOS SaaS platform, covering persistent storage (PostgreSQL), messaging/cache (Redis), and potentially artifact/vector storage. It complements the refined overall, backend, and frontend architecture documents.
6
+
7
+ ## 2. Persistence Layer (PostgreSQL)
8
+
9
+ ### 2.1 Role (Expanded)
10
+
11
+ PostgreSQL serves as the primary persistent storage for structured data.
12
+
13
+ - **Task Data:** Task details, inputs, status history, results, logs, reasoning steps.
14
+ - **Configuration Data:** Stores system-wide settings, agent parameters, tool configurations, and workflow definitions managed via the no-code UI.
15
+ - **Agent Registry:** (Future) Agent information, capabilities, status.
16
+ - **User Data:** (If implemented) User profiles, authentication (hashed credentials), preferences, **encrypted external API keys** (alternative to secrets manager).
17
+ - **Project/Workflow State:** Definitions and states of complex workflows.
18
+ - **Knowledge Base Metadata:** Stores structured metadata, indexed content, source URIs for knowledge artifacts. Vector embeddings might be stored here (using `pgvector`) or in a dedicated vector database.
19
+ - **Artifact Metadata:** Stores metadata about generated artifacts (code, docs, images), linking to their actual storage location (e.g., object storage URI).
20
+
21
+ ### 2.2 Technology
22
+
23
+ - **Database:** PostgreSQL (Version 15+).
24
+ - **Vector Support:** (Optional) `pgvector` extension if storing embeddings directly in PostgreSQL.
25
+ - **Interaction Library:** SQLAlchemy (async 2.0+) + `asyncpg`.
26
+ - **Schema Migrations:** Alembic.
27
+
28
+ ### 2.3 Key Schemas (Expanded & Refined)
29
+
30
+ - **`tasks` table:**
31
+ - `id` (UUID, PK)
32
+ - `user_id` (UUID, FK, nullable)
33
+ - `submitted_at` (TimestampTZ)
34
+ - `status` (String/Enum)
35
+ - `input_data` (JSONB)
36
+ - `result_data` (JSONB, nullable)
37
+ - `error_message` (Text, nullable)
38
+ - `logs` (JSONB or Text[], nullable)
39
+ - **`reasoning_steps`** (JSONB, nullable) - For "think mode"
40
+ - `configuration_snapshot` (JSONB, nullable) - Config used for this task
41
+ - **`configurations` table:**
42
+ - `key` (String, PK) - e.g., "agent.coding.model_name", "ui.theme"
43
+ - `value` (JSONB or Text) - The configuration value
44
+ - `description` (Text, nullable)
45
+ - `scope` (String/Enum: system, user, agent_type)
46
+ - `updated_at` (TimestampTZ)
47
+ - **`users` table:** (If implemented)
48
+ - `id` (UUID, PK)
49
+ - `username` (String, Unique)
50
+ - `hashed_password` (String)
51
+ - **`encrypted_api_keys`** (JSONB, nullable) - Encrypted keys for external services
52
+ - **`knowledge_artifacts` table:**
53
+ - `id` (UUID, PK)
54
+ - `source_uri` (String)
55
+ - `artifact_type` (String/Enum)
56
+ - `content_summary` (Text, nullable)
57
+ - `metadata` (JSONB)
58
+ - **`embedding`** (Vector, nullable) - Requires `pgvector` or link to external store
59
+ - `created_at` (TimestampTZ)
60
+ - **`generated_artifacts` table:**
61
+ - `id` (UUID, PK)
62
+ - `task_id` (UUID, FK)
63
+ - `artifact_name` (String)
64
+ - `artifact_type` (String/Enum: code, document, image)
65
+ - **`storage_uri`** (String) - Link to actual file in object storage
66
+ - `metadata` (JSONB)
67
+ - `created_at` (TimestampTZ)
68
+ - **`tool_sources` table:** (For Extensibility Framework)
69
+ - `id` (UUID, PK)
70
+ - `github_url` (String, Unique)
71
+ - `description` (Text, nullable)
72
+ - `status` (String/Enum: active, inactive, error)
73
+ - `last_checked_at` (TimestampTZ)
74
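+
+ As a sketch, the `tasks` table above could be mapped with SQLAlchemy 2.0 roughly as follows (column names follow the list; other details are assumptions):
+
+ ```python
+ import uuid
+ from datetime import datetime
+
+ from sqlalchemy import DateTime, String, Text, func
+ from sqlalchemy.dialects.postgresql import JSONB, UUID
+ from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
+
+ class Base(DeclarativeBase):
+     pass
+
+ class Task(Base):
+     __tablename__ = "tasks"
+
+     id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True),
+                                           primary_key=True, default=uuid.uuid4)
+     user_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True))
+     submitted_at: Mapped[datetime] = mapped_column(DateTime(timezone=True),
+                                                    server_default=func.now())
+     status: Mapped[str] = mapped_column(String(32), index=True)
+     input_data: Mapped[dict] = mapped_column(JSONB)
+     result_data: Mapped[dict | None] = mapped_column(JSONB)
+     error_message: Mapped[str | None] = mapped_column(Text)
+     reasoning_steps: Mapped[dict | None] = mapped_column(JSONB)
+     configuration_snapshot: Mapped[dict | None] = mapped_column(JSONB)
+ ```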
+
75
+ ### 2.4 Considerations
76
+
77
+ - **Indexing:** Crucial for `configurations` key, `tasks` status/user_id, `knowledge_artifacts` metadata/embeddings.
78
+ - **Data Integrity:** Use constraints.
79
+ - **Encryption:** Use strong encryption (e.g., the `cryptography` library) for sensitive data like `encrypted_api_keys` stored in the DB (see the sketch after this list).
80
+ - **Scalability:** Standard PostgreSQL scaling applies. Vector search performance might require tuning or a dedicated vector DB.
81
+ - **Backups:** Essential.
82
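+
+ A minimal sketch of the encryption helper, assuming the symmetric key is supplied via an environment variable or secrets manager (never stored in the DB); `VISIONOS_ENCRYPTION_KEY` is a hypothetical name:
+
+ ```python
+ import os
+
+ from cryptography.fernet import Fernet
+
+ # Generate once with Fernet.generate_key() and inject via secrets management.
+ fernet = Fernet(os.environ["VISIONOS_ENCRYPTION_KEY"])
+
+ def encrypt_api_key(plaintext: str) -> bytes:
+     """Value to store in users.encrypted_api_keys."""
+     return fernet.encrypt(plaintext.encode())
+
+ def decrypt_api_key(ciphertext: bytes) -> str:
+     """Only ever called on the backend; never returned to the UI."""
+     return fernet.decrypt(ciphertext).decode()
+ ```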
+
83
+ ## 3. Messaging/Cache Layer (Redis)
84
+
85
+ ### 3.1 Role (Expanded)
86
+
87
+ - **Task Queuing (Streams):** Distribute tasks (`task_stream`) and collect results (`result_stream`).
88
+ - **Configuration Cache:** (Recommended) Cache frequently accessed configurations from PostgreSQL to reduce DB load for agents/Daedalus.
89
+ - **Configuration Updates (Pub/Sub):** (Recommended) Daedalus publishes messages on a dedicated channel (e.g., `config_updates`) when configurations are changed via the API/UI. Interested services (Agents, potentially Daedalus instances) subscribe to invalidate their cache or reload config.
90
+ - **Real-time UI Notifications (Pub/Sub):** Can be used instead of or alongside WebSockets originating from Daedalus.
91
+
92
+ ### 3.2 Technology
93
+
94
+ - **Server:** Redis (Version 7+).
95
+ - **Interaction Library:** `redis-py` (async).
96
+
97
+ ### 3.3 Usage (Expanded)
98
+
99
+ - **Streams:** As previously defined for tasks/results.
100
+ - **Caching:** Standard Redis key-value caching (e.g., `GET config:agent.coding.model_name`, `SET config:agent.coding.model_name "value" EX <ttl>`).
101
+ - **Pub/Sub:**
102
+ - Daedalus uses `PUBLISH config_updates '{"key": "agent.coding.model_name", "scope": "agent_type"}'` (a JSON payload).
103
+ - Subscribers use `SUBSCRIBE config_updates` and listen for messages.
104
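+
+ A sketch of the subscriber side, assuming the JSON payload shown above; on each update the service simply drops its cached copy so the next read falls through to PostgreSQL:
+
+ ```python
+ import asyncio
+ import json
+
+ import redis.asyncio as redis
+
+ async def watch_config_updates() -> None:
+     r = redis.from_url("redis://localhost:6379/0", decode_responses=True)
+     pubsub = r.pubsub()
+     await pubsub.subscribe("config_updates")
+     async for message in pubsub.listen():
+         if message["type"] != "message":
+             continue  # skip subscribe confirmations
+         update = json.loads(message["data"])
+         # Invalidate the cached entry (e.g., "config:agent.coding.model_name").
+         await r.delete(f"config:{update['key']}")
+
+ if __name__ == "__main__":
+     asyncio.run(watch_config_updates())
+ ```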
+
105
+ ### 3.4 Considerations
106
+
107
+ - **Persistence:** Configure appropriately for durability needs (Streams benefit from AOF/RDB).
108
+ - **Scalability:** Redis scales well; clustering available if needed.
109
+ - **Stream Trimming:** Implement strategy (`MAXLEN`/`MINID`).
110
+ - **Cache Invalidation:** Use TTLs and Pub/Sub updates for effective cache management.
111
+ - **Error Handling:** Implement robust consumer logic for Streams and Pub/Sub.
112
+
113
+ ## 4. Artifact Storage (Object Storage)
114
+
115
+ ### 4.1 Role
116
+
117
+ Stores large binary artifacts generated by agents (e.g., code files, documents, images) that are not suitable for direct storage in PostgreSQL or Redis.
118
+
119
+ ### 4.2 Technology
120
+
121
+ - **Options:**
122
+ - Self-hosted: MinIO (S3-compatible open-source object storage).
123
+ - Cloud-based: AWS S3, Google Cloud Storage, Azure Blob Storage.
124
+ - **Interaction:** Use appropriate SDKs (e.g., `boto3` for S3/MinIO, `google-cloud-storage`).
125
+
126
+ ### 4.3 Usage
127
+
128
+ - Agents upload generated files to the object store.
129
+ - The storage URI (e.g., `s3://bucket-name/path/to/artifact.py`) is saved in the `generated_artifacts` table in PostgreSQL.
130
+ - Daedalus/UI can generate pre-signed URLs for secure, temporary access to download artifacts if needed.
131
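+
+ A sketch of this flow with `boto3` against an S3-compatible store; the endpoint, bucket, and credentials are placeholders:
+
+ ```python
+ import boto3
+
+ s3 = boto3.client(
+     "s3",
+     endpoint_url="http://minio:9000",        # omit for AWS S3
+     aws_access_key_id="minio_access_key",    # placeholder credentials
+     aws_secret_access_key="minio_secret_key",
+ )
+
+ def store_artifact(local_path: str, key: str,
+                    bucket: str = "visionos-artifacts") -> str:
+     """Upload a file and return the URI recorded in generated_artifacts."""
+     s3.upload_file(local_path, bucket, key)
+     return f"s3://{bucket}/{key}"
+
+ def presigned_download_url(key: str, bucket: str = "visionos-artifacts",
+                            expires: int = 3600) -> str:
+     """Temporary download link Daedalus/UI can hand to the browser."""
+     return s3.generate_presigned_url(
+         "get_object", Params={"Bucket": bucket, "Key": key}, ExpiresIn=expires
+     )
+ ```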
+
132
+ ## 5. Vector Database (Optional/Alternative)
133
+
134
+ ### 5.1 Role
135
+
136
+ If `pgvector` performance is insufficient for large-scale similarity searches within the knowledge base, a dedicated vector database can be used.
137
+
138
+ ### 5.2 Technology
139
+
140
+ - **Options:** Milvus, Weaviate, Pinecone, Qdrant.
141
+ - **Interaction:** Use specific client libraries.
142
+
143
+ ### 5.3 Usage
144
+
145
+ - Store vector embeddings generated from knowledge artifacts.
146
+ - Perform similarity searches during context retrieval.
147
+ - Metadata might still reside in PostgreSQL, linking to the vector ID in the vector database.
148
+
149
+ ## 6. Data Flow Summary (Refined)
150
+
151
+ - Task flow remains similar, but agents now fetch configuration (potentially from Redis cache or Daedalus API backed by PostgreSQL) at the start.
152
+ - Agents may interact with Knowledge Base (PostgreSQL/Vector DB) and Artifact Storage (Object Store) during processing.
153
+ - Configuration changes via UI update PostgreSQL and potentially trigger Redis Pub/Sub messages for cache invalidation/updates.
154
+ - Results published to `result_stream` include links (URIs) to artifacts stored in object storage and reasoning steps.
155
+
deployment_architecture.md ADDED
@@ -0,0 +1,100 @@
1
+ # VisionOS SaaS - Deployment Architecture (Refined)
2
+
3
+ ## 1. Introduction
4
+
5
+ This document outlines the deployment architecture for the VisionOS SaaS platform, refined to incorporate enhanced features like no-code customization, extensibility, local agent interaction, artifact storage, and advanced AI capabilities. It covers containerization, orchestration, networking, CI/CD, and deployment considerations for all components.
6
+
7
+ ## 2. Core Principles
8
+
9
+ - **Containerization:** All backend services (Daedalus, Agents) containerized (Docker).
10
+ - **Orchestration:** Kubernetes recommended for scalability and management. Docker Compose suitable for simpler local development/testing.
11
+ - **Infrastructure as Code (IaC):** Terraform/Pulumi for cloud infrastructure.
12
+ - **CI/CD:** Automated build, test, deploy pipelines (GitHub Actions/GitLab CI).
13
+ - **Scalability & Availability:** Horizontal scaling for stateless services (Agents, Daedalus instances); HA for stateful services (DB, Redis, Object Store).
14
+ - **Security:** Network policies, secrets management, secure communication channels.
15
+
16
+ ## 3. Deployment Model (Refined)
17
+
18
+ ### 3.1 Components & Containers/Services
19
+
20
+ - **Daedalus API:** Docker container (FastAPI/Uvicorn).
21
+ - **Agent Services:** Individual Docker containers per agent type.
22
+ - **Vision UI:** Static build (HTML/CSS/JS) served via Nginx container or dedicated hosting/CDN.
23
+ - **PostgreSQL:** Managed DB service (AWS RDS, Google Cloud SQL) or Kubernetes StatefulSet + Persistent Volumes.
24
+ - **Redis:** Managed cache service (ElastiCache, Memorystore) or Kubernetes StatefulSet + Persistent Volumes.
25
+ - **Ollama:** Separate containerized service, accessible by Agents.
26
+ - **Artifact Storage (Object Store):** Managed service (S3, GCS) or self-hosted MinIO (containerized, potentially StatefulSet).
27
+ - **Vector Database:** (If used) Managed service or self-hosted (e.g., Milvus/Weaviate on Kubernetes).
28
+ - **Extensibility Sandbox:** The environment for running dynamic tools. Could be:
29
+ - Managed within Agent containers (using subprocesses with strict resource limits and permissions via `prlimit`/`seccomp`).
30
+ - Dedicated Docker containers launched on demand by Agents (requires Docker-in-Docker or access to host Docker socket - security implications).
31
+ - **Local Agent:** **Not deployed centrally.** Requires separate installation package/process for user's local machine (macOS initially).
32
+ - **Secure Channel Endpoint:** Service within Kubernetes (potentially part of Daedalus or separate) to handle connections from Local Agents (e.g., WebSocket/gRPC endpoint).
33
+ - **Ingress/Proxy:** Nginx/Traefik (Kubernetes Ingress Controller or standalone).
34
+
35
+ ### 3.2 Orchestration (Kubernetes Example - Expanded)
36
+
37
+ - **Daedalus:** Deployment + Service + HPA (optional).
38
+ - **Agents:** Deployments (one per type) + Services + HPAs (based on CPU/memory/queue length).
39
+ - **PostgreSQL:** StatefulSet + PVC + Service (or external service endpoint).
40
+ - **Redis:** StatefulSet + PVC + Service (or external service endpoint).
41
+ - **MinIO (if self-hosted):** StatefulSet + PVC + Service.
42
+ - **Vector DB (if self-hosted):** Appropriate Helm chart or Operator (e.g., Milvus Operator).
43
+ - **Vision UI (Nginx):** Deployment + Service.
44
+ - **Secure Channel Endpoint:** Deployment + Service (potentially LoadBalancer or NodePort if needing external access for Local Agent connection).
45
+ - **Ingress:** Ingress Controller + Ingress Resource.
46
+ - **Configuration:** ConfigMaps, Secrets.
47
+ - **Sandboxing Resources:** If using containerized sandboxes, potentially requires privileged Agent containers or specific node configurations.
48
+
49
+ ### 3.3 Networking (Refined)
50
+
51
+ - **External Access:** Ingress for Vision UI, Daedalus API. Separate secure endpoint for Local Agent connections.
52
+ - **Internal Communication:** Kubernetes Services.
53
+ - **Security:** Network Policies to restrict traffic (e.g., Agents only talk to Redis, DB via Daedalus, Ollama, Artifact Store, Extensibility Sandbox; Daedalus talks to DB, Redis, Secure Channel Endpoint).
54
+ - **Local Agent Channel:** Requires secure, authenticated connection (WSS/mTLS) from user's network to the Secure Channel Endpoint.
55
+
56
+ ## 4. CI/CD Pipeline (Refined)
57
+
58
+ 1. **Trigger:** Code push/tag.
59
+ 2. **Build:** Docker images (Backend), Static assets (UI).
60
+ 3. **Test:** Unit, Integration (using Docker Compose), Frontend, E2E tests.
61
+ 4. **Push:** Docker images to registry.
62
+ 5. **Deploy (Backend/UI):** Update K8s manifests/Helm charts, apply changes (`kubectl`/`helm`). Upload UI assets.
63
+ 6. **Deploy (Local Agent):** Separate process to build installer/package for macOS.
64
+
65
+ ## 5. Configuration & Secrets Management (Refined)
66
+
67
+ - **Environment Variables:** Via ConfigMaps/Secrets.
68
+ - **Secrets:** K8s Secrets or Vault for DB passwords, internal keys, external service keys managed by the system.
69
+ - **User API Keys:** Stored encrypted in DB or secrets manager, accessed only by backend.
70
+ - **No-Code Config:** Stored in PostgreSQL, potentially cached in Redis, managed via Daedalus API.
71
+
72
+ ## 6. Monitoring & Logging (Refined)
73
+
74
+ - **Logging:** Structured JSON logs aggregated (EFK/Loki).
75
+ - **Metrics:** Prometheus scraping metrics (request latency, errors, queue lengths, agent/tool resource usage).
76
+ - **Tracing:** OpenTelemetry for distributed tracing.
77
+ - **Health Checks:** K8s readiness/liveness probes.
78
+ - **Alerting:** Alertmanager based on metrics/logs.
79
+
80
+ ## 7. Iterative Deployment Strategy (Refined)
81
+
82
+ 1. **Foundation:** K8s cluster, DB, Redis, Object Store (if self-hosted).
83
+ 2. **Core Backend:** Daedalus API.
84
+ 3. **Basic Agent:** Deploy Coding Agent.
85
+ 4. **Basic Frontend:** Deploy Vision UI (task submission/viewing).
86
+ 5. **Core Features:** Implement/Deploy Knowledge Base, Artifact Management integration.
87
+ 6. **Extensibility:** Deploy Extensibility Framework (including sandboxing).
88
+ 7. **No-Code UI:** Deploy configuration management UI.
89
+ 8. **Additional Agents:** Incrementally deploy other specialized agents.
90
+ 9. **Local Agent:** Develop and provide Local Agent installer and deploy Secure Channel Endpoint.
91
+ 10. **Iterate:** Continue adding features based on user feedback and priorities.
92
+
93
+ ## 8. Local Agent Deployment
94
+
95
+ - **Packaging:** Create a standard macOS application bundle (`.app`) or installer (`.pkg`).
96
+ - **Installation:** User downloads and installs manually.
97
+ - **Configuration:** Needs mechanism to securely configure connection details (URL of Secure Channel Endpoint, authentication token/certificate) during or after installation.
98
+ - **Permissions:** Installer may need to request necessary permissions (e.g., Accessibility for UI automation, Full Disk Access for file manipulation).
99
+ - **Updates:** Requires an update mechanism (manual download or auto-update feature).
100
+
distutils-precedence.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7ea7ffef3fe2a117ee12c68ed6553617f0d7fd2f0590257c25c484959a3b7373
3
+ size 152
docker-compose.yml CHANGED
@@ -1,44 +1,26 @@
1
  services:
2
- kortix-suna:
3
- platform: linux/amd64
4
- build:
5
- context: .
6
- dockerfile: ${DOCKERFILE:-Dockerfile}
7
- args:
8
- TARGETPLATFORM: ${TARGETPLATFORM:-linux/amd64}
9
- image: adamcohenhillel/kortix-suna:0.0.20
10
- ports:
11
- - "6080:6080" # noVNC web interface
12
- - "5901:5901" # VNC port
13
- - "9222:9222" # Chrome remote debugging port
14
- - "8000:8000" # API server port
15
- - "8080:8080" # HTTP server port
16
  environment:
17
- - ANONYMIZED_TELEMETRY=${ANONYMIZED_TELEMETRY:-false}
18
- - CHROME_PATH=/usr/bin/google-chrome
19
- - CHROME_USER_DATA=/app/data/chrome_data
20
- - CHROME_PERSISTENT_SESSION=${CHROME_PERSISTENT_SESSION:-false}
21
- - CHROME_CDP=${CHROME_CDP:-http://localhost:9222}
22
- - DISPLAY=:99
23
- - PLAYWRIGHT_BROWSERS_PATH=/ms-playwright
24
- - RESOLUTION=${RESOLUTION:-1024x768x24}
25
- - RESOLUTION_WIDTH=${RESOLUTION_WIDTH:-1024}
26
- - RESOLUTION_HEIGHT=${RESOLUTION_HEIGHT:-768}
27
- - VNC_PASSWORD=${VNC_PASSWORD:-vncpassword}
28
- - CHROME_DEBUGGING_PORT=9222
29
- - CHROME_DEBUGGING_HOST=localhost
30
  volumes:
31
- - /tmp/.X11-unix:/tmp/.X11-unix
32
- restart: unless-stopped
33
- shm_size: '2gb'
34
- cap_add:
35
- - SYS_ADMIN
36
- security_opt:
37
- - seccomp=unconfined
38
- tmpfs:
39
- - /tmp
40
- healthcheck:
41
- test: ["CMD", "nc", "-z", "localhost", "5901"]
42
- interval: 10s
43
- timeout: 5s
44
- retries: 3
 
1
+ version: '3.8'
2
+
3
  services:
4
+ postgres:
5
+ image: postgres:15
6
+ restart: always
 
7
  environment:
8
+ - POSTGRES_USER=visionos_user
9
+ - POSTGRES_PASSWORD=visionos_pass
10
+ - POSTGRES_DB=visionos_db
11
+ ports:
12
+ - "5432:5432"
13
+ volumes:
14
+ - postgres_data:/var/lib/postgresql/data
15
+
16
+ redis:
17
+ image: redis:7
18
+ restart: always
19
+ ports:
20
+ - "6379:6379"
21
  volumes:
22
+ - redis_data:/data
23
+
24
+ volumes:
25
+ postgres_data:
26
+ redis_data:
 
dotenv ADDED
@@ -0,0 +1,8 @@
1
+ #!/Users/lattm/Downloads/home/visionos_venv/bin/python3
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from dotenv.__main__ import cli
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(cli())
env.cpython-311.pyc ADDED
Binary file (4.18 kB). View file
 
extensibility_framework.md ADDED
@@ -0,0 +1,98 @@
1
+ # VisionOS SaaS - Extensibility Framework Design
2
+
3
+ ## 1. Introduction
4
+
5
+ This document outlines the design for the extensibility framework within VisionOS, enabling the dynamic integration of tools, features, and functionalities specified via GitHub repository URLs. This framework is crucial for allowing VisionOS to evolve and adapt by incorporating new capabilities with minimal core system changes, aligning with the user's requirement for maximum versatility and future-proofing.
6
+
7
+ ## 2. Goals
8
+
9
+ - **Dynamic Loading:** Allow agents (primarily) to load and utilize code/tools from specified GitHub repositories at runtime or during initialization.
10
+ - **Standardized Interface:** Define a clear interface that dynamically loaded tools must adhere to for seamless integration.
11
+ - **Security:** Implement security measures to mitigate risks associated with executing external code (sandboxing, permission controls).
12
+ - **Management:** Provide mechanisms for registering, managing, and potentially versioning these external tools.
13
+ - **Simplicity:** Aim for a straightforward process for developers to package their tools for compatibility with VisionOS.
14
+
15
+ ## 3. Proposed Architecture (Agent-Focused)
16
+
17
+ The primary focus is enabling Agents to use external tools. A similar pattern could potentially be adapted for frontend plugins later, but is out of scope for the initial design.
18
+
19
+ ### 3.1 Tool Definition & Packaging (External Repository)
20
+
21
+ Developers creating tools for VisionOS would structure their GitHub repository in a defined way:
22
+
23
+ - **`manifest.json` (or `visionos_tool.yaml`):** A metadata file at the repository root describing the tool.
24
+ - `name`: Unique tool name.
25
+ - `version`: Tool version (e.g., SemVer).
26
+ - `description`: Brief description.
27
+ - `entry_point`: The main Python module/class to load (e.g., `src.my_tool:MyToolClass`).
28
+ - `dependencies`: A `requirements.txt` file path within the repo, listing Python dependencies.
29
+ - `permissions_required`: (Future) List of permissions needed (e.g., `filesystem_read`, `network_access`, `llm_call`).
30
+ - **Source Code:** Python code implementing the tool logic, adhering to the VisionOS Tool Interface.
31
+ - **`requirements.txt`:** Lists the tool's specific Python dependencies.
32
+
33
+ ### 3.2 Tool Loading & Execution (Agent Side)
34
+
35
+ 1. **Registration:** Daedalus (or an admin interface) would manage a list of registered external tools, mapping a tool name/ID to its GitHub URL and potentially a specific commit/tag/branch.
36
+ 2. **Task Assignment:** When a task requiring an external tool is assigned to an agent, the task details include the tool name/ID.
37
+ 3. **Tool Acquisition (Agent):**
38
+ a. The agent checks if the required tool (and specific version) is already locally available/cached.
39
+ b. If not, the agent uses `GitPython` to clone or pull the specified repository (and checkout the correct version) into a dedicated, isolated directory (e.g., `/home/ubuntu/visionos_farm/agent_tools/<agent_id>/<tool_name>/<version>`).
40
+ 4. **Environment Setup (Agent):**
41
+ a. The agent creates a dedicated Python virtual environment (`venv`) within the tool's isolated directory.
42
+ b. It installs the tool's dependencies listed in its `requirements.txt` into this dedicated `venv` using `pip`.
43
+ 5. **Dynamic Loading (Agent):**
44
+ a. The agent uses Python's `importlib` (or similar mechanisms) to dynamically load the specified `entry_point` module/class *from the tool's isolated directory and virtual environment context*.
45
+ b. This might involve manipulating `sys.path` temporarily or executing the tool's code within a subprocess that activates the tool's specific `venv`.
46
+ 6. **Interface Invocation (Agent):**
47
+ a. The agent instantiates the loaded tool class (if applicable).
48
+ b. It calls the standardized method (e.g., `execute(params)`) defined by the VisionOS Tool Interface, passing necessary parameters.
49
+ 7. **Execution & Result:** The tool code executes within its isolated environment, performs its function, and returns results to the agent.
50
+ 8. **Cleanup:** (Optional) Cache the cloned repo and environment for reuse, or clean up based on policy.
51
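+
+ A prototype of steps 3-5 might look like the following; it assumes a POSIX host, a `manifest.json` with an `entry_point`, and a `requirements.txt` at the repo root, and treats the per-tool interpreter as only a first layer of isolation:
+
+ ```python
+ import json
+ import subprocess
+ import venv
+ from pathlib import Path
+
+ from git import Repo  # GitPython
+
+ def acquire_tool(repo_url: str, version: str, workdir: Path) -> Path:
+     tool_dir = workdir / version
+     if not tool_dir.exists():
+         repo = Repo.clone_from(repo_url, tool_dir)  # step 3b: clone
+         repo.git.checkout(version)                  # pin to tag/commit
+     env_dir = tool_dir / ".venv"
+     if not env_dir.exists():
+         venv.create(env_dir, with_pip=True)         # step 4a: dedicated venv
+         subprocess.run([str(env_dir / "bin" / "pip"), "install",
+                         "-r", str(tool_dir / "requirements.txt")],
+                        check=True)                  # step 4b: isolated deps
+     return tool_dir
+
+ def execute_tool(tool_dir: Path, params: dict, timeout: int = 120) -> dict:
+     # Step 5b: run inside the tool's own interpreter. This is a crude sandbox;
+     # production use should add seccomp/container isolation on top.
+     manifest = json.loads((tool_dir / "manifest.json").read_text())
+     module, cls = manifest["entry_point"].split(":")
+     runner = (f"import json, sys; from {module} import {cls}; "
+               f"print(json.dumps({cls}(config={{}}).execute("
+               f"json.loads(sys.argv[1]))))")
+     proc = subprocess.run(
+         [str(tool_dir / ".venv" / "bin" / "python"), "-c", runner,
+          json.dumps(params)],
+         capture_output=True, text=True, timeout=timeout,
+         cwd=tool_dir, check=True)
+     return json.loads(proc.stdout)
+ ```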
+
52
+ ### 3.3 VisionOS Tool Interface (Python Example)
53
+
54
+ External tools would implement a standard interface, potentially defined in the `visionos_shared` library.
55
+
56
+ ```python
57
+ # In visionos_shared.interfaces
58
+ from abc import ABC, abstractmethod
59
+ from typing import Any, Dict
60
+
61
+ class VisionOSTool(ABC):
62
+
63
+ def __init__(self, config: Dict[str, Any]):
64
+ """Initialize the tool with any required configuration."""
65
+ self.config = config
66
+
67
+ @abstractmethod
68
+ def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
69
+ """Execute the tool's main function."""
70
+ pass
71
+
72
+ # Optional: Methods for setup, teardown, status checks, etc.
73
+ ```
74
+
75
+ ### 3.4 Security Considerations
76
+
77
+ - **Isolation:** Cloning repos and creating separate virtual environments provides filesystem and dependency isolation.
78
+ - **Code Execution Risks:** Running arbitrary code from GitHub is inherently risky.
79
+ - **Sandboxing:** Consider running the tool loading/execution logic within a dedicated container or using OS-level sandboxing mechanisms (e.g., `nsjail`, `firecracker` - more complex) for stronger isolation.
80
+ - **Permission Model:** (Future) Implement a permission system where tools declare required permissions in their manifest, and the agent/orchestrator grants them selectively.
81
+ - **Code Review/Vetting:** Implement a process (manual or automated) for vetting registered tools before they are widely usable.
82
+ - **Network Policies:** Restrict network access for tool execution environments unless explicitly required and permitted.
83
+ - **Dependency Conflicts:** Per-tool virtual environments mitigate Python dependency conflicts between tools and the agent itself.
84
+
85
+ ## 4. Implementation Notes
86
+
87
+ - **Agent Modification:** Agents need logic to handle tool acquisition, environment setup, dynamic loading, and execution.
88
+ - **Shared Library:** Update `visionos_shared` with the `VisionOSTool` interface.
89
+ - **Daedalus:** Needs API endpoints and database schema updates to manage the registry of external tools.
90
+ - **Error Handling:** Robust error handling is needed for Git operations, dependency installation, code loading, and tool execution.
91
+
92
+ ## 5. Next Steps
93
+
94
+ - Refine the `manifest.json` structure and Tool Interface.
95
+ - Prototype the tool acquisition and dynamic loading mechanism within a sample agent.
96
+ - Integrate tool registration management into Daedalus.
97
+ - Address security sandboxing approaches in more detail.
98
+
fastapi ADDED
@@ -0,0 +1,8 @@
1
+ #!/Users/lattm/Downloads/home/visionos_venv/bin/python3
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from fastapi.cli import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
frontend_architecture.md ADDED
@@ -0,0 +1,73 @@
1
+ # VisionOS SaaS - Frontend Architecture (Vision UI) (Refined)
2
+
3
+ ## 1. Introduction
4
+
5
+ This document details the architecture for the Vision UI frontend component of the VisionOS SaaS platform, refined based on detailed requirements analysis. It serves as the primary user interface for interacting with the system, submitting tasks, viewing results, managing artifacts, and **customizing system behavior via a no-code interface**. It builds upon the refined high-level overview and backend architecture.
6
+
7
+ ## 2. Role (Expanded)
8
+
9
+ - **User Interaction:** Provides the graphical interface for users.
10
+ - **Task Submission:** Allows users to define and submit tasks to the Daedalus API.
11
+ - **Status Monitoring:** Displays the real-time status of ongoing tasks.
12
+ - **Results Display:** Presents results received from the backend in a user-friendly format (text, code, visualizations).
13
+ - **Artifact Management:** Interface for viewing and potentially managing generated artifacts (code, documents, images).
14
+ - **Reasoning Display ("Think Mode"):** Visualizes agent thought processes, plans, and tool usage steps.
15
+ - **No-Code Customization:** Provides a visual interface for users to:
16
+ - Adjust system-wide settings.
17
+ - Configure agent parameters and behavior.
18
+ - Select and configure tools available via the Extensibility Framework.
19
+ - Potentially define simple automation workflows visually (inspired by n8n, Zapier, Make.com).
20
+ - **Local Agent Interaction:** (Future) Display status of the local agent, potentially offer limited direct controls.
21
+
22
+ ## 3. Technology Stack
23
+
24
+ - **Framework:** Next.js (Recommended for routing, potential SSR/SSG) or React + Vite.
25
+ - **Language:** TypeScript.
26
+ - **Styling:** Tailwind CSS + Component Library (e.g., shadcn/ui).
27
+ - **State Management:** Zustand (Recommended) or Redux Toolkit.
28
+ - **Data Fetching:** React Query (Recommended) or SWR.
29
+ - **Routing:** Next.js built-in router or React Router.
30
+ - **Real-time Updates:** WebSocket client library.
31
+ - **No-Code UI Components:** Potentially libraries like `react-flow` for workflow visualization/editing.
32
+
33
+ ## 4. Key Modules/Components (Expanded & Refined)
34
+
35
+ - **Layout:** Main application shell, navigation (potentially configurable).
36
+ - **Dashboard:** Overview, quick actions, system status.
37
+ - **Task Submission Form:** Task definition, agent selection, file uploads, parameter inputs.
38
+ - **Task List/Monitor:** Task status tracking.
39
+ - **Task Detail View:** Detailed task info, inputs, logs, results.
40
+ - **Result Viewers:** Specialized components for code (syntax highlighting), markdown, images, tables, etc.
41
+ - **Artifact Viewer:** Dedicated section or components to browse and view managed artifacts.
42
+ - **Reasoning Display:** Component integrated into Task Detail View or a separate panel to show agent steps/reasoning.
43
+ - **No-Code Customization Module:**
44
+ - **Settings Editor:** Forms/interfaces to modify agent parameters and system settings fetched from Daedalus API.
45
+ - **Tool Selector:** Interface to browse available tools (from extensibility framework) and configure their usage for agents.
46
+ - **Workflow Builder:** (Future) Visual canvas (e.g., using `react-flow`) to define sequences of agent tasks or tool executions.
47
+ - **Settings:** User preferences, external API key management (secure input only), Local Agent status/connection.
48
+ - **Authentication:** (If user accounts implemented) Login/signup, profile.
49
+
50
+ ## 5. Communication
51
+
52
+ - **Vision UI -> Daedalus API:** Asynchronous REST API calls (HTTPS) for tasks, fetching/updating configurations, fetching artifacts/results.
53
+ - **Daedalus API -> Vision UI:** Real-time updates via WebSockets (task status, new results, potentially config change notifications).
54
+
55
+ ## 6. State Management
56
+
57
+ - **Server State (React Query/SWR):** Tasks, configurations, artifacts, knowledge base entries.
58
+ - **Client State (Zustand/Redux):** UI state, user preferences, WebSocket status, state for no-code builders.
59
+ - **Real-time Updates:** WebSocket messages trigger updates/invalidations in server state cache and updates to client state.
60
+
61
+ ## 7. Key Considerations (Expanded)
62
+
63
+ - **User Experience (UX):** CRITICAL for the no-code interface. Must be intuitive, visually clear, and provide good feedback. Consider UX patterns from `lovable.dev`, `n8n.io`, etc.
64
+ - **Responsiveness & Accessibility:** UI must be usable across devices and adhere to accessibility standards.
65
+ - **Performance:** Optimize rendering, bundle size, and data fetching, especially for potentially complex no-code interfaces.
66
+ - **Security:** Secure API interactions. **Strict adherence to backend-centric API key management (UI only collects and transmits securely, never stores).**
67
+ - **Modularity:** Design UI components to be reusable and maintainable.
68
+ - **Integration with Uploaded Assets:** Re-evaluate uploaded `.tsx` files (e.g., `APIKeyManager.tsx`, `Chat.client.tsx`) for conceptual reuse or specific component adaptation, discarding insecure patterns (like client-side key storage).
69
+
70
+ ## 8. Security: API Key Management (Reiteration)
71
+
72
+ **API keys for external services MUST be handled securely on the backend.** The frontend's role is limited to providing a secure input field and transmitting the key *once* via HTTPS to a dedicated Daedalus endpoint. The frontend MUST NOT store or retrieve the actual key. It should only display status (Set/Not Set) provided by the backend.
73
+
greenlet.h ADDED
@@ -0,0 +1,164 @@
1
+ /* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
2
+
3
+ /* Greenlet object interface */
4
+
5
+ #ifndef Py_GREENLETOBJECT_H
6
+ #define Py_GREENLETOBJECT_H
7
+
8
+
9
+ #include <Python.h>
10
+
11
+ #ifdef __cplusplus
12
+ extern "C" {
13
+ #endif
14
+
15
+ /* This is deprecated and undocumented. It does not change. */
16
+ #define GREENLET_VERSION "1.0.0"
17
+
18
+ #ifndef GREENLET_MODULE
19
+ #define implementation_ptr_t void*
20
+ #endif
21
+
22
+ typedef struct _greenlet {
23
+ PyObject_HEAD
24
+ PyObject* weakreflist;
25
+ PyObject* dict;
26
+ implementation_ptr_t pimpl;
27
+ } PyGreenlet;
28
+
29
+ #define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))
30
+
31
+
32
+ /* C API functions */
33
+
34
+ /* Total number of symbols that are exported */
35
+ #define PyGreenlet_API_pointers 12
36
+
37
+ #define PyGreenlet_Type_NUM 0
38
+ #define PyExc_GreenletError_NUM 1
39
+ #define PyExc_GreenletExit_NUM 2
40
+
41
+ #define PyGreenlet_New_NUM 3
42
+ #define PyGreenlet_GetCurrent_NUM 4
43
+ #define PyGreenlet_Throw_NUM 5
44
+ #define PyGreenlet_Switch_NUM 6
45
+ #define PyGreenlet_SetParent_NUM 7
46
+
47
+ #define PyGreenlet_MAIN_NUM 8
48
+ #define PyGreenlet_STARTED_NUM 9
49
+ #define PyGreenlet_ACTIVE_NUM 10
50
+ #define PyGreenlet_GET_PARENT_NUM 11
51
+
52
+ #ifndef GREENLET_MODULE
53
+ /* This section is used by modules that uses the greenlet C API */
54
+ static void** _PyGreenlet_API = NULL;
55
+
56
+ # define PyGreenlet_Type \
57
+ (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
58
+
59
+ # define PyExc_GreenletError \
60
+ ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
61
+
62
+ # define PyExc_GreenletExit \
63
+ ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
64
+
65
+ /*
66
+ * PyGreenlet_New(PyObject *args)
67
+ *
68
+ * greenlet.greenlet(run, parent=None)
69
+ */
70
+ # define PyGreenlet_New \
71
+ (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
72
+ _PyGreenlet_API[PyGreenlet_New_NUM])
73
+
74
+ /*
75
+ * PyGreenlet_GetCurrent(void)
76
+ *
77
+ * greenlet.getcurrent()
78
+ */
79
+ # define PyGreenlet_GetCurrent \
80
+ (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
81
+
82
+ /*
83
+ * PyGreenlet_Throw(
84
+ * PyGreenlet *greenlet,
85
+ * PyObject *typ,
86
+ * PyObject *val,
87
+ * PyObject *tb)
88
+ *
89
+ * g.throw(...)
90
+ */
91
+ # define PyGreenlet_Throw \
92
+ (*(PyObject * (*)(PyGreenlet * self, \
93
+ PyObject * typ, \
94
+ PyObject * val, \
95
+ PyObject * tb)) \
96
+ _PyGreenlet_API[PyGreenlet_Throw_NUM])
97
+
98
+ /*
99
+ * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
100
+ *
101
+ * g.switch(*args, **kwargs)
102
+ */
103
+ # define PyGreenlet_Switch \
104
+ (*(PyObject * \
105
+ (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
106
+ _PyGreenlet_API[PyGreenlet_Switch_NUM])
107
+
108
+ /*
109
+ * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
110
+ *
111
+ * g.parent = new_parent
112
+ */
113
+ # define PyGreenlet_SetParent \
114
+ (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
115
+ _PyGreenlet_API[PyGreenlet_SetParent_NUM])
116
+
117
+ /*
118
+ * PyGreenlet_GetParent(PyObject* greenlet)
119
+ *
120
+ * return greenlet.parent;
121
+ *
122
+ * This could return NULL even if there is no exception active.
123
+ * If it does not return NULL, you are responsible for decrementing the
124
+ * reference count.
125
+ */
126
+ # define PyGreenlet_GetParent \
127
+ (*(PyGreenlet* (*)(PyGreenlet*)) \
128
+ _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])
129
+
130
+ /*
131
+ * deprecated, undocumented alias.
132
+ */
133
+ # define PyGreenlet_GET_PARENT PyGreenlet_GetParent
134
+
135
+ # define PyGreenlet_MAIN \
136
+ (*(int (*)(PyGreenlet*)) \
137
+ _PyGreenlet_API[PyGreenlet_MAIN_NUM])
138
+
139
+ # define PyGreenlet_STARTED \
140
+ (*(int (*)(PyGreenlet*)) \
141
+ _PyGreenlet_API[PyGreenlet_STARTED_NUM])
142
+
143
+ # define PyGreenlet_ACTIVE \
144
+ (*(int (*)(PyGreenlet*)) \
145
+ _PyGreenlet_API[PyGreenlet_ACTIVE_NUM])
146
+
147
+
148
+
149
+
150
+ /* Macro that imports greenlet and initializes C API */
151
+ /* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
152
+ keep the older definition to be sure older code that might have a copy of
153
+ the header still works. */
154
+ # define PyGreenlet_Import() \
155
+ { \
156
+ _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
157
+ }
158
+
159
+ #endif /* GREENLET_MODULE */
160
+
161
+ #ifdef __cplusplus
162
+ }
163
+ #endif
164
+ #endif /* !Py_GREENLETOBJECT_H */
main.cpython-311.pyc ADDED
Binary file (3.65 kB). View file
 
mako-render ADDED
@@ -0,0 +1,8 @@
1
+ #!/Users/lattm/Downloads/home/visionos_venv/bin/python3
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from mako.cmd import cmdline
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(cmdline())
package-lock.json CHANGED
@@ -16,13 +16,13 @@
16
  "devDependencies": {
17
  "@eslint/eslintrc": "^3",
18
  "@tailwindcss/postcss": "^4",
19
- "@types/node": "^20",
20
  "@types/react": "^19",
21
  "@types/react-dom": "^19",
22
  "eslint": "^9",
23
  "eslint-config-next": "15.3.1",
24
  "tailwindcss": "^4",
25
- "typescript": "^5"
26
  }
27
  },
28
  "node_modules/@alloc/quick-lru": {
@@ -1570,9 +1570,9 @@
1570
  "license": "MIT"
1571
  },
1572
  "node_modules/@types/node": {
1573
- "version": "20.17.32",
1574
- "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.32.tgz",
1575
- "integrity": "sha512-zeMXFn8zQ+UkjK4ws0RiOC9EWByyW1CcVmLe+2rQocXRsGEDxUCwPEIVgpsGcLHS/P8JkT0oa3839BRABS0oPw==",
1576
  "dev": true,
1577
  "license": "MIT",
1578
  "dependencies": {
 
16
  "devDependencies": {
17
  "@eslint/eslintrc": "^3",
18
  "@tailwindcss/postcss": "^4",
19
+ "@types/node": "20.17.44",
20
  "@types/react": "^19",
21
  "@types/react-dom": "^19",
22
  "eslint": "^9",
23
  "eslint-config-next": "15.3.1",
24
  "tailwindcss": "^4",
25
+ "typescript": "5.8.3"
26
  }
27
  },
28
  "node_modules/@alloc/quick-lru": {
 
1570
  "license": "MIT"
1571
  },
1572
  "node_modules/@types/node": {
1573
+ "version": "20.17.44",
1574
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.44.tgz",
1575
+ "integrity": "sha512-50sE4Ibb4BgUMxHrcJQSAU0Fu7fLcTdwcXwRzEF7wnVMWvImFLg2Rxc7SW0vpvaJm4wvhoWEZaQiPpBpocZiUA==",
1576
  "dev": true,
1577
  "license": "MIT",
1578
  "dependencies": {
package.json CHANGED
@@ -17,12 +17,12 @@
17
  "devDependencies": {
18
  "@eslint/eslintrc": "^3",
19
  "@tailwindcss/postcss": "^4",
20
- "@types/node": "^20",
21
  "@types/react": "^19",
22
  "@types/react-dom": "^19",
23
  "eslint": "^9",
24
  "eslint-config-next": "15.3.1",
25
  "tailwindcss": "^4",
26
- "typescript": "^5"
27
  }
28
  }
 
17
  "devDependencies": {
18
  "@eslint/eslintrc": "^3",
19
  "@tailwindcss/postcss": "^4",
20
+ "@types/node": "20.17.44",
21
  "@types/react": "^19",
22
  "@types/react-dom": "^19",
23
  "eslint": "^9",
24
  "eslint-config-next": "15.3.1",
25
  "tailwindcss": "^4",
26
+ "typescript": "5.8.3"
27
  }
28
  }
pip ADDED
@@ -0,0 +1,8 @@
1
+ #!/Users/lattm/Downloads/home/visionos_venv/bin/python3
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from pip._internal.cli.main import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
pip3 ADDED
@@ -0,0 +1,8 @@
1
+ #!/Users/lattm/Downloads/home/visionos_venv/bin/python3
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from pip._internal.cli.main import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
pip3.10 ADDED
@@ -0,0 +1,8 @@
1
+ #!/Users/lattm/Downloads/home/ubuntu/venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from pip._internal.cli.main import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
pip3.11 ADDED
@@ -0,0 +1,8 @@
1
+ #!/Users/lattm/Downloads/home/ubuntu/venv/bin/python3.11
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from pip._internal.cli.main import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
pip3.9 ADDED
@@ -0,0 +1,8 @@
1
+ #!/Users/lattm/Downloads/home/visionos_venv/bin/python3
2
+ # -*- coding: utf-8 -*-
3
+ import re
4
+ import sys
5
+ from pip._internal.cli.main import main
6
+ if __name__ == '__main__':
7
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
8
+ sys.exit(main())
poetry.lock CHANGED
The diff for this file is too large to render. See raw diff
 
pyproject.toml CHANGED
@@ -1,66 +1,17 @@
-[tool.poetry]
-name = "suna"
-version = "1.0"
-description = "open source generalist AI Agent"
-authors = ["marko-kraemer <[email protected]>"]
+[project]
+name = "visionos-shared"
+version = "0.1.0"
+description = ""
+authors = [
+    {name = "Your Name",email = "[email protected]"}
+]
 readme = "README.md"
-license = "MIT"
-homepage = "https://www.suna.so/"
-repository = "https://github.com/kortix-ai/suna"
-classifiers = [
-    "Development Status :: 4 - Beta",
-    "Intended Audience :: Developers",
-    "License :: OSI Approved :: MIT License",
-    "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.12",
-    "Topic :: Software Development :: Libraries :: Python Modules",
+requires-python = ">=3.11,<4.0"
+dependencies = [
+    "pydantic (>=2.11.4,<3.0.0)"
 ]
 
-[tool.poetry.dependencies]
-python = "^3.11"
-streamlit-quill = "0.0.3"
-python-dotenv = "1.0.1"
-litellm = "^1.44.0"
-click = "8.1.7"
-questionary = "2.0.1"
-requests = "^2.31.0"
-packaging = "24.1"
-setuptools = "75.3.0"
-pytest = "8.3.3"
-pytest-asyncio = "0.24.0"
-asyncio = "3.4.3"
-altair = "4.2.2"
-prisma = "0.15.0"
-fastapi = "0.110.0"
-uvicorn = "0.27.1"
-python-multipart = "0.0.20"
-redis = "5.2.1"
-upstash-redis = "1.3.0"
-supabase = "^2.15.0"
-pyjwt = "2.10.1"
-exa-py = "^1.9.1"
-e2b-code-interpreter = "^1.2.0"
-certifi = "2024.2.2"
-python-ripgrep = "0.0.6"
-daytona_sdk = "^0.14.0"
-boto3 = "^1.34.0"
-openai = "^1.72.0"
-streamlit = "^1.44.1"
-nest-asyncio = "^1.6.0"
-vncdotool = "^1.2.0"
-tavily-python = "^0.5.4"
-pytesseract = "^0.3.13"
-
-[tool.poetry.scripts]
-agentpress = "agentpress.cli:main"
-
-[[tool.poetry.packages]]
-include = "agentpress"
-
-
-[tool.poetry.group.dev.dependencies]
-daytona-sdk = "^0.14.0"
 
 [build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
+requires = ["poetry-core>=2.0.0,<3.0.0"]
+build-backend = "poetry.core.masonry.api"
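
The rewritten pyproject.toml swaps Poetry's legacy [tool.poetry] tables for the standard PEP 621 [project] table, so any PEP 517 frontend can read the metadata, not just Poetry. Note that PEP 621's requires-python takes a PEP 440 specifier, so Poetry's caret shorthand has to be spelled out as a range. A minimal sketch of consuming the new table with the stdlib tomllib parser (available on the Python 3.11+ interpreters this project targets; the relative path is assumed):

import tomllib  # stdlib TOML parser, Python 3.11+

# Read the PEP 621 [project] table the new file defines.
with open("pyproject.toml", "rb") as f:
    meta = tomllib.load(f)["project"]

print(meta["name"], meta["version"])  # visionos-shared 0.1.0
print(meta["dependencies"])           # ['pydantic (>=2.11.4,<3.0.0)']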
python3.11 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:32835730b5f58e962221808865710cbcec50b2edc24184bf2bbaef6f1812e5b4
+size 152624
python3.9 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:832c074c7cb29f0871c219c419e86b849edda11e83e76b191be278a399f9ded1
+size 102352
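
python3.11 and python3.9 are interpreter binaries tracked through Git LFS, so only pointer files are committed: the LFS spec version, a sha256 object id, and the blob size, one key/value pair per line. A minimal sketch of reading those fields back (the file path is assumed):

# A Git LFS pointer is a few "key value" lines; parse them into a dict.
with open("python3.9") as f:
    pointer = dict(line.strip().split(" ", 1) for line in f if line.strip())

print(pointer["oid"])   # sha256:832c07...
print(pointer["size"])  # 102352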
pyvenv.cfg ADDED
@@ -0,0 +1,3 @@
+home = /Users/lattm/Downloads/home/ubuntu/visionos_venv/bin
+include-system-site-packages = false
+version = 3.9.6
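
pyvenv.cfg is the marker file the interpreter reads at startup to recognize a virtual environment: home points at the base interpreter's bin directory, and include-system-site-packages controls isolation from the system's packages. It is flat "key = value" text with no section headers, so it is not INI; a minimal parsing sketch (the file path is assumed):

# pyvenv.cfg has no [sections], so split "key = value" lines manually.
config = {}
with open("pyvenv.cfg") as f:
    for line in f:
        if "=" in line:
            key, _, value = line.partition("=")
            config[key.strip()] = value.strip()

print(config["version"])  # 3.9.6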
requirements.txt CHANGED
@@ -1,14 +1,3 @@
-flask>=2.2.3
-pillow>=10.0.0
-numpy>=1.24.3
-requests>=2.31.0
-python-dotenv>=1.0.0
-anthropic>=0.18.1
-openai>=1.13.0
-redis>=5.0.1
-fastapi>=0.109.0
-uvicorn>=0.27.0
-python-multipart>=0.0.9
-pydantic>=2.5.0
-pytest>=7.4.0
-opencv-python-headless>=4.8.0
+fastapi
+uvicorn
+requests
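
The trimmed list keeps only the three runtime packages and drops every version floor, so installs are no longer reproducible by the file alone. If that matters, the versions that actually resolve can be recorded after installation; a minimal sketch using the stdlib (Python 3.8+):

from importlib.metadata import version

# Record what actually resolved, e.g. to regenerate a pinned requirements file.
for pkg in ("fastapi", "uvicorn", "requests"):
    print(f"{pkg}=={version(pkg)}")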
sandbox.py CHANGED
@@ -1,213 +1,126 @@
+# /home/ubuntu/visionos_farm/daedalus/extensibility/sandbox.py
+import subprocess
+import json
+import logging
 import os
-from typing import Optional
-
-from daytona_sdk import Daytona, DaytonaConfig, CreateSandboxParams, Sandbox, SessionExecuteRequest
-from daytona_api_client.models.workspace_state import WorkspaceState
-from dotenv import load_dotenv
-
-from agentpress.tool import Tool
-from utils.logger import logger
-from utils.config import config
-from utils.files_utils import clean_path
-from agentpress.thread_manager import ThreadManager
-
-load_dotenv()
-
-logger.debug("Initializing Daytona sandbox configuration")
-daytona_config = DaytonaConfig(
-    api_key=config.DAYTONA_API_KEY,
-    server_url=config.DAYTONA_SERVER_URL,
-    target=config.DAYTONA_TARGET
-)
-
-if daytona_config.api_key:
-    logger.debug("Daytona API key configured successfully")
-else:
-    logger.warning("No Daytona API key found in environment variables")
-
-if daytona_config.server_url:
-    logger.debug(f"Daytona server URL set to: {daytona_config.server_url}")
-else:
-    logger.warning("No Daytona server URL found in environment variables")
-
-if daytona_config.target:
-    logger.debug(f"Daytona target set to: {daytona_config.target}")
-else:
-    logger.warning("No Daytona target found in environment variables")
-
-daytona = Daytona(daytona_config)
-logger.debug("Daytona client initialized")
-
-async def get_or_start_sandbox(sandbox_id: str):
-    """Retrieve a sandbox by ID, check its state, and start it if needed."""
-
-    logger.info(f"Getting or starting sandbox with ID: {sandbox_id}")
-
+import asyncio
+from typing import Dict, Any, Tuple
+
+logger = logging.getLogger(__name__)
+
+# --- Sandboxing Configuration ---
+# WARNING: This assumes a pre-existing virtual environment dedicated to sandboxing.
+# A more robust solution would involve dynamically creating environments per tool
+# or using containerization (e.g., Docker) for stronger isolation.
+SANDBOX_PYTHON_EXEC = "/home/ubuntu/sandbox_venv/bin/python"
+SANDBOX_RUNNER_SCRIPT_PATH = "/home/ubuntu/visionos_farm/daedalus/extensibility/sandbox_runner.py"
+
+async def execute_in_sandbox(repo_path: str, module_path: str, class_name: str, parameters: Dict[str, Any]) -> Dict[str, Any]:
+    """Executes a tool's method within a sandboxed Python environment.
+
+    Args:
+        repo_path: The absolute path to the cloned repository containing the tool.
+        module_path: The absolute path to the Python module file containing the tool class.
+        class_name: The name of the Tool class to execute.
+        parameters: The parameters dictionary to pass to the tool's execute method.
+
+    Returns:
+        A dictionary containing the result from the tool execution, including
+        status ("SUCCESS" or "FAILURE") and output/error details.
+    """
+    logger.info(f"Executing tool {class_name} from {module_path} in sandbox.")
+
+    # Ensure the sandbox runner script exists
+    if not os.path.exists(SANDBOX_RUNNER_SCRIPT_PATH):
+        logger.error(f"Sandbox runner script not found at {SANDBOX_RUNNER_SCRIPT_PATH}")
+        return {"status": "FAILURE", "error": "Sandbox runner script missing."}
+
+    # Ensure the sandboxed Python executable exists
+    if not os.path.exists(SANDBOX_PYTHON_EXEC):
+        logger.error(f"Sandboxed Python executable not found at {SANDBOX_PYTHON_EXEC}. Please create the virtual environment.")
+        # In a real scenario, might try to create it here, but for now, fail.
+        return {"status": "FAILURE", "error": f"Sandboxed Python not found at {SANDBOX_PYTHON_EXEC}."}
+
     try:
-        sandbox = daytona.get_current_sandbox(sandbox_id)
-
-        # Check if sandbox needs to be started
-        if sandbox.instance.state == WorkspaceState.ARCHIVED or sandbox.instance.state == WorkspaceState.STOPPED:
-            logger.info(f"Sandbox is in {sandbox.instance.state} state. Starting...")
-            try:
-                daytona.start(sandbox)
-                # Wait a moment for the sandbox to initialize
-                # sleep(5)
-                # Refresh sandbox state after starting
-                sandbox = daytona.get_current_sandbox(sandbox_id)
-
-                # Start supervisord in a session when restarting
-                start_supervisord_session(sandbox)
-            except Exception as e:
-                logger.error(f"Error starting sandbox: {e}")
-                raise e
-
-        logger.info(f"Sandbox {sandbox_id} is ready")
-        return sandbox
-
-    except Exception as e:
-        logger.error(f"Error retrieving or starting sandbox: {str(e)}")
-        raise e
+        parameters_json = json.dumps(parameters)
+    except TypeError as e:
+        logger.error(f"Failed to serialize parameters for tool {class_name}: {e}")
+        return {"status": "FAILURE", "error": f"Could not serialize parameters: {e}"}
+
+    cmd = [
+        SANDBOX_PYTHON_EXEC,
+        SANDBOX_RUNNER_SCRIPT_PATH,
+        repo_path,  # Pass repo path for sys.path manipulation within sandbox
+        module_path,
+        class_name,
+        parameters_json
+    ]
 
-def start_supervisord_session(sandbox: Sandbox):
-    """Start supervisord in a session."""
-    session_id = "supervisord-session"
     try:
-        logger.info(f"Creating session {session_id} for supervisord")
-        sandbox.process.create_session(session_id)
-
-        # Execute supervisord command
-        sandbox.process.execute_session_command(session_id, SessionExecuteRequest(
-            command="exec /usr/bin/supervisord -n -c /etc/supervisor/conf.d/supervisord.conf",
-            var_async=True
-        ))
-        logger.info(f"Supervisord started in session {session_id}")
-    except Exception as e:
-        logger.error(f"Error starting supervisord session: {str(e)}")
-        raise e
-
-def create_sandbox(password: str, project_id: str = None):
-    """Create a new sandbox with all required services configured and running."""
-
-    logger.debug("Creating new Daytona sandbox environment")
-    logger.debug("Configuring sandbox with browser-use image and environment variables")
-
-    labels = None
-    if project_id:
-        logger.debug(f"Using sandbox_id as label: {project_id}")
-        labels = {'id': project_id}
-
-    params = CreateSandboxParams(
-        image="adamcohenhillel/kortix-suna:0.0.20",
-        public=True,
-        labels=labels,
-        env_vars={
-            "CHROME_PERSISTENT_SESSION": "true",
-            "RESOLUTION": "1024x768x24",
-            "RESOLUTION_WIDTH": "1024",
-            "RESOLUTION_HEIGHT": "768",
-            "VNC_PASSWORD": password,
-            "ANONYMIZED_TELEMETRY": "false",
-            "CHROME_PATH": "",
-            "CHROME_USER_DATA": "",
-            "CHROME_DEBUGGING_PORT": "9222",
-            "CHROME_DEBUGGING_HOST": "localhost",
-            "CHROME_CDP": ""
-        },
-        resources={
-            "cpu": 2,
-            "memory": 4,
-            "disk": 5,
-        }
-    )
-
-    # Create the sandbox
-    sandbox = daytona.create(params)
-    logger.debug(f"Sandbox created with ID: {sandbox.id}")
-
-    # Start supervisord in a session for new sandbox
-    start_supervisord_session(sandbox)
-
-    logger.debug(f"Sandbox environment successfully initialized")
-    return sandbox
-
-
-class SandboxToolsBase(Tool):
-    """Base class for all sandbox tools that provides project-based sandbox access."""
-
-    # Class variable to track if sandbox URLs have been printed
-    _urls_printed = False
-
-    def __init__(self, project_id: str, thread_manager: Optional[ThreadManager] = None):
-        super().__init__()
-        self.project_id = project_id
-        self.thread_manager = thread_manager
-        self.workspace_path = "/workspace"
-        self._sandbox = None
-        self._sandbox_id = None
-        self._sandbox_pass = None
-
-    async def _ensure_sandbox(self) -> Sandbox:
-        """Ensure we have a valid sandbox instance, retrieving it from the project if needed."""
-        if self._sandbox is None:
+        logger.debug(f"Running sandbox command: {' '.join(cmd)}")
+        process = await asyncio.create_subprocess_exec(
+            *cmd,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE
+        )
+
+        stdout, stderr = await process.communicate()
+        stdout_decoded = stdout.decode().strip()
+        stderr_decoded = stderr.decode().strip()
+
+        if process.returncode != 0:
+            logger.error(f"Sandbox execution failed for {class_name}. Return code: {process.returncode}")
+            logger.error(f"Sandbox stderr: {stderr_decoded}")
+            logger.error(f"Sandbox stdout: {stdout_decoded}")  # Log stdout even on error for debugging
+            error_detail = stderr_decoded or f"Sandbox process exited with code {process.returncode}"
+            # Try to parse stdout as JSON even on error, might contain partial failure info
             try:
-                # Get database client
-                client = await self.thread_manager.db.client
-
-                # Get project data
-                project = await client.table('projects').select('*').eq('project_id', self.project_id).execute()
-                if not project.data or len(project.data) == 0:
-                    raise ValueError(f"Project {self.project_id} not found")
-
-                project_data = project.data[0]
-                sandbox_info = project_data.get('sandbox', {})
-
-                if not sandbox_info.get('id'):
-                    raise ValueError(f"No sandbox found for project {self.project_id}")
-
-                # Store sandbox info
-                self._sandbox_id = sandbox_info['id']
-                self._sandbox_pass = sandbox_info.get('pass')
-
-                # Get or start the sandbox
-                self._sandbox = await get_or_start_sandbox(self._sandbox_id)
-
-                # # Log URLs if not already printed
-                # if not SandboxToolsBase._urls_printed:
-                #     vnc_link = self._sandbox.get_preview_link(6080)
-                #     website_link = self._sandbox.get_preview_link(8080)
-
-                #     vnc_url = vnc_link.url if hasattr(vnc_link, 'url') else str(vnc_link)
-                #     website_url = website_link.url if hasattr(website_link, 'url') else str(website_link)
-
-                #     print("\033[95m***")
-                #     print(f"VNC URL: {vnc_url}")
-                #     print(f"Website URL: {website_url}")
-                #     print("***\033[0m")
-                #     SandboxToolsBase._urls_printed = True
-
-            except Exception as e:
-                logger.error(f"Error retrieving sandbox for project {self.project_id}: {str(e)}", exc_info=True)
-                raise e
-
-        return self._sandbox
-
-    @property
-    def sandbox(self) -> Sandbox:
-        """Get the sandbox instance, ensuring it exists."""
-        if self._sandbox is None:
-            raise RuntimeError("Sandbox not initialized. Call _ensure_sandbox() first.")
-        return self._sandbox
-
-    @property
-    def sandbox_id(self) -> str:
-        """Get the sandbox ID, ensuring it exists."""
-        if self._sandbox_id is None:
-            raise RuntimeError("Sandbox ID not initialized. Call _ensure_sandbox() first.")
-        return self._sandbox_id
-
-    def clean_path(self, path: str) -> str:
-        """Clean and normalize a path to be relative to /workspace."""
-        cleaned_path = clean_path(path, self.workspace_path)
-        logger.debug(f"Cleaned path: {path} -> {cleaned_path}")
-        return cleaned_path
+                result_json = json.loads(stdout_decoded)
+                if isinstance(result_json, dict) and "error" in result_json:
+                    error_detail = result_json["error"]
+            except json.JSONDecodeError:
+                pass  # Stick with the original error detail
+            return {"status": "FAILURE", "error": error_detail}
+
+        logger.info(f"Sandbox execution successful for {class_name}.")
+        logger.debug(f"Sandbox stdout: {stdout_decoded}")
+        if stderr_decoded:
+            logger.warning(f"Sandbox stderr (non-fatal): {stderr_decoded}")
+
+        try:
+            result = json.loads(stdout_decoded)
+            if not isinstance(result, dict):
+                raise ValueError("Sandbox output is not a JSON dictionary")
+            # Ensure status is present, default to FAILURE if missing
+            if "status" not in result:
+                result["status"] = "FAILURE"
+                result["error"] = "Tool execution result missing 'status' field."
+            return result
+        except json.JSONDecodeError as e:
+            logger.error(f"Failed to decode JSON output from sandbox for {class_name}: {e}")
+            logger.error(f"Raw sandbox stdout: {stdout_decoded}")
+            return {"status": "FAILURE", "error": f"Could not decode sandbox output: {e}"}
+        except ValueError as e:
+            logger.error(f"Invalid JSON output structure from sandbox for {class_name}: {e}")
+            logger.error(f"Raw sandbox stdout: {stdout_decoded}")
+            return {"status": "FAILURE", "error": f"Invalid sandbox output structure: {e}"}
+
+    except Exception as e:
+        logger.error(f"Failed to run sandbox process for {class_name}: {e}", exc_info=True)
+        return {"status": "FAILURE", "error": f"Failed to start or manage sandbox process: {e}"}
+
+# Example usage (would typically be called by an Agent)
+# async def main():
+#     # Assume fetch_and_discover_tools ran and found a tool
+#     repo_p = "/path/to/cloned/repo"
+#     module_p = "/path/to/cloned/repo/tools/my_tool.py"
+#     class_n = "MyTool"
+#     params = {"input_file": "/data/input.txt", "threshold": 0.5}
+#     result = await execute_in_sandbox(repo_p, module_p, class_n, params)
+#     print("Sandbox Execution Result:", result)
+
+# if __name__ == "__main__":
+#     import asyncio
+#     # Need to create the sandbox venv first: python3.11 -m venv /home/ubuntu/sandbox_venv
+#     asyncio.run(main())
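
sandbox_runner.py is referenced by execute_in_sandbox but not shown in this part of the diff. Its contract is implied by the caller: argv carries repo_path, module_path, class_name, and a JSON-encoded parameters string, and a JSON dict with a "status" field must be printed to stdout. A minimal sketch under those assumptions (the no-argument constructor and the execute() method of the tool class are assumptions, not anything this commit specifies):

# sandbox_runner.py -- hypothetical sketch of the runner side of the contract.
import importlib.util
import json
import sys

repo_path, module_path, class_name, parameters_json = sys.argv[1:5]
sys.path.insert(0, repo_path)  # let the tool import from its own repository

# Load the tool module directly from its file path.
spec = importlib.util.spec_from_file_location("sandboxed_tool", module_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)

# Instantiate and run the tool; execute() taking the parameters dict is assumed.
tool = getattr(module, class_name)()
result = tool.execute(json.loads(parameters_json))

# The caller parses stdout as a JSON dict with a "status" field.
print(json.dumps(result))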
setup_database.py ADDED
@@ -0,0 +1,45 @@
+# setup_database.py
+# Simple script to initialize the database using SQLAlchemy directly
+
+import os
+import sys
+from sqlalchemy import create_engine, text
+from sqlalchemy.orm import sessionmaker
+
+# Add the project root to the Python path
+sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
+
+# Import the Base and models
+from daedalus.database.models import Base
+from dotenv import load_dotenv
+
+# Load environment variables from .env file
+load_dotenv(os.path.join('daedalus', '.env'))
+
+# Get database URL from environment
+DATABASE_URL = os.getenv('DATABASE_URL', 'postgresql://visionos_user:visionos_pass@localhost:5432/visionos_db')
+
+def setup_database():
+    """Create all tables in the database."""
+    print(f"Connecting to database: {DATABASE_URL}")
+    engine = create_engine(DATABASE_URL)
+
+    print("Creating database tables...")
+    Base.metadata.create_all(bind=engine)
+    print("Database tables created successfully!")
+
+    # Create a session to test the connection
+    Session = sessionmaker(bind=engine)
+    session = Session()
+
+    try:
+        # Test the connection (SQLAlchemy 2.x requires raw SQL wrapped in text())
+        session.execute(text("SELECT 1"))
+        print("Database connection test successful!")
+    except Exception as e:
+        print(f"Error testing database connection: {e}")
+    finally:
+        session.close()
+
+if __name__ == "__main__":
+    setup_database()
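
Base.metadata.create_all only emits tables for models registered on the imported Base, so daedalus.database.models must declare them before this script runs. A minimal sketch of what such a declarative model looks like (this Task table is illustrative only, not the project's actual schema):

from sqlalchemy import Column, DateTime, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Task(Base):
    # Illustrative model; the real tables live in daedalus.database.models.
    __tablename__ = "tasks"

    id = Column(String, primary_key=True)
    status = Column(String, default="pending")
    created_at = Column(DateTime)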
simple_api.py ADDED
@@ -0,0 +1,79 @@
+from fastapi import FastAPI, HTTPException
+from fastapi.middleware.cors import CORSMiddleware
+import uuid
+from datetime import datetime
+from pydantic import BaseModel
+from typing import Dict, Any, List, Optional
+import uvicorn
+
+# Create FastAPI app
+app = FastAPI(
+    title="VisionOS SaaS Platform API",
+    description="Demo API for VisionOS SaaS Platform",
+    version="0.1.0"
+)
+
+# Enable CORS for frontend
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["http://localhost:3000", "http://localhost:3002"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+# Models for API
+class TaskBase(BaseModel):
+    input_data: Optional[Dict[str, Any]] = None
+    agent_type_requested: Optional[str] = None
+
+class TaskCreate(TaskBase):
+    pass
+
+class Task(TaskBase):
+    id: str
+    status: str = "pending"
+    created_at: datetime
+    result_data: Optional[Dict[str, Any]] = None
+
+# In-memory storage for demo
+tasks = {}
+
+@app.get("/")
+def read_root():
+    return {"message": "Welcome to VisionOS SaaS Platform API", "status": "operational"}
+
+@app.get("/api/v1/tasks", response_model=List[Task])
+def list_tasks():
+    return list(tasks.values())
+
+@app.post("/api/v1/tasks", response_model=Task)
+def create_task(task: TaskCreate):
+    task_id = str(uuid.uuid4())
+    new_task = Task(
+        id=task_id,
+        input_data=task.input_data,
+        agent_type_requested=task.agent_type_requested,
+        created_at=datetime.now(),
+        status="pending"
+    )
+    tasks[task_id] = new_task
+    return new_task
+
+@app.get("/api/v1/tasks/{task_id}", response_model=Task)
+def get_task(task_id: str):
+    if task_id not in tasks:
+        raise HTTPException(status_code=404, detail="Task not found")
+    return tasks[task_id]
+
+@app.put("/api/v1/tasks/{task_id}")
+def update_task(task_id: str):
+    if task_id not in tasks:
+        raise HTTPException(status_code=404, detail="Task not found")
+    # Simulate task completion
+    tasks[task_id].status = "completed"
+    tasks[task_id].result_data = {"message": "Task completed successfully", "timestamp": str(datetime.now())}
+    return tasks[task_id]
+
+if __name__ == "__main__":
+    uvicorn.run(app, host="0.0.0.0", port=8000)
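
With the demo API running on port 8000, the task lifecycle can be exercised end to end: create, complete via the PUT stub, then read back. A minimal client sketch using requests (the payload field is just one of the optional ones TaskCreate accepts):

import requests

BASE = "http://localhost:8000/api/v1/tasks"

# Create a task, then mark it completed via the PUT stub.
task = requests.post(BASE, json={"agent_type_requested": "demo"}).json()
requests.put(f"{BASE}/{task['id']}")
print(requests.get(f"{BASE}/{task['id']}").json()["status"])  # completed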
tasks.py CHANGED
@@ -8,9 +8,9 @@ from sqlalchemy.ext.asyncio import AsyncSession
 import redis.asyncio as redis
 
 from shared import schemas
-from crud import crud_task
-from database.session import get_db
-from core.config import settings
+from daedalus.crud import crud_task
+from daedalus.database.session import get_db
+from daedalus.core.config import settings
 
 router = APIRouter()
 
tool_sources.py CHANGED
@@ -4,8 +4,8 @@ from sqlalchemy.ext.asyncio import AsyncSession
 from typing import List
 from uuid import UUID
 
-from database import session as db_session
-from crud import crud_tool_source
+from daedalus.database import session as db_session
+from daedalus.crud import crud_tool_source
 from shared.schemas import ToolSource, ToolSourceCreate, ToolSourceRead
 
 router = APIRouter()
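
Both router modules now import through the daedalus package root instead of bare top-level names, so the imports resolve regardless of the working directory, provided the repository root is on sys.path. A minimal sketch of guaranteeing that from an entry-point script assumed to sit in the repository root:

import os
import sys

# Put the repository root (the directory containing the daedalus package)
# on sys.path before importing anything from it.
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

from daedalus.crud import crud_task  # now resolves regardless of CWD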
typing_extensions.py ADDED
The diff for this file is too large to render. See raw diff
 
uvicorn ADDED
@@ -0,0 +1,8 @@
+#!/Users/lattm/Downloads/home/visionos_venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from uvicorn.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())