Upload 251 files
This view is limited to 50 files because it contains too many changes.
- .gitattributes +5 -0
- 20240414161707_basejump-setup.sql +186 -0
- 20240414161947_basejump-accounts.sql +708 -0
- 20240414162100_basejump-invitations.sql +270 -0
- 20240414162131_basejump-billing.sql +236 -0
- 20250409211903_basejump-configure.sql +3 -0
- 20250409212058_initial.sql +189 -0
- 20250416133920_agentpress_schema.sql +382 -0
- ActiveJobsProvider.py +57 -0
- AmazonProvider.py +191 -0
- AuthProvider.tsx +75 -0
- BrowserToolView.tsx +195 -0
- CommandToolView.tsx +208 -0
- DataProviderToolView.tsx +229 -0
- Dockerfile +32 -0
- ExposePortToolView.tsx +194 -0
- FileOperationToolView.tsx +494 -0
- Frame 50.svg +31 -0
- GenericToolView.tsx +133 -0
- GoogleSignIn.tsx +182 -0
- LICENSE +201 -0
- LinkedinProvider.py +250 -0
- MANIFEST.in +17 -0
- README.md +36 -12
- RapidDataProviderBase.py +61 -0
- StrReplaceToolView.tsx +160 -0
- TwitterProvider.py +240 -0
- WebCrawlToolView.tsx +156 -0
- WebScrapeToolView.tsx +156 -0
- WebSearchToolView.tsx +129 -0
- YahooFinanceProvider.py +190 -0
- ZillowProvider.py +187 -0
- __init__.py +1 -0
- accept-team-invitation.tsx +36 -0
- accordion.tsx +66 -0
- account-billing-status.tsx +178 -0
- account-selector.tsx +165 -0
- actions.ts +140 -0
- alert-dialog.tsx +157 -0
- alert.tsx +66 -0
- api.py +311 -0
- api.py.bak +156 -0
- api.ts +1179 -0
- architecture_diagram.svg +0 -0
- auth_utils.py +177 -0
- avatar.tsx +53 -0
- badge.tsx +46 -0
- banner.png +3 -0
- bento-section.tsx +45 -0
- billing.py +125 -0
.gitattributes
CHANGED
@@ -33,3 +33,8 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+banner.png filter=lfs diff=lfs merge=lfs -text
+diagram.png filter=lfs diff=lfs merge=lfs -text
+holo.png filter=lfs diff=lfs merge=lfs -text
+mac.png filter=lfs diff=lfs merge=lfs -text
+worldoscollage.mp4 filter=lfs diff=lfs merge=lfs -text
20240414161707_basejump-setup.sql
ADDED
@@ -0,0 +1,186 @@
/**
____ _
| _ \ (_)
| |_) | __ _ ___ ___ _ _ _ _ __ ___ _ __
| _ < / _` / __|/ _ \ | | | | '_ ` _ \| '_ \
| |_) | (_| \__ \ __/ | |_| | | | | | | |_) |
|____/ \__,_|___/\___| |\__,_|_| |_| |_| .__/
_/ | | |
|__/ |_|

Basejump is a starter kit for building SaaS products on top of Supabase.
Learn more at https://usebasejump.com
*/


/**
* -------------------------------------------------------
* Section - Basejump schema setup and utility functions
* -------------------------------------------------------
*/

-- revoke execution by default from public
ALTER DEFAULT PRIVILEGES REVOKE EXECUTE ON FUNCTIONS FROM PUBLIC;
ALTER DEFAULT PRIVILEGES IN SCHEMA PUBLIC REVOKE EXECUTE ON FUNCTIONS FROM anon, authenticated;

-- Create basejump schema
CREATE SCHEMA IF NOT EXISTS basejump;
GRANT USAGE ON SCHEMA basejump to authenticated;
GRANT USAGE ON SCHEMA basejump to service_role;

/**
* -------------------------------------------------------
* Section - Enums
* -------------------------------------------------------
*/

/**
* Invitation types are either email or link. Email invitations are sent to
* a single user and can only be claimed once. Link invitations can be used multiple times
* Both expire after 24 hours
*/
DO
$$
BEGIN
-- check it account_role already exists on basejump schema
IF NOT EXISTS(SELECT 1
FROM pg_type t
JOIN pg_namespace n ON n.oid = t.typnamespace
WHERE t.typname = 'invitation_type'
AND n.nspname = 'basejump') THEN
CREATE TYPE basejump.invitation_type AS ENUM ('one_time', '24_hour');
end if;
end;
$$;

/**
* -------------------------------------------------------
* Section - Basejump settings
* -------------------------------------------------------
*/

CREATE TABLE IF NOT EXISTS basejump.config
(
enable_team_accounts boolean default true,
enable_personal_account_billing boolean default true,
enable_team_account_billing boolean default true,
billing_provider text default 'stripe'
);

-- create config row
INSERT INTO basejump.config (enable_team_accounts, enable_personal_account_billing, enable_team_account_billing)
VALUES (true, true, true);

-- enable select on the config table
GRANT SELECT ON basejump.config TO authenticated, service_role;

-- enable RLS on config
ALTER TABLE basejump.config
ENABLE ROW LEVEL SECURITY;

create policy "Basejump settings can be read by authenticated users" on basejump.config
for select
to authenticated
using (
true
);

/**
* -------------------------------------------------------
* Section - Basejump utility functions
* -------------------------------------------------------
*/

/**
basejump.get_config()
Get the full config object to check basejump settings
This is not accessible from the outside, so can only be used inside postgres functions
*/
CREATE OR REPLACE FUNCTION basejump.get_config()
RETURNS json AS
$$
DECLARE
result RECORD;
BEGIN
SELECT * from basejump.config limit 1 into result;
return row_to_json(result);
END;
$$ LANGUAGE plpgsql;

grant execute on function basejump.get_config() to authenticated, service_role;


/**
basejump.is_set("field_name")
Check a specific boolean config value
*/
CREATE OR REPLACE FUNCTION basejump.is_set(field_name text)
RETURNS boolean AS
$$
DECLARE
result BOOLEAN;
BEGIN
execute format('select %I from basejump.config limit 1', field_name) into result;
return result;
END;
$$ LANGUAGE plpgsql;

grant execute on function basejump.is_set(text) to authenticated;


/**
* Automatic handling for maintaining created_at and updated_at timestamps
* on tables
*/
CREATE OR REPLACE FUNCTION basejump.trigger_set_timestamps()
RETURNS TRIGGER AS
$$
BEGIN
if TG_OP = 'INSERT' then
NEW.created_at = now();
NEW.updated_at = now();
else
NEW.updated_at = now();
NEW.created_at = OLD.created_at;
end if;
RETURN NEW;
END
$$ LANGUAGE plpgsql;


/**
* Automatic handling for maintaining created_by and updated_by timestamps
* on tables
*/
CREATE OR REPLACE FUNCTION basejump.trigger_set_user_tracking()
RETURNS TRIGGER AS
$$
BEGIN
if TG_OP = 'INSERT' then
NEW.created_by = auth.uid();
NEW.updated_by = auth.uid();
else
NEW.updated_by = auth.uid();
NEW.created_by = OLD.created_by;
end if;
RETURN NEW;
END
$$ LANGUAGE plpgsql;

/**
basejump.generate_token(length)
Generates a secure token - used internally for invitation tokens
but could be used elsewhere. Check out the invitations table for more info on
how it's used
*/
CREATE OR REPLACE FUNCTION basejump.generate_token(length int)
RETURNS text AS
$$
select regexp_replace(replace(
replace(replace(replace(encode(gen_random_bytes(length)::bytea, 'base64'), '/', ''), '+',
''), '\', ''),
'=',
''), E'[\\n\\r]+', '', 'g');
$$ LANGUAGE sql;

grant execute on function basejump.generate_token(int) to authenticated;
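
The three helpers defined above (basejump.get_config(), basejump.is_set(), and basejump.generate_token()) are consumed by the later migrations rather than called directly by clients. The lines below are not part of the migration; they are a minimal usage sketch, assuming the schema above has been applied, showing how each helper is typically invoked from SQL.

-- Illustrative sketch only; not part of the committed migration.
-- Read the whole config row as JSON (only reachable from inside Postgres functions):
select basejump.get_config();
-- Check a single flag; the account and invitation policies in later migrations use this exact call:
select basejump.is_set('enable_team_accounts');
-- Generate a random token; the invitations table uses this as its token column default:
select basejump.generate_token(30);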
20240414161947_basejump-accounts.sql
ADDED
@@ -0,0 +1,708 @@
/**
____ _
| _ \ (_)
| |_) | __ _ ___ ___ _ _ _ _ __ ___ _ __
| _ < / _` / __|/ _ \ | | | | '_ ` _ \| '_ \
| |_) | (_| \__ \ __/ | |_| | | | | | | |_) |
|____/ \__,_|___/\___| |\__,_|_| |_| |_| .__/
_/ | | |
|__/ |_|

Basejump is a starter kit for building SaaS products on top of Supabase.
Learn more at https://usebasejump.com
*/

/**
* -------------------------------------------------------
* Section - Accounts
* -------------------------------------------------------
*/

/**
* Account roles allow you to provide permission levels to users
* when they're acting on an account. By default, we provide
* "owner" and "member". The only distinction is that owners can
* also manage billing and invite/remove account members.
*/
DO
$$
BEGIN
-- check it account_role already exists on basejump schema
IF NOT EXISTS(SELECT 1
FROM pg_type t
JOIN pg_namespace n ON n.oid = t.typnamespace
WHERE t.typname = 'account_role'
AND n.nspname = 'basejump') THEN
CREATE TYPE basejump.account_role AS ENUM ('owner', 'member');
end if;
end;
$$;

/**
* Accounts are the primary grouping for most objects within
* the system. They have many users, and all billing is connected to
* an account.
*/
CREATE TABLE IF NOT EXISTS basejump.accounts
(
id uuid unique NOT NULL DEFAULT extensions.uuid_generate_v4(),
-- defaults to the user who creates the account
-- this user cannot be removed from an account without changing
-- the primary owner first
primary_owner_user_id uuid references auth.users not null default auth.uid(),
-- Account name
name text,
slug text unique,
personal_account boolean default false not null,
updated_at timestamp with time zone,
created_at timestamp with time zone,
created_by uuid references auth.users,
updated_by uuid references auth.users,
private_metadata jsonb default '{}'::jsonb,
public_metadata jsonb default '{}'::jsonb,
PRIMARY KEY (id)
);

-- constraint that conditionally allows nulls on the slug ONLY if personal_account is true
-- remove this if you want to ignore accounts slugs entirely
ALTER TABLE basejump.accounts
ADD CONSTRAINT basejump_accounts_slug_null_if_personal_account_true CHECK (
(personal_account = true AND slug is null)
OR (personal_account = false AND slug is not null)
);

-- Open up access to accounts
GRANT SELECT, INSERT, UPDATE, DELETE ON TABLE basejump.accounts TO authenticated, service_role;

/**
* We want to protect some fields on accounts from being updated
* Specifically the primary owner user id and account id.
* primary_owner_user_id should be updated using the dedicated function
*/
CREATE OR REPLACE FUNCTION basejump.protect_account_fields()
RETURNS TRIGGER AS
$$
BEGIN
IF current_user IN ('authenticated', 'anon') THEN
-- these are protected fields that users are not allowed to update themselves
-- platform admins should be VERY careful about updating them as well.
if NEW.id <> OLD.id
OR NEW.personal_account <> OLD.personal_account
OR NEW.primary_owner_user_id <> OLD.primary_owner_user_id
THEN
RAISE EXCEPTION 'You do not have permission to update this field';
end if;
end if;

RETURN NEW;
END
$$ LANGUAGE plpgsql;

-- trigger to protect account fields
CREATE TRIGGER basejump_protect_account_fields
BEFORE UPDATE
ON basejump.accounts
FOR EACH ROW
EXECUTE FUNCTION basejump.protect_account_fields();

-- convert any character in the slug that's not a letter, number, or dash to a dash on insert/update for accounts
CREATE OR REPLACE FUNCTION basejump.slugify_account_slug()
RETURNS TRIGGER AS
$$
BEGIN
if NEW.slug is not null then
NEW.slug = lower(regexp_replace(NEW.slug, '[^a-zA-Z0-9-]+', '-', 'g'));
end if;

RETURN NEW;
END
$$ LANGUAGE plpgsql;

-- trigger to slugify the account slug
CREATE TRIGGER basejump_slugify_account_slug
BEFORE INSERT OR UPDATE
ON basejump.accounts
FOR EACH ROW
EXECUTE FUNCTION basejump.slugify_account_slug();

-- enable RLS for accounts
alter table basejump.accounts
enable row level security;

-- protect the timestamps
CREATE TRIGGER basejump_set_accounts_timestamp
BEFORE INSERT OR UPDATE
ON basejump.accounts
FOR EACH ROW
EXECUTE PROCEDURE basejump.trigger_set_timestamps();

-- set the user tracking
CREATE TRIGGER basejump_set_accounts_user_tracking
BEFORE INSERT OR UPDATE
ON basejump.accounts
FOR EACH ROW
EXECUTE PROCEDURE basejump.trigger_set_user_tracking();

/**
* Account users are the users that are associated with an account.
* They can be invited to join the account, and can have different roles.
* The system does not enforce any permissions for roles, other than restricting
* billing and account membership to only owners
*/
create table if not exists basejump.account_user
(
-- id of the user in the account
user_id uuid references auth.users on delete cascade not null,
-- id of the account the user is in
account_id uuid references basejump.accounts on delete cascade not null,
-- role of the user in the account
account_role basejump.account_role not null,
constraint account_user_pkey primary key (user_id, account_id)
);

GRANT SELECT, INSERT, UPDATE, DELETE ON TABLE basejump.account_user TO authenticated, service_role;


-- enable RLS for account_user
alter table basejump.account_user
enable row level security;

/**
* When an account gets created, we want to insert the current user as the first
* owner
*/
create or replace function basejump.add_current_user_to_new_account()
returns trigger
language plpgsql
security definer
set search_path = public
as
$$
begin
if new.primary_owner_user_id = auth.uid() then
insert into basejump.account_user (account_id, user_id, account_role)
values (NEW.id, auth.uid(), 'owner');
end if;
return NEW;
end;
$$;

-- trigger the function whenever a new account is created
CREATE TRIGGER basejump_add_current_user_to_new_account
AFTER INSERT
ON basejump.accounts
FOR EACH ROW
EXECUTE FUNCTION basejump.add_current_user_to_new_account();

/**
* When a user signs up, we need to create a personal account for them
* and add them to the account_user table so they can act on it
*/
create or replace function basejump.run_new_user_setup()
returns trigger
language plpgsql
security definer
set search_path = public
as
$$
declare
first_account_id uuid;
generated_user_name text;
begin

-- first we setup the user profile
-- TODO: see if we can get the user's name from the auth.users table once we learn how oauth works
if new.email IS NOT NULL then
generated_user_name := split_part(new.email, '@', 1);
end if;
-- create the new users's personal account
insert into basejump.accounts (name, primary_owner_user_id, personal_account, id)
values (generated_user_name, NEW.id, true, NEW.id)
returning id into first_account_id;

-- add them to the account_user table so they can act on it
insert into basejump.account_user (account_id, user_id, account_role)
values (first_account_id, NEW.id, 'owner');

return NEW;
end;
$$;

-- trigger the function every time a user is created
create trigger on_auth_user_created
after insert
on auth.users
for each row
execute procedure basejump.run_new_user_setup();

/**
* -------------------------------------------------------
* Section - Account permission utility functions
* -------------------------------------------------------
* These functions are stored on the basejump schema, and useful for things like
* generating RLS policies
*/

/**
* Returns true if the current user has the pass in role on the passed in account
* If no role is sent, will return true if the user is a member of the account
* NOTE: This is an inefficient function when used on large query sets. You should reach for the get_accounts_with_role and lookup
* the account ID in those cases.
*/
create or replace function basejump.has_role_on_account(account_id uuid, account_role basejump.account_role default null)
returns boolean
language sql
security definer
set search_path = public
as
$$
select exists(
select 1
from basejump.account_user wu
where wu.user_id = auth.uid()
and wu.account_id = has_role_on_account.account_id
and (
wu.account_role = has_role_on_account.account_role
or has_role_on_account.account_role is null
)
);
$$;

grant execute on function basejump.has_role_on_account(uuid, basejump.account_role) to authenticated, anon, public, service_role;


/**
* Returns account_ids that the current user is a member of. If you pass in a role,
* it'll only return accounts that the user is a member of with that role.
*/
create or replace function basejump.get_accounts_with_role(passed_in_role basejump.account_role default null)
returns setof uuid
language sql
security definer
set search_path = public
as
$$
select account_id
from basejump.account_user wu
where wu.user_id = auth.uid()
and (
wu.account_role = passed_in_role
or passed_in_role is null
);
$$;

grant execute on function basejump.get_accounts_with_role(basejump.account_role) to authenticated;

/**
* -------------------------
* Section - RLS Policies
* -------------------------
* This is where we define access to tables in the basejump schema
*/

create policy "users can view their own account_users" on basejump.account_user
for select
to authenticated
using (
user_id = auth.uid()
);

create policy "users can view their teammates" on basejump.account_user
for select
to authenticated
using (
basejump.has_role_on_account(account_id) = true
);

create policy "Account users can be deleted by owners except primary account owner" on basejump.account_user
for delete
to authenticated
using (
(basejump.has_role_on_account(account_id, 'owner') = true)
AND
user_id != (select primary_owner_user_id
from basejump.accounts
where account_id = accounts.id)
);

create policy "Accounts are viewable by members" on basejump.accounts
for select
to authenticated
using (
basejump.has_role_on_account(id) = true
);

-- Primary owner should always have access to the account
create policy "Accounts are viewable by primary owner" on basejump.accounts
for select
to authenticated
using (
primary_owner_user_id = auth.uid()
);

create policy "Team accounts can be created by any user" on basejump.accounts
for insert
to authenticated
with check (
basejump.is_set('enable_team_accounts') = true
and personal_account = false
);


create policy "Accounts can be edited by owners" on basejump.accounts
for update
to authenticated
using (
basejump.has_role_on_account(id, 'owner') = true
);

/**
* -------------------------------------------------------
* Section - Public functions
* -------------------------------------------------------
* Each of these functions exists in the public name space because they are accessible
* via the API. it is the primary way developers can interact with Basejump accounts
*/

/**
* Returns the account_id for a given account slug
*/

create or replace function public.get_account_id(slug text)
returns uuid
language sql
as
$$
select id
from basejump.accounts
where slug = get_account_id.slug;
$$;

grant execute on function public.get_account_id(text) to authenticated, service_role;

/**
* Returns the current user's role within a given account_id
*/
create or replace function public.current_user_account_role(account_id uuid)
returns jsonb
language plpgsql
as
$$
DECLARE
response jsonb;
BEGIN

select jsonb_build_object(
'account_role', wu.account_role,
'is_primary_owner', a.primary_owner_user_id = auth.uid(),
'is_personal_account', a.personal_account
)
into response
from basejump.account_user wu
join basejump.accounts a on a.id = wu.account_id
where wu.user_id = auth.uid()
and wu.account_id = current_user_account_role.account_id;

-- if the user is not a member of the account, throw an error
if response ->> 'account_role' IS NULL then
raise exception 'Not found';
end if;

return response;
END
$$;

grant execute on function public.current_user_account_role(uuid) to authenticated;

/**
* Let's you update a users role within an account if you are an owner of that account
**/
create or replace function public.update_account_user_role(account_id uuid, user_id uuid,
new_account_role basejump.account_role,
make_primary_owner boolean default false)
returns void
security definer
set search_path = public
language plpgsql
as
$$
declare
is_account_owner boolean;
is_account_primary_owner boolean;
changing_primary_owner boolean;
begin
-- check if the user is an owner, and if they are, allow them to update the role
select basejump.has_role_on_account(update_account_user_role.account_id, 'owner') into is_account_owner;

if not is_account_owner then
raise exception 'You must be an owner of the account to update a users role';
end if;

-- check if the user being changed is the primary owner, if so its not allowed
select primary_owner_user_id = auth.uid(), primary_owner_user_id = update_account_user_role.user_id
into is_account_primary_owner, changing_primary_owner
from basejump.accounts
where id = update_account_user_role.account_id;

if changing_primary_owner = true and is_account_primary_owner = false then
raise exception 'You must be the primary owner of the account to change the primary owner';
end if;

update basejump.account_user au
set account_role = new_account_role
where au.account_id = update_account_user_role.account_id
and au.user_id = update_account_user_role.user_id;

if make_primary_owner = true then
-- first we see if the current user is the owner, only they can do this
if is_account_primary_owner = false then
raise exception 'You must be the primary owner of the account to change the primary owner';
end if;

update basejump.accounts
set primary_owner_user_id = update_account_user_role.user_id
where id = update_account_user_role.account_id;
end if;
end;
$$;

grant execute on function public.update_account_user_role(uuid, uuid, basejump.account_role, boolean) to authenticated;

/**
Returns the current user's accounts
*/
create or replace function public.get_accounts()
returns json
language sql
as
$$
select coalesce(json_agg(
json_build_object(
'account_id', wu.account_id,
'account_role', wu.account_role,
'is_primary_owner', a.primary_owner_user_id = auth.uid(),
'name', a.name,
'slug', a.slug,
'personal_account', a.personal_account,
'created_at', a.created_at,
'updated_at', a.updated_at
)
), '[]'::json)
from basejump.account_user wu
join basejump.accounts a on a.id = wu.account_id
where wu.user_id = auth.uid();
$$;

grant execute on function public.get_accounts() to authenticated;

/**
Returns a specific account that the current user has access to
*/
create or replace function public.get_account(account_id uuid)
returns json
language plpgsql
as
$$
BEGIN
-- check if the user is a member of the account or a service_role user
if current_user IN ('anon', 'authenticated') and
(select current_user_account_role(get_account.account_id) ->> 'account_role' IS NULL) then
raise exception 'You must be a member of an account to access it';
end if;


return (select json_build_object(
'account_id', a.id,
'account_role', wu.account_role,
'is_primary_owner', a.primary_owner_user_id = auth.uid(),
'name', a.name,
'slug', a.slug,
'personal_account', a.personal_account,
'billing_enabled', case
when a.personal_account = true then
config.enable_personal_account_billing
else
config.enable_team_account_billing
end,
'billing_status', bs.status,
'created_at', a.created_at,
'updated_at', a.updated_at,
'metadata', a.public_metadata
)
from basejump.accounts a
left join basejump.account_user wu on a.id = wu.account_id and wu.user_id = auth.uid()
join basejump.config config on true
left join (select bs.account_id, status
from basejump.billing_subscriptions bs
where bs.account_id = get_account.account_id
order by created desc
limit 1) bs on bs.account_id = a.id
where a.id = get_account.account_id);
END;
$$;

grant execute on function public.get_account(uuid) to authenticated, service_role;

/**
Returns a specific account that the current user has access to
*/
create or replace function public.get_account_by_slug(slug text)
returns json
language plpgsql
as
$$
DECLARE
internal_account_id uuid;
BEGIN
select a.id
into internal_account_id
from basejump.accounts a
where a.slug IS NOT NULL
and a.slug = get_account_by_slug.slug;

return public.get_account(internal_account_id);
END;
$$;

grant execute on function public.get_account_by_slug(text) to authenticated;

/**
Returns the personal account for the current user
*/
create or replace function public.get_personal_account()
returns json
language plpgsql
as
$$
BEGIN
return public.get_account(auth.uid());
END;
$$;

grant execute on function public.get_personal_account() to authenticated;

/**
* Create an account
*/
create or replace function public.create_account(slug text default null, name text default null)
returns json
language plpgsql
as
$$
DECLARE
new_account_id uuid;
BEGIN
insert into basejump.accounts (slug, name)
values (create_account.slug, create_account.name)
returning id into new_account_id;

return public.get_account(new_account_id);
EXCEPTION
WHEN unique_violation THEN
raise exception 'An account with that unique ID already exists';
END;
$$;

grant execute on function public.create_account(slug text, name text) to authenticated;

/**
Update an account with passed in info. None of the info is required except for account ID.
If you don't pass in a value for a field, it will not be updated.
If you set replace_meta to true, the metadata will be replaced with the passed in metadata.
If you set replace_meta to false, the metadata will be merged with the passed in metadata.
*/
create or replace function public.update_account(account_id uuid, slug text default null, name text default null,
public_metadata jsonb default null,
replace_metadata boolean default false)
returns json
language plpgsql
as
$$
BEGIN

-- check if postgres role is service_role
if current_user IN ('anon', 'authenticated') and
not (select current_user_account_role(update_account.account_id) ->> 'account_role' = 'owner') then
raise exception 'Only account owners can update an account';
end if;

update basejump.accounts accounts
set slug = coalesce(update_account.slug, accounts.slug),
name = coalesce(update_account.name, accounts.name),
public_metadata = case
when update_account.public_metadata is null then accounts.public_metadata -- do nothing
when accounts.public_metadata IS NULL then update_account.public_metadata -- set metadata
when update_account.replace_metadata
then update_account.public_metadata -- replace metadata
else accounts.public_metadata || update_account.public_metadata end -- merge metadata
where accounts.id = update_account.account_id;

return public.get_account(account_id);
END;
$$;

grant execute on function public.update_account(uuid, text, text, jsonb, boolean) to authenticated, service_role;

/**
Returns a list of current account members. Only account owners can access this function.
It's a security definer because it requries us to lookup personal_accounts for existing members so we can
get their names.
*/
create or replace function public.get_account_members(account_id uuid, results_limit integer default 50,
results_offset integer default 0)
returns json
language plpgsql
security definer
set search_path = basejump
as
$$
BEGIN

-- only account owners can access this function
if (select public.current_user_account_role(get_account_members.account_id) ->> 'account_role' <> 'owner') then
raise exception 'Only account owners can access this function';
end if;

return (select json_agg(
json_build_object(
'user_id', wu.user_id,
'account_role', wu.account_role,
'name', p.name,
'email', u.email,
'is_primary_owner', a.primary_owner_user_id = wu.user_id
)
)
from basejump.account_user wu
join basejump.accounts a on a.id = wu.account_id
join basejump.accounts p on p.primary_owner_user_id = wu.user_id and p.personal_account = true
join auth.users u on u.id = wu.user_id
where wu.account_id = get_account_members.account_id
limit coalesce(get_account_members.results_limit, 50) offset coalesce(get_account_members.results_offset, 0));
END;
$$;

grant execute on function public.get_account_members(uuid, integer, integer) to authenticated;

/**
Allows an owner of the account to remove any member other than the primary owner
*/

create or replace function public.remove_account_member(account_id uuid, user_id uuid)
returns void
language plpgsql
as
$$
BEGIN
-- only account owners can access this function
if basejump.has_role_on_account(remove_account_member.account_id, 'owner') <> true then
raise exception 'Only account owners can access this function';
end if;

delete
from basejump.account_user wu
where wu.account_id = remove_account_member.account_id
and wu.user_id = remove_account_member.user_id;
END;
$$;

grant execute on function public.remove_account_member(uuid, uuid) to authenticated;
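
The public.* functions in this migration are the client-facing API and are called over Supabase RPC/PostgREST. The lines below are not part of the migration; they are a hedged sketch of one possible owner-side session, with made-up slug, name, and user id values, assuming the migrations above have been applied.

-- Illustrative sketch only; all literal values are placeholders.
-- Create a team account; the triggers above add the caller as its first owner.
select public.create_account(slug => 'acme-co', name => 'Acme Co');
-- List every account the current user belongs to.
select public.get_accounts();
-- Resolve the slug and inspect the caller's role on that account.
select public.get_account_by_slug('acme-co');
select public.current_user_account_role(public.get_account_id('acme-co'));
-- Promote a member to owner (owners only; a primary-owner transfer additionally
-- requires the caller to be the primary owner). The user id is a placeholder.
select public.update_account_user_role(
    account_id       => public.get_account_id('acme-co'),
    user_id          => '00000000-0000-0000-0000-000000000000',
    new_account_role => 'owner'
);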
20240414162100_basejump-invitations.sql
ADDED
@@ -0,0 +1,270 @@
/**
* -------------------------------------------------------
* Section - Invitations
* -------------------------------------------------------
*/

/**
* Invitations are sent to users to join a account
* They pre-define the role the user should have once they join
*/
create table if not exists basejump.invitations
(
-- the id of the invitation
id uuid unique not null default extensions.uuid_generate_v4(),
-- what role should invitation accepters be given in this account
account_role basejump.account_role not null,
-- the account the invitation is for
account_id uuid references basejump.accounts (id) on delete cascade not null,
-- unique token used to accept the invitation
token text unique not null default basejump.generate_token(30),
-- who created the invitation
invited_by_user_id uuid references auth.users not null,
-- account name. filled in by a trigger
account_name text,
-- when the invitation was last updated
updated_at timestamp with time zone,
-- when the invitation was created
created_at timestamp with time zone,
-- what type of invitation is this
invitation_type basejump.invitation_type not null,
primary key (id)
);

-- Open up access to invitations
GRANT SELECT, INSERT, UPDATE, DELETE ON TABLE basejump.invitations TO authenticated, service_role;

-- manage timestamps
CREATE TRIGGER basejump_set_invitations_timestamp
BEFORE INSERT OR UPDATE
ON basejump.invitations
FOR EACH ROW
EXECUTE FUNCTION basejump.trigger_set_timestamps();

/**
* This funciton fills in account info and inviting user email
* so that the recipient can get more info about the invitation prior to
* accepting. It allows us to avoid complex permissions on accounts
*/
CREATE OR REPLACE FUNCTION basejump.trigger_set_invitation_details()
RETURNS TRIGGER AS
$$
BEGIN
NEW.invited_by_user_id = auth.uid();
NEW.account_name = (select name from basejump.accounts where id = NEW.account_id);
RETURN NEW;
END
$$ LANGUAGE plpgsql;

CREATE TRIGGER basejump_trigger_set_invitation_details
BEFORE INSERT
ON basejump.invitations
FOR EACH ROW
EXECUTE FUNCTION basejump.trigger_set_invitation_details();

-- enable RLS on invitations
alter table basejump.invitations
enable row level security;

/**
* -------------------------
* Section - RLS Policies
* -------------------------
* This is where we define access to tables in the basejump schema
*/

create policy "Invitations viewable by account owners" on basejump.invitations
for select
to authenticated
using (
created_at > (now() - interval '24 hours')
and
basejump.has_role_on_account(account_id, 'owner') = true
);


create policy "Invitations can be created by account owners" on basejump.invitations
for insert
to authenticated
with check (
-- team accounts should be enabled
basejump.is_set('enable_team_accounts') = true
-- this should not be a personal account
and (SELECT personal_account
FROM basejump.accounts
WHERE id = account_id) = false
-- the inserting user should be an owner of the account
and
(basejump.has_role_on_account(account_id, 'owner') = true)
);

create policy "Invitations can be deleted by account owners" on basejump.invitations
for delete
to authenticated
using (
basejump.has_role_on_account(account_id, 'owner') = true
);



/**
* -------------------------------------------------------
* Section - Public functions
* -------------------------------------------------------
* Each of these functions exists in the public name space because they are accessible
* via the API. it is the primary way developers can interact with Basejump accounts
*/


/**
Returns a list of currently active invitations for a given account
*/

create or replace function public.get_account_invitations(account_id uuid, results_limit integer default 25,
results_offset integer default 0)
returns json
language plpgsql
as
$$
BEGIN
-- only account owners can access this function
if (select public.current_user_account_role(get_account_invitations.account_id) ->> 'account_role' <> 'owner') then
raise exception 'Only account owners can access this function';
end if;

return (select json_agg(
json_build_object(
'account_role', i.account_role,
'created_at', i.created_at,
'invitation_type', i.invitation_type,
'invitation_id', i.id
)
)
from basejump.invitations i
where i.account_id = get_account_invitations.account_id
and i.created_at > now() - interval '24 hours'
limit coalesce(get_account_invitations.results_limit, 25) offset coalesce(get_account_invitations.results_offset, 0));
END;
$$;

grant execute on function public.get_account_invitations(uuid, integer, integer) to authenticated;


/**
* Allows a user to accept an existing invitation and join a account
* This one exists in the public schema because we want it to be called
* using the supabase rpc method
*/
create or replace function public.accept_invitation(lookup_invitation_token text)
returns jsonb
language plpgsql
security definer set search_path = public, basejump
as
$$
declare
lookup_account_id uuid;
declare new_member_role basejump.account_role;
lookup_account_slug text;
begin
select i.account_id, i.account_role, a.slug
into lookup_account_id, new_member_role, lookup_account_slug
from basejump.invitations i
join basejump.accounts a on a.id = i.account_id
where i.token = lookup_invitation_token
and i.created_at > now() - interval '24 hours';

if lookup_account_id IS NULL then
raise exception 'Invitation not found';
end if;

if lookup_account_id is not null then
-- we've validated the token is real, so grant the user access
insert into basejump.account_user (account_id, user_id, account_role)
values (lookup_account_id, auth.uid(), new_member_role);
-- email types of invitations are only good for one usage
delete from basejump.invitations where token = lookup_invitation_token and invitation_type = 'one_time';
end if;
return json_build_object('account_id', lookup_account_id, 'account_role', new_member_role, 'slug',
lookup_account_slug);
EXCEPTION
WHEN unique_violation THEN
raise exception 'You are already a member of this account';
end;
$$;

grant execute on function public.accept_invitation(text) to authenticated;


/**
* Allows a user to lookup an existing invitation and join a account
* This one exists in the public schema because we want it to be called
* using the supabase rpc method
*/
create or replace function public.lookup_invitation(lookup_invitation_token text)
returns json
language plpgsql
security definer set search_path = public, basejump
as
$$
declare
name text;
invitation_active boolean;
begin
select account_name,
case when id IS NOT NULL then true else false end as active
into name, invitation_active
from basejump.invitations
where token = lookup_invitation_token
and created_at > now() - interval '24 hours'
limit 1;
return json_build_object('active', coalesce(invitation_active, false), 'account_name', name);
end;
$$;

grant execute on function public.lookup_invitation(text) to authenticated;


/**
Allows a user to create a new invitation if they are an owner of an account
*/
create or replace function public.create_invitation(account_id uuid, account_role basejump.account_role,
invitation_type basejump.invitation_type)
returns json
language plpgsql
as
$$
declare
new_invitation basejump.invitations;
begin
insert into basejump.invitations (account_id, account_role, invitation_type, invited_by_user_id)
values (account_id, account_role, invitation_type, auth.uid())
returning * into new_invitation;

return json_build_object('token', new_invitation.token);
end
$$;

grant execute on function public.create_invitation(uuid, basejump.account_role, basejump.invitation_type) to authenticated;

/**
Allows an owner to delete an existing invitation
*/

create or replace function public.delete_invitation(invitation_id uuid)
returns void
language plpgsql
as
$$
begin
-- verify account owner for the invitation
if basejump.has_role_on_account(
(select account_id from basejump.invitations where id = delete_invitation.invitation_id), 'owner') <>
true then
raise exception 'Only account owners can delete invitations';
end if;

delete from basejump.invitations where id = delete_invitation.invitation_id;
end
$$;

grant execute on function public.delete_invitation(uuid) to authenticated;
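
Taken together, create_invitation, lookup_invitation, and accept_invitation implement the invite flow described in the comments above: an owner mints a token, the recipient previews it, then joins. The lines below are not part of the migration; they are a minimal sketch of that flow, assuming the earlier migrations are applied and using placeholder values.

-- Illustrative sketch only; the slug and token values are placeholders.
-- 1. An account owner mints an invitation and receives a token to share:
select public.create_invitation(
    account_id      => public.get_account_id('acme-co'),
    account_role    => 'member',
    invitation_type => 'one_time'
);
-- 2. The invited user, in their own session, previews it by token:
select public.lookup_invitation('PASTE-TOKEN-HERE');
-- 3. If still active (invitations expire after 24 hours), they join the account:
select public.accept_invitation('PASTE-TOKEN-HERE');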
20240414162131_basejump-billing.sql
ADDED
@@ -0,0 +1,236 @@
/**
* -------------------------------------------------------
* Section - Billing
* -------------------------------------------------------
*/

/**
* Subscription Status
* Tracks the current status of the account subscription
*/
DO
$$
BEGIN
IF NOT EXISTS(SELECT 1
FROM pg_type t
JOIN pg_namespace n ON n.oid = t.typnamespace
WHERE t.typname = 'subscription_status'
AND n.nspname = 'basejump') THEN
create type basejump.subscription_status as enum (
'trialing',
'active',
'canceled',
'incomplete',
'incomplete_expired',
'past_due',
'unpaid'
);
end if;
end;
$$;


/**
* Billing customer
* This is a private table that contains a mapping of user IDs to your billing providers IDs
*/
create table if not exists basejump.billing_customers
(
-- UUID from auth.users
account_id uuid references basejump.accounts (id) on delete cascade not null,
-- The user's customer ID in Stripe. User must not be able to update this.
id text primary key,
-- The email address the customer wants to use for invoicing
email text,
-- The active status of a customer
active boolean,
-- The billing provider the customer is using
provider text
);

-- Open up access to billing_customers
GRANT SELECT, INSERT, UPDATE, DELETE ON TABLE basejump.billing_customers TO service_role;
GRANT SELECT ON TABLE basejump.billing_customers TO authenticated;


-- enable RLS for billing_customers
alter table
basejump.billing_customers
enable row level security;

/**
* Billing subscriptions
* This is a private table that contains a mapping of account IDs to your billing providers subscription IDs
*/
create table if not exists basejump.billing_subscriptions
(
-- Subscription ID from Stripe, e.g. sub_1234.
id text primary key,
account_id uuid references basejump.accounts (id) on delete cascade not null,
billing_customer_id text references basejump.billing_customers (id) on delete cascade not null,
-- The status of the subscription object, one of subscription_status type above.
status basejump.subscription_status,
-- Set of key-value pairs, used to store additional information about the object in a structured format.
metadata jsonb,
-- ID of the price that created this subscription.
price_id text,
plan_name text,
-- Quantity multiplied by the unit amount of the price creates the amount of the subscription. Can be used to charge multiple seats.
quantity integer,
-- If true the subscription has been canceled by the user and will be deleted at the end of the billing period.
cancel_at_period_end boolean,
-- Time at which the subscription was created.
created timestamp with time zone default timezone('utc' :: text, now()) not null,
-- Start of the current period that the subscription has been invoiced for.
current_period_start timestamp with time zone default timezone('utc' :: text, now()) not null,
-- End of the current period that the subscription has been invoiced for. At the end of this period, a new invoice will be created.
current_period_end timestamp with time zone default timezone('utc' :: text, now()) not null,
-- If the subscription has ended, the timestamp of the date the subscription ended.
ended_at timestamp with time zone default timezone('utc' :: text, now()),
-- A date in the future at which the subscription will automatically get canceled.
cancel_at timestamp with time zone default timezone('utc' :: text, now()),
-- If the subscription has been canceled, the date of that cancellation. If the subscription was canceled with `cancel_at_period_end`, `canceled_at` will still reflect the date of the initial cancellation request, not the end of the subscription period when the subscription is automatically moved to a canceled state.
canceled_at timestamp with time zone default timezone('utc' :: text, now()),
-- If the subscription has a trial, the beginning of that trial.
trial_start timestamp with time zone default timezone('utc' :: text, now()),
-- If the subscription has a trial, the end of that trial.
trial_end timestamp with time zone default timezone('utc' :: text, now()),
provider text
);

-- Open up access to billing_subscriptions
GRANT SELECT, INSERT, UPDATE, DELETE ON TABLE basejump.billing_subscriptions TO service_role;
GRANT SELECT ON TABLE basejump.billing_subscriptions TO authenticated;

-- enable RLS for billing_subscriptions
alter table
basejump.billing_subscriptions
enable row level security;

/**
* -------------------------
* Section - RLS Policies
* -------------------------
* This is where we define access to tables in the basejump schema
*/

create policy "Can only view own billing customer data." on basejump.billing_customers for
select
using (
basejump.has_role_on_account(account_id) = true
);


create policy "Can only view own billing subscription data." on basejump.billing_subscriptions for
select
using (
basejump.has_role_on_account(account_id) = true
);

/**
* -------------------------------------------------------
* Section - Public functions
* -------------------------------------------------------
* Each of these functions exists in the public name space because they are accessible
* via the API. it is the primary way developers can interact with Basejump accounts
*/


/**
* Returns the current billing status for an account
*/
CREATE OR REPLACE FUNCTION public.get_account_billing_status(account_id uuid)
RETURNS jsonb
security definer
set search_path = public, basejump
AS
$$
DECLARE
result jsonb;
role_result jsonb;
BEGIN
select public.current_user_account_role(get_account_billing_status.account_id) into role_result;

select jsonb_build_object(
'account_id', get_account_billing_status.account_id,
'billing_subscription_id', s.id,
'billing_enabled', case
when a.personal_account = true then config.enable_personal_account_billing
else config.enable_team_account_billing end,
'billing_status', s.status,
'billing_customer_id', c.id,
'billing_provider', config.billing_provider,
'billing_email',
coalesce(c.email, u.email) -- if we don't have a customer email, use the user's email as a fallback
)
into result
from basejump.accounts a
join auth.users u on u.id = a.primary_owner_user_id
left join basejump.billing_subscriptions s on s.account_id = a.id
left join basejump.billing_customers c on c.account_id = coalesce(s.account_id, a.id)
join basejump.config config on true
where a.id = get_account_billing_status.account_id
order by s.created desc
limit 1;

return result || role_result;
END;
|
178 |
+
$$ LANGUAGE plpgsql;
|
179 |
+
|
180 |
+
grant execute on function public.get_account_billing_status(uuid) to authenticated;
|
181 |
+
|
182 |
+
/**
|
183 |
+
* Allow service accounts to upsert the billing data for an account
|
184 |
+
*/
|
185 |
+
CREATE OR REPLACE FUNCTION public.service_role_upsert_customer_subscription(account_id uuid,
|
186 |
+
customer jsonb default null,
|
187 |
+
subscription jsonb default null)
|
188 |
+
RETURNS void AS
|
189 |
+
$$
|
190 |
+
BEGIN
|
191 |
+
-- if the customer is not null, upsert the data into billing_customers, only upsert fields that are present in the jsonb object
|
192 |
+
if customer is not null then
|
193 |
+
insert into basejump.billing_customers (id, account_id, email, provider)
|
194 |
+
values (customer ->> 'id', service_role_upsert_customer_subscription.account_id, customer ->> 'billing_email',
|
195 |
+
(customer ->> 'provider'))
|
196 |
+
on conflict (id) do update
|
197 |
+
set email = customer ->> 'billing_email';
|
198 |
+
end if;
|
199 |
+
|
200 |
+
-- if the subscription is not null, upsert the data into billing_subscriptions, only upsert fields that are present in the jsonb object
|
201 |
+
if subscription is not null then
|
202 |
+
insert into basejump.billing_subscriptions (id, account_id, billing_customer_id, status, metadata, price_id,
|
203 |
+
quantity, cancel_at_period_end, created, current_period_start,
|
204 |
+
current_period_end, ended_at, cancel_at, canceled_at, trial_start,
|
205 |
+
trial_end, plan_name, provider)
|
206 |
+
values (subscription ->> 'id', service_role_upsert_customer_subscription.account_id,
|
207 |
+
subscription ->> 'billing_customer_id', (subscription ->> 'status')::basejump.subscription_status,
|
208 |
+
subscription -> 'metadata',
|
209 |
+
subscription ->> 'price_id', (subscription ->> 'quantity')::int,
|
210 |
+
(subscription ->> 'cancel_at_period_end')::boolean,
|
211 |
+
(subscription ->> 'created')::timestamptz, (subscription ->> 'current_period_start')::timestamptz,
|
212 |
+
(subscription ->> 'current_period_end')::timestamptz, (subscription ->> 'ended_at')::timestamptz,
|
213 |
+
(subscription ->> 'cancel_at')::timestamptz,
|
214 |
+
(subscription ->> 'canceled_at')::timestamptz, (subscription ->> 'trial_start')::timestamptz,
|
215 |
+
(subscription ->> 'trial_end')::timestamptz,
|
216 |
+
subscription ->> 'plan_name', (subscription ->> 'provider'))
|
217 |
+
on conflict (id) do update
|
218 |
+
set billing_customer_id = subscription ->> 'billing_customer_id',
|
219 |
+
status = (subscription ->> 'status')::basejump.subscription_status,
|
220 |
+
metadata = subscription -> 'metadata',
|
221 |
+
price_id = subscription ->> 'price_id',
|
222 |
+
quantity = (subscription ->> 'quantity')::int,
|
223 |
+
cancel_at_period_end = (subscription ->> 'cancel_at_period_end')::boolean,
|
224 |
+
current_period_start = (subscription ->> 'current_period_start')::timestamptz,
|
225 |
+
current_period_end = (subscription ->> 'current_period_end')::timestamptz,
|
226 |
+
ended_at = (subscription ->> 'ended_at')::timestamptz,
|
227 |
+
cancel_at = (subscription ->> 'cancel_at')::timestamptz,
|
228 |
+
canceled_at = (subscription ->> 'canceled_at')::timestamptz,
|
229 |
+
trial_start = (subscription ->> 'trial_start')::timestamptz,
|
230 |
+
trial_end = (subscription ->> 'trial_end')::timestamptz,
|
231 |
+
plan_name = subscription ->> 'plan_name';
|
232 |
+
end if;
|
233 |
+
end;
|
234 |
+
$$ LANGUAGE plpgsql;
|
235 |
+
|
236 |
+
GRANT EXECUTE ON FUNCTION public.service_role_upsert_customer_subscription(uuid, jsonb, jsonb) TO service_role;
|
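Usage note (not part of the upload): a minimal supabase-js sketch of calling the two billing functions above. The client setup, account id, and the customer/subscription payloads are placeholders, and the upsert call assumes a service-role key because execute on it is granted to service_role only.

// billing-example.ts -- illustrative sketch, not part of the migration set.
import { createClient } from '@supabase/supabase-js';

// Placeholder environment configuration (assumption, not from the upload).
const supabase = createClient(process.env.SUPABASE_URL!, process.env.SUPABASE_ANON_KEY!);

// Read the billing status for an account as the signed-in user.
async function fetchBillingStatus(accountId: string) {
  const { data, error } = await supabase.rpc('get_account_billing_status', {
    account_id: accountId,
  });
  if (error) throw error;
  return data; // jsonb with billing_enabled, billing_status, billing_customer_id, ...
}

// Upsert customer + subscription rows; needs a service-role client.
const admin = createClient(process.env.SUPABASE_URL!, process.env.SUPABASE_SERVICE_ROLE_KEY!);

async function syncSubscription(accountId: string) {
  const { error } = await admin.rpc('service_role_upsert_customer_subscription', {
    account_id: accountId,
    // Example payloads only; field names follow the jsonb keys read by the function.
    customer: { id: 'cus_123', billing_email: 'owner@example.com', provider: 'stripe' },
    subscription: {
      id: 'sub_123',
      billing_customer_id: 'cus_123',
      status: 'active',
      price_id: 'price_123',
      provider: 'stripe',
    },
  });
  if (error) throw error;
}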
20250409211903_basejump-configure.sql
ADDED
@@ -0,0 +1,3 @@
UPDATE basejump.config SET enable_team_accounts = TRUE;
UPDATE basejump.config SET enable_personal_account_billing = TRUE;
UPDATE basejump.config SET enable_team_account_billing = TRUE;
20250409212058_initial.sql
ADDED
@@ -0,0 +1,189 @@
-- Enable UUID extension
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

-- Create devices table first
CREATE TABLE public.devices (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    account_id UUID NOT NULL,
    name TEXT,
    last_seen TIMESTAMP WITH TIME ZONE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT now(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT now(),
    is_online BOOLEAN DEFAULT FALSE,
    CONSTRAINT fk_account FOREIGN KEY (account_id) REFERENCES basejump.accounts(id) ON DELETE CASCADE
);

-- Create recordings table
CREATE TABLE public.recordings (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    account_id UUID NOT NULL,
    device_id UUID NOT NULL,
    preprocessed_file_path TEXT,
    meta JSONB,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT now(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT now(),
    name TEXT,
    ui_annotated BOOLEAN DEFAULT FALSE,
    a11y_file_path TEXT,
    audio_file_path TEXT,
    action_annotated BOOLEAN DEFAULT FALSE,
    raw_data_file_path TEXT,
    metadata_file_path TEXT,
    action_training_file_path TEXT,
    CONSTRAINT fk_account FOREIGN KEY (account_id) REFERENCES basejump.accounts(id) ON DELETE CASCADE,
    CONSTRAINT fk_device FOREIGN KEY (device_id) REFERENCES public.devices(id) ON DELETE CASCADE
);

-- Create indexes for foreign keys
CREATE INDEX idx_recordings_account_id ON public.recordings(account_id);
CREATE INDEX idx_recordings_device_id ON public.recordings(device_id);
CREATE INDEX idx_devices_account_id ON public.devices(account_id);

-- Add RLS policies (optional, can be customized as needed)
ALTER TABLE public.recordings ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.devices ENABLE ROW LEVEL SECURITY;

-- Create RLS policies for devices
CREATE POLICY "Account members can delete their own devices"
    ON public.devices FOR DELETE
    USING (basejump.has_role_on_account(account_id));

CREATE POLICY "Account members can insert their own devices"
    ON public.devices FOR INSERT
    WITH CHECK (basejump.has_role_on_account(account_id));

CREATE POLICY "Account members can only access their own devices"
    ON public.devices FOR ALL
    USING (basejump.has_role_on_account(account_id));

CREATE POLICY "Account members can update their own devices"
    ON public.devices FOR UPDATE
    USING (basejump.has_role_on_account(account_id));

CREATE POLICY "Account members can view their own devices"
    ON public.devices FOR SELECT
    USING (basejump.has_role_on_account(account_id));

-- Create RLS policies for recordings
CREATE POLICY "Account members can delete their own recordings"
    ON public.recordings FOR DELETE
    USING (basejump.has_role_on_account(account_id));

CREATE POLICY "Account members can insert their own recordings"
    ON public.recordings FOR INSERT
    WITH CHECK (basejump.has_role_on_account(account_id));

CREATE POLICY "Account members can only access their own recordings"
    ON public.recordings FOR ALL
    USING (basejump.has_role_on_account(account_id));

CREATE POLICY "Account members can update their own recordings"
    ON public.recordings FOR UPDATE
    USING (basejump.has_role_on_account(account_id));

CREATE POLICY "Account members can view their own recordings"
    ON public.recordings FOR SELECT
    USING (basejump.has_role_on_account(account_id));

-- Note: For threads and messages, you might want different RLS policies
-- depending on your application's requirements


-- Also drop the old function signature
DROP FUNCTION IF EXISTS transfer_device(UUID, UUID, TEXT);


CREATE OR REPLACE FUNCTION transfer_device(
    device_id UUID,           -- Parameter remains UUID
    new_account_id UUID,      -- Changed parameter name and implies new ownership target
    device_name TEXT DEFAULT NULL
)
RETURNS SETOF devices AS $$
DECLARE
    device_exists BOOLEAN;
    updated_device devices;
BEGIN
    -- Check if a device with the specified UUID exists
    SELECT EXISTS (
        SELECT 1 FROM devices WHERE id = device_id
    ) INTO device_exists;

    IF device_exists THEN
        -- Device exists: update its account ownership and last_seen timestamp
        UPDATE devices
        SET
            account_id = new_account_id, -- Update account_id instead of user_id
            name = COALESCE(device_name, name),
            last_seen = NOW()
        WHERE id = device_id
        RETURNING * INTO updated_device;

        RETURN NEXT updated_device;
    ELSE
        -- Device doesn't exist; return nothing so the caller can handle creation
        RETURN;
    END IF;
END;
$$ LANGUAGE plpgsql SECURITY DEFINER;

-- Grant execute permission so that authenticated users can call this function
-- Updated function signature
GRANT EXECUTE ON FUNCTION transfer_device(UUID, UUID, TEXT) TO authenticated;



-- Create the ui_grounding bucket
INSERT INTO storage.buckets (id, name, public)
VALUES ('ui_grounding', 'ui_grounding', false)
ON CONFLICT (id) DO NOTHING; -- Avoid error if bucket already exists

-- Create the ui_grounding_trajs bucket
INSERT INTO storage.buckets (id, name, public)
VALUES ('ui_grounding_trajs', 'ui_grounding_trajs', false)
ON CONFLICT (id) DO NOTHING; -- Avoid error if bucket already exists

-- Create the recordings bucket
INSERT INTO storage.buckets (id, name, public, file_size_limit, allowed_mime_types)
VALUES ('recordings', 'recordings', false, null, null) -- Set file size limit and mime types as needed
ON CONFLICT (id) DO NOTHING; -- Avoid error if bucket already exists


-- RLS policies for the 'recordings' bucket
-- Allow members to view files in accounts they belong to
CREATE POLICY "Account members can select recording files"
    ON storage.objects FOR SELECT
    TO authenticated
    USING (
        bucket_id = 'recordings' AND
        (storage.foldername(name))[1]::uuid IN (SELECT basejump.get_accounts_with_role())
    );

-- Allow members to insert files into accounts they belong to
CREATE POLICY "Account members can insert recording files"
    ON storage.objects FOR INSERT
    TO authenticated
    WITH CHECK (
        bucket_id = 'recordings' AND
        (storage.foldername(name))[1]::uuid IN (SELECT basejump.get_accounts_with_role())
    );

-- Allow members to update files in accounts they belong to
CREATE POLICY "Account members can update recording files"
    ON storage.objects FOR UPDATE
    TO authenticated
    USING (
        bucket_id = 'recordings' AND
        (storage.foldername(name))[1]::uuid IN (SELECT basejump.get_accounts_with_role())
    );

-- Allow members to delete files from accounts they belong to
-- Consider restricting this further, e.g., to 'owner' role if needed:
-- (storage.foldername(name))[1]::uuid IN (SELECT basejump.get_accounts_with_role('owner'))
CREATE POLICY "Account members can delete recording files"
    ON storage.objects FOR DELETE
    TO authenticated
    USING (
        bucket_id = 'recordings' AND
        (storage.foldername(name))[1]::uuid IN (SELECT basejump.get_accounts_with_role())
    );
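Usage note (not part of the upload): a minimal supabase-js sketch of exercising this migration. The client setup and ids are placeholders; the upload path puts the account id as the first folder segment so that the storage.foldername(name)[1] check in the bucket policies passes.

// devices-example.ts -- illustrative sketch only.
import { createClient } from '@supabase/supabase-js';

const supabase = createClient(process.env.SUPABASE_URL!, process.env.SUPABASE_ANON_KEY!);

// Re-assign a device to another account the caller belongs to.
async function transferDevice(deviceId: string, newAccountId: string) {
  const { data, error } = await supabase.rpc('transfer_device', {
    device_id: deviceId,
    new_account_id: newAccountId,
    device_name: 'Work laptop', // optional; the old name is kept when omitted
  });
  if (error) throw error;
  return data; // empty result set when the device id was unknown
}

// Upload a recording under the owning account's folder.
async function uploadRecording(accountId: string, file: Blob) {
  const path = `${accountId}/recording-${Date.now()}.webm`;
  const { error } = await supabase.storage.from('recordings').upload(path, file);
  if (error) throw error;
  return path;
}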
20250416133920_agentpress_schema.sql
ADDED
@@ -0,0 +1,382 @@
1 |
+
-- AGENTPRESS SCHEMA:
|
2 |
+
-- Create projects table
|
3 |
+
CREATE TABLE projects (
|
4 |
+
project_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
5 |
+
name TEXT NOT NULL,
|
6 |
+
description TEXT,
|
7 |
+
account_id UUID NOT NULL REFERENCES basejump.accounts(id) ON DELETE CASCADE,
|
8 |
+
sandbox JSONB DEFAULT '{}'::jsonb,
|
9 |
+
is_public BOOLEAN DEFAULT FALSE,
|
10 |
+
created_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc'::text, NOW()) NOT NULL,
|
11 |
+
updated_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc'::text, NOW()) NOT NULL
|
12 |
+
);
|
13 |
+
|
14 |
+
-- Create threads table
|
15 |
+
CREATE TABLE threads (
|
16 |
+
thread_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
17 |
+
account_id UUID REFERENCES basejump.accounts(id) ON DELETE CASCADE,
|
18 |
+
project_id UUID REFERENCES projects(project_id) ON DELETE CASCADE,
|
19 |
+
is_public BOOLEAN DEFAULT FALSE,
|
20 |
+
created_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc'::text, NOW()) NOT NULL,
|
21 |
+
updated_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc'::text, NOW()) NOT NULL
|
22 |
+
);
|
23 |
+
|
24 |
+
-- Create messages table
|
25 |
+
CREATE TABLE messages (
|
26 |
+
message_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
27 |
+
thread_id UUID NOT NULL REFERENCES threads(thread_id) ON DELETE CASCADE,
|
28 |
+
type TEXT NOT NULL,
|
29 |
+
is_llm_message BOOLEAN NOT NULL DEFAULT TRUE,
|
30 |
+
content JSONB NOT NULL,
|
31 |
+
metadata JSONB DEFAULT '{}'::jsonb,
|
32 |
+
created_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc'::text, NOW()) NOT NULL,
|
33 |
+
updated_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc'::text, NOW()) NOT NULL
|
34 |
+
);
|
35 |
+
|
36 |
+
-- Create agent_runs table
|
37 |
+
CREATE TABLE agent_runs (
|
38 |
+
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
39 |
+
thread_id UUID NOT NULL REFERENCES threads(thread_id),
|
40 |
+
status TEXT NOT NULL DEFAULT 'running',
|
41 |
+
started_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc'::text, NOW()) NOT NULL,
|
42 |
+
completed_at TIMESTAMP WITH TIME ZONE,
|
43 |
+
responses JSONB NOT NULL DEFAULT '[]'::jsonb, -- TO BE REMOVED, NOT USED
|
44 |
+
error TEXT,
|
45 |
+
created_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc'::text, NOW()) NOT NULL,
|
46 |
+
updated_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc'::text, NOW()) NOT NULL
|
47 |
+
);
|
48 |
+
|
49 |
+
-- Create updated_at trigger function
|
50 |
+
CREATE OR REPLACE FUNCTION update_updated_at_column()
|
51 |
+
RETURNS TRIGGER AS $$
|
52 |
+
BEGIN
|
53 |
+
NEW.updated_at = TIMEZONE('utc'::text, NOW());
|
54 |
+
RETURN NEW;
|
55 |
+
END;
|
56 |
+
$$ language 'plpgsql';
|
57 |
+
|
58 |
+
-- Create triggers for updated_at
|
59 |
+
CREATE TRIGGER update_threads_updated_at
|
60 |
+
BEFORE UPDATE ON threads
|
61 |
+
FOR EACH ROW
|
62 |
+
EXECUTE FUNCTION update_updated_at_column();
|
63 |
+
|
64 |
+
CREATE TRIGGER update_messages_updated_at
|
65 |
+
BEFORE UPDATE ON messages
|
66 |
+
FOR EACH ROW
|
67 |
+
EXECUTE FUNCTION update_updated_at_column();
|
68 |
+
|
69 |
+
CREATE TRIGGER update_agent_runs_updated_at
|
70 |
+
BEFORE UPDATE ON agent_runs
|
71 |
+
FOR EACH ROW
|
72 |
+
EXECUTE FUNCTION update_updated_at_column();
|
73 |
+
|
74 |
+
CREATE TRIGGER update_projects_updated_at
|
75 |
+
BEFORE UPDATE ON projects
|
76 |
+
FOR EACH ROW
|
77 |
+
EXECUTE FUNCTION update_updated_at_column();
|
78 |
+
|
79 |
+
-- Create indexes for better query performance
|
80 |
+
CREATE INDEX idx_threads_created_at ON threads(created_at);
|
81 |
+
CREATE INDEX idx_threads_account_id ON threads(account_id);
|
82 |
+
CREATE INDEX idx_threads_project_id ON threads(project_id);
|
83 |
+
CREATE INDEX idx_agent_runs_thread_id ON agent_runs(thread_id);
|
84 |
+
CREATE INDEX idx_agent_runs_status ON agent_runs(status);
|
85 |
+
CREATE INDEX idx_agent_runs_created_at ON agent_runs(created_at);
|
86 |
+
CREATE INDEX idx_projects_account_id ON projects(account_id);
|
87 |
+
CREATE INDEX idx_projects_created_at ON projects(created_at);
|
88 |
+
CREATE INDEX idx_messages_thread_id ON messages(thread_id);
|
89 |
+
CREATE INDEX idx_messages_created_at ON messages(created_at);
|
90 |
+
|
91 |
+
-- Enable Row Level Security
|
92 |
+
ALTER TABLE threads ENABLE ROW LEVEL SECURITY;
|
93 |
+
ALTER TABLE messages ENABLE ROW LEVEL SECURITY;
|
94 |
+
ALTER TABLE agent_runs ENABLE ROW LEVEL SECURITY;
|
95 |
+
ALTER TABLE projects ENABLE ROW LEVEL SECURITY;
|
96 |
+
|
97 |
+
-- Project policies
|
98 |
+
CREATE POLICY project_select_policy ON projects
|
99 |
+
FOR SELECT
|
100 |
+
USING (
|
101 |
+
is_public = TRUE OR
|
102 |
+
basejump.has_role_on_account(account_id) = true
|
103 |
+
);
|
104 |
+
|
105 |
+
CREATE POLICY project_insert_policy ON projects
|
106 |
+
FOR INSERT
|
107 |
+
WITH CHECK (basejump.has_role_on_account(account_id) = true);
|
108 |
+
|
109 |
+
CREATE POLICY project_update_policy ON projects
|
110 |
+
FOR UPDATE
|
111 |
+
USING (basejump.has_role_on_account(account_id) = true);
|
112 |
+
|
113 |
+
CREATE POLICY project_delete_policy ON projects
|
114 |
+
FOR DELETE
|
115 |
+
USING (basejump.has_role_on_account(account_id) = true);
|
116 |
+
|
117 |
+
-- Thread policies based on project and account ownership
|
118 |
+
CREATE POLICY thread_select_policy ON threads
|
119 |
+
FOR SELECT
|
120 |
+
USING (
|
121 |
+
basejump.has_role_on_account(account_id) = true OR
|
122 |
+
EXISTS (
|
123 |
+
SELECT 1 FROM projects
|
124 |
+
WHERE projects.project_id = threads.project_id
|
125 |
+
AND (
|
126 |
+
projects.is_public = TRUE OR
|
127 |
+
basejump.has_role_on_account(projects.account_id) = true
|
128 |
+
)
|
129 |
+
)
|
130 |
+
);
|
131 |
+
|
132 |
+
CREATE POLICY thread_insert_policy ON threads
|
133 |
+
FOR INSERT
|
134 |
+
WITH CHECK (
|
135 |
+
basejump.has_role_on_account(account_id) = true OR
|
136 |
+
EXISTS (
|
137 |
+
SELECT 1 FROM projects
|
138 |
+
WHERE projects.project_id = threads.project_id
|
139 |
+
AND basejump.has_role_on_account(projects.account_id) = true
|
140 |
+
)
|
141 |
+
);
|
142 |
+
|
143 |
+
CREATE POLICY thread_update_policy ON threads
|
144 |
+
FOR UPDATE
|
145 |
+
USING (
|
146 |
+
basejump.has_role_on_account(account_id) = true OR
|
147 |
+
EXISTS (
|
148 |
+
SELECT 1 FROM projects
|
149 |
+
WHERE projects.project_id = threads.project_id
|
150 |
+
AND basejump.has_role_on_account(projects.account_id) = true
|
151 |
+
)
|
152 |
+
);
|
153 |
+
|
154 |
+
CREATE POLICY thread_delete_policy ON threads
|
155 |
+
FOR DELETE
|
156 |
+
USING (
|
157 |
+
basejump.has_role_on_account(account_id) = true OR
|
158 |
+
EXISTS (
|
159 |
+
SELECT 1 FROM projects
|
160 |
+
WHERE projects.project_id = threads.project_id
|
161 |
+
AND basejump.has_role_on_account(projects.account_id) = true
|
162 |
+
)
|
163 |
+
);
|
164 |
+
|
165 |
+
-- Create policies for agent_runs based on thread ownership
|
166 |
+
CREATE POLICY agent_run_select_policy ON agent_runs
|
167 |
+
FOR SELECT
|
168 |
+
USING (
|
169 |
+
EXISTS (
|
170 |
+
SELECT 1 FROM threads
|
171 |
+
LEFT JOIN projects ON threads.project_id = projects.project_id
|
172 |
+
WHERE threads.thread_id = agent_runs.thread_id
|
173 |
+
AND (
|
174 |
+
projects.is_public = TRUE OR
|
175 |
+
basejump.has_role_on_account(threads.account_id) = true OR
|
176 |
+
basejump.has_role_on_account(projects.account_id) = true
|
177 |
+
)
|
178 |
+
)
|
179 |
+
);
|
180 |
+
|
181 |
+
CREATE POLICY agent_run_insert_policy ON agent_runs
|
182 |
+
FOR INSERT
|
183 |
+
WITH CHECK (
|
184 |
+
EXISTS (
|
185 |
+
SELECT 1 FROM threads
|
186 |
+
LEFT JOIN projects ON threads.project_id = projects.project_id
|
187 |
+
WHERE threads.thread_id = agent_runs.thread_id
|
188 |
+
AND (
|
189 |
+
basejump.has_role_on_account(threads.account_id) = true OR
|
190 |
+
basejump.has_role_on_account(projects.account_id) = true
|
191 |
+
)
|
192 |
+
)
|
193 |
+
);
|
194 |
+
|
195 |
+
CREATE POLICY agent_run_update_policy ON agent_runs
|
196 |
+
FOR UPDATE
|
197 |
+
USING (
|
198 |
+
EXISTS (
|
199 |
+
SELECT 1 FROM threads
|
200 |
+
LEFT JOIN projects ON threads.project_id = projects.project_id
|
201 |
+
WHERE threads.thread_id = agent_runs.thread_id
|
202 |
+
AND (
|
203 |
+
basejump.has_role_on_account(threads.account_id) = true OR
|
204 |
+
basejump.has_role_on_account(projects.account_id) = true
|
205 |
+
)
|
206 |
+
)
|
207 |
+
);
|
208 |
+
|
209 |
+
CREATE POLICY agent_run_delete_policy ON agent_runs
|
210 |
+
FOR DELETE
|
211 |
+
USING (
|
212 |
+
EXISTS (
|
213 |
+
SELECT 1 FROM threads
|
214 |
+
LEFT JOIN projects ON threads.project_id = projects.project_id
|
215 |
+
WHERE threads.thread_id = agent_runs.thread_id
|
216 |
+
AND (
|
217 |
+
basejump.has_role_on_account(threads.account_id) = true OR
|
218 |
+
basejump.has_role_on_account(projects.account_id) = true
|
219 |
+
)
|
220 |
+
)
|
221 |
+
);
|
222 |
+
|
223 |
+
-- Create message policies based on thread ownership
|
224 |
+
CREATE POLICY message_select_policy ON messages
|
225 |
+
FOR SELECT
|
226 |
+
USING (
|
227 |
+
EXISTS (
|
228 |
+
SELECT 1 FROM threads
|
229 |
+
LEFT JOIN projects ON threads.project_id = projects.project_id
|
230 |
+
WHERE threads.thread_id = messages.thread_id
|
231 |
+
AND (
|
232 |
+
projects.is_public = TRUE OR
|
233 |
+
basejump.has_role_on_account(threads.account_id) = true OR
|
234 |
+
basejump.has_role_on_account(projects.account_id) = true
|
235 |
+
)
|
236 |
+
)
|
237 |
+
);
|
238 |
+
|
239 |
+
CREATE POLICY message_insert_policy ON messages
|
240 |
+
FOR INSERT
|
241 |
+
WITH CHECK (
|
242 |
+
EXISTS (
|
243 |
+
SELECT 1 FROM threads
|
244 |
+
LEFT JOIN projects ON threads.project_id = projects.project_id
|
245 |
+
WHERE threads.thread_id = messages.thread_id
|
246 |
+
AND (
|
247 |
+
basejump.has_role_on_account(threads.account_id) = true OR
|
248 |
+
basejump.has_role_on_account(projects.account_id) = true
|
249 |
+
)
|
250 |
+
)
|
251 |
+
);
|
252 |
+
|
253 |
+
CREATE POLICY message_update_policy ON messages
|
254 |
+
FOR UPDATE
|
255 |
+
USING (
|
256 |
+
EXISTS (
|
257 |
+
SELECT 1 FROM threads
|
258 |
+
LEFT JOIN projects ON threads.project_id = projects.project_id
|
259 |
+
WHERE threads.thread_id = messages.thread_id
|
260 |
+
AND (
|
261 |
+
basejump.has_role_on_account(threads.account_id) = true OR
|
262 |
+
basejump.has_role_on_account(projects.account_id) = true
|
263 |
+
)
|
264 |
+
)
|
265 |
+
);
|
266 |
+
|
267 |
+
CREATE POLICY message_delete_policy ON messages
|
268 |
+
FOR DELETE
|
269 |
+
USING (
|
270 |
+
EXISTS (
|
271 |
+
SELECT 1 FROM threads
|
272 |
+
LEFT JOIN projects ON threads.project_id = projects.project_id
|
273 |
+
WHERE threads.thread_id = messages.thread_id
|
274 |
+
AND (
|
275 |
+
basejump.has_role_on_account(threads.account_id) = true OR
|
276 |
+
basejump.has_role_on_account(projects.account_id) = true
|
277 |
+
)
|
278 |
+
)
|
279 |
+
);
|
280 |
+
|
281 |
+
-- Grant permissions to roles
|
282 |
+
GRANT ALL PRIVILEGES ON TABLE projects TO authenticated, service_role;
|
283 |
+
GRANT SELECT ON TABLE projects TO anon;
|
284 |
+
GRANT SELECT ON TABLE threads TO authenticated, anon, service_role;
|
285 |
+
GRANT SELECT ON TABLE messages TO authenticated, anon, service_role;
|
286 |
+
GRANT ALL PRIVILEGES ON TABLE agent_runs TO authenticated, service_role;
|
287 |
+
|
288 |
+
-- Create a function that matches the Python get_messages behavior
|
289 |
+
CREATE OR REPLACE FUNCTION get_llm_formatted_messages(p_thread_id UUID)
|
290 |
+
RETURNS JSONB
|
291 |
+
SECURITY DEFINER -- Changed to SECURITY DEFINER to allow service role access
|
292 |
+
LANGUAGE plpgsql
|
293 |
+
AS $$
|
294 |
+
DECLARE
|
295 |
+
messages_array JSONB := '[]'::JSONB;
|
296 |
+
has_access BOOLEAN;
|
297 |
+
current_role TEXT;
|
298 |
+
latest_summary_id UUID;
|
299 |
+
latest_summary_time TIMESTAMP WITH TIME ZONE;
|
300 |
+
is_project_public BOOLEAN;
|
301 |
+
BEGIN
|
302 |
+
-- Get current role
|
303 |
+
SELECT current_user INTO current_role;
|
304 |
+
|
305 |
+
-- Check if associated project is public
|
306 |
+
SELECT p.is_public INTO is_project_public
|
307 |
+
FROM threads t
|
308 |
+
LEFT JOIN projects p ON t.project_id = p.project_id
|
309 |
+
WHERE t.thread_id = p_thread_id;
|
310 |
+
|
311 |
+
-- Skip access check for service_role or public projects
|
312 |
+
IF current_role = 'authenticated' AND NOT is_project_public THEN
|
313 |
+
-- Check if thread exists and user has access
|
314 |
+
SELECT EXISTS (
|
315 |
+
SELECT 1 FROM threads t
|
316 |
+
LEFT JOIN projects p ON t.project_id = p.project_id
|
317 |
+
WHERE t.thread_id = p_thread_id
|
318 |
+
AND (
|
319 |
+
basejump.has_role_on_account(t.account_id) = true OR
|
320 |
+
basejump.has_role_on_account(p.account_id) = true
|
321 |
+
)
|
322 |
+
) INTO has_access;
|
323 |
+
|
324 |
+
IF NOT has_access THEN
|
325 |
+
RAISE EXCEPTION 'Thread not found or access denied';
|
326 |
+
END IF;
|
327 |
+
END IF;
|
328 |
+
|
329 |
+
-- Find the latest summary message if it exists
|
330 |
+
SELECT message_id, created_at
|
331 |
+
INTO latest_summary_id, latest_summary_time
|
332 |
+
FROM messages
|
333 |
+
WHERE thread_id = p_thread_id
|
334 |
+
AND type = 'summary'
|
335 |
+
AND is_llm_message = TRUE
|
336 |
+
ORDER BY created_at DESC
|
337 |
+
LIMIT 1;
|
338 |
+
|
339 |
+
-- Log whether a summary was found (helpful for debugging)
|
340 |
+
IF latest_summary_id IS NOT NULL THEN
|
341 |
+
RAISE NOTICE 'Found latest summary message: id=%, time=%', latest_summary_id, latest_summary_time;
|
342 |
+
ELSE
|
343 |
+
RAISE NOTICE 'No summary message found for thread %', p_thread_id;
|
344 |
+
END IF;
|
345 |
+
|
346 |
+
-- Parse content if it's stored as a string and return proper JSON objects
|
347 |
+
WITH parsed_messages AS (
|
348 |
+
SELECT
|
349 |
+
message_id,
|
350 |
+
CASE
|
351 |
+
WHEN jsonb_typeof(content) = 'string' THEN content::text::jsonb
|
352 |
+
ELSE content
|
353 |
+
END AS parsed_content,
|
354 |
+
created_at,
|
355 |
+
type
|
356 |
+
FROM messages
|
357 |
+
WHERE thread_id = p_thread_id
|
358 |
+
AND is_llm_message = TRUE
|
359 |
+
AND (
|
360 |
+
-- Include the latest summary and all messages after it,
|
361 |
+
-- or all messages if no summary exists
|
362 |
+
latest_summary_id IS NULL
|
363 |
+
OR message_id = latest_summary_id
|
364 |
+
OR created_at > latest_summary_time
|
365 |
+
)
|
366 |
+
ORDER BY created_at
|
367 |
+
)
|
368 |
+
SELECT JSONB_AGG(parsed_content)
|
369 |
+
INTO messages_array
|
370 |
+
FROM parsed_messages;
|
371 |
+
|
372 |
+
-- Handle the case when no messages are found
|
373 |
+
IF messages_array IS NULL THEN
|
374 |
+
RETURN '[]'::JSONB;
|
375 |
+
END IF;
|
376 |
+
|
377 |
+
RETURN messages_array;
|
378 |
+
END;
|
379 |
+
$$;
|
380 |
+
|
381 |
+
-- Grant execute permissions
|
382 |
+
GRANT EXECUTE ON FUNCTION get_llm_formatted_messages TO authenticated, anon, service_role;
|
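Usage note (not part of the upload): a minimal supabase-js sketch of fetching a thread's LLM-ready history through the get_llm_formatted_messages function defined above. The client setup and the thread id are placeholders.

// messages-example.ts -- illustrative sketch only.
import { createClient } from '@supabase/supabase-js';

const supabase = createClient(process.env.SUPABASE_URL!, process.env.SUPABASE_ANON_KEY!);

// Returns the latest 'summary' message (if any) plus all later llm messages,
// as a JSON array, mirroring the SQL function's behavior.
async function getThreadHistory(threadId: string) {
  const { data, error } = await supabase.rpc('get_llm_formatted_messages', {
    p_thread_id: threadId,
  });
  if (error) throw error; // raised when the thread is missing or access is denied
  return data ?? [];
}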
ActiveJobsProvider.py
ADDED
@@ -0,0 +1,57 @@
from typing import Dict

from agent.tools.data_providers.RapidDataProviderBase import RapidDataProviderBase, EndpointSchema


class ActiveJobsProvider(RapidDataProviderBase):
    def __init__(self):
        endpoints: Dict[str, EndpointSchema] = {
            "active_jobs": {
                "route": "/active-ats-7d",
                "method": "GET",
                "name": "Active Jobs Search",
                "description": "Get active job listings with various filter options.",
                "payload": {
                    "limit": "Optional. Number of jobs per API call (10-100). Default is 100.",
                    "offset": "Optional. Offset for pagination. Default is 0.",
                    "title_filter": "Optional. Search terms for job title.",
                    "advanced_title_filter": "Optional. Advanced title filter with operators (can't be used with title_filter).",
                    "location_filter": "Optional. Filter by location(s). Use full names like 'United States' not 'US'.",
                    "description_filter": "Optional. Filter on job description content.",
                    "organization_filter": "Optional. Filter by company name(s).",
                    "description_type": "Optional. Return format for description: 'text' or 'html'. Leave empty to exclude descriptions.",
                    "source": "Optional. Filter by ATS source.",
                    "date_filter": "Optional. Filter by posting date (greater than).",
                    "ai_employment_type_filter": "Optional. Filter by employment type (FULL_TIME, PART_TIME, etc).",
                    "ai_work_arrangement_filter": "Optional. Filter by work arrangement (On-site, Hybrid, Remote OK, Remote Solely).",
                    "ai_experience_level_filter": "Optional. Filter by experience level (0-2, 2-5, 5-10, 10+).",
                    "li_organization_slug_filter": "Optional. Filter by LinkedIn company slug.",
                    "li_organization_slug_exclusion_filter": "Optional. Exclude LinkedIn company slugs.",
                    "li_industry_filter": "Optional. Filter by LinkedIn industry.",
                    "li_organization_specialties_filter": "Optional. Filter by LinkedIn company specialties.",
                    "li_organization_description_filter": "Optional. Filter by LinkedIn company description."
                }
            }
        }

        base_url = "https://active-jobs-db.p.rapidapi.com"
        super().__init__(base_url, endpoints)


if __name__ == "__main__":
    from dotenv import load_dotenv
    load_dotenv()
    tool = ActiveJobsProvider()

    # Example for searching active jobs
    jobs = tool.call_endpoint(
        route="active_jobs",
        payload={
            "limit": "10",
            "offset": "0",
            "title_filter": "\"Data Engineer\"",
            "location_filter": "\"United States\" OR \"United Kingdom\"",
            "description_type": "text"
        }
    )
    print("Active Jobs:", jobs)
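Usage note (not part of the upload): the provider above only declares endpoint metadata and delegates the HTTP call to RapidDataProviderBase, which is not shown in this section. Below is a hedged TypeScript sketch of what the equivalent raw RapidAPI request could look like; the header names follow common RapidAPI conventions and RAPID_API_KEY is an assumed environment variable, not something confirmed by this file.

// active-jobs-example.ts -- illustrative sketch under the assumptions above.
const BASE_URL = 'https://active-jobs-db.p.rapidapi.com';

async function searchActiveJobs() {
  const params = new URLSearchParams({
    limit: '10',
    offset: '0',
    title_filter: '"Data Engineer"',
    location_filter: '"United States" OR "United Kingdom"',
    description_type: 'text',
  });
  const res = await fetch(`${BASE_URL}/active-ats-7d?${params}`, {
    headers: {
      'x-rapidapi-key': process.env.RAPID_API_KEY ?? '', // assumed env var
      'x-rapidapi-host': 'active-jobs-db.p.rapidapi.com',
    },
  });
  if (!res.ok) throw new Error(`RapidAPI request failed: ${res.status}`);
  return res.json();
}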
AmazonProvider.py
ADDED
@@ -0,0 +1,191 @@
1 |
+
from typing import Dict, Optional
|
2 |
+
|
3 |
+
from agent.tools.data_providers.RapidDataProviderBase import RapidDataProviderBase, EndpointSchema
|
4 |
+
|
5 |
+
|
6 |
+
class AmazonProvider(RapidDataProviderBase):
|
7 |
+
def __init__(self):
|
8 |
+
endpoints: Dict[str, EndpointSchema] = {
|
9 |
+
"search": {
|
10 |
+
"route": "/search",
|
11 |
+
"method": "GET",
|
12 |
+
"name": "Amazon Product Search",
|
13 |
+
"description": "Search for products on Amazon with various filters and parameters.",
|
14 |
+
"payload": {
|
15 |
+
"query": "Search query (supports both free-form text queries or a product asin)",
|
16 |
+
"page": "Results page to return (default: 1)",
|
17 |
+
"country": "Sets the Amazon domain, marketplace country, language and currency (default: US)",
|
18 |
+
"sort_by": "Return the results in a specific sort order (RELEVANCE, LOWEST_PRICE, HIGHEST_PRICE, REVIEWS, NEWEST, BEST_SELLERS)",
|
19 |
+
"product_condition": "Return products in a specific condition (ALL, NEW, USED, RENEWED, COLLECTIBLE)",
|
20 |
+
"is_prime": "Only return prime products (boolean)",
|
21 |
+
"deals_and_discounts": "Return deals and discounts in a specific condition (NONE, ALL_DISCOUNTS, TODAYS_DEALS)",
|
22 |
+
"category_id": "Find products in a specific category / department (optional)",
|
23 |
+
"category": "Filter by specific numeric Amazon category (optional)",
|
24 |
+
"min_price": "Only return product offers with price greater than a certain value (optional)",
|
25 |
+
"max_price": "Only return product offers with price lower than a certain value (optional)",
|
26 |
+
"brand": "Find products with a specific brand (optional)",
|
27 |
+
"seller_id": "Find products sold by specific seller (optional)",
|
28 |
+
"four_stars_and_up": "Return product listings with ratings of 4 stars & up (optional)",
|
29 |
+
"additional_filters": "Any filters available on the Amazon page but not part of this endpoint's parameters (optional)"
|
30 |
+
}
|
31 |
+
},
|
32 |
+
"product-details": {
|
33 |
+
"route": "/product-details",
|
34 |
+
"method": "GET",
|
35 |
+
"name": "Amazon Product Details",
|
36 |
+
"description": "Get detailed information about specific Amazon products by ASIN.",
|
37 |
+
"payload": {
|
38 |
+
"asin": "Product ASIN for which to get details. Supports batching of up to 10 ASINs in a single request, separated by comma.",
|
39 |
+
"country": "Sets the Amazon domain, marketplace country, language and currency (default: US)",
|
40 |
+
"more_info_query": "A query to search and get more info about the product as part of Product Information, Customer Q&As, and Customer Reviews (optional)",
|
41 |
+
"fields": "A comma separated list of product fields to include in the response (field projection). By default all fields are returned. (optional)"
|
42 |
+
}
|
43 |
+
},
|
44 |
+
"products-by-category": {
|
45 |
+
"route": "/products-by-category",
|
46 |
+
"method": "GET",
|
47 |
+
"name": "Amazon Products by Category",
|
48 |
+
"description": "Get products from a specific Amazon category.",
|
49 |
+
"payload": {
|
50 |
+
"category_id": "The Amazon category for which to return results. Multiple category values can be separated by comma.",
|
51 |
+
"page": "Page to return (default: 1)",
|
52 |
+
"country": "Sets the Amazon domain, marketplace country, language and currency (default: US)",
|
53 |
+
"sort_by": "Return the results in a specific sort order (RELEVANCE, LOWEST_PRICE, HIGHEST_PRICE, REVIEWS, NEWEST, BEST_SELLERS)",
|
54 |
+
"min_price": "Only return product offers with price greater than a certain value (optional)",
|
55 |
+
"max_price": "Only return product offers with price lower than a certain value (optional)",
|
56 |
+
"product_condition": "Return products in a specific condition (ALL, NEW, USED, RENEWED, COLLECTIBLE)",
|
57 |
+
"brand": "Only return products of a specific brand. Multiple brands can be specified as a comma separated list (optional)",
|
58 |
+
"is_prime": "Only return prime products (boolean)",
|
59 |
+
"deals_and_discounts": "Return deals and discounts in a specific condition (NONE, ALL_DISCOUNTS, TODAYS_DEALS)",
|
60 |
+
"four_stars_and_up": "Return product listings with ratings of 4 stars & up (optional)",
|
61 |
+
"additional_filters": "Any filters available on the Amazon page but not part of this endpoint's parameters (optional)"
|
62 |
+
}
|
63 |
+
},
|
64 |
+
"product-reviews": {
|
65 |
+
"route": "/product-reviews",
|
66 |
+
"method": "GET",
|
67 |
+
"name": "Amazon Product Reviews",
|
68 |
+
"description": "Get customer reviews for a specific Amazon product by ASIN.",
|
69 |
+
"payload": {
|
70 |
+
"asin": "Product asin for which to get reviews.",
|
71 |
+
"country": "Sets the Amazon domain, marketplace country, language and currency (default: US)",
|
72 |
+
"page": "Results page to return (default: 1)",
|
73 |
+
"sort_by": "Return reviews in a specific sort order (TOP_REVIEWS, MOST_RECENT)",
|
74 |
+
"star_rating": "Only return reviews with a specific star rating (ALL, 5_STARS, 4_STARS, 3_STARS, 2_STARS, 1_STARS, POSITIVE, CRITICAL)",
|
75 |
+
"verified_purchases_only": "Only return reviews by reviewers who made a verified purchase (boolean)",
|
76 |
+
"images_or_videos_only": "Only return reviews containing images and / or videos (boolean)",
|
77 |
+
"current_format_only": "Only return reviews of the current format (product variant - e.g. Color) (boolean)"
|
78 |
+
}
|
79 |
+
},
|
80 |
+
"seller-profile": {
|
81 |
+
"route": "/seller-profile",
|
82 |
+
"method": "GET",
|
83 |
+
"name": "Amazon Seller Profile",
|
84 |
+
"description": "Get detailed information about a specific Amazon seller by Seller ID.",
|
85 |
+
"payload": {
|
86 |
+
"seller_id": "The Amazon Seller ID for which to get seller profile details",
|
87 |
+
"country": "Sets the Amazon domain, marketplace country, language and currency (default: US)",
|
88 |
+
"fields": "A comma separated list of seller profile fields to include in the response (field projection). By default all fields are returned. (optional)"
|
89 |
+
}
|
90 |
+
},
|
91 |
+
"seller-reviews": {
|
92 |
+
"route": "/seller-reviews",
|
93 |
+
"method": "GET",
|
94 |
+
"name": "Amazon Seller Reviews",
|
95 |
+
"description": "Get customer reviews for a specific Amazon seller by Seller ID.",
|
96 |
+
"payload": {
|
97 |
+
"seller_id": "The Amazon Seller ID for which to get seller reviews",
|
98 |
+
"country": "Sets the Amazon domain, marketplace country, language and currency (default: US)",
|
99 |
+
"star_rating": "Only return reviews with a specific star rating or positive / negative sentiment (ALL, 5_STARS, 4_STARS, 3_STARS, 2_STARS, 1_STARS, POSITIVE, CRITICAL)",
|
100 |
+
"page": "The page of seller feedback results to retrieve (default: 1)",
|
101 |
+
"fields": "A comma separated list of seller review fields to include in the response (field projection). By default all fields are returned. (optional)"
|
102 |
+
}
|
103 |
+
}
|
104 |
+
}
|
105 |
+
base_url = "https://real-time-amazon-data.p.rapidapi.com"
|
106 |
+
super().__init__(base_url, endpoints)
|
107 |
+
|
108 |
+
|
109 |
+
if __name__ == "__main__":
|
110 |
+
from dotenv import load_dotenv
|
111 |
+
load_dotenv()
|
112 |
+
tool = AmazonProvider()
|
113 |
+
|
114 |
+
# Example for product search
|
115 |
+
search_result = tool.call_endpoint(
|
116 |
+
route="search",
|
117 |
+
payload={
|
118 |
+
"query": "Phone",
|
119 |
+
"page": 1,
|
120 |
+
"country": "US",
|
121 |
+
"sort_by": "RELEVANCE",
|
122 |
+
"product_condition": "ALL",
|
123 |
+
"is_prime": False,
|
124 |
+
"deals_and_discounts": "NONE"
|
125 |
+
}
|
126 |
+
)
|
127 |
+
print("Search Result:", search_result)
|
128 |
+
|
129 |
+
# Example for product details
|
130 |
+
details_result = tool.call_endpoint(
|
131 |
+
route="product-details",
|
132 |
+
payload={
|
133 |
+
"asin": "B07ZPKBL9V",
|
134 |
+
"country": "US"
|
135 |
+
}
|
136 |
+
)
|
137 |
+
print("Product Details:", details_result)
|
138 |
+
|
139 |
+
# Example for products by category
|
140 |
+
category_result = tool.call_endpoint(
|
141 |
+
route="products-by-category",
|
142 |
+
payload={
|
143 |
+
"category_id": "2478868012",
|
144 |
+
"page": 1,
|
145 |
+
"country": "US",
|
146 |
+
"sort_by": "RELEVANCE",
|
147 |
+
"product_condition": "ALL",
|
148 |
+
"is_prime": False,
|
149 |
+
"deals_and_discounts": "NONE"
|
150 |
+
}
|
151 |
+
)
|
152 |
+
print("Category Products:", category_result)
|
153 |
+
|
154 |
+
# Example for product reviews
|
155 |
+
reviews_result = tool.call_endpoint(
|
156 |
+
route="product-reviews",
|
157 |
+
payload={
|
158 |
+
"asin": "B07ZPKN6YR",
|
159 |
+
"country": "US",
|
160 |
+
"page": 1,
|
161 |
+
"sort_by": "TOP_REVIEWS",
|
162 |
+
"star_rating": "ALL",
|
163 |
+
"verified_purchases_only": False,
|
164 |
+
"images_or_videos_only": False,
|
165 |
+
"current_format_only": False
|
166 |
+
}
|
167 |
+
)
|
168 |
+
print("Product Reviews:", reviews_result)
|
169 |
+
|
170 |
+
# Example for seller profile
|
171 |
+
seller_result = tool.call_endpoint(
|
172 |
+
route="seller-profile",
|
173 |
+
payload={
|
174 |
+
"seller_id": "A02211013Q5HP3OMSZC7W",
|
175 |
+
"country": "US"
|
176 |
+
}
|
177 |
+
)
|
178 |
+
print("Seller Profile:", seller_result)
|
179 |
+
|
180 |
+
# Example for seller reviews
|
181 |
+
seller_reviews_result = tool.call_endpoint(
|
182 |
+
route="seller-reviews",
|
183 |
+
payload={
|
184 |
+
"seller_id": "A02211013Q5HP3OMSZC7W",
|
185 |
+
"country": "US",
|
186 |
+
"star_rating": "ALL",
|
187 |
+
"page": 1
|
188 |
+
}
|
189 |
+
)
|
190 |
+
print("Seller Reviews:", seller_reviews_result)
|
191 |
+
|
AuthProvider.tsx
ADDED
@@ -0,0 +1,75 @@
"use client";

import React, { createContext, useContext, useState, useEffect, ReactNode } from 'react';
import { createClient } from '@/lib/supabase/client';
import { User, Session } from '@supabase/supabase-js';
import { SupabaseClient } from '@supabase/supabase-js';

type AuthContextType = {
  supabase: SupabaseClient;
  session: Session | null;
  user: User | null;
  isLoading: boolean;
  signOut: () => Promise<void>;
};

const AuthContext = createContext<AuthContextType | undefined>(undefined);

export const AuthProvider = ({ children }: { children: ReactNode }) => {
  const supabase = createClient();
  const [session, setSession] = useState<Session | null>(null);
  const [user, setUser] = useState<User | null>(null);
  const [isLoading, setIsLoading] = useState(true);

  useEffect(() => {
    const getInitialSession = async () => {
      const { data: { session: currentSession } } = await supabase.auth.getSession();
      setSession(currentSession);
      setUser(currentSession?.user ?? null);
      setIsLoading(false);
    };

    getInitialSession();

    const { data: authListener } = supabase.auth.onAuthStateChange(
      (_event, newSession) => {
        setSession(newSession);
        setUser(newSession?.user ?? null);
        // No need to set loading state here as initial load is done
        // and subsequent changes shouldn't show a loading state for the whole app
        if (isLoading) setIsLoading(false);
      }
    );

    return () => {
      authListener?.subscription.unsubscribe();
    };
  }, [supabase, isLoading]); // Added isLoading to dependencies to ensure it runs once after initial load completes

  const signOut = async () => {
    await supabase.auth.signOut();
    // State updates will be handled by onAuthStateChange
  };

  const value = {
    supabase,
    session,
    user,
    isLoading,
    signOut,
  };

  return (
    <AuthContext.Provider value={value}>
      {children}
    </AuthContext.Provider>
  );
};

export const useAuth = (): AuthContextType => {
  const context = useContext(AuthContext);
  if (context === undefined) {
    throw new Error('useAuth must be used within an AuthProvider');
  }
  return context;
};
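Usage note (not part of the upload): a minimal sketch of consuming the useAuth hook from a client component. The component name, copy, and the '@/components/AuthProvider' import path are illustrative assumptions.

// profile-badge-example.tsx -- illustrative usage of the hook above.
"use client";

import { useAuth } from '@/components/AuthProvider';

export function ProfileBadge() {
  const { user, isLoading, signOut } = useAuth();

  if (isLoading) return <span>Loading…</span>;
  if (!user) return <span>Signed out</span>;

  return (
    <button onClick={() => signOut()}>
      Sign out {user.email}
    </button>
  );
}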
BrowserToolView.tsx
ADDED
@@ -0,0 +1,195 @@
1 |
+
import React, { useMemo } from "react";
|
2 |
+
import { Globe, MonitorPlay, ExternalLink, CheckCircle, AlertTriangle, CircleDashed } from "lucide-react";
|
3 |
+
import { ToolViewProps } from "./types";
|
4 |
+
import { extractBrowserUrl, extractBrowserOperation, formatTimestamp, getToolTitle } from "./utils";
|
5 |
+
import { ApiMessageType } from '@/components/thread/types';
|
6 |
+
import { safeJsonParse } from '@/components/thread/utils';
|
7 |
+
import { cn } from "@/lib/utils";
|
8 |
+
|
9 |
+
export function BrowserToolView({
|
10 |
+
name = "browser-operation",
|
11 |
+
assistantContent,
|
12 |
+
toolContent,
|
13 |
+
assistantTimestamp,
|
14 |
+
toolTimestamp,
|
15 |
+
isSuccess = true,
|
16 |
+
isStreaming = false,
|
17 |
+
project,
|
18 |
+
agentStatus = 'idle',
|
19 |
+
messages = [],
|
20 |
+
currentIndex = 0,
|
21 |
+
totalCalls = 1
|
22 |
+
}: ToolViewProps) {
|
23 |
+
const url = extractBrowserUrl(assistantContent);
|
24 |
+
const operation = extractBrowserOperation(name);
|
25 |
+
const toolTitle = getToolTitle(name);
|
26 |
+
|
27 |
+
// --- message_id Extraction Logic ---
|
28 |
+
let browserStateMessageId: string | undefined;
|
29 |
+
|
30 |
+
try {
|
31 |
+
// 1. Parse the top-level JSON
|
32 |
+
const topLevelParsed = safeJsonParse<{ content?: string }>(toolContent, {});
|
33 |
+
const innerContentString = topLevelParsed?.content;
|
34 |
+
|
35 |
+
if (innerContentString && typeof innerContentString === 'string') {
|
36 |
+
// 2. Extract the output='...' string using regex
|
37 |
+
const outputMatch = innerContentString.match(/\boutput='(.*?)'(?=\s*\))/);
|
38 |
+
const outputString = outputMatch ? outputMatch[1] : null;
|
39 |
+
|
40 |
+
if (outputString) {
|
41 |
+
// 3. Unescape the JSON string (basic unescaping for \n and \")
|
42 |
+
const unescapedOutput = outputString.replace(/\\n/g, '\n').replace(/\\"/g, '"');
|
43 |
+
|
44 |
+
// 4. Parse the unescaped JSON to get message_id
|
45 |
+
const finalParsedOutput = safeJsonParse<{ message_id?: string }>(unescapedOutput, {});
|
46 |
+
browserStateMessageId = finalParsedOutput?.message_id;
|
47 |
+
}
|
48 |
+
}
|
49 |
+
} catch (error) {
|
50 |
+
console.error("[BrowserToolView] Error parsing tool content for message_id:", error);
|
51 |
+
}
|
52 |
+
|
53 |
+
// Find the browser_state message and extract the screenshot
|
54 |
+
let screenshotBase64: string | null = null;
|
55 |
+
if (browserStateMessageId && messages.length > 0) {
|
56 |
+
const browserStateMessage = messages.find(msg =>
|
57 |
+
(msg.type as string) === 'browser_state' &&
|
58 |
+
msg.message_id === browserStateMessageId
|
59 |
+
);
|
60 |
+
|
61 |
+
if (browserStateMessage) {
|
62 |
+
const browserStateContent = safeJsonParse<{ screenshot_base64?: string }>(browserStateMessage.content, {});
|
63 |
+
screenshotBase64 = browserStateContent?.screenshot_base64 || null;
|
64 |
+
}
|
65 |
+
}
|
66 |
+
|
67 |
+
// Check if we have a VNC preview URL from the project
|
68 |
+
const vncPreviewUrl = project?.sandbox?.vnc_preview ?
|
69 |
+
`${project.sandbox.vnc_preview}/vnc_lite.html?password=${project?.sandbox?.pass}&autoconnect=true&scale=local&width=1024&height=768` :
|
70 |
+
undefined;

  const isRunning = isStreaming || agentStatus === 'running';
  const isLastToolCall = currentIndex === (totalCalls - 1);

  // Memoize the VNC iframe to prevent reconnections on re-renders
  const vncIframe = useMemo(() => {
    if (!vncPreviewUrl) return null;

    console.log("[BrowserToolView] Creating memoized VNC iframe with URL:", vncPreviewUrl);

    return (
      <iframe
        src={vncPreviewUrl}
        title="Browser preview"
        className="w-full h-full border-0 flex-1"
      />
    );
  }, [vncPreviewUrl]); // Only recreate if the URL changes

  return (
    <div className="flex flex-col h-full">
      <div className="flex-1 p-4 overflow-auto">
        <div className="border border-zinc-200 dark:border-zinc-800 rounded-md overflow-hidden h-full flex flex-col">
          <div className="bg-zinc-100 dark:bg-zinc-900 p-2 flex items-center justify-between border-b border-zinc-200 dark:border-zinc-800">
            <div className="flex items-center">
              <MonitorPlay className="h-4 w-4 mr-2 text-zinc-600 dark:text-zinc-400" />
              <span className="text-xs font-medium text-zinc-700 dark:text-zinc-300">Browser Window</span>
            </div>
            {url && (
              <div className="text-xs font-mono text-zinc-500 dark:text-zinc-400 truncate max-w-[340px]">
                {url}
              </div>
            )}
          </div>

          {/* Preview Logic */}
          <div className="flex-1 flex items-stretch bg-black">
            {isLastToolCall ? (
              // Only show the live sandbox (or fall back to a screenshot) for the last tool call
              isRunning && vncIframe ? (
                // Use the memoized iframe for live preview
                vncIframe
              ) : screenshotBase64 ? (
                <div className="flex items-center justify-center w-full h-full max-h-[650px] overflow-auto">
                  <img
                    src={`data:image/jpeg;base64,${screenshotBase64}`}
                    alt="Browser Screenshot"
                    className="max-w-full max-h-full object-contain"
                  />
                </div>
              ) : vncIframe ? (
                // Use the memoized iframe
                vncIframe
              ) : (
                <div className="p-8 flex flex-col items-center justify-center w-full bg-zinc-50 dark:bg-zinc-900 text-zinc-700 dark:text-zinc-400">
                  <MonitorPlay className="h-12 w-12 mb-3 opacity-40" />
                  <p className="text-sm font-medium">Browser preview not available</p>
                  {url && (
                    <a
                      href={url}
                      target="_blank"
                      rel="noopener noreferrer"
                      className="mt-3 flex items-center text-blue-600 dark:text-blue-500 hover:text-blue-500 dark:hover:text-blue-400 hover:underline"
                    >
                      Visit URL <ExternalLink className="h-3 w-3 ml-1" />
                    </a>
                  )}
                </div>
              )
            ) : (
              // For non-last tool calls, only show a screenshot if available, otherwise show "No Browser State image found"
              screenshotBase64 ? (
                <div className="flex items-center justify-center w-full h-full max-h-[650px] overflow-auto">
                  <img
                    src={`data:image/jpeg;base64,${screenshotBase64}`}
                    alt="Browser Screenshot"
                    className="max-w-full max-h-full object-contain"
                  />
                </div>
              ) : (
                <div className="p-8 flex flex-col items-center justify-center w-full bg-zinc-50 dark:bg-zinc-900 text-zinc-700 dark:text-zinc-400">
                  <MonitorPlay className="h-12 w-12 mb-3 opacity-40" />
                  <p className="text-sm font-medium">No Browser State image found</p>
                </div>
              )
            )}
          </div>
        </div>
      </div>

      {/* Footer */}
      <div className="p-4 border-t border-zinc-200 dark:border-zinc-800">
        <div className="flex items-center justify-between text-xs text-zinc-500 dark:text-zinc-400">
          {!isRunning && (
            <div className="flex items-center gap-2">
              {isSuccess ? (
                <CheckCircle className="h-3.5 w-3.5 text-emerald-500" />
              ) : (
                <AlertTriangle className="h-3.5 w-3.5 text-red-500" />
              )}
              <span>
                {isSuccess ? `${operation} completed successfully` : `${operation} failed`}
              </span>
            </div>
          )}

          {isRunning && (
            <div className="flex items-center gap-2">
              <CircleDashed className="h-3.5 w-3.5 text-blue-500 animate-spin" />
              <span>Executing browser action...</span>
            </div>
          )}

          <div className="text-xs">
            {toolTimestamp && !isRunning
              ? formatTimestamp(toolTimestamp)
              : assistantTimestamp
                ? formatTimestamp(assistantTimestamp)
                : ''}
          </div>
        </div>
      </div>
    </div>
  );
}
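Note: every tool view added in this upload destructures the same props object, typed as `ToolViewProps` from `./types` (that file is not shown in this diff). The sketch below is a hypothetical reconstruction inferred only from how the props are used in these components; the field names come from the code, but the types, optionality, and the `project` shape are assumptions, and BrowserToolView additionally reads `agentStatus`, `currentIndex`, and `totalCalls`.

```typescript
// Hypothetical reconstruction of ./types.ts, for illustration only.
export interface ToolViewProps {
  name?: string;                     // tool name, e.g. "execute-command"
  assistantContent?: string | null;  // raw assistant message (often a JSON envelope)
  toolContent?: string | null;       // raw tool result message
  assistantTimestamp?: string;       // timestamp of the assistant message
  toolTimestamp?: string;            // timestamp of the tool result
  isSuccess?: boolean;               // whether the tool call succeeded
  isStreaming?: boolean;             // whether the tool call is still running
  project?: {                        // assumed shape; used by file/browser views
    sandbox?: { sandbox_url?: string; vnc_preview?: string; pass?: string };
  };
}
```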
CommandToolView.tsx
ADDED
@@ -0,0 +1,208 @@
import React from "react";
import { Terminal, CheckCircle, AlertTriangle, CircleDashed } from "lucide-react";
import { ToolViewProps } from "./types";
import { extractCommand, extractCommandOutput, extractExitCode, formatTimestamp, getToolTitle } from "./utils";
import { cn } from "@/lib/utils";

export function CommandToolView({
  name = "execute-command",
  assistantContent,
  toolContent,
  assistantTimestamp,
  toolTimestamp,
  isSuccess = true,
  isStreaming = false
}: ToolViewProps) {
  // Extract command with improved XML parsing
  const rawCommand = React.useMemo(() => {
    if (!assistantContent) return null;

    try {
      // Try to parse JSON content first
      const parsed = JSON.parse(assistantContent);
      if (parsed.content) {
        // Look for execute-command tag
        const commandMatch = parsed.content.match(/<execute-command[^>]*>([\s\S]*?)<\/execute-command>/);
        if (commandMatch) {
          return commandMatch[1].trim();
        }
      }
    } catch (e) {
      // If JSON parsing fails, try direct XML extraction
      const commandMatch = assistantContent.match(/<execute-command[^>]*>([\s\S]*?)<\/execute-command>/);
      if (commandMatch) {
        return commandMatch[1].trim();
      }
    }

    return null;
  }, [assistantContent]);

  // Clean the command by removing any leading/trailing whitespace and newlines
  const command = rawCommand
    ?.replace(/^suna@computer:~\$\s*/g, '') // Remove prompt prefix
    ?.replace(/\\n/g, '') // Remove escaped newlines
    ?.replace(/\n/g, '') // Remove actual newlines
    ?.trim(); // Clean up any remaining whitespace

  // Extract and clean the output with improved parsing
  const output = React.useMemo(() => {
    if (!toolContent) return null;

    try {
      // Try to parse JSON content first
      const parsed = JSON.parse(toolContent);
      if (parsed.content) {
        // Look for tool_result tag
        const toolResultMatch = parsed.content.match(/<tool_result>\s*<execute-command>([\s\S]*?)<\/execute-command>\s*<\/tool_result>/);
        if (toolResultMatch) {
          return toolResultMatch[1].trim();
        }

        // Look for output field in a ToolResult pattern
        const outputMatch = parsed.content.match(/ToolResult\(.*?output='([\s\S]*?)'.*?\)/);
        if (outputMatch) {
          return outputMatch[1].replace(/\\n/g, '\n').replace(/\\"/g, '"');
        }

        // Try to parse as direct JSON
        try {
          const outputJson = JSON.parse(parsed.content);
          if (outputJson.output) {
            return outputJson.output;
          }
        } catch (e) {
          // If JSON parsing fails, use the content as-is
          return parsed.content;
        }
      }
    } catch (e) {
      // If JSON parsing fails, try direct XML extraction
      const toolResultMatch = toolContent.match(/<tool_result>\s*<execute-command>([\s\S]*?)<\/execute-command>\s*<\/tool_result>/);
      if (toolResultMatch) {
        return toolResultMatch[1].trim();
      }

      const outputMatch = toolContent.match(/ToolResult\(.*?output='([\s\S]*?)'.*?\)/);
      if (outputMatch) {
        return outputMatch[1].replace(/\\n/g, '\n').replace(/\\"/g, '"');
      }
    }

    return toolContent;
  }, [toolContent]);

  const exitCode = extractExitCode(toolContent);
  const toolTitle = getToolTitle(name);

  return (
    <div className="flex flex-col h-full">
      <div className="flex-1 p-4 overflow-auto">
        <div className="border border-zinc-200 dark:border-zinc-800 rounded-md overflow-hidden h-full flex flex-col">
          <div className="flex items-center p-2 bg-zinc-100 dark:bg-zinc-900 justify-between border-b border-zinc-200 dark:border-zinc-800">
            <div className="flex items-center">
              <Terminal className="h-4 w-4 mr-2 text-zinc-600 dark:text-zinc-400" />
              <span className="text-xs font-medium text-zinc-700 dark:text-zinc-300">Terminal</span>
            </div>
            {exitCode !== null && !isStreaming && (
              <span className={cn(
                "text-xs flex items-center",
                isSuccess ? "text-emerald-600 dark:text-emerald-400" : "text-red-600 dark:text-red-400"
              )}>
                <span className="h-1.5 w-1.5 rounded-full mr-1.5 bg-current"></span>
                Exit: {exitCode}
              </span>
            )}
          </div>

          <div className="terminal-container flex-1 overflow-auto bg-black text-zinc-300 font-mono">
            <div className="p-3 text-xs">
              {command && output && !isStreaming && (
                <div className="space-y-2">
                  <div className="flex items-start">
                    <span className="text-emerald-400 shrink-0 mr-2">suna@computer:~$</span>
                    <span className="text-zinc-300">{command}</span>
                  </div>

                  <div className="whitespace-pre-wrap break-words text-zinc-400 pl-0">
                    {output}
                  </div>

                  {isSuccess && <div className="text-emerald-400 mt-1">suna@computer:~$ _</div>}
                </div>
              )}

              {command && !output && !isStreaming && (
                <div className="space-y-2">
                  <div className="flex items-start">
                    <span className="text-emerald-400 shrink-0 mr-2">suna@computer:~$</span>
                    <span className="text-zinc-300">{command}</span>
                  </div>
                  <div className="flex items-center h-4">
                    <div className="w-2 h-4 bg-zinc-500 animate-pulse"></div>
                  </div>
                </div>
              )}

              {!command && !output && !isStreaming && (
                <div className="flex items-start">
                  <span className="text-emerald-400 shrink-0 mr-2">suna@computer:~$</span>
                  <span className="w-2 h-4 bg-zinc-500 animate-pulse"></span>
                </div>
              )}

              {isStreaming && (
                <div className="space-y-2">
                  <div className="flex items-start">
                    <span className="text-emerald-400 shrink-0 mr-2">suna@computer:~$</span>
                    <span className="text-zinc-300">{command || 'running command...'}</span>
                  </div>
                  <div className="flex items-center gap-2 text-zinc-400">
                    <CircleDashed className="h-3 w-3 animate-spin text-blue-400" />
                    <span>Command execution in progress...</span>
                  </div>
                </div>
              )}
            </div>
          </div>
        </div>
      </div>

      {/* Footer */}
      <div className="p-4 border-t border-zinc-200 dark:border-zinc-800">
        <div className="flex items-center justify-between text-xs text-zinc-500 dark:text-zinc-400">
          {!isStreaming && (
            <div className="flex items-center gap-2">
              {isSuccess ? (
                <CheckCircle className="h-3.5 w-3.5 text-emerald-500" />
              ) : (
                <AlertTriangle className="h-3.5 w-3.5 text-red-500" />
              )}
              <span>
                {isSuccess
                  ? `Command completed successfully${exitCode !== null ? ` (exit code: ${exitCode})` : ''}`
                  : `Command failed${exitCode !== null ? ` with exit code ${exitCode}` : ''}`}
              </span>
            </div>
          )}

          {isStreaming && (
            <div className="flex items-center gap-2">
              <CircleDashed className="h-3.5 w-3.5 text-blue-500 animate-spin" />
              <span>Executing command...</span>
            </div>
          )}

          <div className="text-xs">
            {toolTimestamp && !isStreaming
              ? formatTimestamp(toolTimestamp)
              : assistantTimestamp
                ? formatTimestamp(assistantTimestamp)
                : ''}
          </div>
        </div>
      </div>
    </div>
  );
}
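Note: the `rawCommand` memo in CommandToolView expects the assistant message to wrap the shell command in an `<execute-command>` tag, either directly or inside a JSON envelope with a `content` field. A small illustrative sketch of that extraction on a made-up payload (the sample text is invented, not real traffic):

```typescript
// Illustrative only: what the regex in CommandToolView matches.
const sample = JSON.stringify({
  content: "Running it now: <execute-command>ls -la /workspace</execute-command>",
});

const parsed = JSON.parse(sample);
const match = parsed.content.match(/<execute-command[^>]*>([\s\S]*?)<\/execute-command>/);
console.log(match?.[1].trim()); // "ls -la /workspace"
```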
DataProviderToolView.tsx
ADDED
@@ -0,0 +1,229 @@
1 |
+
import React from "react";
|
2 |
+
import { ToolViewProps } from "./types";
|
3 |
+
import { formatTimestamp, getToolTitle } from "./utils";
|
4 |
+
import { getToolIcon } from "../utils";
|
5 |
+
import { CircleDashed, CheckCircle, AlertTriangle, Network, Database } from "lucide-react";
|
6 |
+
import { cn } from "@/lib/utils";
|
7 |
+
|
8 |
+
export function DataProviderToolView({
|
9 |
+
name = 'unknown',
|
10 |
+
assistantContent,
|
11 |
+
toolContent,
|
12 |
+
isSuccess = true,
|
13 |
+
isStreaming = false,
|
14 |
+
assistantTimestamp,
|
15 |
+
toolTimestamp
|
16 |
+
}: ToolViewProps) {
|
17 |
+
const toolTitle = getToolTitle(name);
|
18 |
+
const Icon = getToolIcon(name) || Network;
|
19 |
+
|
20 |
+
// Extract data from the assistant content (request)
|
21 |
+
const extractRequest = React.useMemo(() => {
|
22 |
+
if (!assistantContent) return null;
|
23 |
+
|
24 |
+
try {
|
25 |
+
// Parse assistant content as JSON
|
26 |
+
const parsed = JSON.parse(assistantContent);
|
27 |
+
|
28 |
+
if (parsed.content) {
|
29 |
+
// Try to extract content from service name and route
|
30 |
+
const serviceMatch = parsed.content.match(/service_name=\\?"([^"\\]+)\\?"/);
|
31 |
+
const routeMatch = parsed.content.match(/route=\\?"([^"\\]+)\\?"/);
|
32 |
+
|
33 |
+
// For execute-data-provider-call, also extract the payload
|
34 |
+
let payload = null;
|
35 |
+
if (name === 'execute-data-provider-call') {
|
36 |
+
const payloadMatch = parsed.content.match(/{([^}]+)}/);
|
37 |
+
if (payloadMatch) {
|
38 |
+
try {
|
39 |
+
// Try to parse the payload JSON
|
40 |
+
payload = JSON.parse(`{${payloadMatch[1]}}`);
|
41 |
+
} catch (e) {
|
42 |
+
payload = payloadMatch[1];
|
43 |
+
}
|
44 |
+
}
|
45 |
+
}
|
46 |
+
|
47 |
+
return {
|
48 |
+
service: serviceMatch ? serviceMatch[1] : undefined,
|
49 |
+
route: routeMatch ? routeMatch[1] : undefined,
|
50 |
+
payload
|
51 |
+
};
|
52 |
+
}
|
53 |
+
} catch (e) {
|
54 |
+
console.error("Error parsing assistant content:", e);
|
55 |
+
}
|
56 |
+
|
57 |
+
return null;
|
58 |
+
}, [assistantContent, name]);
|
59 |
+
|
60 |
+
// Parse the tool response
|
61 |
+
const parsedResponse = React.useMemo(() => {
|
62 |
+
if (!toolContent || isStreaming) return null;
|
63 |
+
|
64 |
+
try {
|
65 |
+
// Extract content from tool_result tags if present
|
66 |
+
const toolResultMatch = toolContent.match(/<tool_result>\s*<[^>]+>([\s\S]*?)<\/[^>]+>\s*<\/tool_result>/);
|
67 |
+
let contentToFormat = toolResultMatch ? toolResultMatch[1] : toolContent;
|
68 |
+
|
69 |
+
// Look for a ToolResult pattern
|
70 |
+
const toolResultOutputMatch = contentToFormat.match(/ToolResult\(success=.+?, output='([\s\S]*?)'\)/);
|
71 |
+
if (toolResultOutputMatch) {
|
72 |
+
contentToFormat = toolResultOutputMatch[1];
|
73 |
+
}
|
74 |
+
|
75 |
+
// Try to parse as JSON for pretty formatting
|
76 |
+
try {
|
77 |
+
// Replace escaped quotes and newlines
|
78 |
+
contentToFormat = contentToFormat.replace(/\\"/g, '"').replace(/\\n/g, '\n');
|
79 |
+
const parsedJson = JSON.parse(contentToFormat);
|
80 |
+
return JSON.stringify(parsedJson, null, 2);
|
81 |
+
} catch (e) {
|
82 |
+
// If not valid JSON, return as is
|
83 |
+
return contentToFormat;
|
84 |
+
}
|
85 |
+
} catch (e) {
|
86 |
+
return toolContent;
|
87 |
+
}
|
88 |
+
}, [toolContent, isStreaming]);
|
89 |
+
|
90 |
+
return (
|
91 |
+
<div className="flex flex-col h-full">
|
92 |
+
<div className="flex-1 p-4 overflow-auto">
|
93 |
+
<div className="border border-zinc-200 dark:border-zinc-800 rounded-md overflow-hidden h-full flex flex-col">
|
94 |
+
{/* Header - exactly like other tool views */}
|
95 |
+
<div className="flex items-center p-2 bg-zinc-100 dark:bg-zinc-900 justify-between border-b border-zinc-200 dark:border-zinc-800">
|
96 |
+
<div className="flex items-center">
|
97 |
+
<Database className="h-4 w-4 mr-2 text-zinc-600 dark:text-zinc-400" />
|
98 |
+
<span className="text-xs font-medium text-zinc-700 dark:text-zinc-300">{toolTitle}</span>
|
99 |
+
</div>
|
100 |
+
|
101 |
+
{!isStreaming && (
|
102 |
+
<span className={cn(
|
103 |
+
"text-xs flex items-center",
|
104 |
+
isSuccess ? "text-emerald-600 dark:text-emerald-400" : "text-red-600 dark:text-red-400"
|
105 |
+
)}>
|
106 |
+
<span className="h-1.5 w-1.5 rounded-full mr-1.5 bg-current"></span>
|
107 |
+
{isSuccess ? 'Success' : 'Failed'}
|
108 |
+
</span>
|
109 |
+
)}
|
110 |
+
</div>
|
111 |
+
|
112 |
+
{/* Request Info Bar - match style with file paths in other tools */}
|
113 |
+
{extractRequest && (
|
114 |
+
<div className="px-3 py-2 border-b border-zinc-200 dark:border-zinc-800 bg-zinc-50 dark:bg-zinc-900">
|
115 |
+
<code className="text-xs font-mono text-zinc-700 dark:text-zinc-300">
|
116 |
+
{extractRequest.service}{extractRequest.route && `/${extractRequest.route}`}
|
117 |
+
</code>
|
118 |
+
</div>
|
119 |
+
)}
|
120 |
+
|
121 |
+
{/* Content Container */}
|
122 |
+
{!isStreaming ? (
|
123 |
+
<div className="flex-1 bg-white dark:bg-zinc-950 font-mono text-sm">
|
124 |
+
<div className="p-3">
|
125 |
+
{/* Request section - show payload if available */}
|
126 |
+
{extractRequest?.payload && (
|
127 |
+
<div className="mb-4">
|
128 |
+
<div className="text-xs font-medium text-zinc-500 dark:text-zinc-400 mb-2">Request Payload</div>
|
129 |
+
<div className="bg-zinc-50 dark:bg-zinc-900 border border-zinc-200 dark:border-zinc-800 rounded-md">
|
130 |
+
<pre className="p-3 text-xs overflow-auto whitespace-pre-wrap text-zinc-800 dark:text-zinc-300 font-mono">
|
131 |
+
{typeof extractRequest.payload === 'object'
|
132 |
+
? JSON.stringify(extractRequest.payload, null, 2)
|
133 |
+
: extractRequest.payload}
|
134 |
+
</pre>
|
135 |
+
</div>
|
136 |
+
</div>
|
137 |
+
)}
|
138 |
+
|
139 |
+
{/* Response section */}
|
140 |
+
{parsedResponse && (
|
141 |
+
<div>
|
142 |
+
<div className="text-xs font-medium text-zinc-500 dark:text-zinc-400 mb-2">Response Data</div>
|
143 |
+
<div className="bg-zinc-50 dark:bg-zinc-900 border border-zinc-200 dark:border-zinc-800 rounded-md">
|
144 |
+
<pre className="p-3 text-xs overflow-auto whitespace-pre-wrap text-zinc-800 dark:text-zinc-300 font-mono">
|
145 |
+
{parsedResponse}
|
146 |
+
</pre>
|
147 |
+
</div>
|
148 |
+
</div>
|
149 |
+
)}
|
150 |
+
|
151 |
+
{/* Show raw data if parsed content isn't available */}
|
152 |
+
{!extractRequest?.payload && !parsedResponse && assistantContent && (
|
153 |
+
<div className="mb-4">
|
154 |
+
<div className="text-xs font-medium text-zinc-500 dark:text-zinc-400 mb-2">Raw Request</div>
|
155 |
+
<div className="bg-zinc-50 dark:bg-zinc-900 border border-zinc-200 dark:border-zinc-800 rounded-md">
|
156 |
+
<pre className="p-3 text-xs overflow-auto whitespace-pre-wrap text-zinc-800 dark:text-zinc-300 font-mono">
|
157 |
+
{assistantContent}
|
158 |
+
</pre>
|
159 |
+
</div>
|
160 |
+
</div>
|
161 |
+
)}
|
162 |
+
|
163 |
+
{!parsedResponse && toolContent && (
|
164 |
+
<div>
|
165 |
+
<div className="text-xs font-medium text-zinc-500 dark:text-zinc-400 mb-2">Raw Response</div>
|
166 |
+
<div className="bg-zinc-50 dark:bg-zinc-900 border border-zinc-200 dark:border-zinc-800 rounded-md">
|
167 |
+
<pre className="p-3 text-xs overflow-auto whitespace-pre-wrap text-zinc-800 dark:text-zinc-300 font-mono">
|
168 |
+
{toolContent}
|
169 |
+
</pre>
|
170 |
+
</div>
|
171 |
+
</div>
|
172 |
+
)}
|
173 |
+
</div>
|
174 |
+
</div>
|
175 |
+
) : (
|
176 |
+
<div className="flex-1 bg-white dark:bg-zinc-950 flex items-center justify-center">
|
177 |
+
<div className="text-center p-6">
|
178 |
+
<CircleDashed className="h-8 w-8 mx-auto mb-3 text-blue-500 animate-spin" />
|
179 |
+
<p className="text-sm font-medium text-zinc-700 dark:text-zinc-300">
|
180 |
+
Processing {name.toLowerCase()} operation...
|
181 |
+
</p>
|
182 |
+
{extractRequest?.service && extractRequest?.route && (
|
183 |
+
<p className="text-xs mt-1 text-zinc-500 dark:text-zinc-400 font-mono">
|
184 |
+
{extractRequest.service}/{extractRequest.route}
|
185 |
+
</p>
|
186 |
+
)}
|
187 |
+
</div>
|
188 |
+
</div>
|
189 |
+
)}
|
190 |
+
</div>
|
191 |
+
</div>
|
192 |
+
|
193 |
+
{/* Footer - exactly like other tool views */}
|
194 |
+
<div className="p-4 border-t border-zinc-200 dark:border-zinc-800">
|
195 |
+
<div className="flex items-center justify-between text-xs text-zinc-500 dark:text-zinc-400">
|
196 |
+
{!isStreaming && (
|
197 |
+
<div className="flex items-center gap-2">
|
198 |
+
{isSuccess ? (
|
199 |
+
<CheckCircle className="h-3.5 w-3.5 text-emerald-500" />
|
200 |
+
) : (
|
201 |
+
<AlertTriangle className="h-3.5 w-3.5 text-red-500" />
|
202 |
+
)}
|
203 |
+
<span>
|
204 |
+
{isSuccess
|
205 |
+
? `${toolTitle} completed successfully`
|
206 |
+
: `${toolTitle} operation failed`}
|
207 |
+
</span>
|
208 |
+
</div>
|
209 |
+
)}
|
210 |
+
|
211 |
+
{isStreaming && (
|
212 |
+
<div className="flex items-center gap-2">
|
213 |
+
<CircleDashed className="h-3.5 w-3.5 text-blue-500 animate-spin" />
|
214 |
+
<span>Executing {toolTitle.toLowerCase()}...</span>
|
215 |
+
</div>
|
216 |
+
)}
|
217 |
+
|
218 |
+
<div className="text-xs">
|
219 |
+
{toolTimestamp && !isStreaming
|
220 |
+
? formatTimestamp(toolTimestamp)
|
221 |
+
: assistantTimestamp
|
222 |
+
? formatTimestamp(assistantTimestamp)
|
223 |
+
: ''}
|
224 |
+
</div>
|
225 |
+
</div>
|
226 |
+
</div>
|
227 |
+
</div>
|
228 |
+
);
|
229 |
+
}
|
Dockerfile
ADDED
@@ -0,0 +1,32 @@
FROM node:20-slim

WORKDIR /app

# Copy package files first for better layer caching
COPY package*.json ./

# Install build dependencies for node-gyp
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3 \
    make \
    g++ \
    build-essential \
    pkg-config \
    libcairo2-dev \
    libpango1.0-dev \
    libjpeg-dev \
    libgif-dev \
    librsvg2-dev \
    && rm -rf /var/lib/apt/lists/*

RUN npm install

# Copy the frontend code
COPY . .

RUN npm run build

EXPOSE 3000

# Default command runs the production server; it can be overridden in docker-compose
CMD ["npm", "start"]
ExposePortToolView.tsx
ADDED
@@ -0,0 +1,194 @@
import React from "react";
import { ToolViewProps } from "./types";
import { formatTimestamp } from "./utils";
import { ExternalLink, CheckCircle, AlertTriangle } from "lucide-react";
import { Markdown } from "@/components/ui/markdown";
import { cn } from "@/lib/utils";

export function ExposePortToolView({
  name = 'expose-port',
  assistantContent,
  toolContent,
  isSuccess = true,
  isStreaming = false,
  assistantTimestamp,
  toolTimestamp
}: ToolViewProps) {
  console.log('ExposePortToolView:', {
    name,
    assistantContent,
    toolContent,
    isSuccess,
    isStreaming,
    assistantTimestamp,
    toolTimestamp
  });

  // Parse the assistant content
  const parsedAssistantContent = React.useMemo(() => {
    if (!assistantContent) return null;
    try {
      const parsed = JSON.parse(assistantContent);
      return parsed.content;
    } catch (e) {
      console.error('Failed to parse assistant content:', e);
      return null;
    }
  }, [assistantContent]);

  // Parse the tool result
  const toolResult = React.useMemo(() => {
    if (!toolContent) return null;
    try {
      // First parse the outer JSON
      const parsed = JSON.parse(toolContent);
      // Then extract the tool result content
      const match = parsed.content.match(/output='(.*?)'/);
      if (match) {
        const jsonStr = match[1]
          .replace(/\\n/g, '')
          .replace(/\\"/g, '"');
        return JSON.parse(jsonStr);
      }
      return null;
    } catch (e) {
      console.error('Failed to parse tool content:', e);
      return null;
    }
  }, [toolContent]);

  // Extract port number from assistant content
  const portNumber = React.useMemo(() => {
    if (!parsedAssistantContent) return null;
    try {
      const match = parsedAssistantContent.match(/<expose-port>\s*(\d+)\s*<\/expose-port>/);
      return match ? match[1] : null;
    } catch (e) {
      console.error('Failed to extract port number:', e);
      return null;
    }
  }, [parsedAssistantContent]);

  // If we have no content to show, render a placeholder
  if (!portNumber && !toolResult && !isStreaming) {
    return (
      <div className="flex flex-col h-full p-4">
        <div className="text-xs text-zinc-500 dark:text-zinc-400">
          No port exposure information available
        </div>
      </div>
    );
  }

  return (
    <div className="flex flex-col h-full">
      <div className="flex-1 p-4 overflow-auto">
        {/* Assistant Content */}
        {portNumber && !isStreaming && (
          <div className="space-y-1.5">
            <div className="flex justify-between items-center">
              <div className="text-xs font-medium text-zinc-500 dark:text-zinc-400">Port to Expose</div>
              {assistantTimestamp && (
                <div className="text-xs text-zinc-500 dark:text-zinc-400">{formatTimestamp(assistantTimestamp)}</div>
              )}
            </div>
            <div className="rounded-md border border-zinc-200 dark:border-zinc-800 bg-zinc-50 dark:bg-zinc-900 p-3">
              <div className="flex items-center gap-2">
                <div className="text-xs font-medium text-zinc-800 dark:text-zinc-300">Port</div>
                <div className="px-2 py-1 rounded-md bg-zinc-100 dark:bg-zinc-800 text-xs font-mono text-zinc-800 dark:text-zinc-300">
                  {portNumber}
                </div>
              </div>
            </div>
          </div>
        )}

        {/* Tool Result */}
        {toolResult && (
          <div className="space-y-1.5 mt-4">
            <div className="flex justify-between items-center">
              <div className="text-xs font-medium text-zinc-500 dark:text-zinc-400">
                {isStreaming ? "Processing" : "Exposed URL"}
              </div>
              {toolTimestamp && !isStreaming && (
                <div className="text-xs text-zinc-500 dark:text-zinc-400">{formatTimestamp(toolTimestamp)}</div>
              )}
            </div>
            <div className={cn(
              "rounded-md border p-3",
              isStreaming
                ? 'border-blue-200 bg-blue-50 dark:border-blue-800 dark:bg-blue-900/10'
                : isSuccess
                  ? 'border-zinc-200 bg-zinc-50 dark:border-zinc-800 dark:bg-zinc-900'
                  : 'border-red-200 bg-red-50 dark:border-red-800 dark:bg-red-900/10'
            )}>
              {isStreaming ? (
                <div className="flex items-center gap-2 text-xs font-medium text-blue-700 dark:text-blue-400">
                  <span>Exposing port {portNumber}...</span>
                </div>
              ) : (
                <div className="space-y-3">
                  <div className="flex items-center gap-2">
                    <ExternalLink className="h-4 w-4 text-zinc-500 dark:text-zinc-400" />
                    <a
                      href={toolResult.url}
                      target="_blank"
                      rel="noopener noreferrer"
                      className="text-xs font-medium text-blue-600 dark:text-blue-400 hover:underline break-all"
                    >
                      {toolResult.url}
                    </a>
                  </div>
                  <div className="flex items-center gap-2">
                    <div className="text-xs text-zinc-600 dark:text-zinc-400">Port</div>
                    <div className="px-2 py-1 rounded-md bg-zinc-100 dark:bg-zinc-800 text-xs font-mono text-zinc-800 dark:text-zinc-300">
                      {toolResult.port}
                    </div>
                  </div>
                  <div className="text-xs text-zinc-600 dark:text-zinc-400">
                    {toolResult.message}
                  </div>
                  <div className="text-xs text-amber-600 dark:text-amber-400 italic">
                    Note: This URL might only be temporarily available and could expire after some time.
                  </div>
                </div>
              )}
            </div>
          </div>
        )}
      </div>

      {/* Footer */}
      <div className="p-4 border-t border-zinc-200 dark:border-zinc-800">
        <div className="flex items-center justify-between text-xs text-zinc-500 dark:text-zinc-400">
          {!isStreaming && (
            <div className="flex items-center gap-2">
              {isSuccess ? (
                <CheckCircle className="h-3.5 w-3.5 text-emerald-500" />
              ) : (
                <AlertTriangle className="h-3.5 w-3.5 text-red-500" />
              )}
              <span>
                {isSuccess ? 'Port exposed successfully' : 'Failed to expose port'}
              </span>
            </div>
          )}

          {isStreaming && (
            <div className="flex items-center gap-2">
              <span>Exposing port...</span>
            </div>
          )}

          <div className="text-xs">
            {toolTimestamp && !isStreaming
              ? formatTimestamp(toolTimestamp)
              : assistantTimestamp
                ? formatTimestamp(assistantTimestamp)
                : ''}
          </div>
        </div>
      </div>
    </div>
  );
}
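Note: the `toolResult` parsed in ExposePortToolView is a small JSON object embedded in the ToolResult `output='...'` string. Judging from the fields the JSX reads (`url`, `port`, `message`), its shape is roughly the following; this is an inferred sketch, since the backend payload itself is not part of this upload:

```typescript
// Assumed shape of the expose-port tool result, inferred from usage in ExposePortToolView.
interface ExposePortResult {
  url: string;     // publicly reachable preview URL
  port: number;    // the sandbox port that was exposed
  message: string; // human-readable status message
}
```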
FileOperationToolView.tsx
ADDED
@@ -0,0 +1,494 @@
1 |
+
import React, { useState } from "react";
|
2 |
+
import { FileCode, FileSymlink, FolderPlus, FileX, Replace, CheckCircle, AlertTriangle, ExternalLink, CircleDashed, Code, Eye, FileSpreadsheet } from "lucide-react";
|
3 |
+
import { ToolViewProps } from "./types";
|
4 |
+
import { extractFilePath, extractFileContent, getFileType, formatTimestamp, getToolTitle } from "./utils";
|
5 |
+
import { GenericToolView } from "./GenericToolView";
|
6 |
+
import { MarkdownRenderer, processUnicodeContent } from "@/components/file-renderers/markdown-renderer";
|
7 |
+
import { CsvRenderer } from "@/components/file-renderers/csv-renderer";
|
8 |
+
import { cn } from "@/lib/utils";
|
9 |
+
import { useTheme } from "next-themes";
|
10 |
+
import { CodeBlockCode } from "@/components/ui/code-block";
|
11 |
+
import { constructHtmlPreviewUrl } from "@/lib/utils/url";
|
12 |
+
|
13 |
+
|
14 |
+
// Type for operation type
|
15 |
+
type FileOperation = "create" | "rewrite" | "delete";
|
16 |
+
|
17 |
+
// Map file extensions to language names for syntax highlighting
|
18 |
+
const getLanguageFromFileName = (fileName: string): string => {
|
19 |
+
const extension = fileName.split('.').pop()?.toLowerCase() || '';
|
20 |
+
|
21 |
+
// Map of file extensions to language names for syntax highlighting
|
22 |
+
const extensionMap: Record<string, string> = {
|
23 |
+
// Web languages
|
24 |
+
'html': 'html',
|
25 |
+
'htm': 'html',
|
26 |
+
'css': 'css',
|
27 |
+
'scss': 'scss',
|
28 |
+
'sass': 'scss',
|
29 |
+
'less': 'less',
|
30 |
+
'js': 'javascript',
|
31 |
+
'jsx': 'jsx',
|
32 |
+
'ts': 'typescript',
|
33 |
+
'tsx': 'tsx',
|
34 |
+
'json': 'json',
|
35 |
+
'jsonc': 'json',
|
36 |
+
|
37 |
+
// Build and config files
|
38 |
+
'xml': 'xml',
|
39 |
+
'yml': 'yaml',
|
40 |
+
'yaml': 'yaml',
|
41 |
+
'toml': 'toml',
|
42 |
+
'ini': 'ini',
|
43 |
+
'env': 'bash',
|
44 |
+
'gitignore': 'bash',
|
45 |
+
'dockerignore': 'bash',
|
46 |
+
|
47 |
+
// Scripting languages
|
48 |
+
'py': 'python',
|
49 |
+
'rb': 'ruby',
|
50 |
+
'php': 'php',
|
51 |
+
'go': 'go',
|
52 |
+
'java': 'java',
|
53 |
+
'kt': 'kotlin',
|
54 |
+
'c': 'c',
|
55 |
+
'cpp': 'cpp',
|
56 |
+
'h': 'c',
|
57 |
+
'hpp': 'cpp',
|
58 |
+
'cs': 'csharp',
|
59 |
+
'swift': 'swift',
|
60 |
+
'rs': 'rust',
|
61 |
+
|
62 |
+
// Shell scripts
|
63 |
+
'sh': 'bash',
|
64 |
+
'bash': 'bash',
|
65 |
+
'zsh': 'bash',
|
66 |
+
'ps1': 'powershell',
|
67 |
+
'bat': 'batch',
|
68 |
+
'cmd': 'batch',
|
69 |
+
|
70 |
+
// Markup languages (excluding markdown which has its own renderer)
|
71 |
+
'svg': 'svg',
|
72 |
+
'tex': 'latex',
|
73 |
+
|
74 |
+
// Data formats
|
75 |
+
'graphql': 'graphql',
|
76 |
+
'gql': 'graphql',
|
77 |
+
};
|
78 |
+
|
79 |
+
return extensionMap[extension] || 'text';
|
80 |
+
};
|
81 |
+
|
82 |
+
export function FileOperationToolView({
|
83 |
+
assistantContent,
|
84 |
+
toolContent,
|
85 |
+
assistantTimestamp,
|
86 |
+
toolTimestamp,
|
87 |
+
isSuccess = true,
|
88 |
+
isStreaming = false,
|
89 |
+
name,
|
90 |
+
project
|
91 |
+
}: ToolViewProps) {
|
92 |
+
const { resolvedTheme } = useTheme();
|
93 |
+
const isDarkTheme = resolvedTheme === 'dark';
|
94 |
+
|
95 |
+
// Determine operation type from content or name
|
96 |
+
const getOperationType = (): FileOperation => {
|
97 |
+
// First check tool name if available
|
98 |
+
if (name) {
|
99 |
+
if (name.includes("create")) return "create";
|
100 |
+
if (name.includes("rewrite")) return "rewrite";
|
101 |
+
if (name.includes("delete")) return "delete";
|
102 |
+
}
|
103 |
+
|
104 |
+
if (!assistantContent) return "create"; // default fallback
|
105 |
+
|
106 |
+
if (assistantContent.includes("<create-file>")) return "create";
|
107 |
+
if (assistantContent.includes("<full-file-rewrite>")) return "rewrite";
|
108 |
+
if (assistantContent.includes("delete-file") || assistantContent.includes("<delete>")) return "delete";
|
109 |
+
|
110 |
+
// Check for tool names as a fallback
|
111 |
+
if (assistantContent.toLowerCase().includes("create file")) return "create";
|
112 |
+
if (assistantContent.toLowerCase().includes("rewrite file")) return "rewrite";
|
113 |
+
if (assistantContent.toLowerCase().includes("delete file")) return "delete";
|
114 |
+
|
115 |
+
// Default to create if we can't determine
|
116 |
+
return "create";
|
117 |
+
};
|
118 |
+
|
119 |
+
const operation = getOperationType();
|
120 |
+
const filePath = extractFilePath(assistantContent);
|
121 |
+
const toolTitle = getToolTitle(name || `file-${operation}`);
|
122 |
+
|
123 |
+
// Only extract content for create and rewrite operations
|
124 |
+
const fileContent = operation !== "delete"
|
125 |
+
? extractFileContent(assistantContent, operation === "create" ? 'create-file' : 'full-file-rewrite')
|
126 |
+
: null;
|
127 |
+
|
128 |
+
// For debugging - show raw content if file path can't be extracted for delete operations
|
129 |
+
const showDebugInfo = !filePath && operation === "delete";
|
130 |
+
|
131 |
+
// Process file path - handle potential newlines and clean up
|
132 |
+
const processedFilePath = filePath ? filePath.trim().replace(/\\n/g, '\n').split('\n')[0] : null;
|
133 |
+
|
134 |
+
// For create and rewrite, prepare content for display
|
135 |
+
const contentLines = fileContent ? fileContent.replace(/\\n/g, '\n').split('\n') : [];
|
136 |
+
const fileName = processedFilePath ? processedFilePath.split('/').pop() || processedFilePath : '';
|
137 |
+
const fileType = processedFilePath ? getFileType(processedFilePath) : '';
|
138 |
+
const isMarkdown = fileName.endsWith('.md');
|
139 |
+
const isHtml = fileName.endsWith('.html');
|
140 |
+
const isCsv = fileName.endsWith('.csv');
|
141 |
+
const language = getLanguageFromFileName(fileName);
|
142 |
+
const hasHighlighting = language !== 'text';
|
143 |
+
// Construct HTML file preview URL if we have a sandbox and the file is HTML
|
144 |
+
const htmlPreviewUrl = (isHtml && project?.sandbox?.sandbox_url && processedFilePath)
|
145 |
+
? constructHtmlPreviewUrl(project.sandbox.sandbox_url, processedFilePath)
|
146 |
+
: undefined;
|
147 |
+
|
148 |
+
console.log('HTML Preview URL:', htmlPreviewUrl);
|
149 |
+
// Add state for view mode toggle (code or preview)
|
150 |
+
const [viewMode, setViewMode] = useState<'code' | 'preview'>(isHtml || isMarkdown || isCsv ? 'preview' : 'code');
|
151 |
+
|
152 |
+
// Fall back to generic view if file path is missing or if content is missing for non-delete operations
|
153 |
+
if ((!filePath && !showDebugInfo) || (operation !== "delete" && !fileContent)) {
|
154 |
+
return (
|
155 |
+
<GenericToolView
|
156 |
+
name={name || `file-${operation}`}
|
157 |
+
assistantContent={assistantContent}
|
158 |
+
toolContent={toolContent}
|
159 |
+
assistantTimestamp={assistantTimestamp}
|
160 |
+
toolTimestamp={toolTimestamp}
|
161 |
+
isSuccess={isSuccess}
|
162 |
+
isStreaming={isStreaming}
|
163 |
+
/>
|
164 |
+
);
|
165 |
+
}
|
166 |
+
|
167 |
+
// Operation-specific configs
|
168 |
+
const configs = {
|
169 |
+
create: {
|
170 |
+
icon: FolderPlus,
|
171 |
+
successMessage: "File created successfully"
|
172 |
+
},
|
173 |
+
rewrite: {
|
174 |
+
icon: Replace,
|
175 |
+
successMessage: "File rewritten successfully"
|
176 |
+
},
|
177 |
+
delete: {
|
178 |
+
icon: FileX,
|
179 |
+
successMessage: "File deleted successfully"
|
180 |
+
}
|
181 |
+
};
|
182 |
+
|
183 |
+
const config = configs[operation];
|
184 |
+
const Icon = config.icon;
|
185 |
+
|
186 |
+
return (
|
187 |
+
<div className="flex flex-col h-full">
|
188 |
+
<div className="flex-1 p-4 overflow-auto">
|
189 |
+
{/* File Content for create and rewrite operations */}
|
190 |
+
{operation !== "delete" && fileContent && !isStreaming && (
|
191 |
+
<div className="border border-zinc-200 dark:border-zinc-800 rounded-md overflow-hidden shadow-sm bg-white dark:bg-zinc-950 h-full flex flex-col">
|
192 |
+
{/* IDE Header */}
|
193 |
+
<div className="flex items-center p-2 bg-zinc-100 dark:bg-zinc-900 text-zinc-900 dark:text-zinc-100 justify-between border-b border-zinc-200 dark:border-zinc-800">
|
194 |
+
<div className="flex items-center">
|
195 |
+
{isMarkdown ?
|
196 |
+
<FileCode className="h-4 w-4 mr-2 text-zinc-600 dark:text-zinc-400" /> :
|
197 |
+
isCsv ?
|
198 |
+
<FileSpreadsheet className="h-4 w-4 mr-2 text-zinc-600 dark:text-zinc-400" /> :
|
199 |
+
<FileSymlink className="h-4 w-4 mr-2 text-zinc-600 dark:text-zinc-400" />
|
200 |
+
}
|
201 |
+
<span className="text-xs font-medium">{fileName}</span>
|
202 |
+
</div>
|
203 |
+
|
204 |
+
<div className="flex items-center gap-2">
|
205 |
+
{/* View switcher for HTML files */}
|
206 |
+
{isHtml && htmlPreviewUrl && isSuccess && (
|
207 |
+
<div className="flex rounded-md overflow-hidden border border-zinc-200 dark:border-zinc-700">
|
208 |
+
<button
|
209 |
+
onClick={() => setViewMode('code')}
|
210 |
+
className={cn(
|
211 |
+
"flex items-center gap-1 text-xs px-2 py-1 transition-colors",
|
212 |
+
viewMode === 'code'
|
213 |
+
? "bg-zinc-800 text-zinc-100 dark:bg-zinc-700 dark:text-zinc-100"
|
214 |
+
: "bg-zinc-200 text-zinc-700 dark:bg-zinc-800 dark:text-zinc-400 hover:bg-zinc-300 dark:hover:bg-zinc-700"
|
215 |
+
)}
|
216 |
+
>
|
217 |
+
<Code className="h-3 w-3" />
|
218 |
+
<span>Code</span>
|
219 |
+
</button>
|
220 |
+
<button
|
221 |
+
onClick={() => setViewMode('preview')}
|
222 |
+
className={cn(
|
223 |
+
"flex items-center gap-1 text-xs px-2 py-1 transition-colors",
|
224 |
+
viewMode === 'preview'
|
225 |
+
? "bg-zinc-800 text-zinc-100 dark:bg-zinc-700 dark:text-zinc-100"
|
226 |
+
: "bg-zinc-200 text-zinc-700 dark:bg-zinc-800 dark:text-zinc-400 hover:bg-zinc-300 dark:hover:bg-zinc-700"
|
227 |
+
)}
|
228 |
+
>
|
229 |
+
<Eye className="h-3 w-3" />
|
230 |
+
<span>Preview</span>
|
231 |
+
</button>
|
232 |
+
</div>
|
233 |
+
)}
|
234 |
+
{/* View switcher for Markdown files */}
|
235 |
+
{isMarkdown && isSuccess && (
|
236 |
+
<div className="flex rounded-md overflow-hidden border border-zinc-200 dark:border-zinc-700">
|
237 |
+
<button
|
238 |
+
onClick={() => setViewMode('code')}
|
239 |
+
className={cn(
|
240 |
+
"flex items-center gap-1 text-xs px-2 py-1 transition-colors",
|
241 |
+
viewMode === 'code'
|
242 |
+
? "bg-zinc-800 text-zinc-100 dark:bg-zinc-700 dark:text-zinc-100"
|
243 |
+
: "bg-zinc-200 text-zinc-700 dark:bg-zinc-800 dark:text-zinc-400 hover:bg-zinc-300 dark:hover:bg-zinc-700"
|
244 |
+
)}
|
245 |
+
>
|
246 |
+
<Code className="h-3 w-3" />
|
247 |
+
<span>Code</span>
|
248 |
+
</button>
|
249 |
+
<button
|
250 |
+
onClick={() => setViewMode('preview')}
|
251 |
+
className={cn(
|
252 |
+
"flex items-center gap-1 text-xs px-2 py-1 transition-colors",
|
253 |
+
viewMode === 'preview'
|
254 |
+
? "bg-zinc-800 text-zinc-100 dark:bg-zinc-700 dark:text-zinc-100"
|
255 |
+
: "bg-zinc-200 text-zinc-700 dark:bg-zinc-800 dark:text-zinc-400 hover:bg-zinc-300 dark:hover:bg-zinc-700"
|
256 |
+
)}
|
257 |
+
>
|
258 |
+
<Eye className="h-3 w-3" />
|
259 |
+
<span>Preview</span>
|
260 |
+
</button>
|
261 |
+
</div>
|
262 |
+
)}
|
263 |
+
{/* View switcher for CSV files */}
|
264 |
+
{isCsv && isSuccess && (
|
265 |
+
<div className="flex rounded-md overflow-hidden border border-zinc-200 dark:border-zinc-700">
|
266 |
+
<button
|
267 |
+
onClick={() => setViewMode('code')}
|
268 |
+
className={cn(
|
269 |
+
"flex items-center gap-1 text-xs px-2 py-1 transition-colors",
|
270 |
+
viewMode === 'code'
|
271 |
+
? "bg-zinc-800 text-zinc-100 dark:bg-zinc-700 dark:text-zinc-100"
|
272 |
+
: "bg-zinc-200 text-zinc-700 dark:bg-zinc-800 dark:text-zinc-400 hover:bg-zinc-300 dark:hover:bg-zinc-700"
|
273 |
+
)}
|
274 |
+
>
|
275 |
+
<Code className="h-3 w-3" />
|
276 |
+
<span>Code</span>
|
277 |
+
</button>
|
278 |
+
<button
|
279 |
+
onClick={() => setViewMode('preview')}
|
280 |
+
className={cn(
|
281 |
+
"flex items-center gap-1 text-xs px-2 py-1 transition-colors",
|
282 |
+
viewMode === 'preview'
|
283 |
+
? "bg-zinc-800 text-zinc-100 dark:bg-zinc-700 dark:text-zinc-100"
|
284 |
+
: "bg-zinc-200 text-zinc-700 dark:bg-zinc-800 dark:text-zinc-400 hover:bg-zinc-300 dark:hover:bg-zinc-700"
|
285 |
+
)}
|
286 |
+
>
|
287 |
+
<Eye className="h-3 w-3" />
|
288 |
+
<span>Preview</span>
|
289 |
+
</button>
|
290 |
+
</div>
|
291 |
+
)}
|
292 |
+
<span className="text-xs text-zinc-500 dark:text-zinc-400 bg-zinc-200 dark:bg-zinc-800 px-2 py-0.5 rounded">
|
293 |
+
{hasHighlighting ? language.toUpperCase() : fileType}
|
294 |
+
</span>
|
295 |
+
</div>
|
296 |
+
</div>
|
297 |
+
|
298 |
+
{/* File Content (Code View with Syntax Highlighting) */}
|
299 |
+
{viewMode === 'code' || (!isHtml && !isMarkdown && !isCsv) || !isSuccess ? (
|
300 |
+
<div className="flex-1 overflow-auto bg-white dark:bg-zinc-950 text-zinc-900 dark:text-zinc-100">
|
301 |
+
{hasHighlighting ? (
|
302 |
+
<div className="relative">
|
303 |
+
<div className="absolute left-0 top-0 bottom-0 w-12 border-r border-zinc-200 dark:border-zinc-800 z-10 flex flex-col">
|
304 |
+
{contentLines.map((_, idx) => (
|
305 |
+
<div key={idx}
|
306 |
+
className="h-6 text-right pr-3 text-xs font-mono text-zinc-500 dark:text-zinc-500 select-none">
|
307 |
+
{idx + 1}
|
308 |
+
</div>
|
309 |
+
))}
|
310 |
+
</div>
|
311 |
+
<div className="pl-12">
|
312 |
+
<CodeBlockCode
|
313 |
+
code={processUnicodeContent(fileContent)}
|
314 |
+
language={language}
|
315 |
+
className="text-xs p-2"
|
316 |
+
/>
|
317 |
+
</div>
|
318 |
+
</div>
|
319 |
+
) : (
|
320 |
+
<div className="min-w-full table">
|
321 |
+
{contentLines.map((line, idx) => (
|
322 |
+
<div key={idx} className="table-row hover:bg-zinc-50 dark:hover:bg-zinc-900 transition-colors">
|
323 |
+
<div className="table-cell text-right pr-3 py-0.5 text-xs font-mono text-zinc-500 dark:text-zinc-500 select-none w-12 border-r border-zinc-200 dark:border-zinc-800">
|
324 |
+
{idx + 1}
|
325 |
+
</div>
|
326 |
+
<div className="table-cell pl-3 py-0.5 text-xs font-mono whitespace-pre text-zinc-800 dark:text-zinc-300">
|
327 |
+
{processUnicodeContent(line) || ' '}
|
328 |
+
</div>
|
329 |
+
</div>
|
330 |
+
))}
|
331 |
+
<div className="table-row h-4"></div>
|
332 |
+
</div>
|
333 |
+
)}
|
334 |
+
</div>
|
335 |
+
) : null}
|
336 |
+
|
337 |
+
{/* HTML Preview with iframe */}
|
338 |
+
{isHtml && viewMode === 'preview' && htmlPreviewUrl && isSuccess && (
|
339 |
+
<div className="flex-1 bg-white overflow-hidden">
|
340 |
+
<iframe
|
341 |
+
src={htmlPreviewUrl}
|
342 |
+
title={`HTML Preview of ${fileName}`}
|
343 |
+
className="w-full h-full border-0"
|
344 |
+
style={{ minHeight: "300px" }}
|
345 |
+
sandbox="allow-same-origin allow-scripts"
|
346 |
+
/>
|
347 |
+
</div>
|
348 |
+
)}
|
349 |
+
|
350 |
+
{/* Markdown Preview */}
|
351 |
+
{isMarkdown && viewMode === 'preview' && isSuccess && (
|
352 |
+
<div className="flex-1 overflow-auto bg-white dark:bg-zinc-950 text-zinc-900 dark:text-zinc-100">
|
353 |
+
<MarkdownRenderer content={processUnicodeContent(fileContent)} />
|
354 |
+
</div>
|
355 |
+
)}
|
356 |
+
|
357 |
+
{/* CSV Preview */}
|
358 |
+
{isCsv && viewMode === 'preview' && isSuccess && (
|
359 |
+
<div className="flex-1 overflow-hidden bg-white dark:bg-zinc-950">
|
360 |
+
<CsvRenderer content={processUnicodeContent(fileContent)} />
|
361 |
+
</div>
|
362 |
+
)}
|
363 |
+
|
364 |
+
{/* External link button for HTML files */}
|
365 |
+
{isHtml && viewMode === 'preview' && htmlPreviewUrl && isSuccess && (
|
366 |
+
<div className="bg-zinc-100 dark:bg-zinc-900 p-2 border-t border-zinc-200 dark:border-zinc-800 flex justify-end">
|
367 |
+
<a
|
368 |
+
href={htmlPreviewUrl}
|
369 |
+
target="_blank"
|
370 |
+
rel="noopener noreferrer"
|
371 |
+
className="flex items-center gap-1.5 py-1 px-2 text-xs text-zinc-700 dark:text-zinc-300 bg-zinc-200 dark:bg-zinc-800 hover:bg-zinc-300 dark:hover:bg-zinc-700 rounded transition-colors"
|
372 |
+
>
|
373 |
+
<ExternalLink className="h-3.5 w-3.5 text-zinc-500 flex-shrink-0" />
|
374 |
+
<span>Open in Browser</span>
|
375 |
+
</a>
|
376 |
+
</div>
|
377 |
+
)}
|
378 |
+
</div>
|
379 |
+
)}
|
380 |
+
|
381 |
+
{/* File Content for streaming state */}
|
382 |
+
{operation !== "delete" && isStreaming && (
|
383 |
+
<div className="border border-zinc-200 dark:border-zinc-800 rounded-md overflow-hidden shadow-sm bg-white dark:bg-zinc-950 h-full flex flex-col">
|
384 |
+
{/* IDE Header */}
|
385 |
+
<div className="flex items-center p-2 bg-zinc-100 dark:bg-zinc-900 text-zinc-900 dark:text-zinc-100 justify-between border-b border-zinc-200 dark:border-zinc-800">
|
386 |
+
<div className="flex items-center">
|
387 |
+
<FileSymlink className="h-4 w-4 mr-2 text-zinc-600 dark:text-zinc-400" />
|
388 |
+
<span className="text-xs font-medium">{fileName || 'file.txt'}</span>
|
389 |
+
</div>
|
390 |
+
<span className="text-xs text-zinc-500 dark:text-zinc-400 bg-zinc-200 dark:bg-zinc-800 px-2 py-0.5 rounded">
|
391 |
+
{fileType || 'Text'}
|
392 |
+
</span>
|
393 |
+
</div>
|
394 |
+
|
395 |
+
{/* Streaming state */}
|
396 |
+
<div className="flex-1 flex items-center justify-center p-8 bg-white dark:bg-zinc-950">
|
397 |
+
<div className="text-center">
|
398 |
+
<CircleDashed className="h-8 w-8 mx-auto mb-3 text-blue-500 animate-spin" />
|
399 |
+
<p className="text-sm font-medium text-zinc-700 dark:text-zinc-300">
|
400 |
+
{operation === "create" ? "Creating file..." : "Rewriting file..."}
|
401 |
+
</p>
|
402 |
+
<p className="text-xs mt-1 text-zinc-500 dark:text-zinc-400">
|
403 |
+
{processedFilePath || "Processing file operation"}
|
404 |
+
</p>
|
405 |
+
</div>
|
406 |
+
</div>
|
407 |
+
</div>
|
408 |
+
)}
|
409 |
+
|
410 |
+
{/* Delete view with file path */}
|
411 |
+
{operation === "delete" && processedFilePath && !isStreaming && (
|
412 |
+
<div className="border border-zinc-200 dark:border-zinc-800 rounded-md overflow-hidden h-full flex flex-col">
|
413 |
+
<div className="p-6 flex-1 flex flex-col items-center justify-center bg-white dark:bg-zinc-950 text-zinc-900 dark:text-zinc-100">
|
414 |
+
<div className="w-14 h-14 rounded-full bg-red-50 dark:bg-red-900/20 flex items-center justify-center mb-4">
|
415 |
+
<FileX className="h-7 w-7 text-red-600 dark:text-red-400" />
|
416 |
+
</div>
|
417 |
+
<h3 className="text-lg font-medium mb-4 text-red-600 dark:text-red-400">File Deleted</h3>
|
418 |
+
<div className="bg-zinc-50 dark:bg-zinc-900 border border-zinc-200 dark:border-zinc-800 rounded-md p-4 w-full max-w-md text-center mb-2">
|
419 |
+
<code className="text-sm font-mono text-zinc-700 dark:text-zinc-300 break-all">{processedFilePath}</code>
|
420 |
+
</div>
|
421 |
+
<p className="text-sm text-zinc-500 dark:text-zinc-400 mt-2">This file has been permanently removed</p>
|
422 |
+
</div>
|
423 |
+
</div>
|
424 |
+
)}
|
425 |
+
|
426 |
+
{/* Delete view streaming state */}
|
427 |
+
{operation === "delete" && isStreaming && (
|
428 |
+
<div className="border border-zinc-200 dark:border-zinc-800 rounded-md overflow-hidden h-full flex flex-col">
|
429 |
+
<div className="p-6 flex-1 flex flex-col items-center justify-center bg-white dark:bg-zinc-950">
|
430 |
+
<div className="text-center">
|
431 |
+
<CircleDashed className="h-8 w-8 mx-auto mb-3 text-blue-500 animate-spin" />
|
432 |
+
<p className="text-sm font-medium text-zinc-700 dark:text-zinc-300">Deleting file...</p>
|
433 |
+
{processedFilePath && (
|
434 |
+
<p className="text-xs mt-2 font-mono text-zinc-500 dark:text-zinc-400 break-all">
|
435 |
+
{processedFilePath}
|
436 |
+
</p>
|
437 |
+
)}
|
438 |
+
</div>
|
439 |
+
</div>
|
440 |
+
</div>
|
441 |
+
)}
|
442 |
+
|
443 |
+
{/* Delete view with unknown path */}
|
444 |
+
{operation === "delete" && !processedFilePath && !showDebugInfo && !isStreaming && (
|
445 |
+
<div className="border border-zinc-200 dark:border-zinc-800 rounded-md overflow-hidden h-full flex flex-col">
|
446 |
+
<div className="p-6 flex-1 flex flex-col items-center justify-center bg-white dark:bg-zinc-950 text-zinc-900 dark:text-zinc-100">
|
447 |
+
<div className="w-14 h-14 rounded-full bg-red-50 dark:bg-red-900/20 flex items-center justify-center mb-4">
|
448 |
+
<FileX className="h-7 w-7 text-red-600 dark:text-red-400" />
|
449 |
+
</div>
|
450 |
+
<h3 className="text-lg font-medium mb-4 text-red-600 dark:text-red-400">File Deleted</h3>
|
451 |
+
<div className="bg-zinc-50 dark:bg-zinc-900 border border-zinc-200 dark:border-zinc-800 rounded-md p-4 w-full max-w-md text-center mb-2">
|
452 |
+
<p className="text-sm text-zinc-700 dark:text-zinc-300">Unknown file path</p>
|
453 |
+
</div>
|
454 |
+
<p className="text-sm text-zinc-500 dark:text-zinc-400 mt-2">A file has been deleted but the path could not be determined</p>
|
455 |
+
</div>
|
456 |
+
</div>
|
457 |
+
)}
|
458 |
+
</div>
|
459 |
+
|
460 |
+
{/* Footer */}
|
461 |
+
<div className="p-4 border-t border-zinc-200 dark:border-zinc-800">
|
462 |
+
<div className="flex items-center justify-between text-xs text-zinc-500 dark:text-zinc-400">
|
463 |
+
{!isStreaming && (
|
464 |
+
<div className="flex items-center gap-2">
|
465 |
+
{isSuccess ? (
|
466 |
+
<CheckCircle className="h-3.5 w-3.5 text-emerald-500" />
|
467 |
+
) : (
|
468 |
+
<AlertTriangle className="h-3.5 w-3.5 text-red-500" />
|
469 |
+
)}
|
470 |
+
<span>
|
471 |
+
{isSuccess ? config.successMessage : `Failed to ${operation} file`}
|
472 |
+
</span>
|
473 |
+
</div>
|
474 |
+
)}
|
475 |
+
|
476 |
+
{isStreaming && (
|
477 |
+
<div className="flex items-center gap-2">
|
478 |
+
<CircleDashed className="h-3.5 w-3.5 text-blue-500 animate-spin" />
|
479 |
+
<span>Processing file operation...</span>
|
480 |
+
</div>
|
481 |
+
)}
|
482 |
+
|
483 |
+
<div className="text-xs">
|
484 |
+
{toolTimestamp && !isStreaming
|
485 |
+
? formatTimestamp(toolTimestamp)
|
486 |
+
: assistantTimestamp
|
487 |
+
? formatTimestamp(assistantTimestamp)
|
488 |
+
: ''}
|
489 |
+
</div>
|
490 |
+
</div>
|
491 |
+
</div>
|
492 |
+
</div>
|
493 |
+
);
|
494 |
+
}
|
Frame 50.svg
ADDED
GenericToolView.tsx
ADDED
@@ -0,0 +1,133 @@
import React from "react";
import { ToolViewProps } from "./types";
import { formatTimestamp, getToolTitle } from "./utils";
import { getToolIcon } from "../utils";
import { CircleDashed, CheckCircle, AlertTriangle } from "lucide-react";
import { Markdown } from "@/components/ui/markdown";
import { cn } from "@/lib/utils";

export function GenericToolView({
  name = 'unknown',
  assistantContent,
  toolContent,
  isSuccess = true,
  isStreaming = false,
  assistantTimestamp,
  toolTimestamp
}: ToolViewProps) {
  console.log('GenericToolView:', {
    name,
    assistantContent,
    toolContent,
    isSuccess,
    isStreaming,
    assistantTimestamp,
    toolTimestamp
  });

  const toolTitle = getToolTitle(name);
  const Icon = getToolIcon(name);

  // Format content for display
  const formatContent = (content: string | null) => {
    if (!content) return null;

    try {
      // Try to parse as JSON for pretty formatting
      const parsedJson = JSON.parse(content);
      return JSON.stringify(parsedJson, null, 2);
    } catch (e) {
      // If not valid JSON, return as is
      return content;
    }
  };

  // Format the contents
  const formattedAssistantContent = React.useMemo(() => formatContent(assistantContent), [assistantContent]);
  const formattedToolContent = React.useMemo(() => formatContent(toolContent), [toolContent]);

  return (
    <div className="flex flex-col h-full">
      <div className="flex-1 p-4 overflow-auto">
        {/* Assistant Content */}
        {assistantContent && !isStreaming && (
          <div className="space-y-1.5">
            <div className="flex justify-between items-center">
              <div className="text-xs font-medium text-zinc-500 dark:text-zinc-400">Input</div>
              {assistantTimestamp && (
                <div className="text-xs text-zinc-500 dark:text-zinc-400">{formatTimestamp(assistantTimestamp)}</div>
              )}
            </div>
            <div className="rounded-md border border-zinc-200 dark:border-zinc-800 bg-zinc-50 dark:bg-zinc-900 p-3">
              <Markdown className="text-xs text-zinc-800 dark:text-zinc-300">{formattedAssistantContent}</Markdown>
            </div>
          </div>
        )}

        {/* Tool Result */}
        {toolContent && (
          <div className="space-y-1.5 mt-4">
            <div className="flex justify-between items-center">
              <div className="text-xs font-medium text-zinc-500 dark:text-zinc-400">
                {isStreaming ? "Processing" : "Output"}
              </div>
              {toolTimestamp && !isStreaming && (
                <div className="text-xs text-zinc-500 dark:text-zinc-400">{formatTimestamp(toolTimestamp)}</div>
              )}
            </div>
            <div className={cn(
              "rounded-md border p-3",
              isStreaming
                ? 'border-blue-200 bg-blue-50 dark:border-blue-800 dark:bg-blue-900/10'
                : isSuccess
                  ? 'border-zinc-200 bg-zinc-50 dark:border-zinc-800 dark:bg-zinc-900'
                  : 'border-red-200 bg-red-50 dark:border-red-800 dark:bg-red-900/10'
            )}>
              {isStreaming ? (
                <div className="flex items-center gap-2 text-xs font-medium text-blue-700 dark:text-blue-400">
                  <CircleDashed className="h-3 w-3 animate-spin" />
                  <span>Executing {toolTitle.toLowerCase()}...</span>
                </div>
              ) : (
                <Markdown className="text-xs text-zinc-800 dark:text-zinc-300">{formattedToolContent}</Markdown>
              )}
            </div>
          </div>
        )}
      </div>

      {/* Footer */}
      <div className="p-4 border-t border-zinc-200 dark:border-zinc-800">
        <div className="flex items-center justify-between text-xs text-zinc-500 dark:text-zinc-400">
          {!isStreaming && (
            <div className="flex items-center gap-2">
              {isSuccess ? (
                <CheckCircle className="h-3.5 w-3.5 text-emerald-500" />
              ) : (
                <AlertTriangle className="h-3.5 w-3.5 text-red-500" />
              )}
              <span>
                {isSuccess ? 'Completed successfully' : 'Execution failed'}
              </span>
            </div>
          )}

          {isStreaming && (
            <div className="flex items-center gap-2">
              <CircleDashed className="h-3.5 w-3.5 text-blue-500 animate-spin" />
              <span>Processing...</span>
            </div>
          )}

          <div className="text-xs">
            {toolTimestamp && !isStreaming
              ? formatTimestamp(toolTimestamp)
              : assistantTimestamp
                ? formatTimestamp(assistantTimestamp)
                : ''}
          </div>
        </div>
      </div>
    </div>
  );
}
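A minimal usage sketch for the component above, not part of the upload: the tool name, contents, and timestamps are illustrative placeholders, and `ToolViewProps` is assumed to accept exactly the props destructured in `GenericToolView`. Raw JSON strings work because `formatContent` pretty-prints anything that parses as JSON and falls back to the raw text otherwise.

```tsx
import { GenericToolView } from './GenericToolView';

export function ExampleToolCall() {
  // All prop values below are hypothetical sample data.
  return (
    <GenericToolView
      name="web-search"
      assistantContent='{"query": "latest model releases"}'
      toolContent='{"results": ["https://example.com/post"]}'
      isSuccess={true}
      isStreaming={false}
      assistantTimestamp="2025-04-16T13:37:00Z"
      toolTimestamp="2025-04-16T13:37:05Z"
    />
  );
}
```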
GoogleSignIn.tsx
ADDED
@@ -0,0 +1,182 @@
'use client';

import { useEffect, useCallback, useRef, useState } from 'react';
import Script from 'next/script';
import { createClient } from '@/lib/supabase/client';

// Add type declarations for Google One Tap
declare global {
  interface Window {
    handleGoogleSignIn?: (response: GoogleSignInResponse) => void;
    google: {
      accounts: {
        id: {
          initialize: (config: GoogleInitializeConfig) => void;
          renderButton: (element: HTMLElement, options: GoogleButtonOptions) => void;
          prompt: (callback?: (notification: GoogleNotification) => void) => void;
          cancel: () => void;
        };
      };
    };
  }
}

// Define types for Google Sign-In
interface GoogleSignInResponse {
  credential: string;
  clientId?: string;
  select_by?: string;
}

interface GoogleInitializeConfig {
  client_id: string | undefined;
  callback: ((response: GoogleSignInResponse) => void) | undefined;
  nonce?: string;
  use_fedcm?: boolean;
  context?: string;
  itp_support?: boolean;
}

interface GoogleButtonOptions {
  type?: string;
  theme?: string;
  size?: string;
  text?: string;
  shape?: string;
  logoAlignment?: string;
  width?: number;
}

interface GoogleNotification {
  isNotDisplayed: () => boolean;
  getNotDisplayedReason: () => string;
  isSkippedMoment: () => boolean;
  getSkippedReason: () => string;
  isDismissedMoment: () => boolean;
  getDismissedReason: () => string;
}

interface GoogleSignInProps {
  returnUrl?: string;
}

export default function GoogleSignIn({ returnUrl }: GoogleSignInProps) {
  const googleClientId = process.env.NEXT_PUBLIC_GOOGLE_CLIENT_ID;
  const [isLoading, setIsLoading] = useState(false);

  const handleGoogleSignIn = useCallback(async (response: GoogleSignInResponse) => {
    try {
      setIsLoading(true);
      const supabase = createClient();
      const { error } = await supabase.auth.signInWithIdToken({
        provider: 'google',
        token: response.credential,
      });

      if (error) throw error;

      // Add a small delay before redirecting to ensure localStorage is properly saved
      setTimeout(() => {
        window.location.href = returnUrl || "/dashboard";
      }, 100);
    } catch (error) {
      console.error('Error signing in with Google:', error);
      setIsLoading(false);
    }
  }, [returnUrl]);

  useEffect(() => {
    // Assign the callback to window object so it can be called from the Google button
    window.handleGoogleSignIn = handleGoogleSignIn;

    if (window.google && googleClientId) {
      window.google.accounts.id.initialize({
        client_id: googleClientId,
        callback: handleGoogleSignIn,
        use_fedcm: true,
        context: 'signin',
        itp_support: true
      });
    }

    return () => {
      // Cleanup
      delete window.handleGoogleSignIn;
      if (window.google) {
        window.google.accounts.id.cancel();
      }
    };
  }, [googleClientId, handleGoogleSignIn]);

  if (!googleClientId) {
    return (
      <button
        disabled
        className="w-full h-12 flex items-center justify-center gap-2 text-sm font-medium tracking-wide rounded-full bg-background border border-border opacity-60 cursor-not-allowed"
      >
        <svg className="w-5 h-5" viewBox="0 0 24 24">
          <path d="M22.56 12.25c0-.78-.07-1.53-.2-2.25H12v4.26h5.92c-.26 1.37-1.04 2.53-2.21 3.31v2.77h3.57c2.08-1.92 3.28-4.74 3.28-8.09z" fill="#4285F4"/>
          <path d="M12 23c2.97 0 5.46-.98 7.28-2.66l-3.57-2.77c-.98.66-2.23 1.06-3.71 1.06-2.86 0-5.29-1.93-6.16-4.53H2.18v2.84C3.99 20.53 7.7 23 12 23z" fill="#34A853"/>
          <path d="M5.84 14.09c-.22-.66-.35-1.36-.35-2.09s.13-1.43.35-2.09V7.07H2.18C1.43 8.55 1 10.22 1 12s.43 3.45 1.18 4.93l2.85-2.22.81-.62z" fill="#FBBC05"/>
          <path d="M12 5.38c1.62 0 3.06.56 4.21 1.64l3.15-3.15C17.45 2.09 14.97 1 12 1 7.7 1 3.99 3.47 2.18 7.07l3.66 2.84c.87-2.6 3.3-4.53 6.16-4.53z" fill="#EA4335"/>
        </svg>
        Google Sign-In Not Configured
      </button>
    );
  }

  return (
    <>
      {/* Google One Tap container */}
      <div
        id="g_id_onload"
        data-client_id={googleClientId}
        data-context="signin"
        data-ux_mode="popup"
        data-auto_prompt="false"
        data-itp_support="true"
        data-callback="handleGoogleSignIn"
      />

      {/* Google Sign-In button container styled to match site design */}
      <div id="google-signin-button" className="w-full h-12">
        {/* The Google button will be rendered here */}
      </div>

      <Script
        src="https://accounts.google.com/gsi/client"
        strategy="afterInteractive"
        onLoad={() => {
          if (window.google && googleClientId) {
            // Style the button after Google script loads
            const buttonContainer = document.getElementById('google-signin-button');
            if (buttonContainer) {
              window.google.accounts.id.renderButton(buttonContainer, {
                type: 'standard',
                theme: 'outline',
                size: 'large',
                text: 'continue_with',
                shape: 'pill',
                logoAlignment: 'left',
                width: buttonContainer.offsetWidth
              });

              // Apply custom styles to match site design
              setTimeout(() => {
                const googleButton = buttonContainer.querySelector('div[role="button"]');
                if (googleButton instanceof HTMLElement) {
                  googleButton.style.borderRadius = '9999px';
                  googleButton.style.width = '100%';
                  googleButton.style.height = '56px';
                  googleButton.style.border = '1px solid var(--border)';
                  googleButton.style.background = 'var(--background)';
                  googleButton.style.transition = 'all 0.2s';
                }
              }, 100);
            }
          }
        }}
      />
    </>
  );
}
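A sketch of how the sign-in button might be dropped into a page; the import path and surrounding markup are assumptions, not part of the upload. The component itself only needs `NEXT_PUBLIC_GOOGLE_CLIENT_ID` to be set; without it, it renders the disabled fallback button shown above.

```tsx
import GoogleSignIn from '@/components/GoogleSignIn'; // hypothetical import path

export default function LoginPage() {
  return (
    <div className="mx-auto flex max-w-sm flex-col gap-4 py-16">
      <h1 className="text-lg font-medium">Sign in</h1>
      {/* After a successful supabase.auth.signInWithIdToken call, the component redirects here */}
      <GoogleSignIn returnUrl="/dashboard" />
    </div>
  );
}
```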
LICENSE
ADDED
@@ -0,0 +1,201 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
LinkedinProvider.py
ADDED
@@ -0,0 +1,250 @@
from typing import Dict

from agent.tools.data_providers.RapidDataProviderBase import RapidDataProviderBase, EndpointSchema


class LinkedinProvider(RapidDataProviderBase):
    def __init__(self):
        endpoints: Dict[str, EndpointSchema] = {
            "person": {
                "route": "/person",
                "method": "POST",
                "name": "Person Data",
                "description": "Fetches any Linkedin profiles data including skills, certificates, experiences, qualifications and much more.",
                "payload": {
                    "link": "LinkedIn Profile URL"
                }
            },
            "person_urn": {
                "route": "/person_urn",
                "method": "POST",
                "name": "Person Data (Using Urn)",
                "description": "It takes profile urn instead of profile public identifier in input",
                "payload": {
                    "link": "LinkedIn Profile URL or URN"
                }
            },
            "person_deep": {
                "route": "/person_deep",
                "method": "POST",
                "name": "Person Data (Deep)",
                "description": "Fetches all experiences, educations, skills, languages, publications... related to a profile.",
                "payload": {
                    "link": "LinkedIn Profile URL"
                }
            },
            "profile_updates": {
                "route": "/profile_updates",
                "method": "GET",
                "name": "Person Posts (WITH PAGINATION)",
                "description": "Fetches posts of a linkedin profile alongwith reactions, comments, postLink and reposts data.",
                "payload": {
                    "profile_url": "LinkedIn Profile URL",
                    "page": "Page number",
                    "reposts": "Include reposts (1 or 0)",
                    "comments": "Include comments (1 or 0)"
                }
            },
            "profile_recent_comments": {
                "route": "/profile_recent_comments",
                "method": "POST",
                "name": "Person Recent Activity (Comments on Posts)",
                "description": "Fetches 20 most recent comments posted by a linkedin user (per page).",
                "payload": {
                    "profile_url": "LinkedIn Profile URL",
                    "page": "Page number",
                    "paginationToken": "Token for pagination"
                }
            },
            "comments_from_recent_activity": {
                "route": "/comments_from_recent_activity",
                "method": "GET",
                "name": "Comments from recent activity",
                "description": "Fetches recent comments posted by a person as per his recent activity tab.",
                "payload": {
                    "profile_url": "LinkedIn Profile URL",
                    "page": "Page number"
                }
            },
            "person_skills": {
                "route": "/person_skills",
                "method": "POST",
                "name": "Person Skills",
                "description": "Scraper all skills of a linkedin user",
                "payload": {
                    "link": "LinkedIn Profile URL"
                }
            },
            "email_to_linkedin_profile": {
                "route": "/email_to_linkedin_profile",
                "method": "POST",
                "name": "Email to LinkedIn Profile",
                "description": "Finds LinkedIn profile associated with an email address",
                "payload": {
                    "email": "Email address to search"
                }
            },
            "company": {
                "route": "/company",
                "method": "POST",
                "name": "Company Data",
                "description": "Fetches LinkedIn company profile data",
                "payload": {
                    "link": "LinkedIn Company URL"
                }
            },
            "web_domain": {
                "route": "/web-domain",
                "method": "POST",
                "name": "Web Domain to Company",
                "description": "Fetches LinkedIn company profile data from a web domain",
                "payload": {
                    "link": "Website domain (e.g., huzzle.app)"
                }
            },
            "similar_profiles": {
                "route": "/similar_profiles",
                "method": "GET",
                "name": "Similar Profiles",
                "description": "Fetches profiles similar to a given LinkedIn profile",
                "payload": {
                    "profileUrl": "LinkedIn Profile URL"
                }
            },
            "company_jobs": {
                "route": "/company_jobs",
                "method": "POST",
                "name": "Company Jobs",
                "description": "Fetches job listings from a LinkedIn company page",
                "payload": {
                    "company_url": "LinkedIn Company URL",
                    "count": "Number of job listings to fetch"
                }
            },
            "company_updates": {
                "route": "/company_updates",
                "method": "GET",
                "name": "Company Posts",
                "description": "Fetches posts from a LinkedIn company page",
                "payload": {
                    "company_url": "LinkedIn Company URL",
                    "page": "Page number",
                    "reposts": "Include reposts (0, 1, or 2)",
                    "comments": "Include comments (0, 1, or 2)"
                }
            },
            "company_employee": {
                "route": "/company_employee",
                "method": "GET",
                "name": "Company Employees",
                "description": "Fetches employees of a LinkedIn company using company ID",
                "payload": {
                    "companyId": "LinkedIn Company ID",
                    "page": "Page number"
                }
            },
            "company_updates_post": {
                "route": "/company_updates",
                "method": "POST",
                "name": "Company Posts (POST)",
                "description": "Fetches posts from a LinkedIn company page with specific count parameters",
                "payload": {
                    "company_url": "LinkedIn Company URL",
                    "posts": "Number of posts to fetch",
                    "comments": "Number of comments to fetch per post",
                    "reposts": "Number of reposts to fetch"
                }
            },
            "search_posts_with_filters": {
                "route": "/search_posts_with_filters",
                "method": "GET",
                "name": "Search Posts With Filters",
                "description": "Searches LinkedIn posts with various filtering options",
                "payload": {
                    "query": "Keywords/Search terms (text you put in LinkedIn search bar)",
                    "page": "Page number (1-100, each page contains 20 results)",
                    "sort_by": "Sort method: 'relevance' (Top match) or 'date_posted' (Latest)",
                    "author_job_title": "Filter by job title of author (e.g., CEO)",
                    "content_type": "Type of content post contains (photos, videos, liveVideos, collaborativeArticles, documents)",
                    "from_member": "URN of person who posted (comma-separated for multiple)",
                    "from_organization": "ID of organization who posted (comma-separated for multiple)",
                    "author_company": "ID of company author works for (comma-separated for multiple)",
                    "author_industry": "URN of industry author is connected with (comma-separated for multiple)",
                    "mentions_member": "URN of person mentioned in post (comma-separated for multiple)",
                    "mentions_organization": "ID of organization mentioned in post (comma-separated for multiple)"
                }
            },
            "search_jobs": {
                "route": "/search_jobs",
                "method": "GET",
                "name": "Search Jobs",
                "description": "Searches LinkedIn jobs with various filtering options",
                "payload": {
                    "query": "Job search keywords (e.g., Software developer)",
                    "page": "Page number",
                    "searchLocationId": "Location ID for job search (get from Suggestion location endpoint)",
                    "easyApply": "Filter for easy apply jobs (true or false)",
                    "experience": "Experience level required (1=Internship, 2=Entry level, 3=Associate, 4=Mid senior, 5=Director, 6=Executive, comma-separated)",
                    "jobType": "Job type (F=Full time, P=Part time, C=Contract, T=Temporary, V=Volunteer, I=Internship, O=Other, comma-separated)",
                    "postedAgo": "Time jobs were posted in seconds (e.g., 3600 for past hour)",
                    "workplaceType": "Workplace type (1=On-Site, 2=Remote, 3=Hybrid, comma-separated)",
                    "sortBy": "Sort method (DD=most recent, R=most relevant)",
                    "companyIdsList": "List of company IDs, comma-separated",
                    "industryIdsList": "List of industry IDs, comma-separated",
                    "functionIdsList": "List of function IDs, comma-separated",
                    "titleIdsList": "List of job title IDs, comma-separated",
                    "locationIdsList": "List of location IDs within specified searchLocationId country, comma-separated"
                }
            },
            "search_people_with_filters": {
                "route": "/search_people_with_filters",
                "method": "POST",
                "name": "Search People With Filters",
                "description": "Searches LinkedIn profiles with detailed filtering options",
                "payload": {
                    "keyword": "General search keyword",
                    "page": "Page number",
                    "title_free_text": "Job title to filter by (e.g., CEO)",
                    "company_free_text": "Company name to filter by",
                    "first_name": "First name of person",
                    "last_name": "Last name of person",
                    "current_company_list": "List of current companies (comma-separated IDs)",
                    "past_company_list": "List of past companies (comma-separated IDs)",
                    "location_list": "List of locations (comma-separated IDs)",
                    "language_list": "List of languages (comma-separated)",
                    "service_catagory_list": "List of service categories (comma-separated)",
                    "school_free_text": "School name to filter by",
                    "industry_list": "List of industries (comma-separated IDs)",
                    "school_list": "List of schools (comma-separated IDs)"
                }
            },
            "search_company_with_filters": {
                "route": "/search_company_with_filters",
                "method": "POST",
                "name": "Search Company With Filters",
                "description": "Searches LinkedIn companies with detailed filtering options",
                "payload": {
                    "keyword": "General search keyword",
                    "page": "Page number",
                    "company_size_list": "List of company sizes (comma-separated, e.g., A,D)",
                    "hasJobs": "Filter companies with jobs (true or false)",
                    "location_list": "List of location IDs (comma-separated)",
                    "industry_list": "List of industry IDs (comma-separated)"
                }
            }
        }
        base_url = "https://linkedin-data-scraper.p.rapidapi.com"
        super().__init__(base_url, endpoints)


if __name__ == "__main__":
    from dotenv import load_dotenv
    load_dotenv()
    tool = LinkedinProvider()

    result = tool.call_endpoint(
        route="comments_from_recent_activity",
        payload={"profile_url": "https://www.linkedin.com/in/adamcohenhillel/", "page": 1}
    )
    print(result)
MANIFEST.in
ADDED
@@ -0,0 +1,17 @@
# Include all Python files in agentpress directory
recursive-include agentpress *.py

# Include example files
recursive-include agentpress/examples *

# Include any other necessary files
include LICENSE
include README.md
include pyproject.toml

# Exclude unnecessary files
global-exclude *.pyc
global-exclude __pycache__
global-exclude .DS_Store
global-exclude *.pyo
global-exclude *.pyd
README.md
CHANGED
@@ -1,12 +1,36 @@
This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next-app`](https://nextjs.org/docs/app/api-reference/cli/create-next-app).

## Getting Started

First, run the development server:

```bash
npm run dev
# or
yarn dev
# or
pnpm dev
# or
bun dev
```

Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.

This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load [Geist](https://vercel.com/font), a new font family for Vercel.

## Learn More

To learn more about Next.js, take a look at the following resources:

- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.

You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome!

## Deploy on Vercel

The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.

Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details.
RapidDataProviderBase.py
ADDED
@@ -0,0 +1,61 @@
import os
import requests
from typing import Dict, Any, Optional, TypedDict, Literal


class EndpointSchema(TypedDict):
    route: str
    method: Literal['GET', 'POST']
    name: str
    description: str
    payload: Dict[str, Any]


class RapidDataProviderBase:
    def __init__(self, base_url: str, endpoints: Dict[str, EndpointSchema]):
        self.base_url = base_url
        self.endpoints = endpoints

    def get_endpoints(self):
        return self.endpoints

    def call_endpoint(
            self,
            route: str,
            payload: Optional[Dict[str, Any]] = None
    ):
        """
        Call an API endpoint with the given payload.

        Args:
            route (str): Key of the endpoint in the endpoints dictionary
            payload (dict, optional): Query parameters for GET requests or JSON body for POST requests

        Returns:
            dict: The JSON response from the API
        """
        if route.startswith("/"):
            route = route[1:]

        endpoint = self.endpoints.get(route)
        if not endpoint:
            raise ValueError(f"Endpoint {route} not found")

        url = f"{self.base_url}{endpoint['route']}"

        headers = {
            "x-rapidapi-key": os.getenv("RAPID_API_KEY"),
            "x-rapidapi-host": url.split("//")[1].split("/")[0],
            "Content-Type": "application/json"
        }

        method = endpoint.get('method', 'GET').upper()

        if method == 'GET':
            response = requests.get(url, params=payload, headers=headers)
        elif method == 'POST':
            response = requests.post(url, json=payload, headers=headers)
        else:
            raise ValueError(f"Unsupported HTTP method: {method}")
        return response.json()
StrReplaceToolView.tsx
ADDED
@@ -0,0 +1,160 @@
import React from "react";
import { FileSearch, FileDiff, CheckCircle, AlertTriangle, CircleDashed } from "lucide-react";
import { ToolViewProps } from "./types";
import { extractFilePath, extractStrReplaceContent, formatTimestamp, getToolTitle } from "./utils";
import { GenericToolView } from "./GenericToolView";
import { cn } from "@/lib/utils";

export function StrReplaceToolView({
  name = "str-replace",
  assistantContent,
  toolContent,
  assistantTimestamp,
  toolTimestamp,
  isSuccess = true,
  isStreaming = false
}: ToolViewProps) {
  const filePath = extractFilePath(assistantContent);
  const { oldStr, newStr } = extractStrReplaceContent(assistantContent);
  const toolTitle = getToolTitle(name);

  if (!oldStr || !newStr) {
    return (
      <GenericToolView
        name={name}
        assistantContent={assistantContent}
        toolContent={toolContent}
        assistantTimestamp={assistantTimestamp}
        toolTimestamp={toolTimestamp}
        isSuccess={isSuccess}
        isStreaming={isStreaming}
      />
    );
  }

  // Perform a character-level diff to identify changes
  const generateDiff = (oldText: string, newText: string) => {
    // Find common prefix length
    let prefixLength = 0;
    while (prefixLength < oldText.length && prefixLength < newText.length &&
           oldText[prefixLength] === newText[prefixLength]) {
      prefixLength++;
    }

    // Find common suffix length
    let oldSuffixStart = oldText.length;
    let newSuffixStart = newText.length;
    while (oldSuffixStart > prefixLength && newSuffixStart > prefixLength &&
           oldText[oldSuffixStart - 1] === newText[newSuffixStart - 1]) {
      oldSuffixStart--;
      newSuffixStart--;
    }

    // Generate unified diff parts
    const parts = [];

    // Add common prefix
    if (prefixLength > 0) {
      parts.push({ text: oldText.substring(0, prefixLength), type: 'unchanged' });
    }

    // Add the changed middle parts
    if (oldSuffixStart > prefixLength) {
      parts.push({ text: oldText.substring(prefixLength, oldSuffixStart), type: 'removed' });
    }
    if (newSuffixStart > prefixLength) {
      parts.push({ text: newText.substring(prefixLength, newSuffixStart), type: 'added' });
    }

    // Add common suffix
    if (oldSuffixStart < oldText.length) {
      parts.push({ text: oldText.substring(oldSuffixStart), type: 'unchanged' });
    }

    return parts;
  };

  const diffParts = generateDiff(oldStr, newStr);

  return (
    <div className="flex flex-col h-full">
      <div className="flex-1 p-4 overflow-auto">
        <div className="border border-zinc-200 dark:border-zinc-800 rounded-md overflow-hidden h-full flex flex-col">
          <div className="flex items-center p-2 bg-zinc-100 dark:bg-zinc-900 justify-between border-b border-zinc-200 dark:border-zinc-800">
            <div className="flex items-center">
              <FileDiff className="h-4 w-4 mr-2 text-zinc-600 dark:text-zinc-400" />
              <span className="text-xs font-medium text-zinc-700 dark:text-zinc-300">String Replacement</span>
            </div>
          </div>

          <div className="px-3 py-2 border-b border-zinc-200 dark:border-zinc-800 bg-zinc-50 dark:bg-zinc-900 flex items-center">
            <code className="text-xs font-mono text-zinc-700 dark:text-zinc-300">{filePath || 'Unknown file'}</code>
          </div>

          {isStreaming ? (
            <div className="flex-1 bg-white dark:bg-zinc-950 flex items-center justify-center">
              <div className="text-center p-6">
                <CircleDashed className="h-8 w-8 mx-auto mb-3 text-blue-500 animate-spin" />
                <p className="text-sm font-medium text-zinc-700 dark:text-zinc-300">Processing string replacement...</p>
                {filePath && (
                  <p className="text-xs mt-1 text-zinc-500 dark:text-zinc-400 font-mono">{filePath}</p>
                )}
              </div>
            </div>
          ) : (
            <div className="p-3 bg-white dark:bg-zinc-950 font-mono text-sm flex-1">
              {diffParts.map((part, i) => (
                <span
                  key={i}
                  className={cn(
                    part.type === 'removed' ? 'bg-red-50 text-red-700 dark:bg-red-900/20 dark:text-red-400 line-through mx-0.5' :
                    part.type === 'added' ? 'bg-emerald-50 text-emerald-700 dark:bg-emerald-900/20 dark:text-emerald-400 mx-0.5' :
                    'text-zinc-700 dark:text-zinc-300'
                  )}
                >
                  {part.text}
                </span>
              ))}
            </div>
          )}
        </div>
      </div>

      {/* Footer */}
      <div className="p-4 border-t border-zinc-200 dark:border-zinc-800">
        <div className="flex items-center justify-between text-xs text-zinc-500 dark:text-zinc-400">
          {!isStreaming && (
            <div className="flex items-center gap-2">
              {isSuccess ? (
                <CheckCircle className="h-3.5 w-3.5 text-emerald-500" />
              ) : (
                <AlertTriangle className="h-3.5 w-3.5 text-red-500" />
              )}
              <span>
                {isSuccess ? 'Replacement applied successfully' : 'Replacement failed'}
              </span>
            </div>
          )}

          {isStreaming && (
            <div className="flex items-center gap-2">
              <CircleDashed className="h-3.5 w-3.5 text-blue-500 animate-spin" />
              <span>Processing string replacement...</span>
            </div>
          )}

          <div className="text-xs">
            {toolTimestamp && !isStreaming
              ? formatTimestamp(toolTimestamp)
              : assistantTimestamp
                ? formatTimestamp(assistantTimestamp)
                : ''}
          </div>
        </div>
      </div>
    </div>
  );
}
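The prefix/suffix diff inside `StrReplaceToolView` can be read in isolation. Below is a standalone sketch of the same idea; the helper name, `DiffPart` type, and sample strings are hypothetical and only illustrate how the middle segment gets split into removed and added parts.

```tsx
type DiffPart = { text: string; type: 'unchanged' | 'removed' | 'added' };

// Same approach as generateDiff above: keep the longest common prefix and suffix,
// and mark whatever remains in the middle as removed (from old) or added (from new).
function diffStrings(oldText: string, newText: string): DiffPart[] {
  let prefix = 0;
  while (prefix < oldText.length && prefix < newText.length && oldText[prefix] === newText[prefix]) {
    prefix++;
  }

  let oldEnd = oldText.length;
  let newEnd = newText.length;
  while (oldEnd > prefix && newEnd > prefix && oldText[oldEnd - 1] === newText[newEnd - 1]) {
    oldEnd--;
    newEnd--;
  }

  const parts: DiffPart[] = [];
  if (prefix > 0) parts.push({ text: oldText.slice(0, prefix), type: 'unchanged' });
  if (oldEnd > prefix) parts.push({ text: oldText.slice(prefix, oldEnd), type: 'removed' });
  if (newEnd > prefix) parts.push({ text: newText.slice(prefix, newEnd), type: 'added' });
  if (oldEnd < oldText.length) parts.push({ text: oldText.slice(oldEnd), type: 'unchanged' });
  return parts;
}

// "color: " stays, "red" is marked removed, "blue" added, ";" stays.
console.log(diffStrings('color: red;', 'color: blue;'));
```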
TwitterProvider.py
ADDED
@@ -0,0 +1,240 @@
from typing import Dict

from agent.tools.data_providers.RapidDataProviderBase import RapidDataProviderBase, EndpointSchema


class TwitterProvider(RapidDataProviderBase):
    def __init__(self):
        endpoints: Dict[str, EndpointSchema] = {
            "user_info": {
                "route": "/screenname.php",
                "method": "GET",
                "name": "Twitter User Info",
                "description": "Get information about a Twitter user by screenname or user ID.",
                "payload": {
                    "screenname": "Twitter username without the @ symbol",
                    "rest_id": "Optional Twitter user's ID. If provided, overwrites screenname parameter."
                }
            },
            "timeline": {
                "route": "/timeline.php",
                "method": "GET",
                "name": "User Timeline",
                "description": "Get tweets from a user's timeline.",
                "payload": {
                    "screenname": "Twitter username without the @ symbol",
                    "rest_id": "Optional parameter that overwrites the screenname",
                    "cursor": "Optional pagination cursor"
                }
            },
            "following": {
                "route": "/following.php",
                "method": "GET",
                "name": "User Following",
                "description": "Get users that a specific user follows.",
                "payload": {
                    "screenname": "Twitter username without the @ symbol",
                    "rest_id": "Optional parameter that overwrites the screenname",
                    "cursor": "Optional pagination cursor"
                }
            },
            "followers": {
                "route": "/followers.php",
                "method": "GET",
                "name": "User Followers",
                "description": "Get followers of a specific user.",
                "payload": {
                    "screenname": "Twitter username without the @ symbol",
                    "cursor": "Optional pagination cursor"
                }
            },
            "search": {
                "route": "/search.php",
                "method": "GET",
                "name": "Twitter Search",
                "description": "Search for tweets with a specific query.",
                "payload": {
                    "query": "Search query string",
                    "cursor": "Optional pagination cursor",
                    "search_type": "Optional search type (e.g. 'Top')"
                }
            },
            "replies": {
                "route": "/replies.php",
                "method": "GET",
                "name": "User Replies",
                "description": "Get replies made by a user.",
                "payload": {
                    "screenname": "Twitter username without the @ symbol",
                    "cursor": "Optional pagination cursor"
                }
            },
            "check_retweet": {
                "route": "/checkretweet.php",
                "method": "GET",
                "name": "Check Retweet",
                "description": "Check if a user has retweeted a specific tweet.",
                "payload": {
                    "screenname": "Twitter username without the @ symbol",
                    "tweet_id": "ID of the tweet to check"
                }
            },
            "tweet": {
                "route": "/tweet.php",
                "method": "GET",
                "name": "Get Tweet",
                "description": "Get details of a specific tweet by ID.",
                "payload": {
                    "id": "ID of the tweet"
                }
            },
            "tweet_thread": {
                "route": "/tweet_thread.php",
                "method": "GET",
                "name": "Get Tweet Thread",
                "description": "Get a thread of tweets starting from a specific tweet ID.",
                "payload": {
                    "id": "ID of the tweet",
                    "cursor": "Optional pagination cursor"
                }
            },
            "retweets": {
                "route": "/retweets.php",
                "method": "GET",
                "name": "Get Retweets",
                "description": "Get users who retweeted a specific tweet.",
                "payload": {
                    "id": "ID of the tweet",
                    "cursor": "Optional pagination cursor"
                }
            },
            "latest_replies": {
                "route": "/latest_replies.php",
                "method": "GET",
                "name": "Get Latest Replies",
                "description": "Get the latest replies to a specific tweet.",
                "payload": {
                    "id": "ID of the tweet",
                    "cursor": "Optional pagination cursor"
                }
            }
        }
        base_url = "https://twitter-api45.p.rapidapi.com"
        super().__init__(base_url, endpoints)


if __name__ == "__main__":
    from dotenv import load_dotenv
    load_dotenv()
    tool = TwitterProvider()

    # Example for getting user info
    user_info = tool.call_endpoint(
        route="user_info",
        payload={
            "screenname": "elonmusk",
            # "rest_id": "44196397"  # Optional, uncomment to use user ID instead of screenname
        }
    )
    print("User Info:", user_info)

    # Example for getting user timeline
    timeline = tool.call_endpoint(
        route="timeline",
        payload={
            "screenname": "elonmusk",
            # "cursor": "optional-cursor-value"  # Optional for pagination
        }
    )
    print("Timeline:", timeline)

    # Example for getting user following
    following = tool.call_endpoint(
        route="following",
        payload={
            "screenname": "elonmusk",
            # "cursor": "optional-cursor-value"  # Optional for pagination
        }
    )
    print("Following:", following)

    # Example for getting user followers
    followers = tool.call_endpoint(
        route="followers",
        payload={
            "screenname": "elonmusk",
            # "cursor": "optional-cursor-value"  # Optional for pagination
        }
    )
    print("Followers:", followers)

    # Example for searching tweets
    search_results = tool.call_endpoint(
        route="search",
        payload={
            "query": "cybertruck",
            "search_type": "Top"  # Optional, defaults to Top
            # "cursor": "optional-cursor-value"  # Optional for pagination
        }
    )
    print("Search Results:", search_results)

    # Example for getting user replies
    replies = tool.call_endpoint(
        route="replies",
        payload={
            "screenname": "elonmusk",
            # "cursor": "optional-cursor-value"  # Optional for pagination
        }
    )
    print("Replies:", replies)

    # Example for checking if user retweeted a tweet
    check_retweet = tool.call_endpoint(
        route="check_retweet",
        payload={
            "screenname": "elonmusk",
            "tweet_id": "1671370010743263233"
        }
    )
    print("Check Retweet:", check_retweet)

    # Example for getting tweet details
    tweet = tool.call_endpoint(
        route="tweet",
        payload={
            "id": "1671370010743263233"
        }
    )
    print("Tweet:", tweet)

    # Example for getting a tweet thread
    tweet_thread = tool.call_endpoint(
        route="tweet_thread",
        payload={
            "id": "1738106896777699464",
            # "cursor": "optional-cursor-value"  # Optional for pagination
        }
    )
    print("Tweet Thread:", tweet_thread)

    # Example for getting retweets of a tweet
    retweets = tool.call_endpoint(
        route="retweets",
        payload={
            "id": "1700199139470942473",
            # "cursor": "optional-cursor-value"  # Optional for pagination
        }
    )
    print("Retweets:", retweets)

    # Example for getting latest replies to a tweet
    latest_replies = tool.call_endpoint(
        route="latest_replies",
        payload={
            "id": "1738106896777699464",
            # "cursor": "optional-cursor-value"  # Optional for pagination
        }
    )
    print("Latest Replies:", latest_replies)
WebCrawlToolView.tsx
ADDED
@@ -0,0 +1,156 @@
import React from "react";
import { Globe, CheckCircle, AlertTriangle, CircleDashed, ExternalLink } from "lucide-react";
import { ToolViewProps } from "./types";
import { extractCrawlUrl, extractWebpageContent, formatTimestamp, getToolTitle } from "./utils";
import { GenericToolView } from "./GenericToolView";
import { cn } from "@/lib/utils";

export function WebCrawlToolView({
  name = "crawl-webpage",
  assistantContent,
  toolContent,
  assistantTimestamp,
  toolTimestamp,
  isSuccess = true,
  isStreaming = false
}: ToolViewProps) {
  const url = extractCrawlUrl(assistantContent);
  const webpageContent = extractWebpageContent(toolContent);
  const toolTitle = getToolTitle(name);

  if (!url) {
    return (
      <GenericToolView
        name={name}
        assistantContent={assistantContent}
        toolContent={toolContent}
        assistantTimestamp={assistantTimestamp}
        toolTimestamp={toolTimestamp}
        isSuccess={isSuccess}
        isStreaming={isStreaming}
      />
    );
  }

  // Format domain for display
  const formatDomain = (url: string): string => {
    try {
      const urlObj = new URL(url);
      return urlObj.hostname.replace('www.', '');
    } catch (e) {
      return url;
    }
  };

  const domain = url ? formatDomain(url) : 'Unknown';

  return (
    <div className="flex flex-col h-full">
      <div className="flex-1 p-4 overflow-auto">
        <div className="border border-zinc-200 dark:border-zinc-800 rounded-md overflow-hidden h-full flex flex-col">
          {/* Webpage Header */}
          <div className="flex items-center p-2 bg-zinc-100 dark:bg-zinc-900 justify-between border-b border-zinc-200 dark:border-zinc-800">
            <div className="flex items-center">
              <Globe className="h-4 w-4 mr-2 text-zinc-600 dark:text-zinc-400" />
              <span className="text-xs font-medium text-zinc-700 dark:text-zinc-300">
                {toolTitle}
              </span>
            </div>

            {!isStreaming && (
              <div className="flex items-center gap-2">
                <span className={cn(
                  "text-xs flex items-center",
                  isSuccess ? "text-emerald-600 dark:text-emerald-400" : "text-red-600 dark:text-red-400"
                )}>
                  <span className="h-1.5 w-1.5 rounded-full mr-1.5 bg-current"></span>
                  {isSuccess ? 'Success' : 'Failed'}
                </span>

                <a
                  href={url}
                  target="_blank"
                  rel="noopener noreferrer"
                  className="flex items-center gap-1.5 py-1 px-2 text-xs text-zinc-700 dark:text-zinc-300 bg-zinc-200 dark:bg-zinc-800 hover:bg-zinc-300 dark:hover:bg-zinc-700 rounded transition-colors"
                >
                  <ExternalLink className="h-3.5 w-3.5 text-zinc-500 flex-shrink-0" />
                  <span>Open URL</span>
                </a>
              </div>
            )}
          </div>

          {/* URL Bar */}
          <div className="px-3 py-2 border-b border-zinc-200 dark:border-zinc-800 bg-zinc-50 dark:bg-zinc-900">
            <code className="text-xs font-mono text-zinc-700 dark:text-zinc-300">{url}</code>
          </div>

          {/* Content */}
          {isStreaming ? (
            <div className="flex-1 bg-white dark:bg-zinc-950 flex items-center justify-center">
              <div className="text-center p-6">
                <CircleDashed className="h-8 w-8 mx-auto mb-3 text-blue-500 animate-spin" />
                <p className="text-sm font-medium text-zinc-700 dark:text-zinc-300">Crawling webpage...</p>
                <p className="text-xs mt-1 text-zinc-500 dark:text-zinc-400">Fetching content from {domain}</p>
              </div>
            </div>
          ) : (
            <div className="flex-1 overflow-auto bg-white dark:bg-zinc-950 font-mono text-sm">
              {webpageContent ? (
                <div className="p-3">
                  <div className="text-xs font-medium text-zinc-500 dark:text-zinc-400 mb-2">Page Content</div>
                  <div className="bg-zinc-50 dark:bg-zinc-900 border border-zinc-200 dark:border-zinc-800 rounded-md">
                    <pre className="p-3 text-xs overflow-auto whitespace-pre-wrap text-zinc-800 dark:text-zinc-300 font-mono">
|
104 |
+
{webpageContent.text || "No content extracted"}
|
105 |
+
</pre>
|
106 |
+
</div>
|
107 |
+
</div>
|
108 |
+
) : (
|
109 |
+
<div className="p-6 h-full flex items-center justify-center">
|
110 |
+
<div className="text-center">
|
111 |
+
<Globe className="h-6 w-6 mx-auto mb-2 text-zinc-400 dark:text-zinc-500" />
|
112 |
+
<p className="text-sm font-medium text-zinc-700 dark:text-zinc-300">No content extracted</p>
|
113 |
+
<p className="text-xs mt-1 text-zinc-500 dark:text-zinc-400">The webpage might be restricted or empty</p>
|
114 |
+
</div>
|
115 |
+
</div>
|
116 |
+
)}
|
117 |
+
</div>
|
118 |
+
)}
|
119 |
+
</div>
|
120 |
+
</div>
|
121 |
+
|
122 |
+
{/* Footer */}
|
123 |
+
<div className="p-4 border-t border-zinc-200 dark:border-zinc-800">
|
124 |
+
<div className="flex items-center justify-between text-xs text-zinc-500 dark:text-zinc-400">
|
125 |
+
{!isStreaming && (
|
126 |
+
<div className="flex items-center gap-2">
|
127 |
+
{isSuccess ? (
|
128 |
+
<CheckCircle className="h-3.5 w-3.5 text-emerald-500" />
|
129 |
+
) : (
|
130 |
+
<AlertTriangle className="h-3.5 w-3.5 text-red-500" />
|
131 |
+
)}
|
132 |
+
<span>
|
133 |
+
{isSuccess ? `${toolTitle} completed successfully` : `${toolTitle} operation failed`}
|
134 |
+
</span>
|
135 |
+
</div>
|
136 |
+
)}
|
137 |
+
|
138 |
+
{isStreaming && (
|
139 |
+
<div className="flex items-center gap-2">
|
140 |
+
<CircleDashed className="h-3.5 w-3.5 text-blue-500 animate-spin" />
|
141 |
+
<span>Executing {toolTitle.toLowerCase()}...</span>
|
142 |
+
</div>
|
143 |
+
)}
|
144 |
+
|
145 |
+
<div className="text-xs">
|
146 |
+
{toolTimestamp && !isStreaming
|
147 |
+
? formatTimestamp(toolTimestamp)
|
148 |
+
: assistantTimestamp
|
149 |
+
? formatTimestamp(assistantTimestamp)
|
150 |
+
: ''}
|
151 |
+
</div>
|
152 |
+
</div>
|
153 |
+
</div>
|
154 |
+
</div>
|
155 |
+
);
|
156 |
+
}
|
WebScrapeToolView.tsx
ADDED
@@ -0,0 +1,156 @@
import React from "react";
import { Globe, CheckCircle, AlertTriangle, CircleDashed, ExternalLink } from "lucide-react";
import { ToolViewProps } from "./types";
import { extractCrawlUrl, extractWebpageContent, formatTimestamp, getToolTitle } from "./utils";
import { GenericToolView } from "./GenericToolView";
import { cn } from "@/lib/utils";

export function WebScrapeToolView({
  name = "scrape-webpage",
  assistantContent,
  toolContent,
  assistantTimestamp,
  toolTimestamp,
  isSuccess = true,
  isStreaming = false
}: ToolViewProps) {
  const url = extractCrawlUrl(assistantContent);
  const webpageContent = extractWebpageContent(toolContent);
  const toolTitle = getToolTitle(name);

  if (!url) {
    return (
      <GenericToolView
        name={name}
        assistantContent={assistantContent}
        toolContent={toolContent}
        assistantTimestamp={assistantTimestamp}
        toolTimestamp={toolTimestamp}
        isSuccess={isSuccess}
        isStreaming={isStreaming}
      />
    );
  }

  // Format domain for display
  const formatDomain = (url: string): string => {
    try {
      const urlObj = new URL(url);
      return urlObj.hostname.replace('www.', '');
    } catch (e) {
      return url;
    }
  };

  const domain = url ? formatDomain(url) : 'Unknown';

  return (
    <div className="flex flex-col h-full">
      <div className="flex-1 p-4 overflow-auto">
        <div className="border border-zinc-200 dark:border-zinc-800 rounded-md overflow-hidden h-full flex flex-col">
          {/* Webpage Header */}
          <div className="flex items-center p-2 bg-zinc-100 dark:bg-zinc-900 justify-between border-b border-zinc-200 dark:border-zinc-800">
            <div className="flex items-center">
              <Globe className="h-4 w-4 mr-2 text-zinc-600 dark:text-zinc-400" />
              <span className="text-xs font-medium text-zinc-700 dark:text-zinc-300">
                {toolTitle}
              </span>
            </div>

            {!isStreaming && (
              <div className="flex items-center gap-2">
                <span className={cn(
                  "text-xs flex items-center",
                  isSuccess ? "text-emerald-600 dark:text-emerald-400" : "text-red-600 dark:text-red-400"
                )}>
                  <span className="h-1.5 w-1.5 rounded-full mr-1.5 bg-current"></span>
                  {isSuccess ? 'Success' : 'Failed'}
                </span>

                <a
                  href={url}
                  target="_blank"
                  rel="noopener noreferrer"
                  className="flex items-center gap-1.5 py-1 px-2 text-xs text-zinc-700 dark:text-zinc-300 bg-zinc-200 dark:bg-zinc-800 hover:bg-zinc-300 dark:hover:bg-zinc-700 rounded transition-colors"
                >
                  <ExternalLink className="h-3.5 w-3.5 text-zinc-500 flex-shrink-0" />
                  <span>Open URL</span>
                </a>
              </div>
            )}
          </div>

          {/* URL Bar */}
          <div className="px-3 py-2 border-b border-zinc-200 dark:border-zinc-800 bg-zinc-50 dark:bg-zinc-900">
            <code className="text-xs font-mono text-zinc-700 dark:text-zinc-300">{url}</code>
          </div>

          {/* Content */}
          {isStreaming ? (
            <div className="flex-1 bg-white dark:bg-zinc-950 flex items-center justify-center">
              <div className="text-center p-6">
                <CircleDashed className="h-8 w-8 mx-auto mb-3 text-blue-500 animate-spin" />
                <p className="text-sm font-medium text-zinc-700 dark:text-zinc-300">Scraping webpage...</p>
                <p className="text-xs mt-1 text-zinc-500 dark:text-zinc-400">Fetching content from {domain}</p>
              </div>
            </div>
          ) : (
            <div className="flex-1 overflow-auto bg-white dark:bg-zinc-950 font-mono text-sm">
              {webpageContent ? (
                <div className="p-3">
                  <div className="text-xs font-medium text-zinc-500 dark:text-zinc-400 mb-2">Page Content</div>
                  <div className="bg-zinc-50 dark:bg-zinc-900 border border-zinc-200 dark:border-zinc-800 rounded-md">
                    <pre className="p-3 text-xs overflow-auto whitespace-pre-wrap text-zinc-800 dark:text-zinc-300 font-mono">
                      {webpageContent.text || "No content extracted"}
                    </pre>
                  </div>
                </div>
              ) : (
                <div className="p-6 h-full flex items-center justify-center">
                  <div className="text-center">
                    <Globe className="h-6 w-6 mx-auto mb-2 text-zinc-400 dark:text-zinc-500" />
                    <p className="text-sm font-medium text-zinc-700 dark:text-zinc-300">No content extracted</p>
                    <p className="text-xs mt-1 text-zinc-500 dark:text-zinc-400">The webpage might be restricted or empty</p>
                  </div>
                </div>
              )}
            </div>
          )}
        </div>
      </div>

      {/* Footer */}
      <div className="p-4 border-t border-zinc-200 dark:border-zinc-800">
        <div className="flex items-center justify-between text-xs text-zinc-500 dark:text-zinc-400">
          {!isStreaming && (
            <div className="flex items-center gap-2">
              {isSuccess ? (
                <CheckCircle className="h-3.5 w-3.5 text-emerald-500" />
              ) : (
                <AlertTriangle className="h-3.5 w-3.5 text-red-500" />
              )}
              <span>
                {isSuccess ? `${toolTitle} completed successfully` : `${toolTitle} operation failed`}
              </span>
            </div>
          )}

          {isStreaming && (
            <div className="flex items-center gap-2">
              <CircleDashed className="h-3.5 w-3.5 text-blue-500 animate-spin" />
              <span>Executing {toolTitle.toLowerCase()}...</span>
            </div>
          )}

          <div className="text-xs">
            {toolTimestamp && !isStreaming
              ? formatTimestamp(toolTimestamp)
              : assistantTimestamp
                ? formatTimestamp(assistantTimestamp)
                : ''}
          </div>
        </div>
      </div>
    </div>
  );
}
WebSearchToolView.tsx
ADDED
@@ -0,0 +1,129 @@
import React from "react";
import { Search, CircleDashed, CheckCircle, AlertTriangle, ExternalLink } from "lucide-react";
import { ToolViewProps } from "./types";
import { extractSearchQuery, extractSearchResults, cleanUrl, formatTimestamp, getToolTitle } from "./utils";
import { cn } from "@/lib/utils";

export function WebSearchToolView({
  name = "web-search",
  assistantContent,
  toolContent,
  assistantTimestamp,
  toolTimestamp,
  isSuccess = true,
  isStreaming = false
}: ToolViewProps) {
  const query = extractSearchQuery(assistantContent);
  const searchResults = extractSearchResults(toolContent);
  const toolTitle = getToolTitle(name);

  return (
    <div className="flex flex-col h-full">
      <div className="flex-1 p-4 overflow-auto">
        <div className="border border-zinc-200 dark:border-zinc-800 rounded-md overflow-hidden h-full flex flex-col">
          <div className="flex items-center p-2 bg-zinc-100 dark:bg-zinc-900 justify-between border-b border-zinc-200 dark:border-zinc-800">
            <div className="flex items-center">
              <Search className="h-4 w-4 mr-2 text-zinc-600 dark:text-zinc-400" />
              <span className="text-xs font-medium text-zinc-700 dark:text-zinc-300">{toolTitle}</span>
            </div>

            {!isStreaming && (
              <span className={cn(
                "text-xs flex items-center",
                isSuccess ? "text-emerald-600 dark:text-emerald-400" : "text-red-600 dark:text-red-400"
              )}>
                <span className="h-1.5 w-1.5 rounded-full mr-1.5 bg-current"></span>
                {isSuccess ? 'Success' : 'Failed'}
              </span>
            )}
          </div>

          <div className="px-3 py-2 border-b border-zinc-200 dark:border-zinc-800 bg-zinc-50 dark:bg-zinc-900">
            <code className="text-xs font-mono text-zinc-700 dark:text-zinc-300">{query || 'Unknown query'}</code>
          </div>

          <div className="flex-1 overflow-auto bg-white dark:bg-zinc-950 font-mono text-sm">
            {isStreaming ? (
              <div className="flex-1 flex items-center justify-center">
                <div className="text-center p-6">
                  <CircleDashed className="h-8 w-8 mx-auto mb-3 text-blue-500 animate-spin" />
                  <p className="text-sm font-medium text-zinc-700 dark:text-zinc-300">Searching the web...</p>
                  <p className="text-xs mt-1 text-zinc-500 dark:text-zinc-400">This might take a moment</p>
                </div>
              </div>
            ) : searchResults.length > 0 ? (
              <div className="p-3">
                <div className="text-xs font-medium text-zinc-500 dark:text-zinc-400 mb-3">
                  Found {searchResults.length} results
                </div>
                <div className="divide-y divide-zinc-100 dark:divide-zinc-800 bg-zinc-50 dark:bg-zinc-900 border border-zinc-200 dark:border-zinc-800 rounded-md">
                  {searchResults.map((result, idx) => (
                    <div key={idx} className="p-3 space-y-1">
                      <div className="flex flex-col">
                        <div className="text-xs text-zinc-500 dark:text-zinc-400 truncate mb-0.5">
                          {cleanUrl(result.url)}
                        </div>
                        <a
                          href={result.url}
                          target="_blank"
                          rel="noopener noreferrer"
                          className="text-sm text-blue-600 dark:text-blue-400 hover:underline font-medium flex items-center gap-1"
                        >
                          {result.title}
                          <ExternalLink className="h-3 w-3 opacity-60" />
                        </a>
                      </div>
                      {result.snippet && (
                        <p className="text-xs text-zinc-600 dark:text-zinc-400 line-clamp-2">
                          {result.snippet}
                        </p>
                      )}
                    </div>
                  ))}
                </div>
              </div>
            ) : (
              <div className="p-6 text-center flex-1 flex flex-col items-center justify-center h-full">
                <Search className="h-6 w-6 mx-auto mb-2 text-zinc-400 dark:text-zinc-500" />
                <p className="text-sm font-medium text-zinc-700 dark:text-zinc-300">No results found</p>
                <p className="text-xs mt-1 text-zinc-500 dark:text-zinc-400">Try refining your search query</p>
              </div>
            )}
          </div>
        </div>
      </div>

      <div className="p-4 border-t border-zinc-200 dark:border-zinc-800">
        <div className="flex items-center justify-between text-xs text-zinc-500 dark:text-zinc-400">
          {!isStreaming && (
            <div className="flex items-center gap-2">
              {isSuccess ? (
                <CheckCircle className="h-3.5 w-3.5 text-emerald-500" />
              ) : (
                <AlertTriangle className="h-3.5 w-3.5 text-red-500" />
              )}
              <span>
                {isSuccess ? `${toolTitle} completed successfully` : `${toolTitle} operation failed`}
              </span>
            </div>
          )}

          {isStreaming && (
            <div className="flex items-center gap-2">
              <CircleDashed className="h-3.5 w-3.5 text-blue-500 animate-spin" />
              <span>Executing {toolTitle.toLowerCase()}...</span>
            </div>
          )}

          <div className="text-xs">
            {toolTimestamp && !isStreaming
              ? formatTimestamp(toolTimestamp)
              : assistantTimestamp
                ? formatTimestamp(assistantTimestamp)
                : ''}
          </div>
        </div>
      </div>
    </div>
  );
}
YahooFinanceProvider.py
ADDED
@@ -0,0 +1,190 @@
from typing import Dict

from agent.tools.data_providers.RapidDataProviderBase import RapidDataProviderBase, EndpointSchema


class YahooFinanceProvider(RapidDataProviderBase):
    def __init__(self):
        endpoints: Dict[str, EndpointSchema] = {
            "get_tickers": {
                "route": "/v2/markets/tickers",
                "method": "GET",
                "name": "Yahoo Finance Tickers",
                "description": "Get financial tickers from Yahoo Finance with various filters and parameters.",
                "payload": {
                    "page": "Page number for pagination (optional, default: 1)",
                    "type": "Asset class type (required): STOCKS, ETF, MUTUALFUNDS, or FUTURES",
                }
            },
            "search": {
                "route": "/v1/markets/search",
                "method": "GET",
                "name": "Yahoo Finance Search",
                "description": "Search for financial instruments on Yahoo Finance",
                "payload": {
                    "search": "Search term (required)",
                }
            },
            "get_news": {
                "route": "/v2/markets/news",
                "method": "GET",
                "name": "Yahoo Finance News",
                "description": "Get news related to specific tickers from Yahoo Finance",
                "payload": {
                    "tickers": "Stock symbol (optional, e.g., AAPL)",
                    "type": "News type (optional): ALL, VIDEO, or PRESS_RELEASE",
                }
            },
            "get_stock_module": {
                "route": "/v1/markets/stock/modules",
                "method": "GET",
                "name": "Yahoo Finance Stock Module",
                "description": "Get detailed information about a specific stock module",
                "payload": {
                    "ticker": "Company ticker symbol (required, e.g., AAPL)",
                    "module": "Module to retrieve (required): asset-profile, financial-data, earnings, etc.",
                }
            },
            "get_sma": {
                "route": "/v1/markets/indicators/sma",
                "method": "GET",
                "name": "Yahoo Finance SMA Indicator",
                "description": "Get Simple Moving Average (SMA) indicator data for a stock",
                "payload": {
                    "symbol": "Stock symbol (required, e.g., AAPL)",
                    "interval": "Time interval (required): 5m, 15m, 30m, 1h, 1d, 1wk, 1mo, 3mo",
                    "series_type": "Series type (required): open, close, high, low",
                    "time_period": "Number of data points used for calculation (required)",
                    "limit": "Limit the number of results (optional, default: 50)",
                }
            },
            "get_rsi": {
                "route": "/v1/markets/indicators/rsi",
                "method": "GET",
                "name": "Yahoo Finance RSI Indicator",
                "description": "Get Relative Strength Index (RSI) indicator data for a stock",
                "payload": {
                    "symbol": "Stock symbol (required, e.g., AAPL)",
                    "interval": "Time interval (required): 5m, 15m, 30m, 1h, 1d, 1wk, 1mo, 3mo",
                    "series_type": "Series type (required): open, close, high, low",
                    "time_period": "Number of data points used for calculation (required)",
                    "limit": "Limit the number of results (optional, default: 50)",
                }
            },
            "get_earnings_calendar": {
                "route": "/v1/markets/calendar/earnings",
                "method": "GET",
                "name": "Yahoo Finance Earnings Calendar",
                "description": "Get earnings calendar data for a specific date",
                "payload": {
                    "date": "Calendar date in yyyy-mm-dd format (optional, e.g., 2023-11-30)",
                }
            },
            "get_insider_trades": {
                "route": "/v1/markets/insider-trades",
                "method": "GET",
                "name": "Yahoo Finance Insider Trades",
                "description": "Get recent insider trading activity",
                "payload": {}
            },
        }
        base_url = "https://yahoo-finance15.p.rapidapi.com/api"
        super().__init__(base_url, endpoints)


if __name__ == "__main__":
    from dotenv import load_dotenv
    load_dotenv()
    tool = YahooFinanceProvider()

    # Example for getting stock tickers
    tickers_result = tool.call_endpoint(
        route="get_tickers",
        payload={
            "page": 1,
            "type": "STOCKS"
        }
    )
    print("Tickers Result:", tickers_result)

    # Example for searching financial instruments
    search_result = tool.call_endpoint(
        route="search",
        payload={
            "search": "AA"
        }
    )
    print("Search Result:", search_result)

    # Example for getting financial news
    news_result = tool.call_endpoint(
        route="get_news",
        payload={
            "tickers": "AAPL",
            "type": "ALL"
        }
    )
    print("News Result:", news_result)

    # Example for getting stock asset profile module
    stock_module_result = tool.call_endpoint(
        route="get_stock_module",
        payload={
            "ticker": "AAPL",
            "module": "asset-profile"
        }
    )
    print("Asset Profile Result:", stock_module_result)

    # Example for getting financial data module
    financial_data_result = tool.call_endpoint(
        route="get_stock_module",
        payload={
            "ticker": "AAPL",
            "module": "financial-data"
        }
    )
    print("Financial Data Result:", financial_data_result)

    # Example for getting SMA indicator data
    sma_result = tool.call_endpoint(
        route="get_sma",
        payload={
            "symbol": "AAPL",
            "interval": "5m",
            "series_type": "close",
            "time_period": "50",
            "limit": "50"
        }
    )
    print("SMA Result:", sma_result)

    # Example for getting RSI indicator data
    rsi_result = tool.call_endpoint(
        route="get_rsi",
        payload={
            "symbol": "AAPL",
            "interval": "5m",
            "series_type": "close",
            "time_period": "50",
            "limit": "50"
        }
    )
    print("RSI Result:", rsi_result)

    # Example for getting earnings calendar data
    earnings_calendar_result = tool.call_endpoint(
        route="get_earnings_calendar",
        payload={
            "date": "2023-11-30"
        }
    )
    print("Earnings Calendar Result:", earnings_calendar_result)

    # Example for getting insider trades
    insider_trades_result = tool.call_endpoint(
        route="get_insider_trades",
        payload={}
    )
    print("Insider Trades Result:", insider_trades_result)
ZillowProvider.py
ADDED
@@ -0,0 +1,187 @@
from typing import Dict
import logging

from agent.tools.data_providers.RapidDataProviderBase import RapidDataProviderBase, EndpointSchema

logger = logging.getLogger(__name__)


class ZillowProvider(RapidDataProviderBase):
    def __init__(self):
        endpoints: Dict[str, EndpointSchema] = {
            "search": {
                "route": "/search",
                "method": "GET",
                "name": "Zillow Property Search",
                "description": "Search for properties by neighborhood, city, or ZIP code with various filters.",
                "payload": {
                    "location": "Location can be an address, neighborhood, city, or ZIP code (required)",
                    "page": "Page number for pagination (optional, default: 0)",
                    "output": "Output format: json, csv, xlsx (optional, default: json)",
                    "status": "Status of properties: forSale, forRent, recentlySold (optional, default: forSale)",
                    "sortSelection": "Sorting criteria (optional, default: priorityscore)",
                    "listing_type": "Listing type: by_agent, by_owner_other (optional, default: by_agent)",
                    "doz": "Days on Zillow: any, 1, 7, 14, 30, 90, 6m, 12m, 24m, 36m (optional, default: any)",
                    "price_min": "Minimum price (optional)",
                    "price_max": "Maximum price (optional)",
                    "sqft_min": "Minimum square footage (optional)",
                    "sqft_max": "Maximum square footage (optional)",
                    "beds_min": "Minimum number of bedrooms (optional)",
                    "beds_max": "Maximum number of bedrooms (optional)",
                    "baths_min": "Minimum number of bathrooms (optional)",
                    "baths_max": "Maximum number of bathrooms (optional)",
                    "built_min": "Minimum year built (optional)",
                    "built_max": "Maximum year built (optional)",
                    "lotSize_min": "Minimum lot size in sqft (optional)",
                    "lotSize_max": "Maximum lot size in sqft (optional)",
                    "keywords": "Keywords to search for (optional)"
                }
            },
            "search_address": {
                "route": "/search_address",
                "method": "GET",
                "name": "Zillow Address Search",
                "description": "Search for a specific property by its full address.",
                "payload": {
                    "address": "Full property address (required)"
                }
            },
            "propertyV2": {
                "route": "/propertyV2",
                "method": "GET",
                "name": "Zillow Property Details",
                "description": "Get detailed information about a specific property by zpid or URL.",
                "payload": {
                    "zpid": "Zillow property ID (optional if URL is provided)",
                    "url": "Property details URL (optional if zpid is provided)"
                }
            },
            "zestimate_history": {
                "route": "/zestimate_history",
                "method": "GET",
                "name": "Zillow Zestimate History",
                "description": "Get historical Zestimate values for a specific property.",
                "payload": {
                    "zpid": "Zillow property ID (optional if URL is provided)",
                    "url": "Property details URL (optional if zpid is provided)"
                }
            },
            "similar_properties": {
                "route": "/similar_properties",
                "method": "GET",
                "name": "Zillow Similar Properties",
                "description": "Find properties similar to a specific property.",
                "payload": {
                    "zpid": "Zillow property ID (optional if URL or address is provided)",
                    "url": "Property details URL (optional if zpid or address is provided)",
                    "address": "Property address (optional if zpid or URL is provided)"
                }
            },
            "mortgage_rates": {
                "route": "/mortgage/rates",
                "method": "GET",
                "name": "Zillow Mortgage Rates",
                "description": "Get current mortgage rates for different loan programs and conditions.",
                "payload": {
                    "program": "Loan program (required): Fixed30Year, Fixed20Year, Fixed15Year, Fixed10Year, ARM3, ARM5, ARM7, etc.",
                    "state": "State abbreviation (optional, default: US)",
                    "refinance": "Whether this is for refinancing (optional, default: false)",
                    "loanType": "Type of loan: Conventional, etc. (optional)",
                    "loanAmount": "Loan amount category: Micro, SmallConforming, Conforming, SuperConforming, Jumbo (optional)",
                    "loanToValue": "Loan to value ratio: Normal, High, VeryHigh (optional)",
                    "creditScore": "Credit score category: Low, High, VeryHigh (optional)",
                    "duration": "Duration in days (optional, default: 30)"
                }
            },
        }
        base_url = "https://zillow56.p.rapidapi.com"
        super().__init__(base_url, endpoints)


if __name__ == "__main__":
    from dotenv import load_dotenv
    from time import sleep
    load_dotenv()
    tool = ZillowProvider()

    # Example for searching properties in Houston
    search_result = tool.call_endpoint(
        route="search",
        payload={
            "location": "houston, tx",
            "status": "forSale",
            "sortSelection": "priorityscore",
            "listing_type": "by_agent",
            "doz": "any"
        }
    )
    logger.debug("Search Result: %s", search_result)
    logger.debug("***")
    logger.debug("***")
    logger.debug("***")
    sleep(1)
    # Example for searching by address
    address_result = tool.call_endpoint(
        route="search_address",
        payload={
            "address": "1161 Natchez Dr College Station Texas 77845"
        }
    )
    logger.debug("Address Search Result: %s", address_result)
    logger.debug("***")
    logger.debug("***")
    logger.debug("***")
    sleep(1)
    # Example for getting property details
    property_result = tool.call_endpoint(
        route="propertyV2",
        payload={
            "zpid": "7594920"
        }
    )
    logger.debug("Property Details Result: %s", property_result)
    sleep(1)
    logger.debug("***")
    logger.debug("***")
    logger.debug("***")

    # Example for getting zestimate history
    zestimate_result = tool.call_endpoint(
        route="zestimate_history",
        payload={
            "zpid": "20476226"
        }
    )
    logger.debug("Zestimate History Result: %s", zestimate_result)
    sleep(1)
    logger.debug("***")
    logger.debug("***")
    logger.debug("***")
    # Example for getting similar properties
    similar_result = tool.call_endpoint(
        route="similar_properties",
        payload={
            "zpid": "28253016"
        }
    )
    logger.debug("Similar Properties Result: %s", similar_result)
    sleep(1)
    logger.debug("***")
    logger.debug("***")
    logger.debug("***")
    # Example for getting mortgage rates
    mortgage_result = tool.call_endpoint(
        route="mortgage_rates",
        payload={
            "program": "Fixed30Year",
            "state": "US",
            "refinance": "false",
            "loanType": "Conventional",
            "loanAmount": "Conforming",
            "loanToValue": "Normal",
            "creditScore": "Low",
            "duration": "30"
        }
    )
    logger.debug("Mortgage Rates Result: %s", mortgage_result)
__init__.py
ADDED
@@ -0,0 +1 @@
# Utility functions and constants for agent tools
accept-team-invitation.tsx
ADDED
@@ -0,0 +1,36 @@
import { acceptInvitation } from "@/lib/actions/invitations";
import { createClient } from "@/lib/supabase/server";
import { Alert } from "../ui/alert";
import { Card, CardContent } from "../ui/card";
import { SubmitButton } from "../ui/submit-button";

type Props = {
  token: string;
}
export default async function AcceptTeamInvitation({ token }: Props) {
  const supabaseClient = await createClient();
  const { data: invitation } = await supabaseClient.rpc('lookup_invitation', {
    lookup_invitation_token: token
  });

  return (
    <Card>
      <CardContent className="p-8 text-center flex flex-col gap-y-8">
        <div>
          <p>You've been invited to join</p>
          <h1 className="text-xl font-bold">{invitation.account_name}</h1>
        </div>
        {Boolean(invitation.active) ? (
          <form>
            <input type="hidden" name="token" value={token} />
            <SubmitButton formAction={acceptInvitation} pendingText="Accepting invitation...">Accept invitation</SubmitButton>
          </form>
        ) : (
          <Alert variant="destructive">
            This invitation has been deactivated. Please contact the account owner for a new invitation.
          </Alert>
        )}
      </CardContent>
    </Card>
  )
}
accordion.tsx
ADDED
@@ -0,0 +1,66 @@
"use client"

import * as React from "react"
import * as AccordionPrimitive from "@radix-ui/react-accordion"
import { ChevronDownIcon } from "lucide-react"

import { cn } from "@/lib/utils"

function Accordion({
  ...props
}: React.ComponentProps<typeof AccordionPrimitive.Root>) {
  return <AccordionPrimitive.Root data-slot="accordion" {...props} />
}

function AccordionItem({
  className,
  ...props
}: React.ComponentProps<typeof AccordionPrimitive.Item>) {
  return (
    <AccordionPrimitive.Item
      data-slot="accordion-item"
      className={cn("border-b last:border-b-0", className)}
      {...props}
    />
  )
}

function AccordionTrigger({
  className,
  children,
  ...props
}: React.ComponentProps<typeof AccordionPrimitive.Trigger>) {
  return (
    <AccordionPrimitive.Header className="flex">
      <AccordionPrimitive.Trigger
        data-slot="accordion-trigger"
        className={cn(
          "focus-visible:border-ring focus-visible:ring-ring/50 flex flex-1 items-start justify-between gap-4 rounded-md py-4 text-left text-sm font-medium transition-all outline-none hover:underline focus-visible:ring-[3px] disabled:pointer-events-none disabled:opacity-50 [&[data-state=open]>svg]:rotate-180",
          className
        )}
        {...props}
      >
        {children}
        <ChevronDownIcon className="text-muted-foreground pointer-events-none size-4 shrink-0 translate-y-0.5 transition-transform duration-200" />
      </AccordionPrimitive.Trigger>
    </AccordionPrimitive.Header>
  )
}

function AccordionContent({
  className,
  children,
  ...props
}: React.ComponentProps<typeof AccordionPrimitive.Content>) {
  return (
    <AccordionPrimitive.Content
      data-slot="accordion-content"
      className="data-[state=closed]:animate-accordion-up data-[state=open]:animate-accordion-down overflow-hidden text-sm"
      {...props}
    >
      <div className={cn("pt-0 pb-4", className)}>{children}</div>
    </AccordionPrimitive.Content>
  )
}

export { Accordion, AccordionItem, AccordionTrigger, AccordionContent }
account-billing-status.tsx
ADDED
@@ -0,0 +1,178 @@
import { createClient } from "@/lib/supabase/server";
import { SubmitButton } from "../ui/submit-button";
import { manageSubscription } from "@/lib/actions/billing";
import { PlanComparison, SUBSCRIPTION_PLANS } from "../billing/plan-comparison";
import { isLocalMode } from "@/lib/config";

type Props = {
  accountId: string;
  returnUrl: string;
}

export default async function AccountBillingStatus({ accountId, returnUrl }: Props) {
  // In local development mode, show a simplified component
  if (isLocalMode()) {
    return (
      <div className="rounded-xl border shadow-sm bg-card p-6">
        <h2 className="text-xl font-semibold mb-4">Billing Status</h2>
        <div className="p-4 mb-4 bg-muted/30 border border-border rounded-lg text-center">
          <p className="text-sm text-muted-foreground">
            Running in local development mode - billing features are disabled
          </p>
          <p className="text-xs text-muted-foreground mt-2">
            Agent usage limits are not enforced in this environment
          </p>
        </div>
      </div>
    );
  }

  const supabaseClient = await createClient();

  // Get account subscription and usage data
  const { data: subscriptionData } = await supabaseClient
    .schema('basejump')
    .from('billing_subscriptions')
    .select('*')
    .eq('account_id', accountId)
    .eq('status', 'active')
    .limit(1)
    .order('created_at', { ascending: false })
    .single();

  // Get agent runs for this account
  // Get the account's threads
  const { data: threads } = await supabaseClient
    .from('threads')
    .select('thread_id')
    .eq('account_id', accountId);

  const threadIds = threads?.map(t => t.thread_id) || [];

  // Get current month usage
  const now = new Date();
  const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
  const isoStartOfMonth = startOfMonth.toISOString();

  let totalAgentTime = 0;
  let usageDisplay = "No usage this month";

  if (threadIds.length > 0) {
    const { data: agentRuns } = await supabaseClient
      .from('agent_runs')
      .select('started_at, completed_at')
      .in('thread_id', threadIds)
      .gte('started_at', isoStartOfMonth);

    if (agentRuns && agentRuns.length > 0) {
      const nowTimestamp = now.getTime();

      totalAgentTime = agentRuns.reduce((total, run) => {
        const startTime = new Date(run.started_at).getTime();
        const endTime = run.completed_at
          ? new Date(run.completed_at).getTime()
          : nowTimestamp;

        return total + (endTime - startTime) / 1000; // In seconds
      }, 0);

      // Convert to minutes
      const totalMinutes = Math.round(totalAgentTime / 60);
      usageDisplay = `${totalMinutes} minutes`;
    }
  }

  const isPlan = (planId?: string) => {
    return subscriptionData?.price_id === planId;
  };

  const planName = isPlan(SUBSCRIPTION_PLANS.FREE)
    ? "Free"
    : isPlan(SUBSCRIPTION_PLANS.PRO)
      ? "Pro"
      : isPlan(SUBSCRIPTION_PLANS.ENTERPRISE)
        ? "Enterprise"
        : "Unknown";

  return (
    <div className="rounded-xl border shadow-sm bg-card p-6">
      <h2 className="text-xl font-semibold mb-4">Billing Status</h2>

      {subscriptionData ? (
        <>
          <div className="mb-6">
            <div className="rounded-lg border bg-background p-4 grid grid-cols-1 md:grid-cols-2 gap-4">
              <div>
                <div className="flex justify-between items-center">
                  <span className="text-sm font-medium text-foreground/90">Current Plan</span>
                  <span className="text-sm font-medium text-card-title">{planName}</span>
                </div>
              </div>

              <div className="flex justify-between items-center">
                <span className="text-sm font-medium text-foreground/90">Agent Usage This Month</span>
                <span className="text-sm font-medium text-card-title">{usageDisplay}</span>
              </div>
            </div>
          </div>

          {/* Plans Comparison */}
          <PlanComparison
            accountId={accountId}
            returnUrl={returnUrl}
            className="mb-6"
          />

          {/* Manage Subscription Button */}
          <form>
            <input type="hidden" name="accountId" value={accountId} />
            <input type="hidden" name="returnUrl" value={returnUrl} />
            <SubmitButton
              pendingText="Loading..."
              formAction={manageSubscription}
              className="w-full bg-primary text-white hover:bg-primary/90 shadow-md hover:shadow-lg transition-all"
            >
              Manage Subscription
            </SubmitButton>
          </form>
        </>
      ) : (
        <>
          <div className="mb-6">
            <div className="rounded-lg border bg-background p-4 gap-4">
              <div className="flex justify-between items-center">
                <span className="text-sm font-medium text-foreground/90">Current Plan</span>
                <span className="text-sm font-medium text-card-title">Free</span>
              </div>

              <div className="flex justify-between items-center">
                <span className="text-sm font-medium text-foreground/90">Agent Usage This Month</span>
                <span className="text-sm font-medium text-card-title">{usageDisplay}</span>
              </div>
            </div>
          </div>

          {/* Plans Comparison */}
          <PlanComparison
            accountId={accountId}
            returnUrl={returnUrl}
            className="mb-6"
          />

          {/* Manage Subscription Button */}
          <form>
            <input type="hidden" name="accountId" value={accountId} />
            <input type="hidden" name="returnUrl" value={returnUrl} />
            <SubmitButton
              pendingText="Loading..."
              formAction={manageSubscription}
              className="w-full bg-primary text-white hover:bg-primary/90 shadow-md hover:shadow-lg transition-all"
            >
              Manage Subscription
            </SubmitButton>
          </form>
        </>
      )}
    </div>
  )
}
account-selector.tsx
ADDED
@@ -0,0 +1,165 @@
"use client"

import { ComponentPropsWithoutRef, useMemo, useState } from "react"
import { Check, ChevronsUpDown, PlusCircle } from "lucide-react";

import { cn } from "@/lib/utils"
import { Button } from "@/components/ui/button"
import {
  Command,
  CommandEmpty,
  CommandGroup,
  CommandInput,
  CommandItem,
  CommandList,
  CommandSeparator,
} from "@/components/ui/command"
import {
  Dialog,
  DialogContent,
  DialogDescription,
  DialogHeader,
  DialogTitle,
  DialogTrigger,
} from "@/components/ui/dialog"
import { Popover, PopoverContent, PopoverTrigger, } from "@/components/ui/popover"
import NewTeamForm from "@/components/basejump/new-team-form";
import { useAccounts } from "@/hooks/use-accounts";

type PopoverTriggerProps = ComponentPropsWithoutRef<typeof PopoverTrigger>;

type SelectedAccount = NonNullable<ReturnType<typeof useAccounts>["data"]>[0];

interface AccountSelectorProps extends PopoverTriggerProps {
  accountId: string;
  placeholder?: string;
  onAccountSelected?: (account: SelectedAccount) => void;
}

export default function AccountSelector({ className, accountId, onAccountSelected, placeholder = "Select an account..." }: AccountSelectorProps) {

  const [open, setOpen] = useState(false)
  const [showNewTeamDialog, setShowNewTeamDialog] = useState(false)

  const { data: accounts } = useAccounts();

  const { teamAccounts, personalAccount, selectedAccount } = useMemo(() => {
    const personalAccount = accounts?.find((account) => account.personal_account);
    const teamAccounts = accounts?.filter((account) => !account.personal_account);
    const selectedAccount = accounts?.find((account) => account.account_id === accountId);

    return {
      personalAccount,
      teamAccounts,
      selectedAccount,
    }
  }, [accounts, accountId]);

  return (
    <Dialog open={showNewTeamDialog} onOpenChange={setShowNewTeamDialog}>
      <Popover open={open} onOpenChange={setOpen}>
        <PopoverTrigger asChild>
          <Button
            variant="ghost"
            role="combobox"
            aria-expanded={open}
            aria-label="Select a team"
            className={cn(
              "w-full flex items-center gap-2 h-9 pl-3 pr-2 rounded-md justify-between border border-subtle dark:border-white/10 bg-transparent hover:bg-hover-bg text-foreground/90",
              className
            )}
          >
            <span className="truncate max-w-[180px]">
              {selectedAccount?.name || placeholder}
            </span>
            <ChevronsUpDown className="h-4 w-4 shrink-0 text-foreground/50" />
          </Button>
        </PopoverTrigger>
        <PopoverContent className="w-[250px] p-0 border-subtle dark:border-white/10 bg-card-bg dark:bg-background-secondary rounded-xl shadow-custom">
          <Command className="rounded-xl overflow-hidden bg-card-bg dark:bg-background-secondary border-0">
            <CommandList className="border-0 bg-card-bg dark:bg-background-secondary">
              <CommandInput placeholder="Search account..." className="h-9 border-0 focus:ring-0 rounded-t-xl bg-card-bg dark:bg-background-secondary text-foreground/90" />
              <CommandEmpty className="text-foreground/70 text-sm py-2">No account found.</CommandEmpty>
              <CommandGroup heading="Personal Account" className="text-xs font-medium text-foreground/70 bg-card-bg dark:bg-background-secondary">
                <CommandItem
                  key={personalAccount?.account_id}
                  onSelect={() => {
                    if (onAccountSelected) {
                      onAccountSelected(personalAccount!)
                    }
                    setOpen(false)
                  }}
                  className="text-sm rounded-md bg-card-bg dark:bg-background-secondary hover:!bg-[#f1eee7] dark:hover:!bg-[#141413] aria-selected:!bg-[#f1eee7] dark:aria-selected:!bg-[#141413] text-foreground/90"
                >
                  {personalAccount?.name}
                  <Check
                    className={cn(
                      "ml-auto h-4 w-4 text-primary",
                      selectedAccount?.account_id === personalAccount?.account_id
                        ? "opacity-100"
                        : "opacity-0"
                    )}
                  />
                </CommandItem>
              </CommandGroup>
              {Boolean(teamAccounts?.length) && (
                <CommandGroup heading="Teams" className="text-xs font-medium text-foreground/70 bg-card-bg dark:bg-background-secondary">
                  {teamAccounts?.map((team) => (
                    <CommandItem
                      key={team.account_id}
                      onSelect={() => {
                        if (onAccountSelected) {
                          onAccountSelected(team)
                        }

                        setOpen(false)
                      }}
                      className="text-sm rounded-md bg-card-bg dark:bg-background-secondary hover:!bg-[#f1eee7] dark:hover:!bg-[#141413] aria-selected:!bg-[#f1eee7] dark:aria-selected:!bg-[#141413] text-foreground/90"
                    >
                      {team.name}
                      <Check
                        className={cn(
                          "ml-auto h-4 w-4 text-primary",
                          selectedAccount?.account_id === team.account_id
                            ? "opacity-100"
                            : "opacity-0"
                        )}
                      />
                    </CommandItem>
                  ))}
                </CommandGroup>
              )}
            </CommandList>
            <CommandSeparator className="border-subtle dark:border-white/10" />
            <CommandList className="bg-card-bg dark:bg-background-secondary">
              <CommandGroup className="bg-card-bg dark:bg-background-secondary">
                <DialogTrigger asChild>
                  <CommandItem
                    value="new-team"
                    onSelect={() => {
                      setOpen(false)
                      setShowNewTeamDialog(true)
                    }}
                    className="text-sm rounded-md bg-card-bg dark:bg-background-secondary hover:!bg-[#f1eee7] dark:hover:!bg-[#141413] text-foreground/90"
                  >
                    <PlusCircle className="mr-2 h-4 w-4 text-primary" />
                    Create Team
                  </CommandItem>
                </DialogTrigger>
              </CommandGroup>
            </CommandList>
          </Command>
        </PopoverContent>
      </Popover>
      <DialogContent className="sm:max-w-[425px] border-subtle dark:border-white/10 bg-card-bg dark:bg-background-secondary rounded-2xl shadow-custom">
        <DialogHeader>
          <DialogTitle className="text-foreground">Create a new team</DialogTitle>
          <DialogDescription className="text-foreground/70">
            Create a team to collaborate with others.
          </DialogDescription>
        </DialogHeader>
        <NewTeamForm />
      </DialogContent>
    </Dialog>
  )
}
actions.ts
ADDED
@@ -0,0 +1,140 @@
"use server";

import { createClient } from "@/lib/supabase/server";
import { redirect } from "next/navigation";

export async function signIn(prevState: any, formData: FormData) {
  const email = formData.get("email") as string;
  const password = formData.get("password") as string;
  const returnUrl = formData.get("returnUrl") as string | undefined;

  if (!email || !email.includes('@')) {
    return { message: "Please enter a valid email address" };
  }

  if (!password || password.length < 6) {
    return { message: "Password must be at least 6 characters" };
  }

  const supabase = await createClient();

  const { error } = await supabase.auth.signInWithPassword({
    email,
    password
  });

  if (error) {
    return { message: error.message || "Could not authenticate user" };
  }

  return redirect(returnUrl || "/dashboard");
}

export async function signUp(prevState: any, formData: FormData) {
  const origin = formData.get("origin") as string;
  const email = formData.get("email") as string;
  const password = formData.get("password") as string;
  const confirmPassword = formData.get("confirmPassword") as string;
  const returnUrl = formData.get("returnUrl") as string | undefined;

  if (!email || !email.includes('@')) {
    return { message: "Please enter a valid email address" };
  }

  if (!password || password.length < 6) {
    return { message: "Password must be at least 6 characters" };
  }

  if (password !== confirmPassword) {
    return { message: "Passwords do not match" };
  }

  const supabase = await createClient();

  const { error } = await supabase.auth.signUp({
    email,
    password,
    options: {
      emailRedirectTo: `${origin}/auth/callback?returnUrl=${returnUrl}`,
    },
  });

  if (error) {
    return { message: error.message || "Could not create account" };
  }

  // Try to sign in immediately
  const { error: signInError } = await supabase.auth.signInWithPassword({
    email,
    password
  });

  if (signInError) {
    return { message: "Account created! Check your email to confirm your registration." };
  }

  return redirect(returnUrl || "/dashboard");
}

export async function forgotPassword(prevState: any, formData: FormData) {
  const email = formData.get("email") as string;
  const origin = formData.get("origin") as string;

  if (!email || !email.includes('@')) {
    return { message: "Please enter a valid email address" };
  }

  const supabase = await createClient();

  const { error } = await supabase.auth.resetPasswordForEmail(email, {
    redirectTo: `${origin}/auth/reset-password`,
  });

  if (error) {
    return { message: error.message || "Could not send password reset email" };
  }

  return {
    success: true,
    message: "Check your email for a password reset link"
  };
}

export async function resetPassword(prevState: any, formData: FormData) {
  const password = formData.get("password") as string;
  const confirmPassword = formData.get("confirmPassword") as string;

  if (!password || password.length < 6) {
    return { message: "Password must be at least 6 characters" };
  }

  if (password !== confirmPassword) {
    return { message: "Passwords do not match" };
  }

  const supabase = await createClient();

  const { error } = await supabase.auth.updateUser({
    password
  });

  if (error) {
    return { message: error.message || "Could not update password" };
  }

  return {
    success: true,
    message: "Password updated successfully"
  };
}

export async function signOut() {
  const supabase = await createClient();
  const { error } = await supabase.auth.signOut();

  if (error) {
    return { message: error.message || "Could not sign out" };
  }

  return redirect("/");
}
alert-dialog.tsx
ADDED
@@ -0,0 +1,157 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"use client"
|
2 |
+
|
3 |
+
import * as React from "react"
|
4 |
+
import * as AlertDialogPrimitive from "@radix-ui/react-alert-dialog"
|
5 |
+
|
6 |
+
import { cn } from "@/lib/utils"
|
7 |
+
import { buttonVariants } from "@/components/ui/button"
|
8 |
+
|
9 |
+
function AlertDialog({
|
10 |
+
...props
|
11 |
+
}: React.ComponentProps<typeof AlertDialogPrimitive.Root>) {
|
12 |
+
return <AlertDialogPrimitive.Root data-slot="alert-dialog" {...props} />
|
13 |
+
}
|
14 |
+
|
15 |
+
function AlertDialogTrigger({
|
16 |
+
...props
|
17 |
+
}: React.ComponentProps<typeof AlertDialogPrimitive.Trigger>) {
|
18 |
+
return (
|
19 |
+
<AlertDialogPrimitive.Trigger data-slot="alert-dialog-trigger" {...props} />
|
20 |
+
)
|
21 |
+
}
|
22 |
+
|
23 |
+
function AlertDialogPortal({
|
24 |
+
...props
|
25 |
+
}: React.ComponentProps<typeof AlertDialogPrimitive.Portal>) {
|
26 |
+
return (
|
27 |
+
<AlertDialogPrimitive.Portal data-slot="alert-dialog-portal" {...props} />
|
28 |
+
)
|
29 |
+
}
|
30 |
+
|
31 |
+
function AlertDialogOverlay({
|
32 |
+
className,
|
33 |
+
...props
|
34 |
+
}: React.ComponentProps<typeof AlertDialogPrimitive.Overlay>) {
|
35 |
+
return (
|
36 |
+
<AlertDialogPrimitive.Overlay
|
37 |
+
data-slot="alert-dialog-overlay"
|
38 |
+
className={cn(
|
39 |
+
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-50 bg-black/50",
|
40 |
+
className
|
41 |
+
)}
|
42 |
+
{...props}
|
43 |
+
/>
|
44 |
+
)
|
45 |
+
}
|
46 |
+
|
47 |
+
function AlertDialogContent({
|
48 |
+
className,
|
49 |
+
...props
|
50 |
+
}: React.ComponentProps<typeof AlertDialogPrimitive.Content>) {
|
51 |
+
return (
|
52 |
+
<AlertDialogPortal>
|
53 |
+
<AlertDialogOverlay />
|
54 |
+
<AlertDialogPrimitive.Content
|
55 |
+
data-slot="alert-dialog-content"
|
56 |
+
className={cn(
|
57 |
+
"bg-background data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 fixed top-[50%] left-[50%] z-50 grid w-full max-w-[calc(100%-2rem)] translate-x-[-50%] translate-y-[-50%] gap-4 rounded-lg border p-6 shadow-lg duration-200 sm:max-w-lg",
|
58 |
+
className
|
59 |
+
)}
|
60 |
+
{...props}
|
61 |
+
/>
|
62 |
+
</AlertDialogPortal>
|
63 |
+
)
|
64 |
+
}
|
65 |
+
|
66 |
+
function AlertDialogHeader({
|
67 |
+
className,
|
68 |
+
...props
|
69 |
+
}: React.ComponentProps<"div">) {
|
70 |
+
return (
|
71 |
+
<div
|
72 |
+
data-slot="alert-dialog-header"
|
73 |
+
className={cn("flex flex-col gap-2 text-center sm:text-left", className)}
|
74 |
+
{...props}
|
75 |
+
/>
|
76 |
+
)
|
77 |
+
}
|
78 |
+
|
79 |
+
function AlertDialogFooter({
|
80 |
+
className,
|
81 |
+
...props
|
82 |
+
}: React.ComponentProps<"div">) {
|
83 |
+
return (
|
84 |
+
<div
|
85 |
+
data-slot="alert-dialog-footer"
|
86 |
+
className={cn(
|
87 |
+
"flex flex-col-reverse gap-2 sm:flex-row sm:justify-end",
|
88 |
+
className
|
89 |
+
)}
|
90 |
+
{...props}
|
91 |
+
/>
|
92 |
+
)
|
93 |
+
}
|
94 |
+
|
95 |
+
function AlertDialogTitle({
|
96 |
+
className,
|
97 |
+
...props
|
98 |
+
}: React.ComponentProps<typeof AlertDialogPrimitive.Title>) {
|
99 |
+
return (
|
100 |
+
<AlertDialogPrimitive.Title
|
101 |
+
data-slot="alert-dialog-title"
|
102 |
+
className={cn("text-lg font-semibold", className)}
|
103 |
+
{...props}
|
104 |
+
/>
|
105 |
+
)
|
106 |
+
}
|
107 |
+
|
108 |
+
function AlertDialogDescription({
|
109 |
+
className,
|
110 |
+
...props
|
111 |
+
}: React.ComponentProps<typeof AlertDialogPrimitive.Description>) {
|
112 |
+
return (
|
113 |
+
<AlertDialogPrimitive.Description
|
114 |
+
data-slot="alert-dialog-description"
|
115 |
+
className={cn("text-muted-foreground text-sm", className)}
|
116 |
+
{...props}
|
117 |
+
/>
|
118 |
+
)
|
119 |
+
}
|
120 |
+
|
121 |
+
function AlertDialogAction({
|
122 |
+
className,
|
123 |
+
...props
|
124 |
+
}: React.ComponentProps<typeof AlertDialogPrimitive.Action>) {
|
125 |
+
return (
|
126 |
+
<AlertDialogPrimitive.Action
|
127 |
+
className={cn(buttonVariants(), className)}
|
128 |
+
{...props}
|
129 |
+
/>
|
130 |
+
)
|
131 |
+
}
|
132 |
+
|
133 |
+
function AlertDialogCancel({
|
134 |
+
className,
|
135 |
+
...props
|
136 |
+
}: React.ComponentProps<typeof AlertDialogPrimitive.Cancel>) {
|
137 |
+
return (
|
138 |
+
<AlertDialogPrimitive.Cancel
|
139 |
+
className={cn(buttonVariants({ variant: "outline" }), className)}
|
140 |
+
{...props}
|
141 |
+
/>
|
142 |
+
)
|
143 |
+
}
|
144 |
+
|
145 |
+
export {
|
146 |
+
AlertDialog,
|
147 |
+
AlertDialogPortal,
|
148 |
+
AlertDialogOverlay,
|
149 |
+
AlertDialogTrigger,
|
150 |
+
AlertDialogContent,
|
151 |
+
AlertDialogHeader,
|
152 |
+
AlertDialogFooter,
|
153 |
+
AlertDialogTitle,
|
154 |
+
AlertDialogDescription,
|
155 |
+
AlertDialogAction,
|
156 |
+
AlertDialogCancel,
|
157 |
+
}
|
alert.tsx
ADDED
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import * as React from "react"
|
2 |
+
import { cva, type VariantProps } from "class-variance-authority"
|
3 |
+
|
4 |
+
import { cn } from "@/lib/utils"
|
5 |
+
|
6 |
+
const alertVariants = cva(
|
7 |
+
"relative w-full rounded-lg border px-4 py-3 text-sm grid has-[>svg]:grid-cols-[calc(var(--spacing)*4)_1fr] grid-cols-[0_1fr] has-[>svg]:gap-x-3 gap-y-0.5 items-start [&>svg]:size-4 [&>svg]:translate-y-0.5 [&>svg]:text-current",
|
8 |
+
{
|
9 |
+
variants: {
|
10 |
+
variant: {
|
11 |
+
default: "bg-card text-card-foreground",
|
12 |
+
destructive:
|
13 |
+
"text-destructive bg-card [&>svg]:text-current *:data-[slot=alert-description]:text-destructive/90",
|
14 |
+
},
|
15 |
+
},
|
16 |
+
defaultVariants: {
|
17 |
+
variant: "default",
|
18 |
+
},
|
19 |
+
}
|
20 |
+
)
|
21 |
+
|
22 |
+
function Alert({
|
23 |
+
className,
|
24 |
+
variant,
|
25 |
+
...props
|
26 |
+
}: React.ComponentProps<"div"> & VariantProps<typeof alertVariants>) {
|
27 |
+
return (
|
28 |
+
<div
|
29 |
+
data-slot="alert"
|
30 |
+
role="alert"
|
31 |
+
className={cn(alertVariants({ variant }), className)}
|
32 |
+
{...props}
|
33 |
+
/>
|
34 |
+
)
|
35 |
+
}
|
36 |
+
|
37 |
+
function AlertTitle({ className, ...props }: React.ComponentProps<"div">) {
|
38 |
+
return (
|
39 |
+
<div
|
40 |
+
data-slot="alert-title"
|
41 |
+
className={cn(
|
42 |
+
"col-start-2 line-clamp-1 min-h-4 font-medium tracking-tight",
|
43 |
+
className
|
44 |
+
)}
|
45 |
+
{...props}
|
46 |
+
/>
|
47 |
+
)
|
48 |
+
}
|
49 |
+
|
50 |
+
function AlertDescription({
|
51 |
+
className,
|
52 |
+
...props
|
53 |
+
}: React.ComponentProps<"div">) {
|
54 |
+
return (
|
55 |
+
<div
|
56 |
+
data-slot="alert-description"
|
57 |
+
className={cn(
|
58 |
+
"text-muted-foreground col-start-2 grid justify-items-start gap-1 text-sm [&_p]:leading-relaxed",
|
59 |
+
className
|
60 |
+
)}
|
61 |
+
{...props}
|
62 |
+
/>
|
63 |
+
)
|
64 |
+
}
|
65 |
+
|
66 |
+
export { Alert, AlertTitle, AlertDescription }
|
api.py
ADDED
@@ -0,0 +1,311 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
from typing import List, Optional
|
3 |
+
|
4 |
+
from fastapi import FastAPI, UploadFile, File, HTTPException, APIRouter, Form, Depends, Request
|
5 |
+
from fastapi.responses import Response, JSONResponse
|
6 |
+
from pydantic import BaseModel
|
7 |
+
|
8 |
+
from utils.logger import logger
|
9 |
+
from utils.auth_utils import get_current_user_id, get_user_id_from_stream_auth, get_optional_user_id
|
10 |
+
from sandbox.sandbox import get_or_start_sandbox
|
11 |
+
from services.supabase import DBConnection
|
12 |
+
from agent.api import get_or_create_project_sandbox
|
13 |
+
|
14 |
+
|
15 |
+
# Initialize shared resources
|
16 |
+
router = APIRouter(tags=["sandbox"])
|
17 |
+
db = None
|
18 |
+
|
19 |
+
def initialize(_db: DBConnection):
|
20 |
+
"""Initialize the sandbox API with resources from the main API."""
|
21 |
+
global db
|
22 |
+
db = _db
|
23 |
+
logger.info("Initialized sandbox API with database connection")
|
24 |
+
|
25 |
+
class FileInfo(BaseModel):
|
26 |
+
"""Model for file information"""
|
27 |
+
name: str
|
28 |
+
path: str
|
29 |
+
is_dir: bool
|
30 |
+
size: int
|
31 |
+
mod_time: str
|
32 |
+
permissions: Optional[str] = None
|
33 |
+
|
34 |
+
async def verify_sandbox_access(client, sandbox_id: str, user_id: Optional[str] = None):
|
35 |
+
"""
|
36 |
+
Verify that a user has access to a specific sandbox based on account membership.
|
37 |
+
|
38 |
+
Args:
|
39 |
+
client: The Supabase client
|
40 |
+
sandbox_id: The sandbox ID to check access for
|
41 |
+
user_id: The user ID to check permissions for. Can be None for public resource access.
|
42 |
+
|
43 |
+
Returns:
|
44 |
+
dict: Project data containing sandbox information
|
45 |
+
|
46 |
+
Raises:
|
47 |
+
HTTPException: If the user doesn't have access to the sandbox or sandbox doesn't exist
|
48 |
+
"""
|
49 |
+
# Find the project that owns this sandbox
|
50 |
+
project_result = await client.table('projects').select('*').filter('sandbox->>id', 'eq', sandbox_id).execute()
|
51 |
+
|
52 |
+
if not project_result.data or len(project_result.data) == 0:
|
53 |
+
raise HTTPException(status_code=404, detail="Sandbox not found")
|
54 |
+
|
55 |
+
project_data = project_result.data[0]
|
56 |
+
|
57 |
+
if project_data.get('is_public'):
|
58 |
+
return project_data
|
59 |
+
|
60 |
+
# For private projects, we must have a user_id
|
61 |
+
if not user_id:
|
62 |
+
raise HTTPException(status_code=401, detail="Authentication required for this resource")
|
63 |
+
|
64 |
+
account_id = project_data.get('account_id')
|
65 |
+
|
66 |
+
# Verify account membership
|
67 |
+
if account_id:
|
68 |
+
account_user_result = await client.schema('basejump').from_('account_user').select('account_role').eq('user_id', user_id).eq('account_id', account_id).execute()
|
69 |
+
if account_user_result.data and len(account_user_result.data) > 0:
|
70 |
+
return project_data
|
71 |
+
|
72 |
+
raise HTTPException(status_code=403, detail="Not authorized to access this sandbox")
|
73 |
+
|
74 |
+
async def get_sandbox_by_id_safely(client, sandbox_id: str):
|
75 |
+
"""
|
76 |
+
Safely retrieve a sandbox object by its ID, using the project that owns it.
|
77 |
+
|
78 |
+
Args:
|
79 |
+
client: The Supabase client
|
80 |
+
sandbox_id: The sandbox ID to retrieve
|
81 |
+
|
82 |
+
Returns:
|
83 |
+
Sandbox: The sandbox object
|
84 |
+
|
85 |
+
Raises:
|
86 |
+
HTTPException: If the sandbox doesn't exist or can't be retrieved
|
87 |
+
"""
|
88 |
+
# Find the project that owns this sandbox
|
89 |
+
project_result = await client.table('projects').select('project_id').filter('sandbox->>id', 'eq', sandbox_id).execute()
|
90 |
+
|
91 |
+
if not project_result.data or len(project_result.data) == 0:
|
92 |
+
logger.error(f"No project found for sandbox ID: {sandbox_id}")
|
93 |
+
raise HTTPException(status_code=404, detail="Sandbox not found - no project owns this sandbox ID")
|
94 |
+
|
95 |
+
project_id = project_result.data[0]['project_id']
|
96 |
+
logger.debug(f"Found project {project_id} for sandbox {sandbox_id}")
|
97 |
+
|
98 |
+
try:
|
99 |
+
# Get the sandbox
|
100 |
+
sandbox, retrieved_sandbox_id, sandbox_pass = await get_or_create_project_sandbox(client, project_id)
|
101 |
+
|
102 |
+
# Verify we got the right sandbox
|
103 |
+
if retrieved_sandbox_id != sandbox_id:
|
104 |
+
logger.warning(f"Retrieved sandbox ID {retrieved_sandbox_id} doesn't match requested ID {sandbox_id} for project {project_id}")
|
105 |
+
# Fall back to the direct method if IDs don't match (shouldn't happen but just in case)
|
106 |
+
sandbox = await get_or_start_sandbox(sandbox_id)
|
107 |
+
|
108 |
+
return sandbox
|
109 |
+
except Exception as e:
|
110 |
+
logger.error(f"Error retrieving sandbox {sandbox_id}: {str(e)}")
|
111 |
+
raise HTTPException(status_code=500, detail=f"Failed to retrieve sandbox: {str(e)}")
|
112 |
+
|
113 |
+
@router.post("/sandboxes/{sandbox_id}/files")
|
114 |
+
async def create_file(
|
115 |
+
sandbox_id: str,
|
116 |
+
path: str = Form(...),
|
117 |
+
file: UploadFile = File(...),
|
118 |
+
request: Request = None,
|
119 |
+
user_id: Optional[str] = Depends(get_optional_user_id)
|
120 |
+
):
|
121 |
+
"""Create a file in the sandbox using direct file upload"""
|
122 |
+
logger.info(f"Received file upload request for sandbox {sandbox_id}, path: {path}, user_id: {user_id}")
|
123 |
+
client = await db.client
|
124 |
+
|
125 |
+
# Verify the user has access to this sandbox
|
126 |
+
await verify_sandbox_access(client, sandbox_id, user_id)
|
127 |
+
|
128 |
+
try:
|
129 |
+
# Get sandbox using the safer method
|
130 |
+
sandbox = await get_sandbox_by_id_safely(client, sandbox_id)
|
131 |
+
|
132 |
+
# Read file content directly from the uploaded file
|
133 |
+
content = await file.read()
|
134 |
+
|
135 |
+
# Create file using raw binary content
|
136 |
+
sandbox.fs.upload_file(path, content)
|
137 |
+
logger.info(f"File created at {path} in sandbox {sandbox_id}")
|
138 |
+
|
139 |
+
return {"status": "success", "created": True, "path": path}
|
140 |
+
except Exception as e:
|
141 |
+
logger.error(f"Error creating file in sandbox {sandbox_id}: {str(e)}")
|
142 |
+
raise HTTPException(status_code=500, detail=str(e))
|
143 |
+
|
144 |
+
# For backward compatibility, keep the JSON version too
|
145 |
+
@router.post("/sandboxes/{sandbox_id}/files/json")
|
146 |
+
async def create_file_json(
|
147 |
+
sandbox_id: str,
|
148 |
+
file_request: dict,
|
149 |
+
request: Request = None,
|
150 |
+
user_id: Optional[str] = Depends(get_optional_user_id)
|
151 |
+
):
|
152 |
+
"""Create a file in the sandbox using JSON (legacy support)"""
|
153 |
+
logger.info(f"Received JSON file creation request for sandbox {sandbox_id}, user_id: {user_id}")
|
154 |
+
client = await db.client
|
155 |
+
|
156 |
+
# Verify the user has access to this sandbox
|
157 |
+
await verify_sandbox_access(client, sandbox_id, user_id)
|
158 |
+
|
159 |
+
try:
|
160 |
+
# Get sandbox using the safer method
|
161 |
+
sandbox = await get_sandbox_by_id_safely(client, sandbox_id)
|
162 |
+
|
163 |
+
# Get file path and content
|
164 |
+
path = file_request.get("path")
|
165 |
+
content = file_request.get("content", "")
|
166 |
+
|
167 |
+
if not path:
|
168 |
+
logger.error(f"Missing file path in request for sandbox {sandbox_id}")
|
169 |
+
raise HTTPException(status_code=400, detail="File path is required")
|
170 |
+
|
171 |
+
# Convert string content to bytes
|
172 |
+
if isinstance(content, str):
|
173 |
+
content = content.encode('utf-8')
|
174 |
+
|
175 |
+
# Create file
|
176 |
+
sandbox.fs.upload_file(path, content)
|
177 |
+
logger.info(f"File created at {path} in sandbox {sandbox_id}")
|
178 |
+
|
179 |
+
return {"status": "success", "created": True, "path": path}
|
180 |
+
except Exception as e:
|
181 |
+
logger.error(f"Error creating file in sandbox {sandbox_id}: {str(e)}")
|
182 |
+
raise HTTPException(status_code=500, detail=str(e))
|
183 |
+
|
184 |
+
@router.get("/sandboxes/{sandbox_id}/files")
|
185 |
+
async def list_files(
|
186 |
+
sandbox_id: str,
|
187 |
+
path: str,
|
188 |
+
request: Request = None,
|
189 |
+
user_id: Optional[str] = Depends(get_optional_user_id)
|
190 |
+
):
|
191 |
+
"""List files and directories at the specified path"""
|
192 |
+
logger.info(f"Received list files request for sandbox {sandbox_id}, path: {path}, user_id: {user_id}")
|
193 |
+
client = await db.client
|
194 |
+
|
195 |
+
# Verify the user has access to this sandbox
|
196 |
+
await verify_sandbox_access(client, sandbox_id, user_id)
|
197 |
+
|
198 |
+
try:
|
199 |
+
# Get sandbox using the safer method
|
200 |
+
sandbox = await get_sandbox_by_id_safely(client, sandbox_id)
|
201 |
+
|
202 |
+
# List files
|
203 |
+
files = sandbox.fs.list_files(path)
|
204 |
+
result = []
|
205 |
+
|
206 |
+
for file in files:
|
207 |
+
# Convert file information to our model
|
208 |
+
# Ensure forward slashes are used for paths, regardless of OS
|
209 |
+
full_path = f"{path.rstrip('/')}/{file.name}" if path != '/' else f"/{file.name}"
|
210 |
+
file_info = FileInfo(
|
211 |
+
name=file.name,
|
212 |
+
path=full_path, # Use the constructed path
|
213 |
+
is_dir=file.is_dir,
|
214 |
+
size=file.size,
|
215 |
+
mod_time=str(file.mod_time),
|
216 |
+
permissions=getattr(file, 'permissions', None)
|
217 |
+
)
|
218 |
+
result.append(file_info)
|
219 |
+
|
220 |
+
logger.info(f"Successfully listed {len(result)} files in sandbox {sandbox_id}")
|
221 |
+
return {"files": [file.dict() for file in result]}
|
222 |
+
except Exception as e:
|
223 |
+
logger.error(f"Error listing files in sandbox {sandbox_id}: {str(e)}")
|
224 |
+
raise HTTPException(status_code=500, detail=str(e))
|
225 |
+
|
226 |
+
@router.get("/sandboxes/{sandbox_id}/files/content")
|
227 |
+
async def read_file(
|
228 |
+
sandbox_id: str,
|
229 |
+
path: str,
|
230 |
+
request: Request = None,
|
231 |
+
user_id: Optional[str] = Depends(get_optional_user_id)
|
232 |
+
):
|
233 |
+
"""Read a file from the sandbox"""
|
234 |
+
logger.info(f"Received file read request for sandbox {sandbox_id}, path: {path}, user_id: {user_id}")
|
235 |
+
client = await db.client
|
236 |
+
|
237 |
+
# Verify the user has access to this sandbox
|
238 |
+
await verify_sandbox_access(client, sandbox_id, user_id)
|
239 |
+
|
240 |
+
try:
|
241 |
+
# Get sandbox using the safer method
|
242 |
+
sandbox = await get_sandbox_by_id_safely(client, sandbox_id)
|
243 |
+
|
244 |
+
# Read file
|
245 |
+
content = sandbox.fs.download_file(path)
|
246 |
+
|
247 |
+
# Return a Response object with the content directly
|
248 |
+
filename = os.path.basename(path)
|
249 |
+
logger.info(f"Successfully read file {filename} from sandbox {sandbox_id}")
|
250 |
+
return Response(
|
251 |
+
content=content,
|
252 |
+
media_type="application/octet-stream",
|
253 |
+
headers={"Content-Disposition": f"attachment; filename={filename}"}
|
254 |
+
)
|
255 |
+
except Exception as e:
|
256 |
+
logger.error(f"Error reading file in sandbox {sandbox_id}: {str(e)}")
|
257 |
+
raise HTTPException(status_code=500, detail=str(e))
|
258 |
+
|
259 |
+
@router.post("/project/{project_id}/sandbox/ensure-active")
|
260 |
+
async def ensure_project_sandbox_active(
|
261 |
+
project_id: str,
|
262 |
+
request: Request = None,
|
263 |
+
user_id: Optional[str] = Depends(get_optional_user_id)
|
264 |
+
):
|
265 |
+
"""
|
266 |
+
Ensure that a project's sandbox is active and running.
|
267 |
+
Checks the sandbox status and starts it if it's not running.
|
268 |
+
"""
|
269 |
+
logger.info(f"Received ensure sandbox active request for project {project_id}, user_id: {user_id}")
|
270 |
+
client = await db.client
|
271 |
+
|
272 |
+
# Find the project and sandbox information
|
273 |
+
project_result = await client.table('projects').select('*').eq('project_id', project_id).execute()
|
274 |
+
|
275 |
+
if not project_result.data or len(project_result.data) == 0:
|
276 |
+
logger.error(f"Project not found: {project_id}")
|
277 |
+
raise HTTPException(status_code=404, detail="Project not found")
|
278 |
+
|
279 |
+
project_data = project_result.data[0]
|
280 |
+
|
281 |
+
# For public projects, no authentication is needed
|
282 |
+
if not project_data.get('is_public'):
|
283 |
+
# For private projects, we must have a user_id
|
284 |
+
if not user_id:
|
285 |
+
logger.error(f"Authentication required for private project {project_id}")
|
286 |
+
raise HTTPException(status_code=401, detail="Authentication required for this resource")
|
287 |
+
|
288 |
+
account_id = project_data.get('account_id')
|
289 |
+
|
290 |
+
# Verify account membership
|
291 |
+
if account_id:
|
292 |
+
account_user_result = await client.schema('basejump').from_('account_user').select('account_role').eq('user_id', user_id).eq('account_id', account_id).execute()
|
293 |
+
if not (account_user_result.data and len(account_user_result.data) > 0):
|
294 |
+
logger.error(f"User {user_id} not authorized to access project {project_id}")
|
295 |
+
raise HTTPException(status_code=403, detail="Not authorized to access this project")
|
296 |
+
|
297 |
+
try:
|
298 |
+
# Get or create the sandbox
|
299 |
+
logger.info(f"Ensuring sandbox is active for project {project_id}")
|
300 |
+
sandbox, sandbox_id, sandbox_pass = await get_or_create_project_sandbox(client, project_id)
|
301 |
+
|
302 |
+
logger.info(f"Successfully ensured sandbox {sandbox_id} is active for project {project_id}")
|
303 |
+
|
304 |
+
return {
|
305 |
+
"status": "success",
|
306 |
+
"sandbox_id": sandbox_id,
|
307 |
+
"message": "Sandbox is active"
|
308 |
+
}
|
309 |
+
except Exception as e:
|
310 |
+
logger.error(f"Error ensuring sandbox is active for project {project_id}: {str(e)}")
|
311 |
+
raise HTTPException(status_code=500, detail=str(e))
|
api.py.bak
ADDED
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from fastapi import FastAPI, Request
|
2 |
+
from fastapi.middleware.cors import CORSMiddleware
|
3 |
+
from fastapi.responses import JSONResponse
|
4 |
+
from contextlib import asynccontextmanager
|
5 |
+
from agentpress.thread_manager import ThreadManager
|
6 |
+
from services.supabase import DBConnection
|
7 |
+
from datetime import datetime, timezone
|
8 |
+
from dotenv import load_dotenv
|
9 |
+
from utils.config import config, EnvMode
|
10 |
+
import asyncio
|
11 |
+
from utils.logger import logger
|
12 |
+
import uuid
|
13 |
+
import time
|
14 |
+
from collections import OrderedDict
|
15 |
+
|
16 |
+
# Import the agent API module
|
17 |
+
from agent import api as agent_api
|
18 |
+
from sandbox import api as sandbox_api
|
19 |
+
|
20 |
+
# Load environment variables (these will be available through config)
|
21 |
+
load_dotenv()
|
22 |
+
|
23 |
+
# Initialize managers
|
24 |
+
db = DBConnection()
|
25 |
+
thread_manager = None
|
26 |
+
instance_id = "single"
|
27 |
+
|
28 |
+
# Rate limiter state
|
29 |
+
ip_tracker = OrderedDict()
|
30 |
+
MAX_CONCURRENT_IPS = 25
|
31 |
+
|
32 |
+
@asynccontextmanager
|
33 |
+
async def lifespan(app: FastAPI):
|
34 |
+
# Startup
|
35 |
+
global thread_manager
|
36 |
+
logger.info(f"Starting up FastAPI application with instance ID: {instance_id} in {config.ENV_MODE.value} mode")
|
37 |
+
|
38 |
+
try:
|
39 |
+
# Initialize database
|
40 |
+
await db.initialize()
|
41 |
+
thread_manager = ThreadManager()
|
42 |
+
|
43 |
+
# Initialize the agent API with shared resources
|
44 |
+
agent_api.initialize(
|
45 |
+
thread_manager,
|
46 |
+
db,
|
47 |
+
instance_id
|
48 |
+
)
|
49 |
+
|
50 |
+
# Initialize the sandbox API with shared resources
|
51 |
+
sandbox_api.initialize(db)
|
52 |
+
|
53 |
+
# Initialize Redis connection
|
54 |
+
from services import redis
|
55 |
+
try:
|
56 |
+
await redis.initialize_async()
|
57 |
+
logger.info("Redis connection initialized successfully")
|
58 |
+
except Exception as e:
|
59 |
+
logger.error(f"Failed to initialize Redis connection: {e}")
|
60 |
+
# Continue without Redis - the application will handle Redis failures gracefully
|
61 |
+
|
62 |
+
# Start background tasks
|
63 |
+
asyncio.create_task(agent_api.restore_running_agent_runs())
|
64 |
+
|
65 |
+
yield
|
66 |
+
|
67 |
+
# Clean up agent resources
|
68 |
+
logger.info("Cleaning up agent resources")
|
69 |
+
await agent_api.cleanup()
|
70 |
+
|
71 |
+
# Clean up Redis connection
|
72 |
+
try:
|
73 |
+
logger.info("Closing Redis connection")
|
74 |
+
await redis.close()
|
75 |
+
logger.info("Redis connection closed successfully")
|
76 |
+
except Exception as e:
|
77 |
+
logger.error(f"Error closing Redis connection: {e}")
|
78 |
+
|
79 |
+
# Clean up database connection
|
80 |
+
logger.info("Disconnecting from database")
|
81 |
+
await db.disconnect()
|
82 |
+
except Exception as e:
|
83 |
+
logger.error(f"Error during application startup: {e}")
|
84 |
+
raise
|
85 |
+
|
86 |
+
app = FastAPI(lifespan=lifespan)
|
87 |
+
|
88 |
+
@app.middleware("http")
|
89 |
+
async def log_requests_middleware(request: Request, call_next):
|
90 |
+
start_time = time.time()
|
91 |
+
client_ip = request.client.host
|
92 |
+
method = request.method
|
93 |
+
url = str(request.url)
|
94 |
+
path = request.url.path
|
95 |
+
query_params = str(request.query_params)
|
96 |
+
|
97 |
+
# Log the incoming request
|
98 |
+
logger.info(f"Request started: {method} {path} from {client_ip} | Query: {query_params}")
|
99 |
+
|
100 |
+
try:
|
101 |
+
response = await call_next(request)
|
102 |
+
process_time = time.time() - start_time
|
103 |
+
logger.debug(f"Request completed: {method} {path} | Status: {response.status_code} | Time: {process_time:.2f}s")
|
104 |
+
return response
|
105 |
+
except Exception as e:
|
106 |
+
process_time = time.time() - start_time
|
107 |
+
logger.error(f"Request failed: {method} {path} | Error: {str(e)} | Time: {process_time:.2f}s")
|
108 |
+
raise
|
109 |
+
|
110 |
+
# Define allowed origins based on environment
|
111 |
+
allowed_origins = ["https://www.suna.so", "https://suna.so", "https://staging.suna.so", "http://localhost:3000"]
|
112 |
+
|
113 |
+
# Add staging-specific origins
|
114 |
+
if config.ENV_MODE == EnvMode.STAGING:
|
115 |
+
allowed_origins.append("http://localhost:3000")
|
116 |
+
|
117 |
+
# Add local-specific origins
|
118 |
+
if config.ENV_MODE == EnvMode.LOCAL:
|
119 |
+
allowed_origins.append("http://localhost:3000")
|
120 |
+
|
121 |
+
app.add_middleware(
|
122 |
+
CORSMiddleware,
|
123 |
+
allow_origins=allowed_origins,
|
124 |
+
allow_credentials=True,
|
125 |
+
allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
|
126 |
+
allow_headers=["Content-Type", "Authorization"],
|
127 |
+
)
|
128 |
+
|
129 |
+
# Include the agent router with a prefix
|
130 |
+
app.include_router(agent_api.router, prefix="/api")
|
131 |
+
|
132 |
+
# Include the sandbox router with a prefix
|
133 |
+
app.include_router(sandbox_api.router, prefix="/api")
|
134 |
+
|
135 |
+
@app.get("/api/health")
|
136 |
+
async def health_check():
|
137 |
+
"""Health check endpoint to verify API is working."""
|
138 |
+
logger.info("Health check endpoint called")
|
139 |
+
return {
|
140 |
+
"status": "ok",
|
141 |
+
"timestamp": datetime.now(timezone.utc).isoformat(),
|
142 |
+
"instance_id": instance_id
|
143 |
+
}
|
144 |
+
|
145 |
+
if __name__ == "__main__":
|
146 |
+
import uvicorn
|
147 |
+
|
148 |
+
workers = 2
|
149 |
+
|
150 |
+
logger.info(f"Starting server on 0.0.0.0:8000 with {workers} workers")
|
151 |
+
uvicorn.run(
|
152 |
+
"api:app",
|
153 |
+
host="0.0.0.0",
|
154 |
+
port=8000,
|
155 |
+
workers=workers
|
156 |
+
)
|
api.ts
ADDED
@@ -0,0 +1,1179 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { createClient } from '@/lib/supabase/client';
|
2 |
+
|
3 |
+
// Get backend URL from environment variables
|
4 |
+
const API_URL = process.env.NEXT_PUBLIC_BACKEND_URL || '';
|
5 |
+
|
6 |
+
// Set to keep track of agent runs that are known to be non-running
|
7 |
+
const nonRunningAgentRuns = new Set<string>();
|
8 |
+
// Map to keep track of active EventSource streams
|
9 |
+
const activeStreams = new Map<string, EventSource>();
|
10 |
+
|
11 |
+
// Custom error for billing issues
|
12 |
+
export class BillingError extends Error {
|
13 |
+
status: number;
|
14 |
+
detail: { message: string; [key: string]: any }; // Allow other properties in detail
|
15 |
+
|
16 |
+
constructor(status: number, detail: { message: string; [key: string]: any }, message?: string) {
|
17 |
+
super(message || detail.message || `Billing Error: ${status}`);
|
18 |
+
this.name = 'BillingError';
|
19 |
+
this.status = status;
|
20 |
+
this.detail = detail;
|
21 |
+
|
22 |
+
// Set the prototype explicitly.
|
23 |
+
Object.setPrototypeOf(this, BillingError.prototype);
|
24 |
+
}
|
25 |
+
}
|
26 |
+
|
27 |
+
// Type Definitions (moved from potential separate file for clarity)
|
28 |
+
export type Project = {
|
29 |
+
id: string;
|
30 |
+
name: string;
|
31 |
+
description: string;
|
32 |
+
account_id: string;
|
33 |
+
created_at: string;
|
34 |
+
updated_at?: string;
|
35 |
+
sandbox: {
|
36 |
+
vnc_preview?: string;
|
37 |
+
sandbox_url?: string;
|
38 |
+
id?: string;
|
39 |
+
pass?: string;
|
40 |
+
};
|
41 |
+
is_public?: boolean; // Flag to indicate if the project is public
|
42 |
+
[key: string]: any; // Allow additional properties to handle database fields
|
43 |
+
}
|
44 |
+
|
45 |
+
export type Thread = {
|
46 |
+
thread_id: string;
|
47 |
+
account_id: string | null;
|
48 |
+
project_id?: string | null;
|
49 |
+
is_public?: boolean;
|
50 |
+
created_at: string;
|
51 |
+
updated_at: string;
|
52 |
+
[key: string]: any; // Allow additional properties to handle database fields
|
53 |
+
}
|
54 |
+
|
55 |
+
export type Message = {
|
56 |
+
role: string;
|
57 |
+
content: string;
|
58 |
+
type: string;
|
59 |
+
}
|
60 |
+
|
61 |
+
export type AgentRun = {
|
62 |
+
id: string;
|
63 |
+
thread_id: string;
|
64 |
+
status: 'running' | 'completed' | 'stopped' | 'error';
|
65 |
+
started_at: string;
|
66 |
+
completed_at: string | null;
|
67 |
+
responses: Message[];
|
68 |
+
error: string | null;
|
69 |
+
}
|
70 |
+
|
71 |
+
export type ToolCall = {
|
72 |
+
name: string;
|
73 |
+
arguments: Record<string, unknown>;
|
74 |
+
}
|
75 |
+
|
76 |
+
export interface InitiateAgentResponse {
|
77 |
+
thread_id: string;
|
78 |
+
agent_run_id: string;
|
79 |
+
}
|
80 |
+
|
81 |
+
export interface HealthCheckResponse {
|
82 |
+
status: string;
|
83 |
+
timestamp: string;
|
84 |
+
instance_id: string;
|
85 |
+
}
|
86 |
+
|
87 |
+
export interface FileInfo {
|
88 |
+
name: string;
|
89 |
+
path: string;
|
90 |
+
is_dir: boolean;
|
91 |
+
size: number;
|
92 |
+
mod_time: string;
|
93 |
+
permissions?: string;
|
94 |
+
}
|
95 |
+
|
96 |
+
// Project APIs
|
97 |
+
export const getProjects = async (): Promise<Project[]> => {
|
98 |
+
try {
|
99 |
+
const supabase = createClient();
|
100 |
+
|
101 |
+
// Get the current user's ID to filter projects
|
102 |
+
const { data: userData, error: userError } = await supabase.auth.getUser();
|
103 |
+
if (userError) {
|
104 |
+
console.error('Error getting current user:', userError);
|
105 |
+
return [];
|
106 |
+
}
|
107 |
+
|
108 |
+
// If no user is logged in, return an empty array
|
109 |
+
if (!userData.user) {
|
110 |
+
console.log('[API] No user logged in, returning empty projects array');
|
111 |
+
return [];
|
112 |
+
}
|
113 |
+
|
114 |
+
// Query only projects where account_id matches the current user's ID
|
115 |
+
const { data, error } = await supabase
|
116 |
+
.from('projects')
|
117 |
+
.select('*')
|
118 |
+
.eq('account_id', userData.user.id);
|
119 |
+
|
120 |
+
if (error) {
|
121 |
+
// Handle permission errors specifically
|
122 |
+
if (error.code === '42501' && error.message.includes('has_role_on_account')) {
|
123 |
+
console.error('Permission error: User does not have proper account access');
|
124 |
+
return []; // Return empty array instead of throwing
|
125 |
+
}
|
126 |
+
throw error;
|
127 |
+
}
|
128 |
+
|
129 |
+
console.log('[API] Raw projects from DB:', data?.length, data);
|
130 |
+
|
131 |
+
// Map database fields to our Project type
|
132 |
+
const mappedProjects: Project[] = (data || []).map(project => ({
|
133 |
+
id: project.project_id,
|
134 |
+
name: project.name || '',
|
135 |
+
description: project.description || '',
|
136 |
+
account_id: project.account_id,
|
137 |
+
created_at: project.created_at,
|
138 |
+
updated_at: project.updated_at,
|
139 |
+
sandbox: project.sandbox || { id: "", pass: "", vnc_preview: "", sandbox_url: "" }
|
140 |
+
}));
|
141 |
+
|
142 |
+
console.log('[API] Mapped projects for frontend:', mappedProjects.length);
|
143 |
+
|
144 |
+
return mappedProjects;
|
145 |
+
} catch (err) {
|
146 |
+
console.error('Error fetching projects:', err);
|
147 |
+
// Return empty array for permission errors to avoid crashing the UI
|
148 |
+
return [];
|
149 |
+
}
|
150 |
+
};
|
151 |
+
|
152 |
+
export const getProject = async (projectId: string): Promise<Project> => {
|
153 |
+
const supabase = createClient();
|
154 |
+
|
155 |
+
try {
|
156 |
+
const { data, error } = await supabase
|
157 |
+
.from('projects')
|
158 |
+
.select('*')
|
159 |
+
.eq('project_id', projectId)
|
160 |
+
.single();
|
161 |
+
|
162 |
+
if (error) {
|
163 |
+
// Handle the specific "no rows returned" error from Supabase
|
164 |
+
if (error.code === 'PGRST116') {
|
165 |
+
throw new Error(`Project not found or not accessible: ${projectId}`);
|
166 |
+
}
|
167 |
+
throw error;
|
168 |
+
}
|
169 |
+
|
170 |
+
console.log('Raw project data from database:', data);
|
171 |
+
|
172 |
+
// If project has a sandbox, ensure it's started
|
173 |
+
if (data.sandbox?.id) {
|
174 |
+
// Fire off sandbox activation without blocking
|
175 |
+
const ensureSandboxActive = async () => {
|
176 |
+
try {
|
177 |
+
const { data: { session } } = await supabase.auth.getSession();
|
178 |
+
|
179 |
+
// For public projects, we don't need authentication
|
180 |
+
const headers: Record<string, string> = {
|
181 |
+
'Content-Type': 'application/json'
|
182 |
+
};
|
183 |
+
|
184 |
+
if (session?.access_token) {
|
185 |
+
headers['Authorization'] = `Bearer ${session.access_token}`;
|
186 |
+
}
|
187 |
+
|
188 |
+
console.log(`Ensuring sandbox is active for project ${projectId}...`);
|
189 |
+
const response = await fetch(`${API_URL}/project/${projectId}/sandbox/ensure-active`, {
|
190 |
+
method: 'POST',
|
191 |
+
headers,
|
192 |
+
});
|
193 |
+
|
194 |
+
if (!response.ok) {
|
195 |
+
const errorText = await response.text().catch(() => 'No error details available');
|
196 |
+
console.warn(`Failed to ensure sandbox is active: ${response.status} ${response.statusText}`, errorText);
|
197 |
+
} else {
|
198 |
+
console.log('Sandbox activation successful');
|
199 |
+
}
|
200 |
+
} catch (sandboxError) {
|
201 |
+
console.warn('Failed to ensure sandbox is active:', sandboxError);
|
202 |
+
}
|
203 |
+
};
|
204 |
+
|
205 |
+
// Start the sandbox activation without awaiting
|
206 |
+
ensureSandboxActive();
|
207 |
+
}
|
208 |
+
|
209 |
+
// Map database fields to our Project type
|
210 |
+
const mappedProject: Project = {
|
211 |
+
id: data.project_id,
|
212 |
+
name: data.name || '',
|
213 |
+
description: data.description || '',
|
214 |
+
account_id: data.account_id,
|
215 |
+
created_at: data.created_at,
|
216 |
+
sandbox: data.sandbox || { id: "", pass: "", vnc_preview: "", sandbox_url: "" }
|
217 |
+
};
|
218 |
+
|
219 |
+
console.log('Mapped project data for frontend:', mappedProject);
|
220 |
+
|
221 |
+
return mappedProject;
|
222 |
+
} catch (error) {
|
223 |
+
console.error(`Error fetching project ${projectId}:`, error);
|
224 |
+
throw error;
|
225 |
+
}
|
226 |
+
};
|
227 |
+
|
228 |
+
export const createProject = async (
|
229 |
+
projectData: { name: string; description: string },
|
230 |
+
accountId?: string
|
231 |
+
): Promise<Project> => {
|
232 |
+
const supabase = createClient();
|
233 |
+
|
234 |
+
// If accountId is not provided, we'll need to get the user's ID
|
235 |
+
if (!accountId) {
|
236 |
+
const { data: userData, error: userError } = await supabase.auth.getUser();
|
237 |
+
|
238 |
+
if (userError) throw userError;
|
239 |
+
if (!userData.user) throw new Error('You must be logged in to create a project');
|
240 |
+
|
241 |
+
// In Basejump, the personal account ID is the same as the user ID
|
242 |
+
accountId = userData.user.id;
|
243 |
+
}
|
244 |
+
|
245 |
+
const { data, error } = await supabase
|
246 |
+
.from('projects')
|
247 |
+
.insert({
|
248 |
+
name: projectData.name,
|
249 |
+
description: projectData.description || null,
|
250 |
+
account_id: accountId
|
251 |
+
})
|
252 |
+
.select()
|
253 |
+
.single();
|
254 |
+
|
255 |
+
if (error) throw error;
|
256 |
+
|
257 |
+
// Map the database response to our Project type
|
258 |
+
return {
|
259 |
+
id: data.project_id,
|
260 |
+
name: data.name,
|
261 |
+
description: data.description || '',
|
262 |
+
account_id: data.account_id,
|
263 |
+
created_at: data.created_at,
|
264 |
+
sandbox: { id: "", pass: "", vnc_preview: "" }
|
265 |
+
};
|
266 |
+
};
|
267 |
+
|
268 |
+
export const updateProject = async (projectId: string, data: Partial<Project>): Promise<Project> => {
|
269 |
+
const supabase = createClient();
|
270 |
+
|
271 |
+
console.log('Updating project with ID:', projectId);
|
272 |
+
console.log('Update data:', data);
|
273 |
+
|
274 |
+
// Sanity check to avoid update errors
|
275 |
+
if (!projectId || projectId === '') {
|
276 |
+
console.error('Attempted to update project with invalid ID:', projectId);
|
277 |
+
throw new Error('Cannot update project: Invalid project ID');
|
278 |
+
}
|
279 |
+
|
280 |
+
const { data: updatedData, error } = await supabase
|
281 |
+
.from('projects')
|
282 |
+
.update(data)
|
283 |
+
.eq('project_id', projectId)
|
284 |
+
.select()
|
285 |
+
.single();
|
286 |
+
|
287 |
+
if (error) {
|
288 |
+
console.error('Error updating project:', error);
|
289 |
+
throw error;
|
290 |
+
}
|
291 |
+
|
292 |
+
if (!updatedData) {
|
293 |
+
throw new Error('No data returned from update');
|
294 |
+
}
|
295 |
+
|
296 |
+
// Dispatch a custom event to notify components about the project change
|
297 |
+
if (typeof window !== 'undefined') {
|
298 |
+
window.dispatchEvent(new CustomEvent('project-updated', {
|
299 |
+
detail: {
|
300 |
+
projectId,
|
301 |
+
updatedData: {
|
302 |
+
id: updatedData.project_id,
|
303 |
+
name: updatedData.name,
|
304 |
+
description: updatedData.description
|
305 |
+
}
|
306 |
+
}
|
307 |
+
}));
|
308 |
+
}
|
309 |
+
|
310 |
+
// Return formatted project data - use same mapping as getProject
|
311 |
+
return {
|
312 |
+
id: updatedData.project_id,
|
313 |
+
name: updatedData.name,
|
314 |
+
description: updatedData.description || '',
|
315 |
+
account_id: updatedData.account_id,
|
316 |
+
created_at: updatedData.created_at,
|
317 |
+
sandbox: updatedData.sandbox || { id: "", pass: "", vnc_preview: "", sandbox_url: "" }
|
318 |
+
};
|
319 |
+
};
|
320 |
+
|
321 |
+
export const deleteProject = async (projectId: string): Promise<void> => {
|
322 |
+
const supabase = createClient();
|
323 |
+
const { error } = await supabase
|
324 |
+
.from('projects')
|
325 |
+
.delete()
|
326 |
+
.eq('project_id', projectId);
|
327 |
+
|
328 |
+
if (error) throw error;
|
329 |
+
};
|
330 |
+
|
331 |
+
// Thread APIs
|
332 |
+
export const getThreads = async (projectId?: string): Promise<Thread[]> => {
|
333 |
+
const supabase = createClient();
|
334 |
+
|
335 |
+
// Get the current user's ID to filter threads
|
336 |
+
const { data: userData, error: userError } = await supabase.auth.getUser();
|
337 |
+
if (userError) {
|
338 |
+
console.error('Error getting current user:', userError);
|
339 |
+
return [];
|
340 |
+
}
|
341 |
+
|
342 |
+
// If no user is logged in, return an empty array
|
343 |
+
if (!userData.user) {
|
344 |
+
console.log('[API] No user logged in, returning empty threads array');
|
345 |
+
return [];
|
346 |
+
}
|
347 |
+
|
348 |
+
let query = supabase.from('threads').select('*');
|
349 |
+
|
350 |
+
// Always filter by the current user's account ID
|
351 |
+
query = query.eq('account_id', userData.user.id);
|
352 |
+
|
353 |
+
if (projectId) {
|
354 |
+
console.log('[API] Filtering threads by project_id:', projectId);
|
355 |
+
query = query.eq('project_id', projectId);
|
356 |
+
}
|
357 |
+
|
358 |
+
const { data, error } = await query;
|
359 |
+
|
360 |
+
if (error) {
|
361 |
+
console.error('[API] Error fetching threads:', error);
|
362 |
+
throw error;
|
363 |
+
}
|
364 |
+
|
365 |
+
console.log('[API] Raw threads from DB:', data?.length, data);
|
366 |
+
|
367 |
+
// Map database fields to ensure consistency with our Thread type
|
368 |
+
const mappedThreads: Thread[] = (data || []).map(thread => ({
|
369 |
+
thread_id: thread.thread_id,
|
370 |
+
account_id: thread.account_id,
|
371 |
+
project_id: thread.project_id,
|
372 |
+
created_at: thread.created_at,
|
373 |
+
updated_at: thread.updated_at
|
374 |
+
}));
|
375 |
+
|
376 |
+
return mappedThreads;
|
377 |
+
};
|
378 |
+
|
379 |
+
export const getThread = async (threadId: string): Promise<Thread> => {
|
380 |
+
const supabase = createClient();
|
381 |
+
const { data, error } = await supabase
|
382 |
+
.from('threads')
|
383 |
+
.select('*')
|
384 |
+
.eq('thread_id', threadId)
|
385 |
+
.single();
|
386 |
+
|
387 |
+
if (error) throw error;
|
388 |
+
|
389 |
+
return data;
|
390 |
+
};
|
391 |
+
|
392 |
+
export const createThread = async (projectId: string): Promise<Thread> => {
|
393 |
+
const supabase = createClient();
|
394 |
+
|
395 |
+
// If user is not logged in, redirect to login
|
396 |
+
const { data: { user } } = await supabase.auth.getUser();
|
397 |
+
if (!user) {
|
398 |
+
throw new Error('You must be logged in to create a thread');
|
399 |
+
}
|
400 |
+
|
401 |
+
const { data, error } = await supabase
|
402 |
+
.from('threads')
|
403 |
+
.insert({
|
404 |
+
project_id: projectId,
|
405 |
+
account_id: user.id, // Use the current user's ID as the account ID
|
406 |
+
})
|
407 |
+
.select()
|
408 |
+
.single();
|
409 |
+
|
410 |
+
if (error) throw error;
|
411 |
+
|
412 |
+
return data;
|
413 |
+
};
|
414 |
+
|
415 |
+
export const addUserMessage = async (threadId: string, content: string): Promise<void> => {
|
416 |
+
const supabase = createClient();
|
417 |
+
|
418 |
+
// Format the message in the format the LLM expects - keep it simple with only required fields
|
419 |
+
const message = {
|
420 |
+
role: 'user',
|
421 |
+
content: content
|
422 |
+
};
|
423 |
+
|
424 |
+
// Insert the message into the messages table
|
425 |
+
const { error } = await supabase
|
426 |
+
.from('messages')
|
427 |
+
.insert({
|
428 |
+
thread_id: threadId,
|
429 |
+
type: 'user',
|
430 |
+
is_llm_message: true,
|
431 |
+
content: JSON.stringify(message)
|
432 |
+
});
|
433 |
+
|
434 |
+
if (error) {
|
435 |
+
console.error('Error adding user message:', error);
|
436 |
+
throw new Error(`Error adding message: ${error.message}`);
|
437 |
+
}
|
438 |
+
};
|
439 |
+
|
440 |
+
export const getMessages = async (threadId: string): Promise<Message[]> => {
|
441 |
+
const supabase = createClient();
|
442 |
+
|
443 |
+
const { data, error } = await supabase
|
444 |
+
.from('messages')
|
445 |
+
.select('*')
|
446 |
+
.eq('thread_id', threadId)
|
447 |
+
.neq('type', 'cost')
|
448 |
+
.neq('type', 'summary')
|
449 |
+
.order('created_at', { ascending: true });
|
450 |
+
|
451 |
+
if (error) {
|
452 |
+
console.error('Error fetching messages:', error);
|
453 |
+
throw new Error(`Error getting messages: ${error.message}`);
|
454 |
+
}
|
455 |
+
|
456 |
+
console.log('[API] Messages fetched:', data);
|
457 |
+
|
458 |
+
return data || [];
|
459 |
+
};
|
460 |
+
|
461 |
+
// Agent APIs
|
462 |
+
export const startAgent = async (
|
463 |
+
threadId: string,
|
464 |
+
options?: {
|
465 |
+
model_name?: string;
|
466 |
+
enable_thinking?: boolean;
|
467 |
+
reasoning_effort?: string;
|
468 |
+
stream?: boolean;
|
469 |
+
}
|
470 |
+
): Promise<{ agent_run_id: string }> => {
|
471 |
+
try {
|
472 |
+
const supabase = createClient();
|
473 |
+
const { data: { session } } = await supabase.auth.getSession();
|
474 |
+
|
475 |
+
if (!session?.access_token) {
|
476 |
+
throw new Error('No access token available');
|
477 |
+
}
|
478 |
+
|
479 |
+
// Check if backend URL is configured
|
480 |
+
if (!API_URL) {
|
481 |
+
throw new Error('Backend URL is not configured. Set NEXT_PUBLIC_BACKEND_URL in your environment.');
|
482 |
+
}
|
483 |
+
|
484 |
+
console.log(`[API] Starting agent for thread ${threadId} using ${API_URL}/thread/${threadId}/agent/start`);
|
485 |
+
|
486 |
+
const response = await fetch(`${API_URL}/thread/${threadId}/agent/start`, {
|
487 |
+
method: 'POST',
|
488 |
+
headers: {
|
489 |
+
'Content-Type': 'application/json',
|
490 |
+
'Authorization': `Bearer ${session.access_token}`,
|
491 |
+
},
|
492 |
+
// Add cache: 'no-store' to prevent caching
|
493 |
+
cache: 'no-store',
|
494 |
+
// Add the body, stringifying the options or an empty object
|
495 |
+
body: JSON.stringify(options || {}),
|
496 |
+
});
|
497 |
+
|
498 |
+
if (!response.ok) {
|
499 |
+
// Check for 402 Payment Required first
|
500 |
+
if (response.status === 402) {
|
501 |
+
try {
|
502 |
+
const errorData = await response.json();
|
503 |
+
console.error(`[API] Billing error starting agent (402):`, errorData);
|
504 |
+
// Ensure detail exists and has a message property
|
505 |
+
const detail = errorData?.detail || { message: 'Payment Required' };
|
506 |
+
if (typeof detail.message !== 'string') {
|
507 |
+
detail.message = 'Payment Required'; // Default message if missing
|
508 |
+
}
|
509 |
+
throw new BillingError(response.status, detail);
|
510 |
+
} catch (parseError) {
|
511 |
+
// Handle cases where parsing fails or the structure isn't as expected
|
512 |
+
console.error('[API] Could not parse 402 error response body:', parseError);
|
513 |
+
throw new BillingError(response.status, { message: 'Payment Required' }, `Error starting agent: ${response.statusText} (402)`);
|
514 |
+
}
|
515 |
+
}
|
516 |
+
|
517 |
+
// Handle other errors
|
518 |
+
const errorText = await response.text().catch(() => 'No error details available');
|
519 |
+
console.error(`[API] Error starting agent: ${response.status} ${response.statusText}`, errorText);
|
520 |
+
throw new Error(`Error starting agent: ${response.statusText} (${response.status})`);
|
521 |
+
}
|
522 |
+
|
523 |
+
return response.json();
|
524 |
+
} catch (error) {
|
525 |
+
// Rethrow BillingError instances directly
|
526 |
+
if (error instanceof BillingError) {
|
527 |
+
throw error;
|
528 |
+
}
|
529 |
+
|
530 |
+
console.error('[API] Failed to start agent:', error);
|
531 |
+
|
532 |
+
// Provide clearer error message for network errors
|
533 |
+
if (error instanceof TypeError && error.message.includes('Failed to fetch')) {
|
534 |
+
throw new Error(`Cannot connect to backend server. Please check your internet connection and make sure the backend is running.`);
|
535 |
+
}
|
536 |
+
|
537 |
+
// Rethrow other caught errors
|
538 |
+
throw error;
|
539 |
+
}
|
540 |
+
};
|
541 |
+
|
542 |
+
export const stopAgent = async (agentRunId: string): Promise<void> => {
|
543 |
+
// Add to non-running set immediately to prevent reconnection attempts
|
544 |
+
nonRunningAgentRuns.add(agentRunId);
|
545 |
+
|
546 |
+
// Close any existing stream
|
547 |
+
const existingStream = activeStreams.get(agentRunId);
|
548 |
+
if (existingStream) {
|
549 |
+
console.log(`[API] Closing existing stream for ${agentRunId} before stopping agent`);
|
550 |
+
existingStream.close();
|
551 |
+
activeStreams.delete(agentRunId);
|
552 |
+
}
|
553 |
+
|
554 |
+
const supabase = createClient();
|
555 |
+
const { data: { session } } = await supabase.auth.getSession();
|
556 |
+
|
557 |
+
if (!session?.access_token) {
|
558 |
+
throw new Error('No access token available');
|
559 |
+
}
|
560 |
+
|
561 |
+
const response = await fetch(`${API_URL}/agent-run/${agentRunId}/stop`, {
|
562 |
+
method: 'POST',
|
563 |
+
headers: {
|
564 |
+
'Content-Type': 'application/json',
|
565 |
+
'Authorization': `Bearer ${session.access_token}`,
|
566 |
+
},
|
567 |
+
// Add cache: 'no-store' to prevent caching
|
568 |
+
cache: 'no-store',
|
569 |
+
});
|
570 |
+
|
571 |
+
if (!response.ok) {
|
572 |
+
throw new Error(`Error stopping agent: ${response.statusText}`);
|
573 |
+
}
|
574 |
+
};
|
575 |
+
|
576 |
+
export const getAgentStatus = async (agentRunId: string): Promise<AgentRun> => {
|
577 |
+
console.log(`[API] Requesting agent status for ${agentRunId}`);
|
578 |
+
|
579 |
+
// If we already know this agent is not running, throw an error
|
580 |
+
if (nonRunningAgentRuns.has(agentRunId)) {
|
581 |
+
console.log(`[API] Agent run ${agentRunId} is known to be non-running, returning error`);
|
582 |
+
throw new Error(`Agent run ${agentRunId} is not running`);
|
583 |
+
}
|
584 |
+
|
585 |
+
try {
|
586 |
+
const supabase = createClient();
|
587 |
+
const { data: { session } } = await supabase.auth.getSession();
|
588 |
+
|
589 |
+
if (!session?.access_token) {
|
590 |
+
console.error('[API] No access token available for getAgentStatus');
|
591 |
+
throw new Error('No access token available');
|
592 |
+
}
|
593 |
+
|
594 |
+
const url = `${API_URL}/agent-run/${agentRunId}`;
|
595 |
+
console.log(`[API] Fetching from: ${url}`);
|
596 |
+
|
597 |
+
const response = await fetch(url, {
|
598 |
+
headers: {
|
599 |
+
'Authorization': `Bearer ${session.access_token}`,
|
600 |
+
},
|
601 |
+
// Add cache: 'no-store' to prevent caching
|
602 |
+
cache: 'no-store',
|
603 |
+
});
|
604 |
+
|
605 |
+
if (!response.ok) {
|
606 |
+
const errorText = await response.text().catch(() => 'No error details available');
|
607 |
+
console.error(`[API] Error getting agent status: ${response.status} ${response.statusText}`, errorText);
|
608 |
+
|
609 |
+
// If we get a 404, add to non-running set
|
610 |
+
if (response.status === 404) {
|
611 |
+
nonRunningAgentRuns.add(agentRunId);
|
612 |
+
}
|
613 |
+
|
614 |
+
throw new Error(`Error getting agent status: ${response.statusText} (${response.status})`);
|
615 |
+
}
|
616 |
+
|
617 |
+
const data = await response.json();
|
618 |
+
console.log(`[API] Successfully got agent status:`, data);
|
619 |
+
|
620 |
+
// If agent is not running, add to non-running set
|
621 |
+
if (data.status !== 'running') {
|
622 |
+
nonRunningAgentRuns.add(agentRunId);
|
623 |
+
}
|
624 |
+
|
625 |
+
return data;
|
626 |
+
} catch (error) {
|
627 |
+
console.error('[API] Failed to get agent status:', error);
|
628 |
+
throw error;
|
629 |
+
}
|
630 |
+
};
|
631 |
+
|
632 |
+
export const getAgentRuns = async (threadId: string): Promise<AgentRun[]> => {
|
633 |
+
try {
|
634 |
+
const supabase = createClient();
|
635 |
+
const { data: { session } } = await supabase.auth.getSession();
|
636 |
+
|
637 |
+
if (!session?.access_token) {
|
638 |
+
throw new Error('No access token available');
|
639 |
+
}
|
640 |
+
|
641 |
+
const response = await fetch(`${API_URL}/thread/${threadId}/agent-runs`, {
|
642 |
+
headers: {
|
643 |
+
'Authorization': `Bearer ${session.access_token}`,
|
644 |
+
},
|
645 |
+
// Add cache: 'no-store' to prevent caching
|
646 |
+
cache: 'no-store',
|
647 |
+
});
|
648 |
+
|
649 |
+
if (!response.ok) {
|
650 |
+
throw new Error(`Error getting agent runs: ${response.statusText}`);
|
651 |
+
}
|
652 |
+
|
653 |
+
const data = await response.json();
|
654 |
+
return data.agent_runs || [];
|
655 |
+
} catch (error) {
|
656 |
+
console.error('Failed to get agent runs:', error);
|
657 |
+
throw error;
|
658 |
+
}
|
659 |
+
};
|
660 |
+
|
661 |
+
export const streamAgent = (agentRunId: string, callbacks: {
|
662 |
+
onMessage: (content: string) => void;
|
663 |
+
onError: (error: Error | string) => void;
|
664 |
+
onClose: () => void;
|
665 |
+
}): () => void => {
|
666 |
+
console.log(`[STREAM] streamAgent called for ${agentRunId}`);
|
667 |
+
|
668 |
+
// Check if this agent run is known to be non-running
|
669 |
+
if (nonRunningAgentRuns.has(agentRunId)) {
|
670 |
+
console.log(`[STREAM] Agent run ${agentRunId} is known to be non-running, not creating stream`);
|
671 |
+
// Notify the caller immediately
|
672 |
+
setTimeout(() => {
|
673 |
+
callbacks.onError(`Agent run ${agentRunId} is not running`);
|
674 |
+
callbacks.onClose();
|
675 |
+
}, 0);
|
676 |
+
|
677 |
+
// Return a no-op cleanup function
|
678 |
+
return () => {};
|
679 |
+
}
|
680 |
+
|
681 |
+
// Check if there's already an active stream for this agent run
|
682 |
+
const existingStream = activeStreams.get(agentRunId);
|
683 |
+
if (existingStream) {
|
684 |
+
console.log(`[STREAM] Stream already exists for ${agentRunId}, closing it first`);
|
685 |
+
existingStream.close();
|
686 |
+
activeStreams.delete(agentRunId);
|
687 |
+
}
|
688 |
+
|
689 |
+
// Set up a new stream
|
690 |
+
try {
|
691 |
+
const setupStream = async () => {
|
692 |
+
// First verify the agent is actually running
|
693 |
+
try {
|
694 |
+
const status = await getAgentStatus(agentRunId);
|
695 |
+
if (status.status !== 'running') {
|
696 |
+
console.log(`[STREAM] Agent run ${agentRunId} is not running (status: ${status.status}), not creating stream`);
|
697 |
+
nonRunningAgentRuns.add(agentRunId);
|
698 |
+
callbacks.onError(`Agent run ${agentRunId} is not running (status: ${status.status})`);
|
699 |
+
callbacks.onClose();
|
700 |
+
return;
|
701 |
+
}
|
702 |
+
} catch (err) {
|
703 |
+
console.error(`[STREAM] Error verifying agent run ${agentRunId}:`, err);
|
704 |
+
|
705 |
+
// Check if this is a "not found" error
|
706 |
+
const errorMessage = err instanceof Error ? err.message : String(err);
|
707 |
+
const isNotFoundError = errorMessage.includes('not found') ||
|
708 |
+
errorMessage.includes('404') ||
|
709 |
+
errorMessage.includes('does not exist');
|
710 |
+
|
711 |
+
if (isNotFoundError) {
|
712 |
+
console.log(`[STREAM] Agent run ${agentRunId} not found, not creating stream`);
|
713 |
+
nonRunningAgentRuns.add(agentRunId);
|
714 |
+
}
|
715 |
+
|
716 |
+
callbacks.onError(errorMessage);
|
717 |
+
callbacks.onClose();
|
718 |
+
return;
|
719 |
+
}
|
720 |
+
|
721 |
+
const supabase = createClient();
|
722 |
+
const { data: { session } } = await supabase.auth.getSession();
|
723 |
+
|
724 |
+
if (!session?.access_token) {
|
725 |
+
console.error('[STREAM] No auth token available');
|
726 |
+
callbacks.onError(new Error('Authentication required'));
|
727 |
+
callbacks.onClose();
|
728 |
+
return;
|
729 |
+
}
|
730 |
+
|
731 |
+
const url = new URL(`${API_URL}/agent-run/${agentRunId}/stream`);
|
732 |
+
url.searchParams.append('token', session.access_token);
|
733 |
+
|
734 |
+
console.log(`[STREAM] Creating EventSource for ${agentRunId}`);
|
735 |
+
const eventSource = new EventSource(url.toString());
|
736 |
+
|
737 |
+
// Store the EventSource in the active streams map
|
738 |
+
activeStreams.set(agentRunId, eventSource);
|
739 |
+
|
740 |
+
eventSource.onopen = () => {
|
741 |
+
console.log(`[STREAM] Connection opened for ${agentRunId}`);
|
742 |
+
};
|
743 |
+
|
744 |
+
eventSource.onmessage = (event) => {
|
745 |
+
try {
|
746 |
+
const rawData = event.data;
|
747 |
+
if (rawData.includes('"type":"ping"')) return;
|
748 |
+
|
749 |
+
// Log raw data for debugging (truncated for readability)
|
750 |
+
console.log(`[STREAM] Received data for ${agentRunId}: ${rawData.substring(0, 100)}${rawData.length > 100 ? '...' : ''}`);
|
751 |
+
|
752 |
+
// Skip empty messages
|
753 |
+
if (!rawData || rawData.trim() === '') {
|
754 |
+
console.debug('[STREAM] Received empty message, skipping');
|
755 |
+
return;
|
756 |
+
}
|
757 |
+
|
758 |
+
// Check for "Agent run not found" error
|
759 |
+
if (rawData.includes('Agent run') && rawData.includes('not found in active runs')) {
|
760 |
+
console.log(`[STREAM] Agent run ${agentRunId} not found in active runs, closing stream`);
|
761 |
+
|
762 |
+
// Add to non-running set to prevent future reconnection attempts
|
763 |
+
nonRunningAgentRuns.add(agentRunId);
|
764 |
+
|
765 |
+
// Notify about the error
|
766 |
+
callbacks.onError("Agent run not found in active runs");
|
767 |
+
|
768 |
+
// Clean up
|
769 |
+
eventSource.close();
|
770 |
+
activeStreams.delete(agentRunId);
|
771 |
+
callbacks.onClose();
|
772 |
+
|
773 |
+
return;
|
774 |
+
}
|
775 |
+
|
776 |
+
// Check for completion messages
|
777 |
+
if (rawData.includes('"type":"status"') && rawData.includes('"status":"completed"')) {
|
778 |
+
console.log(`[STREAM] Detected completion status message for ${agentRunId}`);
|
779 |
+
|
780 |
+
// Check for specific completion messages that indicate we should stop checking
|
781 |
+
if (rawData.includes('Run data not available for streaming') ||
|
782 |
+
rawData.includes('Stream ended with status: completed')) {
|
783 |
+
console.log(`[STREAM] Detected final completion message for ${agentRunId}, adding to non-running set`);
|
784 |
+
// Add to non-running set to prevent future reconnection attempts
|
785 |
+
nonRunningAgentRuns.add(agentRunId);
|
786 |
+
}
|
787 |
+
|
788 |
+
// Notify about the message
|
789 |
+
callbacks.onMessage(rawData);
|
790 |
+
|
791 |
+
// Clean up
|
792 |
+
eventSource.close();
|
793 |
+
activeStreams.delete(agentRunId);
|
794 |
+
callbacks.onClose();
|
795 |
+
|
796 |
+
return;
|
797 |
+
}
|
798 |
+
|
799 |
+
// Check for thread run end message
|
800 |
+
if (rawData.includes('"type":"status"') && rawData.includes('"status_type":"thread_run_end"')) {
|
801 |
+
console.log(`[STREAM] Detected thread run end message for ${agentRunId}`);
|
802 |
+
|
803 |
+
// Add to non-running set
|
804 |
+
nonRunningAgentRuns.add(agentRunId);
|
805 |
+
|
806 |
+
// Notify about the message
|
807 |
+
callbacks.onMessage(rawData);
|
808 |
+
|
809 |
+
// Clean up
|
810 |
+
eventSource.close();
|
811 |
+
activeStreams.delete(agentRunId);
|
812 |
+
callbacks.onClose();
|
813 |
+
|
814 |
+
return;
|
815 |
+
}
|
816 |
+
|
817 |
+
// For all other messages, just pass them through
|
818 |
+
callbacks.onMessage(rawData);
|
819 |
+
|
820 |
+
} catch (error) {
|
821 |
+
console.error(`[STREAM] Error handling message:`, error);
|
822 |
+
callbacks.onError(error instanceof Error ? error : String(error));
|
823 |
+
}
|
824 |
+
};
|
825 |
+
|
826 |
+
eventSource.onerror = (event) => {
|
827 |
+
console.log(`[STREAM] EventSource error for ${agentRunId}:`, event);
|
828 |
+
|
829 |
+
// Check if the agent is still running
|
830 |
+
getAgentStatus(agentRunId)
|
831 |
+
.then(status => {
|
832 |
+
if (status.status !== 'running') {
|
833 |
+
console.log(`[STREAM] Agent run ${agentRunId} is not running after error, closing stream`);
|
834 |
+
nonRunningAgentRuns.add(agentRunId);
|
835 |
+
eventSource.close();
|
836 |
+
activeStreams.delete(agentRunId);
|
837 |
+
callbacks.onClose();
|
838 |
+
} else {
|
839 |
+
console.log(`[STREAM] Agent run ${agentRunId} is still running after error, keeping stream open`);
|
840 |
+
// Let the browser handle reconnection for non-fatal errors
|
841 |
+
}
|
842 |
+
})
|
843 |
+
.catch(err => {
|
844 |
+
console.error(`[STREAM] Error checking agent status after stream error:`, err);
|
845 |
+
|
846 |
+
// Check if this is a "not found" error
|
847 |
+
const errMsg = err instanceof Error ? err.message : String(err);
|
848 |
+
const isNotFoundErr = errMsg.includes('not found') ||
|
849 |
+
errMsg.includes('404') ||
|
850 |
+
errMsg.includes('does not exist');
|
851 |
+
|
852 |
+
if (isNotFoundErr) {
|
853 |
+
console.log(`[STREAM] Agent run ${agentRunId} not found after error, closing stream`);
|
854 |
+
nonRunningAgentRuns.add(agentRunId);
|
855 |
+
eventSource.close();
|
856 |
+
activeStreams.delete(agentRunId);
|
857 |
+
callbacks.onClose();
|
858 |
+
}
|
859 |
+
|
860 |
+
// For other errors, notify but don't close the stream
|
861 |
+
callbacks.onError(errMsg);
|
862 |
+
});
|
863 |
+
};
|
864 |
+
};
|
865 |
+
|
866 |
+
// Start the stream setup
|
867 |
+
setupStream();
|
868 |
+
|
869 |
+
// Return a cleanup function
|
870 |
+
return () => {
|
871 |
+
console.log(`[STREAM] Cleanup called for ${agentRunId}`);
|
872 |
+
const stream = activeStreams.get(agentRunId);
|
873 |
+
if (stream) {
|
874 |
+
console.log(`[STREAM] Closing stream for ${agentRunId}`);
|
875 |
+
stream.close();
|
876 |
+
activeStreams.delete(agentRunId);
|
877 |
+
}
|
878 |
+
};
|
879 |
+
} catch (error) {
|
880 |
+
console.error(`[STREAM] Error setting up stream for ${agentRunId}:`, error);
|
881 |
+
callbacks.onError(error instanceof Error ? error : String(error));
|
882 |
+
callbacks.onClose();
|
883 |
+
return () => {};
|
884 |
+
}
|
885 |
+
};
|
886 |
+
|
887 |
+
// Sandbox API Functions
|
888 |
+
export const createSandboxFile = async (sandboxId: string, filePath: string, content: string): Promise<void> => {
|
889 |
+
try {
|
890 |
+
const supabase = createClient();
|
891 |
+
const { data: { session } } = await supabase.auth.getSession();
|
892 |
+
|
893 |
+
// Use FormData to handle both text and binary content more reliably
|
894 |
+
const formData = new FormData();
|
895 |
+
formData.append('path', filePath);
|
896 |
+
|
897 |
+
// Create a Blob from the content string and append as a file
|
898 |
+
const blob = new Blob([content], { type: 'application/octet-stream' });
|
899 |
+
formData.append('file', blob, filePath.split('/').pop() || 'file');
|
900 |
+
|
901 |
+
const headers: Record<string, string> = {};
|
902 |
+
if (session?.access_token) {
|
903 |
+
headers['Authorization'] = `Bearer ${session.access_token}`;
|
904 |
+
}
|
905 |
+
|
906 |
+
const response = await fetch(`${API_URL}/sandboxes/${sandboxId}/files`, {
|
907 |
+
method: 'POST',
|
908 |
+
headers,
|
909 |
+
body: formData,
|
910 |
+
});
|
911 |
+
|
912 |
+
if (!response.ok) {
|
913 |
+
const errorText = await response.text().catch(() => 'No error details available');
|
914 |
+
console.error(`Error creating sandbox file: ${response.status} ${response.statusText}`, errorText);
|
915 |
+
throw new Error(`Error creating sandbox file: ${response.statusText} (${response.status})`);
|
916 |
+
}
|
917 |
+
|
918 |
+
return response.json();
|
919 |
+
} catch (error) {
|
920 |
+
console.error('Failed to create sandbox file:', error);
|
921 |
+
throw error;
|
922 |
+
}
|
923 |
+
};
|
924 |
+
|
925 |
+
// Fallback method for legacy support using JSON
|
926 |
+
export const createSandboxFileJson = async (sandboxId: string, filePath: string, content: string): Promise<void> => {
|
927 |
+
try {
|
928 |
+
const supabase = createClient();
|
929 |
+
const { data: { session } } = await supabase.auth.getSession();
|
930 |
+
|
931 |
+
const headers: Record<string, string> = {
|
932 |
+
'Content-Type': 'application/json'
|
933 |
+
};
|
934 |
+
|
935 |
+
if (session?.access_token) {
|
936 |
+
headers['Authorization'] = `Bearer ${session.access_token}`;
|
937 |
+
}
|
938 |
+
|
939 |
+
const response = await fetch(`${API_URL}/sandboxes/${sandboxId}/files/json`, {
|
940 |
+
method: 'POST',
|
941 |
+
headers,
|
942 |
+
body: JSON.stringify({
|
943 |
+
path: filePath,
|
944 |
+
content: content
|
945 |
+
}),
|
946 |
+
});
|
947 |
+
|
948 |
+
if (!response.ok) {
|
949 |
+
const errorText = await response.text().catch(() => 'No error details available');
|
950 |
+
console.error(`Error creating sandbox file (JSON): ${response.status} ${response.statusText}`, errorText);
|
951 |
+
throw new Error(`Error creating sandbox file: ${response.statusText} (${response.status})`);
|
952 |
+
}
|
953 |
+
|
954 |
+
return response.json();
|
955 |
+
} catch (error) {
|
956 |
+
console.error('Failed to create sandbox file with JSON:', error);
|
957 |
+
throw error;
|
958 |
+
}
|
959 |
+
};
|
960 |
+
|
961 |
+
export const listSandboxFiles = async (sandboxId: string, path: string): Promise<FileInfo[]> => {
|
962 |
+
try {
|
963 |
+
const supabase = createClient();
|
964 |
+
const { data: { session } } = await supabase.auth.getSession();
|
965 |
+
|
966 |
+
const url = new URL(`${API_URL}/sandboxes/${sandboxId}/files`);
|
967 |
+
url.searchParams.append('path', path);
|
968 |
+
|
969 |
+
const headers: Record<string, string> = {};
|
970 |
+
if (session?.access_token) {
|
971 |
+
headers['Authorization'] = `Bearer ${session.access_token}`;
|
972 |
+
}
|
973 |
+
|
974 |
+
const response = await fetch(url.toString(), {
|
975 |
+
headers,
|
976 |
+
});
|
977 |
+
|
978 |
+
if (!response.ok) {
|
979 |
+
const errorText = await response.text().catch(() => 'No error details available');
|
980 |
+
console.error(`Error listing sandbox files: ${response.status} ${response.statusText}`, errorText);
|
981 |
+
throw new Error(`Error listing sandbox files: ${response.statusText} (${response.status})`);
|
982 |
+
}
|
983 |
+
|
984 |
+
const data = await response.json();
|
985 |
+
return data.files || [];
|
986 |
+
} catch (error) {
|
987 |
+
console.error('Failed to list sandbox files:', error);
|
988 |
+
throw error;
|
989 |
+
}
|
990 |
+
};
|
991 |
+
|
992 |
+
export const getSandboxFileContent = async (sandboxId: string, path: string): Promise<string | Blob> => {
|
993 |
+
try {
|
994 |
+
const supabase = createClient();
|
995 |
+
const { data: { session } } = await supabase.auth.getSession();
|
996 |
+
|
997 |
+
const url = new URL(`${API_URL}/sandboxes/${sandboxId}/files/content`);
|
998 |
+
url.searchParams.append('path', path);
|
999 |
+
|
1000 |
+
const headers: Record<string, string> = {};
|
1001 |
+
if (session?.access_token) {
|
1002 |
+
headers['Authorization'] = `Bearer ${session.access_token}`;
|
1003 |
+
}
|
1004 |
+
|
1005 |
+
const response = await fetch(url.toString(), {
|
1006 |
+
headers,
|
1007 |
+
});
|
1008 |
+
|
1009 |
+
if (!response.ok) {
|
1010 |
+
const errorText = await response.text().catch(() => 'No error details available');
|
1011 |
+
console.error(`Error getting sandbox file content: ${response.status} ${response.statusText}`, errorText);
|
1012 |
+
throw new Error(`Error getting sandbox file content: ${response.statusText} (${response.status})`);
|
1013 |
+
}
|
1014 |
+
|
1015 |
+
// Check if it's a text file or binary file based on content-type
|
1016 |
+
const contentType = response.headers.get('content-type');
|
1017 |
+
if (contentType && (contentType.includes('text') || contentType.includes('application/json'))) {
|
1018 |
+
return await response.text();
|
1019 |
+
} else {
|
1020 |
+
return await response.blob();
|
1021 |
+
}
|
1022 |
+
} catch (error) {
|
1023 |
+
console.error('Failed to get sandbox file content:', error);
|
1024 |
+
throw error;
|
1025 |
+
}
|
1026 |
+
};
|
1027 |
+
|
1028 |
+
export const updateThread = async (threadId: string, data: Partial<Thread>): Promise<Thread> => {
|
1029 |
+
const supabase = createClient();
|
1030 |
+
|
1031 |
+
// Format the data for update
|
1032 |
+
const updateData = { ...data };
|
1033 |
+
|
1034 |
+
// Update the thread
|
1035 |
+
const { data: updatedThread, error } = await supabase
|
1036 |
+
.from('threads')
|
1037 |
+
.update(updateData)
|
1038 |
+
.eq('thread_id', threadId)
|
1039 |
+
.select()
|
1040 |
+
.single();
|
1041 |
+
|
1042 |
+
if (error) {
|
1043 |
+
console.error('Error updating thread:', error);
|
1044 |
+
throw new Error(`Error updating thread: ${error.message}`);
|
1045 |
+
}
|
1046 |
+
|
1047 |
+
return updatedThread;
|
1048 |
+
};
|
1049 |
+
|
1050 |
+
export const toggleThreadPublicStatus = async (threadId: string, isPublic: boolean): Promise<Thread> => {
|
1051 |
+
return updateThread(threadId, { is_public: isPublic });
|
1052 |
+
};
|
1053 |
+
|
1054 |
+
// Function to get public projects
|
1055 |
+
export const getPublicProjects = async (): Promise<Project[]> => {
|
1056 |
+
try {
|
1057 |
+
const supabase = createClient();
|
1058 |
+
|
1059 |
+
// Query for threads that are marked as public
|
1060 |
+
const { data: publicThreads, error: threadsError } = await supabase
|
1061 |
+
.from('threads')
|
1062 |
+
.select('project_id')
|
1063 |
+
.eq('is_public', true);
|
1064 |
+
|
1065 |
+
if (threadsError) {
|
1066 |
+
console.error('Error fetching public threads:', threadsError);
|
1067 |
+
return [];
|
1068 |
+
}
|
1069 |
+
|
1070 |
+
// If no public threads found, return empty array
|
1071 |
+
if (!publicThreads?.length) {
|
1072 |
+
return [];
|
1073 |
+
}
|
1074 |
+
|
1075 |
+
// Extract unique project IDs from public threads
|
1076 |
+
const publicProjectIds = [...new Set(publicThreads.map(thread => thread.project_id))].filter(Boolean);
|
1077 |
+
|
1078 |
+
// If no valid project IDs, return empty array
|
1079 |
+
if (!publicProjectIds.length) {
|
1080 |
+
return [];
|
1081 |
+
}
|
1082 |
+
|
1083 |
+
// Get the projects that have public threads
|
1084 |
+
const { data: projects, error: projectsError } = await supabase
|
1085 |
+
.from('projects')
|
1086 |
+
.select('*')
|
1087 |
+
.in('project_id', publicProjectIds);
|
1088 |
+
|
1089 |
+
if (projectsError) {
|
1090 |
+
console.error('Error fetching public projects:', projectsError);
|
1091 |
+
return [];
|
1092 |
+
}
|
1093 |
+
|
1094 |
+
console.log('[API] Raw public projects from DB:', projects?.length, projects);
|
1095 |
+
|
1096 |
+
// Map database fields to our Project type
|
1097 |
+
const mappedProjects: Project[] = (projects || []).map(project => ({
|
1098 |
+
id: project.project_id,
|
1099 |
+
name: project.name || '',
|
1100 |
+
description: project.description || '',
|
1101 |
+
account_id: project.account_id,
|
1102 |
+
created_at: project.created_at,
|
1103 |
+
updated_at: project.updated_at,
|
1104 |
+
sandbox: project.sandbox || { id: "", pass: "", vnc_preview: "", sandbox_url: "" },
|
1105 |
+
is_public: true // Mark these as public projects
|
1106 |
+
}));
|
1107 |
+
|
1108 |
+
console.log('[API] Mapped public projects for frontend:', mappedProjects.length);
|
1109 |
+
|
1110 |
+
return mappedProjects;
|
1111 |
+
} catch (err) {
|
1112 |
+
console.error('Error fetching public projects:', err);
|
1113 |
+
return [];
|
1114 |
+
}
|
1115 |
+
};
|
1116 |
+
|
1117 |
+
export const initiateAgent = async (formData: FormData): Promise<InitiateAgentResponse> => {
|
1118 |
+
try {
|
1119 |
+
const supabase = createClient();
|
1120 |
+
const { data: { session } } = await supabase.auth.getSession();
|
1121 |
+
|
1122 |
+
if (!session?.access_token) {
|
1123 |
+
throw new Error('No access token available');
|
1124 |
+
}
|
1125 |
+
|
1126 |
+
// Check if backend URL is configured
|
1127 |
+
if (!API_URL) {
|
1128 |
+
throw new Error('Backend URL is not configured. Set NEXT_PUBLIC_BACKEND_URL in your environment.');
|
1129 |
+
}
|
1130 |
+
|
1131 |
+
console.log(`[API] Initiating agent with files using ${API_URL}/agent/initiate`);
|
1132 |
+
|
1133 |
+
const response = await fetch(`${API_URL}/agent/initiate`, {
|
1134 |
+
method: 'POST',
|
1135 |
+
headers: {
|
1136 |
+
// Note: Don't set Content-Type for FormData
|
1137 |
+
'Authorization': `Bearer ${session.access_token}`,
|
1138 |
+
},
|
1139 |
+
body: formData,
|
1140 |
+
// Add cache: 'no-store' to prevent caching
|
1141 |
+
cache: 'no-store',
|
1142 |
+
});
|
1143 |
+
|
1144 |
+
if (!response.ok) {
|
1145 |
+
const errorText = await response.text().catch(() => 'No error details available');
|
1146 |
+
console.error(`[API] Error initiating agent: ${response.status} ${response.statusText}`, errorText);
|
1147 |
+
throw new Error(`Error initiating agent: ${response.statusText} (${response.status})`);
|
1148 |
+
}
|
1149 |
+
|
1150 |
+
return response.json();
|
1151 |
+
} catch (error) {
|
1152 |
+
console.error('[API] Failed to initiate agent:', error);
|
1153 |
+
|
1154 |
+
// Provide clearer error message for network errors
|
1155 |
+
if (error instanceof TypeError && error.message.includes('Failed to fetch')) {
|
1156 |
+
throw new Error(`Cannot connect to backend server. Please check your internet connection and make sure the backend is running.`);
|
1157 |
+
}
|
1158 |
+
|
1159 |
+
throw error;
|
1160 |
+
}
|
1161 |
+
};
|
1162 |
+
|
1163 |
+
export const checkApiHealth = async (): Promise<HealthCheckResponse> => {
|
1164 |
+
try {
|
1165 |
+
const response = await fetch(`${API_URL}/health`, {
|
1166 |
+
cache: 'no-store',
|
1167 |
+
});
|
1168 |
+
|
1169 |
+
if (!response.ok) {
|
1170 |
+
throw new Error(`API health check failed: ${response.statusText}`);
|
1171 |
+
}
|
1172 |
+
|
1173 |
+
return response.json();
|
1174 |
+
} catch (error) {
|
1175 |
+
console.error('API health check failed:', error);
|
1176 |
+
throw error;
|
1177 |
+
}
|
1178 |
+
};
|
1179 |
+
|
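For readers skimming the diff, here is a minimal sketch of how a caller is expected to consume `streamAgent` and `stopAgent` from the api.ts changes above: `streamAgent` returns a cleanup callback, so a React-style effect can open the stream on mount and close it on unmount. The component name, state handling, and the `@/lib/api` import path are illustrative, not part of the upload.

```tsx
// Hypothetical consumer of streamAgent/stopAgent (names and import path are illustrative).
import { useEffect, useState } from 'react';
import { streamAgent, stopAgent } from '@/lib/api';

export function AgentOutput({ agentRunId }: { agentRunId: string }) {
  const [lines, setLines] = useState<string[]>([]);

  useEffect(() => {
    // streamAgent returns a cleanup function that closes the underlying EventSource.
    const cleanup = streamAgent(agentRunId, {
      onMessage: (content) => setLines((prev) => [...prev, content]),
      onError: (err) => console.error('stream error', err),
      onClose: () => console.log('stream closed'),
    });
    return cleanup;
  }, [agentRunId]);

  // Stopping the run also closes any active stream registered for it.
  return (
    <div>
      <button onClick={() => stopAgent(agentRunId)}>Stop</button>
      <pre>{lines.join('\n')}</pre>
    </div>
  );
}
```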
architecture_diagram.svg
ADDED
|
auth_utils.py
ADDED
@@ -0,0 +1,177 @@
from fastapi import HTTPException, Request, Depends
from typing import Optional, List, Dict, Any
import jwt
from jwt.exceptions import PyJWTError
from utils.logger import logger

# This function extracts the user ID from Supabase JWT
async def get_current_user_id(request: Request) -> str:
    """
    Extract and verify the user ID from the JWT in the Authorization header.

    This function is used as a dependency in FastAPI routes to ensure the user
    is authenticated and to provide the user ID for authorization checks.

    Args:
        request: The FastAPI request object

    Returns:
        str: The user ID extracted from the JWT

    Raises:
        HTTPException: If no valid token is found or if the token is invalid
    """
    auth_header = request.headers.get('Authorization')

    if not auth_header or not auth_header.startswith('Bearer '):
        raise HTTPException(
            status_code=401,
            detail="No valid authentication credentials found",
            headers={"WWW-Authenticate": "Bearer"}
        )

    token = auth_header.split(' ')[1]

    try:
        # For Supabase JWT, we just need to decode and extract the user ID
        # The actual validation is handled by Supabase's RLS
        payload = jwt.decode(token, options={"verify_signature": False})

        # Supabase stores the user ID in the 'sub' claim
        user_id = payload.get('sub')

        if not user_id:
            raise HTTPException(
                status_code=401,
                detail="Invalid token payload",
                headers={"WWW-Authenticate": "Bearer"}
            )

        return user_id

    except PyJWTError:
        raise HTTPException(
            status_code=401,
            detail="Invalid token",
            headers={"WWW-Authenticate": "Bearer"}
        )

async def get_user_id_from_stream_auth(
    request: Request,
    token: Optional[str] = None
) -> str:
    """
    Extract and verify the user ID from either the Authorization header or query parameter token.
    This function is specifically designed for streaming endpoints that need to support both
    header-based and query parameter-based authentication (for EventSource compatibility).

    Args:
        request: The FastAPI request object
        token: Optional token from query parameters

    Returns:
        str: The user ID extracted from the JWT

    Raises:
        HTTPException: If no valid token is found or if the token is invalid
    """
    # Try to get user_id from token in query param (for EventSource which can't set headers)
    if token:
        try:
            # For Supabase JWT, we just need to decode and extract the user ID
            payload = jwt.decode(token, options={"verify_signature": False})
            user_id = payload.get('sub')
            if user_id:
                return user_id
        except Exception:
            pass

    # If no valid token in query param, try to get it from the Authorization header
    auth_header = request.headers.get('Authorization')
    if auth_header and auth_header.startswith('Bearer '):
        try:
            # Extract token from header
            header_token = auth_header.split(' ')[1]
            payload = jwt.decode(header_token, options={"verify_signature": False})
            user_id = payload.get('sub')
            if user_id:
                return user_id
        except Exception:
            pass

    # If we still don't have a user_id, return authentication error
    raise HTTPException(
        status_code=401,
        detail="No valid authentication credentials found",
        headers={"WWW-Authenticate": "Bearer"}
    )

async def verify_thread_access(client, thread_id: str, user_id: str):
    """
    Verify that a user has access to a specific thread based on account membership.

    Args:
        client: The Supabase client
        thread_id: The thread ID to check access for
        user_id: The user ID to check permissions for

    Returns:
        bool: True if the user has access

    Raises:
        HTTPException: If the user doesn't have access to the thread
    """
    # Query the thread to get account information
    thread_result = await client.table('threads').select('*,project_id').eq('thread_id', thread_id).execute()

    if not thread_result.data or len(thread_result.data) == 0:
        raise HTTPException(status_code=404, detail="Thread not found")

    thread_data = thread_result.data[0]

    # Check if project is public
    project_id = thread_data.get('project_id')
    if project_id:
        project_result = await client.table('projects').select('is_public').eq('project_id', project_id).execute()
        if project_result.data and len(project_result.data) > 0:
            if project_result.data[0].get('is_public'):
                return True

    account_id = thread_data.get('account_id')
    # When using service role, we need to manually check account membership instead of using current_user_account_role
    if account_id:
        account_user_result = await client.schema('basejump').from_('account_user').select('account_role').eq('user_id', user_id).eq('account_id', account_id).execute()
        if account_user_result.data and len(account_user_result.data) > 0:
            return True
    raise HTTPException(status_code=403, detail="Not authorized to access this thread")

async def get_optional_user_id(request: Request) -> Optional[str]:
    """
    Extract the user ID from the JWT in the Authorization header if present,
    but don't require authentication. Returns None if no valid token is found.

    This function is used for endpoints that support both authenticated and
    unauthenticated access (like public projects).

    Args:
        request: The FastAPI request object

    Returns:
        Optional[str]: The user ID extracted from the JWT, or None if no valid token
    """
    auth_header = request.headers.get('Authorization')

    if not auth_header or not auth_header.startswith('Bearer '):
        return None

    token = auth_header.split(' ')[1]

    try:
        # For Supabase JWT, we just need to decode and extract the user ID
        payload = jwt.decode(token, options={"verify_signature": False})

        # Supabase stores the user ID in the 'sub' claim
        user_id = payload.get('sub')

        return user_id
    except PyJWTError:
        return None
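auth_utils.py accepts the Supabase JWT either from the `Authorization` header or from a `token` query parameter; the query-parameter path exists because `EventSource` cannot set headers. A small TypeScript sketch of the two call styles, mirroring what api.ts above already does; the `API_URL`, `accessToken`, and `agentRunId` declarations stand in for values the real app already has.

```ts
// Sketch only: the two ways a client presents the Supabase JWT to these endpoints.
declare const API_URL: string;
declare const accessToken: string;
declare const agentRunId: string;

// Regular REST call: the token travels in the Authorization header
// and is read by get_current_user_id.
await fetch(`${API_URL}/agent-run/${agentRunId}`, {
  headers: { Authorization: `Bearer ${accessToken}` },
});

// Streaming call: EventSource cannot set headers, so the token is passed as a
// query parameter and picked up by get_user_id_from_stream_auth.
const url = new URL(`${API_URL}/agent-run/${agentRunId}/stream`);
url.searchParams.append('token', accessToken);
const events = new EventSource(url.toString());
```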
avatar.tsx
ADDED
@@ -0,0 +1,53 @@
"use client"

import * as React from "react"
import * as AvatarPrimitive from "@radix-ui/react-avatar"

import { cn } from "@/lib/utils"

function Avatar({
  className,
  ...props
}: React.ComponentProps<typeof AvatarPrimitive.Root>) {
  return (
    <AvatarPrimitive.Root
      data-slot="avatar"
      className={cn(
        "relative flex size-8 shrink-0 overflow-hidden rounded-full",
        className
      )}
      {...props}
    />
  )
}

function AvatarImage({
  className,
  ...props
}: React.ComponentProps<typeof AvatarPrimitive.Image>) {
  return (
    <AvatarPrimitive.Image
      data-slot="avatar-image"
      className={cn("aspect-square size-full", className)}
      {...props}
    />
  )
}

function AvatarFallback({
  className,
  ...props
}: React.ComponentProps<typeof AvatarPrimitive.Fallback>) {
  return (
    <AvatarPrimitive.Fallback
      data-slot="avatar-fallback"
      className={cn(
        "bg-muted flex size-full items-center justify-center rounded-full",
        className
      )}
      {...props}
    />
  )
}

export { Avatar, AvatarImage, AvatarFallback }
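A short usage sketch for the three exports; the composition follows the Radix pattern the file wraps. The `@/components/ui/avatar` import path, image URL, and fallback text are placeholders.

```tsx
import { Avatar, AvatarImage, AvatarFallback } from "@/components/ui/avatar"

// The fallback renders while the image loads or if it fails to load.
export function UserBadge() {
  return (
    <Avatar>
      <AvatarImage src="https://example.com/avatar.png" alt="User avatar" />
      <AvatarFallback>AB</AvatarFallback>
    </Avatar>
  )
}
```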
badge.tsx
ADDED
@@ -0,0 +1,46 @@
import * as React from "react"
import { Slot } from "@radix-ui/react-slot"
import { cva, type VariantProps } from "class-variance-authority"

import { cn } from "@/lib/utils"

const badgeVariants = cva(
  "inline-flex items-center justify-center rounded-md border px-2 py-0.5 text-xs font-medium w-fit whitespace-nowrap shrink-0 [&>svg]:size-3 gap-1 [&>svg]:pointer-events-none focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive transition-[color,box-shadow] overflow-hidden",
  {
    variants: {
      variant: {
        default:
          "border-transparent bg-primary text-primary-foreground [a&]:hover:bg-primary/90",
        secondary:
          "border-transparent bg-secondary text-secondary-foreground [a&]:hover:bg-secondary/90",
        destructive:
          "border-transparent bg-destructive text-white [a&]:hover:bg-destructive/90 focus-visible:ring-destructive/20 dark:focus-visible:ring-destructive/40 dark:bg-destructive/60",
        outline:
          "text-foreground [a&]:hover:bg-accent [a&]:hover:text-accent-foreground",
      },
    },
    defaultVariants: {
      variant: "default",
    },
  }
)

function Badge({
  className,
  variant,
  asChild = false,
  ...props
}: React.ComponentProps<"span"> &
  VariantProps<typeof badgeVariants> & { asChild?: boolean }) {
  const Comp = asChild ? Slot : "span"

  return (
    <Comp
      data-slot="badge"
      className={cn(badgeVariants({ variant }), className)}
      {...props}
    />
  )
}

export { Badge, badgeVariants }
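The component exposes the four variants defined in `badgeVariants` (default, secondary, destructive, outline). A quick sketch; the import path follows the same `@/components/ui` convention assumed above, and the status-to-variant mapping is illustrative.

```tsx
import { Badge } from "@/components/ui/badge"

export function RunStatus({ status }: { status: "running" | "failed" | "stopped" }) {
  // Map a run status onto one of the variants defined in badgeVariants.
  const variant =
    status === "failed" ? "destructive" : status === "stopped" ? "secondary" : "default"
  return <Badge variant={variant}>{status}</Badge>
}
```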
banner.png
ADDED
Git LFS Details
bento-section.tsx
ADDED
@@ -0,0 +1,45 @@
"use client";

import { SectionHeader } from "@/components/home/section-header";
import { siteConfig } from "@/lib/home";

export function BentoSection() {
  const { title, description, items } = siteConfig.bentoSection;

  return (
    <section
      id="bento"
      className="flex flex-col items-center justify-center w-full relative"
    >
      <div className="border-x mx-auto relative w-full max-w-6xl px-6">
        <SectionHeader>
          <h2 className="text-3xl md:text-4xl font-medium tracking-tighter text-center text-balance pb-1">
            {title}
          </h2>
          <p className="text-muted-foreground text-center text-balance font-medium">
            {description}
          </p>
        </SectionHeader>

        <div className="grid grid-cols-1 md:grid-cols-2 overflow-hidden">
          {items.map((item) => (
            <div
              key={item.id}
              className="flex flex-col items-start justify-end min-h-[600px] md:min-h-[500px] p-0.5 relative before:absolute before:-left-0.5 before:top-0 before:z-10 before:h-screen before:w-px before:bg-border before:content-[''] after:absolute after:-top-0.5 after:left-0 after:z-10 after:h-px after:w-screen after:bg-border after:content-[''] group cursor-pointer max-h-[400px] group"
            >
              <div className="relative flex size-full items-center justify-center h-full overflow-hidden">
                {item.content}
              </div>
              <div className="flex-1 flex-col gap-2 p-6">
                <h3 className="text-lg tracking-tighter font-semibold">
                  {item.title}
                </h3>
                <p className="text-muted-foreground">{item.description}</p>
              </div>
            </div>
          ))}
        </div>
      </div>
    </section>
  );
}
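BentoSection reads `siteConfig.bentoSection` from `@/lib/home`. From the destructuring and the fields used in the map, the config is expected to look roughly like the sketch below; the interface name is invented here and only the listed fields are implied by the component.

```ts
import type { ReactNode } from "react";

// Inferred shape of siteConfig.bentoSection as consumed by BentoSection (illustrative).
interface BentoSectionConfig {
  title: string;
  description: string;
  items: {
    id: string | number;   // used as the React key
    title: string;
    description: string;
    content: ReactNode;    // rendered inside the media area of each card
  }[];
}
```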
billing.py
ADDED
@@ -0,0 +1,125 @@
from datetime import datetime, timezone
from typing import Dict, Optional, Tuple
from utils.logger import logger
from utils.config import config, EnvMode

# Define subscription tiers and their monthly limits (in minutes)
SUBSCRIPTION_TIERS = {
    'price_1RGJ9GG6l1KZGqIroxSqgphC': {'name': 'free', 'minutes': 8},
    'price_1RGJ9LG6l1KZGqIrd9pwzeNW': {'name': 'base', 'minutes': 300},
    'price_1RGJ9JG6l1KZGqIrVUU4ZRv6': {'name': 'extra', 'minutes': 2400}
}

async def get_account_subscription(client, account_id: str) -> Optional[Dict]:
    """Get the current subscription for an account."""
    result = await client.schema('basejump').from_('billing_subscriptions') \
        .select('*') \
        .eq('account_id', account_id) \
        .eq('status', 'active') \
        .order('created', desc=True) \
        .limit(1) \
        .execute()

    if result.data and len(result.data) > 0:
        return result.data[0]
    return None

async def calculate_monthly_usage(client, account_id: str) -> float:
    """Calculate total agent run minutes for the current month for an account."""
    # Get start of current month in UTC
    now = datetime.now(timezone.utc)
    start_of_month = datetime(now.year, now.month, 1, tzinfo=timezone.utc)

    # First get all threads for this account
    threads_result = await client.table('threads') \
        .select('thread_id') \
        .eq('account_id', account_id) \
        .execute()

    if not threads_result.data:
        return 0.0

    thread_ids = [t['thread_id'] for t in threads_result.data]

    # Then get all agent runs for these threads in current month
    runs_result = await client.table('agent_runs') \
        .select('started_at, completed_at') \
        .in_('thread_id', thread_ids) \
        .gte('started_at', start_of_month.isoformat()) \
        .execute()

    if not runs_result.data:
        return 0.0

    # Calculate total minutes
    total_seconds = 0
    now_ts = now.timestamp()

    for run in runs_result.data:
        start_time = datetime.fromisoformat(run['started_at'].replace('Z', '+00:00')).timestamp()
        if run['completed_at']:
            end_time = datetime.fromisoformat(run['completed_at'].replace('Z', '+00:00')).timestamp()
        else:
            # For running jobs, use current time
            end_time = now_ts

        total_seconds += (end_time - start_time)

    return total_seconds / 60  # Convert to minutes

async def check_billing_status(client, account_id: str) -> Tuple[bool, str, Optional[Dict]]:
    """
    Check if an account can run agents based on their subscription and usage.

    Returns:
        Tuple[bool, str, Optional[Dict]]: (can_run, message, subscription_info)
    """
    if config.ENV_MODE == EnvMode.LOCAL:
        logger.info("Running in local development mode - billing checks are disabled")
        return True, "Local development mode - billing disabled", {
            "price_id": "local_dev",
            "plan_name": "Local Development",
            "minutes_limit": "no limit"
        }

    # For staging/production, check subscription status

    # Get current subscription
    subscription = await get_account_subscription(client, account_id)

    # If no subscription, they can use free tier
    if not subscription:
        subscription = {
            'price_id': 'price_1RGJ9GG6l1KZGqIroxSqgphC',  # Free tier
            'plan_name': 'free'
        }

    # if not subscription or subscription.get('price_id') is None or subscription.get('price_id') == 'price_1RGJ9GG6l1KZGqIroxSqgphC':
    #     return False, "You are not subscribed to any plan. Please upgrade your plan to continue.", subscription

    # Get tier info
    tier_info = SUBSCRIPTION_TIERS.get(subscription['price_id'])
    if not tier_info:
        return False, "Invalid subscription tier", subscription

    # Calculate current month's usage
    current_usage = await calculate_monthly_usage(client, account_id)

    # Check if within limits
    if current_usage >= tier_info['minutes']:
        return False, f"Monthly limit of {tier_info['minutes']} minutes reached. Please upgrade your plan or wait until next month.", subscription

    return True, "OK", subscription

# Helper function to get account ID from thread
async def get_account_id_from_thread(client, thread_id: str) -> Optional[str]:
    """Get the account ID associated with a thread."""
    result = await client.table('threads') \
        .select('account_id') \
        .eq('thread_id', thread_id) \
        .limit(1) \
        .execute()

    if result.data and len(result.data) > 0:
        return result.data[0]['account_id']
    return None
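The accounting in `calculate_monthly_usage` is simple enough to mirror client-side when showing an estimated quota: each run contributes `(completed_at or now) - started_at`, summed and converted to minutes, then compared against the tier's `minutes` limit (8 free, 300 base, 2400 extra). A TypeScript sketch of the same arithmetic; the function name is illustrative and the row shape matches the `agent_runs` columns selected above.

```ts
interface AgentRunRow {
  started_at: string;           // ISO timestamp
  completed_at: string | null;  // null while the run is still active
}

// Mirrors billing.py: open-ended runs are counted up to "now".
export function estimateMonthlyMinutes(runs: AgentRunRow[], now: Date = new Date()): number {
  const totalSeconds = runs.reduce((sum, run) => {
    const start = Date.parse(run.started_at);
    const end = run.completed_at ? Date.parse(run.completed_at) : now.getTime();
    return sum + (end - start) / 1000;
  }, 0);
  return totalSeconds / 60;
}

// Example: a single run from 10:00:00 to 10:03:30 counts as 3.5 minutes
// against the free tier's 8-minute monthly limit.
```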