FauziIsyrinApridal committed
Commit 5565d95 · Parent: 7940d99

test environ

app/api/download-all/route.ts CHANGED
@@ -2,25 +2,51 @@ import { NextRequest } from "next/server";
 import JSZip from "jszip";
 import { createClient } from "@supabase/supabase-js";
 
-export const dynamic = "force-dynamic";
-
 export async function GET(req: NextRequest) {
   const supabase = createClient(
     process.env.NEXT_PUBLIC_SUPABASE_URL!,
     process.env.NEXT_PUBLIC_SUPABASE_SERVICE_KEY!,
   );
 
-  // Validate environment variables
+  try {
+    // Fetch the list of files
+    const { data: files, error: listError } = await supabase.storage
+      .from("pnp-bot-storage")
+      .list("", { limit: 1000 }); // "" is the bucket root; adjust if a prefix is used
 
-  // Add this debug logging to see what's happening
-  console.log("URL:", process.env.NEXT_PUBLIC_SUPABASE_URL);
-  console.log(
-    "Service Key:",
-    process.env.NEXT_PUBLIC_SUPABASE_SERVICE_KEY ? "Present" : "Missing",
-  );
+    if (listError || !files) {
+      return new Response(
+        JSON.stringify({
+          error: listError?.message || "Gagal mengambil daftar file.",
+        }),
+        { status: 500 },
+      );
+    }
 
-  try {
-    // ... rest of your code
+    const zip = new JSZip();
+
+    // Download every file and add it to the zip
+    for (const file of files) {
+      const { data: fileData, error: downloadError } = await supabase.storage
+        .from("pnp-bot-storage")
+        .download(file.name);
+
+      if (!downloadError && fileData) {
+        const arrayBuffer = await fileData.arrayBuffer();
+        zip.file(file.name, arrayBuffer);
+      }
+    }
+
+    // Generate the zip as a Node buffer
+    const zipBuffer = await zip.generateAsync({ type: "nodebuffer" });
+
+    return new Response(zipBuffer, {
+      status: 200,
+      headers: {
+        "Content-Type": "application/zip",
+        "Content-Disposition": "attachment; filename=all-files.zip",
+      },
+    });
   } catch (error) {
     console.error("ZIP error:", error);
     return new Response(
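Note: for quick manual verification, the new route can be exercised over HTTP. A minimal smoke-test sketch, assuming the Next.js app is running locally on port 3000 (the base URL and output filename are placeholders, not part of this commit):

# Smoke test for the download-all route (base URL is an assumption).
import requests

resp = requests.get("http://localhost:3000/api/download-all", timeout=120)
resp.raise_for_status()

# The route sets Content-Type: application/zip on success.
assert resp.headers.get("Content-Type") == "application/zip"

with open("all-files.zip", "wb") as f:
    f.write(resp.content)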
scrapping/dosen_scrap.py CHANGED
@@ -3,11 +3,8 @@ from scrapy.crawler import CrawlerProcess
 from datetime import datetime
 import re
 from supabase import create_client
-from dotenv import load_dotenv
 import os
 
-# Load environment variables
-load_dotenv()
 
 class DosenSpider(scrapy.Spider):
     name = 'dosen_spider'
@@ -27,10 +24,10 @@ class DosenSpider(scrapy.Spider):
         super(DosenSpider, self).__init__(*args, **kwargs)
         # Initialize Supabase client
         self.supabase = create_client(
-            os.getenv("NEXT_PUBLIC_SUPABASE_URL"),
-            os.getenv("NEXT_PUBLIC_SUPABASE_SERVICE_KEY")
+            os.environ.get("NEXT_PUBLIC_SUPABASE_URL"),
+            os.environ.get("NEXT_PUBLIC_SUPABASE_SERVICE_KEY")
         )
-        self.storage_bucket = os.getenv("NEXT_PUBLIC_SUPABASE_STORAGE_BUCKET")
+        self.storage_bucket = os.environ.get("NEXT_PUBLIC_SUPABASE_STORAGE_BUCKET")
         self.collected_data = []
 
     def parse(self, response):
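Note: os.getenv is a plain alias for os.environ.get, so the rename itself is cosmetic; the substantive change in this commit is dropping load_dotenv(), which means the variables must now be set in the process environment (e.g. as deployment secrets). A fail-fast guard along these lines could surface a missing variable before create_client does; this helper is illustrative, not part of the commit:

# Illustrative guard: fail early if any required variable is unset or empty.
import os

REQUIRED_VARS = (
    "NEXT_PUBLIC_SUPABASE_URL",
    "NEXT_PUBLIC_SUPABASE_SERVICE_KEY",
    "NEXT_PUBLIC_SUPABASE_STORAGE_BUCKET",
)

missing = [name for name in REQUIRED_VARS if not os.environ.get(name)]
if missing:
    raise RuntimeError("Missing environment variables: " + ", ".join(missing))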
scrapping/jadwal_scrap.py CHANGED
@@ -4,11 +4,9 @@ import os
 import re
 from datetime import datetime
 from supabase import create_client
-from dotenv import load_dotenv
 from io import StringIO
 
-# Load environment variables
-load_dotenv()
+
 
 class PnpSpider(scrapy.Spider):
     name = 'pnp_spider'
@@ -23,11 +21,11 @@ class PnpSpider(scrapy.Spider):
     def __init__(self, *args, **kwargs):
         super(PnpSpider, self).__init__(*args, **kwargs)
         # Initialize Supabase client
-        url = os.getenv("NEXT_PUBLIC_SUPABASE_URL")
-        key = os.getenv("NEXT_PUBLIC_SUPABASE_SERVICE_KEY")
+        url = os.environ.get("NEXT_PUBLIC_SUPABASE_URL")
+        key = os.environ.get("NEXT_PUBLIC_SUPABASE_SERVICE_KEY")
 
         self.supabase = create_client(url, key)
-        self.storage_bucket = os.getenv("NEXT_PUBLIC_SUPABASE_STORAGE_BUCKET")
+        self.storage_bucket = os.environ.get("NEXT_PUBLIC_SUPABASE_STORAGE_BUCKET")
 
         self.file_buffers = {}  # Dictionary to store StringIO objects
         self.current_date = datetime.now().strftime("%Y-%m-%d")
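Note: without a .env file, the variables can also be injected programmatically before the spider is instantiated, since PnpSpider reads them in __init__. A sketch, assuming the module is importable as scrapping.jadwal_scrap without triggering a crawl on import, and using placeholder values:

# Sketch: inject the variables, then run the spider (values are placeholders).
import os

os.environ.setdefault("NEXT_PUBLIC_SUPABASE_URL", "https://example.supabase.co")
os.environ.setdefault("NEXT_PUBLIC_SUPABASE_SERVICE_KEY", "placeholder-key")
os.environ.setdefault("NEXT_PUBLIC_SUPABASE_STORAGE_BUCKET", "pnp-bot-storage")

from scrapy.crawler import CrawlerProcess
from scrapping.jadwal_scrap import PnpSpider  # assumed import path

process = CrawlerProcess()
process.crawl(PnpSpider)
process.start()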
scrapping/jurusan_scrap.py CHANGED
@@ -1,16 +1,13 @@
 import scrapy
 from scrapy.crawler import CrawlerProcess
 from bs4 import BeautifulSoup
-from dotenv import load_dotenv
 from supabase import create_client
 from datetime import datetime
 import os, re, tempfile
 
-# Load environment variables
-load_dotenv()
-SUPABASE_URL = os.getenv("NEXT_PUBLIC_SUPABASE_URL")
-SUPABASE_KEY = os.getenv("NEXT_PUBLIC_SUPABASE_SERVICE_KEY")
-SUPABASE_BUCKET = os.getenv("NEXT_PUBLIC_SUPABASE_STORAGE_BUCKET", "pnp-bot-storage")
+SUPABASE_URL = os.environ.get("NEXT_PUBLIC_SUPABASE_URL")
+SUPABASE_KEY = os.environ.get("NEXT_PUBLIC_SUPABASE_SERVICE_KEY")
+SUPABASE_BUCKET = os.environ.get("NEXT_PUBLIC_SUPABASE_STORAGE_BUCKET", "pnp-bot-storage")
 
 
 
scrapping/pnp_scrap.py CHANGED
@@ -3,14 +3,12 @@ from scrapy.crawler import CrawlerProcess
 from datetime import datetime
 import re
 import os
-from dotenv import load_dotenv
 from supabase import create_client, Client
 
-# Load environment variables
-load_dotenv()
-SUPABASE_URL = os.getenv("NEXT_PUBLIC_SUPABASE_URL")
-SUPABASE_KEY = os.getenv("NEXT_PUBLIC_SUPABASE_SERVICE_KEY")
-SUPABASE_BUCKET = os.getenv("NEXT_PUBLIC_SUPABASE_STORAGE_BUCKET")
+
+SUPABASE_URL = os.environ.get("NEXT_PUBLIC_SUPABASE_URL")
+SUPABASE_KEY = os.environ.get("NEXT_PUBLIC_SUPABASE_SERVICE_KEY")
+SUPABASE_BUCKET = os.environ.get("NEXT_PUBLIC_SUPABASE_STORAGE_BUCKET")
 supabase: Client = create_client(SUPABASE_URL, SUPABASE_KEY)
 
 
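Note: pnp_scrap.py still calls create_client at module level, so it will raise at import time if the URL or key is unset in the environment. If deferring that is ever needed, a lazy accessor is one alternative pattern; this is a sketch, not what the commit does:

# Alternative sketch: create the client lazily so the module imports cleanly
# even when the environment is not yet configured.
import os
from functools import lru_cache
from supabase import create_client, Client

@lru_cache(maxsize=1)
def get_supabase() -> Client:
    url = os.environ.get("NEXT_PUBLIC_SUPABASE_URL")
    key = os.environ.get("NEXT_PUBLIC_SUPABASE_SERVICE_KEY")
    if not url or not key:
        raise RuntimeError("NEXT_PUBLIC_SUPABASE_URL and NEXT_PUBLIC_SUPABASE_SERVICE_KEY must be set")
    return create_client(url, key)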