0408happyfeet committed on
Commit
5ee4fa6
·
verified ·
1 Parent(s): f4adc28

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +47 -0
app.py CHANGED
@@ -10,6 +10,53 @@ from autogluon.tabular import TabularPredictor
10
  MODEL_REPO_ID = "rlogh/cheese-texture-autogluon-classifier"
11
  DATASET_ID = "aslan-ng/cheese-tabular"
12
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  def _safe_concat_splits(ds):
14
  frames = []
15
  for split in ds.keys():
 
10
  MODEL_REPO_ID = "rlogh/cheese-texture-autogluon-classifier"
11
  DATASET_ID = "aslan-ng/cheese-tabular"
12
 
13
# Feature flags and model configuration for the explanation engine.
# Everything is overridable via environment variables so the Space can
# switch engines without code changes.
import json
import os
import urllib.request

# String values treated as "enabled" for boolean env flags (shared so the
# two flags below stay consistent).
_TRUTHY = {"1", "true", "yes"}

# Run a local transformers pipeline when enabled (downloads the model).
USE_HF_LOCAL = os.getenv("USE_HF", "0").lower() in _TRUTHY
# Call the hosted HF Inference API instead (no local downloads).
USE_HF_API = os.getenv("USE_HF_API", "0").lower() in _TRUTHY
# Model id used by both the remote API and the local pipeline.
HF_MODEL = os.getenv("HF_MODEL", "google/flan-t5-small")
# API token; required for the remote Inference API path (None when unset).
HF_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
20
+
21
def _hf_inference_api(prompt: str) -> str:
    """Call the HF Inference API for text2text generation; avoids local downloads.

    Parameters
    ----------
    prompt : str
        Text prompt sent as the ``inputs`` field of the request body.

    Returns
    -------
    str
        The generated text, stripped of surrounding whitespace. If the
        provider returns an unexpected schema, its string form is returned
        instead so the caller's fallback note can surface it.

    Raises
    ------
    urllib.error.URLError
        On network failure or a non-2xx HTTP status (the caller catches
        broadly and falls back to the template engine).
    """
    url = f"https://api-inference.huggingface.co/models/{HF_MODEL}"
    headers = {"Content-Type": "application/json"}
    # Attach the Authorization header only when a token is configured;
    # otherwise the literal string "Bearer None" would be sent.
    if HF_TOKEN:
        headers["Authorization"] = f"Bearer {HF_TOKEN}"
    req = urllib.request.Request(
        url,
        data=json.dumps({"inputs": prompt}).encode("utf-8"),
        headers=headers,
        method="POST",
    )
    with urllib.request.urlopen(req, timeout=60) as resp:
        data = json.loads(resp.read().decode("utf-8"))
    return _extract_generated_text(data)


def _extract_generated_text(data) -> str:
    """Normalize the Inference API response to a plain string.

    HF returns either a list of dicts with 'generated_text' or a single
    dict with 'generated_text'; anything else is stringified as a fallback.
    """
    if isinstance(data, list) and data and "generated_text" in data[0]:
        return data[0]["generated_text"].strip()
    if isinstance(data, dict) and "generated_text" in data:
        return data["generated_text"].strip()
    # Fallback if the provider returns a different schema.
    return str(data).strip()
39
+
40
def explain(structured: dict, engine: str = "auto") -> str:
    """Produce a natural-language explanation for *structured* prediction data.

    Engines are tried in priority order:
      1. Remote HF Inference API (preferred for Spaces; no model downloads),
         when ``USE_HF_API`` is enabled and a token is configured.
      2. Local transformers pipeline, when ``USE_HF_LOCAL`` is enabled.
      3. Deterministic template (always available).
    Any engine failure falls back to the template with a visible note.

    Parameters
    ----------
    structured : dict
        Structured prediction output passed to ``build_prompt`` and
        ``explain_template``.
    engine : str, default "auto"
        # NOTE(review): accepted but currently unused — the env flags alone
        # select the engine. Kept for interface compatibility; confirm
        # whether callers expect it to force a specific engine.

    Returns
    -------
    str
        The explanation text, possibly suffixed with a fallback note.
    """
    prompt = build_prompt(structured)

    def _template_with_note(err: Exception) -> str:
        # Shared fallback path: deterministic template plus a note so the
        # user can see that the LLM engine failed and why.
        return explain_template(structured) + f"\n\n_Explanation engine fell back to template: {err}_"

    # 1) Remote API (preferred for Spaces)
    if USE_HF_API and HF_TOKEN:
        try:
            return _hf_inference_api(prompt)
        except Exception as e:
            return _template_with_note(e)

    # 2) Local transformers (optional; imports lazily so the dependency is
    #    only required when the flag is on)
    if USE_HF_LOCAL:
        try:
            from transformers import pipeline
            pipe = pipeline("text2text-generation", model=HF_MODEL)
            out = pipe(prompt, max_new_tokens=180, do_sample=False)
            return out[0]["generated_text"].strip()
        except Exception as e:
            return _template_with_note(e)

    # 3) Deterministic template
    return explain_template(structured)
59
+
60
  def _safe_concat_splits(ds):
61
  frames = []
62
  for split in ds.keys():