Spaces: Running on Zero
Merge pull request #13 from huggingface/feature/add-support-for-zero-gpu
app/app.py +1 -0
app/app.py CHANGED
@@ -49,6 +49,7 @@ def create_inference_client(
     model: Optional[str] = None, base_url: Optional[str] = None
 ) -> InferenceClient:
     """Create an InferenceClient instance with the given model or environment settings.
+    This function will run the model locally if ZERO_GPU is set to True.
 
     Args:
         model: Optional model identifier to use. If not provided, will use environment settings.
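The hunk itself only adds a docstring line; the local-inference behavior it describes is not part of this diff. As a rough sketch, a ZERO_GPU switch inside create_inference_client could look something like the code below, where the ZERO_GPU flag parsing, the MODEL_ID and HF_TOKEN environment variables, and the placeholder local path are all assumptions rather than code from this PR:

import os
from typing import Optional

from huggingface_hub import InferenceClient


def create_inference_client(
    model: Optional[str] = None, base_url: Optional[str] = None
) -> InferenceClient:
    """Create an InferenceClient instance with the given model or environment settings.
    This function will run the model locally if ZERO_GPU is set to True.

    Args:
        model: Optional model identifier to use. If not provided, will use environment settings.
        base_url: Optional base URL pointing at a custom inference endpoint.
    """
    # Assumed ZERO_GPU flag check; the actual local (ZeroGPU) code path is not shown in this PR.
    if os.environ.get("ZERO_GPU", "False").lower() == "true":
        raise NotImplementedError("Local ZeroGPU inference path is outside this diff.")

    # Remote inference: prefer an explicit base_url, otherwise fall back to a model id
    # taken from the argument or the (assumed) MODEL_ID environment variable.
    if base_url:
        return InferenceClient(base_url=base_url, token=os.environ.get("HF_TOKEN"))
    return InferenceClient(
        model=model or os.environ.get("MODEL_ID"),
        token=os.environ.get("HF_TOKEN"),
    )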