George-API committed
Commit decfb95 · verified · 1 Parent(s): bbd5ba9

Upload folder using huggingface_hub

Files changed (2):
  1. app.py +24 -9
  2. install.py +153 -0
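
Note: the commit message above is the default message produced by huggingface_hub's folder upload. For context, a minimal sketch of how such an upload is typically performed; the repo_id and folder_path below are hypothetical placeholders, not taken from this commit:

# Minimal sketch, assuming an authenticated huggingface_hub environment (HF_TOKEN or cached login).
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path=".",                      # local directory to push (placeholder)
    repo_id="George-API/example-space",   # placeholder Space id
    repo_type="space",
    commit_message="Upload folder using huggingface_hub",
)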
app.py CHANGED
@@ -7,7 +7,9 @@ import json
  import logging
  import subprocess
  import time
+ import traceback
  from datetime import datetime
+ from pathlib import Path

  # Configure logging to match HF Space logs
  logging.basicConfig(
@@ -36,17 +38,13 @@ CONFIG_DIR = "."
  TRANSFORMERS_CONFIG = os.path.join(CONFIG_DIR, "transformers_config.json")

  def load_config(config_path):
-     """Load configuration from JSON file."""
+     """Load configuration from a JSON file."""
      try:
-         if os.path.exists(config_path):
-             with open(config_path, 'r') as f:
-                 return json.load(f)
-         else:
-             log_info(f"Config file not found: {config_path}")
-             return None
+         with open(config_path, 'r') as f:
+             return json.load(f)
      except Exception as e:
          log_info(f"Error loading config: {str(e)}")
-         return None
+         return {}

  def display_config():
      """Display current training configuration."""
@@ -174,7 +172,24 @@ def create_interface():
      return demo

  if __name__ == "__main__":
-     # If run directly, create and launch the Gradio interface
+     # Print basic system information to help with debugging
+     try:
+         import torch
+         logger.info(f"Python: {sys.version.split()[0]}")
+         logger.info(f"PyTorch: {torch.__version__}")
+         logger.info(f"CUDA available: {torch.cuda.is_available()}")
+         if torch.cuda.is_available():
+             logger.info(f"CUDA device: {torch.cuda.get_device_name(0)}")
+
+         from transformers import __version__ as tf_version
+         logger.info(f"Transformers: {tf_version}")
+
+         from unsloth import __version__ as un_version
+         logger.info(f"Unsloth: {un_version}")
+     except Exception as e:
+         logger.warning(f"Error printing system info: {e}")
+
+     # Create and launch the Gradio interface
      demo = create_interface()
      demo.queue()
      demo.launch()
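
Note on the load_config change: on any failure (including a missing file, now caught by the except block) it returns an empty dict instead of None, so callers can use dict lookups without a None check. A minimal sketch of the intended call pattern; the config key and default shown are illustrative, not taken from this commit:

# Hypothetical caller, assuming the new load_config() that returns {} on failure.
config = load_config(TRANSFORMERS_CONFIG)
model_name = config.get("model_name", "unsloth/phi-4")  # placeholder key and default
log_info(f"Training model: {model_name}")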
install.py ADDED
@@ -0,0 +1,153 @@
+ #!/usr/bin/env python
+ # coding=utf-8
+
+ """
+ Installation script for Hugging Face Space setup.
+ This script ensures all dependencies are installed correctly
+ during the Space build process.
+ """
+
+ import os
+ import sys
+ import subprocess
+ import logging
+ import traceback
+ from pathlib import Path
+
+ # Configure logging
+ logging.basicConfig(
+     level=logging.INFO,
+     format="%(asctime)s - %(levelname)s - %(message)s",
+     handlers=[logging.StreamHandler(sys.stdout)]
+ )
+ logger = logging.getLogger(__name__)
+
+ def run_command(cmd, description=""):
+     """Run a shell command and log the output."""
+     logger.info(f"Running: {description if description else cmd}")
+     try:
+         process = subprocess.run(
+             cmd,
+             shell=True,
+             check=True,
+             stdout=subprocess.PIPE,
+             stderr=subprocess.PIPE,
+             text=True
+         )
+         logger.info(f"Command output: {process.stdout}")
+         return True
+     except subprocess.CalledProcessError as e:
+         logger.error(f"Command failed with exit code {e.returncode}")
+         logger.error(f"Error output: {e.stderr}")
+         return False
+
+ def install_dependencies():
+     """Install all required dependencies in the correct order."""
+     current_dir = Path(__file__).parent
+     req_path = current_dir / "requirements.txt"
+
+     if not req_path.exists():
+         logger.error(f"Requirements file not found: {req_path}")
+         return False
+
+     try:
+         # Step 1: Upgrade pip
+         run_command(f"{sys.executable} -m pip install --upgrade pip", "Upgrading pip")
+
+         # Step 2: Install a pinned torch version for CUDA compatibility
+         # (version spec quoted so the shell does not treat '>' or '<' as redirection)
+         run_command(
+             f"{sys.executable} -m pip install 'torch>=2.0.0,<2.2.0' --extra-index-url https://download.pytorch.org/whl/cu118",
+             "Installing PyTorch with CUDA support"
+         )
+
+         # Step 3: Install base dependencies
+         run_command(
+             f"{sys.executable} -m pip install transformers accelerate bitsandbytes peft einops",
+             "Installing ML dependencies"
+         )
+
+         # Step 4: Install unsloth separately (spec quoted for the same reason)
+         run_command(
+             f"{sys.executable} -m pip install 'unsloth>=2024.3'",
+             "Installing Unsloth"
+         )
+
+         # Step 5: Install all remaining requirements
+         run_command(
+             f"{sys.executable} -m pip install -r {req_path}",
+             "Installing all requirements"
+         )
+
+         # Verify critical packages
+         import_check = verify_imports()
+         if not import_check:
+             logger.error("Failed to verify critical packages")
+             return False
+
+         logger.info("All dependencies installed successfully!")
+         return True
+
+     except Exception as e:
+         logger.error(f"Error installing dependencies: {str(e)}")
+         traceback.print_exc()
+         return False
+
+ def verify_imports():
+     """Verify that critical packages can be imported."""
+     critical_packages = [
+         "torch", "transformers", "unsloth", "peft",
+         "gradio", "accelerate", "bitsandbytes"
+     ]
+
+     success = True
+     for package in critical_packages:
+         try:
+             module = __import__(package)
+             version = getattr(module, "__version__", "unknown")
+             logger.info(f"Successfully imported {package} (version: {version})")
+         except ImportError:
+             logger.error(f"CRITICAL: Failed to import {package}")
+             success = False
+         except Exception as e:
+             logger.error(f"Error verifying {package}: {str(e)}")
+             success = False
+
+     # Check CUDA
+     try:
+         import torch
+         cuda_available = torch.cuda.is_available()
+         device_count = torch.cuda.device_count() if cuda_available else 0
+         if cuda_available:
+             device_name = torch.cuda.get_device_name(0)
+             logger.info(f"CUDA available - Devices: {device_count}, Name: {device_name}")
+         else:
+             logger.warning("CUDA not available - this might affect performance")
+     except Exception as e:
+         logger.error(f"Error checking CUDA: {str(e)}")
+
+     return success
+
+ def main():
+     logger.info("Starting installation for Phi-4 Unsloth Training Space")
+
+     try:
+         # Install dependencies
+         if not install_dependencies():
+             logger.error("Failed to install dependencies")
+             sys.exit(1)
+
+         # Create marker file to show successful installation
+         with open("INSTALL_SUCCESS.txt", "w") as f:
+             f.write("Installation completed successfully")
+
+         logger.info("Installation completed successfully")
+         return 0
+
+     except Exception as e:
+         logger.error(f"Installation failed with error: {str(e)}")
+         traceback.print_exc()
+         return 1
+
+ if __name__ == "__main__":
+     exit_code = main()
+     sys.exit(exit_code)
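
Note: the commit does not show how install.py is invoked during the Space build. A hedged sketch of one common wiring, using the INSTALL_SUCCESS.txt marker the script writes so installation only runs once; the startup hook itself is an assumption:

# Hypothetical startup hook (e.g. near the top of app.py): run install.py once,
# then skip it on later restarts if the success marker already exists.
import subprocess, sys
from pathlib import Path

if not Path("INSTALL_SUCCESS.txt").exists():
    result = subprocess.run([sys.executable, "install.py"], check=False)
    if result.returncode != 0:
        print("install.py failed; see logs above", file=sys.stderr)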