#
# This PowerShell script installs the Hugging Face transformers library
# and patches it to include the 'Echo' model architecture from
# MythWorxAI. This allows you to load the model without needing
# the `trust_remote_code=True` flag.
#
# The script will:
# 1. Ensure dependencies like Python, pip, and web access are available.
# 2. Install the `transformers`, `torch`, and `accelerate` libraries.
# 3. Locate the installed `transformers` library path using a robust method.
# 4. Create a new directory for the 'echo' model inside the library.
# 5. Download the necessary model definition files from the Hugging Face Hub.
# 6. Create an __init__.py file to make the new model a proper module.
#
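#
# Usage (a sketch; the file name below is simply whatever you saved this script as):
#   powershell -ExecutionPolicy Bypass -File .\patch_transformers_echo.ps1
#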
# Stop the script if any command fails
$ErrorActionPreference = "Stop"
# --- Helper Functions ---
function Get-CommandPath($command) {
    return (Get-Command $command -ErrorAction SilentlyContinue).Source
}
# --- Dependency Checks ---
Write-Host "▶ Checking for required tools (python, pip)..."
$pythonCmdPath = Get-CommandPath "python"
if ([string]::IsNullOrWhiteSpace($pythonCmdPath)) {
    Write-Host "❌ Error: Python is not installed or not in your PATH. Please install Python 3 and try again." -ForegroundColor Red
    exit 1
}
$pipCmdPath = Get-CommandPath "pip"
if ([string]::IsNullOrWhiteSpace($pipCmdPath)) {
    Write-Host "❌ Error: pip is not installed or not in your PATH. Please install pip and try again." -ForegroundColor Red
    exit 1
}
Write-Host "✅ All tools are available." -ForegroundColor Green
# --- Installation ---
Write-Host "`n▶ Installing Hugging Face libraries (transformers, torch, accelerate)..."
# Use '&' to execute the command stored in the variable
& $pipCmdPath install transformers torch accelerate --quiet
Write-Host "✅ Libraries installed successfully." -ForegroundColor Green
# --- Patching ---
Write-Host "`n▶ Locating transformers installation..."
# Use `pip show` to robustly find the package location.
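# Its output contains a line of the form "Location: <site-packages path>",
# e.g. "Location: C:\Python311\Lib\site-packages" (the path shown here is illustrative).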
$pipShowOutput = & $pipCmdPath show transformers
$locationLine = $pipShowOutput | Where-Object { $_ -match '^Location:' } | Select-Object -First 1
$sitePackagesPath = if ($locationLine) { ($locationLine -split ': ', 2)[1].Trim() } else { $null }
if ([string]::IsNullOrWhiteSpace($sitePackagesPath)) {
    Write-Host "❌ Error: Could not determine transformers library location via 'pip show'." -ForegroundColor Red
    exit 1
}
# The final path is the site-packages location + the 'transformers' directory name.
$transformersPath = Join-Path -Path $sitePackagesPath -ChildPath "transformers"
if (-not (Test-Path -Path $transformersPath -PathType Container)) {
    Write-Host "❌ Error: The transformers directory was not found at the expected path: $transformersPath" -ForegroundColor Red
    exit 1
}
Write-Host "✅ Found transformers at: $transformersPath" -ForegroundColor Green
# Use Join-Path to correctly build the model's path for Windows
$modelPath = Join-Path -Path $transformersPath -ChildPath "models\echo"
# Check if the directory already exists
if (Test-Path -Path $modelPath -PathType Container) {
    Write-Host "✅ Patch directory '$modelPath' already exists. No action needed." -ForegroundColor Yellow
    Write-Host "`n🎉 Patching complete! You can now use 'Echo' models." -ForegroundColor Cyan
    exit 0
}
Write-Host "`n▶ Applying patch: Creating 'echo' model directory..."
# -Force ensures parent directories are created if they don't exist
New-Item -Path $modelPath -ItemType Directory -Force | Out-Null
Write-Host "✅ Directory created." -ForegroundColor Green
Write-Host "▶ Downloading model architecture files..."
$configUrl = "https://huggingface.co/MythWorxAI/Echo-mini/raw/main/configuration_echo.py"
$modelingUrl = "https://huggingface.co/MythWorxAI/Echo-mini/raw/main/modeling_echo.py"
$configOutFile = Join-Path -Path $modelPath -ChildPath "configuration_echo.py"
$modelingOutFile = Join-Path -Path $modelPath -ChildPath "modeling_echo.py"
# Define a User-Agent to mimic a browser
$userAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36"
# Ensure modern TLS security protocol is used
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
try {
    Write-Host "Downloading from: $configUrl"
    Invoke-WebRequest -Uri $configUrl -OutFile $configOutFile -UserAgent $userAgent -UseBasicParsing
    Write-Host "Downloading from: $modelingUrl"
    Invoke-WebRequest -Uri $modelingUrl -OutFile $modelingOutFile -UserAgent $userAgent -UseBasicParsing
    Write-Host "✅ Model files downloaded." -ForegroundColor Green
}
catch {
    # Provide more detailed error information
    $errorMessage = "❌ Error downloading files. "
    if ($_.Exception.Response) {
        $statusCode = [int]$_.Exception.Response.StatusCode
        $errorMessage += "Status Code: $statusCode. "
        $statusDescription = $_.Exception.Response.StatusDescription
        $errorMessage += "Description: $statusDescription."
    } else {
        $errorMessage += "Details: $($_.Exception.Message)"
    }
    Write-Host $errorMessage -ForegroundColor Red
    exit 1
}
Write-Host "▶ Finalizing module structure..."
# Create an __init__.py to make `echo` a recognizable Python module
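# An empty file is sufficient: Python only requires __init__.py to exist for the
# directory to be importable as a package.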
$initFile = Join-Path -Path $modelPath -ChildPath "__init__.py"
New-Item -Path $initFile -ItemType File -Force | Out-Null
Write-Host "✅ Module created." -ForegroundColor Green
# --- Completion ---
Write-Host "`n🎉 Patching complete! The 'transformers' library now natively supports 'echo' models." -ForegroundColor Cyan
Write-Host " You can now load 'MythWorxAI/Echo-mini' without 'trust_remote_code=True'."
# --- Test Code ---
Write-Host "`n🧪 To test the installation, run the following Python code:" -ForegroundColor Yellow
Write-Host @"
from transformers import AutoTokenizer, AutoModelForCausalLM
model_id = 'MythWorxAI/Echo-mini'
print(f"Loading model: {model_id}")
# This now works without trust_remote_code=True
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)
print('✅ Model and tokenizer loaded successfully!')
print(model.config)
"@