#
# This PowerShell script installs the Hugging Face transformers library
# and patches it to include the 'Echo' model architecture from
# MythWorxAI. This allows you to load the model without needing
# the `trust_remote_code=True` flag.
#
# The script will:
# 1. Ensure dependencies like Python, pip, and web access are available.
# 2. Install the `transformers`, `torch`, and `accelerate` libraries.
# 3. Locate the installed `transformers` library path using a robust method.
# 4. Create a new directory for the 'echo' model inside the library.
# 5. Download the necessary model definition files from the Hugging Face Hub.
# 6. Create an __init__.py file to make the new model a proper module.
#

# Stop the script if any command fails
$ErrorActionPreference = "Stop"

# --- Helper Functions ---
function Get-CommandPath($command) {
    return (Get-Command $command -ErrorAction SilentlyContinue).Source
}

# --- Dependency Checks ---
Write-Host "▶ Checking for required tools (python, pip)..."

$pythonCmdPath = Get-CommandPath "python"
if ([string]::IsNullOrWhiteSpace($pythonCmdPath)) {
    Write-Host "❌ Error: Python is not installed or not in your PATH. Please install Python 3 and try again." -ForegroundColor Red
    exit 1
}

$pipCmdPath = Get-CommandPath "pip"
if ([string]::IsNullOrWhiteSpace($pipCmdPath)) {
    Write-Host "❌ Error: pip is not installed or not in your PATH. Please install pip and try again." -ForegroundColor Red
    exit 1
}

Write-Host "✅ All tools are available." -ForegroundColor Green

# --- Installation ---
Write-Host "`n▶ Installing Hugging Face libraries (transformers, torch, accelerate)..."

# Use '&' to execute the command stored in the variable.
# $ErrorActionPreference does not stop on native command failures, so check the exit code explicitly.
& $pipCmdPath install transformers torch accelerate --quiet
if ($LASTEXITCODE -ne 0) {
    Write-Host "❌ Error: pip failed to install the required libraries." -ForegroundColor Red
    exit 1
}

Write-Host "✅ Libraries installed successfully." -ForegroundColor Green

# --- Patching ---
Write-Host "`n▶ Locating transformers installation..."

# Use `pip show` to robustly find the package location.
$pipShowOutput = & $pipCmdPath show transformers
$locationLine = $pipShowOutput | Where-Object { $_ -match '^Location:' }
$sitePackagesPath = ($locationLine -split ': ', 2)[1].Trim()

if ([string]::IsNullOrWhiteSpace($sitePackagesPath)) {
    Write-Host "❌ Error: Could not determine transformers library location via 'pip show'." -ForegroundColor Red
    exit 1
}

# The final path is the site-packages location + the 'transformers' directory name.
$transformersPath = Join-Path -Path $sitePackagesPath -ChildPath "transformers"

if (-not (Test-Path -Path $transformersPath -PathType Container)) {
    Write-Host "❌ Error: The transformers directory was not found at the expected path: $transformersPath" -ForegroundColor Red
    exit 1
}

Write-Host "✅ Found transformers at: $transformersPath" -ForegroundColor Green

# Use Join-Path to correctly build the model's path for Windows
$modelPath = Join-Path -Path $transformersPath -ChildPath "models\echo"

# Check if the directory already exists
if (Test-Path -Path $modelPath -PathType Container) {
    Write-Host "✅ Patch directory '$modelPath' already exists. No action needed." -ForegroundColor Yellow
    Write-Host "`n🎉 Patching complete! You can now use 'Echo' models." -ForegroundColor Cyan
    exit 0
}

Write-Host "`n▶ Applying patch: Creating 'echo' model directory..."

# -Force ensures parent directories are created if they don't exist
New-Item -Path $modelPath -ItemType Directory -Force | Out-Null

Write-Host "✅ Directory created." -ForegroundColor Green

Write-Host "▶ Downloading model architecture files..."
$configUrl = "https://huggingface.co/MythWorxAI/Echo-mini/raw/main/configuration_echo.py" $modelingUrl = "https://huggingface.co/MythWorxAI/Echo-mini/raw/main/modeling_echo.py" $configOutFile = Join-Path -Path $modelPath -ChildPath "configuration_echo.py" $modelingOutFile = Join-Path -Path $modelPath -ChildPath "modeling_echo.py" # Define a User-Agent to mimic a browser $userAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36" # Ensure modern TLS security protocol is used [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 try { Write-Host "Downloading from: $configUrl" Invoke-WebRequest -Uri $configUrl -OutFile $configOutFile -UserAgent $userAgent -UseBasicParsing Write-Host "Downloading from: $modelingUrl" Invoke-WebRequest -Uri $modelingUrl -OutFile $modelingOutFile -UserAgent $userAgent -UseBasicParsing Write-Host "✅ Model files downloaded." -ForegroundColor Green } catch { # Provide more detailed error information $errorMessage = "❌ Error downloading files. " if ($_.Exception.Response) { $statusCode = [int]$_.Exception.Response.StatusCode $errorMessage += "Status Code: $statusCode. " $statusDescription = $_.Exception.Response.StatusDescription $errorMessage += "Description: $statusDescription." } else { $errorMessage += "Details: $($_.Exception.Message)" } Write-Host $errorMessage -ForegroundColor Red exit 1 } Write-Host "▶ Finalizing module structure..." # Create an __init__.py to make `echo` a recognizable Python module $initFile = Join-Path -Path $modelPath -ChildPath "__init__.py" New-Item -Path $initFile -ItemType File -Force | Out-Null Write-Host "✅ Module created." -ForegroundColor Green # --- Completion --- Write-Host "`n🎉 Patching complete! The 'transformers' library now natively supports 'echo' models." -ForegroundColor Cyan Write-Host " You can now load 'MythWorxAI/Echo-mini' without 'trust_remote_code=True'." # --- Test Code --- Write-Host "`n🧪 To test the installation, run the following Python code:" -ForegroundColor Yellow Write-Host @" from transformers import AutoTokenizer, AutoModelForCausalLM model_id = 'MythWorxAI/Echo-mini' print(f"Loading model: {model_id}") # This now works without trust_remote_code=True tokenizer = AutoTokenizer.from_pretrained(model_id) model = AutoModelForCausalLM.from_pretrained(model_id) print('✅ Model and tokenizer loaded successfully!') print(model.config) "@